blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 213
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 246
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ddb356445bf02c7df3723d467bc763ca2c73ba9e | 41f81d8496262182c73e855e9d3d4fcee8dc659d | /emailSpammer/spam.py | c607dec4db5b686b1519479c207a8685ce7e851c | [] | no_license | ishank-dev/MyPythonScripts | 5e372bf829941e3db5746cc32cdaf2fddb33f0a8 | a50e17f0a5bd45b8086e429abff3d9a1d42286d5 | refs/heads/master | 2021-07-11T09:41:15.994267 | 2019-10-14T16:38:25 | 2019-10-14T16:38:25 | 209,132,738 | 1 | 4 | null | 2020-10-14T14:21:45 | 2019-09-17T18:50:19 | Python | UTF-8 | Python | false | false | 395 | py | import smtplib
# FIX: the original loop was a SyntaxError (missing colon and an unindented
# body). Intent appears to be: open a fresh SMTP session and send the same
# message five times.
for i in range(0, 5):
    conn = smtplib.SMTP('smtp.gmail.com', 587)  # connect to gmail
    conn.ehlo()
    conn.starttls()  # upgrade the connection to TLS before authenticating
    conn.login('your_email', 'your_password')  # write your email and password for the gmail account
    conn.sendmail('your_email', 'recipient_address', 'Subject: Write email subject here\n\n Write the message here ')  # write your email and recipient address here
    conn.quit()
| [
"noreply@github.com"
] | ishank-dev.noreply@github.com |
1e07bbeff0fb13fa145c80101d396935d33a0423 | 6b14d9a64a578239e5612e6098320b61b45c08d9 | /AUG14/04.py | 27bec86f2774038bbdffc335d52b45f500521bfc | [
"MIT"
] | permissive | Razdeep/PythonSnippets | 498c403140fec33ee2f0dd84801738f1256ee9dd | 76f9313894f511c487a99bc38bdf0fe5e594caf5 | refs/heads/master | 2020-03-26T08:56:23.067022 | 2018-11-26T05:36:36 | 2018-11-26T05:36:36 | 144,726,845 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 67 | py | # String slicing
# String slicing demo: take a slice and a single character from a sample string.
test = 'Hello world'
middle = test[1:5]    # characters at indices 1..4
seventh = test[6]     # single character at index 6
print(middle)
print(seventh)
"rrajdeeproychowdhury@gmail.com"
] | rrajdeeproychowdhury@gmail.com |
c1fda1a470ad681c3a1a16d4e839b87151b19b33 | 6f6d215a4f0a1c30eeb5a08c8a36016fc351998a | /zcls/model/recognizers/resnet/torchvision_resnet.py | 040bc44da6892b30585f415d6130a4b2fe65cecc | [
"Apache-2.0"
] | permissive | Quebradawill/ZCls | ef9db2b54fbee17802f3342752e3d4fe4ef9d2c5 | ade3dc7fd23584b7ba597f24ec19c02ae847673e | refs/heads/master | 2023-04-15T23:25:18.195089 | 2021-04-29T07:05:46 | 2021-04-29T07:05:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,125 | py | # -*- coding: utf-8 -*-
"""
@date: 2021/2/20 上午10:28
@file: torchvision_resnet.py
@author: zj
@description:
"""
from abc import ABC
import torch.nn as nn
from torch.nn.modules.module import T
from torchvision.models.resnet import resnet18, resnet50, resnext50_32x4d
from zcls.config.key_word import KEY_OUTPUT
from zcls.model import registry
from zcls.model.norm_helper import freezing_bn
class TorchvisionResNet(nn.Module, ABC):
    """Recognizer wrapping a torchvision ResNet-family backbone.

    The chosen torchvision model is built as-is; when the target class count
    differs from the pretrained one, the final fully-connected layer is
    replaced with a freshly initialised head. ``forward`` returns the logits
    wrapped in a ``{KEY_OUTPUT: tensor}`` dict.
    """

    def __init__(self,
                 arch="resnet18",
                 num_classes=1000,
                 torchvision_pretrained=False,
                 pretrained_num_classes=1000,
                 fix_bn=False,
                 partial_bn=False,
                 zero_init_residual=False):
        super(TorchvisionResNet, self).__init__()
        self.num_classes = num_classes
        self.fix_bn = fix_bn
        self.partial_bn = partial_bn
        # Dispatch table replaces the original if/elif ladder.
        constructors = {
            'resnet18': resnet18,
            'resnet50': resnet50,
            'resnext50_32x4d': resnext50_32x4d,
        }
        if arch not in constructors:
            raise ValueError('no such value')
        self.model = constructors[arch](pretrained=torchvision_pretrained,
                                        num_classes=pretrained_num_classes,
                                        zero_init_residual=zero_init_residual)
        self.init_weights(num_classes, pretrained_num_classes)

    def init_weights(self, num_classes, pretrained_num_classes):
        """Swap in a new classifier head when the class counts differ."""
        if num_classes == pretrained_num_classes:
            return
        in_features = self.model.fc.in_features
        head = nn.Linear(in_features, num_classes)
        nn.init.normal_(head.weight, 0, 0.01)
        nn.init.zeros_(head.bias)
        self.model.fc = head

    def train(self, mode: bool = True) -> T:
        """Standard train/eval switch; re-freezes BN layers when configured."""
        super(TorchvisionResNet, self).train(mode=mode)
        if mode and (self.fix_bn or self.partial_bn):
            freezing_bn(self, partial_bn=self.partial_bn)
        return self

    def forward(self, x):
        """Run the backbone and wrap the logits under KEY_OUTPUT."""
        return {KEY_OUTPUT: self.model(x)}
@registry.RECOGNIZER.register('TorchvisionResNet')
def build_torchvision_resnet(cfg):
    """Build a :class:`TorchvisionResNet` from a zcls config node.

    Reads backbone/head/normalization settings from the config and forwards
    them unchanged to the model constructor.
    """
    recognizer_cfg = cfg.MODEL.RECOGNIZER
    return TorchvisionResNet(
        arch=cfg.MODEL.BACKBONE.ARCH,
        num_classes=cfg.MODEL.HEAD.NUM_CLASSES,
        torchvision_pretrained=recognizer_cfg.TORCHVISION_PRETRAINED,
        pretrained_num_classes=recognizer_cfg.PRETRAINED_NUM_CLASSES,
        fix_bn=cfg.MODEL.NORM.FIX_BN,
        partial_bn=cfg.MODEL.NORM.PARTIAL_BN,
        zero_init_residual=recognizer_cfg.ZERO_INIT_RESIDUAL
    )
| [
"wy163zhuj@163.com"
] | wy163zhuj@163.com |
092077973ed26e56e12866dd0b199df990ac44cf | c56268db8a4e08a705209142a6c171cd0f9aa7cc | /local_app/models.py | 74d009980834f60502c75147e84d6b2082fa967a | [] | no_license | nguyenl1/local-app | e46b6e2e1ebb5fc799c5bf4b90e95782e6a327d9 | 434a7f16670f9afb560355e1035fe78ec16b2eb0 | refs/heads/master | 2022-11-12T06:56:21.332008 | 2020-07-03T16:14:40 | 2020-07-03T16:14:40 | 270,892,971 | 0 | 0 | null | 2020-07-03T16:14:41 | 2020-06-09T03:23:05 | HTML | UTF-8 | Python | false | false | 1,678 | py | from django.db import models
from django.conf import settings
from django.utils import timezone
from django.contrib.auth import get_user_model
import cloudinary
import cloudinary.uploader
import cloudinary.api
from multiselectfield import MultiSelectField
class SavedPin(models.Model):
    """A location pin saved by a user, with address fields and image URLs."""
    # Owning user; pin survives with user=NULL semantics disabled by CASCADE,
    # but null=True allows anonymous/unowned rows.
    user = models.ForeignKey(get_user_model(), on_delete=models.CASCADE, null=True)
    # presumably an external business/place identifier — confirm against caller
    bus_id = models.CharField(max_length = 200)
    name = models.CharField(max_length = 200)
    address = models.CharField(max_length = 200, blank=True)
    city = models.CharField(max_length = 200, blank=True)
    zip_code = models.CharField(max_length = 200, blank=True)
    state = models.CharField(max_length = 200, blank=True)
    # Up to three image references stored as plain text (e.g. URLs).
    image = models.TextField(max_length=2000, blank=True)
    image_2 = models.TextField(max_length=2000, blank=True)
    image_3 = models.TextField(max_length=2000, blank=True)
    # Coordinates stored as text, not numeric fields.
    latitude = models.TextField(max_length=2000, blank=True)
    longitude = models.TextField(max_length=2000, blank=True)
class MyTrip(models.Model):
    """Associates a user's saved pin with a named trip."""
    user = models.ForeignKey(get_user_model(), on_delete=models.CASCADE, null=True)
    # PROTECT: a pin referenced by a trip cannot be deleted.
    saved_pin = models.ForeignKey(SavedPin, on_delete=models.PROTECT, null=True)
    name = models.CharField(max_length = 200, blank = True)
class SubmitPost(models.Model):
    """A user-submitted site/location suggestion with publisher contact info."""
    site_name = models.CharField(max_length = 200)
    address = models.CharField(max_length=200)
    city = models.CharField(max_length = 200, blank=True)
    zip_code = models.CharField(max_length = 200, blank=True)
    state = models.CharField(max_length = 200, blank=True)
    publisher_name = models.CharField(max_length = 200)
    email = models.CharField(max_length = 200)
"lynnthuynguyen@yahoo.com"
] | lynnthuynguyen@yahoo.com |
11b7e3689c9e441e4675a957d33afa8bb29e075b | 0c4fe6a4ada54cda0f5116e9fee31f133a2ca687 | /common/logger.py | dadeaf23c5addb75052fd305ecaa9c17e8709ab5 | [] | no_license | march-saber/python_aixunshouzhu_api | 029f26470418abd5585a5593bf3addc01f83285e | 49e57cbfe9ca7055fa05e9d98c439617ae486067 | refs/heads/master | 2020-05-28T09:46:53.418636 | 2019-06-03T12:12:47 | 2019-06-03T12:12:47 | 188,961,078 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,257 | py | import logging
from common import contants
from common.config import config
def get_logger(name):
    """Return a logger named *name* with a console and a file handler attached.

    The logger itself collects everything at DEBUG; each handler filters with
    a level read from the ``[log]`` section of the config file
    (``gather_log`` for the console, ``output_log`` for the file).

    :param name: logger name (also shown in the output format).
    :returns: a configured :class:`logging.Logger`.
    """
    logger = logging.getLogger(name)  # create/fetch the log collector
    logger.setLevel("DEBUG")  # collect everything; handlers filter below
    # FIX: guard against attaching duplicate handlers when get_logger() is
    # called more than once with the same name (would duplicate every line).
    if logger.handlers:
        return logger
    fmt = "%(name)s - %(levelname)s - %(asctime)s - %(message)s - [%(filename)s:%(lineno)d]"
    formatter = logging.Formatter(fmt=fmt)  # shared output format
    console_handler = logging.StreamHandler()  # output to the console
    # Console level comes from the config file.
    gather = config.get('log', 'gather_log')
    console_handler.setLevel(gather)
    console_handler.setFormatter(formatter)
    file_handler = logging.FileHandler(contants.log_dir + "/case.log", encoding='utf-8')
    # File level comes from the config file as well.
    output = config.get('log', 'output_log')
    file_handler.setLevel(output)
    file_handler.setFormatter(formatter)
    logger.addHandler(console_handler)
    logger.addHandler(file_handler)
    return logger
if __name__ == '__main__':
    # Manual smoke test: emit one record at each severity level.
    log = get_logger('case')
    for emit, text in ((log.debug, "测试开始"),
                       (log.info, "测试报错"),
                       (log.error, "测试数据"),
                       (log.warning, "测试结果"),
                       (log.critical, "测试结束")):
        emit(text)
"1162869224@qq.com"
] | 1162869224@qq.com |
a5ec4c22e8526a56d17ae4d199df63900a4fd74c | 7dfe37c9b4cb8512a49bb7abe6311b553e229fc5 | /src/utils/clases/metodos_strings.py | 44eb6a68120721b812795a4f6c0db7457ef5fb16 | [] | no_license | ArmandoBerlanga/python_playground | 7e28c992bd9934f4b72a58f076c2b3f39137951a | 4ee243e3d4a02667a8ed05fc526e5373a118e0d9 | refs/heads/main | 2023-04-05T12:37:11.478937 | 2021-03-31T00:43:30 | 2021-03-31T00:43:30 | 349,169,416 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,871 | py | # Programa creado por José Armando Berlanga Mendoza
# Creado el 17 de febrero de 2021
# Descripción: ejercicio sobre recursividad
def voltear_palabra(s):
    """Recursively reverse a word or sentence.

    Peels the first character off and appends it after the reversed remainder.
    """
    if not s:
        return ""
    return voltear_palabra(s[1:]) + s[0]
def es_palindrome(s):
    """Return True when *s* reads the same forwards and backwards.

    Recursive: compares the outermost pair, then recurses on the interior.
    Strings of length 0 or 1 are palindromes by definition.
    """
    if len(s) <= 1:
        return True
    if s[0] != s[-1]:
        return False
    return es_palindrome(s[1:-1])
def imprimir_pino(reglon, cont):
    """Print a centered pine tree to stdout, one recursive call per row.

    Each row prints ``reglon`` leading spaces followed by ``cont + 1`` stars,
    so rows shrink their indent by one while stars grow by two — keeping the
    tree centered. The original version contained unreachable
    ``if i == 0: break`` branches inside its counting loops (``range(..., 0, -1)``
    never yields 0); they are removed here with identical output.

    :param reglon: rows remaining (also the leading-space count of this row).
    :param cont: star count for this row is ``cont + 1``.
    :returns: "" (kept for parity with the original recursive contract).
    """
    if reglon == 0:
        return ""
    print(" " * reglon + "*" * (cont + 1))
    return imprimir_pino(reglon - 1, cont + 2)
def formatear_palabra(s):
    """Normalize *s* for palindrome checks.

    Lowercases, strips spaces/periods/commas, and replaces accented vowels
    (áéíóú) with their plain forms. Uses :meth:`str.translate` so the accent
    substitution happens in a single pass instead of one scan per accent.

    :param s: input word or sentence.
    :returns: the normalized string.
    """
    s = s.lower().replace(" ", "").replace(".", "").replace(",", "")
    return s.translate(str.maketrans("áéíóú", "aeiou"))
# Given a string s, find the length of the longest substring without repeating characters.
def longest_substring(s):
    """Return the length of the longest substring of *s* without repeating
    characters (classic sliding-window solution, O(n)).

    The previous implementation bounded its inner scan with ``j < len(s)-1``,
    which silently dropped the final character from every candidate substring
    (e.g. ``"au"`` returned 1 instead of 2). This rewrite fixes that and also
    handles the empty string and single characters.

    :param s: the input string.
    :returns: length of the longest run of distinct characters.
    """
    best = 0
    start = 0        # left edge of the current duplicate-free window
    last_seen = {}   # char -> index of its most recent occurrence
    for i, ch in enumerate(s):
        if ch in last_seen and last_seen[ch] >= start:
            start = last_seen[ch] + 1  # jump past the previous occurrence
        last_seen[ch] = i
        best = max(best, i - start + 1)
    return best
if __name__ == '__main__':
    # Interactive menu: keep prompting until the user picks 1, 2 or 3.
    option = -1
    while option == -1:
        print("\n[1] Voltear el orden de una palabra u oracion\n[2] Evaluar si una palabra o frase es palindrome\n[3] Imprimir un pino")
        option = int(input("\nIngrese un numero segun las opciones dadas: "))
        if option not in (1, 2, 3):
            print("\nNo has ingresado un num valido, vuelve a ingresarlo")
            option = -1
    if option == 1:
        s = input("\nIngrese el texto a voltear: ")
        print("\nResultado: " + voltear_palabra(s))
    elif option == 2:
        s = input("\nIngrese el texto a evaluar: ")
        # s = "A mamá Roma le aviva el amor a papá y a papá Roma le aviva el amor a mamá."
        # NOTE: preserves the original behavior, where the quoted text is only
        # echoed on the palindrome branch.
        if es_palindrome(formatear_palabra(s)):
            print("\n\"" + s + "\"" + ", es palindrome")
        else:
            print(", no es Palindrome")
    else:
        pisos = int(input("\nIngrese el numero de pisos de la piramide: "))
        print()
        imprimir_pino(pisos, 0)
        print()
a1afae0b9a14f320f59826b7a6e3c27d9d04847f | bddcad1331e2ea68d2ffc7e3f0478d8776fea5d8 | /Administratie/Literatuurstudie/bijlagen/convert.py | 3321a9340e1be75db22d68de619847b73eb22cbe | [] | no_license | 4ilo/masterproef | 693b9123e7e3fa64ef50960581d64f5116f8b045 | c5ad81cee83b354262b2dc9d0dced0cbcf0c2f66 | refs/heads/master | 2020-03-31T08:44:10.246829 | 2019-06-24T08:56:32 | 2019-06-24T08:56:32 | 152,070,317 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,548 | py | import xml.etree.ElementTree as ET
import argparse
import os
parser = argparse.ArgumentParser()
parser.add_argument("file")
args = parser.parse_args()
classes = []
def getLabels(root):
    """Collect label names from the <task><labels> node into the module-level
    ``classes`` list and write them, one per line, to ``cust.names``.

    :param root: an ElementTree element containing a ``task`` child.
    """
    label_nodes = root.find('task').find('labels')
    classes.extend(node.find('name').text for node in label_nodes)
    with open('cust.names', 'w') as out:
        out.write("\n".join(classes))
if __name__ == '__main__':
    # Parse the CVAT XML export given on the command line.
    tree = ET.parse(args.file)
    root = tree.getroot()
    print(root.tag)
    # One YOLO .txt annotation file per image goes into this directory.
    if not os.path.isdir('Anotations_yolo'):
        os.mkdir('Anotations_yolo')
    for child in root:
        if child.tag == 'meta':
            # Populate the global `classes` list (and cust.names) first.
            getLabels(child)
        if child.tag == 'image':
            image = child.get('name')
            w = float(child.get('width'))
            h = float(child.get('height'))
            boxes = ''
            for box in child.findall('box'):
                # CVAT gives absolute top-left / bottom-right corners.
                xtl = float(box.get('xtl'))
                ytl = float(box.get('ytl'))
                xbr = float(box.get('xbr'))
                ybr = float(box.get('ybr'))
                width = xbr - xtl
                height = ybr - ytl
                x = xtl + (width/2) #center
                y = ytl + (height/2)
                # YOLO format: class_id x_center y_center width height,
                # all normalized by the image dimensions.
                boxes += '{} {} {} {} {}\n'.format(classes.index(box.get('label')),
                                                   x/w, y/h, width/w, height/h)
            with open('Anotations_yolo/{}.txt'
                      .format(os.path.splitext(image)[0]), 'w') as file:
                file.write(boxes)
| [
"oliviervandeneede@hotmail.com"
] | oliviervandeneede@hotmail.com |
8d07ce4171b2bea29faab046161815234799f885 | a0cefb1cd11b85b34c5ed58e44d087981541111b | /run_tests.py | 57157de5d2632d35069de435665c2e90b130f08e | [] | no_license | diogo-aos/masters_final | 732e436e74bbc7d24756fb10f96d0d39656212e7 | 93ae6b71d7d7d9dade0059facfe2bd5162c673da | refs/heads/master | 2021-01-18T15:36:58.263599 | 2017-03-30T05:11:18 | 2017-03-30T05:11:18 | 86,661,794 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 329 | py | import unittest
import tests.test_scan as tscan
import tests.test_boruvka as tboruvka
# Build a suite per test module; only the Boruvka suite is run (the SCAN
# run is kept commented out, exactly as before).
loader = unittest.TestLoader()
scan_suite = loader.loadTestsFromModule(tscan)
boruvka_suite = loader.loadTestsFromModule(tboruvka)
# unittest.TextTestRunner(verbosity=2).run(scan_suite)
unittest.TextTestRunner(verbosity=2).run(boruvka_suite)
| [
"dasilva@academiafa.edu.pt"
] | dasilva@academiafa.edu.pt |
5c03758b507d6d0764e0ee096e04ba7048e30035 | da9b9f75a693d17102be45b88efc212ca6da4085 | /sdk/cosmos/azure-cosmos/azure/cosmos/container.py | 73441d19f5abd428087ba295d4936b854400a8c0 | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | elraikhm/azure-sdk-for-python | e1f57b2b4d8cc196fb04eb83d81022f50ff63db7 | dcb6fdd18b0d8e0f1d7b34fdf82b27a90ee8eafc | refs/heads/master | 2021-06-21T22:01:37.063647 | 2021-05-21T23:43:56 | 2021-05-21T23:43:56 | 216,855,069 | 0 | 0 | MIT | 2019-10-22T16:05:03 | 2019-10-22T16:05:02 | null | UTF-8 | Python | false | false | 35,017 | py | # The MIT License (MIT)
# Copyright (c) 2014 Microsoft Corporation
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Create, read, update and delete items in the Azure Cosmos DB SQL API service.
"""
from typing import Any, Dict, List, Optional, Union, Iterable, cast # pylint: disable=unused-import
import six
from azure.core.tracing.decorator import distributed_trace # type: ignore
from ._cosmos_client_connection import CosmosClientConnection
from ._base import build_options
from .errors import CosmosResourceNotFoundError
from .http_constants import StatusCodes
from .offer import Offer
from .scripts import ScriptsProxy
from .partition_key import NonePartitionKeyValue
__all__ = ("ContainerProxy",)
# pylint: disable=protected-access
# pylint: disable=missing-client-constructor-parameter-credential,missing-client-constructor-parameter-kwargs
class ContainerProxy(object):
"""
An interface to interact with a specific DB Container.
This class should not be instantiated directly, use :func:`DatabaseProxy.get_container_client` method.
A container in an Azure Cosmos DB SQL API database is a collection of documents,
each of which represented as an Item.
:ivar str id: ID (name) of the container
:ivar str session_token: The session token for the container.
.. note::
        To create a new container in an existing database, use :func:`DatabaseProxy.create_container`.
"""
    def __init__(self, client_connection, database_link, id, properties=None): # pylint: disable=redefined-builtin
        # type: (CosmosClientConnection, str, str, Dict[str, Any]) -> None
        """Do not call directly; use :func:`DatabaseProxy.get_container_client`."""
        self.client_connection = client_connection
        self.id = id
        self._properties = properties  # lazily populated via _get_properties()
        self.container_link = u"{}/colls/{}".format(database_link, self.id)
        self._is_system_key = None  # cached by the is_system_key property
        self._scripts = None  # type: Optional[ScriptsProxy]
def _get_properties(self):
# type: () -> Dict[str, Any]
if self._properties is None:
self._properties = self.read()
return self._properties
@property
def is_system_key(self):
# type: () -> bool
if self._is_system_key is None:
properties = self._get_properties()
self._is_system_key = (
properties["partitionKey"]["systemKey"] if "systemKey" in properties["partitionKey"] else False
)
return cast('bool', self._is_system_key)
@property
def scripts(self):
# type: () -> ScriptsProxy
if self._scripts is None:
self._scripts = ScriptsProxy(self.client_connection, self.container_link, self.is_system_key)
return cast('ScriptsProxy', self._scripts)
def _get_document_link(self, item_or_link):
# type: (Union[Dict[str, Any], str]) -> str
if isinstance(item_or_link, six.string_types):
return u"{}/docs/{}".format(self.container_link, item_or_link)
return item_or_link["_self"]
def _get_conflict_link(self, conflict_or_link):
# type: (Union[Dict[str, Any], str]) -> str
if isinstance(conflict_or_link, six.string_types):
return u"{}/conflicts/{}".format(self.container_link, conflict_or_link)
return conflict_or_link["_self"]
def _set_partition_key(self, partition_key):
if partition_key == NonePartitionKeyValue:
return CosmosClientConnection._return_undefined_or_empty_partition_key(self.is_system_key)
return partition_key
@distributed_trace
def read(
self,
populate_query_metrics=None, # type: Optional[bool]
populate_partition_key_range_statistics=None, # type: Optional[bool]
populate_quota_info=None, # type: Optional[bool]
**kwargs # type: Any
):
# type: (...) -> Dict[str, Any]
"""
Read the container properties
:param session_token: Token for use with Session consistency.
:param initial_headers: Initial headers to be sent as part of the request.
:param populate_query_metrics: Enable returning query metrics in response headers.
:param populate_partition_key_range_statistics: Enable returning partition key
range statistics in response headers.
:param populate_quota_info: Enable returning collection storage quota information in response headers.
:param request_options: Dictionary of additional properties to be used for the request.
:param response_hook: a callable invoked with the response metadata
:raises ~azure.cosmos.errors.CosmosHttpResponseError: Raised if the container couldn't be retrieved.
This includes if the container does not exist.
:returns: Dict representing the retrieved container.
:rtype: dict[str, Any]
"""
request_options = build_options(kwargs)
response_hook = kwargs.pop('response_hook', None)
if populate_query_metrics is not None:
request_options["populateQueryMetrics"] = populate_query_metrics
if populate_partition_key_range_statistics is not None:
request_options["populatePartitionKeyRangeStatistics"] = populate_partition_key_range_statistics
if populate_quota_info is not None:
request_options["populateQuotaInfo"] = populate_quota_info
collection_link = self.container_link
self._properties = self.client_connection.ReadContainer(
collection_link, options=request_options, **kwargs
)
if response_hook:
response_hook(self.client_connection.last_response_headers, self._properties)
return cast('Dict[str, Any]', self._properties)
@distributed_trace
def read_item(
self,
item, # type: Union[str, Dict[str, Any]]
partition_key, # type: Any
populate_query_metrics=None, # type: Optional[bool]
post_trigger_include=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Dict[str, str]
"""
Get the item identified by `item`.
:param item: The ID (name) or dict representing item to retrieve.
:param partition_key: Partition key for the item to retrieve.
:param session_token: Token for use with Session consistency.
:param initial_headers: Initial headers to be sent as part of the request.
:param populate_query_metrics: Enable returning query metrics in response headers.
:param post_trigger_include: trigger id to be used as post operation trigger.
:param request_options: Dictionary of additional properties to be used for the request.
:param response_hook: a callable invoked with the response metadata
:returns: Dict representing the item to be retrieved.
:raises ~azure.cosmos.errors.CosmosHttpResponseError: The given item couldn't be retrieved.
:rtype: dict[str, Any]
.. admonition:: Example:
.. literalinclude:: ../samples/examples.py
:start-after: [START update_item]
:end-before: [END update_item]
:language: python
:dedent: 0
:caption: Get an item from the database and update one of its properties:
:name: update_item
"""
doc_link = self._get_document_link(item)
request_options = build_options(kwargs)
response_hook = kwargs.pop('response_hook', None)
if partition_key:
request_options["partitionKey"] = self._set_partition_key(partition_key)
if populate_query_metrics is not None:
request_options["populateQueryMetrics"] = populate_query_metrics
if post_trigger_include:
request_options["postTriggerInclude"] = post_trigger_include
result = self.client_connection.ReadItem(document_link=doc_link, options=request_options, **kwargs)
if response_hook:
response_hook(self.client_connection.last_response_headers, result)
return result
@distributed_trace
def read_all_items(
self,
max_item_count=None, # type: Optional[int]
populate_query_metrics=None, # type: Optional[bool]
**kwargs # type: Any
):
# type: (...) -> Iterable[Dict[str, Any]]
"""
List all items in the container.
:param max_item_count: Max number of items to be returned in the enumeration operation.
:param session_token: Token for use with Session consistency.
:param initial_headers: Initial headers to be sent as part of the request.
:param populate_query_metrics: Enable returning query metrics in response headers.
:param feed_options: Dictionary of additional properties to be used for the request.
:param response_hook: a callable invoked with the response metadata
:returns: An Iterable of items (dicts).
:rtype: Iterable[dict[str, Any]]
"""
feed_options = build_options(kwargs)
response_hook = kwargs.pop('response_hook', None)
if max_item_count is not None:
feed_options["maxItemCount"] = max_item_count
if populate_query_metrics is not None:
feed_options["populateQueryMetrics"] = populate_query_metrics
if hasattr(response_hook, "clear"):
response_hook.clear()
items = self.client_connection.ReadItems(
collection_link=self.container_link, feed_options=feed_options, response_hook=response_hook, **kwargs
)
if response_hook:
response_hook(self.client_connection.last_response_headers, items)
return items
@distributed_trace
def query_items_change_feed(
self,
partition_key_range_id=None, # type: Optional[str]
is_start_from_beginning=False, # type: bool
continuation=None, # type: Optional[str]
max_item_count=None, # type: Optional[int]
**kwargs # type: Any
):
# type: (...) -> Iterable[Dict[str, Any]]
"""
Get a sorted list of items that were changed, in the order in which they were modified.
:param partition_key_range_id: ChangeFeed requests can be executed against specific partition key ranges.
This is used to process the change feed in parallel across multiple consumers.
:param is_start_from_beginning: Get whether change feed should start from
beginning (true) or from current (false). By default it's start from current (false).
:param continuation: e_tag value to be used as continuation for reading change feed.
:param max_item_count: Max number of items to be returned in the enumeration operation.
:param feed_options: Dictionary of additional properties to be used for the request.
:param response_hook: a callable invoked with the response metadata
:returns: An Iterable of items (dicts).
:rtype: Iterable[dict[str, Any]]
"""
feed_options = build_options(kwargs)
response_hook = kwargs.pop('response_hook', None)
if partition_key_range_id is not None:
feed_options["partitionKeyRangeId"] = partition_key_range_id
if is_start_from_beginning is not None:
feed_options["isStartFromBeginning"] = is_start_from_beginning
if max_item_count is not None:
feed_options["maxItemCount"] = max_item_count
if continuation is not None:
feed_options["continuation"] = continuation
if hasattr(response_hook, "clear"):
response_hook.clear()
result = self.client_connection.QueryItemsChangeFeed(
self.container_link, options=feed_options, response_hook=response_hook, **kwargs
)
if response_hook:
response_hook(self.client_connection.last_response_headers, result)
return result
@distributed_trace
def query_items(
self,
query, # type: str
parameters=None, # type: Optional[List[str]]
partition_key=None, # type: Optional[Any]
enable_cross_partition_query=None, # type: Optional[bool]
max_item_count=None, # type: Optional[int]
enable_scan_in_query=None, # type: Optional[bool]
populate_query_metrics=None, # type: Optional[bool]
**kwargs # type: Any
):
# type: (...) -> Iterable[Dict[str, Any]]
"""
Return all results matching the given `query`.
You can use any value for the container name in the FROM clause, but typically the container name is used.
In the examples below, the container name is "products," and is aliased as "p" for easier referencing
in the WHERE clause.
:param query: The Azure Cosmos DB SQL query to execute.
:param parameters: Optional array of parameters to the query. Ignored if no query is provided.
:param partition_key: Specifies the partition key value for the item.
:param enable_cross_partition_query: Allows sending of more than one request to
execute the query in the Azure Cosmos DB service.
More than one request is necessary if the query is not scoped to single partition key value.
:param max_item_count: Max number of items to be returned in the enumeration operation.
:param session_token: Token for use with Session consistency.
:param initial_headers: Initial headers to be sent as part of the request.
:param enable_scan_in_query: Allow scan on the queries which couldn't be served as
indexing was opted out on the requested paths.
:param populate_query_metrics: Enable returning query metrics in response headers.
:param feed_options: Dictionary of additional properties to be used for the request.
:param response_hook: a callable invoked with the response metadata
:returns: An Iterable of items (dicts).
:rtype: Iterable[dict[str, Any]]
.. admonition:: Example:
.. literalinclude:: ../samples/examples.py
:start-after: [START query_items]
:end-before: [END query_items]
:language: python
:dedent: 0
:caption: Get all products that have not been discontinued:
:name: query_items
.. literalinclude:: ../samples/examples.py
:start-after: [START query_items_param]
:end-before: [END query_items_param]
:language: python
:dedent: 0
:caption: Parameterized query to get all products that have been discontinued:
:name: query_items_param
"""
feed_options = build_options(kwargs)
response_hook = kwargs.pop('response_hook', None)
if enable_cross_partition_query is not None:
feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query
if max_item_count is not None:
feed_options["maxItemCount"] = max_item_count
if populate_query_metrics is not None:
feed_options["populateQueryMetrics"] = populate_query_metrics
if partition_key is not None:
feed_options["partitionKey"] = self._set_partition_key(partition_key)
if enable_scan_in_query is not None:
feed_options["enableScanInQuery"] = enable_scan_in_query
if hasattr(response_hook, "clear"):
response_hook.clear()
items = self.client_connection.QueryItems(
database_or_container_link=self.container_link,
query=query if parameters is None else dict(query=query, parameters=parameters),
options=feed_options,
partition_key=partition_key,
response_hook=response_hook,
**kwargs
)
if response_hook:
response_hook(self.client_connection.last_response_headers, items)
return items
@distributed_trace
def replace_item(
self,
item, # type: Union[str, Dict[str, Any]]
body, # type: Dict[str, Any]
populate_query_metrics=None, # type: Optional[bool]
pre_trigger_include=None, # type: Optional[str]
post_trigger_include=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Dict[str, str]
"""
Replaces the specified item if it exists in the container.
:param item: The ID (name) or dict representing item to be replaced.
:param body: A dict-like object representing the item to replace.
:param session_token: Token for use with Session consistency.
:param initial_headers: Initial headers to be sent as part of the request.
:param access_condition: Conditions Associated with the request.
:param populate_query_metrics: Enable returning query metrics in response headers.
:param pre_trigger_include: trigger id to be used as pre operation trigger.
:param post_trigger_include: trigger id to be used as post operation trigger.
:param request_options: Dictionary of additional properties to be used for the request.
:param response_hook: a callable invoked with the response metadata
:returns: A dict representing the item after replace went through.
:raises ~azure.cosmos.errors.CosmosHttpResponseError: The replace failed or the item with
given id does not exist.
:rtype: dict[str, Any]
"""
item_link = self._get_document_link(item)
request_options = build_options(kwargs)
response_hook = kwargs.pop('response_hook', None)
request_options["disableIdGeneration"] = True
if populate_query_metrics is not None:
request_options["populateQueryMetrics"] = populate_query_metrics
if pre_trigger_include:
request_options["preTriggerInclude"] = pre_trigger_include
if post_trigger_include:
request_options["postTriggerInclude"] = post_trigger_include
result = self.client_connection.ReplaceItem(
document_link=item_link, new_document=body, options=request_options, **kwargs
)
if response_hook:
response_hook(self.client_connection.last_response_headers, result)
return result
@distributed_trace
def upsert_item(
    self,
    body,  # type: Dict[str, Any]
    populate_query_metrics=None,  # type: Optional[bool]
    pre_trigger_include=None,  # type: Optional[str]
    post_trigger_include=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> Dict[str, str]
    """Insert or update the specified item.

    If the item already exists in the container, it is replaced. If it does
    not, it is inserted.

    :param body: A dict-like object representing the item to update or insert.
    :param populate_query_metrics: Enable returning query metrics in response headers.
    :param pre_trigger_include: trigger id to be used as pre operation trigger.
    :param post_trigger_include: trigger id to be used as post operation trigger.
    :param response_hook: a callable invoked with the response metadata
    :returns: A dict representing the upserted item.
    :raises ~azure.cosmos.errors.CosmosHttpResponseError: The given item could not be upserted.
    :rtype: dict[str, Any]
    """
    request_options = build_options(kwargs)
    response_hook = kwargs.pop('response_hook', None)
    request_options["disableIdGeneration"] = True
    if populate_query_metrics is not None:
        request_options["populateQueryMetrics"] = populate_query_metrics
    if pre_trigger_include:
        request_options["preTriggerInclude"] = pre_trigger_include
    if post_trigger_include:
        request_options["postTriggerInclude"] = post_trigger_include

    # BUG FIX: request_options was built above but never forwarded, so the
    # trigger/metrics settings were silently ignored. Pass it through like
    # every sibling method does.
    result = self.client_connection.UpsertItem(
        database_or_container_link=self.container_link,
        document=body,
        options=request_options,
        **kwargs
    )
    if response_hook:
        response_hook(self.client_connection.last_response_headers, result)
    return result
@distributed_trace
def create_item(
    self,
    body,  # type: Dict[str, Any]
    populate_query_metrics=None,  # type: Optional[bool]
    pre_trigger_include=None,  # type: Optional[str]
    post_trigger_include=None,  # type: Optional[str]
    indexing_directive=None,  # type: Optional[Any]
    **kwargs  # type: Any
):
    # type: (...) -> Dict[str, str]
    """Create an item in the container.

    To update or replace an existing item, use the
    :func:`ContainerProxy.upsert_item` method.

    :param body: A dict-like object representing the item to create.
    :param populate_query_metrics: Enable returning query metrics in response headers.
    :param pre_trigger_include: trigger id to be used as pre operation trigger.
    :param post_trigger_include: trigger id to be used as post operation trigger.
    :param indexing_directive: Indicate whether the document should be omitted from indexing.
    :param response_hook: a callable invoked with the response metadata
    :returns: A dict representing the new item.
    :raises ~azure.cosmos.errors.CosmosHttpResponseError: Item with the given ID already exists.
    :rtype: dict[str, Any]
    """
    request_options = build_options(kwargs)
    response_hook = kwargs.pop('response_hook', None)
    # NOTE(review): sibling methods set "disableIdGeneration"; confirm which
    # key spelling the service expects before unifying them.
    request_options["disableAutomaticIdGeneration"] = True
    # Consistency fix: siblings test "is not None" so an explicit False is
    # forwarded rather than silently dropped by a truthiness check.
    if populate_query_metrics is not None:
        request_options["populateQueryMetrics"] = populate_query_metrics
    if pre_trigger_include:
        request_options["preTriggerInclude"] = pre_trigger_include
    if post_trigger_include:
        request_options["postTriggerInclude"] = post_trigger_include
    if indexing_directive:
        request_options["indexingDirective"] = indexing_directive

    result = self.client_connection.CreateItem(
        database_or_container_link=self.container_link, document=body, options=request_options, **kwargs
    )
    if response_hook:
        response_hook(self.client_connection.last_response_headers, result)
    return result
@distributed_trace
def delete_item(
    self,
    item,  # type: Union[Dict[str, Any], str]
    partition_key,  # type: Any
    populate_query_metrics=None,  # type: Optional[bool]
    pre_trigger_include=None,  # type: Optional[str]
    post_trigger_include=None,  # type: Optional[str]
    **kwargs  # type: Any
):
    # type: (...) -> None
    """Delete the specified item from the container.

    :param item: The ID (name) or dict representing the item to be deleted.
    :param partition_key: Specifies the partition key value for the item.
    :param populate_query_metrics: Enable returning query metrics in response headers.
    :param pre_trigger_include: trigger id to be used as pre operation trigger.
    :param post_trigger_include: trigger id to be used as post operation trigger.
    :param response_hook: a callable invoked with the response metadata
    :raises ~azure.cosmos.errors.CosmosHttpResponseError: The item wasn't deleted successfully.
    :raises ~azure.cosmos.errors.CosmosResourceNotFoundError: The item does not exist in the container.
    :rtype: None
    """
    request_options = build_options(kwargs)
    response_hook = kwargs.pop('response_hook', None)
    # Fix: falsy partition-key values (0, "", False) are legitimate keys and
    # must still be forwarded; only skip when the caller passed None.
    if partition_key is not None:
        request_options["partitionKey"] = self._set_partition_key(partition_key)
    if populate_query_metrics is not None:
        request_options["populateQueryMetrics"] = populate_query_metrics
    if pre_trigger_include:
        request_options["preTriggerInclude"] = pre_trigger_include
    if post_trigger_include:
        request_options["postTriggerInclude"] = post_trigger_include

    document_link = self._get_document_link(item)
    result = self.client_connection.DeleteItem(document_link=document_link, options=request_options, **kwargs)
    if response_hook:
        response_hook(self.client_connection.last_response_headers, result)
@distributed_trace
def read_offer(self, **kwargs):
    # type: (Any) -> Offer
    """Read the Offer object for this container.

    :param response_hook: a callable invoked with the response metadata
    :returns: Offer for the container.
    :raises ~azure.cosmos.errors.CosmosHttpResponseError: No offer exists for
        the container or the offer could not be retrieved.
    :rtype: ~azure.cosmos.Offer
    """
    response_hook = kwargs.pop('response_hook', None)
    link = self._get_properties()["_self"]
    query_spec = {
        "query": "SELECT * FROM root r WHERE r.resource=@link",
        "parameters": [{"name": "@link", "value": link}],
    }
    offers = list(self.client_connection.QueryOffers(query_spec, **kwargs))
    if not offers:
        raise CosmosResourceNotFoundError(
            status_code=StatusCodes.NOT_FOUND,
            message="Could not find Offer for container " + self.container_link)

    if response_hook:
        response_hook(self.client_connection.last_response_headers, offers)

    first_offer = offers[0]
    return Offer(offer_throughput=first_offer["content"]["offerThroughput"],
                 properties=first_offer)
@distributed_trace
def replace_throughput(self, throughput, **kwargs):
    # type: (int, Any) -> Offer
    """Replace the container's throughput.

    :param throughput: The throughput to be set (an integer).
    :param response_hook: a callable invoked with the response metadata
    :returns: Offer for the container, updated with new throughput.
    :raises ~azure.cosmos.errors.CosmosHttpResponseError: No offer exists for
        the container or the offer could not be updated.
    :rtype: ~azure.cosmos.Offer
    """
    response_hook = kwargs.pop('response_hook', None)
    properties = self._get_properties()
    link = properties["_self"]
    query_spec = {
        "query": "SELECT * FROM root r WHERE r.resource=@link",
        "parameters": [{"name": "@link", "value": link}],
    }
    offers = list(self.client_connection.QueryOffers(query_spec, **kwargs))
    if not offers:
        raise CosmosResourceNotFoundError(
            status_code=StatusCodes.NOT_FOUND,
            message="Could not find Offer for container " + self.container_link)
    new_offer = offers[0].copy()
    new_offer["content"]["offerThroughput"] = throughput
    # BUG FIX: the updated offer (new_offer) must be sent, not the original
    # offers[0]. The old code only worked by accident because .copy() is
    # shallow and both dicts aliased the same "content" sub-dict.
    data = self.client_connection.ReplaceOffer(offer_link=offers[0]["_self"], offer=new_offer, **kwargs)
    if response_hook:
        response_hook(self.client_connection.last_response_headers, data)
    return Offer(offer_throughput=data["content"]["offerThroughput"], properties=data)
@distributed_trace
def list_conflicts(self, max_item_count=None, **kwargs):
    # type: (Optional[int], Any) -> Iterable[Dict[str, Any]]
    """List all conflicts in the container.

    :param max_item_count: Max number of items to be returned in the enumeration operation.
    :param response_hook: a callable invoked with the response metadata
    :returns: An Iterable of conflicts (dicts).
    :rtype: Iterable[dict[str, Any]]
    """
    feed_options = build_options(kwargs)
    response_hook = kwargs.pop('response_hook', None)
    if max_item_count is not None:
        feed_options["maxItemCount"] = max_item_count

    conflicts = self.client_connection.ReadConflicts(
        collection_link=self.container_link,
        feed_options=feed_options,
        **kwargs
    )
    if response_hook:
        response_hook(self.client_connection.last_response_headers, conflicts)
    return conflicts
@distributed_trace
def query_conflicts(
    self,
    query,  # type: str
    parameters=None,  # type: Optional[List[str]]
    enable_cross_partition_query=None,  # type: Optional[bool]
    partition_key=None,  # type: Optional[Any]
    max_item_count=None,  # type: Optional[int]
    **kwargs  # type: Any
):
    # type: (...) -> Iterable[Dict[str, Any]]
    """Return all conflicts matching the given `query`.

    :param query: The Azure Cosmos DB SQL query to execute.
    :param parameters: Optional array of parameters to the query. Ignored if no query is provided.
    :param enable_cross_partition_query: Allows sending of more than one request to
        execute the query in the Azure Cosmos DB service. More than one request is
        necessary if the query is not scoped to a single partition key value.
    :param partition_key: Specifies the partition key value for the item.
    :param max_item_count: Max number of items to be returned in the enumeration operation.
    :param response_hook: a callable invoked with the response metadata
    :returns: An Iterable of conflicts (dicts).
    :rtype: Iterable[dict[str, Any]]
    """
    feed_options = build_options(kwargs)
    response_hook = kwargs.pop('response_hook', None)
    if max_item_count is not None:
        feed_options["maxItemCount"] = max_item_count
    if enable_cross_partition_query is not None:
        feed_options["enableCrossPartitionQuery"] = enable_cross_partition_query
    if partition_key is not None:
        feed_options["partitionKey"] = self._set_partition_key(partition_key)

    # Plain string query when there are no parameters, otherwise a
    # parameterized query spec.
    if parameters is None:
        query_spec = query
    else:
        query_spec = dict(query=query, parameters=parameters)

    conflicts = self.client_connection.QueryConflicts(
        collection_link=self.container_link,
        query=query_spec,
        options=feed_options,
        **kwargs
    )
    if response_hook:
        response_hook(self.client_connection.last_response_headers, conflicts)
    return conflicts
@distributed_trace
def get_conflict(self, conflict, partition_key, **kwargs):
    # type: (Union[str, Dict[str, Any]], Any, Any) -> Dict[str, str]
    """Get the conflict identified by `conflict`.

    :param conflict: The ID (name) or dict representing the conflict to retrieve.
    :param partition_key: Partition key for the conflict to retrieve.
    :param response_hook: a callable invoked with the response metadata
    :returns: A dict representing the retrieved conflict.
    :raises ~azure.cosmos.errors.CosmosHttpResponseError: The given conflict couldn't be retrieved.
    :rtype: dict[str, Any]
    """
    request_options = build_options(kwargs)
    response_hook = kwargs.pop('response_hook', None)
    # Fix: falsy partition-key values (0, "", False) are legitimate keys and
    # must still be forwarded; only skip when the caller passed None.
    if partition_key is not None:
        request_options["partitionKey"] = self._set_partition_key(partition_key)

    result = self.client_connection.ReadConflict(
        conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs
    )
    if response_hook:
        response_hook(self.client_connection.last_response_headers, result)
    return result
@distributed_trace
def delete_conflict(self, conflict, partition_key, **kwargs):
    # type: (Union[str, Dict[str, Any]], Any, Any) -> None
    """Delete the specified conflict from the container.

    :param conflict: The ID (name) or dict representing the conflict to be deleted.
    :param partition_key: Partition key for the conflict to delete.
    :param response_hook: a callable invoked with the response metadata
    :raises ~azure.cosmos.errors.CosmosHttpResponseError: The conflict wasn't deleted successfully.
    :raises ~azure.cosmos.errors.CosmosResourceNotFoundError: The conflict does not exist in the container.
    :rtype: None
    """
    request_options = build_options(kwargs)
    response_hook = kwargs.pop('response_hook', None)
    # Fix: falsy partition-key values (0, "", False) are legitimate keys and
    # must still be forwarded; only skip when the caller passed None.
    if partition_key is not None:
        request_options["partitionKey"] = self._set_partition_key(partition_key)

    result = self.client_connection.DeleteConflict(
        conflict_link=self._get_conflict_link(conflict), options=request_options, **kwargs
    )
    if response_hook:
        response_hook(self.client_connection.last_response_headers, result)
| [
"noreply@github.com"
] | elraikhm.noreply@github.com |
8767b568baa843c144fcb45fc3930190b3aebe10 | ef270274b87c2500485a992a798606e139c120ff | /blog/migrations/0001_initial.py | 97261cf246c533caf63cae32a13a047a7610147d | [] | no_license | chvbrr/my-first-blog | e05e517a57c84ca99b176816486488462531d11e | ecca64473048220442f35a4d27c7d1ea6697c15b | refs/heads/master | 2021-01-10T13:43:27.209037 | 2016-01-31T14:29:54 | 2016-01-31T14:29:54 | 50,764,703 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,109 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-30 18:04
from __future__ import unicode_literals
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
from django.utils.timezone import utc
class Migration(migrations.Migration):
    # Auto-generated initial migration for the blog app: creates the ``post`` table.
    initial = True

    dependencies = [
        # The post.author FK points at the project's user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=200)),
                ('text', models.TextField()),
                # NOTE: this default is a fixed timestamp captured when
                # `makemigrations` ran (a common artifact of using
                # timezone.now() as a call rather than a callable in the model).
                ('created_date', models.DateTimeField(default=datetime.datetime(2016, 1, 30, 18, 4, 39, 220156, tzinfo=utc))),
                ('published_date', models.DateTimeField(blank=True, null=True)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"ch.v.b.ramaraju@gmail.com"
] | ch.v.b.ramaraju@gmail.com |
ec5a3ad6b211c933d6f0f6c9428eef2cd6609e11 | ce2ed48a3ea2b067ff45e2901fd1aa08b1b9fd3b | /Data_prep.py | 78e05e3e3b624e9b3f50fb508fd7ace9a273d008 | [] | no_license | mhmdsab/ASHRAE---Great-Energy-Predictor-III- | 9d049a8f7eb9887c89f10b8073e6ad7587d06582 | 946c37af579299d8eb0d338fd4367f0ffc5dd59a | refs/heads/master | 2020-12-01T06:07:04.791763 | 2019-12-28T07:27:32 | 2019-12-28T07:27:32 | 230,572,508 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 13,487 | py | import numpy as np
import pandas as pd
from tqdm import tqdm
import tensorflow as tf
import os
from abc import ABCMeta, abstractmethod
from sklearn.preprocessing import OneHotEncoder as OHE
class DataGenerator(metaclass=ABCMeta):
    """Abstract base for TFRecord-backed input pipelines.

    Subclasses must implement :meth:`_decode` to parse one serialized
    example; :meth:`inputs` builds the tf.data pipeline around it.
    """

    def __init__(self, para):
        # `iterator` is created lazily by inputs(); `para` holds the
        # hyper-parameter/config object (paths, window sizes, ...).
        self.iterator = None
        self.para = para

    def inputs(self, mode, batch_size, num_epochs=None):
        """Build a batched dataset iterator over the `mode` TFRecord file.

        Args:
            mode: String for the corresponding tfrecords ('train', 'valid').
            batch_size: Number of examples per returned batch.
            num_epochs: unused here (repetition is driven by the caller
                re-initializing the iterator).

        Returns:
            The next-element op of an initializable iterator.
        """
        if mode != "train" and mode != "valid":
            raise ValueError("mode: {} while mode should be "
                             "'train', 'validation'".format(mode))

        # e.g. <tf_records_url>/train_.tfrecord
        filename = self.para.tf_records_url + '/' + mode + "_.tfrecord"

        with tf.name_scope("input"):
            # TFRecordDataset opens a binary file and reads one record at a
            # time; `filename` could also be a list of filenames, which will
            # be read in order.
            dataset = tf.data.TFRecordDataset(filename)

            # Apply the subclass-specific parser to every serialized example.
            dataset = dataset.map(self._decode)

            # The shuffle transformation uses a finite-sized buffer to shuffle
            # elements in memory; for completely uniform shuffling the buffer
            # must equal the number of elements in the dataset.
            if mode == "train":
                dataset = dataset.shuffle(2380 * 16)

            dataset = dataset.batch(batch_size)
            dataset = dataset.prefetch(batch_size)

            # Initializable iterator so the caller can restart epochs.
            self.iterator = dataset.make_initializable_iterator()
        return self.iterator.get_next()

    @abstractmethod
    def _decode(self, serialized_example):
        # Parse one serialized tf.train.Example into tensors.
        pass
class generate_dataset(DataGenerator):
    """Builds train/valid TFRecord files for the ASHRAE energy data.

    One time series is produced per (building_id, meter) group; each series
    is resampled, feature-engineered (one-hot calendar features, lags,
    autocorrelations), normalized, zero-padded to a common index, split into
    train/valid date ranges, and finally windowed into encoder/decoder
    examples that are serialized to TFRecord files.
    """

    def __init__(self, para):
        self.para = para
        # Full date index used to zero-pad every building's series to the
        # same length.
        self.train_data_range = pd.date_range(para.TS_start_date, para.TS_end_date, freq=para.sampling_frequency)
        # Bookkeeping counters for how many windows were written / skipped.
        self.train_written_examples = 0
        self.valid_written_examples = 0
        self.train_excluded_examples = 0
        self.valid_excluded_examples = 0
        # Per-(building, meter) normalization stats and autocorrelations.
        self.mean_values = {}
        self.std_values = {}
        self.three_months_corr = {}
        self.six_months_corr = {}
        # Schema shared by the TFRecord writer and the _decode parser.
        self.features = {'x_encoder': tf.FixedLenFeature([self.para.in_sequence_window, 25], dtype=tf.float32),
                         'x_decoder': tf.FixedLenFeature([self.para.out_sequence_window, 24], dtype=tf.float32),
                         'encoder_meter_reading': tf.FixedLenFeature([self.para.in_sequence_window], dtype=tf.float32),
                         'decoder_meter_reading': tf.FixedLenFeature([self.para.out_sequence_window], dtype=tf.float32),
                         'decoder_meter_reading_denorm': tf.FixedLenFeature([self.para.out_sequence_window], dtype=tf.float32),
                         'mean': tf.FixedLenFeature([1], dtype=tf.float32),
                         'std': tf.FixedLenFeature([1], dtype=tf.float32)}
        # Build encoders, load the raw CSVs, then (possibly) write TFRecords.
        self.generate_encoders()
        self.generate_dataset()
        self.organize('train', 'valid')
        super().__init__(para)

    def generate_encoders(self):
        """Fit one-hot encoders for the categorical calendar/meter features."""
        self.encoding_dict = dict([('month_number', OHE().fit(np.arange(1, 13).reshape(-1, 1))),
                                   ('weekday_number', OHE().fit(np.arange(7).reshape(-1, 1))),
                                   ('meter', OHE().fit(np.arange(4).reshape(-1, 1)))])

    def generate_dataset(self):
        """Load the raw CSVs and group them by (building_id, meter).

        NOTE: this method intentionally shares its name with the class
        (shadowing is confusing but preserved).
        """
        print('generating raw dataset')
        train = pd.read_csv(self.para.train_url)
        meta = pd.read_csv(self.para.meta_url).loc[:, ['building_id', 'primary_use']]
        dataset = pd.merge(train, meta, left_on='building_id', right_on='building_id')
        dataset['timestamp'] = pd.to_datetime(dataset['timestamp'])
        self.dataset = dataset.groupby(['building_id', 'meter'])
        self.groups = list(self.dataset.groups.keys())

    def autocorrelate(self, df, group):
        """Add 3- and 6-month lag autocorrelations as constant feature columns."""
        # lag in samples = months * 30 days * samples-per-day; assumes the
        # first char of sampling_frequency is the hour step (e.g. '4H').
        three_months_corr = df['meter_reading'].autocorr(lag=int(3 * 30 * (24 / int(self.para.sampling_frequency[0]))))
        self.three_months_corr[group] = three_months_corr
        six_months_corr = df['meter_reading'].autocorr(lag=int(6 * 30 * (24 / int(self.para.sampling_frequency[0]))))
        self.six_months_corr[group] = six_months_corr
        # Broadcast the scalar correlation across every row of the series.
        df['three_months_lag_autocorr'] = three_months_corr * np.ones(shape=len(df))
        df['six_months_lag_autocorr'] = six_months_corr * np.ones(shape=len(df))
        return df.astype('float32')

    def One_Hot_Enode(self, df):
        """One-hot encode the categorical columns, dropping the last level."""
        for feature in self.encoding_dict.keys():
            raw_series = df[feature].values.reshape(-1, 1)
            # [:, :-1] drops the last column to avoid redundancy (dummy trap).
            One_Hot_Encoded_array = self.encoding_dict[feature].transform(raw_series).todense()[:, :-1]
            for i in range(One_Hot_Encoded_array.shape[1]):
                OHE_feature_name = feature + '_' + str(i)
                df[OHE_feature_name] = One_Hot_Encoded_array[:, i]
        return df.astype('float32')

    def pad_time_series(self, df):
        """Left-join df onto the full date range; missing rows become zeros."""
        padding_df = pd.DataFrame()
        padding_df['any'] = np.ones(shape=(self.para.building_total_len // int(self.para.sampling_frequency[0])))
        padding_df = padding_df.set_index(self.train_data_range)
        merged = pd.merge(padding_df, df, how='left', left_index=True, right_index=True).drop('any', 1).fillna(0)
        return merged.astype('float32')

    def Normalize_Pad_Split(self, df, group):
        """Normalize meter readings, pad to the full index and split train/valid."""
        # Stats come only from the first `features_extractor_len` months to
        # avoid leaking validation data into the normalization.
        mean = np.mean(df['meter_reading'][df['month_number'] <= self.para.features_extractor_len])
        std = np.std(df['meter_reading'][df['month_number'] <= self.para.features_extractor_len])
        # (x != x) is the NaN test: fall back to the average of the stats
        # collected so far when this series has no usable data.
        if (mean != mean) or (std != std):
            mean = np.mean(list(self.mean_values.values()))
            std = np.mean(list(self.std_values.values()))
        self.mean_values[group] = mean
        self.std_values[group] = std
        df = self.pad_time_series(df)
        df['meter_reading_normalized'] = df['meter_reading'].map(lambda x: self.Normalize(x, mean, std))
        df['three_months_lag'] = df['three_months_lag'].map(lambda x: self.Normalize(x, mean, std))
        df['six_months_lag'] = df['six_months_lag'].map(lambda x: self.Normalize(x, mean, std))
        train_df = df.loc[:self.para.train_end_date, :].drop(['month_number', 'weekday_number'], 1)
        valid_df = df.loc[self.para.valid_start_date:, :].drop(['month_number', 'weekday_number'], 1)
        return train_df, valid_df

    def organize(self, train_name, valid_name):
        """Run the per-group feature pipeline and write the TFRecord files.

        If both TFRecord files already exist, writing is skipped (the writers
        are pointed at throwaway '_' files and the convert step is gated off).
        """
        print('organizing raw dataset')
        if (os.path.exists(self.para.tf_records_url + '/' + '{}_.tfrecord'.format(train_name))) & \
                (os.path.exists(self.para.tf_records_url + '/' + '{}_.tfrecord'.format(valid_name))):
            self.para.train_kickoff = 'not_first_time'
            train_name, valid_name = '_', '_'
        with tf.python_io.TFRecordWriter(self.para.tf_records_url + '/' + '{}_.tfrecord'.format(train_name)) as train_writer:
            with tf.python_io.TFRecordWriter(self.para.tf_records_url + '/' + '{}_.tfrecord'.format(valid_name)) as valid_writer:
                for group in tqdm(self.groups):
                    building_df = self.dataset.get_group(group).set_index('timestamp')
                    building_df = building_df.resample(self.para.sampling_frequency).mean().fillna(method='ffill')
                    building_df = self.add_time_features(building_df)
                    building_df = self.One_Hot_Enode(building_df)
                    building_df = self.add_timelags(building_df)
                    building_df = self.autocorrelate(building_df, group)
                    train_building_df, valid_building_df = self.Normalize_Pad_Split(building_df, group)
                    if self.para.train_kickoff == 'first_time':
                        self._convert_to_tfrecord(train_building_df, train_writer, train_name, group)
                        self._convert_to_tfrecord(valid_building_df, valid_writer, valid_name, group)

    def _convert_to_tfrecord(self, df, writer, mode, group):
        """Window one series into encoder/decoder examples and serialize them.

        Windows whose encoder span is mostly zeros, or whose series std is
        below `min_std`, are skipped and counted as excluded.
        """
        df_meter_reading_normalized = df['meter_reading_normalized']
        df_meter_reading_unnormalized = df['meter_reading']
        # Model inputs: everything except ids and the raw target.
        df_x = df.drop(['building_id', 'meter_reading', 'meter'], 1)
        mean = self.mean_values[group]
        std = self.std_values[group]
        for i in range((len(df) - self.para.in_sequence_window) // self.para.out_sequence_window):
            # Windows slide by out_sequence_window; decoder follows encoder.
            start = i * self.para.out_sequence_window
            end = start + self.para.in_sequence_window
            y_end = end + self.para.out_sequence_window
            if ((df['meter_reading'].iloc[start:end] == 0).sum() <
                    int(self.para.max_zeros_in_example * self.para.in_sequence_window)) \
                    and (std > self.para.min_std):
                example = tf.train.Example(features=tf.train.Features(feature={
                    'x_encoder': tf.train.Feature(
                        float_list=tf.train.FloatList(value=df_x.iloc[start:end, :].values.flatten())),
                    # Decoder inputs exclude the normalized target column.
                    'x_decoder': tf.train.Feature(
                        float_list=tf.train.FloatList(value=df_x.drop(['meter_reading_normalized'], 1)
                                                      .iloc[end:y_end, :].values.flatten())),
                    'encoder_meter_reading': tf.train.Feature(
                        float_list=tf.train.FloatList(value=df_meter_reading_normalized[start:end]
                                                      .values.flatten())),
                    'decoder_meter_reading': tf.train.Feature(
                        float_list=tf.train.FloatList(value=df_meter_reading_normalized[end:y_end]
                                                      .values.flatten())),
                    'decoder_meter_reading_denorm': tf.train.Feature(
                        float_list=tf.train.FloatList(value=df_meter_reading_unnormalized[end:y_end]
                                                      .values.flatten())),
                    'mean': tf.train.Feature(
                        float_list=tf.train.FloatList(value=[mean])),
                    'std': tf.train.Feature(
                        float_list=tf.train.FloatList(value=[std]))}))
                writer.write(example.SerializeToString())
                if mode == 'train':
                    self.train_written_examples += 1
                elif mode == 'valid':
                    self.valid_written_examples += 1
            else:
                if mode == 'train':
                    self.train_excluded_examples += 1
                elif mode == 'valid':
                    self.valid_excluded_examples += 1

    def _decode(self, serialized_example):
        """Parse one serialized example back into the tensors written above."""
        example = tf.parse_single_example(
            serialized_example,
            features=self.features)
        x_encoder = example['x_encoder']
        x_decoder = example['x_decoder']
        encoder_meter_reading = example['encoder_meter_reading']
        decoder_meter_reading = example['decoder_meter_reading']
        decoder_meter_reading_denorm = example['decoder_meter_reading_denorm']
        mean = example['mean']
        std = example['std']
        return x_encoder, x_decoder, encoder_meter_reading, decoder_meter_reading, decoder_meter_reading_denorm, mean, std

    @staticmethod
    def Normalize(value, mean, std):
        # Tiny epsilon guards against division by zero for constant series.
        return (value - mean) / (std + 1e-100)

    @staticmethod
    def add_timelags(df):
        # 90 days * 6 samples/day = 3-month lag (assumes 4H sampling,
        # 6 samples per day) — TODO confirm against sampling_frequency.
        df['three_months_lag'] = df['meter_reading'].shift(90 * 6).fillna(0)
        df['six_months_lag'] = df['meter_reading'].shift(90 * 6 * 2).fillna(0)
        return df.astype('float32')

    @staticmethod
    def add_time_features(df):
        # Calendar features derived from the DatetimeIndex.
        df['weekday_number'] = df.index.weekday
        df['month_number'] = df.index.month
        return df
| [
"noreply@github.com"
] | mhmdsab.noreply@github.com |
b531a49ad9aebf02cb211cb8fd9267c0546770c9 | 8cfeb8b488987da3d955cdbded7e00fb4a63345e | /ula/views.py | 0225cc55dfd7da7a66938b4ac1490eafa887816b | [] | no_license | Vedaad-Shakib/oola | 77be7798b258898dde72b8efbe4dc4d35203af3e | 003b865a39e71d1a76c592cfe47bcc4fccc3305f | refs/heads/master | 2021-03-12T19:38:12.213770 | 2014-09-17T04:16:48 | 2014-09-17T04:16:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 48,730 | py | ###############################################################################
## Copyright (c) 2013-2014 Bogt, Inc.
## All rights reserved.
## This source code is confidential and may not be disclosed.
###############################################################################
###############################################################################
##
## "views.py": create the web pages
##
###############################################################################
from django.shortcuts import render_to_response
from django.conf import settings
from django.template import RequestContext
from django.http import HttpResponseRedirect
from django.http import HttpResponse
from django.db.models import Q
import cookies
import math
import csv
import operator
from datetime import datetime, timedelta
import json
from forms import *
from cookies import *
from email_util import *
import paginator
#==============================================================================
#
# "mainPage": display the home page
#
#==============================================================================
def mainPage(request, checkSignup=None, checkSignin=None):
    """Render the home page; also services AJAX signup/signin checks.

    When ``checkSignup``/``checkSignin`` is supplied, the corresponding form
    is validated against ``request.GET`` and a JSON payload (a redirect URL
    on success, form errors on failure) is returned instead of HTML.
    """
    # if checking for signup
    if checkSignup is not None:
        form = SignupForm(request.GET.copy())
        if form.is_valid():
            user = form.save()
            email = form.data['email']
            # Re-fetch so we hold the persisted row (form.save() result is discarded).
            user = User.objects.get(email=email)
            SetUserCookies(request, user)
            # Teachers (truthy userType — TODO confirm semantics) go to /students/.
            url = '/myprofile/'
            if user.userType:
                url = '/students/'
            # NOTE: local name shadows the imported `json` module.
            json = util.JsonLoad(url)
        else:
            json = util.JsonFormError(form)
        return HttpResponse(json,
                            mimetype='application/json')
    # if checking for signin
    if checkSignin is not None:
        form = SigninForm(request.GET.copy())
        if form.is_valid():
            user = User.objects.get(email=form.data['email'])
            SetUserCookies(request, user)
            url = '/myprofile/'
            if user.userType:
                url = '/students/'
            json = util.JsonLoad(url)
        else:
            json = util.JsonFormError(form)
        return HttpResponse(json,
                            mimetype='application/json')
    # normal call: render for a signed-in user if session cookies are present,
    # otherwise fall through to the anonymous page with blank forms.
    try:
        userName = request.session['fullName']
        userId = request.session['userId']
        user = User.objects.get(userId=userId)
        userType = user.userType
        return render_to_response('home.html',
                                  {'userName': userName,
                                   'userType': userType,
                                   },
                                  context_instance=RequestContext(request))
    # NOTE(review): bare except — presumably guards missing session keys /
    # missing user; narrowing to (KeyError, User.DoesNotExist) should be confirmed.
    except:
        forup = SignupForm()
        forin = SigninForm()
        return render_to_response('home.html',
                                  {'forup': forup,
                                   'forin': forin,
                                   'userName': None,
                                   'userType': None,
                                   # 'actionUrl':"/mainPage/"
                                   },
                                  context_instance=RequestContext(request))
#==============================================================================
#
# "signout":
#
#==============================================================================
def signout(request, check=None):
    """Clear the user's session cookies and redirect to the home page."""
    response = HttpResponseRedirect('/home/')
    DelUserCookies(request)
    return response
#==============================================================================
#
# "signin":
#
#==============================================================================
def signin(request, check=None):
    """Render the sign-in page; with `check`, validate an AJAX sign-in attempt.

    On a valid sign-in, session cookies are set and a JSON redirect URL is
    returned; on failure the form errors are returned as JSON.
    """
    if check is not None:
        data = request.GET.copy()
        form = SigninForm(data)
        if form.is_valid():
            user = User.objects.get(email=form.data['email'])
            SetUserCookies(request, user)
            # Teachers (truthy userType — TODO confirm semantics) go to /students/.
            url = '/myprofile/'
            if user.userType:
                url = '/students/'
            # NOTE: local name shadows the imported `json` module.
            json = util.JsonLoad(url)
        else:
            json = util.JsonFormError(form)
        return HttpResponse(json,
                            mimetype='application/json')
    # Plain GET: render an empty sign-in form.
    form = SigninForm()
    return render_to_response('signin.html',
                              {'form': form},
                              context_instance=RequestContext(request))
#==============================================================================
#
# "classPage": The main class page in which dancers can check in
#
#==============================================================================
def classPage(request):
    """Render the class check-in page with the names of all known dancers."""
    DelUserCookies(request)
    userNames = [str(user.name) for user in User.objects.all()]
    return render_to_response('class.html',
                              {'userNames': userNames},
                              context_instance=RequestContext(request))
#==============================================================================
#
# "classCheckRecent": Check if the student has recently checked in
#
#==============================================================================
def classCheckRecent(request):
    """AJAX endpoint: report whether the named student checked in recently.

    Returns JSON {"recent": bool, ...}; when a check-in within the last 10
    minutes exists, the student's name, id and attendance id are included so
    the client can offer to modify/cancel it.
    """
    jdata = {}
    jdata["recent"] = False
    # NOTE(review): bare except makes this best-effort — any failure (unknown
    # name, no recent attendance, or multiple matching rows raised by .get())
    # simply yields {"recent": false}. Narrowing should be confirmed.
    try:
        user = User.objects.get(name=request.GET["name"])
        currTime = datetime.datetime.now()
        lst10Min = currTime - datetime.timedelta(minutes=10)
        # .get() raises when zero or multiple rows match, which the except
        # above absorbs; it never returns None, so the `if` below always holds.
        lstAttendance = Attendance.objects.get(userId=user.userId,
                                               dateTime__gt=lst10Min
                                               )
        if lstAttendance:
            jdata["recent"] = True
            jdata["name"] = user.name
            jdata["userId"] = user.userId
            jdata["attendanceId"] = lstAttendance.attendanceId
    except:
        pass
    jsonData = json.dumps(jdata)
    return HttpResponse(jsonData, mimetype='application/json')
#==============================================================================
#
# "classCancel": The cancellation page
#
# Reason for this page: more secure than doing this on the main class page
#
#==============================================================================
def classCancel(request, totalGuests, userId, attendanceId):
    """Undo a check-in: delete the attendance record and refund the balance."""
    Attendance.objects.filter(attendanceId=attendanceId).delete()

    user = User.objects.get(userId=userId)
    refund = 1 + int(str(totalGuests))  # the dancer plus their guests
    user.balance = user.balance + refund
    user.lastAccess = datetime.datetime.now()
    user.save()

    # Record the balance change in the recent-activity log.
    rcntChg = RecentChange()
    rcntChg.userId = user
    rcntChg.change = "Balance"
    rcntChg.value = str(user.balance)
    rcntChg.dateTime = datetime.datetime.now()
    rcntChg.save()

    return HttpResponseRedirect('/class/')
#==============================================================================
#
# "classSave": The save page
#
# Reason for this page: more secure than doing this on the main class page
#
#==============================================================================
def classSave(request, guests, userId, oldCount):
    """Update today's check-in for a student.

    Adjusts the student's class balance by the difference between the new
    headcount (1 + guests) and the previously saved headcount, replaces any
    attendance record from today, and logs both changes.

    :param guests: number of guests (URL capture, string-typed).
    :param userId: the student's primary key (URL capture).
    :param oldCount: headcount previously recorded today (URL capture).
    :returns: redirect back to the class page.
    """
    guests = int(guests)
    oldCount = int(oldCount)
    user = User.objects.get(userId=userId)

    # Update the counts and balance: charge the delta against the old record.
    newCount = guests + 1
    user.balance = user.balance - newCount + oldCount
    user.lastAccess = datetime.datetime.now()
    user.save()

    # Remove any attendance records already saved for today.
    # (Idiom fix: compare dates directly instead of year/month/day fields.)
    today = datetime.datetime.today()
    for record in Attendance.objects.filter(userId=user.userId):
        if record.dateTime.date() == today.date():
            Attendance.objects.filter(attendanceId=record.attendanceId).delete()

    # Create the replacement attendance record.
    attendance = Attendance()
    attendance.userId = User.objects.get(userId=user.userId)
    attendance.dateTime = datetime.datetime.today()
    attendance.DancerNumber = newCount
    attendance.save()

    # Log both changes in the recent-activity feed.
    rcntChg = RecentChange()
    rcntChg.userId = user
    rcntChg.change = "Balance"
    rcntChg.value = str(user.balance)
    rcntChg.dateTime = datetime.datetime.now()
    rcntChg.save()

    rcntChg = RecentChange()
    rcntChg.userId = user
    rcntChg.change = "Attendance (Dancer Number)"
    rcntChg.value = str(newCount)
    rcntChg.dateTime = datetime.datetime.now()
    rcntChg.save()

    return HttpResponseRedirect('/class/')
#==============================================================================
#
# "classCheckin": redirects to if the user is not in the database
#
#==============================================================================
def classCheckin(request, userId = None):
    """Check a student in to today's class and charge their balance.

    userId is set when arriving from the sign-up flow (new student, no
    guests); otherwise name/guests/oldCount are read from the POST data.
    Re-submitting the confirmation form cancels the previous check-in
    (via the posted attendanceId) before charging again.
    """
    user = None
    guests = 0
    oldCount = 0
    newCount = 0
    if userId != None:
        # get the parameters; come from signup (new student)
        user = User.objects.get(userId=userId)
        name = user.name
        guests = 0
        oldCount = 0
    else:
        # get the parameters from the check-in form POST
        try:
            name = request.POST['name']
            guests = int(str(request.POST['guests']))
            oldCount = int(str(request.POST['oldCount']))
        except:
            return render_to_response('classInvalid.html',
                                {"redirect": "/class/"},
                                context_instance=RequestContext(request))
    # redirect to sign-up if the user is not found
    # NOTE(review): the lookup is by name, so duplicate names would raise
    # MultipleObjectsReturned and also land on the sign-up redirect — verify.
    try:
        user = User.objects.get(name=name)
    except:
        request.session['name'] = name
        return HttpResponseRedirect('/class/signup/')
    # cancel the previous checkin: refund and delete the attendance row
    # whose id was posted back from the confirmation page (if any)
    try:
        lstAttendanceId = request.POST['attendanceId']
        lstAttendance = Attendance.objects.get(attendanceId = lstAttendanceId)
        savedCount = lstAttendance.DancerNumber
        lstAttendance.delete()
        user.balance = user.balance + savedCount
        user.save()
    except:
        pass
    # update the counts and balance (user + guests, minus the old charge)
    newCount = guests + 1
    user.balance = user.balance - newCount + oldCount
    user.lastAccess = datetime.datetime.now( )
    user.save()
    # remove old records: keep at most one attendance row per user per day
    attendanceList = list(Attendance.objects.filter(userId = user.userId))
    today = datetime.datetime.today()
    for i in attendanceList:
        if i.dateTime.year == today.year and i.dateTime.month == today.month \
           and i.dateTime.day == today.day:
            Attendance.objects.filter(attendanceId = i.attendanceId).delete()
    # create an attendance record
    attendance = Attendance()
    attendance.userId = User.objects.get(userId = user.userId)
    attendance.dateTime = datetime.datetime.today()
    attendance.DancerNumber = newCount
    attendance.save()
    attendanceId = attendance.attendanceId
    #----------------------------------------------------------------------
    # Set the recent change (audit rows shown on the recent-activity page)
    #----------------------------------------------------------------------
    rcntChg = RecentChange( )
    rcntChg.userId = user
    rcntChg.change = "Balance"
    rcntChg.value = str( user.balance )
    rcntChg.dateTime = datetime.datetime.now( )
    rcntChg.save( )
    rcntChg = RecentChange( )
    rcntChg.userId = user
    rcntChg.change = "Attendance (Dancer Number)"
    rcntChg.value = str( newCount )
    rcntChg.dateTime = datetime.datetime.now( )
    rcntChg.save( )
    # confirm; newCount is echoed as "oldCount" so a re-submit of the
    # confirmation page refunds this charge before billing again
    return render_to_response("classCheckin.html",
                              {"name": user.name,
                               "balance": user.balance,
                               "guests": guests,
                               "userId": user.userId,
                               "oldCount": newCount,
                               "waiver": user.waiverSigned,
                               "attendanceId": attendanceId},
                               context_instance=RequestContext(request))
#==============================================================================
#
# "classSignup": signup page if the user is not in the database
#
#==============================================================================
def classSignup(request, check=None):
    """Class sign-up page for students who are not yet in the database.

    With *check* set the submitted form is validated (AJAX, JSON reply);
    otherwise the sign-up form is rendered, pre-filled with the name the
    check-in page stashed in the session.
    """
    if check is None:
        # Initial GET: pre-fill with the name remembered by classCheckin.
        try:
            prefillName = request.session['name'].title()
        except:
            prefillName = ""
        signupForm = ClassSignup( initial = {'name': prefillName } )
        return render_to_response('classSignup.html',
                                  {'form': signupForm,
                                   'name': prefillName},
                                  context_instance=RequestContext(request))
    # AJAX validation path: save the user and bounce to the check-in page.
    signupForm = ClassSignup(request.GET.copy())
    if signupForm.is_valid():
        newUser = signupForm.save()
        SetUserCookies(request, newUser)
        reply = util.JsonLoad('/class/checkin/' + str( newUser.userId ) + "/")
    else:
        reply = util.JsonFormError(signupForm)
    return HttpResponse(reply, mimetype='application/json')
#==============================================================================
#
# "history": The attendance and transactions page
#
#==============================================================================
def history( request, search = "" ):
    """Render the attendance/transactions history page with its defaults
    (date sort, attendance filter, first page)."""
    user, errUrl = GetValidUser(request)
    if errUrl:
        return HttpResponseRedirect(errUrl)
    pageContext = {
        'userName': user.name,
        'userType': user.userType,
        'sort': "date",
        'filter': "attendance",
        'currPage': 1,
        'search': search,
    }
    return render_to_response( 'history.html', pageContext,
                               context_instance=RequestContext(request) )
#==============================================================================
#
# "historyList": The history page
#
#==============================================================================
def historyList( request ):
    """AJAX endpoint: one page of attendance or purchase history plus
    per-day totals for the chart.

    POST params: sort ("name" or date), filter ("attendance" => attendance
    rows, anything else => purchases), page (1-based), search (substring
    of the student name) and range ("<start> - <end>" dates).
    """
    # Get "get" values
    sort = str(request.POST['sort'])
    filter = str(request.POST['filter'])
    page = int(request.POST['page'])
    searchVal=str(request.POST['search']).strip()
    dRange = str(request.POST['range']).split("-")
    # Dates may arrive with abbreviated (%b) or full (%B) month names.
    try:
        date1 = datetime.datetime.strptime( dRange[0].strip(),'%b %d, %Y')
    except:
        date1 = datetime.datetime.strptime( dRange[0].strip(),'%B %d, %Y')
    try:
        date2 = datetime.datetime.strptime( dRange[1].strip()+" 23:59:59",'%b %d, %Y %H:%M:%S')
    except:
        date2 = datetime.datetime.strptime( dRange[1].strip()+" 23:59:59",'%B %d, %Y %H:%M:%S')
    # Chart start date; month is 0-based — presumably for a JS charting
    # library, hence the "- 1" — TODO confirm against the template.
    chartBYear = date1.year
    chartBMonth = date1.month - 1
    chartBDay = date1.day
    if filter == "attendance":
        dispType = 'Attendance'
        chartTitle = "Student Attendance"
        chartYTitle = "No. Attendees"
        chartSName = "Attendees"
        dispList = []
        chartData = []
        # Walk the range one day at a time, summing dancer counts per day.
        chkDay = date1
        while chkDay <= date2:
            strDay1 = chkDay.strftime("%m %d, %Y")
            chkDay2 = datetime.datetime.strptime( strDay1 + " 23:59:59",'%m %d, %Y %H:%M:%S')
            dayData = 0
            if searchVal:
                attndLst= Attendance.objects.filter( userId__name__icontains=searchVal,
                                        dateTime__range=(chkDay, chkDay2))
            else:
                attndLst= Attendance.objects.filter( dateTime__range=(chkDay, chkDay2) )
            for i in attndLst:
                dayData += int( i.DancerNumber )
                dispList.append( i )
            chartData.append( dayData )
            chkDay += datetime.timedelta( days=1 )
    else:
        # Same walk as above but over Purchase rows (classes bought).
        dispType = 'Purchases'
        chartTitle = "Student Classes Bought"
        chartYTitle = "Classes Bought"
        chartSName = "Purchases"
        dispList = []
        chartData = []
        chkDay = date1
        while chkDay <= date2:
            strDay1 = chkDay.strftime("%m %d, %Y")
            chkDay2 = datetime.datetime.strptime( strDay1 + " 23:59:59",'%m %d, %Y %H:%M:%S')
            dayData = 0
            if searchVal:
                attndLst= Purchase.objects.filter( userId__name__icontains=searchVal,
                                        date__range=(chkDay, chkDay2))
            else:
                attndLst= Purchase.objects.filter( date__range=(chkDay, chkDay2) )
            for i in attndLst:
                dayData += int( i.numberOfClasses )
                dispList.append( i )
            chartData.append( dayData )
            chkDay += datetime.timedelta( days=1 )
    # Attach the student's name so the template/sort can use it directly.
    for item in dispList:
        item.name = item.userId.name
    # Sort by name, or by date (newest first)
    if sort == "name":
        dispList.sort(key=operator.attrgetter("name"))
    else:
        # Attendance rows use "dateTime", purchases use "date".
        if filter == "attendance":
            dispList.sort(key=operator.attrgetter("dateTime"))
        else:
            dispList.sort(key=operator.attrgetter("date"))
        dispList.reverse()
    #---------------------------------------------------------------------
    # Set the paginator object and get the current page data list
    #---------------------------------------------------------------------
    currPage = page
    pgintObj = getPgintObj(request, dispList,
                           currPage, nCols = 1 )
    dispList = pgintObj.getDataList( )
    return render_to_response( 'historyInfo.html',
                               {'dispType': dispType,
                                'dispList': dispList,
                                'currPage': currPage,
                                'paginator': pgintObj,
                                'chartTitle': chartTitle,
                                'chartYTitle': chartYTitle,
                                'chartSName': chartSName,
                                'chartData': chartData,
                                'chartBYear': chartBYear,
                                'chartBMonth': chartBMonth,
                                'chartBDay': chartBDay,
                                'sort': sort,
                                'filter': filter,
                                },
                               context_instance=RequestContext(request) )
#==============================================================================
##
## Gets all people with birthdays within given number of days
##
#==============================================================================
def birthdaysWithin(days):
    """Return a queryset of Users whose birthday (month/day, year ignored)
    falls within the next *days* days, today inclusive.

    Fix: the original seeded the list with today's (month, day) and then
    re-added it in the loop, producing a redundant duplicate Q clause; it
    also mutated `now` in place, which obscured the window boundaries.
    """
    start = datetime.datetime.now()
    end = start + timedelta(days)
    # Build one (month, day) tuple per calendar day in the window.
    monthdays = []
    day = start
    while day <= end:
        monthdays.append((day.month, day.day))
        day += timedelta(days=1)
    # Transform each tuple into queryset keyword args.
    kwargsList = (dict(zip(("birthday__month", "birthday__day"), t))
                  for t in monthdays)
    # OR the django.db.models.Q objects together for a single query.
    query = reduce(operator.or_, (Q(**d) for d in kwargsList))
    return User.objects.filter(query)
#==============================================================================
#
# "students": The student control page
#
#==============================================================================
def students( request, check=None):
    """Render the student-management landing page with its defaults
    (name sort, no filter, first page)."""
    user, errUrl = GetValidUser(request)
    if errUrl:
        return HttpResponseRedirect(errUrl)
    pageContext = {
        'userName': user.name,
        'userType': user.userType,
        'sort': "name",
        'filter': None,
        'currPage': 1,
    }
    return render_to_response( 'students.html', pageContext,
                               context_instance=RequestContext(request) )
#==============================================================================
#
# "studentList": The students page
#
#==============================================================================
def studentList( request ):
    """AJAX endpoint: one page of the (filtered, sorted) student list.

    POST params: sort ("name"/"activity"/balance), filter ("bday",
    "balance", "waiver" or anything else for all), page, search and
    range ("<start> - <end>" dates restricting by attendance).
    """
    # idle?
    user, errUrl = GetValidUser(request)
    if errUrl:
        return HttpResponseRedirect(errUrl)
    # Get "get" values
    sort = str(request.POST['sort'])
    filter = str(request.POST['filter'])
    page = int(request.POST['page'])
    search = str(request.POST['search']).strip()
    dRange = str(request.POST['range']).split("-")
    # Dates may arrive with abbreviated (%b) or full (%B) month names.
    try:
        date1 = datetime.datetime.strptime( dRange[0].strip(),'%b %d, %Y')
    except:
        date1 = datetime.datetime.strptime( dRange[0].strip(),'%B %d, %Y')
    try:
        date2 = datetime.datetime.strptime( dRange[1].strip()+" 23:59:59",'%b %d, %Y %H:%M:%S')
    except:
        date2 = datetime.datetime.strptime( dRange[1].strip()+" 23:59:59",'%B %d, %Y %H:%M:%S')
    # Get user list with filters (bday, waiver, balance)
    if filter == "bday":
        userList = birthdaysWithin(7)
    elif filter == "balance":
        userList = User.objects.filter(balance__lte=3)
    elif filter == "waiver":
        userList = User.objects.filter(waiverSigned=False)
    else:
        userList = User.objects.all()
    # Filter user list by search
    if search:
        userList = userList.filter(name__icontains=str(search))
    #print userList
    # Date range: restrict to students who attended inside the range.
    # 2014-01-01 appears to be the sentinel for "no range selected" —
    # TODO confirm against the date-picker default in the template.
    try:
        if date1.year != 2014 or date1.month != 1 or date1.day != 1:
            attendanceLst = Attendance.objects.filter(userId__in = userList,
                                    dateTime__range=(date1, date2))
            usrAttenLst = []
            for i in attendanceLst:
                usrId = i.userId.userId
                if usrId in usrAttenLst: continue
                usrAttenLst.append( usrId )
            userList = userList.filter(userId__in=usrAttenLst)
    except:
        pass
    # Make userList a list so we can sort
    userList = list(userList)
    # Sort by
    if str(sort) == "name":
        userList.sort(key=operator.attrgetter("name"))
    elif str(sort) == "activity":
        userList.sort(key=operator.attrgetter("lastAccess"))
        userList.reverse()
    else:
        userList.sort(key=operator.attrgetter("balance"))
    # @WIP1@ These lines must be fixed
    # NOTE(review): "hasBirthday" rejects any birthday in January OR on the
    # 1st of a month, not just the 1970-01-01 sentinel; "showBirthday"
    # compares the full birth datetime (with birth year) against 7 days
    # ago, which looks always-False for real birth years — confirm intent
    # before fixing.
    now = datetime.datetime.now()
    for user2 in userList:
        bday = user2.birthday
        user2.hasBirthday = (bday.year != 1970 and bday.month != 1 and bday.day != 1)
        lst7Day = now - datetime.timedelta( days = 7 )
        user2.showBirthday = (bday > lst7Day) # check Bday within 7 days
        user2.recent = ((now - user2.lastAccess).total_seconds() < 2*24*3600)
    # @WIP1@ These lines must be fixed
    # NOTE(review): nPages is hard-coded to 4 and passed to the template;
    # the real page count presumably lives in pgintObj below — confirm.
    nStudents = len( userList )
    currPage = 4
    nPages = 4
    # url = request.get_full_path()[:-1]
    #url = str(sortBy) + "/" + str(page) + "/" + str(range)
    #---------------------------------------------------------------------
    # Set the paginator object and get the current page data list
    #---------------------------------------------------------------------
    currPage = page
    pgintObj = getPgintObj(request, userList,
                           currPage, nCols = 4 )
    dataLst = pgintObj.getDataList( )
    return render_to_response( 'studentsInfo.html',
                               {'userName': user.name,
                                'userType': user.userType,
                                'userList': dataLst,
                                'nStudents': nStudents,
                                'currPage': currPage,
                                'paginator': pgintObj,
                                'nPages': nPages,
                                #'url': url,
                                #'addUserForm': addUserForm,
                                'sort': sort,
                                'filter': filter,
                                },
                               context_instance=RequestContext(request) )
###############################################################################
##
## "addStudent": Add a new student; using drop down
##
###############################################################################
def addStudent( request, check = None ):
    """Drop-down "add student" form; with *check* set the submitted form is
    validated via AJAX and a JSON reply is returned."""
    user, errUrl = GetValidUser(request)
    if errUrl:
        return HttpResponseRedirect(errUrl)
    if check is not None:
        # AJAX validation of the submitted form.
        submitted = AddUserForm(request.GET.copy())
        if submitted.is_valid():
            user2 = submitted.save()
            url = '/students/'
            reply = util.JsonLoad( url )
        else:
            reply = util.JsonFormError(submitted)
        return HttpResponse(reply, mimetype='application/json')
    # Initial GET: render an empty form inside the drop-down template.
    return render_to_response( 'dropDownForm.html',
                               {'form': AddUserForm()},
                               context_instance=RequestContext(request))
###############################################################################
##
## "exportStudents": Create a CSV file which contains the users data
##
###############################################################################
def exportStudents( request ):
    """Stream every student as a timestamped CSV attachment."""
    now = datetime.datetime.now( )
    response = HttpResponse( mimetype = 'text/csv' )
    fileName = "students_%s.csv" %now.strftime( '%y%m%d%H%M%S' )
    response['Content-Disposition'] = 'attachment; filename=%s' %fileName
    writer = csv.writer( response )
    headerRow = ['Name', 'E-mail', 'Address',
                 'Phone', 'Birthday', 'Balance',
                 'Waiver', 'Admin',
                 ]
    writer.writerow(headerRow)
    for student in User.objects.all():
        # Addresses may contain line breaks; flatten to keep one row per user.
        flatAddress = student.address.replace( "\r\n", " " )
        writer.writerow([student.name, student.email, flatAddress,
                         student.phone, str(student.birthday.date()),
                         student.balance, student.waiverSigned,
                         student.userType])
    return response
###############################################################################
##
## "importStudents": Import a CSV file which contains the users data
##
###############################################################################
def importStudents( request ):
    """Import students from an uploaded CSV (the format written by
    exportStudents), creating or updating one User per row.

    Rows whose e-mail already exists update that user in place; all other
    rows create a new user.  Always redirects back to the students page.

    Fixes: removed leftover debug code that dumped a hard-coded e-mail's
    row to /tmp/XXX.txt, and replaced the bare ``return`` (None is not a
    valid view response) on a header mismatch with a redirect.
    """
    if request.POST:
        fileName = request.FILES.copy()['fileName']
        fileData = fileName.read().split( '\r' )
        #----------------------------------------------------------------
        # Check the file data to be correct: the first 55 characters must
        # match the header exportStudents writes, otherwise reject it.
        #----------------------------------------------------------------
        fileHeader = fileData[0][:55].replace( "\r", "" ) #[:55] is the header
        header = "Name,E-mail,Address,Phone,Birthday,Balance,Waiver,Admin"
        if fileHeader != header:
            return HttpResponseRedirect("/students/")
        #----------------------------------------------------------------
        # Save imported file data into data base
        #----------------------------------------------------------------
        for row in fileData[1:]:
            rowData = row.replace("\r", "" )
            studentData = rowData.split( ',' )
            if len( studentData ) != 8: continue
            email = studentData[1]
            try:
                user = User.objects.get( email__exact = email )
            except User.DoesNotExist:
                user = None
            if user:
                updateStudent( studentData, update = True)
            else:
                updateStudent( studentData )
    return HttpResponseRedirect("/students/")
###############################################################################
##
## "updateStudent": Update student table
##
###############################################################################
def _parseBirthday( value ):
    """Parse a CSV birthday string; returns 1970-01-01 when unparseable.

    Accepts ISO dates (YYYY-MM-DD) and the short export format m/d/yy,
    whose two-digit year is expanded (>20 -> 19xx, otherwise 20xx).
    """
    try:
        return datetime.datetime.strptime( value, '%Y-%m-%d' )
    except:
        pass
    try:
        if int( value[-2:] ) > 20:
            value = value[:-2] + "19" + value[-2:]
        else:
            value = value[:-2] + "20" + value[-2:]
        return datetime.datetime.strptime( value, '%m/%d/%Y' )
    except:
        # Sentinel "no birthday" value used throughout this module.
        return datetime.datetime( 1970, 1, 1 )
def _parseIntField( value, default = 0 ):
    """Parse an integer CSV field, falling back to *default*."""
    try:
        return int( value )
    except:
        return default
def updateStudent( data, update = False ):
    """Create or update a User row from one CSV row.

    data:   [name, email, address, phone, birthday, balance, waiver, admin]
    update: True updates the existing user matched by e-mail; False
            creates a new user with a random (unknown) password.

    Refactor: the duplicated birthday/int parsing try/except blocks were
    extracted into _parseBirthday/_parseIntField, and the dead local
    ``birthdayAssigned`` was removed.
    """
    if update:
        #----------------------------------------------------------------
        # Update student data
        #----------------------------------------------------------------
        user = User.objects.get( email__exact = data[1] )
        user.name = data[0]
        user.address = data[2]
        user.phone = data[3]
        user.birthday = _parseBirthday( data[4] )
        user.balance = _parseIntField( data[5] )
        user.waiverSigned = (data[6] == "True")
        user.userType = _parseIntField( data[7] )
        user.save()
        return
    #--------------------------------------------------------------------
    # Save New student
    #--------------------------------------------------------------------
    today = datetime.datetime.today()
    user = User()
    user.name = data[0]
    user.email = data[1].lower()
    # No password in the CSV: assign an unguessable random one.
    user.password = util.encryptPass(str(random.random()))
    user.address = data[2].lower()
    user.phone = data[3]
    user.lastAccess = today
    user.balance = _parseIntField( data[5] )
    user.waiverSigned = (data[6] == "True")
    user.facebook = False
    user.notes = ""
    user.dateCreated = today
    user.userType = _parseIntField( data[7] )
    user.idleTime = 10
    user.birthday = _parseBirthday( data[4] )
    user.save()
#==============================================================================
#
# "clearance": A user does not have permission to be somewhere
#
#==============================================================================
def clearance(request):
    """Access-denied page shown when a user lacks permission for a page.

    With a valid session the page greets the user by name/type; without
    one it falls back to the anonymous version carrying the sign-up and
    sign-in forms.
    """
    try:
        userName = request.session['fullName']
        userId = request.session['userId']
        user = User.objects.get(userId=userId)
        userType = user.userType
        return render_to_response('clearance.html',
                                  {'userName': userName,
                                   'userType': userType,
                                   },
                                  context_instance=RequestContext(request))
    except:
        # BUG FIX: this branch used to test the undefined names
        # `checkSignup`/`checkSignin` (apparently pasted from another
        # view), so every anonymous visitor hit a NameError (HTTP 500).
        # The dead AJAX branches were removed; anonymous users now get
        # the clearance page with both forms, as the original fallback
        # branch intended.
        forup = SignupForm()
        forin = SigninForm()
        return render_to_response('clearance.html',
                                  {'forup': forup,
                                   'forin': forin,
                                   'userName': None,
                                   'userType': None,
                                   },
                                  context_instance=RequestContext(request))
#==============================================================================
#
# "edit": A page in which Ula can edit a student's information
#
#==============================================================================
def editStudent(request, userId, check=None):
    """Admin form for editing one student's details.

    When *check* is set the submitted form is validated via AJAX and a
    JSON reply is returned; otherwise the pre-filled edit form is shown.
    """
    user, errUrl = GetValidUser(request)
    if errUrl:
        return HttpResponseRedirect(errUrl)
    if check is not None:
        submitted = EditUserForm(request.GET.copy())
        if submitted.is_valid():
            submitted.save()
            reply = util.JsonLoad( closeSecId = "editSudent" )
        else:
            reply = util.JsonFormError(submitted)
        return HttpResponse(reply, mimetype='application/json')
    student = User.objects.get(userId = userId )
    # 1970-01-01 is the "no birthday" sentinel; show an empty field for it.
    birthVal = student.birthday.date()
    if str( birthVal ) == "1970-01-01":
        birthVal = ""
    editUserForm = EditUserForm(initial = {'userId': student.userId,
                                           'name': student.name,
                                           'email': student.email,
                                           'address': student.address,
                                           'phone': student.phone,
                                           'birth': birthVal,
                                           'balance': student.balance,
                                           'notes': student.notes,
                                           'waiver': student.waiverSigned
                                           })
    return render_to_response( 'editStudentForm.html',
                               {'form': editUserForm,
                                'userType': int(student.userType),
                                'title': 'Edit Student',
                                'actionUrl': "/students/edit/%d/" %int( userId ),
                                'submitVal': 'Save'
                                },
                               context_instance=RequestContext(request))
#==============================================================================
#
# "purchaseStudent": A purchase student page
#
#==============================================================================
def purchaseStudent(request, userId, check=None):
    """Admin form for adding purchased classes to a student's account."""
    user, errUrl = GetValidUser(request)
    if errUrl:
        return HttpResponseRedirect(errUrl)
    if check is not None:
        # AJAX submit: validate, save, and close the drop-down section.
        submitted = PurchaseUserForm(request.GET.copy())
        if submitted.is_valid():
            submitted.save()
            reply = util.JsonLoad( closeSecId = "purchaseStudent" )
        else:
            reply = util.JsonFormError(submitted)
        return HttpResponse(reply, mimetype='application/json')
    student = User.objects.get(userId = userId )
    purchaseForm = PurchaseUserForm( initial = {'userId': student.userId })
    # NOTE(review): the template name below has a double "t"; presumably the
    # file on disk is named this way -- verify before "fixing" it.
    return render_to_response( 'purchaseStudenttForm.html',
                               {'form': purchaseForm,
                                'title': 'Add classes to "%s"' %student.name,
                                'actionUrl': "/students/purchase/%d/" %int( userId ),
                                'submitVal': 'Save'
                                },
                               context_instance=RequestContext(request))
#==============================================================================
#
# "rcntActivityStudent": Student recent activity page
#
#==============================================================================
def rcntActivityStudent(request, userId ):
    """Show one student's recent activity (balance/attendance changes)."""
    user, errUrl = GetValidUser(request)
    if errUrl:
        return HttpResponseRedirect(errUrl)
    student = User.objects.get(userId = userId )
    pageContext = {'recentLst': usrRcntChanges( student ),
                   'title': '"%s" recent activity' %student.name,
                   }
    return render_to_response( 'recentActivity.html', pageContext,
                               context_instance=RequestContext(request))
#==============================================================================
#
# "forgotPassword": Forgot password page
#
#==============================================================================
def forgotPassword( request, check = None ):
    """Forgot-password page: e-mails a one-time reset link to the user.

    With *check* set the submitted address is validated via AJAX, a reset
    code is created and mailed; otherwise the request form is rendered.
    """
    if check is not None:
        submitted = ForgotPasswordForm( request.GET.copy() )
        if submitted.is_valid():
            email = submitted.data['email']
            user = User.objects.get( email__exact = email )
            forgotPasswd= submitted.save( )
            # Compose the reset e-mail carrying the one-time code link.
            emailBody = "Dear %s" %user.name
            emailBody +="<br/><br/>To reset your account password, please click on the following link:"
            emailBody +="<br/>"
            emailBody += settings.PROJECT_URL + "forgotCode/" + forgotPasswd.forgotCode
            util.emailNotification( "Forgot password",
                                    user,
                                    emailBody )
            reply = util.JsonLoad( '/forgotPdSentEmail/' )
        else:
            reply = util.JsonFormError( submitted )
        return HttpResponse(reply, mimetype = 'application/json' )
    extraInformation = "Please enter the e-mail address registered with your account.<br/>An e-mail containing a password reset link will be sent to your email."
    return render_to_response( 'forgotPassword.html',
                               {'form': ForgotPasswordForm(),
                                'forup': SignupForm(),
                                'forin': SigninForm(),
                                'extraInformation': extraInformation
                                },
                               context_instance=RequestContext(request))
def forgotPdSentEmail( request ):
    """Confirmation page shown after the reset e-mail has been sent."""
    pageContext = {'forup': SignupForm(),
                   'forin': SigninForm(),
                   }
    return render_to_response( 'forgotPwdSentEmail.html', pageContext,
                               context_instance=RequestContext(request) )
##############################################################################
##
## "changePassword": Change Password page
##
###############################################################################
def changePassword( request, forgotCode = None, check = None ):
    """Change-password page reached from a forgot-password e-mail link.

    forgotCode: one-time code from the e-mail; must still be valid.
    check:      set on the AJAX submit of the new password.

    Fixes: corrected the user-facing "passowrd" typo in the notification
    e-mail; removed the unused `today` local and dead commented-out code.
    """
    if check is not None:
        form = ChangePasswordForm( request.GET.copy( ) )
        form.forgotCode = forgotCode
        if form.is_valid():
            user = form.save( )
            # Notify the user that the password was changed.
            emailBody = "Dear %s" %user.name
            emailBody +="<br/><br/>Your password has been reset on "
            emailBody += settings.PROJECT_URL
            emailBody +="<br/>"
            util.emailNotification( "Change password",
                                    user,
                                    emailBody )
            json = util.JsonLoad( '/passwordChanged/' )
        else:
            json = util.JsonFormError( form )
        return HttpResponse(json, mimetype = 'application/json')
    ( validFlag, msg ) = chkValidForgotCode( forgotCode )
    if validFlag:
        form = ChangePasswordForm( )
        return render_to_response('changePassword.html',
                                  {
                                   'form': form,
                                   'actionUrl':"/forgotCode/" + forgotCode + "/"
                                   },
                                  context_instance=RequestContext(request) )
    else:
        # Invalid or expired code: explain why, offer sign-up/sign-in.
        forup = SignupForm()
        forin = SigninForm()
        return render_to_response('denyAccess.html',
                                  {
                                   'message': msg,
                                   'forup': forup,
                                   'forin': forin,
                                   },
                                  context_instance=RequestContext(request))
def passwordChanged( request ):
    """Confirmation page shown after a successful password change."""
    pageContext = {'forup': SignupForm(),
                   'forin': SigninForm(),
                   }
    return render_to_response( 'passwordChanged.html', pageContext,
                               context_instance=RequestContext(request) )
###############################################################################
##
## "chkValidForgotCode": Check forgot code validation
##
###############################################################################
def chkValidForgotCode( forgotCode ):
    """Validate a one-time forgot-password code.

    Returns (validFlag, message): validFlag is True when the code exists,
    is still active and is at most 6 hours old; message explains failures.
    A successfully validated code is marked inactive (single use).
    """
    today = datetime.datetime.today( )
    validFlag = False
    retMsg = ""
    try:
        forgotPasswd = ForgotPassword.objects.get(
                                forgotCode__exact = forgotCode)
        if forgotPasswd.active == 'Y':
            # BUG FIX: timedelta.seconds only holds the sub-day remainder,
            # so a code issued days earlier could look "fresh" again once
            # the time-of-day wrapped; total_seconds() measures the real age.
            chkTime = ( today - forgotPasswd.forgotTime ).total_seconds()
            if chkTime > 21600:    # 6 hours
                retMsg = "Your time has been expired. You have only 6 hours"
                retMsg +=" to change your password."
            else:
                validFlag= True
                forgotPasswd.active = 'N'
                forgotPasswd.save( )
        else:
            retMsg = "Sorry, the forgot code had been used before and "
            retMsg +="is not valid."
    except:
        retMsg = "Sorry, the user info is invalid."
    return ( validFlag, retMsg )
#==============================================================================
#
# "myprofile": Allows students to change their information
#
#==============================================================================
def myprofile(request, check=None):
    """Let the signed-in student view and edit their own profile."""
    user, errUrl = GetValidUser(request)
    if errUrl:
        return HttpResponseRedirect(errUrl)
    if check is not None:
        # AJAX submit of the profile form.
        submitted = MyprofileForm( request.GET.copy( ) )
        if submitted.is_valid():
            submitted.save( )
            reply = util.JsonLoad('/home/')
        else:
            reply = util.JsonFormError(submitted)
        return HttpResponse(reply, mimetype='application/json')
    # 1970-01-01 is the "no birthday" sentinel; show an empty field for it.
    birthVal = user.birthday.date()
    if str( birthVal ) == "1970-01-01":
        birthVal = ""
    profileForm = MyprofileForm(initial = {'userId': user.userId,
                                           'name': user.name,
                                           'email': user.email,
                                           'birth': birthVal,
                                           'phone': user.phone,
                                           'address': user.address,
                                           'idleTime': user.idleTime,
                                           })
    return render_to_response( 'myProfile.html',
                               {'form': profileForm,
                                'userName':user.name,
                                'userType':user.userType,
                                },
                               context_instance=RequestContext(request))
###############################################################################
##
## "getPgintObj": Create paginator object and return it.
##
###############################################################################
def getPgintObj( request, usrLst, currPage = 1, padding = 3, nCols = 1 ):
    """Build a PaginatorObj over *usrLst*: 6 rows per column and *nCols*
    columns per page, with *padding* page links either side of currPage."""
    perPage = 6 * nCols
    return paginator.PaginatorObj( usrLst, perPage, currPage, padding )
###############################################################################
##
## "usrRcntChanges":
##
###############################################################################
def usrRcntChanges( user ):
    """Collect this user's recent changes (< 48 hours old) as
    [change, value, 'YYYY-MM-DD'] triples; older rows are purged."""
    now = datetime.datetime.now()
    recent = []
    for chg in RecentChange.objects.filter(userId = user.userId):
        age = (now - chg.dateTime).total_seconds()
        if age < 2 * 24 * 3600:
            recent.append([chg.change,
                           chg.value,
                           chg.dateTime.strftime('%Y-%m-%d')])
        else:
            # Audit rows older than two days are deleted on the fly.
            chg.delete()
    return recent
| [
"vedaad799@gmail.com"
] | vedaad799@gmail.com |
ab60302c0ed0fb4b5e89d82951627fa21c93947d | 902aef0f2cde6c73a70c1833bec2c6f4fa1bc0b6 | /StimControl/LightStim/Text.py | 8250df16138250fa9a1abc57f9be913ac1e111ff | [
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | chrox/RealTimeElectrophy | 2b0c88b28cbeeb4967db5630f3dfa35764c27f54 | e1a331b23d0a034894a0185324de235091e54bf0 | refs/heads/master | 2020-04-14T12:29:47.958693 | 2013-10-07T14:12:04 | 2013-10-07T14:12:04 | 1,662,847 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,048 | py | # Text stimulus
#
# Copyright (C) 2010-2013 Huang Xin
#
# See LICENSE.TXT that came with this file.
# Taget stimuli
#
# Copyright (C) 2010-2013 Huang Xin
#
# See LICENSE.TXT that came with this file.
from VisionEgg.Text import Text
from LightData import dictattr
from Core import Stimulus
class Hint(Stimulus):
def __init__(self, params, **kwargs):
super(Hint, self).__init__(params=params, **kwargs)
self.name = 'hint'
self.parameters = dictattr()
self.set_parameters(self.parameters, params)
self.make_stimuli()
def make_stimuli(self):
position = self.viewport.deg2pix(self.parameters.xorigDeg) + self.viewport.xorig ,\
self.viewport.deg2pix(self.parameters.yorigDeg) + self.viewport.yorig
self.text = Text(text=self.parameters.text,
position=position,
color=self.parameters.color,
font_size=self.parameters.fontsize,
anchor='center')
self.stimuli = [self.text] | [
"chrox.huang@gmail.com"
] | chrox.huang@gmail.com |
d0fe0d32f8117f0320d48ac65fba33e6aa33014e | adc5060ccc1f9e1243f0d9d5eb95e0ca87034806 | /cdkworkshop/cdkworkshop_stack.py | 2b18b7ea586a1953a25a1b199afbf137a865fcaa | [] | no_license | fanaticjo/cdkworkshop | 37a1762fa9137ee405a098afaf2f45c3b57b135d | aa5a888d42aaae98eb18fe629b82fb1416b4bd7e | refs/heads/master | 2023-08-16T22:41:11.431991 | 2021-09-14T16:30:35 | 2021-09-14T16:30:35 | 406,444,342 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 583 | py | from aws_cdk import (
aws_lambda as _lambda,
aws_apigateway as api,
core
)
class CdkworkshopStack(core.Stack):
def __init__(self, scope: core.Construct, construct_id: str, **kwargs) -> None:
super().__init__(scope, construct_id, **kwargs)
self.lambdaStack=_lambda.Function(
self,'Handler',runtime=_lambda.Runtime.PYTHON_3_6,
code=_lambda.Code.asset('lambda'),
handler='lambda_function.lambda_handler'
)
self.api_g=api.LambdaRestApi(
self,'Endpoint',handler=self.lambdaStack
)
| [
"biswajit196@live.com"
] | biswajit196@live.com |
47d9a4ccfc270889c30a9622496a38134161653c | 2e8ca9eceb525c5e8649525654a931fff637ef6c | /FSF-2020/approximations-and-optimizations/Critical Points/example.py | 3a41be7d6ecb0fbd3b74b5b15c55177857163881 | [] | no_license | abuzar0013/FSF-mathematics-python-code-archive | b4d97833cac727366f7037350300d98ba71008c1 | 573819dbfec617e253c154fd3ccc6fe0c92ab149 | refs/heads/main | 2023-03-01T16:07:55.319281 | 2021-02-05T11:39:01 | 2021-02-05T11:39:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,220 | py | from manimlib.imports import*
class ExampleAnimation(ThreeDScene):
def construct(self):
axes = ThreeDAxes()
f_text = TextMobject("$f(x,y) = (y-x)(1-2x-3y)$").to_corner(UL)
d = Dot(np.array([0,0,0]), color = '#800000') #---- Critical Point
d_text = TextMobject("$(0.2,0.2)$",color = '#DC143C').scale(0.5).shift(0.2*UP) #----x = 0.2, y = 0.2
r_text=TextMobject("Critical Point",color = '#00FFFF').shift(0.3*DOWN).scale(0.6)
#----f(x,y) = (y-x)(1-2x-3y)
f = ParametricSurface(
lambda u, v: np.array([
u,
v,
(v-u)*(1-2*u-3*v)
]),v_min = -1, v_max = 1, u_min = -1, u_max = 1, checkerboard_colors = [PURPLE_D, PURPLE_E],
resolution=(20, 20)).scale(1)
self.set_camera_orientation(phi = 75 * DEGREES)
self.begin_ambient_camera_rotation(rate=0.5)
self.add_fixed_in_frame_mobjects(f_text)
self.wait(1)
self.add(axes)
self.play(Write(f),Write(d))
self.wait(1)
self.add_fixed_in_frame_mobjects(d_text)
self.wait(1)
self.add_fixed_in_frame_mobjects(r_text)
self.wait(3)
| [
"noreply@github.com"
] | abuzar0013.noreply@github.com |
ccf100ecb17578bc9791263e5270183990fed468 | 0b793bce2da8c3d09b7956c0672ddbffd46feaed | /atcoder/corp/keyence2020_c.py | 9e943f94b0f860184c871b6de78e2af5092d409b | [
"MIT"
] | permissive | knuu/competitive-programming | c6c4e08fb231937d988bdc5a60a8ad6b31b97616 | 16bc68fdaedd6f96ae24310d697585ca8836ab6e | refs/heads/master | 2021-01-17T09:39:02.647688 | 2020-11-07T03:17:22 | 2020-11-07T03:17:22 | 27,886,732 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 201 | py | N, K, S = map(int, input().split())
# Choose a "filler" value that differs from S for the N - K trailing
# elements. NOTE(review): presumably S - 1 (or S + 1 when S == 1, keeping
# the filler positive) guarantees no unintended contiguous window sums to
# S — confirm against the original problem statement.
if S == 1:
    const = S + 1
else:
    const = S - 1
ans = []
# First K entries are exactly S, the remainder are the filler.
for i in range(N):
    if i < K:
        ans.append(S)
    else:
        ans.append(const)
# Emit the sequence space-separated on a single line.
print(*ans)
| [
"premier3next@gmail.com"
] | premier3next@gmail.com |
57cc2f13b93c6dddb86a3a96dc0caa25754781c5 | 0c13fd0ae32c77a01a2527190d81e303d48d6fc9 | /src/snk_quiver3.0.py | ec8e47311a46d966e45d66b72ede7c3a58a9a8f3 | [] | no_license | migrau/hinosima | aa040a14381bf645158f57b5dfe738b5d47469a8 | 45a83c7c90bc8e20497c6b59179cc4fa4f21a879 | refs/heads/master | 2021-01-23T02:59:26.276261 | 2017-03-28T01:30:54 | 2017-03-28T01:30:54 | 86,038,967 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,800 | py | ##########################################################################################################
# snk.quiver3.0.py #
# #
## Script to polish a PacBio assembly using Quiver with SMRTANALYSIS V3.0 #
# 1. Convert bax.h5 files to bam #
# 2. Run pbalign with each bam file. #
# 3. Merge all the pbalign bam files output in a single bam file. #
# 4. (sort/index bam and index fasta #
# 5. run Quiver. #
# #
## Requirements: #
# - pacbio assembly (from canu, falcon, etc) #
# - pacbio reads (.bax.h5 format) #
# #
## Example run: #
# one node, cpus=24 [1 run with 24threads] #
# (dry run) $ snakemake --snakefile snk.quiver3.0.py -j 1 --config rdir=raw assembly=assembly.fasta -np #
# #
# multi-node [max 80 jobs at once, each one with threads=24] #
# (dry run) $ snakemake -j 80 --snakefile snk.quiver3.0.py --cluster-config cluster.json #
# --cluster "sbatch --partition=compute --cpus-per-task=1 --time=14-0 --job-name=snkmk --mem=10GB" #
# --config rdir=raw assembly=assembly.fasta -np #
# #
##########################################################################################################
import subprocess,glob
from os.path import join
import os,re,sys
from Bio import SeqIO
from os.path import basename
# Globals ---------------------------------------------------------------------
#PATH of PacificBiosciences pitchfork SMRTANALYSIS3.0
SMRTloc="/apps/pitchfork"
# Full path to a folder that holds PacBio reads in bax.h5 format. Use symlinks in case separate folders.
# `config` is injected by Snakemake from --config / cluster-config arguments.
BAX_DIR = config["rdir"]
# Path to the assembly fasta to be polished (from canu, falcon, etc.).
ASBLY = config["assembly"]
# Regular expression matching the bax.h5 files.
SAMPLES, = glob_wildcards(join(BAX_DIR, '{sample,[^/]+}.1.bax.h5'))
# Patterns using the 'sample' wildcard.
# NOTE: SAMPLES/PATTERN are rebound further down for the bam stage.
PATTERN = '{sample}.1.bax.h5'
# Movie basenames derived from the *.1.bax.h5 files found in BAX_DIR,
# e.g. ".../m1234.1.bax.h5" -> "m1234"; ffnames drives the expand() in
# rule mergePBALIGN below.
# (Previously `fnames=ffnames=[]` aliased both names to one list before
# immediately rebinding fnames — the aliasing was dead and misleading.)
fnames = glob.glob(BAX_DIR + "/*.1.bax.h5")
ffnames = [os.path.basename(f).split(".1.bax.h5")[0] for f in fnames]
# Rules -----------------------------------------------------------------------
# Final targets: the Quiver-polished assembly in fasta and fastq form.
rule all:
    input:
        fasta='quiverOut.fasta',
        fastq='quiverOut.fastq'
# Step 1: convert each trio of .bax.h5 movie files to a subreads BAM.
rule createBAM:
    input:
        files=join(BAX_DIR, PATTERN)
    output:
        compliantFiles='bax2bam/{sample}.subreads.bam'
    params:
        outfiles='bax2bam/{sample}',
        SMRT=SMRTloc
    shell:"""
    source {params.SMRT}/deployment/setup-env.sh
    dname=$(dirname {input.files});
    fname=$(basename {input.files} .1.bax.h5);
    bax2bam $dname/$fname.1.bax.h5 $dname/$fname.2.bax.h5 $dname/$fname.3.bax.h5 -o {params.outfiles} --subread --pulsefeatures=DeletionQV,DeletionTag,InsertionQV,IPD,MergeQV,SubstitutionQV,PulseWidth,SubstitutionTag
    """
# Rebind the wildcards for the BAM stage produced above.
BAM_DIR = "bax2bam/"
SAMPLES, = glob_wildcards(join(BAM_DIR, '{sample,[^/]+}.subreads.bam'))
PATTERN = '{sample}.subreads.bam'
# Step 2: align each subreads BAM against the assembly with pbalign.
rule runPBALIGN:
    input:
        files=join(BAM_DIR, PATTERN)
    output:
        'bax2bam/{sample}_aligned.bam'
    params:
        assembly=ASBLY
    shell:"""
    pbalign --nproc 12 {input.files} {params.assembly} {output}
    """
# Step 3: merge all aligned BAMs into a single file.
rule mergePBALIGN:
    input:
        expand("bax2bam/{sample}_aligned.bam", sample=ffnames)
    output:
        'bax2bam/all.bam'
    shell:"""
    din=$(echo {input} | sed 's/ / -in /g')
    bamtools merge -in $din -out {output};
    """
# Step 4a: coordinate-sort the merged BAM.
rule sortBAM:
    input:
        'bax2bam/all.bam'
    output:
        'bax2bam/all_sort.bam'
    shell:"""
    bamtools sort -in {input} -out {output}
    """
# Step 4b: index the sorted BAM (.bai).
rule indexBAM:
    input:
        'bax2bam/all_sort.bam'
    output:
        'bax2bam/all_sort.bam.bai'
    shell:"""
    bamtools index -in {input}
    """
# Step 4c: index the assembly fasta (.fai) once the BAM index exists.
rule indexFasta:
    input:
        ay=ASBLY,
        bai='bax2bam/all_sort.bam.bai'
    output:
        ASBLY+".fai"
    shell:"""
    samtools faidx {input.ay}
    """
# Step 4d: build the PacBio index (.pbi) required by variantCaller.
rule pbiBAM:
    input:
        bai='bax2bam/all_sort.bam.bai'
    output:
        ASBLY+".pbi"
    params:
        assembly=ASBLY
    shell:"""
    python /apps/unit/MikheyevU/miquel/GenomicConsensus/bin/makePbi.py --referenceFasta {params.assembly} bax2bam/all_sort.bam
    """
#if pbi missing file error. http://pb-falcon.readthedocs.io/en/latest/quick_start.html
# Step 5: run Quiver (variantCaller) to polish the assembly.
rule runQuiver:
    input:
        bam='bax2bam/all_sort.bam',
        bai='bax2bam/all_sort.bam.bai',
        fai=ASBLY+".fai"
    output:
        fasta='quiverOut.fasta',
        fastq='quiverOut.fastq'
    params:
        assembly=ASBLY
    shell:"""
    module load gcc/4.9.2
    variantCaller -j 12 --algorithm=best {input.bam} --referenceFilename {params.assembly} -o {output.fasta} -o {output.fastq}
    """
| [
"migrau@uji.es"
] | migrau@uji.es |
a96427e29b2d6a2689c2072888af98f6bd99e8a0 | 7f0eaa5aa008e7116645fa214e772ef4c2ee5406 | /api/util/__tests__/test_api.py | 533128dc7e06c5d7a5bb870807019b6ee2ad1d4a | [] | no_license | enixdark/raven | ef7273ff26e96b45486f67cb52d22c252bd336e2 | c60ec2aa4ffec3c89afac305e63512732a148f4e | refs/heads/master | 2023-04-10T21:05:51.703163 | 2020-01-17T20:09:25 | 2020-01-17T20:09:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,950 | py | import asyncio
import asynctest
import mock
import pytest
from mock import patch, MagicMock
from asynctest import CoroutineMock
from expects import expect, equal
from api.util.api import Api, Async
class TestApi:
    """Async unit tests for Api.call / Api.batch / Api.batch_async."""
    @pytest.mark.asyncio
    async def test_call(self, *args):
        """Api.call must forward every keyword argument to aiohttp.request."""
        with asynctest.mock.patch('aiohttp.request', create=True) as aiohttp_req_mock:
            # aiohttp.request returns an async context manager, so both
            # __aenter__ and __aexit__ have to be awaitable mocks.
            mock_req = CoroutineMock()
            mock_req.__aexit__ = CoroutineMock()
            mock_req.__aenter__ = CoroutineMock()
            aiohttp_req_mock.return_value = mock_req
            expected_method = 'some-value'
            expected_url = 'some-value'
            expected_params = {}
            expected_data = {}
            expected_json = {}
            expected_cookies = {}
            expected_headers = {}
            expected_auth = {}
            await Api.call(method=expected_method, url=expected_url, params=expected_params, data=expected_data, json=expected_json, cookies=expected_cookies, headers=expected_headers, auth=expected_auth)
            aiohttp_req_mock.assert_called_with(
                method=expected_method,
                url=expected_url,
                params=expected_params,
                data=expected_data,
                json=expected_json,
                cookies=expected_cookies,
                headers=expected_headers,
                auth=expected_auth)
    @pytest.mark.asyncio
    async def test_batch(self, *args):
        """Api.batch must invoke Api.call once per request descriptor."""
        with asynctest.mock.patch.object(Api, 'call') as call_mock:
            expected_requests = [
                {
                    'method': 'some-value',
                    'url': 'some-value',
                    'params': {},
                    'data': {},
                    'json': {},
                    'cookies': {},
                    'headers': {},
                    'auth': {}
                },
                {
                    'method': 'some-value',
                    'url': 'some-value',
                    'params': {},
                    'data': {},
                    'json': {},
                    'cookies': {},
                    'headers': {},
                    'auth': {}
                },
                {
                    'method': 'some-value',
                    'url': 'some-value',
                    'params': {},
                    'data': {},
                    'json': {},
                    'cookies': {},
                    'headers': {},
                    'auth': {}
                }
            ]
            res = await Api.batch(expected_requests)
            expect(call_mock.call_count).to(equal(len(expected_requests)))
            expect(len(res)).to(equal(len(expected_requests)))
    @pytest.mark.asyncio
    @asynctest.patch.object(Async, 'all')
    @asynctest.patch.object(Api, 'call')
    async def test_batch_async(self, *args):
        """Api.batch_async must schedule one Api.call per request and gather
        them via Async.all."""
        # Patch decorators are applied bottom-up, so args[0] is the Api.call
        # mock and args[1] is the Async.all mock.
        expected_requests = [
            {
                'method': 'some-value',
                'url': 'some-value',
                'params': {},
                'data': {},
                'json': {},
                'cookies': {},
                'headers': {},
                'auth': {}
            },
            {
                'method': 'some-value',
                'url': 'some-value',
                'params': {},
                'data': {},
                'json': {},
                'cookies': {},
                'headers': {},
                'auth': {}
            },
            {
                'method': 'some-value',
                'url': 'some-value',
                'params': {},
                'data': {},
                'json': {},
                'cookies': {},
                'headers': {},
                'auth': {}
            }
        ]
        res = await Api.batch_async(expected_requests)
        expect(args[0].call_count).to(equal(len(expected_requests)))
        args[1].assert_called()
| [
"noreply@github.com"
] | enixdark.noreply@github.com |
93608c46dbf817b09cd537b4fe647e9a03ac63ca | 06cd48c385acf1b79e9cf235730ab5a2f61c016e | /sql_queries.py | 12c9d5d060a7b204c6da2552d0bf4328db5250cf | [] | no_license | haymar017/-UDACITY-Data-Modeling-with-Postgres | 16f66b6abd267577f5da4211b6e0ce753761a257 | 89b494f2de889f388e6c4c3acaa1c10bbb57acfe | refs/heads/master | 2022-11-14T00:57:54.859730 | 2020-07-09T04:44:15 | 2020-07-09T04:44:15 | 276,080,584 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,432 | py | import json
import pandas
# DROP TABLES
songplay_table_drop = "drop table if exists songplays"
user_table_drop = "drop table if exists users"
song_table_drop = "drop table if exists songs"
artist_table_drop = "drop table if exists artists"
time_table_drop = "drop table if exists time"
# CREATE TABLES
# Dimension table: application users.
user_table_create = ("""
create table if not exists users (
user_id int Primary Key,
first_name varchar,
last_name varchar,
gender varchar,
level varchar
);
""")
# Dimension table: artists with optional geolocation.
artist_table_create = ("""
create table if not exists artists (
artist_id varchar Primary Key,
name varchar,
location varchar,
latitude float,
longitude float
);
""")
# Dimension table: songs; artist_id links to artists.
song_table_create = ("""
create table if not exists songs(
song_id varchar Primary Key ,
title varchar,
artist_id varchar not null,
year int,
duration float
);
""")
# Dimension table: timestamps broken out into calendar units.
time_table_create = ("""
create table if not exists time (
start_time timestamp Primary Key,
hour int,
day int,
week int,
month int,
year int,
weekday int
);
""")
# Fact table: one row per song play, referencing all four dimensions,
# so it must be created last (see create_table_queries ordering below).
songplay_table_create = ("""
create table if not exists songplays (
songplay_id serial Primary Key,
start_time timestamp not null,
user_id int not null,
level varchar,
song_id varchar,
artist_id varchar,
session_id int,
location varchar,
user_agent varchar,
foreign key (user_id) references users (user_id),
foreign key (song_id) references songs (song_id),
foreign key (artist_id) references artists (artist_id),
foreign key (start_time) references time (start_time)
);
""")
# INSERT RECORDS
# songplay_id is a serial column, so it is omitted from the insert.
songplay_table_insert = ("""
insert into songplays(
start_time,
user_id,
level,
song_id,
artist_id,
session_id,
location,
user_agent)
values (%s,%s,%s,%s,%s,%s,%s,%s)
""")
# Upsert: a returning user keeps one row, but their level may change.
user_table_insert = ("""
insert into users(
user_id,
first_name,
last_name,
gender,
level)
values (%s,%s,%s,%s,%s)
on conflict (user_id) do update set level = excluded.level;
""")
# Idempotent insert: duplicate song_ids are silently skipped.
song_table_insert = ("""
insert into songs(
song_id,
title,
artist_id,
year,
duration)
values (%s,%s,%s,%s,%s)
on conflict(song_id) do nothing
""")
# Idempotent insert: duplicate artist_ids are silently skipped.
artist_table_insert = ("""
insert into artists(
artist_id,
name,
location,
latitude,
longitude)
values (%s,%s,%s,%s,%s)
on conflict(artist_id) do nothing
""")
# Idempotent insert: duplicate timestamps are silently skipped.
time_table_insert = ("""
insert into time(
start_time,
hour,
day,
week,
month,
year,
weekday)
values(%s,%s,%s,%s,%s,%s,%s)
on conflict(start_time) do nothing
""")
# FIND SONGS
# Look up (song_id, artist_id) by song title, artist name and duration.
song_select = ("""
select
songs.song_id , artists.artist_id
from
songs join artists
on
songs.artist_id = artists.artist_id
where
songs.title=(%s) and artists.name=(%s) and songs.duration=(%s)
""")
# QUERY LISTS
create_table_queries = [user_table_create, artist_table_create, song_table_create, time_table_create, songplay_table_create]
drop_table_queries = [songplay_table_drop, user_table_drop, song_table_drop, artist_table_drop, time_table_drop]
| [
"noreply@github.com"
] | haymar017.noreply@github.com |
8afe9cc9f4f53d06be5e718686be5cb4cf5c0cdb | c67268ac491ecfe606308a43185f1bf8073d56a1 | /unittesting/test_employee2.py | 84682a7e52ffd035b6a9a992a079c59112128dc6 | [] | no_license | jisshub/python-django-training | 3c0fad4c80c78bcfb4b61b025da60d220b502e4b | d8c61f53e3bb500b1a58a706f20108babd6a1a54 | refs/heads/master | 2020-06-21T15:07:25.704209 | 2019-09-01T19:24:02 | 2019-09-01T19:24:02 | 197,487,745 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,929 | py | import unittest
# here v import Employee class employee module(employee.py)
from employee import Employee
class EmployeeTest(unittest.TestCase):
    """Unit tests for employee.Employee (email, full_name, apply_raise)."""
    def setUp(self):
        """Runs before every test: build two fresh Employee fixtures."""
        print('setup\n')
        # Creating the fixtures once here, instead of inside each test,
        # keeps the tests DRY.
        self.emp1 = Employee('jiss', 'jose', 3000)
        self.emp2 = Employee('isco', 'alarcon', 5000)
    def tearDown(self):
        """Runs after every test."""
        print('teardown\n')
    def test_email(self):
        # email is presumably derived from first+last name — changing a
        # name part should change the address accordingly.
        print('test_email\n')
        var1 = self.emp1.email
        var2 = self.emp2.email
        self.assertEqual(var1, 'jissjose@gmail.com')
        self.assertEqual(var2, 'iscoalarcon@gmail.com')
        self.emp1.first = 'john'
        self.emp2.last = 'james'
        self.assertEqual(self.emp1.email, 'johnjose@gmail.com')
        self.assertEqual(self.emp2.email, 'iscojames@gmail.com')
    def test_fullname(self):
        # full_name should track name mutations.
        print('test_fullname\n')
        self.assertEqual(self.emp1.full_name, 'jiss jose')
        self.emp1.first = 'jom'
        self.emp1.last = 'thomas'
        self.assertEqual(self.emp1.full_name, 'jom thomas')
        self.assertEqual(self.emp2.full_name, 'isco alarcon')
        self.emp2.first = 'alvaro'
        self.emp2.last = 'morata'
        self.assertEqual(self.emp2.full_name, 'alvaro morata')
    def test_pay(self):
        # apply_raise should scale pay by pay_raise (default appears to be
        # 2.0 given 3000 -> 6000 — confirm against employee.py).
        print('test_pay\n')
        self.assertEqual(self.emp1.apply_raise, 6000)
        self.emp1.pay_raise = 1.5
        self.assertEqual(self.emp1.apply_raise, 9000)
        self.assertEqual(self.emp2.apply_raise, 10000)
        self.emp2.pay_raise = .5
        self.assertEqual(self.emp2.apply_raise, 5000)
# Allow running this file directly with the unittest runner.
if __name__ == '__main__':
    unittest.main()
# here v text whether value of apply_raise and pay are equal.
# here setUp runs before each test and tearDown method runs after each test.
# order will be like
# setUp
# testmethod
# teardown
| [
"jissmon476@gmial.com"
] | jissmon476@gmial.com |
1dce40b705380b07cbe6eb7c6fbb5f9749a9ae9b | 08b4f0f914e33039d3ca408702b110fd6b24764a | /venv/Scripts/easy_install-script.py | be41f6d074beef7058f951ad44f80f761a1f7357 | [] | no_license | dakaun/wiki_crawler | 6558c0b4f4c9ec8ca94a5b7f0e24fc2bca77d1d1 | 82b526ce63e952d8364c5c553b6f00440a6be9cc | refs/heads/master | 2021-04-12T04:56:27.526150 | 2018-12-13T10:04:53 | 2018-12-13T10:04:53 | 125,738,838 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 449 | py | #!C:\Users\danielak\PycharmProjects\FIZ\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==28.8.0','console_scripts','easy_install'
# NOTE: auto-generated setuptools console-script wrapper for a virtualenv —
# do not edit by hand; it is rewritten when the environment is rebuilt.
__requires__ = 'setuptools==28.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Normalise argv[0]: strip a trailing "-script.py(w)" or ".exe" suffix
    # so the process reports the plain command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==28.8.0', 'console_scripts', 'easy_install')()
    )
| [
"danielak@famkaun.local"
] | danielak@famkaun.local |
f5d1ed04d9af16888d78a13a4548ec0454cbba05 | e5ba5c10b94e8971a3124ddb6b36048d76d7853e | /bot/main.py | 5d969adf3a42dc470c2f123fd81c8dd84fa54bd7 | [
"MIT"
] | permissive | sonhal/telegram-reddit-bot | 1933c1d0769bc167a951c2ba824a6827ae19f99a | 0d23f2bd1f8761d2133c6bcf2643502b505cb3be | refs/heads/master | 2020-04-15T06:50:49.437490 | 2019-01-09T10:53:09 | 2019-01-09T10:53:09 | 164,475,656 | 0 | 0 | MIT | 2019-01-09T11:08:54 | 2019-01-07T18:44:49 | Python | UTF-8 | Python | false | false | 157 | py | from connectors import telegram_connector
# Command names the bot recognises.
# NOTE(review): COMMANDS is not referenced in this file — presumably it is
# imported elsewhere or reserved for future use; confirm before removing.
COMMANDS = ("top10", "top")
if __name__ == '__main__':
    # Create the Telegram connector and start listening for updates.
    tb = telegram_connector.TelegramBot()
    tb.start()
| [
"sondre.hal@gmail.com"
] | sondre.hal@gmail.com |
611af549c5585bbe5afcc4f755e6bb733fbd0ce6 | e4370c3831bf4e1324d55930870fd754df3e5e9a | /apps/survey_app/urls.py | 632d830bb37715e5ba45b8f35313f153a7322569 | [] | no_license | grommitt/Django-survey-form | d338053adcbb664ac8198182928854cab321d0ca | d7869acd0bca4b33617bde9fa51cf8e38f38d0b4 | refs/heads/master | 2020-03-17T08:33:37.926850 | 2018-05-15T01:42:09 | 2018-05-15T01:42:09 | 133,442,798 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | from django.conf.urls import url
from . import views # This line is new!
# URL routes for the survey app (closing bracket follows below).
urlpatterns = [
    url(r'^$', views.index), # GET / -> survey form (index view)
    url(r'^results$', views.results), # GET /results -> submitted-results view
] | [
"gmshaughn@email.arizona.edu"
] | gmshaughn@email.arizona.edu |
0e6201ff8130bc459133a9f810db8ce61eaaf2b8 | 502a801b6ac886b25e915bc70d97f258acd9db56 | /src/data/srdata.py | e23ed16185415db9df0f0005e85ef56752bedfcf | [] | no_license | Dearbreeze/SPDNet | 31e79bff29678572372c0a99ac343932759f0291 | ff805b8a008e241ed7f8bb80242cbf2abca94f10 | refs/heads/main | 2023-08-20T10:11:14.386825 | 2021-10-13T10:45:11 | 2021-10-13T10:45:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,334 | py | import os
import glob
from data import common
import pickle
import numpy as np
import imageio
import torch
import torch.utils.data as data
class SRData(data.Dataset):
    """Paired LR/HR image dataset for super-resolution / restoration training.

    Images can be served three ways, selected by ``args.ext``:
      * 'img'  - decode the png files on every access;
      * 'sep'  - one pickled .pt file per image under <apath>/bin;
      * 'bin'  - a single consolidated pickle per split.
    A '+reset' suffix in ``args.ext`` forces the binaries to be rebuilt.
    """
    def __init__(self, args, name='', train=True, benchmark=False):
        self.args = args
        self.name = name
        self.train = train
        self.split = 'train' if train else 'test'
        self.do_eval = True
        self.benchmark = benchmark
        self.scale = args.scale
        self.idx_scale = 0
        # args.data_range is "a-b/c-d": train range before '/', test after.
        data_range = [r.split('-') for r in args.data_range.split('/')]
        if train:
            data_range = data_range[0]
        else:
            if args.test_only and len(data_range) == 1:
                data_range = data_range[0]
            else:
                data_range = data_range[1]
        self.begin, self.end = list(map(lambda x: int(x), data_range))
        self._set_filesystem(args.dir_data)
        if args.ext.find('img') < 0:
            path_bin = os.path.join(self.apath, 'bin')
            os.makedirs(path_bin, exist_ok=True)
            list_hr, list_lr = self._scan()
        if args.ext.find('bin') >= 0:
            # Binary files are stored in 'bin' folder
            # If the binary file exists, load it. If not, make it.
            list_hr, list_lr = self._scan()
            self.images_hr = self._check_and_load(
                args.ext, list_hr, self._name_hrbin()
            )
            self.images_lr = [
                self._check_and_load(args.ext, l, self._name_lrbin(s)) \
                for s, l in zip(self.scale, list_lr)
            ]
        else:
            if args.ext.find('img') >= 0 or benchmark:
                # Keep plain file paths; decoding happens in _load_file.
                self.images_hr, self.images_lr = list_hr, list_lr
            elif args.ext.find('sep') >= 0:
                # Mirror the HR/LR directory layout under bin/ and write one
                # .pt file per image (load=False: just create, don't keep).
                os.makedirs(
                    self.dir_hr.replace(self.apath, path_bin),
                    exist_ok=True
                )
                for s in self.scale:
                    os.makedirs(
                        os.path.join(
                            self.dir_lr.replace(self.apath, path_bin),
                            'X{}'.format(s)
                        ),
                        exist_ok=True
                    )
                self.images_hr, self.images_lr = [], [[] for _ in self.scale]
                for h in list_hr:
                    b = h.replace(self.apath, path_bin)
                    b = b.replace(self.ext[0], '.pt')
                    self.images_hr.append(b)
                    self._check_and_load(
                        args.ext, [h], b, verbose=True, load=False
                    )
                for i, ll in enumerate(list_lr):
                    for l in ll:
                        b = l.replace(self.apath, path_bin)
                        b = b.replace(self.ext[1], '.pt')
                        self.images_lr[i].append(b)
                        self._check_and_load(
                            args.ext, [l], b, verbose=True, load=False
                        )
        if train:
            # Virtually repeat the dataset so one "epoch" covers
            # args.test_every batches.
            self.repeat \
                = args.test_every // (len(self.images_hr) // args.batch_size)
    # Below functions as used to prepare images
    def _scan(self):
        """Return (hr_paths, lr_paths_per_scale) for this split."""
        names_hr = sorted(
            glob.glob(os.path.join(self.dir_hr, '*' + self.ext[0]))
        )
        names_lr = [[] for _ in self.scale]
        for f in names_hr:
            # NOTE(review): '.png' -> 'x2.png' maps an HR name onto its LR
            # counterpart's naming scheme — confirm against the dataset's
            # actual file naming.
            f = f.replace('.png','x2.png')
            filename, _ = os.path.splitext(os.path.basename(f))
            for si, s in enumerate(self.scale):
                names_lr[si].append(os.path.join(
                    self.dir_lr, '{}{}'.format(
                        filename, self.ext[1]
                    )
                ))
        return names_hr, names_lr
    def _set_filesystem(self, dir_data):
        """Define the HR/LR directory layout and the file extensions."""
        self.apath = os.path.join(dir_data, self.name)
        self.dir_hr = os.path.join(self.apath, 'HR')
        self.dir_lr = os.path.join(self.apath, 'LR_bicubic')
        self.ext = ('.png', '.png')
    def _name_hrbin(self):
        # Consolidated HR pickle for the current split.
        return os.path.join(
            self.apath,
            'bin',
            '{}_bin_HR.pt'.format(self.split)
        )
    def _name_lrbin(self, scale):
        # Consolidated LR pickle (note: `scale` is not part of the name).
        return os.path.join(
            self.apath,
            'bin',
            '{}_bin_LR.pt'.format(self.split)
        )
    def _check_and_load(self, ext, l, f, verbose=True, load=True):
        """Load pickle `f` if present, otherwise build it from image list `l`.

        With load=False the pickle is only (re)created and None is returned.
        A 'reset' marker in `ext` forces the rebuild.
        """
        if os.path.isfile(f) and ext.find('reset') < 0:
            if load:
                if verbose: print('Loading {}...'.format(f))
                with open(f, 'rb') as _f: ret = pickle.load(_f)
                return ret
            else:
                return None
        else:
            if verbose:
                if ext.find('reset') >= 0:
                    print('Making a new binary: {}'.format(f))
                else:
                    print('{} does not exist. Now making binary...'.format(f))
            # Each record keeps the stem name and the decoded pixel array.
            b = [{
                'name': os.path.splitext(os.path.basename(_l))[0],
                'image': imageio.imread(_l)
            } for _l in l]
            with open(f, 'wb') as _f: pickle.dump(b, _f)
            return b
    def __getitem__(self, idx):
        """Return (lr_tensor, hr_tensor, filename) for sample `idx`."""
        lr, hr, filename = self._load_file(idx)
        lr, hr = self.get_patch(lr, hr)
        lr, hr = common.set_channel(lr, hr, n_channels=self.args.n_colors)
        lr_tensor, hr_tensor = common.np2Tensor(
            lr, hr, rgb_range=self.args.rgb_range
        )
        return lr_tensor, hr_tensor, filename
    def __len__(self):
        # Training length is inflated by the repeat factor (see __init__).
        if self.train:
            return len(self.images_hr) * self.repeat
        else:
            return len(self.images_hr)
    def _get_index(self, idx):
        # Map a virtual (repeated) index back to a real sample index.
        if self.train:
            return idx % len(self.images_hr)
        else:
            return idx
    def _load_file(self, idx):
        """Fetch the raw lr/hr arrays and the sample name for `idx`."""
        idx = self._get_index(idx)
        f_hr = self.images_hr[idx]
        f_lr = self.images_lr[self.idx_scale][idx]
        if self.args.ext.find('bin') >= 0:
            # 'bin' mode: entries are dicts already loaded in __init__.
            filename = f_hr['name']
            hr = f_hr['image']
            lr = f_lr['image']
        else:
            filename, _ = os.path.splitext(os.path.basename(f_hr))
            if self.args.ext == 'img' or self.benchmark:
                hr = imageio.imread(f_hr)
                lr = imageio.imread(f_lr)
            elif self.args.ext.find('sep') >= 0:
                # Per-image pickles written by _check_and_load.
                with open(f_hr, 'rb') as _f: hr = np.load(_f)[0]['image']
                with open(f_lr, 'rb') as _f: lr = np.load(_f)[0]['image']
        return lr, hr, filename
    def get_patch(self, lr, hr):
        """Crop/augment a training patch, or size-match lr/hr for eval."""
        scale = self.scale[self.idx_scale]
        multi_scale = len(self.scale) > 1
        if self.train:
            # print('****preparte data****')
            lr, hr = common.get_patch(
                lr,
                hr,
                patch_size=self.args.patch_size,
                scale=scale,
                multi_scale=multi_scale
            )
            if not self.args.no_augment:
                # print('****use augment****')
                lr, hr = common.augment(lr, hr)
        else:
            # Crop hr to lr's size — presumably scale == 1 here (deraining);
            # the commented line is the original super-resolution variant.
            ih, iw = lr.shape[:2]
            hr = hr[0:ih, 0:iw]
            #hr = hr[0:ih * scale, 0:iw * scale]
        return lr, hr
    def set_scale(self, idx_scale):
        """Select which scale index subsequent __getitem__ calls use."""
        self.idx_scale = idx_scale
| [
"noreply@github.com"
] | Dearbreeze.noreply@github.com |
6c224211afdf733a84ccbb82e6c9968e0193891f | c3a1ce1a918a9f15355b17cece583bd27da1bd53 | /Taking input for Competitive Programming.py | b6204ac15b3909ac2a2fe4a251c2f5eac33381c1 | [] | no_license | MrJay10/Graph-Algorithms | 3cea89b8951f8b656505deef1aa52adf22549f0c | 382d0f8d41313bcf37b266695a82a8ebe6182b40 | refs/heads/master | 2021-01-17T19:21:25.246914 | 2016-06-25T08:33:36 | 2016-06-25T08:33:36 | 61,347,944 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 431 | py | from Graph import Graph
# Interactively build an adjacency-list graph from stdin, then print it.
graph = dict()
# Remove the message in input(); change str -> int for integer input
# (map(str, ...) is a no-op on split() output; it is the hook where the
# int conversion would be swapped in)
vertices = list(map(str, input("Enter vertices :: ").split()))
for vertex in vertices:
    # Remove the message in input(); change str -> int for integer input
    graph[vertex] = list(map(str, input("Enter neighbors of "+vertex+" -> ").split()))
# Wrap the adjacency dict in the project's Graph class and show it.
g = Graph(graph)
print("Your Graph is ::\n\n"+str(g)+"\n")
| [
"noreply@github.com"
] | MrJay10.noreply@github.com |
03bc0d80849bc3264945b6fc903d9599b980d26a | a38725ed7fb93b503207502984ec197e921eb54b | /venv/lib/python3.6/site-packages/django_ajax/encoder.py | 64ed9ca2af3a6a719fd651966cacb7ddaf862693 | [] | no_license | tanveerahmad1517/myblogproject | d00d550230e2df0843e67f793504f9c19d0b755c | 2eaa051caa5b68a8fba260c7cd431f1e1719a171 | refs/heads/master | 2020-03-16T21:38:32.738671 | 2018-08-23T11:55:02 | 2018-08-23T11:55:02 | 133,008,051 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,859 | py | """
Utils
"""
from __future__ import unicode_literals
import json
from datetime import date
from django.http.response import HttpResponseRedirectBase, HttpResponse
from django.template.response import TemplateResponse
from django.utils.encoding import force_text
from django.db.models.base import ModelBase
from decimal import Decimal
class LazyJSONEncoderMixin(object):
    """
    A JSONEncoder mixin that handles Django responses, querysets and model
    objects. Add handling for further object types here to make them
    serializable by json.dumps.
    """
    def default(self, obj):
        # Handle HttpResponse subclasses first (order matters: redirects and
        # template responses are themselves HttpResponse subclasses).
        if issubclass(type(obj), HttpResponseRedirectBase):
            return obj['Location']
        elif issubclass(type(obj), TemplateResponse):
            return obj.rendered_content
        elif issubclass(type(obj), HttpResponse):
            return obj.content
        elif issubclass(type(obj), Exception) or isinstance(obj, bytes):
            return force_text(obj)
        # Querysets and any other iterable become plain lists.
        try:
            iterable = iter(obj)
        except TypeError:
            pass
        else:
            return list(iterable)
        # Model instances (their class is built by ModelBase) -> text.
        if isinstance(obj.__class__, ModelBase):
            return force_text(obj)
        if isinstance(obj, Decimal):
            return float(obj)
        if isinstance(obj, date):
            return obj.isoformat()
        # Fall back to the next encoder in the MRO (raises TypeError).
        return super(LazyJSONEncoderMixin, self).default(obj)
class LazyJSONEncoder(LazyJSONEncoderMixin, json.JSONEncoder):
    """Concrete encoder: the lazy mixin layered on json.JSONEncoder."""
    pass
def serialize_to_json(data, *args, **kwargs):
    """
    A wrapper for json.dumps with defaults as:
        cls=LazyJSONEncoder

    All other positional/keyword arguments are forwarded to json.dumps.
    (The previous docstring mentioned simplejson, but this module uses the
    stdlib json.)
    """
    # Install the default encoder only when the caller did not choose one.
    kwargs.setdefault('cls', LazyJSONEncoder)
    return json.dumps(data, *args, **kwargs)
| [
"tanveerobjects@gmail.com"
] | tanveerobjects@gmail.com |
f3d5dcd2e5f655280d986d7d5e685dfb3b524cc2 | 06604399c457d6ec05fa5d5ae458632e2606ec98 | /torch/utils/_sympy/functions.py | 3c78e1bebb50e8e34e979cab147e57e371f418bb | [
"BSD-3-Clause",
"BSD-2-Clause",
"LicenseRef-scancode-secret-labs-2011",
"LicenseRef-scancode-generic-cla",
"BSL-1.0",
"Apache-2.0"
] | permissive | yncxcw/pytorch | 6f262f7613caef4c2ce18c85662db9adc6a2a81a | a3b72ee354031004edd9b951d0efcdd4508fd578 | refs/heads/master | 2023-07-20T21:38:00.718093 | 2023-07-13T03:54:17 | 2023-07-13T03:54:17 | 234,432,318 | 0 | 0 | NOASSERTION | 2020-01-16T23:34:42 | 2020-01-16T23:34:41 | null | UTF-8 | Python | false | false | 5,787 | py | import sympy
from sympy.core.logic import fuzzy_and, fuzzy_or
__all__ = ["FloorDiv", "ModularIndexing", "CleanDiv", "CeilDiv", "LShift", "RShift"]
class FloorDiv(sympy.Function):
    """
    We maintain this so that:
    1. We can use divisibility guards to simplify FloorDiv(a, b) to a / b.
    2. Printing out the expression is nicer (compared to say, representing a//b as (a - a % b) / b)
    """
    nargs = (2,)
    precedence = 50  # precedence of mul  # noqa: F811
    # Default return type for SymPy assumptions.
    # https://docs.sympy.org/latest/guides/assumptions.html#implementing-assumptions-handlers
    is_real = True
    @property
    def base(self):
        # Dividend (the `a` in a // b).
        return self.args[0]
    @property
    def divisor(self):
        # Divisor (the `b` in a // b).
        return self.args[1]
    def _sympystr(self, printer):
        # Pretty-print as Python floor division, parenthesized at mul level.
        base = printer.parenthesize(self.base, self.precedence)
        divisor = printer.parenthesize(self.divisor, self.precedence)
        return f"({base}//{divisor})"
    # SymPy assumptions based on argument types.
    def _eval_is_real(self):
        return fuzzy_or([self.base.is_real, self.divisor.is_real])
    def _eval_is_integer(self):
        return fuzzy_and([self.base.is_integer, self.divisor.is_integer])
    # Automatic evaluation.
    # https://docs.sympy.org/latest/guides/custom-functions.html#best-practices-for-eval
    # Returning None (falling off the end) keeps the expression unevaluated.
    @classmethod
    def eval(cls, base, divisor):
        def check_supported_type(x):
            # Reject complex and boolean operands up front.
            if (x.is_integer is False and x.is_real is False and x.is_complex) or x.is_Boolean:
                raise TypeError(
                    f"unsupported operand type(s) for //: "
                    f"'{type(base).__name__}' and '{type(divisor).__name__}'"
                    f", expected integer or real")
        check_supported_type(base)
        check_supported_type(divisor)
        # We don't provide the same error message as in Python because SymPy
        # makes it difficult to check the types.
        if divisor.is_zero:
            raise ZeroDivisionError("division by zero")
        # Trivial simplifications.
        if base.is_zero:
            return sympy.S.Zero
        if base.is_integer and divisor == 1:
            return base
        if base.is_real and divisor == 1:
            return sympy.floor(base)
        # Fully numeric operands evaluate directly.
        if isinstance(base, sympy.Integer) and isinstance(divisor, sympy.Integer):
            return base // divisor
        if isinstance(base, (sympy.Integer, sympy.Float)) and isinstance(divisor, (sympy.Integer, sympy.Float)):
            return sympy.floor(base / divisor)
        # Nested floordiv: (a // b) // c == a // (b * c).
        if isinstance(base, FloorDiv):
            return FloorDiv(base.args[0], base.args[1] * divisor)
        # Pull out an addend that the divisor divides exactly.
        if isinstance(base, sympy.Add):
            for a in base.args:
                gcd = sympy.gcd(a, divisor)
                if gcd == divisor:
                    return FloorDiv(base - a, divisor) + a / gcd
        # Cancel a common factor of base and divisor.
        gcd = sympy.gcd(base, divisor)
        if gcd != 1:
            return FloorDiv(
                sympy.simplify(base / gcd), sympy.simplify(divisor / gcd)
            )
class ModularIndexing(sympy.Function):
    """
    ModularIndexing(a, b, c) => (a // b) % c
    """
    nargs = (3,)
    is_integer = True
    # Returning None from eval keeps the expression unevaluated.
    @classmethod
    def eval(cls, base, divisor, modulus):
        # (0 // b) % c == 0 and anything % 1 == 0.
        if base == 0 or modulus == 1:
            return sympy.Integer(0)
        # Fully numeric operands evaluate directly.
        if (
            isinstance(base, sympy.Integer)
            and isinstance(divisor, sympy.Integer)
            and isinstance(modulus, sympy.Integer)
        ):
            return (base // divisor) % modulus
        # Cancel a common factor of base and divisor.
        if divisor != 1:
            gcd = sympy.gcd(base, divisor)
            if gcd != 1:
                return ModularIndexing(
                    sympy.simplify(base / gcd), sympy.simplify(divisor / gcd), modulus
                )
        # Drop addends that are multiples of modulus * divisor: they cannot
        # affect (base // divisor) % modulus — but only when no negative
        # terms are present (see the Triton workaround below).
        if isinstance(base, sympy.Add):
            new_terms = []
            all_positive = True
            for term in base.args:
                if sympy.gcd(term, modulus * divisor) != modulus * divisor:
                    if (isinstance(term, sympy.Integer) and term < 0) or (
                        isinstance(term, sympy.Mul)
                        and isinstance(term.args[0], sympy.Integer)
                        and term.args[0] < 0
                    ):
                        # workaround for https://github.com/openai/triton/issues/619,
                        # if there are negative terms, // produces wrong result
                        # TODO if https://github.com/openai/triton/issues/619 is fixed
                        # this optimization would become valid
                        all_positive = False
                        break
                    else:
                        new_terms.append(term)
            if len(new_terms) != len(base.args) and all_positive:
                return ModularIndexing(sum(new_terms), divisor, modulus)
        # Fold a nested floordiv: ((a // b) // d) % c -> (a // (b*d)) % c.
        if isinstance(base, FloorDiv):
            return ModularIndexing(base.args[0], base.args[1] * divisor, modulus)
class CleanDiv(FloorDiv):
    """
    Div where we can assume no rounding.
    This is to enable future optimizations.
    """
    # Behaves exactly like FloorDiv; the distinct type only records the
    # "divides exactly" guarantee for downstream passes.
    pass
class CeilDiv(sympy.Function):
    """
    Div used in indexing that rounds up.
    """
    is_integer = True
    def __new__(cls, base, divisor):
        # When divisor divides base exactly the result needs no rounding,
        # so a CleanDiv records that guarantee; otherwise apply the
        # identity ceil(a / b) == floor((a + b - 1) / b).
        divides_exactly = sympy.gcd(base, divisor) == divisor
        if divides_exactly:
            return CleanDiv(base, divisor)
        return FloorDiv(base + (divisor - 1), divisor)
class LShift(sympy.Function):
    """Symbolic left shift: LShift(base, shift) == base << shift."""
    @classmethod
    def eval(cls, base, shift):
        # A left shift by `shift` is multiplication by 2**shift; negative
        # shift counts are rejected exactly as Python's << would.
        if shift >= 0:
            return base * 2 ** shift
        raise ValueError('negative shift count')
class RShift(sympy.Function):
    """Symbolic right shift: RShift(base, shift) == base >> shift."""
    @classmethod
    def eval(cls, base, shift):
        # A right shift by `shift` is floor division by 2**shift; negative
        # shift counts are rejected exactly as Python's >> would.
        if shift >= 0:
            return base // 2 ** shift
        raise ValueError('negative shift count')
| [
"pytorchmergebot@users.noreply.github.com"
] | pytorchmergebot@users.noreply.github.com |
34b5e8044cc30321ccc9752daf6dcba32df3f719 | 00a6541d639d073e819f2c080e6c65610eff1982 | /coordinates.py | beab54eac858b13725db676f5022b7b6e5a8878f | [] | no_license | Wajahat0/Annotation-Transfer-Across-the-Microscope | 16fd0eb339363505449297981c8b4817e93dc594 | 911a79b3307b890bb7ec7a2822d91f4f6fa9a6d6 | refs/heads/main | 2023-07-13T12:45:14.350687 | 2021-08-27T05:15:39 | 2021-08-27T05:15:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,544 | py |
import sys
import cv2
import argparse
import numpy as np
import pandas as pd
from yattag import Doc, indent
from homography_cal import homography_cal
def tranfer_annoation(data_path,t_data_path,slide_num,r_res,t_res,surf):
	"""Transfer bounding-box annotations from reference-resolution images to
	target-resolution images of the same slide via per-image homographies.

	Reads <r_res>_labels.csv (filename, width, height, class, x1, y1, x2, y2
	— presumably; confirm column order against the label generator), warps
	each box with a homography between the reference and target image, and
	writes <t_res>_labels.csv.

	NOTE(review): the public name keeps its original spelling
	("tranfer_annoation") because callers reference it; `new_df = df` makes
	the two names aliases of the same DataFrame, so the in-place writes
	below also mutate df — harmless here since df is only read before being
	overwritten row-by-row.
	"""
	load_csv_path = data_path +'/'+ slide_num+'/'+ r_res+ '/' +r_res + '_labels.csv'
	save_csv_path = t_data_path +'/'+ slide_num+'/'+ t_res+ '/' +t_res + '_labels.csv'
	r_image_path = data_path +'/'+ slide_num+'/'+ r_res+ '/'
	t_image_path = t_data_path +'/'+ slide_num+'/'+ t_res+ '/'
	print(load_csv_path)
	print(save_csv_path)
	print(r_image_path)
	print(t_image_path)
	df = pd.read_csv(data_path +'/'+ slide_num+'/'+ r_res+ '/' +r_res + '_labels.csv')
	new_df=df
	# Bootstrap: compute the first homography from the first labelled image.
	ref_filename = str(df.iloc[0,0])
	print(ref_filename)
	ref=cv2.imread(r_image_path + ref_filename)
	# xz is the filename's resolution suffix (text after the last '_'),
	# replaced below to derive the target-resolution filename.
	xz=ref_filename.split('_')[-1]
	t_image= ref_filename.replace(xz, (t_res +'.png'))
	t_filename = t_image_path + t_image #ref_filename[:-9]+t_res+ '.png'
	print(t_filename)
	# # print(filename)
	target_image=cv2.imread(t_filename)
	h,w,c = target_image.shape
	# print('ref_image:-', ref_filename)
	# print('target_image:-', filename)
	h2=homography_cal(ref,target_image,surf)
	fname = ref_filename
	for i in range(0, len(df)):
		# read refrence image
		ref_filename = df.iloc[i,0]
		ref=cv2.imread(r_image_path + ref_filename)
		# # read target image
		# # need change here
		# xz=ref_filename.split('_')[-1]
		t_image= ref_filename.replace(xz, (t_res +'.png'))
		t_filename = t_image_path + t_image #ref_filename[:-9]+t_res+ '.png'
		target_image=cv2.imread(t_filename)
		h,w,c = target_image.shape
		# Reuse the cached homography while consecutive rows belong to the
		# same image; recompute only when the filename changes.
		if fname == ref_filename:
			print('skip_homograpy.....')
		else:
			print('compute homograpy.....')
			h2=homography_cal(ref,target_image,surf)
		# Warp the two box corners (x1,y1) and (x2,y2) into target coords.
		points = np.float32([[df.iloc[i,4],df.iloc[i,5],df.iloc[i,6],df.iloc[i,7]]]).reshape(-1, 1, 2)
		t_points = cv2.perspectiveTransform(points, h2).reshape(1,4)
		# Rewrite the row in place for the target image (aliases df).
		new_df.iloc[i,0] = t_image
		new_df.iloc[i,1] = w
		new_df.iloc[i,2] = h
		new_df.iloc[i,3] = df.iloc[i,3]
		new_df.iloc[i,4] = t_points[0,0]
		new_df.iloc[i,5] = t_points[0,1]
		new_df.iloc[i,6] = t_points[0,2]
		new_df.iloc[i,7] = t_points[0,3]
		fname = ref_filename
	new_df.to_csv(save_csv_path, index=False)
| [
"noreply@github.com"
] | Wajahat0.noreply@github.com |
911880abd59978863d7a68579ace6207eaea3235 | 0b6f3f46b61e23792bf3cebe068c987a517f88c6 | /chapter_09/动手试一试/9-11 导入Admin类/admin.py | d6f661f4c0b34faa87b1364d8aa7bf6c66eb2ad2 | [] | no_license | honkly/pcc | 0884522c348706a9bfde401df03084f256bf4f04 | 0789b41b0558e847e35eccbd8ecd096a8a1a34e3 | refs/heads/master | 2020-04-29T18:25:48.355921 | 2019-07-25T13:12:57 | 2019-07-25T13:12:57 | 176,323,830 | 0 | 0 | null | 2019-03-18T16:13:13 | 2019-03-18T16:13:12 | null | UTF-8 | Python | false | false | 1,006 | py | class User():
"""docstring for User"""
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
self.login_attempts = 0
def describe_user(self):
print(self.first_name.title() + ' ' + self.last_name.title())
def greet_user(self):
print("Hello! " + self.first_name.title() + ' ' + self.last_name.title())
def increment_login_attempts(self):
self.login_attempts += 1
def reset_login_attempts(self):
self.login_attempts = 0
def print_login_attempts(self):
print("Login attempts are " + str(self.login_attempts))
class Privileges():
def __init__(self):
self.privileges = ['can add post', 'can delete post', 'can ban user']
def show_privileges(self):
print(self.privileges)
class Admin(User):
"""9-8权限"""
def __init__(self, first_name, last_name):
super().__init__(first_name, last_name)
self.privileges = Privileges() | [
"honkly@163.com"
] | honkly@163.com |
fb572a3e446aa09655b1e5770b4fb5b166afd177 | 9044b1cb3c9a85094cdf02aae1c788e6d39252df | /day16/run.py | da22f278d816038bc490d2355c33cc109571b8f1 | [
"MIT"
] | permissive | 1923851861/Python_Base | 5aea29dabf9531b5f5e945989f2f567042b0bff3 | c9d8224cf41c58235fc67783d73a48f82fee61a1 | refs/heads/master | 2020-03-23T12:11:38.704227 | 2018-08-10T11:01:21 | 2018-08-10T11:01:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,193 | py | # 当前的执行文件
# x=1
# y=2
# 首次导入模块发生了3件事:
#1、以模块为准创造一个模块的名称空间
#2、执行模块对应的文件,将执行过程中产生的名字都丢到模块的名称空间
#3、在当前执行文件中拿到一个模块名
# import spam
# 之后的重复导入会直接引用之前创造好的结果,不会重复执行模块的文件
# import spam #spam=spam=模块名称空间的内存地址
# spam.名字
# print(x)
# spam.x
# print(spam.money)
# print(spam.read1)
# print(spam.read2)
# print(spam.change)
# money=11111111111111
# spam.read1()
# def read1():
# print('执行文件的read1')
# spam.read2()
# spam.change()
# print(spam.money)
# print(money)
# import spam as sm
# print(sm.money)
# import time,spam,os,sys
# import spam
# import os
# import sys
# import spam
# spam.money
# from ... import ...首次导入也发生了三件事:
#1、以模块为准创造一个模块的名称空间
#2、执行模块对应的文件,将执行过程中产生的名字都丢到模块的名称空间
#3、在当前执行文件的名称空间中拿到一个名字,该名字直接指向模块中的某一个名字,意味着可以不用加任何前缀而直接使用
# x=1
# y=2
#
# from spam import money,read1
# money=10
# print(money)
# print(read1)
# from .... import ... 对比 import 。。。
# 优点:不用加前缀,代码更为精简
# 缺点:容易与当前执行文件中名称空间中的名字冲突
# 相同点:
# 1、都会执行模块对应的文件,都会产生模块的名称空间
# 2、调用功能时,需要跑到定义时寻找作用域关系,与调用位置无关
# 不同点
# 1、一种需要加前缀,一种不需要加前缀
# from spam import money,read1,read2,change
# money=111111111111111111
# read1()
# def read1():
# print('当前执行文件的read1',money)
# read1()
# def read1():
# print('当前执行文件的read1',money)
# read2()
# change=1
# change()
# print(money)
# from spam import money
# from spam import read1
# from spam import read2
# from spam import change
from spam import *
print(money)
print(read1)
print(change)
# print(read2) | [
"1923851861@qq.com"
] | 1923851861@qq.com |
af2729e0f3c3c35ad20460334df67ddb78436aec | 6635686859b272d291d0ba3520ccd03cdc80a349 | /DT/threadingtext.py | 5b21c27fdb9a81a7ecb35c0e0d9c9ebe52c19d32 | [] | no_license | yangrencong/web_spiders | ac15c491f60e489000e5312c999f02e6c4fdafdf | 69fdc6eeb5ad19283690c056064f8853e0256445 | refs/heads/master | 2020-03-28T18:45:50.800667 | 2018-10-26T02:50:16 | 2018-10-26T02:50:16 | 148,908,630 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 995 | py | #!/usr/bin/env python
# -*- coding=utf8 -*-
"""
# Author: Mr.yang
# Created Time : 2018/10/10 星期三 12:26:07
# File Name: threadingtext.py
# Description:
# Editortool: vim8.0
"""
import threading
import time
class myThread(threading.Thread):
def __init__(self ,name ,delay):
threading.Thread.__init__(self)
self.name = name
self.delay = delay
def run(self):
print("Starting " + self.name)
print_time(self.name ,self.delay)
print("Exiting " + self.name)
def print_time(threadName ,delay):
counter = 0
while counter < 3:
time.sleep(delay)
print(threadName ,time.ctime())
counter += 1
threads = []
#创建新线程
thread1 = myThread("Thread-1" ,1)
thread2 = myThread("Thread-2" ,2)
#开启新线程
thread1.start()
thread2.start()
#添加线程到线程列表
threads.append(thread1)
threads.append(thread2)
#等待所有线程完成
for t in threads:
t.join()
print("Exiting main thread")
| [
"1452581359@qq.com"
] | 1452581359@qq.com |
cb2f886ed26850bfebfaf4e3a00a9e730652e300 | cc086a96967761f520c24ce3b22bacecb673cbf2 | /chec_operator/threads/observation.py | 877c8afa6dcb8e097cf23a53a3504277d6791849 | [] | no_license | watsonjj/chec_operator | 39524405b3c6a55fe7fa3e8353da5f456f76a27d | c537a1737a53fe996652c793c09f5a33cd03e208 | refs/heads/master | 2020-04-18T02:27:52.730614 | 2019-01-23T10:41:13 | 2019-01-23T10:41:13 | 167,163,531 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,722 | py | import threading
from time import sleep, ctime, time
from datetime import datetime
from chec_operator.utils.enums import CameraState
class ObservingThread(threading.Thread):
def __init__(self, parent_handler, timedelta, triggerdelta):
print("Creating observation thread")
self.parent_handler = parent_handler
self.timedelta = timedelta
self.triggerdelta = triggerdelta
self.starttime = 0
self.starttrigger = 0
self.currenttimedelta = 0
self.currenttriggerdelta = 0
self.get_trigger = self.parent_handler.get_backplane_trigger_count
super(ObservingThread, self).__init__()
self._observation_interrupt = threading.Event()
self.observation_reached_end = False
self.running = False
self.lock = threading.Lock()
def _check_time(self):
if self.timedelta:
self.currenttimedelta = datetime.now() - self.starttime
return self.currenttimedelta >= self.timedelta
else:
return False
def _check_trigger(self):
if self.triggerdelta:
self.currenttriggerdelta = self.get_trigger() - self.starttrigger
return self.currenttriggerdelta >= self.triggerdelta
else:
return False
def observation_ended(self):
return self._observation_interrupt.isSet()
def interrupt_observation(self):
if self.lock.acquire(False):
print("[WARNING] Interrupting observation thread!")
self._observation_interrupt.set()
self.join()
def run(self):
self.running = True
self.starttime = datetime.now()
self.starttrigger = self.get_trigger()
print("[INFO] Starting observation thread, "
"start time = {}, timedelta = {} s, triggerdelta = {}"
.format(ctime(time()), self.timedelta, self.triggerdelta))
while not self.observation_ended():
if self._check_time() or self._check_trigger():
self._finish_run()
break
self.running = False
print("Observation Ended")
def _finish_run(self):
if self.lock.acquire(False):
print("[INFO] Observation thread complete, "
"end time = {}, duration = {}, triggers {} (end) {} (actual)"
.format(ctime(time()), self.currenttimedelta,
self.currenttriggerdelta,
self.get_trigger() - self.starttrigger))
self.observation_reached_end = True
self.parent_handler.go_to_state(CameraState.READY)
def wait_for_end(self):
self.join()
print("Observation Ended")
| [
"jason.jw@live.co.uk"
] | jason.jw@live.co.uk |
96a38c411c7fb0314bce1ce412e3ee359440beb4 | a3ab76ec61f01e6bfea0925806bba57ae37ca505 | /blog/migrations/0013_auto_20200501_2232.py | 3b69304dc1c57ddb1a59105552ede3c0d1414800 | [] | no_license | kevinzhao07/django_posts | 343c92223046ffafa7673ceee053b8623c9fbca7 | baf48403bc9f6ff36d5f70a2c7359d4658747134 | refs/heads/master | 2022-12-10T00:08:47.789394 | 2020-08-24T16:35:14 | 2020-08-24T16:35:14 | 242,276,050 | 1 | 0 | null | 2022-12-08T04:04:44 | 2020-02-22T03:57:40 | HTML | UTF-8 | Python | false | false | 983 | py | # Generated by Django 2.2.12 on 2020-05-02 02:32
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('blog', '0012_auto_20200428_1719'),
]
operations = [
migrations.AlterField(
model_name='comment',
name='date_posted',
field=models.DateTimeField(default=django.utils.timezone.now),
),
migrations.AlterField(
model_name='like',
name='date_liked',
field=models.DateTimeField(default=django.utils.timezone.now),
),
migrations.AlterField(
model_name='message',
name='date_sent',
field=models.DateTimeField(default=django.utils.timezone.now),
),
migrations.AlterField(
model_name='post',
name='date_posted',
field=models.DateTimeField(default=django.utils.timezone.now),
),
]
| [
"kevinmz@umich.edu"
] | kevinmz@umich.edu |
8b59ab52b831746af65b2e33c02a38521a75fd82 | 40c86e0b1cda09b4beba5a74099e35578fce0dd5 | /ascii.py | 50a1e4e3711773a9a73763f4367e00da7f81ff26 | [] | no_license | nikitiwari/Learning_Python | ec9b479e6cd47423d6754aa37eb1032444cd186d | 6a76dc9fbc0934d0d655e296a7777a09bcc26afb | refs/heads/master | 2020-04-05T13:05:19.686745 | 2017-07-15T19:17:26 | 2017-07-15T19:17:26 | 95,075,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 105 | py | c = raw_input("Enter a character:");
try :
print ord(c)
except :
print "Only characters allowed"
| [
"tiwarinikita96@gmail.com"
] | tiwarinikita96@gmail.com |
e52bdadb66f4c2ef03e7ec524da43f8558e1ba9d | df7fbefd727c081fb5d2d0e93356042f20b908fd | /index.py | 627ee82c34fddaf2605fe13cd24a7b6a08516d29 | [] | no_license | Germey/SDUCourse | 1a1c189054dea2082bcacc58f5900434c58689a5 | bea33811eca988d1eb295ada460d677a50187230 | refs/heads/master | 2020-04-06T06:53:51.280673 | 2016-09-13T07:48:42 | 2016-09-13T07:48:42 | 62,208,347 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 88 | py | # -*- coding: utf-8 -*-
from lib.main import main
if __name__ == '__main__':
main() | [
"1016903103@qq.com"
] | 1016903103@qq.com |
b93b498f4935633bf8a5990243e2de8805264276 | ad48d4a4107dd02eb8b155cfcfcae86e40bf5001 | /venv/Scripts/easy_install-3.6-script.py | 6a207e087202fdcfc3044244e7cf026f5af2913f | [] | no_license | Chloe1997/VGGnet_classification | 2515f69d7c0ccf09e7161e367cf8600988c880e4 | e01a1ac42aec74cb914a17a776db9ec28f641b96 | refs/heads/master | 2020-06-30T16:03:44.485820 | 2019-08-07T14:24:52 | 2019-08-07T14:24:52 | 200,879,223 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 464 | py | #!C:\Users\student1\Desktop\VGG_classification\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install-3.6'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install-3.6')()
)
| [
"dellawen1997@gmail.com"
] | dellawen1997@gmail.com |
b512faa24699a6fe7dd43a3e59774c974454b97b | 2f79b7d886eb05ccdab5cad10fc85540e72384f2 | /HW2/Q2/Q2(b).py | 1cd2fe5200d96a8bc4179440dcb5c4a119bb6ac5 | [] | no_license | nrgdoublex/DataMiningHomework | 0276f537f2a5c6c2e9458aa56d782f358e74f84f | add7fd8d785e3f5d80724b7c7a1bb651d3705ea3 | refs/heads/master | 2021-01-01T20:50:09.368008 | 2017-08-01T01:06:11 | 2017-08-01T01:06:11 | 98,943,146 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,066 | py | import numpy as np
import scipy.stats as stats
import pandas as pd
from Stat_function import t_test
file_name = 'Q2.csv'
df = pd.read_csv(file_name
,skiprows = 1
,encoding = 'Latin-1'
,na_values = 'No Data')
pd.set_option('display.expand_frame_repr', False)
df['number'] = df['number'].apply(lambda x: int(x) if pd.notnull(x) else x)
df['percent'] = df['percent'].div(100)
# we only care about those county with > 100000 people
df = df.loc[df['number'] >= 100000,:]
df.index = xrange(df.shape[0])
#t-score for 95% confidence interval(Bonferroni correction)
z = stats.norm.ppf(1-0.05/(df.shape[0]-1))
max = np.max(df['percent'])
argmax = np.argmax(df['percent'])
for i in range(df.shape[0]):
if i == argmax: continue
compare = np.max(df.iloc[i,:]['percent'])
sample_size = (z**2)*max*(1-max)/((max-compare)**2)
print "{0},{1} VS {2},{3} => sample size = {4}".format(
df.iloc[argmax,:]['County'],df.iloc[argmax,:]['State'],
df.iloc[i,:]['County'],df.iloc[i,:]['State'], sample_size) | [
"nrgdoublex@gmail.com"
] | nrgdoublex@gmail.com |
e8012164e3c46fc9c9870c0102c227d7f0d59508 | f09b27136435cb298a55514adc5b1ccbe8badb28 | /lab 13/zad 1.py | 0ebf9023e993c963ae08d5d854e8ba1bdc8c1439 | [] | no_license | Roman43407/Sem-2- | 890db6446254266d9866992bb804af906bdd87df | de3a3eb9de1477cca29fa118264729cbe90f93ff | refs/heads/main | 2023-04-26T19:58:39.122440 | 2021-05-31T12:10:50 | 2021-05-31T12:10:50 | 372,455,729 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,036 | py | class Car:
def __init__(self, brand, age, color, capacity, fuelType):
self.brand = brand
self.age = age
self.color = color
self.capacity = capacity
self.fuelType = fuelType
def Refueling(self):
print("Zatankowano: {} {} litrów ".format(self.fuelType, self.capacity))
def CarInfo(self):
print("Marka:", self.brand)
print("Wiek:", self.age)
print("Kolor:", self.color)
print("Pojemność baku:", self.capacity)
print("Rodzaj paliwa:", self.fuelType)
def ChangeColor(self, newColor):
print("Zmieniono kolor z {} na {}".format(self.color, newColor))
def Velocity(self):
if (self.color == "red"):
print("Bardzo szybki")
else:
print("Nie tak szybki jak czerwony")
def CallService(self):
print("Jesteś 10 w kolejce")
opelek = Car("Volvo", 10, "Gold", 50, "diesel")
opelek.Refueling()
opelek.ChangeColor("Red")
opelek.CarInfo()
opelek.Velocity()
opelek.CallService() | [
"ya.ignis@gmail.com"
] | ya.ignis@gmail.com |
3245d99ff8b76871f71f66e0d14bf4ef52546a21 | 1e06402f2196da5270060d2dbe8c1aafafd3b387 | /venv/bin/chardetect | 75070d1851ae937bbe1b728865f55c1df3944359 | [] | no_license | billyggroves/myBlog | c26be905861d4a9f3b32ba8fce24103866311e7d | adc759d4a1a5389ab64cbfeb400898cad086c840 | refs/heads/master | 2022-12-15T10:40:20.626606 | 2019-02-26T01:11:20 | 2019-02-26T01:11:20 | 121,530,285 | 0 | 0 | null | 2022-12-08T00:55:40 | 2018-02-14T16:01:44 | Python | UTF-8 | Python | false | false | 257 | #!/Users/newuser/Desktop/myBlog/venv/bin/python3.6
# -*- coding: utf-8 -*-
import re
import sys
from chardet.cli.chardetect import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"billyggroves@gmail.com"
] | billyggroves@gmail.com | |
4bfbe0f48777b895dcde27aa0a5fff0fa22a8110 | 20f8e01f6c574e2cc0af8a9fffadc54e5ae938cd | /util/io.py | 8c77b6067c60d3f0872960b48d75f062ee326cb2 | [
"MIT"
] | permissive | samjbaugh/CoopNets | 3c5334b5e0b2071bf0423318306ae8d2abaa47eb | 0e3ef7060cbc37c6b4e32b93d7b4613f9ace42fe | refs/heads/master | 2020-03-10T03:41:43.358144 | 2018-03-27T21:37:41 | 2018-03-27T21:37:41 | 129,171,842 | 0 | 0 | MIT | 2018-04-12T00:51:12 | 2018-04-12T00:51:11 | null | UTF-8 | Python | false | false | 733 | py | import sys
import logging
import tensorflow as tf
def init_log(path):
log = logging.getLogger()
log.setLevel(logging.INFO)
formatter_cs = logging.Formatter('%(message)s')
cs = logging.StreamHandler(sys.stdout)
cs.setLevel(logging.INFO)
cs.setFormatter(formatter_cs)
log.addHandler(cs)
log = logging.getLogger('tensorflow')
log.setLevel(logging.INFO)
log.handlers = []
formatter_fh = logging.Formatter('%(asctime)s - %(message)s')
fh = logging.FileHandler(path)
fh.setLevel(logging.DEBUG)
fh.setFormatter(formatter_fh)
log.addHandler(fh)
def make_dir(output_dir):
if not tf.gfile.Exists(output_dir):
tf.gfile.MakeDirs(output_dir)
return output_dir
| [
"erik.nijkamp@gmail.com"
] | erik.nijkamp@gmail.com |
a8b8d378ab3f6f1387d52577aa1bf5431858cd0c | c41471781f65d38d9010450b6c9e17f2346a551b | /openstack-dashboard/templates/icehouse/local_settings.py | f45295ea2a3762091554852d5a4f73c472022cc5 | [
"Apache-2.0"
] | permissive | juanarturovargas/openstack-juju | b6854e2feea615404c053e9c754e4d7997c8a6a5 | 21b1aef8aa51c3c32cb1efd1b8cad7865c4d40a0 | refs/heads/master | 2022-12-13T15:31:53.383963 | 2017-05-05T19:18:55 | 2017-05-05T19:18:55 | 90,163,436 | 0 | 1 | NOASSERTION | 2022-11-20T08:41:15 | 2017-05-03T15:17:34 | Python | UTF-8 | Python | false | false | 17,009 | py | import os
from django.utils.translation import ugettext_lazy as _
{% if use_syslog %}
from logging.handlers import SysLogHandler
{% endif %}
from openstack_dashboard import exceptions
DEBUG = {{ debug }}
TEMPLATE_DEBUG = DEBUG
# Required for Django 1.5.
# If horizon is running in production (DEBUG is False), set this
# with the list of host/domain names that the application can serve.
# For more information see:
# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
#ALLOWED_HOSTS = ['horizon.example.com', ]
# Set SSL proxy settings:
# For Django 1.4+ pass this header from the proxy after terminating the SSL,
# and don't forget to strip it from the client's request.
# For more information see:
# https://docs.djangoproject.com/en/1.4/ref/settings/#secure-proxy-ssl-header
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTOCOL', 'https')
# If Horizon is being served through SSL, then uncomment the following two
# settings to better secure the cookies from security exploits
#CSRF_COOKIE_SECURE = True
#SESSION_COOKIE_SECURE = True
# Overrides for OpenStack API versions. Use this setting to force the
# OpenStack dashboard to use a specfic API version for a given service API.
# NOTE: The version should be formatted as it appears in the URL for the
# service API. For example, The identity service APIs have inconsistent
# use of the decimal point, so valid options would be "2.0" or "3".
# OPENSTACK_API_VERSIONS = {
# "identity": 3
# }
# Set this to True if running on multi-domain model. When this is enabled, it
# will require user to enter the Domain name in addition to username for login.
# OPENSTACK_KEYSTONE_MULTIDOMAIN_SUPPORT = False
# Overrides the default domain used when running on single-domain model
# with Keystone V3. All entities will be created in the default domain.
# OPENSTACK_KEYSTONE_DEFAULT_DOMAIN = 'Default'
# Set Console type:
# valid options would be "AUTO", "VNC" or "SPICE"
# CONSOLE_TYPE = "AUTO"
# Default OpenStack Dashboard configuration.
HORIZON_CONFIG = {
'dashboards': ('project', 'admin', 'settings',),
'default_dashboard': 'project',
'user_home': 'openstack_dashboard.views.get_user_home',
'ajax_queue_limit': 10,
'auto_fade_alerts': {
'delay': 3000,
'fade_duration': 1500,
'types': ['alert-success', 'alert-info']
},
'help_url': "http://docs.openstack.org",
'exceptions': {'recoverable': exceptions.RECOVERABLE,
'not_found': exceptions.NOT_FOUND,
'unauthorized': exceptions.UNAUTHORIZED},
}
# Specify a regular expression to validate user passwords.
# HORIZON_CONFIG["password_validator"] = {
# "regex": '.*',
# "help_text": _("Your password does not meet the requirements.")
# }
# Disable simplified floating IP address management for deployments with
# multiple floating IP pools or complex network requirements.
# HORIZON_CONFIG["simple_ip_management"] = False
# Turn off browser autocompletion for the login form if so desired.
# HORIZON_CONFIG["password_autocomplete"] = "off"
LOCAL_PATH = os.path.dirname(os.path.abspath(__file__))
# Set custom secret key:
# You can either set it to a specific value or you can let horizion generate a
# default secret key that is unique on this machine, e.i. regardless of the
# amount of Python WSGI workers (if used behind Apache+mod_wsgi): However, there
# may be situations where you would want to set this explicitly, e.g. when
# multiple dashboard instances are distributed on different machines (usually
# behind a load-balancer). Either you have to make sure that a session gets all
# requests routed to the same dashboard instance or you set the same SECRET_KEY
# for all of them.
SECRET_KEY = "{{ secret }}"
# We recommend you use memcached for development; otherwise after every reload
# of the django development server, you will have to login again. To use
# memcached set CACHES to something like
CACHES = {
'default': {
'BACKEND' : 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION' : '127.0.0.1:11211',
}
}
# Send email to the console by default
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
# Or send them to /dev/null
#EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend'
# Configure these for your outgoing email host
# EMAIL_HOST = 'smtp.my-company.com'
# EMAIL_PORT = 25
# EMAIL_HOST_USER = 'djangomail'
# EMAIL_HOST_PASSWORD = 'top-secret!'
# For multiple regions uncomment this configuration, and add (endpoint, title).
# AVAILABLE_REGIONS = [
# ('http://cluster1.example.com:5000/v2.0', 'cluster1'),
# ('http://cluster2.example.com:5000/v2.0', 'cluster2'),
# ]
{% if regions|length > 1 -%}
AVAILABLE_REGIONS = [
{% for region in regions -%}
('{{ region.endpoint }}', '{{ region.title }}'),
{% endfor -%}
]
{% endif -%}
OPENSTACK_HOST = "{{ service_host }}"
OPENSTACK_KEYSTONE_URL = "{{ service_protocol }}://%s:{{ service_port }}/v2.0" % OPENSTACK_HOST
OPENSTACK_KEYSTONE_DEFAULT_ROLE = "{{ default_role }}"
# Disable SSL certificate checks (useful for self-signed certificates):
# OPENSTACK_SSL_NO_VERIFY = True
# The CA certificate to use to verify SSL connections
# OPENSTACK_SSL_CACERT = '/path/to/cacert.pem'
# The OPENSTACK_KEYSTONE_BACKEND settings can be used to identify the
# capabilities of the auth backend for Keystone.
# If Keystone has been configured to use LDAP as the auth backend then set
# can_edit_user to False and name to 'ldap'.
#
# TODO(tres): Remove these once Keystone has an API to identify auth backend.
OPENSTACK_KEYSTONE_BACKEND = {
'name': 'native',
'can_edit_user': True,
'can_edit_group': True,
'can_edit_project': True,
'can_edit_domain': True,
'can_edit_role': True
}
OPENSTACK_HYPERVISOR_FEATURES = {
'can_set_mount_point': False,
'can_set_password': False,
}
# The OPENSTACK_NEUTRON_NETWORK settings can be used to enable optional
# services provided by neutron. Options currenly available are load
# balancer service, security groups, quotas.
OPENSTACK_NEUTRON_NETWORK = {
'enable_lb': {{ neutron_network_lb }},
'enable_quotas': True,
'enable_security_group': True,
'enable_firewall': {{ neutron_network_firewall }},
'enable_vpn': {{ neutron_network_vpn }},
# The profile_support option is used to detect if an external router can be
# configured via the dashboard. When using specific plugins the
# profile_support can be turned on if needed.
#'profile_support': None,
#'profile_support': 'cisco', # Example of value set to support Cisco
{% if support_profile -%}
'profile_support': '{{ support_profile }}',
{% else -%}
'profile_support': None,
{% endif -%}
}
# The OPENSTACK_IMAGE_BACKEND settings can be used to customize features
# in the OpenStack Dashboard related to the Image service, such as the list
# of supported image formats.
OPENSTACK_IMAGE_BACKEND = {
'image_formats': [
('', ''),
('aki', _('AKI - Amazon Kernel Image')),
('ami', _('AMI - Amazon Machine Image')),
('ari', _('ARI - Amazon Ramdisk Image')),
('iso', _('ISO - Optical Disk Image')),
('qcow2', _('QCOW2 - QEMU Emulator')),
('raw', _('Raw')),
('vdi', _('VDI')),
('vhd', _('VHD')),
('vmdk', _('VMDK'))
]
}
# The IMAGE_CUSTOM_PROPERTY_TITLES settings is used to customize the titles for
# image custom property attributes that appear on image detail pages.
IMAGE_CUSTOM_PROPERTY_TITLES = {
"architecture": _("Architecture"),
"kernel_id": _("Kernel ID"),
"ramdisk_id": _("Ramdisk ID"),
"image_state": _("Euca2ools state"),
"project_id": _("Project ID"),
"image_type": _("Image Type")
}
# OPENSTACK_ENDPOINT_TYPE specifies the endpoint type to use for the endpoints
# in the Keystone service catalog. Use this setting when Horizon is running
# external to the OpenStack environment. The default is 'publicURL'.
#OPENSTACK_ENDPOINT_TYPE = "publicURL"
{% if primary_endpoint -%}
OPENSTACK_ENDPOINT_TYPE = "{{ primary_endpoint }}"
{% endif -%}
# SECONDARY_ENDPOINT_TYPE specifies the fallback endpoint type to use in the
# case that OPENSTACK_ENDPOINT_TYPE is not present in the endpoints
# in the Keystone service catalog. Use this setting when Horizon is running
# external to the OpenStack environment. The default is None. This
# value should differ from OPENSTACK_ENDPOINT_TYPE if used.
#SECONDARY_ENDPOINT_TYPE = "publicURL"
{% if secondary_endpoint -%}
SECONDARY_ENDPOINT_TYPE = "{{ secondary_endpoint }}"
{% endif -%}
# The number of objects (Swift containers/objects or images) to display
# on a single page before providing a paging element (a "more" link)
# to paginate results.
API_RESULT_LIMIT = 1000
API_RESULT_PAGE_SIZE = 20
# The timezone of the server. This should correspond with the timezone
# of your entire OpenStack installation, and hopefully be in UTC.
TIME_ZONE = "UTC"
# When launching an instance, the menu of available flavors is
# sorted by RAM usage, ascending. Provide a callback method here
# (and/or a flag for reverse sort) for the sorted() method if you'd
# like a different behaviour. For more info, see
# http://docs.python.org/2/library/functions.html#sorted
# CREATE_INSTANCE_FLAVOR_SORT = {
# 'key': my_awesome_callback_method,
# 'reverse': False,
# }
# The Horizon Policy Enforcement engine uses these values to load per service
# policy rule files. The content of these files should match the files the
# OpenStack services are using to determine role based access control in the
# target installation.
# Path to directory containing policy.json files
#POLICY_FILES_PATH = os.path.join(ROOT_PATH, "conf")
# Map of local copy of service policy files
#POLICY_FILES = {
# 'identity': 'keystone_policy.json',
# 'compute': 'nova_policy.json'
#}
# Trove user and database extension support. By default support for
# creating users and databases on database instances is turned on.
# To disable these extensions set the permission here to something
# unusable such as ["!"].
# TROVE_ADD_USER_PERMS = []
# TROVE_ADD_DATABASE_PERMS = []
LOGGING = {
'version': 1,
# When set to True this will disable all logging except
# for loggers specified in this configuration dictionary. Note that
# if nothing is specified here and disable_existing_loggers is True,
# django.db.backends will still log unless it is disabled explicitly.
'disable_existing_loggers': False,
'handlers': {
'null': {
'level': 'DEBUG',
'class': 'django.utils.log.NullHandler',
},
'console': {
# Set the level to "DEBUG" for verbose output logging.
'level': 'INFO',
'class': 'logging.StreamHandler',
},
{% if use_syslog %}
'syslog': {
'level': 'INFO',
'class': 'logging.handlers.SysLogHandler',
}
{% endif %}
},
'loggers': {
# Logging from django.db.backends is VERY verbose, send to null
# by default.
'django.db.backends': {
'handlers': ['null'],
'propagate': False,
},
'requests': {
'handlers': ['null'],
'propagate': False,
},
'horizon': {
{% if use_syslog %}
'handlers': ['syslog'],
{% else %}
'handlers': ['console'],
{% endif %}
'propagate': False,
},
'openstack_dashboard': {
{% if use_syslog %}
'handlers': ['syslog'],
{% else %}
'handlers': ['console'],
{% endif %}
'propagate': False,
},
'openstack_auth': {
{% if use_syslog %}
'handlers': ['syslog'],
{% else %}
'handlers': ['console'],
{% endif %}
'propagate': False,
},
'novaclient': {
{% if use_syslog %}
'handlers': ['syslog'],
{% else %}
'handlers': ['console'],
{% endif %}
'propagate': False,
},
'cinderclient': {
{% if use_syslog %}
'handlers': ['syslog'],
{% else %}
'handlers': ['console'],
{% endif %}
'propagate': False,
},
'keystoneclient': {
{% if use_syslog %}
'handlers': ['syslog'],
{% else %}
'handlers': ['console'],
{% endif %}
'propagate': False,
},
'glanceclient': {
{% if use_syslog %}
'handlers': ['syslog'],
{% else %}
'handlers': ['console'],
{% endif %}
'propagate': False,
},
'heatclient': {
{% if use_syslog %}
'handlers': ['syslog'],
{% else %}
'handlers': ['console'],
{% endif %}
'propagate': False,
},
'nose.plugins.manager': {
{% if use_syslog %}
'handlers': ['syslog'],
{% else %}
'handlers': ['console'],
{% endif %}
'propagate': False,
}
}
}
SECURITY_GROUP_RULES = {
'all_tcp': {
'name': 'ALL TCP',
'ip_protocol': 'tcp',
'from_port': '1',
'to_port': '65535',
},
'all_udp': {
'name': 'ALL UDP',
'ip_protocol': 'udp',
'from_port': '1',
'to_port': '65535',
},
'all_icmp': {
'name': 'ALL ICMP',
'ip_protocol': 'icmp',
'from_port': '-1',
'to_port': '-1',
},
'ssh': {
'name': 'SSH',
'ip_protocol': 'tcp',
'from_port': '22',
'to_port': '22',
},
'smtp': {
'name': 'SMTP',
'ip_protocol': 'tcp',
'from_port': '25',
'to_port': '25',
},
'dns': {
'name': 'DNS',
'ip_protocol': 'tcp',
'from_port': '53',
'to_port': '53',
},
'http': {
'name': 'HTTP',
'ip_protocol': 'tcp',
'from_port': '80',
'to_port': '80',
},
'pop3': {
'name': 'POP3',
'ip_protocol': 'tcp',
'from_port': '110',
'to_port': '110',
},
'imap': {
'name': 'IMAP',
'ip_protocol': 'tcp',
'from_port': '143',
'to_port': '143',
},
'ldap': {
'name': 'LDAP',
'ip_protocol': 'tcp',
'from_port': '389',
'to_port': '389',
},
'https': {
'name': 'HTTPS',
'ip_protocol': 'tcp',
'from_port': '443',
'to_port': '443',
},
'smtps': {
'name': 'SMTPS',
'ip_protocol': 'tcp',
'from_port': '465',
'to_port': '465',
},
'imaps': {
'name': 'IMAPS',
'ip_protocol': 'tcp',
'from_port': '993',
'to_port': '993',
},
'pop3s': {
'name': 'POP3S',
'ip_protocol': 'tcp',
'from_port': '995',
'to_port': '995',
},
'ms_sql': {
'name': 'MS SQL',
'ip_protocol': 'tcp',
'from_port': '1443',
'to_port': '1443',
},
'mysql': {
'name': 'MYSQL',
'ip_protocol': 'tcp',
'from_port': '3306',
'to_port': '3306',
},
'rdp': {
'name': 'RDP',
'ip_protocol': 'tcp',
'from_port': '3389',
'to_port': '3389',
},
}
FLAVOR_EXTRA_KEYS = {
'flavor_keys': [
('quota:read_bytes_sec', _('Quota: Read bytes')),
('quota:write_bytes_sec', _('Quota: Write bytes')),
('quota:cpu_quota', _('Quota: CPU')),
('quota:cpu_period', _('Quota: CPU period')),
('quota:inbound_average', _('Quota: Inbound average')),
('quota:outbound_average', _('Quota: Outbound average')),
]
}
{% if ubuntu_theme %}
# Enable the Ubuntu theme if it is present.
try:
from ubuntu_theme import *
except ImportError:
pass
{% endif %}
# Default Ubuntu apache configuration uses /horizon as the application root.
# Configure auth redirects here accordingly.
{% if webroot == "/" %}
LOGIN_URL='/auth/login/'
LOGOUT_URL='/auth/logout/'
{% else %}
LOGIN_URL='{{ webroot }}/auth/login/'
LOGOUT_URL='{{ webroot }}/auth/logout/'
{% endif %}
LOGIN_REDIRECT_URL='{{ webroot }}'
# The Ubuntu package includes pre-compressed JS and compiled CSS to allow
# offline compression by default. To enable online compression, install
# the node-less package and enable the following option.
COMPRESS_OFFLINE = {{ compress_offline }}
# By default, validation of the HTTP Host header is disabled. Production
# installations should have this set accordingly. For more information
# see https://docs.djangoproject.com/en/dev/ref/settings/.
ALLOWED_HOSTS = '*'
{% if password_retrieve %}
OPENSTACK_ENABLE_PASSWORD_RETRIEVE = True
{% endif %}
{{ settings|join('\n\n') }}
| [
"kataguruma1"
] | kataguruma1 |
7fc024f18bdc5289a4cad605dbc8a2f6fa792e74 | c4c159a21d2f1ea0d7dfaa965aeff01c8ef70dce | /flask/flaskenv/Lib/site-packages/tensorflow/contrib/eager/python/tfe.py | c441ab87be7e0aebadefe92023f89bfd67ff471e | [] | no_license | AhsonAslam/webapi | 54cf7466aac4685da1105f9fb84c686e38f92121 | 1b2bfa4614e7afdc57c9210b0674506ea70b20b5 | refs/heads/master | 2020-07-27T06:05:36.057953 | 2019-09-17T06:35:33 | 2019-09-17T06:35:33 | 208,895,450 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:9b7bd976495c4645582fde2d7fcf488a311648b43813cff249462fccfa19224a
size 5928
| [
"github@cuba12345"
] | github@cuba12345 |
02922e6762aefcb1ca578359e971f39a28df7916 | d0eb582894eff3c44e3de4bd50f571f9d9ab3a02 | /venv/lib/python3.7/site-packages/tox/config.py | 4d5e02be935189dbd4ec6eaf1666cc27c16ee562 | [
"MIT"
] | permissive | tdle94/app-store-scrapper | 159187ef3825213d40425215dd9c9806b415769e | ed75880bac0c9ef685b2c1bf57a6997901abface | refs/heads/master | 2022-12-20T21:10:59.621305 | 2020-10-28T00:32:21 | 2020-10-28T00:32:21 | 247,291,364 | 1 | 2 | MIT | 2022-12-08T03:53:08 | 2020-03-14T14:25:44 | Python | UTF-8 | Python | false | false | 57,366 | py | from __future__ import print_function
import argparse
import itertools
import os
import random
import re
import shlex
import string
import sys
import warnings
from collections import OrderedDict
from fnmatch import fnmatchcase
from subprocess import list2cmdline
import pkg_resources
import pluggy
import py
import toml
import tox
from tox.constants import INFO
from tox.interpreters import Interpreters, NoInterpreterInfo
# Deprecated module-level re-exports, kept only so third-party plugins that
# import them from tox.config keep working; new code should use pluggy's
# HookimplMarker / tox.PYTHON.DEFAULT_FACTORS directly.
hookimpl = tox.hookimpl
"""DEPRECATED - REMOVE - this is left for compatibility with plugins importing this from here.
Instead create a hookimpl in your code with:
import pluggy
hookimpl = pluggy.HookimplMarker("tox")
"""
default_factors = tox.PYTHON.DEFAULT_FACTORS
"""DEPRECATED MOVE - please update to new location."""
def get_plugin_manager(plugins=()):
    """Create the pluggy plugin manager with tox's core plugins registered.

    Core tox modules are registered first, then plugins installed via the
    setuptools "tox" entry point, then any *plugins* passed explicitly.

    :param plugins: extra plugin objects to register (used by tests/embedders)
    :return: a configured :class:`pluggy.PluginManager`
    """
    # initialize plugin manager
    import tox.venv
    pm = pluggy.PluginManager("tox")
    pm.add_hookspecs(tox.hookspecs)
    pm.register(tox.config)
    pm.register(tox.interpreters)
    pm.register(tox.venv)
    pm.register(tox.session)
    # imported locally to avoid an import cycle at module load time
    from tox import package
    pm.register(package)
    pm.load_setuptools_entrypoints("tox")
    for plugin in plugins:
        pm.register(plugin)
    # fail early if a plugin hookimpl does not match any known hookspec
    pm.check_pending()
    return pm
class Parser:
    """Command line and ini-parser control object."""

    def __init__(self):
        self.argparser = argparse.ArgumentParser(description="tox options", add_help=False)
        self._testenv_attr = []

    def add_argument(self, *args, **kwargs):
        """Register a command line argument.

        Accepts exactly the same arguments as
        ``argparse.ArgumentParser.add_argument``.
        """
        return self.argparser.add_argument(*args, **kwargs)

    def add_testenv_attribute(self, name, type, help, default=None, postprocess=None):
        """Register an ini-file variable for the "testenv" section.

        ``type`` is a string such as "bool", "line-list", "string", "argv",
        "path" or "argvlist".  When given, ``postprocess`` is invoked as
        ``postprocess(testenv_config=testenv_config, value=value)`` with the
        raw (or default) ini value and the :class:`tox.config.TestenvConfig`
        instance being populated; its return value becomes the final setting.
        """
        attribute = VenvAttribute(
            name=name, type=type, default=default, help=help, postprocess=postprocess
        )
        self._testenv_attr.append(attribute)

    def add_testenv_attribute_obj(self, obj):
        """Register an ini-file variable described by an object.

        Works like :meth:`add_testenv_attribute`, but reads the "name",
        "type", "help" and "postprocess" values from attributes of ``obj``.
        """
        for required in ("name", "type", "help", "postprocess"):
            assert hasattr(obj, required)
        self._testenv_attr.append(obj)

    def parse_cli(self, args):
        """Parse *args* with the underlying argparse parser."""
        return self.argparser.parse_args(args)

    def _format_help(self):
        """Render the argparse help text."""
        return self.argparser.format_help()
class VenvAttribute:
    """Plain value container describing one per-testenv ini setting.

    ``name``/``type`` identify the setting, ``default`` applies when the ini
    file does not define it, ``help`` is shown by ``tox --help-ini`` and
    ``postprocess`` (may be None) finalizes the raw value.
    """

    def __init__(self, name, type, default, help, postprocess):
        self.name, self.type = name, type
        self.default, self.help = default, help
        self.postprocess = postprocess
class DepOption:
    """The "deps" testenv setting: per-line pip/setuptools dependency specs.

    Each line may be prefixed with ``:indexservername:`` to select a
    configured index server, and may contain pip install options.
    """

    name = "deps"
    type = "line-list"
    help = "each line specifies a dependency in pip/setuptools format."
    default = ()

    def postprocess(self, testenv_config, value):
        """Turn raw dep lines into a list of :class:`DepConfig` objects.

        Handles the ``:name:`` index-server prefix, normalizes pip option
        spacing, and applies ``--force-dep`` overrides.
        """
        deps = []
        config = testenv_config.config
        for depline in value:
            # ":name: spec" selects a configured index server for this dep
            m = re.match(r":(\w+):\s*(\S+)", depline)
            if m:
                iname, name = m.groups()
                ixserver = config.indexserver[iname]
            else:
                name = depline.strip()
                ixserver = None
            # we need to process options, in case they contain a space,
            # as the subprocess call to pip install will otherwise fail.
            # in case of a short option, we remove the space
            for option in tox.PIP.INSTALL_SHORT_OPTIONS_ARGUMENT:
                if name.startswith(option):
                    name = "{}{}".format(option, name[len(option) :].strip())
            # in case of a long option, we add an equal sign
            for option in tox.PIP.INSTALL_LONG_OPTIONS_ARGUMENT:
                name_start = "{} ".format(option)
                if name.startswith(name_start):
                    name = "{}={}".format(option, name[len(option) :].strip())
            name = self._replace_forced_dep(name, config)
            deps.append(DepConfig(name, ixserver))
        return deps

    def _replace_forced_dep(self, name, config):
        """Override given dependency config name. Take ``--force-dep-version`` option into account.

        :param name: dep config, for example ["pkg==1.0", "other==2.0"].
        :param config: ``Config`` instance
        :return: the new dependency that should be used for virtual environments
        """
        if not config.option.force_dep:
            return name
        for forced_dep in config.option.force_dep:
            if self._is_same_dep(forced_dep, name):
                return forced_dep
        return name

    @classmethod
    def _is_same_dep(cls, dep1, dep2):
        """Definitions are the same if they refer to the same package, even if versions differ."""
        dep1_name = pkg_resources.Requirement.parse(dep1).project_name
        try:
            dep2_name = pkg_resources.Requirement.parse(dep2).project_name
        except pkg_resources.RequirementParseError:
            # we couldn't parse a version, probably a URL
            return False
        return dep1_name == dep2_name
class PosargsOption:
name = "args_are_paths"
type = "bool"
default = True
help = "treat positional args in commands as paths"
def postprocess(self, testenv_config, value):
config = testenv_config.config
args = config.option.args
if args:
if value:
args = []
for arg in config.option.args:
if arg and not os.path.isabs(arg):
origpath = os.path.join(config.invocationcwd.strpath, arg)
if os.path.exists(origpath):
arg = os.path.relpath(origpath, testenv_config.changedir.strpath)
args.append(arg)
testenv_config._reader.addsubstitutions(args)
return value
class InstallcmdOption:
    """The "install_command" testenv setting: argv used to install deps/package."""

    name = "install_command"
    type = "argv"
    default = "python -m pip install {opts} {packages}"
    help = "install command for dependencies and package under test."

    def postprocess(self, testenv_config, value):
        """Validate that the configured command carries a {packages} placeholder."""
        if "{packages}" in value:
            return value
        raise tox.exception.ConfigError(
            "'install_command' must contain '{packages}' substitution"
        )
def parseconfig(args, plugins=()):
    """Parse the configuration file and create a Config object.

    :param plugins: extra plugin objects to register with the plugin manager
    :param list[str] args: list of arguments.
    :rtype: :class:`Config`
    :raise SystemExit: toxinit file is not found
    """
    pm = get_plugin_manager(plugins)
    config, option = parse_cli(args, pm)
    # try candidate config files in order; the first usable one wins
    for config_file in propose_configs(option.configfile):
        config_type = config_file.basename
        content = None
        if config_type == "pyproject.toml":
            toml_content = get_py_project_toml(config_file)
            try:
                # tox config may be embedded as tool.tox.legacy_tox_ini
                content = toml_content["tool"]["tox"]["legacy_tox_ini"]
            except KeyError:
                continue
        ParseIni(config, config_file, content)
        pm.hook.tox_configure(config=config)  # post process config object
        break
    else:
        # no candidate found: report, and exit unless only help was requested
        msg = "tox config file (either {}) not found"
        candidates = ", ".join(INFO.CONFIG_CANDIDATES)
        feedback(msg.format(candidates), sysexit=not (option.help or option.helpini))
    return config
def get_py_project_toml(path):
    """Read *path* (a pyproject.toml) and return its parsed TOML content."""
    with open(str(path)) as file_handler:
        return toml.load(file_handler)
def propose_configs(cli_config_file):
    """Yield candidate configuration file paths, best match first.

    If *cli_config_file* points at a file, only that file is proposed; if it
    is a directory, the standard candidate names are searched there; if it is
    neither, an error is printed and nothing is yielded.  Without a CLI value
    the candidates are searched in the current directory and then upwards
    through its parent directories.
    """
    from_folder = py.path.local()
    if cli_config_file is not None:
        if os.path.isfile(cli_config_file):
            yield py.path.local(cli_config_file)
            return
        if os.path.isdir(cli_config_file):
            from_folder = py.path.local(cli_config_file)
        else:
            print(
                "ERROR: {} is neither file or directory".format(cli_config_file), file=sys.stderr
            )
            return
    for basename in INFO.CONFIG_CANDIDATES:
        if from_folder.join(basename).isfile():
            yield from_folder.join(basename)
        # also walk up through parent directories for this candidate name
        for path in from_folder.parts(reverse=True):
            ini_path = path.join(basename)
            if ini_path.check():
                yield ini_path
def parse_cli(args, pm):
    """Parse command line *args* and return the ``(Config, options)`` pair.

    Plugins get a chance to add options via the ``tox_addoption`` hook.
    Exits immediately with status 0 when ``--version`` was requested.
    """
    parser = Parser()
    pm.hook.tox_addoption(parser=parser)
    option = parser.parse_cli(args)
    if option.version:
        print(get_version_info(pm))
        raise SystemExit(0)
    interpreters = Interpreters(hook=pm.hook)
    config = Config(pluginmanager=pm, option=option, interpreters=interpreters, parser=parser)
    return config, option
def feedback(msg, sysexit=False):
    """Report *msg* as an error on stderr; abort with exit code 1 if *sysexit*."""
    print("ERROR: {}".format(msg), file=sys.stderr)
    if not sysexit:
        return
    raise SystemExit(1)
def get_version_info(pm):
    """Return a human-readable report of the tox version and registered plugins."""
    lines = ["{} imported from {}".format(tox.__version__, tox.__file__)]
    dist_info = pm.list_plugin_distinfo()
    if dist_info:
        lines.append("registered plugins:")
        for module, egg in dist_info:
            location = getattr(module, "__file__", repr(module))
            lines.append("  {}-{} at {}".format(egg.project_name, egg.version, location))
    return "\n".join(lines)
class SetenvDict(object):
    """Lazily-substituted view over the ``setenv`` ini definitions.

    Values are run through the section reader's substitution machinery on
    first access and cached; names absent from the definitions fall back to
    the process environment.  A lookup stack guards against definitions that
    (directly or indirectly) reference themselves.
    """

    _DUMMY = object()

    def __init__(self, definitions, reader):
        self.definitions = definitions
        self.reader = reader
        self.resolved = {}
        self._lookupstack = []

    def __repr__(self):
        return "{}: {}".format(self.__class__.__name__, self.definitions)

    def __contains__(self, name):
        return name in self.definitions

    def get(self, name, default=None):
        if name in self.resolved:
            return self.resolved[name]
        # Self-referencing or unknown names fall back to the real environment.
        if name in self._lookupstack or name not in self.definitions:
            return os.environ.get(name, default)
        raw = self.definitions[name]
        self._lookupstack.append(name)
        try:
            value = self.reader._replace(raw)
            self.resolved[name] = value
        finally:
            self._lookupstack.pop()
        return value

    def __getitem__(self, name):
        value = self.get(name, self._DUMMY)
        if value is self._DUMMY:
            raise KeyError(name)
        return value

    def keys(self):
        return self.definitions.keys()

    def __setitem__(self, name, value):
        # Explicit assignment bypasses substitution entirely.
        self.definitions[name] = value
        self.resolved[name] = value
@tox.hookimpl
def tox_addoption(parser):
    """Register tox's built-in command line options and testenv ini attributes.

    Called through the pluggy hook mechanism; plugins implementing the same
    hook can add their own options.  Registration order here determines the
    order in ``tox --help`` / ``tox --help-ini``.
    """
    parser.add_argument(
        "--version",
        action="store_true",
        dest="version",
        help="report version information to stdout.",
    )
    parser.add_argument(
        "-h", "--help", action="store_true", dest="help", help="show help about options"
    )
    parser.add_argument(
        "--help-ini", "--hi", action="store_true", dest="helpini", help="show help about ini-names"
    )
    parser.add_argument(
        "-v",
        action="count",
        dest="verbose_level",
        default=0,
        help="increase verbosity of reporting output."
        "-vv mode turns off output redirection for package installation, "
        "above level two verbosity flags are passed through to pip (with two less level)",
    )
    parser.add_argument(
        "-q",
        action="count",
        dest="quiet_level",
        default=0,
        help="progressively silence reporting output.",
    )
    parser.add_argument(
        "--showconfig",
        action="store_true",
        help="show configuration information for all environments. ",
    )
    parser.add_argument(
        "-l",
        "--listenvs",
        action="store_true",
        dest="listenvs",
        help="show list of test environments (with description if verbose)",
    )
    parser.add_argument(
        "-a",
        "--listenvs-all",
        action="store_true",
        dest="listenvs_all",
        help="show list of all defined environments (with description if verbose)",
    )
    parser.add_argument(
        "-c",
        action="store",
        default=None,
        dest="configfile",
        help="config file name or directory with 'tox.ini' file.",
    )
    parser.add_argument(
        "-e",
        action="append",
        dest="env",
        metavar="envlist",
        help="work against specified environments (ALL selects all).",
    )
    parser.add_argument(
        "--notest", action="store_true", dest="notest", help="skip invoking test commands."
    )
    parser.add_argument(
        "--sdistonly",
        action="store_true",
        dest="sdistonly",
        help="only perform the sdist packaging activity.",
    )
    parser.add_argument(
        "--parallel--safe-build",
        action="store_true",
        dest="parallel_safe_build",
        help="(deprecated) ensure two tox builds can run in parallel "
        "(uses a lock file in the tox workdir with .lock extension)",
    )
    parser.add_argument(
        "--installpkg",
        action="store",
        default=None,
        metavar="PATH",
        help="use specified package for installation into venv, instead of creating an sdist.",
    )
    parser.add_argument(
        "--develop",
        action="store_true",
        dest="develop",
        help="install package in the venv using 'setup.py develop' via 'pip -e .'",
    )
    parser.add_argument(
        "-i",
        "--index-url",
        action="append",
        dest="indexurl",
        metavar="URL",
        help="set indexserver url (if URL is of form name=url set the "
        "url for the 'name' indexserver, specifically)",
    )
    parser.add_argument(
        "--pre",
        action="store_true",
        dest="pre",
        help="install pre-releases and development versions of dependencies. "
        "This will pass the --pre option to install_command "
        "(pip by default).",
    )
    parser.add_argument(
        "-r",
        "--recreate",
        action="store_true",
        dest="recreate",
        help="force recreation of virtual environments",
    )
    parser.add_argument(
        "--result-json",
        action="store",
        dest="resultjson",
        metavar="PATH",
        help="write a json file with detailed information "
        "about all commands and results involved.",
    )
    # We choose 1 to 4294967295 because it is the range of PYTHONHASHSEED.
    parser.add_argument(
        "--hashseed",
        action="store",
        metavar="SEED",
        default=None,
        help="set PYTHONHASHSEED to SEED before running commands. "
        "Defaults to a random integer in the range [1, 4294967295] "
        "([1, 1024] on Windows). "
        "Passing 'noset' suppresses this behavior.",
    )
    parser.add_argument(
        "--force-dep",
        action="append",
        metavar="REQ",
        default=None,
        help="Forces a certain version of one of the dependencies "
        "when configuring the virtual environment. REQ Examples "
        "'pytest<2.7' or 'django>=1.6'.",
    )
    parser.add_argument(
        "--sitepackages",
        action="store_true",
        help="override sitepackages setting to True in all envs",
    )
    parser.add_argument(
        "--alwayscopy", action="store_true", help="override alwayscopy setting to True in all envs"
    )
    cli_skip_missing_interpreter(parser)
    parser.add_argument(
        "--workdir",
        action="store",
        dest="workdir",
        metavar="PATH",
        default=None,
        help="tox working directory",
    )
    parser.add_argument(
        "args", nargs="*", help="additional arguments available to command positional substitution"
    )
    # From here on: per-testenv ini attributes, in --help-ini display order.
    parser.add_testenv_attribute(
        name="envdir",
        type="path",
        default="{toxworkdir}/{envname}",
        help="set venv directory -- be very careful when changing this as tox "
        "will remove this directory when recreating an environment",
    )
    # add various core venv interpreter attributes
    def setenv(testenv_config, value):
        # Inject tox-provided variables unless the user already set them.
        setenv = value
        config = testenv_config.config
        if "PYTHONHASHSEED" not in setenv and config.hashseed is not None:
            setenv["PYTHONHASHSEED"] = config.hashseed
        setenv["TOX_ENV_NAME"] = str(testenv_config.envname)
        setenv["TOX_ENV_DIR"] = str(testenv_config.envdir)
        return setenv
    parser.add_testenv_attribute(
        name="setenv",
        type="dict_setenv",
        postprocess=setenv,
        help="list of X=Y lines with environment variable settings",
    )
    def basepython_default(testenv_config, value):
        """either user set or proposed from the factor name
        in both cases we check that the factor name implied python version and the resolved
        python interpreter version match up; if they don't we warn, unless ignore base
        python conflict is set in which case the factor name implied version if forced
        """
        for factor in testenv_config.factors:
            if factor in tox.PYTHON.DEFAULT_FACTORS:
                implied_python = tox.PYTHON.DEFAULT_FACTORS[factor]
                break
        else:
            implied_python, factor = None, None
        if testenv_config.config.ignore_basepython_conflict and implied_python is not None:
            return implied_python
        proposed_python = (implied_python or sys.executable) if value is None else str(value)
        if implied_python is not None and implied_python != proposed_python:
            testenv_config.basepython = proposed_python
            implied_version = tox.PYTHON.PY_FACTORS_RE.match(factor).group(2)
            python_info_for_proposed = testenv_config.python_info
            if not isinstance(python_info_for_proposed, NoInterpreterInfo):
                proposed_version = "".join(
                    str(i) for i in python_info_for_proposed.version_info[0:2]
                )
                if implied_version != proposed_version:
                    # TODO(stephenfin): Raise an exception here in tox 4.0
                    warnings.warn(
                        "conflicting basepython version (set {}, should be {}) for env '{}';"
                        "resolve conflict or set ignore_basepython_conflict".format(
                            proposed_version, implied_version, testenv_config.envname
                        )
                    )
        return proposed_python
    parser.add_testenv_attribute(
        name="basepython",
        type="string",
        default=None,
        postprocess=basepython_default,
        help="executable name or path of interpreter used to create a virtual test environment.",
    )
    def merge_description(testenv_config, value):
        """the reader by default joins generated description with new line,
        replace new line with space"""
        return value.replace("\n", " ")
    parser.add_testenv_attribute(
        name="description",
        type="string",
        default="",
        postprocess=merge_description,
        help="short description of this environment",
    )
    parser.add_testenv_attribute(
        name="envtmpdir", type="path", default="{envdir}/tmp", help="venv temporary directory"
    )
    parser.add_testenv_attribute(
        name="envlogdir", type="path", default="{envdir}/log", help="venv log directory"
    )
    parser.add_testenv_attribute(
        name="downloadcache",
        type="string",
        default=None,
        help="(ignored) has no effect anymore, pip-8 uses local caching by default",
    )
    parser.add_testenv_attribute(
        name="changedir",
        type="path",
        default="{toxinidir}",
        help="directory to change to when running commands",
    )
    parser.add_testenv_attribute_obj(PosargsOption())
    parser.add_testenv_attribute(
        name="skip_install",
        type="bool",
        default=False,
        help="Do not install the current package. This can be used when you need the virtualenv "
        "management but do not want to install the current package",
    )
    parser.add_testenv_attribute(
        name="ignore_errors",
        type="bool",
        default=False,
        help="if set to True all commands will be executed irrespective of their result error "
        "status.",
    )
    def recreate(testenv_config, value):
        # CLI --recreate overrides the per-env ini setting.
        if testenv_config.config.option.recreate:
            return True
        return value
    parser.add_testenv_attribute(
        name="recreate",
        type="bool",
        default=False,
        postprocess=recreate,
        help="always recreate this test environment.",
    )
    def passenv(testenv_config, value):
        # Flatten the list to deal with space-separated values.
        value = list(itertools.chain.from_iterable([x.split(" ") for x in value]))
        passenv = {"PATH", "PIP_INDEX_URL", "LANG", "LANGUAGE", "LD_LIBRARY_PATH", "TOX_WORK_DIR"}
        # read in global passenv settings
        p = os.environ.get("TOX_TESTENV_PASSENV", None)
        if p is not None:
            env_values = [x for x in p.split() if x]
            value.extend(env_values)
        # we ensure that tmp directory settings are passed on
        # we could also set it to the per-venv "envtmpdir"
        # but this leads to very long paths when run with jenkins
        # so we just pass it on by default for now.
        if tox.INFO.IS_WIN:
            passenv.add("SYSTEMDRIVE")  # needed for pip6
            passenv.add("SYSTEMROOT")  # needed for python's crypto module
            passenv.add("PATHEXT")  # needed for discovering executables
            passenv.add("COMSPEC")  # needed for distutils cygwincompiler
            passenv.add("TEMP")
            passenv.add("TMP")
            # for `multiprocessing.cpu_count()` on Windows (prior to Python 3.4).
            passenv.add("NUMBER_OF_PROCESSORS")
            passenv.add("PROCESSOR_ARCHITECTURE")  # platform.machine()
            passenv.add("USERPROFILE")  # needed for `os.path.expanduser()`
            passenv.add("MSYSTEM")  # fixes #429
        else:
            passenv.add("TMPDIR")
        # user-specified patterns are matched case-insensitively against os.environ
        for spec in value:
            for name in os.environ:
                if fnmatchcase(name.upper(), spec.upper()):
                    passenv.add(name)
        return passenv
    parser.add_testenv_attribute(
        name="passenv",
        type="line-list",
        postprocess=passenv,
        help="environment variables needed during executing test commands (taken from invocation "
        "environment). Note that tox always passes through some basic environment variables "
        "which are needed for basic functioning of the Python system. See --showconfig for the "
        "eventual passenv setting.",
    )
    parser.add_testenv_attribute(
        name="whitelist_externals",
        type="line-list",
        help="each lines specifies a path or basename for which tox will not warn "
        "about it coming from outside the test environment.",
    )
    parser.add_testenv_attribute(
        name="platform",
        type="string",
        default=".*",
        help="regular expression which must match against ``sys.platform``. "
        "otherwise testenv will be skipped.",
    )
    def sitepackages(testenv_config, value):
        return testenv_config.config.option.sitepackages or value
    def alwayscopy(testenv_config, value):
        return testenv_config.config.option.alwayscopy or value
    parser.add_testenv_attribute(
        name="sitepackages",
        type="bool",
        default=False,
        postprocess=sitepackages,
        help="Set to ``True`` if you want to create virtual environments that also "
        "have access to globally installed packages.",
    )
    parser.add_testenv_attribute(
        name="alwayscopy",
        type="bool",
        default=False,
        postprocess=alwayscopy,
        help="Set to ``True`` if you want virtualenv to always copy files rather "
        "than symlinking.",
    )
    def pip_pre(testenv_config, value):
        return testenv_config.config.option.pre or value
    parser.add_testenv_attribute(
        name="pip_pre",
        type="bool",
        default=False,
        postprocess=pip_pre,
        help="If ``True``, adds ``--pre`` to the ``opts`` passed to the install command. ",
    )
    def develop(testenv_config, value):
        option = testenv_config.config.option
        return not option.installpkg and (value or option.develop)
    parser.add_testenv_attribute(
        name="usedevelop",
        type="bool",
        postprocess=develop,
        default=False,
        help="install package in develop/editable mode",
    )
    parser.add_testenv_attribute_obj(InstallcmdOption())
    parser.add_testenv_attribute(
        name="list_dependencies_command",
        type="argv",
        default="python -m pip freeze",
        help="list dependencies for a virtual environment",
    )
    parser.add_testenv_attribute_obj(DepOption())
    parser.add_testenv_attribute(
        name="commands",
        type="argvlist",
        default="",
        help="each line specifies a test command and can use substitution.",
    )
    parser.add_testenv_attribute(
        name="commands_pre",
        type="argvlist",
        default="",
        help="each line specifies a setup command action and can use substitution.",
    )
    parser.add_testenv_attribute(
        name="commands_post",
        type="argvlist",
        default="",
        help="each line specifies a teardown command and can use substitution.",
    )
    parser.add_testenv_attribute(
        "ignore_outcome",
        type="bool",
        default=False,
        help="if set to True a failing result of this testenv will not make "
        "tox fail, only a warning will be produced",
    )
    parser.add_testenv_attribute(
        "extras",
        type="line-list",
        help="list of extras to install with the source distribution or develop install",
    )
def cli_skip_missing_interpreter(parser):
    """Register the tri-state ``--skip-missing-interpreters`` flag on *parser*."""

    class SkipMissingInterpreterAction(argparse.Action):
        # A bare flag means "true"; otherwise only the three known values pass.
        def __call__(self, parser, namespace, values, option_string=None):
            choice = values if values is not None else "true"
            if choice not in ("config", "true", "false"):
                raise argparse.ArgumentTypeError("value must be config, true or false")
            setattr(namespace, self.dest, choice)

    parser.add_argument(
        "--skip-missing-interpreters",
        default="config",
        metavar="val",
        nargs="?",
        action=SkipMissingInterpreterAction,
        help="don't fail tests for missing interpreters: {config,true,false} choice",
    )
class Config(object):
    """Global Tox config object."""

    def __init__(self, pluginmanager, option, interpreters, parser):
        # mapping envname -> TestenvConfig
        self.envconfigs = OrderedDict()
        self.invocationcwd = py.path.local()
        self.interpreters = interpreters
        self.pluginmanager = pluginmanager
        # namespace containing all parsed command line options
        self.option = option
        self._parser = parser
        self._testenv_attr = parser._testenv_attr

    @property
    def homedir(self):
        """The user's home directory, falling back to ``toxinidir`` when unknown."""
        home = get_homedir()
        if home is None:
            home = self.toxinidir  # FIXME XXX good idea?
        return home
class TestenvConfig:
    """Configuration of a single test environment.

    Besides the core attributes set here, every per-testenv ini attribute
    (see ``tox --help-ini``) is stored on instances as a plain attribute.
    """

    def __init__(self, envname, config, factors, reader):
        #: test environment name
        self.envname = envname
        #: global tox config object
        self.config = config
        #: set of factors
        self.factors = factors
        self._reader = reader
        #: substitutions that could not be resolved during parsing; a missing
        #: substitution only becomes an error once the env actually runs
        #: (pre 2.8.1 it crashed at parse time even for unused envs)
        self.missing_subs = []

    def get_envbindir(self):
        """Path to the directory where scripts/binaries reside."""
        windows_layout = (
            tox.INFO.IS_WIN
            and "jython" not in self.basepython
            and "pypy" not in self.basepython
        )
        return self.envdir.join("Scripts" if windows_layout else "bin")

    @property
    def envbindir(self):
        return self.get_envbindir()

    @property
    def envpython(self):
        """Path to the python executable."""
        return self.get_envpython()

    def get_envpython(self):
        """Path to the python (or jython) executable inside the env."""
        name = "jython" if "jython" in str(self.basepython) else "python"
        return self.envbindir.join(name)

    def get_envsitepackagesdir(self):
        """Return sitepackagesdir of the virtualenv environment.

        NOTE: Only available during execution, not during parsing.
        """
        return self.config.interpreters.get_sitepackagesdir(
            info=self.python_info, envdir=self.envdir
        )

    @property
    def python_info(self):
        """Interpreter information resolved for this environment."""
        return self.config.interpreters.get_info(envconfig=self)

    def getsupportedinterpreter(self):
        """Return the interpreter executable, raising when it is unusable."""
        if tox.INFO.IS_WIN and self.basepython and "jython" in self.basepython:
            raise tox.exception.UnsupportedInterpreter(
                "Jython/Windows does not support installing scripts"
            )
        info = self.config.interpreters.get_info(envconfig=self)
        if not info.executable:
            raise tox.exception.InterpreterNotFound(self.basepython)
        if not info.version_info:
            raise tox.exception.InvocationError(
                "Failed to get version_info for {}: {}".format(info.name, info.err)
            )
        return info.executable
testenvprefix = "testenv:"  # ini section name prefix for per-env sections, e.g. [testenv:py37]
def get_homedir():
    """Return the invoking user's home directory, or None if it cannot be determined."""
    try:
        return py.path.local._gethomedir()
    except Exception:
        return None
def make_hashseed():
    """Return a random PYTHONHASHSEED value as a string.

    The value lies in [1, 4294967295] (the PYTHONHASHSEED range), limited
    to [1, 1024] on Windows.
    """
    upper_bound = 1024 if tox.INFO.IS_WIN else 4294967295
    return str(random.randint(1, upper_bound))
class ParseIni(object):
    """Parse the tox ini content into the given :class:`Config` object.

    Instantiating this class mutates ``config`` in place: it resolves global
    settings (workdir, indexservers, envlist, ...) and builds a
    :class:`TestenvConfig` for every known environment.
    """

    def __init__(self, config, ini_path, ini_data): # noqa
        config.toxinipath = ini_path
        config.toxinidir = config.toxinipath.dirpath()
        self._cfg = py.iniconfig.IniConfig(config.toxinipath, ini_data)
        config._cfg = self._cfg
        self.config = config
        # inside setup.cfg the tox sections are prefixed with "tox"
        prefix = "tox" if ini_path.basename == "setup.cfg" else None
        context_name = getcontextname()
        if context_name == "jenkins":
            reader = SectionReader(
                "tox:jenkins", self._cfg, prefix=prefix, fallbacksections=["tox"]
            )
            dist_share_default = "{toxworkdir}/distshare"
        elif not context_name:
            reader = SectionReader("tox", self._cfg, prefix=prefix)
            dist_share_default = "{homedir}/.tox/distshare"
        else:
            raise ValueError("invalid context")
        if config.option.hashseed is None:
            hash_seed = make_hashseed()
        elif config.option.hashseed == "noset":
            hash_seed = None
        else:
            hash_seed = config.option.hashseed
        config.hashseed = hash_seed
        reader.addsubstitutions(toxinidir=config.toxinidir, homedir=config.homedir)
        # As older versions of tox may have bugs or incompatibilities that
        # prevent parsing of tox.ini this must be the first thing checked.
        config.minversion = reader.getstring("minversion", None)
        if config.minversion:
            tox_version = pkg_resources.parse_version(tox.__version__)
            config_min_version = pkg_resources.parse_version(self.config.minversion)
            if config_min_version > tox_version:
                raise tox.exception.MinVersionError(
                    "tox version is {}, required is at least {}".format(
                        tox.__version__, self.config.minversion
                    )
                )
        self.ensure_requires_satisfied(reader.getlist("requires"))
        if config.option.workdir is None:
            config.toxworkdir = reader.getpath("toxworkdir", "{toxinidir}/.tox")
        else:
            config.toxworkdir = config.toxinidir.join(config.option.workdir, abs=True)
        if config.option.skip_missing_interpreters == "config":
            val = reader.getbool("skip_missing_interpreters", False)
            config.option.skip_missing_interpreters = "true" if val else "false"
        config.ignore_basepython_conflict = reader.getbool("ignore_basepython_conflict", False)
        # determine indexserver dictionary
        config.indexserver = {"default": IndexServerConfig("default")}
        prefix = "indexserver"
        for line in reader.getlist(prefix):
            name, url = map(lambda x: x.strip(), line.split("=", 1))
            config.indexserver[name] = IndexServerConfig(name, url)
        override = False
        # -i/--index-url command line values win over ini definitions
        if config.option.indexurl:
            for url_def in config.option.indexurl:
                m = re.match(r"\W*(\w+)=(\S+)", url_def)
                if m is None:
                    url = url_def
                    name = "default"
                else:
                    name, url = m.groups()
                    if not url:
                        url = None
                if name != "ALL":
                    config.indexserver[name].url = url
                else:
                    override = url
        # let ALL override all existing entries
        if override:
            for name in config.indexserver:
                config.indexserver[name] = IndexServerConfig(name, override)
        reader.addsubstitutions(toxworkdir=config.toxworkdir)
        config.distdir = reader.getpath("distdir", "{toxworkdir}/dist")
        reader.addsubstitutions(distdir=config.distdir)
        config.distshare = reader.getpath("distshare", dist_share_default)
        config.temp_dir = reader.getpath("temp_dir", "{toxworkdir}/.tmp")
        reader.addsubstitutions(distshare=config.distshare)
        config.sdistsrc = reader.getpath("sdistsrc", None)
        config.setupdir = reader.getpath("setupdir", "{toxinidir}")
        config.logdir = config.toxworkdir.join("log")
        self.parse_build_isolation(config, reader)
        config.envlist, all_envs = self._getenvdata(reader, config)
        # factors used in config or predefined
        known_factors = self._list_section_factors("testenv")
        known_factors.update({"py", "python"})
        # factors stated in config envlist
        stated_envlist = reader.getstring("envlist", replace=False)
        if stated_envlist:
            for env in _split_env(stated_envlist):
                known_factors.update(env.split("-"))
        # configure testenvs
        for name in all_envs:
            section = "{}{}".format(testenvprefix, name)
            factors = set(name.split("-"))
            if (
                section in self._cfg
                or factors <= known_factors
                or all(
                    tox.PYTHON.PY_FACTORS_RE.match(factor) for factor in factors - known_factors
                )
            ):
                config.envconfigs[name] = self.make_envconfig(name, section, reader._subs, config)
        all_develop = all(
            name in config.envconfigs and config.envconfigs[name].usedevelop
            for name in config.envlist
        )
        config.skipsdist = reader.getbool("skipsdist", all_develop)

    def parse_build_isolation(self, config, reader):
        """Read the isolated_build settings; create the packaging env config if needed."""
        config.isolated_build = reader.getbool("isolated_build", False)
        config.isolated_build_env = reader.getstring("isolated_build_env", ".package")
        if config.isolated_build is True:
            name = config.isolated_build_env
            if name not in config.envconfigs:
                config.envconfigs[name] = self.make_envconfig(
                    name, "{}{}".format(testenvprefix, name), reader._subs, config
                )

    @staticmethod
    def ensure_requires_satisfied(specified):
        """Raise MissingRequirement listing any ``requires`` entries not installed."""
        missing_requirements = []
        for s in specified:
            try:
                pkg_resources.get_distribution(s)
            except pkg_resources.RequirementParseError:
                raise
            except Exception:
                missing_requirements.append(str(pkg_resources.Requirement(s)))
        if missing_requirements:
            raise tox.exception.MissingRequirement(
                "Packages {} need to be installed alongside tox in {}".format(
                    ", ".join(missing_requirements), sys.executable
                )
            )

    def _list_section_factors(self, section):
        """Collect factor names referenced by conditional settings in *section*."""
        factors = set()
        if section in self._cfg:
            for _, value in self._cfg[section].items():
                # "factor1-factor2: value" style conditional lines
                exprs = re.findall(r"^([\w{}\.!,-]+)\:\s+", value, re.M)
                factors.update(*mapcat(_split_factor_expr_all, exprs))
        return factors

    def make_envconfig(self, name, section, subs, config, replace=True):
        """Build a :class:`TestenvConfig` for env *name* from ini section *section*."""
        factors = set(name.split("-"))
        reader = SectionReader(section, self._cfg, fallbacksections=["testenv"], factors=factors)
        tc = TestenvConfig(name, config, factors, reader)
        reader.addsubstitutions(
            envname=name,
            envbindir=tc.get_envbindir,
            envsitepackagesdir=tc.get_envsitepackagesdir,
            envpython=tc.get_envpython,
            **subs
        )
        for env_attr in config._testenv_attr:
            atype = env_attr.type
            try:
                if atype in ("bool", "path", "string", "dict", "dict_setenv", "argv", "argvlist"):
                    meth = getattr(reader, "get{}".format(atype))
                    res = meth(env_attr.name, env_attr.default, replace=replace)
                elif atype == "space-separated-list":
                    res = reader.getlist(env_attr.name, sep=" ")
                elif atype == "line-list":
                    res = reader.getlist(env_attr.name, sep="\n")
                else:
                    raise ValueError("unknown type {!r}".format(atype))
                if env_attr.postprocess:
                    res = env_attr.postprocess(testenv_config=tc, value=res)
            except tox.exception.MissingSubstitution as e:
                # remember and only fail later, when/if this env actually runs
                tc.missing_subs.append(e.name)
                res = e.FLAG
            setattr(tc, env_attr.name, res)
            if atype in ("path", "string"):
                reader.addsubstitutions(**{env_attr.name: res})
        return tc

    def _getenvdata(self, reader, config):
        """Determine the active envlist and the list of all known env names."""
        # precedence: -e on the CLI, then $TOXENV, then the ini envlist
        candidates = (
            self.config.option.env,
            os.environ.get("TOXENV"),
            reader.getstring("envlist", replace=False),
        )
        env_str = next((i for i in candidates if i), [])
        env_list = _split_env(env_str)
        # collect section envs
        all_envs = OrderedDict((i, None) for i in env_list)
        if "ALL" in all_envs:
            all_envs.pop("ALL")
        for section in self._cfg:
            if section.name.startswith(testenvprefix):
                all_envs[section.name[len(testenvprefix) :]] = None
        if not all_envs:
            all_envs["python"] = None
        package_env = config.isolated_build_env
        if config.isolated_build is True and package_env in all_envs:
            all_envs.pop(package_env)
        if not env_list or "ALL" in env_list:
            env_list = list(all_envs.keys())
        if config.isolated_build is True and package_env in env_list:
            msg = "isolated_build_env {} cannot be part of envlist".format(package_env)
            raise tox.exception.ConfigError(msg)
        all_env_list = list(all_envs.keys())
        return env_list, all_env_list
def _split_env(env):
    """Normalize *env* (a string, or a list from repeated ``-e``) and expand it."""
    if not isinstance(env, list):
        # Strip "#" comments from each line, drop blanks, re-join with commas.
        stripped = (line.split("#", 1)[0].strip() for line in env.split("\n"))
        env = [",".join(filter(None, stripped))]
    return mapcat(_expand_envstr, env)
def _is_negated_factor(factor):
return factor.startswith("!")
def _base_factor_name(factor):
    """Return *factor* with any leading negation marker removed."""
    if _is_negated_factor(factor):
        return factor[1:]
    return factor
def _split_factor_expr(expr):
    """Split a factor expression into ``(included, excluded)`` name-set pairs.

    One pair is produced per brace-expanded partial environment.
    """

    def split_single(partial):
        included = set()
        excluded = set()
        for factor in partial.split("-"):
            bucket = excluded if _is_negated_factor(factor) else included
            bucket.add(_base_factor_name(factor))
        return included, excluded

    return [split_single(partial) for partial in _expand_envstr(expr)]
def _split_factor_expr_all(expr):
    """Return, per expanded partial env, the set of base factor names in it."""
    result = []
    for partial in _expand_envstr(expr):
        result.append({_base_factor_name(factor) for factor in partial.split("-")})
    return result
def _expand_envstr(envstr):
    """Expand brace groups: ``py{27,36}-dj`` -> ``py27-dj``, ``py36-dj``."""
    # Split on commas that are not inside {...} groups; groupby(bool) glues
    # adjacent kept fragments back together and drops the separators.
    tokens = re.split(r"((?:\{[^}]+\})+)|,", envstr)
    envlist = ["".join(g).strip() for k, g in itertools.groupby(tokens, key=bool) if k]

    def expand(env):
        # Take the cartesian product of all {a,b,...} alternatives.
        tokens = re.split(r"\{([^}]+)\}", env)
        parts = [re.sub(r"\s+", "", token).split(",") for token in tokens]
        return ["".join(variant) for variant in itertools.product(*parts)]

    return mapcat(expand, envlist)
def mapcat(f, seq):
    """Map *f* over *seq* and concatenate the resulting iterables into a list."""
    out = []
    for item in seq:
        out.extend(f(item))
    return out
class DepConfig:
    """A single dependency, optionally pinned to a non-default index server."""

    def __init__(self, name, indexserver=None):
        self.name = name
        self.indexserver = indexserver

    def __str__(self):
        server = self.indexserver
        if not server:
            return str(self.name)
        if server.name == "default":
            return self.name
        # Non-default server: ":<server>:<requirement>" spelling.
        return ":{}:{}".format(server.name, self.name)

    __repr__ = __str__
class IndexServerConfig:
    """A named package index server with an optional URL."""

    def __init__(self, name, url=None):
        self.url = url
        self.name = name
# Matches a cross-section substitution such as ``{[base]commands}``:
# "{[", a section name without braces/whitespace, "]", a key, "}".
_SECTION_SUBSTITUTION_RE = re.compile(r"{\[[^{}\s]+\]\S+?}")
is_section_substitution = _SECTION_SUBSTITUTION_RE.match
class SectionReader:
    """Read and resolve values from a single ini section.

    Supports fallback sections, factor-conditional lines (``factor: value``)
    and ``{...}`` substitution expansion, which is delegated to ``Replacer``.
    """

    def __init__(self, section_name, cfgparser, fallbacksections=None, factors=(), prefix=None):
        if prefix is None:
            self.section_name = section_name
        else:
            self.section_name = "{}:{}".format(prefix, section_name)
        self._cfg = cfgparser
        self.fallbacksections = fallbacksections or []
        self.factors = factors
        self._subs = {}  # substitution name -> value (or callable)
        self._subststack = []  # (section, name) pairs; detects recursive refs
        self._setenv = None  # SetenvDict once getdict_setenv() has run

    def get_environ_value(self, name):
        # Prefer the section's own setenv definitions over os.environ.
        if self._setenv is None:
            return os.environ.get(name)
        return self._setenv.get(name)

    def addsubstitutions(self, _posargs=None, **kw):
        self._subs.update(kw)
        if _posargs:
            self.posargs = _posargs

    def getpath(self, name, defaultpath, replace=True):
        # Returns None when both the value and the default are None.
        path = self.getstring(name, defaultpath, replace=replace)
        if path is not None:
            toxinidir = self._subs["toxinidir"]
            # Relative paths are resolved against the tox.ini directory.
            return toxinidir.join(path, abs=True)

    def getlist(self, name, sep="\n"):
        s = self.getstring(name, None)
        if s is None:
            return []
        return [x.strip() for x in s.split(sep) if x.strip()]

    def getdict(self, name, default=None, sep="\n", replace=True):
        value = self.getstring(name, None, replace=replace)
        return self._getdict(value, default=default, sep=sep, replace=replace)

    def getdict_setenv(self, name, default=None, sep="\n", replace=True):
        # crossonly: only {[section]key} references are expanded here; plain
        # substitutions are resolved lazily by the SetenvDict itself.
        value = self.getstring(name, None, replace=replace, crossonly=True)
        definitions = self._getdict(value, default=default, sep=sep, replace=replace)
        self._setenv = SetenvDict(definitions, reader=self)
        return self._setenv

    def _getdict(self, value, default, sep, replace=True):
        if value is None or not replace:
            return default or {}

        d = {}
        for line in value.split(sep):
            if line.strip():
                # Each entry has the form NAME=VALUE.
                name, rest = line.split("=", 1)
                d[name.strip()] = rest.strip()

        return d

    def getbool(self, name, default=None, replace=True):
        s = self.getstring(name, default, replace=replace)
        if not s or not replace:
            s = default
        if s is None:
            raise KeyError("no config value [{}] {} found".format(self.section_name, name))

        if not isinstance(s, bool):
            if s.lower() == "true":
                s = True
            elif s.lower() == "false":
                s = False
            else:
                raise tox.exception.ConfigError(
                    "{}: boolean value {!r} needs to be 'True' or 'False'".format(name, s)
                )
        return s

    def getargvlist(self, name, default="", replace=True):
        # Substitution is deferred to _ArgvlistReader so list-valued
        # expansions (e.g. {posargs}) can be handled per command word.
        s = self.getstring(name, default, replace=False)
        return _ArgvlistReader.getargvlist(self, s, replace=replace)

    def getargv(self, name, default="", replace=True):
        # NOTE(review): raises IndexError when the value expands to no
        # commands at all; callers appear to rely on a first command existing.
        return self.getargvlist(name, default, replace=replace)[0]

    def getstring(self, name, default=None, replace=True, crossonly=False):
        x = None
        # First matching section wins: own section, then fallbacks.
        for s in [self.section_name] + self.fallbacksections:
            try:
                x = self._cfg[s][name]
                break
            except KeyError:
                continue

        if x is None:
            x = default
        else:
            # It is needed to apply factors before unwrapping
            # dependencies, otherwise it can break the substitution
            # process. Once they are unwrapped, we call apply factors
            # again for those new dependencies.
            x = self._apply_factors(x)
            x = self._replace_if_needed(x, name, replace, crossonly)
            x = self._apply_factors(x)

        x = self._replace_if_needed(x, name, replace, crossonly)
        return x

    def _replace_if_needed(self, x, name, replace, crossonly):
        # Only strings (anything with .replace) are substitution candidates.
        if replace and x and hasattr(x, "replace"):
            x = self._replace(x, name=name, crossonly=crossonly)
        return x

    def _apply_factors(self, s):
        def factor_line(line):
            # A conditional line looks like "factor-expr: value".
            m = re.search(r"^([\w{}\.!,-]+)\:\s+(.+)", line)
            if not m:
                return line

            expr, line = m.groups()
            # Keep the line when, for any expanded alternative, all included
            # factors are active and no excluded factor is active.
            if any(
                included <= self.factors and not any(x in self.factors for x in excluded)
                for included, excluded in _split_factor_expr(expr)
            ):
                return line

        lines = s.strip().splitlines()
        # factor_line returns None for dropped lines; filter them out.
        return "\n".join(filter(None, map(factor_line, lines)))

    def _replace(self, value, name=None, section_name=None, crossonly=False):
        if "{" not in value:
            return value  # fast path: nothing to substitute

        section_name = section_name if section_name else self.section_name
        self._subststack.append((section_name, name))
        try:
            replaced = Replacer(self, crossonly=crossonly).do_replace(value)
            assert self._subststack.pop() == (section_name, name)
        except tox.exception.MissingSubstitution:
            if not section_name.startswith(testenvprefix):
                raise tox.exception.ConfigError(
                    "substitution env:{!r}: unknown or recursive definition in"
                    " section {!r}.".format(value, section_name)
                )
            # Inside a testenv section the caller handles missing values.
            raise
        return replaced
class Replacer:
    """Expand ``{...}`` substitution expressions on behalf of a SectionReader."""

    RE_ITEM_REF = re.compile(
        r"""
        (?<!\\)[{]
        (?:(?P<sub_type>[^[:{}]+):)? # optional sub_type for special rules
        (?P<substitution_value>(?:\[[^,{}]*\])?[^:,{}]*) # substitution key
        (?::(?P<default_value>[^{}]*))? # default value
        [}]
        """,
        re.VERBOSE,
    )

    def __init__(self, reader, crossonly=False):
        self.reader = reader
        self.crossonly = crossonly  # expand only {[section]key} references

    def do_replace(self, value):
        """
        Recursively expand substitutions starting from the innermost expression
        """

        def substitute_once(x):
            return self.RE_ITEM_REF.sub(self._replace_match, x)

        expanded = substitute_once(value)

        while expanded != value:  # substitution found
            value = expanded
            expanded = substitute_once(value)

        return expanded

    def _replace_match(self, match):
        g = match.groupdict()
        sub_value = g["substitution_value"]
        if self.crossonly:
            if sub_value.startswith("["):
                return self._substitute_from_other_section(sub_value)
            # in crossonly we return all other hits verbatim
            start, end = match.span()
            return match.string[start:end]

        # special case: all empty values means ":" which is os.pathsep
        if not any(g.values()):
            return os.pathsep

        # special case: opts and packages. Leave {opts} and
        # {packages} intact, they are replaced manually in
        # _venv.VirtualEnv.run_install_command.
        if sub_value in ("opts", "packages"):
            return "{{{}}}".format(sub_value)

        try:
            sub_type = g["sub_type"]
        except KeyError:
            raise tox.exception.ConfigError(
                "Malformed substitution; no substitution type provided"
            )

        if sub_type == "env":
            return self._replace_env(match)
        if sub_type == "tty":
            # {tty:on-value:off-value} picks by interactivity of stdin.
            if is_interactive():
                return match.group("substitution_value")
            return match.group("default_value")
        if sub_type is not None:
            raise tox.exception.ConfigError(
                "No support for the {} substitution type".format(sub_type)
            )
        return self._replace_substitution(match)

    def _replace_env(self, match):
        # {env:NAME} / {env:NAME:default} -- environment variable lookup.
        key = match.group("substitution_value")
        if not key:
            raise tox.exception.ConfigError("env: requires an environment variable name")
        default = match.group("default_value")
        value = self.reader.get_environ_value(key)
        if value is not None:
            return value
        if default is not None:
            return default
        raise tox.exception.MissingSubstitution(key)

    def _substitute_from_other_section(self, key):
        # key has the form "[section]item".
        if key.startswith("[") and "]" in key:
            i = key.find("]")
            section, item = key[1:i], key[i + 1 :]
            cfg = self.reader._cfg
            if section in cfg and item in cfg[section]:
                if (section, item) in self.reader._subststack:
                    # Already being expanded higher up the stack: recursion.
                    raise ValueError(
                        "{} already in {}".format((section, item), self.reader._subststack)
                    )
                x = str(cfg[section][item])
                return self.reader._replace(
                    x, name=item, section_name=section, crossonly=self.crossonly
                )

        raise tox.exception.ConfigError("substitution key {!r} not found".format(key))

    def _replace_substitution(self, match):
        sub_key = match.group("substitution_value")
        val = self.reader._subs.get(sub_key, None)
        if val is None:
            val = self._substitute_from_other_section(sub_key)
        if callable(val):
            val = val()  # lazily-computed substitution values
        return str(val)
def is_interactive():
    """Return True when standard input is attached to a terminal."""
    stdin = sys.stdin
    return stdin.isatty()
class _ArgvlistReader:
    @classmethod
    def getargvlist(cls, reader, value, replace=True):
        """Parse ``commands`` argvlist multiline string.

        :param SectionReader reader: reader to be used.
        :param str value: Content stored by key.

        :rtype: list[list[str]]
        :raise :class:`tox.exception.ConfigError`:
            line-continuation ends nowhere while resolving for specified section
        """
        commands = []
        current_command = ""
        for line in value.splitlines():
            line = line.rstrip()
            if not line:
                continue
            if line.endswith("\\"):
                # Trailing backslash: command continues on the next line.
                current_command += " {}".format(line[:-1])
                continue
            current_command += line

            if is_section_substitution(current_command):
                # The whole command is a {[section]key} reference: expand it
                # and parse the resulting argvlist recursively.
                replaced = reader._replace(current_command, crossonly=True)
                commands.extend(cls.getargvlist(reader, replaced))
            else:
                commands.append(cls.processcommand(reader, current_command, replace))
            current_command = ""
        else:
            # for-else: runs after the loop; a pending command here means the
            # last line still ended with a continuation backslash.
            if current_command:
                raise tox.exception.ConfigError(
                    "line-continuation ends nowhere while resolving for [{}] {}".format(
                        reader.section_name, "commands"
                    )
                )
        return commands

    @classmethod
    def processcommand(cls, reader, command, replace=True):
        # posargs defaults to "" when addsubstitutions() never set it.
        posargs = getattr(reader, "posargs", "")
        posargs_string = list2cmdline([x for x in posargs if x])

        # Iterate through each word of the command substituting as
        # appropriate to construct the new command string. This
        # string is then broken up into exec argv components using
        # shlex.
        if replace:
            newcommand = ""
            for word in CommandParser(command).words():
                if word == "{posargs}" or word == "[]":
                    newcommand += posargs_string
                    continue
                elif word.startswith("{posargs:") and word.endswith("}"):
                    if posargs:
                        newcommand += posargs_string
                        continue
                    else:
                        # No posargs given: fall back to the inline default,
                        # i.e. the text between "{posargs:" and "}".
                        word = word[9:-1]
                new_arg = ""
                # NOTE(review): _replace is applied twice -- presumably to
                # expand substitutions introduced by the first pass; confirm.
                new_word = reader._replace(word)
                new_word = reader._replace(new_word)
                new_word = new_word.replace("\\{", "{").replace("\\}", "}")
                new_arg += new_word
                newcommand += new_arg
        else:
            newcommand = command

        # Construct shlex object that will not escape any values,
        # use all values as is in argv.
        shlexer = shlex.shlex(newcommand, posix=True)
        shlexer.whitespace_split = True
        shlexer.escape = ""
        return list(shlexer)
class CommandParser(object):
    """Split a command string into words while keeping top-level ``{...}``
    substitution groups intact (whitespace runs are preserved as their own
    "words" so the command can be re-joined verbatim)."""

    class State(object):
        def __init__(self):
            self.word = ""  # characters accumulated for the current word
            self.depth = 0  # current {...} nesting depth
            self.yield_words = []  # completed words, in order

    def __init__(self, command):
        self.command = command

    def words(self):
        ps = CommandParser.State()

        def word_has_ended():
            # A boundary occurs when switching between whitespace and
            # non-whitespace runs, or at the edge of a top-level {...} group
            # (an escaped "\{" does not start a group).
            return (
                (
                    cur_char in string.whitespace
                    and ps.word
                    and ps.word[-1] not in string.whitespace
                )
                or (cur_char == "{" and ps.depth == 0 and not ps.word.endswith("\\"))
                or (ps.depth == 0 and ps.word and ps.word[-1] == "}")
                or (cur_char not in string.whitespace and ps.word and ps.word.strip() == "")
            )

        def yield_this_word():
            yieldword = ps.word
            ps.word = ""
            if yieldword:
                ps.yield_words.append(yieldword)

        def yield_if_word_ended():
            if word_has_ended():
                yield_this_word()

        def accumulate():
            ps.word += cur_char

        def push_substitution():
            ps.depth += 1

        def pop_substitution():
            ps.depth -= 1

        for cur_char in self.command:
            if cur_char in string.whitespace:
                # Whitespace inside a {...} group is part of the group.
                if ps.depth == 0:
                    yield_if_word_ended()
                accumulate()
            elif cur_char == "{":
                yield_if_word_ended()
                accumulate()
                push_substitution()
            elif cur_char == "}":
                accumulate()
                pop_substitution()
            else:
                yield_if_word_ended()
                accumulate()

        # Flush the final word unless it is pure whitespace.
        if ps.word.strip():
            yield_this_word()
        return ps.yield_words
def getcontextname():
    """Return "jenkins" when running under Jenkins/Hudson CI, else None."""
    for var in ("JENKINS_URL", "HUDSON_URL"):
        if var in os.environ:
            return "jenkins"
    return None
| [
"tuyenle@Admins-MacBook-Pro.local"
] | tuyenle@Admins-MacBook-Pro.local |
d4d00147e745d9e951765b3fc1fd6c50c016f113 | 3c9011b549dd06b6344c6235ed22b9dd483365d1 | /Agenda/contatos/migrations/0003_contato_foto.py | 097090b85a3f8c6636e29e52edc33d1acc2ee0e7 | [] | no_license | joaoo-vittor/estudo-python | 1411f4c3620bbc5f6b7c674a096cae8f90f0db8d | 5562d823dd574d7df49fddca87a1fbd319356969 | refs/heads/master | 2023-05-31T17:59:16.752835 | 2021-06-25T04:54:56 | 2021-06-25T04:54:56 | 292,372,669 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py | # Generated by Django 3.2 on 2021-05-16 01:41
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django: adds an optional image field "foto" to the
    # Contato model; uploaded files are stored under fotos/<year>/<month>.

    dependencies = [
        ('contatos', '0002_contato_mostrar'),
    ]

    operations = [
        migrations.AddField(
            model_name='contato',
            name='foto',
            field=models.ImageField(blank=True, upload_to='fotos/%Y/%m'),
        ),
    ]
| [
"joaoo.vittor007@gmail.com"
] | joaoo.vittor007@gmail.com |
65dd6e05846806bcfc8c999115ef687d45c97af7 | 3160c1b057319c66e3fe30af52ddabc5555b347a | /venv/Scripts/rst2pseudoxml.py | 67178034fb66411b820e6bc1291d00d95015ebc9 | [] | no_license | bopopescu/TLDR_AI | acfb35f0a0677e03f48b03805ff44fd78303f126 | d468e599fc13828f6e754e2d1d99b6a3e3c1be21 | refs/heads/master | 2022-11-25T17:40:08.513723 | 2019-04-09T06:52:06 | 2019-04-09T06:52:06 | 282,555,526 | 0 | 0 | null | 2020-07-26T01:41:57 | 2020-07-26T01:41:56 | null | UTF-8 | Python | false | false | 647 | py | #!C:\Users\aaron\PycharmProjects\TLDR_AI\venv\Scripts\python.exe
# $Id: rst2pseudoxml.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing pseudo-XML.
"""
# Best-effort: switch to the user's default locale so output is localized;
# failures (missing locale module or unsupported setting) are ignored.
try:
    import locale

    locale.setlocale(locale.LC_ALL, '')
except Exception:
    # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
    # still propagate; the best-effort behavior is otherwise unchanged.
    pass
from docutils.core import publish_cmdline, default_description

# Command-line entry point: read reStructuredText per the CLI arguments and
# emit pseudo-XML (Docutils' debugging representation of the document tree).
description = ('Generates pseudo-XML from standalone reStructuredText '
               'sources (for testing purposes). ' + default_description)

publish_cmdline(description=description)
| [
"aaron157@bellsouth.net"
] | aaron157@bellsouth.net |
125483cf11de2cfc713c9a895380f931f1ec938b | f9b45fd409384de3c69d68c62c0f21d9196e6927 | /python_tools/FelixUtilities.py | da0b1769ef7a4288b8afdbea8bf0248b0f87b580 | [
"MIT"
] | permissive | ultimatezen/felix | 04799eb666b54e2eeea961dc983cf3721f5182de | 5a7ad298ca4dcd5f1def05c60ae3c84519ec54c4 | refs/heads/master | 2020-04-08T14:21:02.872525 | 2016-02-16T01:18:11 | 2016-02-16T01:18:11 | 159,433,144 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,611 | py | # coding: UTF8
"""
COM sever for utilities for Felix
"""
import os
import shutil
from datetime import date
import subprocess
# COM stuff
import win32com.server.register
import loc
from loc import get_local_app_data_folder, module_path
from loc import get_prefs, set_prefs
import utils
from utils import debug, log_err
from FelixMemoryServes import update_checker_felix
# Module metadata.
__version__ = "1.0"
__author__ = "Ryan Ginstrom"
__license__ = "MIT"
__progname__ = "Felix Utilities"

# Days between automatic update checks (shared with the update checker;
# compared against a date difference's .days in check_permission()).
CHECK_INTERVAL = update_checker_felix.CHECK_INTERVAL
class FelixUtilities(object):
    """Utilities for Felix, exposed as a COM server (ProgID ``Felix.Utilities``).

    Methods are invoked by the Felix application through COM; the names in
    ``_public_methods_`` form the COM-visible interface.
    """

    _public_methods_ = """EnsureHtml CopyHtml LoadProp SaveProp
    CheckUpdates CheckUpdatesRequested
    HasUpdate CurrentVersion NewVersion""".split()
    _public_attrs_ = []
    _reg_progid_ = "Felix.Utilities"
    # import pythoncom
    # print pythoncom.CreateGuid()
    _reg_clsid_ = "{3D368688-B9E8-4BFE-ADF6-22F1CBA8845B}"

    def __init__(self):
        debug("Felix.Utilities")

    def HasUpdate(self):
        # Stub: the external update checker performs the real comparison.
        return False

    def CurrentVersion(self):
        # Stub version string; see HasUpdate.
        return "1"

    def NewVersion(self):
        # Stub version string; see HasUpdate.
        return "1"

    def LoadProp(self, key):
        """
        Loads a key-value pair from the properties.

        :param key: The key to retrieve
        :return: The value corresponding to the key, or None if absent
        """
        val = get_prefs().get(key, None)
        debug("Loaded prop: %s=%s" % (key, val))
        return val

    def SaveProp(self, key, val):
        """
        Saves a property as a key-value pair

        :param key: The key
        :param val: The value
        """
        prefs = get_prefs()
        prefs[key] = val
        set_prefs(prefs)
        debug("Saved prop: %s=%s" % (key, val))

    def CopyHtml(self, source, dest):
        """
        Forces copy of HTML files to local app folder.

        :param source: The location of the backup HTML files
        :param dest: Copy the HTML files here (local app data)
        """
        debug(u"Forcing copy of HTML files to local app folder")
        debug(u"source: %s" % source)
        debug(u"destination: %s" % dest)
        shutil.copyfile(source, dest)

    def EnsureHtml(self):
        """
        Ensure that HTML files are in local app data. If they are not,
        copies them over.

        :return: True when the files had to be copied, False when present
        """
        debug("Checking for HTML files in local app folder")
        data_folder = get_local_app_data_folder()
        html_folder = os.path.join(data_folder, u"html")
        if not os.path.isdir(html_folder):
            debug(" ... Files not found. Copying.")
            # The html folder ships next to this module; copy it wholesale.
            head, tail = os.path.split(module_path())
            src = os.path.join(head, u"html")
            dst = html_folder
            shutil.copytree(src, dst)
            return True
        return False

    def CheckUpdates(self, language):
        """
        Check Felix server for updates.

        :param language: The UI language for showing the update dialog.
        """
        try:
            debug(u"Checking for updates. Language is %s" % language)
            loc.language.change_language(language)
            prefs = get_prefs()
            check_permission(prefs, date.today())
            set_prefs(prefs)
        except Exception:
            # BUG FIX: the original called LOGGER.exception(), but no LOGGER
            # was ever defined in this module, so any failure here raised a
            # NameError that masked the real problem. Also narrowed the bare
            # "except:" so SystemExit/KeyboardInterrupt still propagate.
            import logging

            logging.getLogger(__name__).exception("Failed to check updates")

    def CheckUpdatesRequested(self, language):
        """
        If the user checks for updates, we don't have to see whether the user
        wants to check automatically.

        :param language: The UI language for showing dialogs.
        """
        try:
            debug(u"Checking for updates at user request. Language is %s" % language)
            loc.language.change_language(language)
            check_updates()
            prefs = get_prefs()
            prefs["last_update_check"] = date.today()
            set_prefs(prefs)
        except Exception:
            # See CheckUpdates: fixes the undefined-LOGGER NameError and
            # the overly-broad bare except.
            import logging

            logging.getLogger(__name__).exception("Failed to check updates")
def get_command():
    """Build the argv list used to launch the update-checker program."""
    modpath = loc.module_path()
    # Quote the path when it contains spaces.
    template = '"%s"' if " " in modpath else '%s'
    if loc.we_are_frozen():
        # Frozen build ships a standalone checker executable.
        return [template % os.path.join(modpath, "CheckUpdates.exe")]
    script = template % os.path.join(modpath, "update_checker_felix.py")
    return ["pythonw.exe", script]
def check_updates():
    """Launch the update checker as a detached background process."""
    subprocess.Popen(get_command(), shell=False)
def ask_updates():
    """Launch the update checker in "ask the user first" mode."""
    cmd = get_command() + ["ask"]
    subprocess.Popen(cmd, shell=False)
def check_permission(prefs, today):
    """Trigger an update check when enabled and the check interval has passed.

    Mutates ``prefs["last_update_check"]`` whenever a check is launched.
    """
    if not (prefs["ask_about_updates"] or prefs["check_updates"]):
        return  # update checking disabled entirely

    last_check = prefs["last_update_check"]
    due = not last_check or (today - last_check).days >= CHECK_INTERVAL
    if not due:
        return

    if prefs["ask_about_updates"]:
        ask_updates()
    else:
        check_updates()
    prefs["last_update_check"] = today
def reg():
    """Register COM servers"""
    # Registers FelixUtilities with Windows COM (handles the command-line
    # register/unregister flags) and redirects logging to a file.
    debug("Registering COM servers")
    win32com.server.register.UseCommandLine(FelixUtilities)
    utils.determine_redirect("felix_utilities.log")
# Add code so that when this script is run by
# Python.exe, it self-registers.
# (COM registration is handled by reg() via win32com's UseCommandLine.)
if __name__ == '__main__':
    reg()
| [
"software@ginstrom.com"
] | software@ginstrom.com |
4353deb50a51a18cfc392b8d5fada6467c849fe1 | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /worklink_write_3/domain_associate.py | 622afc66bafaf4062a1575d617c77e954bc7ee2e | [] | no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,279 | py | #!/usr/bin/python
# -*- codding: utf-8 -*-
import os
import sys

# Make the parent directory importable so the sibling "common" package can
# be found when this file is run directly as a script.
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))

from common.execute_command import write_three_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/worklink/associate-domain.html
if __name__ == '__main__':
    """
    describe-domain : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/worklink/describe-domain.html
    disassociate-domain : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/worklink/disassociate-domain.html
    list-domains : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/worklink/list-domains.html
    """
    # Help text describing the three required parameters of the command.
    parameter_display_string = """
    # fleet-arn : The Amazon Resource Name (ARN) of the fleet.
    # domain-name : The fully qualified domain name (FQDN).
    # acm-certificate-arn : The ARN of an issued ACM certificate that is valid for the domain being associated.
    """

    add_option_dict = {}
    add_option_dict["parameter_display_string"] = parameter_display_string

    # ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"

    # Delegates to write_three_parameter (common.execute_command) for the
    # "aws worklink associate-domain" command -- presumably it collects the
    # three parameter values and runs the CLI; confirm in that module.
    write_three_parameter("worklink", "associate-domain", "fleet-arn", "domain-name", "acm-certificate-arn", add_option_dict)
| [
"hcseo77@gmail.com"
] | hcseo77@gmail.com |
9adc81d26ca9708c7ee07b95c8795d117a6c05e9 | 987a82368d3a15b618ff999f28dc16b89e50f675 | /plaso/parsers/winreg_plugins/shutdown.py | f14cb1abcb7795e44c6e63f3728e75b94987c6ff | [
"Apache-2.0"
] | permissive | arunthirukkonda/plaso | 185b30ab4ec90fcc2d280b3c89c521c9eef7b7ab | 846fc2fce715e1f78b11f375f6fe4e11b5c284ba | refs/heads/master | 2021-08-30T15:21:12.267584 | 2017-12-18T12:33:08 | 2017-12-18T12:33:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,893 | py | # -*- coding: utf-8 -*-
"""Windows Registry plugin for parsing the last shutdown time of a system."""
from __future__ import unicode_literals
import construct
from dfdatetime import filetime as dfdatetime_filetime
from dfdatetime import semantic_time as dfdatetime_semantic_time
from plaso.containers import events
from plaso.containers import time_events
from plaso.lib import definitions
from plaso.parsers import winreg
from plaso.parsers.winreg_plugins import interface
__author__ = 'Preston Miller, dpmforensics.com, github.com/prmiller91'
class ShutdownWindowsRegistryEventData(events.EventData):
  """Shutdown Windows Registry event data.

  Attributes:
    key_path (str): Windows Registry key path.
    value_name (str): name of the Windows Registry value.
  """

  DATA_TYPE = 'windows:registry:shutdown'

  def __init__(self):
    """Initializes event data."""
    super(ShutdownWindowsRegistryEventData, self).__init__(
        data_type=self.DATA_TYPE)
    # Populated by ShutdownPlugin.ExtractEvents after construction.
    self.key_path = None
    self.value_name = None
class ShutdownPlugin(interface.WindowsRegistryPlugin):
  """Windows Registry plugin for parsing the last shutdown time of a system."""

  NAME = 'windows_shutdown'
  DESCRIPTION = 'Parser for ShutdownTime Registry value.'

  FILTERS = frozenset([
      interface.WindowsRegistryKeyPathFilter(
          'HKEY_LOCAL_MACHINE\\System\\CurrentControlSet\\Control\\Windows')])

  # The ShutdownTime value data is read as a 64-bit little-endian integer.
  _UINT64_STRUCT = construct.ULInt64('value')

  def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
    """Extracts events from a ShutdownTime Windows Registry value.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
          and other components, such as storage and dfvfs.
      registry_key (dfwinreg.WinRegistryKey): Windows Registry key.
    """
    shutdown_value = registry_key.GetValueByName('ShutdownTime')
    if not shutdown_value:
      return

    # Directly parse the Windows Registry value data in case it is defined
    # as binary data.
    try:
      timestamp = self._UINT64_STRUCT.parse(shutdown_value.data)
    except construct.FieldError as exception:
      timestamp = None
      parser_mediator.ProduceExtractionError(
          'unable to determine shutdown timestamp with error: {0!s}'.format(
              exception))

    if not timestamp:
      # A zero or unparsable timestamp is reported as "Not set".
      date_time = dfdatetime_semantic_time.SemanticTime('Not set')
    else:
      # The integer is interpreted as a FILETIME value.
      date_time = dfdatetime_filetime.Filetime(timestamp=timestamp)

    event_data = ShutdownWindowsRegistryEventData()
    event_data.key_path = registry_key.path
    event_data.offset = shutdown_value.offset
    event_data.value_name = shutdown_value.name

    event = time_events.DateTimeValuesEvent(
        date_time, definitions.TIME_DESCRIPTION_LAST_SHUTDOWN)
    parser_mediator.ProduceEventWithEventData(event, event_data)
# Make the plugin discoverable by the Windows Registry parser.
winreg.WinRegistryParser.RegisterPlugin(ShutdownPlugin)
| [
"joachim.metz@gmail.com"
] | joachim.metz@gmail.com |
1cf5fc50d95b24370dad7de31e70a0868c09d967 | 0af9ff3662729cf1f949e91b8d8862ff739e7b5f | /números primos.py | 24d5729c603ea4e67a9383127801a93e2941912d | [] | no_license | lucasjukr94/JavaProjects | e979726564e6ed47974931c3e5e04bbd06901e17 | 9c7315402c74458de054be21e6986c710ddfd5ef | refs/heads/master | 2021-09-15T20:27:42.619406 | 2018-06-10T14:54:44 | 2018-06-10T14:54:44 | 125,927,047 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,688 | py | '''
╔══════════════════════════════════════════════════════════════════════════════╗
║ Instituição : Faculdade de Tecnologia de São Paulo ║
║ Departamento : Tecnologia da Informação ║
║ Curso : Análise e Desenvolvimento de Sistemas ║
║ Autor : Lucio Nunes de Lira ║
╠══════════════════════════════════════════════════════════════════════════════╣
║ Evento : Palestra (Números Primos: Conceitos & Algoritmos) ║
║ Programa : Implementações de algoritmos para cálculo de números primos ║
║ Linguagem : Python 3 ║
║ Compilador : CPython (3.6.4) ║
║ Versão : A (Rev. 0) ║
╚══════════════════════════════════════════════════════════════════════════════╝
'''
# Importação das funções necessárias
from math import sqrt, floor
# Counts the divisors between 1 and n (a number is prime iff it has
# exactly two divisors: 1 and itself).
def primo_1(n):
    divisor_count = 0
    for candidate in range(1, n + 1):
        if n % candidate == 0:
            divisor_count += 1
    if divisor_count == 2:
        return True
    else:
        return False
# Counts the divisors between 1 and n, returning the comparison directly.
def primo_2(n):
    divisor_count = sum(1 for candidate in range(1, n + 1) if n % candidate == 0)
    return divisor_count == 2
# Counts the divisors between 2 and n-1: prime iff none exist and n > 1.
def primo_3(n):
    has_proper_divisor = any(n % candidate == 0 for candidate in range(2, n))
    return n > 1 and not has_proper_divisor
# Stops the trial divisions at the first division with zero remainder;
# n is prime exactly when that first divisor is n itself.
def primo_4(n):
    if n == 1:
        return False
    candidate = 2
    while n % candidate:
        candidate += 1
    return candidate == n
# Tests only odd divisors (2 is handled as a special case).
def primo_5(n):
    if n == 1:
        return False
    if n % 2 == 0:
        return n == 2  # 2 is the only even prime
    candidate = 3
    while n % candidate:
        candidate += 2
    return candidate == n
# Tests odd divisors only up to n // 2.
def primo_6(n):
    if n % 2 == 0:
        return n == 2
    half = n // 2
    candidate = 3
    while candidate <= half and n % candidate:
        candidate += 2
    # No divisor found below the limit (and n > 1) means n is prime.
    return n > 1 and candidate > half
# Tests odd divisors only up to floor(sqrt(n)).
def primo_7(n):
    if n % 2 == 0:
        return n == 2
    limit = floor(sqrt(n))
    candidate = 3
    while candidate <= limit and n % candidate:
        candidate += 2
    return n > 1 and candidate > limit
# Sieve of Eratosthenes and its helpers.


# Marks the multiples of n with zero, in place.
# Note: starting from the second multiple, so n itself is kept.
def marca_multiplos(n, lista, lim):
    for multiple in range(n * 2, lim + 1, n):
        lista[multiple] = 0


# Returns a new list with every zero entry removed.
def filtra(lista):
    return [value for value in lista if value != 0]


# "Sieve of Eratosthenes" algorithm.
# lim = upper limit (inclusive).
def crivo(lim):
    numbers = list(range(lim + 1))
    numbers[1] = 0  # 1 is not prime; index 0 already holds 0
    for base in range(2, floor(sqrt(lim)) + 1):
        marca_multiplos(base, numbers, lim)
    return filtra(numbers)
| [
"noreply@github.com"
] | lucasjukr94.noreply@github.com |
d5aa6095ffe361c6c24f7e7ace9e878dcd34a356 | 8a452b71e3942d762fc2e86e49e72eac951b7eba | /leetcode/editor/en/[1065]Index Pairs of a String.py | 9596550ca2c48f4cb14e3df379385e19b37fe19c | [] | no_license | tainenko/Leetcode2019 | 7bea3a6545f97c678a176b93d6622f1f87e0f0df | 8595b04cf5a024c2cd8a97f750d890a818568401 | refs/heads/master | 2023-08-02T18:10:59.542292 | 2023-08-02T17:25:49 | 2023-08-02T17:25:49 | 178,761,023 | 5 | 0 | null | 2019-08-27T10:59:12 | 2019-04-01T01:04:21 | JavaScript | UTF-8 | Python | false | false | 1,478 | py | # Given a string text and an array of strings words, return an array of all
# index pairs [i, j] so that the substring text[i...j] is in words.
#
# Return the pairs [i, j] in sorted order (i.e., sort them by their first
# coordinate, and in case of ties sort them by their second coordinate).
#
#
# Example 1:
#
#
# Input: text = "thestoryofleetcodeandme", words = ["story","fleet","leetcode"]
# Output: [[3,7],[9,13],[10,17]]
#
#
# Example 2:
#
#
# Input: text = "ababa", words = ["aba","ab"]
# Output: [[0,1],[0,2],[2,3],[2,4]]
# Explanation: Notice that matches can overlap, see "aba" is found in [0,2] and
# [2,4].
#
#
#
# Constraints:
#
#
# 1 <= text.length <= 100
# 1 <= words.length <= 20
# 1 <= words[i].length <= 50
# text and words[i] consist of lowercase English letters.
# All the strings of words are unique.
#
# Related Topics Array String Trie Sorting 👍 203 👎 73
# leetcode submit region begin(Prohibit modification and deletion)
class Solution:
def indexPairs(self, text: str, words: List[str]) -> List[List[int]]:
res = []
words.sort(key=lambda x: len(x))
for i in range(len(text)):
for word in words:
if i + len(word) > len(text):
continue
if word == text[i:i + len(word)]:
res.append([i, i + len(word) - 1])
return res
# leetcode submit region end(Prohibit modification and deletion)
| [
"31752048+tainenko@users.noreply.github.com"
] | 31752048+tainenko@users.noreply.github.com |
7e323ca57a44b81a67fc770dd4181fb866231c6b | 0c7ffe86e5060cf7cfbc6dfc3ec5eb4367c20962 | /TMB_files/learn-to-program-the-fundamentals/Week 6/a3.py | f1a1dbdaa6b4b419ea18f8d629ddac3f99395824 | [] | no_license | BreslauerTodd/telework_data_and_code | 049e2d4fb1a4479078f6edf14434034824dc6f40 | 38c39fbab481b48b6e2a15f2a130329725d2e07e | refs/heads/master | 2022-11-20T12:21:44.952847 | 2020-07-20T18:20:21 | 2020-07-20T18:20:21 | 279,941,119 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,920 | py | def is_valid_word(wordlist, word):
""" (list of str, str) -> bool
Return True if and only if word is an element of wordlist.
>>> is_valid_word(['ANT', 'BOX', 'SOB', 'TO'], 'TO')
True
"""
if word in wordlist:
inList = True
else:
inList = False
return inList
def make_str_from_row(board, row_index):
    """ (list of list of str, int) -> str

    Return the characters from the row of the board with index row_index
    as a single string.

    >>> make_str_from_row([['A', 'N', 'T', 'T'], ['X', 'S', 'O', 'B']], 0)
    'ANTT'
    """
    # str.join is linear; repeated string concatenation is quadratic.
    return ''.join(board[row_index])
def make_str_from_column(board, column_index):
    """ (list of list of str, int) -> str

    Return the characters from the column of the board with index column_index
    as a single string.

    >>> make_str_from_column([['A', 'N', 'T', 'T'], ['X', 'S', 'O', 'B']], 1)
    'NS'
    """
    # Collect the column_index-th character of every row, top to bottom.
    return ''.join(row[column_index] for row in board)
def board_contains_word_in_row(board, word):
    """ (list of list of str, str) -> bool

    Return True if and only if one or more of the rows of the board contains
    word.

    Precondition: board has at least one row and one column, and word is a
    valid word.

    >>> board_contains_word_in_row([['A', 'N', 'T', 'T'], ['X', 'S', 'O', 'B']], 'SOB')
    True
    """
    # any() short-circuits just like the original early return did.
    return any(
        word in make_str_from_row(board, row_index)
        for row_index in range(len(board))
    )
def board_contains_word_in_column(board, word):
    """ (list of list of str, str) -> bool

    Return True if and only if one or more of the columns of the board
    contains word.

    Precondition: board has at least one row and one column, and word is a
    valid word.

    >>> board_contains_word_in_column([['A', 'N', 'T', 'T'], ['X', 'S', 'O', 'B']], 'NO')
    False
    """
    # The first row's length gives the number of columns (precondition
    # guarantees at least one row).
    return any(
        word in make_str_from_column(board, column_index)
        for column_index in range(len(board[0]))
    )
def board_contains_word(board, word):
    """ (list of list of str, str) -> bool

    Return True if and only if word appears in board.

    Precondition: board has at least one row and one column.

    >>> board_contains_word([['A', 'N', 'T', 'T'], ['X', 'S', 'O', 'B']], 'ANT')
    True
    """
    # "or" short-circuits: the column scan only runs when the row scan fails.
    return (board_contains_word_in_row(board, word)
            or board_contains_word_in_column(board, word))
def word_score(word):
    """ (str) -> int
    Return the point value the word earns.
    Word length: < 3: 0 points
                 3-6: 1 point per character for all characters in word
                 7-9: 2 points per character for all characters in word
                 10+: 3 points per character for all characters in word
    >>> word_score('DRUDGERY')
    16
    """
    # Check the longest bracket first so each guard can simply return.
    length = len(word)
    if length >= 10:
        return 3 * length
    if length >= 7:
        return 2 * length
    if length >= 3:
        return length
    return 0
def update_score(player_info, word):
    """ ([str, int] list, str) -> NoneType
    player_info is a list with the player's name and score. Update player_info
    by adding the point value word earns to the player's score.
    >>> update_score(['Jonathan', 4], 'ANT')
    """
    # Mutates the list in place; index 1 holds the running score.
    player_info[1] += word_score(word)
def num_words_on_board(board, words):
    """ (list of list of str, list of str) -> int
    Return how many words appear on board.
    >>> num_words_on_board([['A', 'N', 'T', 'T'], ['X', 'S', 'O', 'B']], ['ANT', 'BOX', 'SOB', 'TO'])
    3
    """
    # Count the candidates that appear anywhere on the board.
    return sum(1 for candidate in words if board_contains_word(board, candidate))
def read_words(words_file):
    """ (file open for reading) -> list of str
    Return a list of all words (with newlines removed) from open file
    words_file.
    Precondition: Each line of the file contains a word in uppercase characters
    from the standard English alphabet.
    """
    # One word per line; strip only the trailing newline.
    return [entry.rstrip('\n') for entry in words_file]
def read_board(board_file):
    """ (file open for reading) -> list of list of str
    Return a board read from open file board_file. The board file will contain
    one row of the board per line. Newlines are not included in the board.
    """
    # list(s) splits a string into its characters, replacing the
    # original manual `for i in range(len(...))` append loop.
    return [list(line.rstrip('\n')) for line in board_file]
| [
"noreply@github.com"
] | BreslauerTodd.noreply@github.com |
5a2126d0acf87e3690274f9ba267477d89a89241 | c46b327faa127e0803fbd40521a769ac54d8e2f9 | /code/word2vec.py | 03c0bffc124e52ab7b754b802ecc48b44382e853 | [] | no_license | lizy10/Modified-and-Annotated-Code-of--An--Unsupervised-Neural-Attention-Model-for-Aspect-Extraction-- | f74abd8cc6fd90322dc3876ab8eb250d55f60467 | 0b40ec8897398e214d6a6c7a8936cf34686ea233 | refs/heads/master | 2021-09-15T01:03:07.254804 | 2018-05-23T10:44:37 | 2018-05-23T10:44:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 913 | py | import gensim
import codecs
# Reading the data through a generator avoids loading the whole file up front, which is faster.
class MySentences(object):
    """Iterable over the whitespace-tokenized lines of a UTF-8 text file.

    Re-opens the file on every iteration pass, so the corpus can be
    streamed multiple times without holding it all in memory.
    """

    def __init__(self, filename):
        # Path to the corpus file; opened lazily in __iter__.
        self.filename = filename

    def __iter__(self):
        # Use a context manager so the file handle is closed when the
        # generator is exhausted or discarded (the original left the
        # handle open until garbage collection).
        with codecs.open(self.filename, 'r', 'utf-8') as handle:
            for line in handle:
                yield line.split()
# Convert the data into vector form using the word2vec algorithm.
def main(domain):
    """Train a Word2Vec embedding on one domain's corpus and save it.

    domain -- dataset name used to locate the preprocessed train file
    (e.g. 'restaurant' or 'beer').
    """
    train_path = '../preprocessed_data/%s/train.txt' % (domain)
    out_path = '../preprocessed_data/%s/w2v_embedding' % (domain)
    corpus = MySentences(train_path)
    embedding = gensim.models.Word2Vec(corpus, size=200, window=5, min_count=10, workers=4)
    # Sanity check: print one learned vector to show training had an effect.
    print('model.mv:',embedding.wv['like'])
    embedding.save(out_path)
# Train embeddings for the restaurant and beer datasets separately.
print('Pre-training word embeddings ...')
# Run the pipeline once per dataset domain, in the same order as before.
for domain_name in ('restaurant', 'beer'):
    main(domain_name)
| [
"woshihaodayikeshu@163.com"
] | woshihaodayikeshu@163.com |
d870a37c961f3bdf556b4c8622bb56f0ade0454a | 400aa1b481650fee1d9e55ea0003384f5355808f | /main.py | b5e263a8f28c3550702e810f95b271958db28806 | [] | no_license | brunomarvss/moodmemoir | 13f34ff41002f10ae186ec5ae30a5cd905c7edad | a040b797ec0d56d81039f85515c1a49a2043591e | refs/heads/master | 2021-01-12T16:56:59.424680 | 2017-06-25T07:05:48 | 2017-06-25T07:05:48 | 71,473,361 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 188,216 | py | #kivy.require("1.8.1")
#Bruno
#Iringan
from kivy.app import App #create app
from kivy.uix.label import Label
from kivy.uix.floatlayout import FloatLayout #import float layout/ frame
from kivy.uix.gridlayout import GridLayout
from kivy.uix.boxlayout import BoxLayout
from kivy.core.window import Window
from kivy.uix.button import Button
from kivy.uix.label import Label
from kivy.uix.textinput import TextInput
from kivy.uix.carousel import Carousel
from kivy.uix.image import Image
from kivy.uix.popup import Popup
from kivy.uix.screenmanager import ScreenManager, Screen,SwapTransition,FadeTransition, SlideTransition
from kivy.uix.dropdown import DropDown
from kivy.uix.scrollview import ScrollView
import datetime
from garden import Graph, MeshLinePlot, SmoothLinePlot
from kivy.core.window import Window
from kivy.uix.label import Label
from decimal import*
from kivy.utils import get_color_from_hex as rgb
from math import sin
from textwrap import fill
from kivy.core.text import Label as CoreLabel
from functools import partial
from plyer import battery
from datetime import timedelta
from kivy.uix.slider import Slider
#naive Classifier module
import naive_classifier
import sqlite3
import re
import random
# Module-level connection to the app's SQLite store; shared by every screen.
con = sqlite3.connect("moodJournal.db")
cursor = con.cursor()
# One-time schema setup / debug statements, kept commented for reference:
#print(datetime.datetime.now().strftime('%Y-%m-%d %H:%M %p'))
#cursor.execute("DROP table if exists user_account")
#cursor.execute("CREATE TABLE user_info(id INTEGER PRIMARY KEY AUTOINCREMENT,thoughts_info text)")
#cursor.execute("INSERT INTO user_account(password) VALUES('8888')")
class NumRestrict(TextInput):
    """TextInput subclass that strips non-digit characters from input."""
    # Class-level compiled pattern matching every character that is NOT 0-9.
    res = re.compile('[^0-9]')
    def insert_text(self, substring, from_undo=False):
        # Remove non-digits before handing the text to the base TextInput.
        # NOTE(review): the split/join on '%' would let a single literal '%'
        # through — presumably unintentional for a numeric PIN field; confirm.
        res = self.res
        s = '%'.join([re.sub(res, '', s) for s in substring.split('%', 1)])
        return super(NumRestrict, self).insert_text(s, from_undo=from_undo)
class MainApp(App): #Inherited Class App
    def open_settings(self):
        # No-op override: suppresses Kivy's default settings panel.
        pass
    def on_pause(self):
        # Returning True allows the app to be paused instead of stopped.
        return True
    def on_resume(self):
        # Nothing to restore when the app comes back from pause.
        pass
def hook_keyboard(self,window,key,*largs):
if key==27:
return True
def build(self):
#Global Variables
global color_val
color_val = 'blue'
cursor.execute('SELECT COUNT(*) FROM color_tbl')
color_count = cursor.fetchone()[0]
con.commit()
if color_count == 0:
cursor.execute("INSERT INTO color_tbl(color) VALUES('"+color_val+"')")
con.commit()
elif color_count >= 1:
cursor.execute("SELECT color from color_tbl")
color_val = cursor.fetchone()[0]
con.commit()
global mood_var
global gN
global d_now
global dYear
global dMonth
global dDay
global wplot
global wplot2
global wplot3
global mood_value
wplot = SmoothLinePlot(mode="points",color=rgb('76abe6'))
wplot2 = SmoothLinePlot(mode="points",color=rgb('ff6666'))
wplot3 = SmoothLinePlot(mode="points",color=rgb('faff66'))
dYear = ' '
dMonth = ' '
dDay = '01'
mood_value = 0
#Window.size = (400,700)
sm = ScreenManager() #ManagesForms
sc1 = Screen(name='firstscreen')
sc2 = Screen(name ='secondscreen')
sc3 = Screen(name = 'thirdscreen')
scmain= Screen(name = 'mainscreen')
sc4 =Screen(name = 'fourthscreen')
sc5 = Screen(name='fifthscreen')
sc6 = Screen(name='sixthscreen')
sc7 = Screen(name='seventhscreen')
sm.add_widget(sc1)
sm.add_widget(scmain)
sm.add_widget(sc3)
sm.add_widget(sc2)
sm.add_widget(sc4)
sm.add_widget(sc5)
sm.add_widget(sc6)
sm.add_widget(sc7)
#--SCREEN1-------------------------------------
root= FloatLayout(size_hint = (1.0, 1.0))
frontBg = Image(pos= root.pos,
source="img/front.jpg",
allow_stretch=True,
keep_ratio=False)
mmlogo = Image(pos= root.pos,
source="img/mmfront.png", #img source
allow_stretch=False,
keep_ratio=False,
pos_hint={'x':0,'center_y': .7}, #scalable position
size_hint=(1,.9)) #scalable size
label_start = Label(text="[b]JOURNAL IT \nIN A SNAP[/b]\n[size=20sp]share what you feel \non the go[/size]",
pos_hint={'x':.1,'center_y': .35},
size_hint=(.5,.1),
markup=True,
font_size=(30))
bPopName = BoxLayout(orientation='vertical',padding=5,spacing = 5)
lblname= Label(text="[color=000000]Hi! What is your name?[/color]", markup=True,
font_size='10sp')
tbName=TextInput()
buttonPopName = Button(text="Confirm",
background_normal='img/blue.jpg')
popupname = Popup(title='',
background='img/white.jpg',
content=bPopName,
auto_dismiss=False,
size_hint=(0.8, 0.3))
#gender
bPopGender = BoxLayout(orientation='vertical',padding=5,spacing = 5)
lblgender= Label(text="[color=000000]Are you a Male or Female?[/color]",
markup=True,
font_size='10sp')
buttonPopGender1 = Button(text="Male",
background_normal='img/blue.jpg')
buttonPopGender2 = Button(text="Female",
background_normal='img/blue.jpg')
popupgender = Popup(title='',
background='img/white.jpg',
content=bPopGender,
auto_dismiss=False,
size_hint=(0.8, 0.3))
#help
bPopHelp = BoxLayout(orientation='vertical')
lblHelp= Label(text="[color=000000]<<< Swipe to left[/color]",
markup=True,
font_size='10sp')
tut1 = Image(source="img/tut1.jpg")
tut2 = Image(source="img/tut2.jpg")
tut3 = Image(source="img/tut3.jpg")
tut4 = Image(source="img/tut4.jpg")
carouselHelp = Carousel(direction='right')
btnHelp1 = Button(text="[color=ffffff]Okay, I got it![/color]",
markup=True,
pos_hint={'x':0.2,'center_y': 0.5},
size_hint=(0.6, 0.4))
popuphelp = Popup(title='',
background='img/white.jpg',
content=bPopHelp,
auto_dismiss=False,
size_hint=(0.9, 0.8))
        def popgender():
            # Populate and show the gender-selection popup
            # (closure over widgets created above in build()).
            bPopGender.add_widget(lblgender)
            bPopGender.add_widget(buttonPopGender1)
            bPopGender.add_widget(buttonPopGender2)
            popupgender.open()
        def popname():
            # Ask for the user's name only on first run, i.e. when the
            # user_account table is still empty.
            cursor.execute("SELECT COUNT(*) from user_account")
            if cursor.fetchone()[0] == 0:
                bPopName.add_widget(lblname)
                bPopName.add_widget(tbName)
                bPopName.add_widget(buttonPopName)
                popupname.open()
            else:
                pass
        def screen1():
            # Assemble the splash/start screen: background image,
            # tagline label and app logo, with a fade transition.
            sm.transition = FadeTransition()
            sc1.add_widget(root)
            root.add_widget(frontBg)
            root.add_widget(label_start)
            root.add_widget(mmlogo)
#-SCREEN2--------------------------------------
root2= FloatLayout(size_hint = (1.0, 1.0))
frontBg2 = Image(pos= root.pos,
source="img/lock.jpg",
allow_stretch=True,
keep_ratio=False)
#popoutpin--
bPopPin = GridLayout(cols=2,row=2,padding=10,spacing = 5)
lblPin= Label(text="[color=000000]Enter PIN[/color]",markup=True)
lblPin2= Label(text="[color=000000]Re-enter PIN[/color]",markup=True)
lblDum= Label(text=" ")
tbPin1 = NumRestrict(password = True,
password_mask ="*")
tbPin2 = NumRestrict(password = True,
password_mask ="*")
buttonPopPin = Button(text="CONFIRM",
background_normal='img/blue.jpg')
popupPin = Popup(title=' ',
content=bPopPin,
background='img/white.jpg',
auto_dismiss=False,
size_hint=(0.8, 0.3))
passimg1 = Button(pos= root.pos,
text="[color=ffffff]1[/color]",
font_size='30sp',
markup=True,
pos_hint={'x':0.2,'center_y': .6},
font_name='helv.otf',
background_color=[0,0,0,0.3],
size_hint=(0.15,0.1))
passimg2 = Button(pos= root.pos,
text="[color=ffffff]2[/color]",
font_size='30sp',
markup=True,
font_name='helv.otf',
background_color=[0,0,0,0.3],
size_hint=(0.15,0.1),
pos_hint={'x':0.45,'center_y': .6})
passimg3 = Button(pos= root.pos,
text="[color=ffffff]3[/color]",
font_size='30sp',
markup=True,
font_name='helv.otf',
background_color=[0,0,0,0.3],
pos_hint={'x':0.7,'center_y': .6},
size_hint=(0.15,0.1))
passimg4 = Button(pos= root.pos,
text="[color=ffffff]4[/color]",
font_size='30sp',
markup=True,
font_name='helv.otf',
background_color=[0,0,0,0.3],
pos_hint={'x':0.2,'center_y': .45},
size_hint=(0.15,0.1))
passimg5 = Button(pos= root.pos,
text="[color=ffffff]5[/color]",
font_size='30sp',
markup=True,
background_color=[0,0,0,0.3],
font_name='helv.otf',
pos_hint={'x':0.45,'center_y': .45},
size_hint=(0.15,0.1))
passimg6 = Button(pos= root.pos,
text="[color=ffffff]6[/color]",
font_size='30sp',
markup=True,
font_name='helv.otf',
background_color=[0,0,0,0.3],
pos_hint={'x':0.7,'center_y': .45},
size_hint=(0.15,0.1))
passimg7 = Button(pos= root.pos,
text="[color=ffffff]7[/color]",
font_size='30sp',
markup=True,
font_name='helv.otf',
background_color=[0,0,0,0.3],
pos_hint={'x':0.2,'center_y': .3},
size_hint=(0.15,0.1))
passimg8 = Button(pos= root.pos,
text="[color=ffffff]8[/color]",
font_size='30sp',
markup=True,
background_color=[0,0,0,0.3],
font_name='helv.otf',
pos_hint={'x':0.45,'center_y': .3},
size_hint=(0.15,0.1))
passimg9 = Button(pos= root.pos,
text="[color=ffffff]9[/color]",
font_size='30sp',
markup=True,
font_name='helv.otf',
background_color=[0,0,0,0.3],
pos_hint={'x':0.7,'center_y': .3},
size_hint=(0.15,0.1))
passimgclr = Button(pos= root.pos,
text="[color=ffffff]<[/color]",
font_size='25sp',
markup=True,
font_name='helv.otf',
background_color=[0,0,0,0.3],
pos_hint={'x':0.2,'center_y': .15},
size_hint=(0.15,0.1))
passimg0 = Button(pos= root.pos,
text="[color=ffffff]0[/color]",
font_size='30sp',
markup=True,
font_name='helv.otf',
background_color=[0,0,0,0.3],
pos_hint={'x':0.45,'center_y': .15},
size_hint=(0.15,0.1))
passimgok =Button(pos= root.pos,
text="[color=ffffff]OK[/color]",
font_name='helv.otf',
font_size='25sp',
markup=True,
background_color=[0,0,0,0.3],
pos_hint={'x':0.7,'center_y': .15},
size_hint=(0.15,0.1))
titlepin = Button(text="Enter PIN Code",
pos_hint={'x':0,'center_y': 0.96},
font_name='helv.otf',
background_color=[0,0,0,0.3],
font_size='25sp',
size_hint=(1,.09))
txtInp2 = NumRestrict(pos_hint={'x':.2,'center_y': .8},
font_name='helv.otf',
size_hint=(.65,.1),
foreground_color=[1,1,1,1],
background_color=[0,0,0,0.5],
font_size=(80),
password = True,
password_mask ="*",
readonly = True)
#--Screen3------------------------------------
root3= FloatLayout(size_hint = (1.0, 1.0))
frontBg3 = Image(pos= root.pos,
source="img/white.jpg",
allow_stretch=True,
keep_ratio=False)
b_sc3menu = Button(text="T",
font_name='IconsSouthSt.ttf',
font_size='20sp',
pos_hint={'x':0.02,'center_y': 0.96},
background_normal="img/"+color_val+".jpg",
size_hint=(.115,.09))
lblmood = Label(text="[color=000000]\nHow are you today?[/color]",
markup=True,
size_hint=(1, 0.3),
font_size='20sp')
bPop33 = BoxLayout()
bPop3= BoxLayout(orientation="vertical",
spacing=30,
padding=10)
popup3 = Popup(title="",
separator_color=[1,0,0,1],
background="img/white.jpg",
content=bPop3,
auto_dismiss=False,
size_hint=(0.9, 0.4))
#popoutSave--
bPopSave = BoxLayout(orientation='vertical',padding=5,spacing = 5)
lblsave= Label(text="[color=000000]Log saved![/color]",
font_size='15sp',
markup=True)
buttonPopSave = Button(text="Ok",
background_normal="img/"+color_val+".jpg")
popupsave = Popup(title='',
separator_color=[1,0,0,0.5],
background='img/white.jpg',
content=bPopSave,
auto_dismiss=False,
size_hint=(0.8, 0.3))
#-------
bHappy=Label(text="[color=ffdf00]m[/color]",
font_size='50sp',
font_name='smile.ttf',
markup=True,
size_hint=(0.2, 1),
background_color=[0,0,0,0],)
bHappy2=Label(text="[color=ffdf00]A[/color]",
font_size='50sp',
font_name='smile.ttf',
size_hint=(0.2, 1),
markup=True,
background_color=[0,0,0,0],)
bHappy3=Label(text="[color=ffdf00]C[/color]",
font_size='50sp',
font_name='smile.ttf',
size_hint=(0.2, 1),
markup=True,
background_color=[0,0,0,0],)
bSad=Label(text="[color=ffdf00]d[/color]",
font_size='50sp',
font_name='smile.ttf',
markup=True,
size_hint=(0.2,1),
background_color=[0,0,0,0],)
bAngry=Label(text="[color=ffdf00]W[/color]",
font_size='50sp',
font_name='smile.ttf',
markup=True,
size_hint=(0.2, 1),
background_color=[0,0,0,0])
b_sc3 = Button(text="[color=ffffff][b]SAVE LOG[b][/color]",
markup=True,
font_size='15sp',
background_normal="img/"+color_val+".jpg",
pos_hint={'x':.35,'center_y': .35},
size_hint=(.3,.06))
input_sc3 = TextInput(pos_hint={'x':.11,'center_y': .60},
size_hint=(.8,.3),
font_size='20sp')
titlewrite = Button(text="Write Log",
font_size='25sp',
pos_hint={'x':0,'center_y': 0.96},
background_normal="img/"+color_val+".jpg",
background_down="img/"+color_val+".jpg",
size_hint=(1,.09))
label_write = Label(text="[color=ffffff][b]How is your day today?[/b][/color]",
markup = True,
pos_hint={'x':.45,'center_y': .83},
size_hint=(.1,.1),
font_size='20sp',
size=(11,11))
btnSlider = Button(text='This is my mood',
markup = True,
background_normal="img/"+color_val+".jpg",
#font_name='helv.otf',
font_size='15sp',
size_hint=(1,1),)
        def my_callback(instance, value):
            # Slider callback: stores the current slider position (range
            # -1..1 per the Slider below) as a 2-decimal string in the
            # module-global mood_value.
            global mood_value
            #print('Current volume level: %0.2f' % value)
            mood_value = str('%0.2f' % value)
tryslide=Slider(value_track=True,
min=-1,
max=1,
value_track_color=[1, 0, 0, 1],
size_hint=(1, 0.3),
)
tryslide.bind(value=my_callback)
#Screen6------
root6= FloatLayout(size_hint = (1.0, 1.0))
frontBg6 = Image(pos= root.pos,
source="img/coffee.jpg",
allow_stretch=True,
keep_ratio=False)
root6Title = Button(text="Home",
#font_name='helv.otf',
pos_hint={'x':0,'center_y': 0.96},
background_normal="img/"+color_val+".jpg",
background_down="img/"+color_val+".jpg",
font_size='25sp',
size_hint=(1,.09))
root6time = Label(text="[color=ffffff]"+datetime.datetime.now().strftime('%I:%M ')+"[/color]",
font_name='helv.otf',
markup=True,
pos_hint={'x':0.05,'center_y': 0.75},
font_size='100sp',
size_hint=(1,.095))
root6batt = Label(text="[color=ffffff][b]"+str(battery.status['percentage'])+"%[/b][/color]",
font_name='helv.otf',
markup=True,
pos_hint={'x':0.08,'center_y': 0.5825},
font_size='20sp',
size_hint=(1,.09))
root6lblbatt = Label(text="[color=ffffff]x[/color]",
font_name='IconsSouthSt.ttf',
markup=True,
pos_hint={'x':-0.05,'center_y': 0.6},
font_size='30sp',
size_hint=(1,.09))
root6lbldate = Label(text="[color=ffffff]"+datetime.datetime.now().strftime('%A %d %B %Y')+"[/color]",
font_name='helv.otf',
markup=True,
pos_hint={'x':0,'center_y': 0.63},
font_size='20sp',
size_hint=(1,.09))
#SCREEN7-------------------
root7= FloatLayout(size_hint = (1.0, 1.0))
frontBg7 = Image(pos= root.pos,
source="img/white.jpg",
allow_stretch=True,
keep_ratio=False)
b_sc7menu = Button(text="[color=000000]T[/color]",
markup=True,
font_name='IconsSouthSt.ttf',
font_size='20sp',
pos_hint={'x':0.02,'center_y': 0.96},
background_normal="img/white.jpg",
size_hint=(.115,.08))
userTitle = Button(text="[color=000000]User Garden[/color]",
markup=True,
pos_hint={'x':0,'center_y': 0.96},
font_name='helv.otf',
background_normal="img/white.jpg",
background_down="img/white.jpg",
font_size='25sp',
size_hint=(1,.09))
petalWhole = Image(pos= root.pos,
source="img/flowerone.jpg", #img source
allow_stretch=False,
keep_ratio=False,
pos_hint={'x':0.05,'center_y': .45}, #scalable position
size_hint=(.9,.7)) #scalable size
#--ScreenMain------------------------------------
#--sc5
root5= FloatLayout(size_hint = (1.0, 1.0))
frontBg5 = Image(pos= root.pos,
source="img/white.jpg",
allow_stretch=True,
keep_ratio=False)
b_sc5menu = Button(text="[color=000000]T[/color]",
markup=True,
font_name='IconsSouthSt.ttf',
font_size='20sp',
pos_hint={'x':0.02,'center_y': 0.96},
background_normal="img/white.jpg",
size_hint=(.115,.08))
recentTitle = Button(text="[color=000000]Logs History[/color]",
markup=True,
pos_hint={'x':0,'center_y': 0.96},
font_name='helv.otf',
background_normal="img/white.jpg",
background_down="img/white.jpg",
font_size='25sp',
size_hint=(1,.09))
grid5_history=GridLayout(cols=1,
spacing =20,
padding=30,
size_hint_y=None,
pos_hint={'center_x':.5, 'center_y':.5})
grid5_history.bind(minimum_height=grid5_history.setter('height'))
sv5_history=ScrollView(size_hint=(1,.75),
pos_hint={'center_x':.5, 'center_y':.45})
#scmain
rootmain= FloatLayout(size_hint = (1.0, 1.0))
grid_history=GridLayout(cols=1,
padding=30,
spacing =20,
size_hint_y=None,
pos_hint={'center_x':.5, 'center_y':.48})
grid_history.bind(minimum_height=grid_history.setter('height'))
sv_history=ScrollView(size_hint=(0.9,.8),
pos_hint={'center_x':.5, 'center_y':.45})
frontBgmain = Image(pos= root.pos,
source="img/trees.jpg",
allow_stretch=True,
keep_ratio=False)
b_scmainmenu = Button(text="T",
font_name='IconsSouthSt.ttf',
font_size='20sp',
pos_hint={'x':0.02,'center_y': 0.96},
background_normal="img/"+color_val+".jpg",
size_hint=(.115,.09))
#Get name from database Limit 1 for 1 user only
cursor.execute("SELECT COUNT(*) FROM user_account")
if cursor.fetchone()[0] == 0:
_name = 'guest'
else:
cursor.execute("SELECT name from user_account LIMIT 1")
_name = cursor.fetchone()[0]
con.commit()
bPop = BoxLayout(orientation='vertical',padding=5,spacing = 5)
cursor.execute("SELECT COUNT(*) FROM tipsTbl")
tips_count = cursor.fetchone()[0]
con.commit()
tips_n = random.randrange(1,tips_count)
cursor.execute("SELECT tips_info from tipsTbl WHERE tips_id="+str(tips_n) )
tips = cursor.fetchone()[0]
con.commit()
bPop.add_widget(Label(text="[color=000000]DAILY TIP\n\n"+str(tips)+"\n[/color]",
font_size='11sp',halign='center',
markup=True,
size_hint=(1,.8)))
buttonPop = Button(text="Close",
background_normal="img/roundblue.jpg",
size_hint=(1,.2))
bPop.add_widget(buttonPop)
popup = Popup(title='',separator_color=[1,0,0,0.5],
content=bPop,
background="img/white.jpg",
auto_dismiss=False,
size_hint=(0.8, 0.3))
#popoutexit--
bPopExit = BoxLayout(orientation='vertical',padding=5)
bPopExit2 = BoxLayout(orientation='horizontal', padding=5,spacing=10)
lblexit= Label(text="[color=000000]\n \n Are you sure you want to exit?[/color]",
markup=True,
font_size='15sp',size_hint=(1,.5))
buttonPopExit1 = Button(text="Yes",
size_hint=(.5,.3),
background_normal="img/roundblue.jpg")
buttonPopExit2 = Button(text="No",
size_hint=(.5,.3),
background_normal="img/roundblue.jpg")
popupexit = Popup(background="img/white.jpg",
separator_color=[1,0,0,0.5],
title="",
content=bPopExit,
auto_dismiss=True,
size_hint=(0.8, 0.3))
#popoutAbout--
bPopAb = GridLayout(cols=1,padding=5,spacing = 5)
lblabout= Label(text="[color=000000]Mood Memoir is a\n*Virtual Temperament Journal\n*Diary Archive \n*Monitor your moods"
+"[/color]",
markup=True,halign='center',
size_hint=(1,.5),
font_size='10sp')
buttonPopAb = Button(text="Close",
size_hint=(.5,.2),
background_normal="img/roundblue.jpg")
popupabout = Popup(background="img/white.jpg",
separator_color=[1,0,0,0.5],
content=bPopAb,
title="About Mood Memoir",
auto_dismiss=False,title_color=[0,0,0,1],
size_hint=(0.8, 0.4))
#popoutDev--
bPopD = GridLayout(cols=2,padding=5,spacing = 5)
lblDev1= Label(text="[color=000000]Jomari R. Iringan[/color]",
#font_name='helv.otf',
markup=True,
font_size='12sp',
size_hint=(1,.2))
lblDev2= Label(text="[color=000000]Lamberto A. Bruno, Jr.[/color]",
# font_name='helv.otf',
markup=True,
font_size='12sp')
imgBruno = Image(source="img/bruno.png",
size_hint=(1,1),
height=150,
allow_stretch=True)
imgJoms = Image(source="img/joms.png",
size_hint=(1,1),
allow_stretch=True)
lbldevblank = Label(size_hint=(.1,.1))
buttonPopD = Button(text="Close",
#font_name='helv.otf',
background_normal="img/roundblue.jpg",
size_hint=(.1,.2))
popupDev = Popup(background="img/white.jpg",
title="",
separator_color=[1,0,0,0.5],
content=bPopD,
auto_dismiss=False,
size_hint=(0.8, 0.6))
#popoutColor----
bPopColor= GridLayout(cols=1,padding=5,spacing = 5)
buttonColorRed = Button(text="Red",
# font_name='helv.otf',
background_normal="img/red.jpg",
size_hint=(.1,.2))
buttonColorBlue = Button(text="Blue",
#font_name='helv.otf',
background_normal="img/blue.jpg",
size_hint=(.1,.2))
buttonColorGreen = Button(text="Green",
#font_name='helv.otf',
background_normal="img/green.jpg",
size_hint=(.1,.2))
buttonColorOrange = Button(text="Orange",
# font_name='helv.otf',
background_normal="img/orange.jpg",
size_hint=(.1,.2))
buttonColorPurple = Button(text="Purple",
#font_name='helv.otf',
background_normal="img/purple.jpg",
size_hint=(.1,.2))
buttonColorPRed = Button(text="Pastel Red",
# font_name='helv.otf',
markup=True,
background_normal="img/pred.jpg",
size_hint=(.1,.2))
buttonColorPBlue = Button(text="Pastel Blue",
# font_name='helv.otf',
markup=True,
background_normal="img/pblue.jpg",
size_hint=(.1,.2))
buttonColorPGreen = Button(text="Pastel Green",
# font_name='helv.otf',
markup=True,
background_normal="img/pgreen.jpg",
size_hint=(.1,.2))
buttonColorPOrange = Button(text="Pastel Orange",
#font_name='helv.otf',
markup=True,
background_normal="img/porange.jpg",
size_hint=(.1,.2))
buttonColorPPurple = Button(text="Pastel Purple",
#font_name='helv.otf',
markup=True,
background_normal="img/ppurple.jpg",
size_hint=(.1,.2))
buttonPopColor = Button(text="Close",
# font_name='helv.otf',
background_normal="img/roundblue.jpg",
size_hint=(.1,.2))
popupColor = Popup(background="img/white.jpg",
title="",
separator_color=[1,0,0,0.5],
content=bPopColor,
auto_dismiss=False,
size_hint=(0.8, 0.8))
#popoutCP--
bPopCp = GridLayout(cols=2,row=2,padding=10,spacing = 5)
lblCp1= Label(text="[color=000000]Enter Old PIN[/color]",
markup=True,
size_hint=(1,.3))
#font_name='helv.otf')
lblCp2= Label(text="[color=000000]Enter New PIN[/color]",
size_hint=(1,.3),
markup=True)
#font_name='helv.otf')
lblCp3= Label(text="[color=000000]Re-enter New PIN[/color]",
size_hint=(1,.3),
markup=True)
#font_name='helv.otf')
lblblank= Label(text="",
#font_name='helv.otf',
size_hint=(1,.3))
lblnotice= Label(text="",
# font_name='helv.otf',
font_size='13sp',
size_hint=(1,.2))
tbCp1 = NumRestrict(password = True,
size_hint=(1,.3),
# font_name='helv.otf',
password_mask ="*")
tbCp2 = NumRestrict(password = True,
size_hint=(1,.3),
#font_name='helv.otf',
password_mask ="*")
tbCp3 = NumRestrict(password = True,
size_hint=(1,.3),
#font_name='helv.otf',
password_mask ="*")
buttonPopCpExit=Button(text="CLOSE",
# font_name='helv.otf',
size_hint=(.1,.2),
background_normal="img/roundblue.jpg")
buttonPopCp = Button(text="CONFIRM",
# font_name='helv.otf',
size_hint=(.1,.2),
background_normal="img/roundblue.jpg")
popupCp = Popup(title="",
separator_color=[1,0,0,0.5],
content=bPopCp,
background="img/white.jpg",
auto_dismiss=False,
size_hint=(0.8, 0.4))
#-------
b_logs = Button(text="[color=ffffff]D[/color]",
font_size='30sp',
font_name='trees.ttf',
pos_hint={'x':0.22,'center_y': 0.065},
background_color=[0,0,0,0],
markup=True,
size_hint=(.3,.1))
b_home = Label(text="[color=5e8ac9]D[/color]",
font_size='32sp',
font_name='IconsSouthSt.ttf',
pos_hint={'x':0.007,'center_y': 0.072},
markup=True,
size_hint=(.3,.1))
logsTitle = Button(text="Recent Logs",
#font_name='helv.otf',
pos_hint={'x':0,'center_y': 0.96},
background_normal="img/"+color_val+".jpg",
background_down="img/"+color_val+".jpg",
font_size='25sp',
size_hint=(1,.09))
lblHome = Label(text="[color=5e8ac9][b]Home[/b][/color]",
font_size='15sp',
#font_name='helv.otf',
pos_hint={'x':0.007,'center_y': 0.015},
markup=True,
size_hint=(.3,.1))
lblStat= Label(text="[color=ffffff]Stats[/color]",
font_size='15sp',
#font_name='helv.otf',
pos_hint={'x':0.74,'center_y': 0.015},
markup=True,
size_hint=(.3,.1))
lblLogs = Label(text="[color=ffffff]Logs[/color]",
font_size='15sp',
#font_name='helv.otf',
pos_hint={'x':0.22,'center_y': 0.015},
markup=True,
size_hint=(.3,.1))
lblWrite = Label(text="[color=ffffff]Write[/color]",
font_size='15sp',
#font_name='helv.otf',
pos_hint={'x':0.48,'center_y': 0.015},
markup=True,
size_hint=(.3,.1))
b_write = Button(text="[color=ffffff][b]h[/b][/color]",
font_size='29sp',
font_name='IconsSouthSt.ttf',
pos_hint={'x':0.5,'center_y': 0.068},
markup=True,
background_color=[0,0,0,0],
size_hint=(.3,.1))
b_stats = Button(text="[color=ffffff]W[/color]",
font_size="33sp",
font_name='IconsSouthSt.ttf',
pos_hint={'x':0.74,'center_y': 0.07},
markup=True,
background_color=[0,0,0,0],
size_hint=(.3,.1))
b_user= Button(text="[color=ffffff]W[/color]",
font_size="33sp",
font_name='IconsSouthSt.ttf',
pos_hint={'x':0.74,'center_y': 0.4},
markup=True,
background_color=[0,0,0,0],
size_hint=(.3,.1))
lblUser = Label(text="[color=ffffff]User[/color]",
font_size='15sp',
#font_name='helv.otf',
pos_hint={'x':0.74,'center_y': 0.35},
markup=True,
size_hint=(.3,.1))
dropdown = DropDown()
btnCP = Button(text='[color=ffffff]Change PIN[/color]',
#ont_name='helv.otf',
font_size='11sp',
markup = True,
background_normal = "img/"+color_val+".jpg",
size_hint=(1,None),
height=100)
btnCP.bind(on_release=lambda btn: dropdown.select(btn.text))
btnColor = Button(text='[color=ffffff]Color Scheme[/color]',
#font_name='helv.otf',
font_size='11sp',
markup = True,
background_normal = "img/"+color_val+".jpg",
size_hint=(1,None),
height=100)
btnColor.bind(on_release=lambda btn: dropdown.select(btn.text))
btnCons = Button(text='[color=ffffff]Consultant Mode[/color]',
#font_name='helv.otf',
font_size='11sp',
markup = True,
background_normal = "img/"+color_val+".jpg",
size_hint=(1,None),
height=100)
btnCons.bind(on_release=lambda btn: dropdown.select(btn.text))
btnHelp = Button(text='[color=ffffff]How-To[/color]',
#font_name='helv.otf',
font_size='15sp',
markup = True,
background_normal = "img/"+color_val+".jpg",
size_hint=(1,None),
height=100)
btnHelp.bind(on_release=lambda btn: dropdown.select(btn.text))
btnD = Button(text='[color=ffffff]Developers[/color]',
#font_name='helv.otf',
font_size='11sp',
markup = True,
background_normal = "img/"+color_val+".jpg",
size_hint=(1,None),
height=100)
btnD.bind(on_release=lambda btn: dropdown.select(btn.text))
btnA = Button(text='[color=ffffff]About[/color]',
font_size='11sp',
# font_name='helv.otf',
size_hint=(1,None),
markup = True,
background_normal = "img/"+color_val+".jpg",
height=100)
btnA.bind(on_release=lambda btn: dropdown.select(btn.text))
btnLO = Button(text='[color=ffffff]Log Out[/color]',
#font_name='helv.otf',
font_size='15sp',
markup = True,
background_normal = "img/"+color_val+".jpg",
size_hint=(1,None),
width=50,
height=100)
btnLO.bind(on_release=lambda btn: dropdown.select(btn.text))
# create a big main button
mainbutton = Button(text='[b]::[/b]',
markup=True,
font_size='20sp',
background_normal="img/"+color_val+".jpg",
size_hint=(.25,.085),
pos_hint={'x':0,'center_y': .96})
# show the dropdown menu when the main button is released
# note: all the bind() calls pass the instance of the caller (here, the
# mainbutton instance) as the first argument of the callback (here,
# dropdown.open.).
mainbutton.bind(on_press=dropdown.open)
mainbutton.bind(on_release=dropdown.open)
# one last thing, listen for the selection in the dropdown list and
# assign the data to the button text.
#dropdown.bind(on_select=lambda instance, x: setattr(mainbutton, 'text', x))
lbltest = Label(text="[color=ffffff]How are you today?[/color]",
markup = True)
#------Screen4--------------------------
root4= FloatLayout(size_hint = (1.0, 1.0))
frontBg4 = Image(pos= root.pos,
source="img/white.jpg",
allow_stretch=True,
keep_ratio=False)
# --- Stats screen widget construction -------------------------------------
# Legend explaining the graph line colours (blue = positive, red = negative).
lblDescript = Label(text="[color=000000]LEGEND\n[/color]"+"[color=76abe6]Blue line[/color]"+"[color=393d42] Positive Score\n"+"[color=ff0000]Red line[/color]"+" Negative Score[/color]",
                    font_size='11sp',
                    size_hint=(.20,.05),
                    markup=True,
                    pos_hint={'center_x':.18,'center_y':0.35})
# Label updated by the plot callbacks with the averaged pos/neg scores.
lblTemp = Label(text="[color=000000]Result:[/color]"+"[color=393d42][/color]",
                font_size='11sp',
                size_hint=(.25,.05),
                markup=True,pos_hint={'x':.37,'center_y':0.05}
                )
# "Garden" image; its source is swapped to reflect the positivity percentage
# (flower/wire/bird/tulip images, depending on gender and view).
petalG = Image(pos= root.pos,
               source="img/flowerone.jpg", #img source
               allow_stretch=False,
               keep_ratio=False,
               pos_hint={'x':0.05,'center_y': .45}, #scalable position
               size_hint=(.9,.7)) #scalable size
boxStats = BoxLayout(size_hint=(0.85,0.4),
                     pos_hint={'center_x':.45, 'center_y':.6})
btnCont = Button(text=">",
                 size_hint=(0.08,0.05),
                 pos_hint={'center_x':.93, 'center_y':.6})
#boxStats.bind(minimum_width=boxStats.setter('width'))
# Four-column grid (Date / Time / P_POS / P_NEG) filled by the plot callbacks.
gridData = GridLayout(cols=4,
                      padding=10,
                      spacing=30,
                      size_hint_y = None,
                      size_hint_x = 1,
                      pos_hint={'center_x':.6, 'center_y':.5})
# Let the grid grow vertically so the ScrollView can scroll it.
gridData.bind(minimum_height=gridData.setter('height'))
scrollData=ScrollView(size_hint=(.52,.3),
                      pos_hint={'center_x':.7, 'center_y':.24})
#graph dropdown
# Dropdown that picks which stats view (daily/weekly/monthly/annual) to show.
graphDown = DropDown()
btnDaily = Button(text="[color=ffffff]Daily Stats[/color]",
                  font_size='14sp',
                  markup=True,
                  #font_name='helv.otf',
                  background_normal="img/"+color_val+".jpg",
                  size_hint=(1,None),
                  height=50)
btnDaily.bind(on_release=lambda btn: graphDown.select(btn.text))
btnWeekly = Button(text="[color=ffffff]Weekly Stats[/color]",
                   font_size='14sp',
                   #font_name='helv.otf',
                   markup=True,
                   background_normal="img/"+color_val+".jpg",
                   size_hint=(1,None),
                   height=50)
btnWeekly.bind(on_release=lambda btn: graphDown.select(btn.text))
btnMonthly = Button(text="[color=ffffff]Monthly Stats[/color]",
                    font_size='13sp',
                    #font_name='helv.otf',
                    markup=True,
                    background_normal="img/"+color_val+".jpg",
                    size_hint=(1,None),
                    height=50)
btnMonthly.bind(on_release=lambda btn: graphDown.select(btn.text))
btnAnnual = Button(text="[color=ffffff]Annual Stats[/color]",
                   font_size='14sp',
                   #font_name='helv.otf',
                   markup=True,
                   background_normal="img/"+color_val+".jpg",
                   size_hint=(1,None),
                   height=50)
btnAnnual.bind(on_release=lambda btn: graphDown.select(btn.text))
graphButton = Button(text='View Stats',
                     #font_name = 'helv.otf',
                     font_size='15sp',
                     background_normal="img/"+color_val+".jpg",
                     size_hint=(.25,.05),
                     pos_hint={'x':.7,'center_y':0.85})
# NOTE(review): bound to both on_press and on_release — the dropdown is asked
# to open twice per tap; confirm this is intentional.
graphButton.bind(on_press=graphDown.open)
graphButton.bind(on_release=graphDown.open)
# Hamburger-style menu button (icon font glyph "T").
b_sc4menu = Button(text="T",
                   font_name='IconsSouthSt.ttf',
                   font_size='20sp',
                   pos_hint={'x':0.02,'center_y': 0.96},
                   background_normal="img/"+color_val+".jpg",
                   size_hint=(.115,.09))
statsTitle = Button(text="User Garden",
                    #font_name = 'helv.otf',
                    pos_hint={'x':0,'center_y': 0.96},
                    background_normal="img/"+color_val+".jpg",
                    background_down="img/"+color_val+".jpg",
                    font_size='25sp',
                    size_hint=(1,.09))
btnHelpGraph = Button(text="[color=ffffff][b]What does the image mean?[/b][/color]",
                      markup=True,
                      background_normal="img/"+color_val+".jpg",
                      pos_hint={'x':0.15,'center_y': 0.06},
                      font_size='15sp',
                      size_hint=(0.7,.05))
# Help popup explaining how the garden image maps to positivity percentage.
popBoxLay = BoxLayout(orientation='vertical')
popuphelpme= Popup(title='',
                   separator_color=[0,0,1,0.5],
                   background='img/white.jpg',
                   content=popBoxLay,
                   auto_dismiss=False,
                   size_hint=(0.8, 0.6))
lblhelpme = Label(text="[color=000000] The image represents the positivity\n progress of you.\n The images has birds, flowers, petals\n (depends on your gender)\n that is total to 10% each, which will represent\n the positivity in terms of Daily, Weekly,\n Monthly and Annual progress.\n\n Ex: 8 birds/flowers/petals will represent\n 80% positivity.[/color]",
                  markup=True,
                  font_size='13sp')
btnhelpmeok = Button(text="Close",
                     background_normal="img/"+color_val+".jpg",
                     size_hint=(0.3, 0.1),
                     pos_hint={'x':0.355,'center_y': 0.01},
                     )
def helpme(self, *args):
    """Fill the help popup with its label and close button, then show it."""
    for child in (lblhelpme, btnhelpmeok):
        popBoxLay.add_widget(child)
    popuphelpme.open()
def closehelpme(self, *args):
    """Empty the help popup's content box and dismiss the popup."""
    for child in (lblhelpme, btnhelpmeok):
        popBoxLay.remove_widget(child)
    popuphelpme.dismiss()
# Wire up the help popup open/close handlers.
btnHelpGraph.bind(on_press=helpme)
btnhelpmeok.bind(on_press=closehelpme)
#DayWidget
# Daily view selectors: day / year / month dropdowns used by dGraphPlot.
dropdownDay = DropDown()
btnd1 = Button(text='[color=ffffff]1[/color]',
               #font_name = 'helv.otf',
               markup=True,
               font_size='12sp',
               background_normal="img/"+color_val+".jpg",
               size_hint=(1,None),
               height=50)
btnd1.bind(on_release=lambda btn: dropdownDay.select(btn.text))
btnd2 = Button(text='[color=ffffff]2[/color]',
               #font_name = 'helv.otf',
               markup=True,
               font_size='12sp',
               background_normal="img/"+color_val+".jpg",
               size_hint=(1,None),
               height=50)
btnd2.bind(on_release=lambda btn: dropdownDay.select(btn.text))
btnDayDay= Button(text='[color=ffffff]Day v[/color]',
                  markup=True,
                  #font_name = 'helv.otf',
                  font_size='15sp',
                  background_normal="img/"+color_val+".jpg",
                  size_hint=(.13,.05),
                  pos_hint={'x':0.37,'center_y': 0.85})
dropdownYearDay=DropDown()
#dropdownYearDay.bind(on_select=lambda instance, x: setattr(btndy1, 'text', x))
btndy1 = Button(text='[color=ffffff]2016[/color]',
                #font_name = 'helv.otf',
                markup=True,
                font_size='12sp',
                background_normal="img/"+color_val+".jpg",
                size_hint=(1,None),
                height=50)
# NOTE(review): btndy1's select binding is commented out, so choosing 2016
# never fires dropdownYearDay.select — confirm whether this is intended.
#btndy1.bind(on_release=lambda btn: dropdownYearDay.select(btn.text))
btndy2 = Button(text='[color=ffffff]2017[/color]',
                # font_name = 'helv.otf',
                markup=True,
                font_size='12sp',
                background_normal="img/"+color_val+".jpg",
                size_hint=(1,None),
                height=50)
btndy2.bind(on_release=lambda btn: dropdownYearDay.select(btn.text))
btnYearDay= Button(text='[color=ffffff]Year v[/color]',
                   markup=True,
                   #font_name = 'helv.otf',
                   font_size='12sp',
                   background_normal="img/"+color_val+".jpg",
                   size_hint=(.13,.05),
                   pos_hint={'x':0.05,'center_y': 0.85})
dropdownYearMonth=DropDown()
btnmy11 = Button(text='[color=ffffff]Jan[/color]',
                 #font_name = 'helv.otf',
                 markup=True,
                 font_size='12sp',
                 background_normal="img/"+color_val+".jpg",
                 size_hint=(1,None),
                 height=50)
btnmy11.bind(on_release=lambda btn: dropdownYearMonth.select(btn.text))
btnmy22 = Button(text='[color=ffffff]Feb[/color]',
                 #font_name = 'helv.otf',
                 markup=True,
                 font_size='12sp',
                 background_normal="img/"+color_val+".jpg",
                 size_hint=(1,None),
                 height=50)
btnmy22.bind(on_release=lambda btn: dropdownYearMonth.select(btn.text))
btnMonthDay= Button(text='[color=ffffff]Month v[/color]',
                    markup=True,
                    # font_name = 'helv.otf',
                    font_size='15sp',
                    background_normal="img/"+color_val+".jpg",
                    size_hint=(.15,.05),
                    pos_hint={'x':0.20,'center_y': 0.85})
# Open each dropdown from its trigger button (press and release both bound).
btnDayDay.bind(on_press=dropdownDay.open)
btnDayDay.bind(on_release=dropdownDay.open)
btnYearDay.bind(on_press=dropdownYearDay.open)
btnYearDay.bind(on_release=dropdownYearDay.open)
btnMonthDay.bind(on_press=dropdownYearMonth.open)
btnMonthDay.bind(on_release=dropdownYearMonth.open)
#WeekWidgets
# Weekly view selectors: start-day / year / month dropdowns used by wGraphPlot.
dropdownWeekDay = DropDown()
btnwd1 = Button(text='[color=ffffff]1[/color]',
                #font_name = 'helv.otf',
                markup=True,
                font_size='12sp',
                background_normal="img/"+color_val+".jpg",
                size_hint=(1,None),
                height=50)
btnwd1.bind(on_release=lambda btn: dropdownWeekDay.select(btn.text))
btnWeekDay= Button(text='[color=ffffff]Day v[/color]',
                   markup=True,
                   #font_name = 'helv.otf',
                   font_size='15sp',
                   background_normal="img/"+color_val+".jpg",
                   size_hint=(.13,.05),
                   pos_hint={'x':0.37,'center_y': 0.85})
dropdownWeekYear=DropDown()
btnwy1 = Button(text='[color=ffffff]2017[/color]',
                #font_name = 'helv.otf',
                markup=True,
                font_size='12sp',
                background_normal="img/"+color_val+".jpg",
                size_hint=(1,None),
                height=50)
btnwy1.bind(on_release=lambda btn: dropdownWeekYear.select(btn.text))
btnWeekYear= Button(text='[color=ffffff]Year v[/color]',
                    markup=True,
                    #font_name = 'helv.otf',
                    font_size='12sp',
                    background_normal="img/"+color_val+".jpg",
                    size_hint=(.13,.05),
                    pos_hint={'x':0.05,'center_y': 0.85})
dropdownWeekMonth=DropDown()
btnwm1 = Button(text='[color=ffffff]Feb[/color]',
                #font_name = 'helv.otf',
                markup=True,
                font_size='12sp',
                background_normal="img/"+color_val+".jpg",
                size_hint=(1,None),
                height=50)
btnwm1.bind(on_release=lambda btn: dropdownWeekMonth.select(btn.text))
btnWeekMonth= Button(text='[color=ffffff]Month v[/color]',
                     markup=True,
                     #font_name = 'helv.otf',
                     font_size='15sp',
                     background_normal="img/"+color_val+".jpg",
                     size_hint=(.15,.05),
                     pos_hint={'x':0.20,'center_y': 0.85})
# Open each weekly dropdown from its trigger button.
btnWeekDay.bind(on_press=dropdownWeekDay.open)
btnWeekDay.bind(on_release=dropdownWeekDay.open)
btnWeekYear.bind(on_press=dropdownWeekYear.open)
btnWeekYear.bind(on_release=dropdownWeekYear.open)
btnWeekMonth.bind(on_press=dropdownWeekMonth.open)
btnWeekMonth.bind(on_release=dropdownWeekMonth.open)
#MonthWidgets
# Monthly view selectors: month / year dropdowns used by mGraph.
dropdownMonth = DropDown()
btnmm1 = Button(text='[color=ffffff]Jul[/color]',
                # font_name = 'helv.otf',
                markup=True,
                font_size='12sp',
                background_normal="img/"+color_val+".jpg",
                size_hint=(1,None),
                height=50)
btnmm1.bind(on_release=lambda btn: dropdownMonth.select(btn.text))
btnmm2 = Button(text='[color=ffffff]Jun[/color]',
                # font_name = 'helv.otf',
                markup=True,
                font_size='12sp',
                background_normal="img/"+color_val+".jpg",
                size_hint=(1,None),
                height=50)
btnmm2.bind(on_release=lambda btn: dropdownMonth.select(btn.text))
dropdownMonthYear = DropDown()
btnmy1 = Button(text='[color=ffffff]2017[/color]',
                ## font_name = 'helv.otf',
                markup=True,
                font_size='12sp',
                background_normal="img/"+color_val+".jpg",
                size_hint=(1,None),
                height=50)
btnmy1.bind(on_release=lambda btn: dropdownMonthYear.select(btn.text))
btnmy2 = Button(text='[color=ffffff]2016[/color]',
                # font_name = 'helv.otf',
                markup=True,
                font_size='12sp',
                background_normal="img/"+color_val+".jpg",
                size_hint=(1,None),
                height=50)
btnmy2.bind(on_release=lambda btn: dropdownMonthYear.select(btn.text))
btnMon1= Button(text='[color=ffffff]Year v[/color]',
                markup=True,
                #font_name = 'helv.otf',
                font_size='12sp',
                background_normal="img/"+color_val+".jpg",
                size_hint=(.25,.05),
                pos_hint={'x':0.05,'center_y': 0.85})
btnMon2= Button(text='[color=ffffff]Month v[/color]',
                markup=True,
                # font_name = 'helv.otf',
                font_size='12sp',
                background_normal="img/"+color_val+".jpg",
                size_hint=(.25,.05),
                pos_hint={'x':0.35,'center_y': 0.85})
# NOTE(review): btnMon1 ("Year v") opens dropdownMonth (month values) and
# btnMon2 ("Month v") opens dropdownMonthYear (year values) — the pairing
# looks swapped; confirm against the handlers that consume the selections.
btnMon1.bind(on_press=dropdownMonth.open)
btnMon1.bind(on_release=dropdownMonth.open)
btnMon2.bind(on_press=dropdownMonthYear.open)
btnMon2.bind(on_release=dropdownMonthYear.open)
#annual
# Annual view selector: a single year dropdown used by aGraph.
dropdownAnnual=DropDown()
btnann1 = Button(text='[color=ffffff]1988[/color]',
                 # font_name = 'helv.otf',
                 markup=True,
                 font_size='12sp',
                 background_normal="img/"+color_val+".jpg",
                 size_hint=(1,None),
                 height=50)
btnann1.bind(on_release=lambda btn: dropdownAnnual.select(btn.text))
btnAnnualYear= Button(text='[color=ffffff]Year v[/color]',
                      markup=True,
                      # font_name = 'helv.otf',
                      font_size='15sp',
                      background_normal="img/"+color_val+".jpg",
                      size_hint=(.15,.05),
                      pos_hint={'x':0.05,'center_y': 0.85})
btnAnnualYear.bind(on_press=dropdownAnnual.open)
btnAnnualYear.bind(on_release=dropdownAnnual.open)
def addWidgetsHis():
    """Attach every history-selector button to the screen (root4) and fill
    each dropdown with its option buttons.  Widget add-order is preserved
    because it determines draw order on the FloatLayout."""
    # Annual selector.
    root4.add_widget(btnAnnualYear)
    dropdownAnnual.add_widget(btnann1)
    # Weekly selectors.
    root4.add_widget(btnWeekDay)
    dropdownWeekDay.add_widget(btnwd1)
    root4.add_widget(btnWeekYear)
    dropdownWeekYear.add_widget(btnwy1)
    root4.add_widget(btnWeekMonth)
    dropdownWeekMonth.add_widget(btnwm1)
    # Monthly selectors.
    root4.add_widget(btnMon1)
    root4.add_widget(btnMon2)
    dropdownMonth.add_widget(btnmm1)
    dropdownMonth.add_widget(btnmm2)
    dropdownMonthYear.add_widget(btnmy1)
    dropdownMonthYear.add_widget(btnmy2)
    # Daily selectors.
    root4.add_widget(btnDayDay)
    dropdownDay.add_widget(btnd1)
    dropdownDay.add_widget(btnd2)
    root4.add_widget(btnYearDay)
    dropdownYearDay.add_widget(btndy1)
    dropdownYearDay.add_widget(btndy2)
    root4.add_widget(btnMonthDay)
    dropdownYearMonth.add_widget(btnmy11)
    dropdownYearMonth.add_widget(btnmy22)
global d_now
# Today's date (YYYY-MM-DD) — the default window shown by the graph.
d_now = datetime.datetime.now().strftime('%Y-%m-%d')
d_now_s = d_now.split('-')
# Count today's diary entries; this sets the x-axis extent of the graph.
cursor.execute("SELECT COUNT(*) FROM user_info WHERE year=? AND month=? AND day=?",(d_now_s[0], d_now_s[1], d_now_s[2]))
global _x_max
global cb_ctr
_x_max = int(cursor.fetchone()[0])
#print _x_max
con.commit()
# Black bold tick labels / axis titles for the kivy-garden Graph.
graph_theme = {
    'label_options': {
        'color': rgb('000000'), # color of tick labels and titles
        'bold': True}}
global nGraph
# Polarity graph: x = entry index within the period, y = polarity in [0, 1].
nGraph = Graph(xlabel='Daily',
               ylabel='Polarity Values',
               x_ticks_minor=1,
               x_ticks_major=1,
               y_ticks_minor=0,
               y_ticks_major=0.20,
               y_grid_label=True,
               tick_color=(0,0,0,1),
               border_color=(0,0,0,1),
               x_grid_label=True,
               padding=5,
               x_grid=False,
               y_grid=False,
               xmin=0,
               xmax=_x_max,
               ymin=0,font_size='10sp',
               ymax=1,
               **graph_theme)
#start
def startGraphPlot(self, *args):
    """Plot the positive/negative polarity scores for today (or, when today
    has no entries, the most recent day that does), refresh the data grid
    and update the garden image plus the average-score label.

    Fixes over the original:
      * ``n_Graph`` typo (NameError once >= 37 entries existed);
      * removed the assignment to ``wplot3`` — its creation was commented
        out, so the assignment raised NameError on every call;
      * removed the ``dd3`` gap-plot bookkeeping that raised
        UnboundLocalError whenever pos == neg (its plot was never drawn);
      * iterate a copy of ``nGraph.plots`` — ``remove_plot`` mutates the
        list we were iterating.
    """
    # Rebuild the table header row.
    gridData.clear_widgets()
    gridData.add_widget(Button(text="[color=000000]Date[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
    gridData.add_widget(Button(text="[color=000000]Time[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
    gridData.add_widget(Button(text="[color=000000]P_POS[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
    gridData.add_widget(Button(text="[color=000000]P_NEG[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
    # Clear any plots left over from a previous view.
    if len(nGraph.plots) >= 1:
        for plots in list(nGraph.plots):
            nGraph.remove_plot(plots)
        nGraph._clear_buffer()
    d_now = datetime.datetime.now().strftime('%Y-%m-%d')
    d_now_s = d_now.split('-')
    cursor.execute("SELECT COUNT(*) FROM user_info")
    if int(cursor.fetchone()[0]) == 0:
        return  # no diary entries at all — nothing to plot
    con.commit()
    cursor.execute("SELECT COUNT(*) FROM user_info WHERE year=? AND month=? AND day=?",(d_now_s[0], d_now_s[1], d_now_s[2]))
    _x_max = int(cursor.fetchone()[0])
    minus = 1
    # Walk back one day at a time until a day with entries is found
    # (guaranteed to terminate: the table is known to be non-empty).
    while _x_max == 0:
        d_now = datetime.datetime.strftime(datetime.datetime.now() - timedelta(minus), '%Y-%m-%d')
        d_now_s = d_now.split('-')
        cursor.execute("SELECT COUNT(*) FROM user_info WHERE year=? AND month=? AND day=?",(d_now_s[0], d_now_s[1], d_now_s[2]))
        _x_max = int(cursor.fetchone()[0])
        minus += 1
    con.commit()
    nGraph.xmax = _x_max
    # Widen major-tick spacing as the entry count grows.
    if _x_max >= 12 and _x_max <= 24:
        nGraph.x_ticks_major = 3
    elif _x_max >= 25 and _x_max <= 36:
        nGraph.x_ticks_major = 6
    elif _x_max >= 37:
        # NOTE(review): *20 yields a tick interval far wider than the axis
        # (was also spelled ``n_Graph`` — a NameError); confirm intent.
        nGraph.x_ticks_major = int(_x_max*20)
    wplot = SmoothLinePlot(mode="points",color=rgb('76abe6'))   # positive line
    wplot2 = SmoothLinePlot(mode="points",color=rgb('ff6666'))  # negative line
    cursor.execute("SELECT pos,neg,time FROM user_info WHERE year=? AND month=? AND day=?",(d_now_s[0], d_now_s[1], d_now_s[2]))
    plot_pts = [(0,0)]
    plot_pts2 = [(0,0)]
    p_total = 0
    n_total = 0
    pola = cursor.fetchall()
    for m in range(0,len(pola),1):
        temp = float(pola[m][0])
        temp2 = float(pola[m][1])
        p_total += temp
        n_total += temp2
        # One grid row per entry: date, time, positive score, negative score.
        gridData.add_widget(Button(text="[color=000000]"+str(d_now_s[1])+' '+str(d_now_s[2])+' '+str(d_now_s[0])+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        gridData.add_widget(Button(text="[color=000000]"+str(pola[m][2])+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        gridData.add_widget(Button(text="[color=000000]"+str('%.2f' % float(pola[m][0]))+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        gridData.add_widget(Button(text="[color=000000]"+str('%.2f' % float(pola[m][1]))+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        plot_pts.append((m+1, temp))
        plot_pts2.append((m+1, temp2))
    # Average positivity as a percentage drives the garden image choice.
    petal_val = result_precision(float(p_total/len(pola)))
    petal_val = float(petal_val) * 100
    con.commit()
    cursor.execute("SELECT gender from user_account LIMIT 1")
    gender_t = cursor.fetchone()[0]
    con.commit()
    # Default to the flower set so gN is always bound (the original raised
    # NameError for any gender other than 'Male'/'Female').
    gN = 'wire' if gender_t == 'Male' else 'flower'
    # Bucket the percentage into one of eleven images (gaps such as
    # 10 < val < 11 intentionally leave the image unchanged, as before).
    if petal_val <=0.00:
        petalG.source="img/"+gN+"zero.jpg"
    if petal_val>= 1.00 and petal_val <= 10:
        petalG.source="img/"+gN+"one.jpg"
    if petal_val >= 11.00 and petal_val<= 20.00:
        petalG.source="img/"+gN+"two.jpg"
    if petal_val >= 21.00 and petal_val<= 30.00:
        petalG.source="img/"+gN+"three.jpg"
    if petal_val >= 31.00 and petal_val <= 40.00:
        petalG.source="img/"+gN+"four.jpg"
    if petal_val >= 41.00 and petal_val <= 50.00:
        petalG.source="img/"+gN+"five.jpg"
    if petal_val >= 51.00 and petal_val <= 60.00:
        petalG.source="img/"+gN+"six.jpg"
    if petal_val >= 61.00 and petal_val <= 70.00:
        petalG.source="img/"+gN+"seven.jpg"
    if petal_val >= 71.00 and petal_val < 80.00:
        petalG.source="img/"+gN+"eight.jpg"
    if petal_val >= 81.00 and petal_val <= 90.00:
        petalG.source="img/"+gN+"nine.jpg"
    if petal_val >= 91.00 and petal_val <= 100.00:
        petalG.source="img/"+gN+"whole.jpg"
    lblTemp.text = "[color=000000]Average:[/color]"+"[color=393d42]Positive Score:"+result_precision(float(p_total/len(pola)))+" Negative Score:"+str(abs(float(result_precision(float(n_total/len(pola)) ))))+"[/color]"
    wplot2.points = plot_pts2
    wplot.points = plot_pts
    nGraph.add_plot(wplot)
    nGraph.add_plot(wplot2)
#daily
def dailyGraphPlot(self, *args):
    """Switch the screen into daily mode: show the day selectors, reset the
    garden image to the 'zero' variant for the user's gender, and clear any
    existing plots from the graph.

    Fix: the original iterated ``nGraph.plots`` while calling
    ``remove_plot`` (which mutates that list), skipping alternate entries —
    which is why the removal loop was duplicated.  Iterating a copy clears
    everything in one pass.
    """
    dayWidgets()
    cursor.execute("SELECT gender from user_account LIMIT 1")
    gender_t = cursor.fetchone()[0]
    con.commit()
    if gender_t == 'Male':
        petalG.source="img/wirezero.jpg"
    if gender_t == 'Female':
        petalG.source="img/flowerzero.jpg"
    if len(nGraph.plots) >= 1:
        for plot in list(nGraph.plots):
            nGraph.remove_plot(plot)
        nGraph._clear_buffer()
def dGraphPlot(y, mon, d, *args):
    """Plot the polarity entries for one specific day.

    Parameters (all strings, matching the DB text columns):
      y   -- four-digit year, e.g. '2017'
      mon -- zero-padded month, e.g. '02'
      d   -- zero-padded day, e.g. '09'

    Fixes over the original:
      * ``wplot``/``wplot2`` were referenced but never created here (they
        were locals of startGraphPlot) — they are now built locally, mirroring
        startGraphPlot;
      * removed the ``wplot3``/``dd3`` gap bookkeeping (``wplot3`` never
        existed, and ``dd3`` was unbound whenever pos == neg);
      * plots are removed from a copy of ``nGraph.plots``.
    """
    if len(nGraph.plots) >= 1:
        for plots in list(nGraph.plots):
            nGraph.remove_plot(plots)
        nGraph._clear_buffer()
    nGraph.xlabel = 'n Logs per Day'
    nGraph.ylabel = 'Polarity Values'
    cursor.execute("SELECT COUNT(*) FROM user_info WHERE year=? AND month=? AND day=?",(y,mon,d))
    _x_max = int(cursor.fetchone()[0])
    con.commit()
    p_total = 0
    n_total = 0
    if _x_max != 0:
        gridData.clear_widgets()
        gridData.add_widget(Button(text="[color=000000]Date[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        gridData.add_widget(Button(text="[color=000000]Time[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        gridData.add_widget(Button(text="[color=000000]P_POS[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        gridData.add_widget(Button(text="[color=000000]P_NEG[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        nGraph.xmax = _x_max
        wplot = SmoothLinePlot(mode="points",color=rgb('76abe6'))   # positive
        wplot2 = SmoothLinePlot(mode="points",color=rgb('ff6666'))  # negative
        cursor.execute("SELECT pos,neg,time FROM user_info WHERE year=? AND month=? AND day=?",(y, mon, d))
        plot_pts = [(0,0)]
        plot_pts2 = [(0,0)]
        pola = cursor.fetchall()
        for m in range(0,len(pola),1):
            temp = float(pola[m][0])
            temp2 = float(pola[m][1])
            p_total += temp
            n_total += temp2
            gridData.add_widget(Button(text="[color=000000]"+str(mon)+' '+str(d)+' '+str(y)+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
            gridData.add_widget(Button(text="[color=000000]"+str(pola[m][2])+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
            gridData.add_widget(Button(text="[color=000000]"+str('%.2f' % float(pola[m][0]) )+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
            gridData.add_widget(Button(text="[color=000000]"+str('%.2f '% float(pola[m][1]) )+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
            plot_pts.append((m+1, temp))
            plot_pts2.append((m+1, temp2))
        # Average positivity percentage drives the garden image choice.
        petal_val = result_precision(float(p_total/len(pola)))
        petal_val = float(petal_val) * 100
        con.commit()
        cursor.execute("SELECT gender from user_account LIMIT 1")
        gender_t = cursor.fetchone()[0]
        con.commit()
        # Default to 'flower' so gN is always bound.
        gN = 'wire' if gender_t == 'Male' else 'flower'
        if petal_val <=0.00:
            petalG.source="img/"+gN+"zero.jpg"
        if petal_val>= 1.00 and petal_val <= 10:
            petalG.source="img/"+gN+"one.jpg"
        if petal_val >= 11.00 and petal_val<= 20.00:
            petalG.source="img/"+gN+"two.jpg"
        if petal_val >= 21.00 and petal_val<= 30.00:
            petalG.source="img/"+gN+"three.jpg"
        if petal_val >= 31.00 and petal_val <= 40.00:
            petalG.source="img/"+gN+"four.jpg"
        if petal_val >= 41.00 and petal_val <= 50.00:
            petalG.source="img/"+gN+"five.jpg"
        if petal_val >= 51.00 and petal_val <= 60.00:
            petalG.source="img/"+gN+"six.jpg"
        if petal_val >= 61.00 and petal_val <= 70.00:
            petalG.source="img/"+gN+"seven.jpg"
        if petal_val >= 71.00 and petal_val < 80.00:
            petalG.source="img/"+gN+"eight.jpg"
        if petal_val >= 81.00 and petal_val <= 90.00:
            petalG.source="img/"+gN+"nine.jpg"
        if petal_val >= 91.00 and petal_val <= 100.00:
            petalG.source="img/"+gN+"whole.jpg"
        lblTemp.text = "[color=000000]Average:[/color]"+"[color=393d42]Positive Text:"+result_precision(float(p_total/len(pola)) )+" Negative Text:"+result_precision(float(n_total/len(pola)) )+"[/color]"
        wplot.points = plot_pts
        wplot2.points = plot_pts2
        nGraph.add_plot(wplot)
        nGraph.add_plot(wplot2)
    else:
        pass  # no entries for that day — leave the current view untouched
def result_precision(number):
    """Return *number* formatted as a string with exactly two decimals."""
    return format(number, '.2f')
def weeklyGraphPlot(self, *args):
    """Switch the screen into weekly mode: show the week selectors, reset
    the garden image (bird/tulip 'zero' variant), and clear existing plots.

    Fix: the original removed plots while iterating the live
    ``nGraph.plots`` list and so needed two passes; iterating a copy clears
    everything in one pass.
    """
    weekWidgets()
    cursor.execute("SELECT gender from user_account LIMIT 1")
    gender_t = cursor.fetchone()[0]
    con.commit()
    if gender_t == 'Male':
        petalG.source="img/birdzero.jpg"
    if gender_t == 'Female':
        petalG.source="img/tulipzero.jpg"
    if len(nGraph.plots) >= 1:
        for plots in list(nGraph.plots):
            nGraph.remove_plot(plots)
        nGraph._clear_buffer()
def wGraphPlot(y, mon, d, *args):
    """Plot per-day average polarity for the seven days starting at y/mon/d.

    Parameters (strings matching the DB text columns):
      y   -- four-digit year, e.g. '2017'
      mon -- zero-padded month, e.g. '02'
      d   -- zero-padded start day, e.g. '27'

    Fixes over the original:
      * ``wplot``/``wplot2`` were never created in this scope (NameError at
        runtime) — built locally now;
      * the day counter was advanced with ``int(d)+1`` and so ran past the
        end of the month (querying non-existent days like '32'); real date
        arithmetic now rolls over month/year correctly;
      * plots are removed from a copy of ``nGraph.plots``; the unused
        ``plot_pts3`` list was dropped.
    """
    if len(nGraph.plots) >= 1:
        for plots in list(nGraph.plots):
            nGraph.remove_plot(plots)
        nGraph._clear_buffer()
    nGraph.xlabel = 'Weekly Entries'
    nGraph.ylabel = 'Polarity Values'
    nGraph.xmax = 7
    # One bucket of pos / neg scores per day of the week.
    dlist = [ [] for _ in range(7)]
    dlist2 = [ [] for _ in range(7)]
    cursor.execute("SELECT COUNT(*) FROM user_info WHERE year=? AND month=? AND day=?",(y,mon,d))
    v_count = int(cursor.fetchone()[0])
    con.commit()
    c_list = []   # per-day entry counts (as strings, for the grid)
    days = []     # the actual dates queried, for correct grid labels
    if v_count != 0:
        gridData.clear_widgets()
        gridData.add_widget(Button(text="[color=000000]Date[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        gridData.add_widget(Button(text="[color=000000]# of Entries[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        gridData.add_widget(Button(text="[color=000000]P_POS[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        gridData.add_widget(Button(text="[color=000000]P_NEG[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        start_day = datetime.date(int(y), int(mon), int(d))
        for i in range(0,7):
            cur = start_day + timedelta(i)   # rolls over month/year safely
            cursor.execute("SELECT pos,neg,time FROM user_info WHERE year=? AND month=? AND day=?",
                           ('%04d' % cur.year, '%02d' % cur.month, '%02d' % cur.day))
            pola = cursor.fetchall()
            days.append(cur)
            n_count = 0
            for ou in range(0,len(pola),1):
                dlist[i].append(float(pola[ou][0]))
                dlist2[i].append(float(pola[ou][1]))
                n_count += 1
            c_list.append(str(n_count))
        con.commit()
        wplot = SmoothLinePlot(mode="points",color=rgb('76abe6'))   # positive
        wplot2 = SmoothLinePlot(mode="points",color=rgb('ff6666'))  # negative
        plot_pts = [(0,0)]
        plot_pts2 = [(0,0)]
        for n in range(7):
            if len(dlist[n]) != 0:
                plot_pts.append( ((n+1), compute_values(sum(dlist[n]), len(dlist[n])) ) )
        for n in range(7):
            if len(dlist2[n]) != 0:
                plot_pts2.append( ((n+1), compute_values(sum(dlist2[n]), len(dlist2[n]))) )
        p_total = []
        n_total = []
        for pos_range in range(0,len(dlist)):
            if dlist[pos_range] != []:
                cur = days[pos_range]
                gridData.add_widget(Button(text="[color=000000]"+('%02d' % cur.month)+' '+('%02d' % cur.day)+' '+str(cur.year)+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
                gridData.add_widget(Button(text="[color=000000]"+str(c_list[pos_range])+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
                gridData.add_widget(Button(text="[color=000000]"+str('%.2f' % compute_values(sum(dlist[pos_range]), len(dlist[pos_range])) )+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
                gridData.add_widget(Button(text="[color=000000]"+str('%.2f '% compute_values(sum(dlist2[pos_range]), len(dlist2[pos_range])) )+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
                p_total.append( compute_values(sum(dlist[pos_range]), len(dlist[pos_range])) )
                n_total.append( compute_values(sum(dlist2[pos_range]), len(dlist2[pos_range])) )
        wplot.points = plot_pts
        wplot2.points = plot_pts2
        nGraph.add_plot(wplot)
        nGraph.add_plot(wplot2)
        # Weekly positivity percentage drives the garden image choice.
        petal_val = compute_values(sum(p_total), len(p_total))
        petal_val = float(petal_val) * 100
        con.commit()
        cursor.execute("SELECT gender from user_account LIMIT 1")
        gender_t = cursor.fetchone()[0]
        con.commit()
        # Default to 'tulip' so gN is always bound.
        gN = 'bird' if gender_t == 'Male' else 'tulip'
        if petal_val <=0.00:
            petalG.source="img/"+gN+"zero.jpg"
        if petal_val>= 1.00 and petal_val <= 10:
            petalG.source="img/"+gN+"one.jpg"
        if petal_val >= 11.00 and petal_val<= 20.00:
            petalG.source="img/"+gN+"two.jpg"
        if petal_val >= 21.00 and petal_val<= 30.00:
            petalG.source="img/"+gN+"three.jpg"
        if petal_val >= 31.00 and petal_val <= 40.00:
            petalG.source="img/"+gN+"four.jpg"
        if petal_val >= 41.00 and petal_val <= 50.00:
            petalG.source="img/"+gN+"five.jpg"
        if petal_val >= 51.00 and petal_val <= 60.00:
            petalG.source="img/"+gN+"six.jpg"
        if petal_val >= 61.00 and petal_val <= 70.00:
            petalG.source="img/"+gN+"seven.jpg"
        if petal_val >= 71.00 and petal_val < 80.00:
            petalG.source="img/"+gN+"eight.jpg"
        if petal_val >= 81.00 and petal_val <= 90.00:
            petalG.source="img/"+gN+"nine.jpg"
        if petal_val >= 91.00 and petal_val <= 100.00:
            petalG.source="img/"+gN+"whole.jpg"
        lblTemp.text = "[color=000000]Average:[/color]"+"[color=393d42]Positive Text:"+('%.2f '% compute_values(sum(p_total), len(p_total)))+" Negative Text:"+('%.2f '% compute_values(sum(n_total), len(n_total)) )+"[/color]"
def monthlyGraphPlot(self, *args):
    """Switch the screen into monthly mode: show the month selectors, reset
    the garden image (bird/tulip 'zero' variant), and clear existing plots.

    Fix: iterate a copy of ``nGraph.plots`` — ``remove_plot`` mutates the
    list, which is why the original looped twice.
    """
    monthWidgets()
    cursor.execute("SELECT gender from user_account LIMIT 1")
    gender_t = cursor.fetchone()[0]
    con.commit()
    if gender_t == 'Male':
        petalG.source="img/birdzero.jpg"
    if gender_t == 'Female':
        petalG.source="img/tulipzero.jpg"
    if len(nGraph.plots) >= 1:
        for plots in list(nGraph.plots):
            nGraph.remove_plot(plots)
        nGraph._clear_buffer()
def mGraph(mon, y, *args):
    """Plot per-day average polarity for every day of month ``mon`` of
    year ``y`` (both zero-padded strings matching the DB text columns).

    Fixes over the original:
      * ``wplot``/``wplot2`` were never created in this scope (NameError at
        runtime) — built locally now;
      * dropped the unused ``plot_pts3`` list;
      * plots are removed from a copy of ``nGraph.plots``;
      * the per-iteration ``con.commit()`` is hoisted out of the query loop.
    """
    if len(nGraph.plots) >= 1:
        for plots in list(nGraph.plots):
            nGraph.remove_plot(plots)
        nGraph._clear_buffer()
    nGraph.xlabel = 'Monthly Entries'
    nGraph.ylabel = 'Polarity Values'
    nGraph.xmax = 31
    # One bucket of pos / neg scores per day of the month.
    dlist = [ [] for _ in range(31)]
    dlist2 = [ [] for _ in range(31)]
    cursor.execute("SELECT COUNT(*) FROM user_info WHERE year=? AND month=?",(y,mon))
    v_count = int(cursor.fetchone()[0])
    con.commit()
    c_list = []   # per-day entry counts (strings, for the grid)
    days = []     # zero-padded day strings, for the grid labels
    if v_count != 0:
        gridData.clear_widgets()
        gridData.add_widget(Button(text="[color=000000]Date[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        gridData.add_widget(Button(text="[color=000000]# of Entries[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        gridData.add_widget(Button(text="[color=000000]P_POS[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        gridData.add_widget(Button(text="[color=000000]P_NEG[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
        # Days past the month's real end simply match no rows.
        for i in range(0,31):
            da = str(format((i+1), '02d'))
            cursor.execute("SELECT pos,neg,time FROM user_info WHERE year=? AND month=? AND day=?",(y,mon,da))
            pola = cursor.fetchall()
            days.append(da)
            n_count = 0
            for ou in range(0,len(pola),1):
                dlist[i].append(float(pola[ou][0]))
                dlist2[i].append(float(pola[ou][1]))
                n_count += 1
            c_list.append(str(n_count))
        con.commit()
        wplot = SmoothLinePlot(mode="points",color=rgb('76abe6'))   # positive
        wplot2 = SmoothLinePlot(mode="points",color=rgb('ff6666'))  # negative
        plot_pts = [(0,0)]
        plot_pts2 = [(0,0)]
        for n in range(31):
            if len(dlist[n]) != 0:
                plot_pts.append( ((n+1), compute_values(sum(dlist[n]), len(dlist[n])) ) )
        for n in range(31):
            if len(dlist2[n]) != 0:
                plot_pts2.append( ((n+1), compute_values(sum(dlist2[n]), len(dlist2[n]))) )
        p_total = []
        n_total = []
        for pos_range in range(0,len(dlist)):
            if dlist[pos_range] != []:
                gridData.add_widget(Button(text="[color=000000]"+str(mon)+' '+str(format((int(days[pos_range])), '02d'))+' '+str(y)+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
                gridData.add_widget(Button(text="[color=000000]"+str(c_list[pos_range])+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
                gridData.add_widget(Button(text="[color=000000]"+str('%.2f' % compute_values(sum(dlist[pos_range]), len(dlist[pos_range])) )+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
                gridData.add_widget(Button(text="[color=000000]"+str('%.2f '% compute_values(sum(dlist2[pos_range]), len(dlist2[pos_range])) )+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
                p_total.append( compute_values(sum(dlist[pos_range]), len(dlist[pos_range])) )
                n_total.append( compute_values(sum(dlist2[pos_range]), len(dlist2[pos_range])) )
        wplot.points = plot_pts
        wplot2.points = plot_pts2
        nGraph.add_plot(wplot)
        nGraph.add_plot(wplot2)
        # Monthly positivity percentage drives the garden image choice.
        petal_val = compute_values(sum(p_total), len(p_total))
        petal_val = float(petal_val) * 100
        con.commit()
        cursor.execute("SELECT gender from user_account LIMIT 1")
        gender_t = cursor.fetchone()[0]
        con.commit()
        # Default to 'tulip' so gN is always bound.
        gN = 'bird' if gender_t == 'Male' else 'tulip'
        if petal_val <=0.00:
            petalG.source="img/"+gN+"zero.jpg"
        if petal_val>= 1.00 and petal_val <= 10:
            petalG.source="img/"+gN+"one.jpg"
        if petal_val >= 11.00 and petal_val<= 20.00:
            petalG.source="img/"+gN+"two.jpg"
        if petal_val >= 21.00 and petal_val<= 30.00:
            petalG.source="img/"+gN+"three.jpg"
        if petal_val >= 31.00 and petal_val <= 40.00:
            petalG.source="img/"+gN+"four.jpg"
        if petal_val >= 41.00 and petal_val <= 50.00:
            petalG.source="img/"+gN+"five.jpg"
        if petal_val >= 51.00 and petal_val <= 60.00:
            petalG.source="img/"+gN+"six.jpg"
        if petal_val >= 61.00 and petal_val <= 70.00:
            petalG.source="img/"+gN+"seven.jpg"
        if petal_val >= 71.00 and petal_val < 80.00:
            petalG.source="img/"+gN+"eight.jpg"
        if petal_val >= 81.00 and petal_val <= 90.00:
            petalG.source="img/"+gN+"nine.jpg"
        if petal_val >= 91.00 and petal_val <= 100.00:
            petalG.source="img/"+gN+"whole.jpg"
        lblTemp.text = "[color=000000]Average:[/color]"+"[color=393d42]Positive Text:"+('%.2f '% compute_values(sum(p_total), len(p_total)))+" Negative Text:"+('%.2f '% compute_values(sum(n_total), len(n_total)) )+"[/color]"
def annualGraphPlot(self, *args):
    """Switch the screen into annual mode: show the year selector, reset
    the garden image (bird/tulip 'zero' variant), and clear existing plots.

    Fix: iterate a copy of ``nGraph.plots`` — ``remove_plot`` mutates the
    list, which is why the original looped twice.
    """
    annualWidgets()
    cursor.execute("SELECT gender from user_account LIMIT 1")
    gender_t = cursor.fetchone()[0]
    con.commit()
    if gender_t == 'Male':
        petalG.source="img/birdzero.jpg"
    if gender_t == 'Female':
        petalG.source="img/tulipzero.jpg"
    if len(nGraph.plots) >= 1:
        for plots in list(nGraph.plots):
            nGraph.remove_plot(plots)
        nGraph._clear_buffer()
def aGraph(y,*args):
if len(nGraph.plots) >= 1:
for plots in nGraph.plots:
nGraph.remove_plot(plots)
nGraph._clear_buffer()
nGraph.xlabel = 'Annual Entries'
nGraph.ylabel = 'Polarity Values'
nGraph.xmax = 12
p_total = 0
n_total = 0
d_now = datetime.datetime.now().strftime('%Y-%m-%d')
d_now_s = d_now.split('-')
dlist = [ [] for _ in range(12)]
dlist2 = [ [] for _ in range(12)]
#jlist = [[] for _ in range(n)]
#create multiple list
cursor.execute("SELECT COUNT(*) FROM user_info WHERE year="+str(y))
v_count = int(cursor.fetchone()[0])
#print 'eto {0}'.format(v_count)
con.commit()
m_list = []
days = []
temp_list = []
if v_count != 0 :
gridData.clear_widgets()
gridData.add_widget(Button(text="[color=000000]Date[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
gridData.add_widget(Button(text="[color=000000]# of Entries[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
gridData.add_widget(Button(text="[color=000000]P_POS[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
gridData.add_widget(Button(text="[color=000000]P_NEG[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
for m in range(12):
ma = str(format((m+1), '02d'))
m_list.append(ma)
for i in range(31):
da = str(format((i+1), '02d'))
cursor.execute("SELECT pos,neg,time FROM user_info WHERE year=? AND month=? AND day=?",(y,ma,da))
pola = cursor.fetchall()
for ou in range(0,len(pola),1):
temp = float(pola[ou][0])
temp2 = float(pola[ou][1])
temp_list.append(temp)
temp_list2.append(temp2)
t_Values = compute_values(sum(temp_list), len(temp_list))
t_Values2 = compute_values(sum(temp_list2), len(temp_list2))
dlist[m].append(t_Values)
dlist2[m].append(t_Values2)
t_Values = 0
t_Values2 = 0
temp_list2 = []
temp_list = []
con.commit()
con.commit()
plot_pts = [(0,0)]
plot_pts2 = [(0,0)]
plot_pts3 = [(0,0)]
for n in range(12):
if len(dlist[n]) != 0:
plot_pts.append( ((n+1), compute_values(sum(dlist[n]), len(dlist[n])) ) )
wplot.points = plot_pts
for n in range(12):
if len(dlist2[n]) != 0:
plot_pts2.append( ((n+1), compute_values(sum(dlist2[n]), len(dlist2[n]))) )
#d = str(int(d)-len(dlist))
p_total = []
n_total = []
for pos_range in range(0,len(dlist)):
if dlist[pos_range] != []:
gridData.add_widget(Button(text="[color=000000]"+str(m_list[pos_range])+' '+' '+str(y)+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
gridData.add_widget(Button(text="[color=000000]"+str(m_list[pos_range])+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
gridData.add_widget(Button(text="[color=000000]"+str('%.2f' % compute_values(sum(dlist[pos_range]), len(dlist[pos_range])) )+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
gridData.add_widget(Button(text="[color=000000]"+str('%.2f '% compute_values(sum(dlist2[pos_range]), len(dlist2[pos_range])) )+"[/color]",font_size='9sp',markup=True,background_color=[0,0,0,0]))
p_total.append( compute_values(sum(dlist[pos_range]), len(dlist[pos_range])) )
n_total.append( compute_values(sum(dlist2[pos_range]), len(dlist2[pos_range])) )
wplot2.points = plot_pts2
nGraph.add_plot(wplot)
nGraph.add_plot(wplot2)
petal_val = compute_values(sum(p_total), len(p_total))
petal_val = float(petal_val) * 100
con.commit()
cursor.execute("SELECT gender from user_account LIMIT 1")
gender_t = cursor.fetchone()[0]
con.commit()
if gender_t == 'Male':
gN = 'bird'
if gender_t == 'Female':
gN = 'tulip'
if petal_val <=0.00:
petalG.source="img/"+gN+"zero.jpg"
if petal_val>= 1.00 and petal_val <= 10:
petalG.source="img/"+gN+"one.jpg"
if petal_val >= 11.00 and petal_val<= 20.00:
petalG.source="img/"+gN+"two.jpg"
if petal_val >= 21.00 and petal_val<= 30.00:
petalG.source="img/"+gN+"three.jpg"
if petal_val >= 31.00 and petal_val <= 40.00:
petalG.source="img/"+gN+"four.jpg"
if petal_val >= 41.00 and petal_val <= 50.00:
petalG.source="img/"+gN+"five.jpg"
if petal_val >= 51.00 and petal_val <= 60.00:
petalG.source="img/"+gN+"six.jpg"
if petal_val >= 61.00 and petal_val <= 70.00:
petalG.source="img/"+gN+"seven.jpg"
if petal_val >= 71.00 and petal_val < 80.00:
petalG.source="img/"+gN+"eight.jpg"
if petal_val >= 81.00 and petal_val <= 90.00:
petalG.source="img/"+gN+"nine.jpg"
if petal_val >= 91.00 and petal_val <= 100.00:
petalG.source="img/"+gN+"whole.jpg"
lblTemp.text = "[color=000000]Average:[/color]"+"[color=393d42]Positive Text:"+('%.2f '% compute_values(sum(p_total), len(p_total)))+" Negative Text:"+('%.2f '% compute_values(sum(n_total), len(n_total)) )+"[/color]"
def compute_values(a, b):
    """Return float(a)/b, or 0 when the numerator (or denominator) is 0.

    Callers pass a = sum(values) and b = len(values); for an empty list
    both are 0, so returning 0 early also avoids ZeroDivisionError.  The
    extra b == 0 guard makes the function safe even when a != 0.
    """
    if a == 0 or b == 0:
        return 0
    return float(a) / b
#daily Widgets + buttons on Dropdown
def dayWidgets():
    """Attach the day/year/month selector buttons for the daily view."""
    for widget in (btnDayDay, btnYearDay, btnMonthDay):
        root4.add_widget(widget)
def daily_day(self, *args):
    """Rebuild the daily-view day dropdown from the distinct days on record."""
    dropdownDay.clear_widgets()
    cursor.execute('SELECT DISTINCT day FROM user_info')
    for row in cursor.fetchall():
        value = str(row[0])
        option = Button(text='[color=000000]' + value + '[/color]',
                        markup=True,
                        font_size='12sp',
                        background_normal="ffffff",
                        size_hint=(1, None),
                        height=50)
        # lambda parameter shadows the loop variable, so each handler
        # receives the button that fired the event.
        option.bind(on_release=lambda option: dropdownDay.select(option.text))
        option.bind(on_release=partial(dChange_day, value))
        dropdownDay.add_widget(option)
    con.commit()
def dChange_day(dD, *args):
    """Record the chosen day and refresh the daily graph."""
    global dDay
    dDay = str(dD)
    btnDayDay.text = dDay
    dGraphPlot(dYear, dMonth, dDay)
def daily_year(self, *args):
    """Rebuild the daily-view year dropdown from the recorded years."""
    dropdownYearDay.clear_widgets()
    cursor.execute('SELECT DISTINCT year FROM user_info')
    for row in cursor.fetchall():
        value = str(row[0])
        option = Button(text='[color=000000]' + value + '[/color]',
                        markup=True,
                        font_size='12sp',
                        background_normal="ffffff",
                        size_hint=(1, None),
                        height=50)
        option.bind(on_release=lambda option: dropdownYearDay.select(option.text))
        option.bind(on_release=partial(dChange_year, value))
        dropdownYearDay.add_widget(option)
    con.commit()
def dChange_year(dY, *args):
    """Record the chosen year and refresh the daily graph."""
    global dYear
    dYear = str(dY)
    btnYearDay.text = dY
    dGraphPlot(dYear, dMonth, dDay)
def daily_month(self, *args):
    """Rebuild the daily-view month dropdown from the recorded months."""
    dropdownYearMonth.clear_widgets()
    cursor.execute('SELECT DISTINCT month FROM user_info')
    for row in cursor.fetchall():
        value = str(row[0])
        option = Button(text='[color=000000]' + value + '[/color]',
                        markup=True,
                        font_size='12sp',
                        background_normal="ffffff",
                        size_hint=(1, None),
                        height=50)
        option.bind(on_release=lambda option: dropdownYearMonth.select(option.text))
        option.bind(on_release=partial(dChange_month, value))
        dropdownYearMonth.add_widget(option)
    con.commit()
def dChange_month(dM, *args):
    """Record the chosen month and refresh the daily graph."""
    global dMonth
    dMonth = str(dM)
    btnMonthDay.text = dM
    dGraphPlot(dYear, dMonth, dDay)
#weekly Widgets + buttons on Dropdown
def weekWidgets():
    """Attach the day/year/month selector buttons for the weekly view."""
    for widget in (btnWeekDay, btnWeekYear, btnWeekMonth):
        root4.add_widget(widget)
def weekly_day(self, *args):
    """Rebuild the weekly-view day dropdown from the recorded days."""
    dropdownWeekDay.clear_widgets()
    cursor.execute('SELECT DISTINCT day FROM user_info')
    for row in cursor.fetchall():
        value = str(row[0])
        option = Button(text='[color=000000]' + value + '[/color]',
                        markup=True,
                        font_size='12sp',
                        background_normal="ffffff",
                        size_hint=(1, None),
                        height=50)
        option.bind(on_release=lambda option: dropdownWeekDay.select(option.text))
        option.bind(on_release=partial(wChange_day, value))
        dropdownWeekDay.add_widget(option)
    con.commit()
def wChange_day(wD, *args):
    """Record the chosen day and refresh the weekly graph."""
    global dDay
    dDay = str(wD)
    btnWeekDay.text = wD
    wGraphPlot(dYear, dMonth, dDay)
def weekly_year(self, *args):
    """Rebuild the weekly-view year dropdown from the recorded years."""
    dropdownWeekYear.clear_widgets()
    cursor.execute('SELECT DISTINCT year FROM user_info')
    for row in cursor.fetchall():
        value = str(row[0])
        option = Button(text='[color=000000]' + value + '[/color]',
                        markup=True,
                        font_size='12sp',
                        background_normal="ffffff",
                        size_hint=(1, None),
                        height=50)
        option.bind(on_release=lambda option: dropdownWeekYear.select(option.text))
        option.bind(on_release=partial(wChange_year, value))
        dropdownWeekYear.add_widget(option)
    con.commit()
def wChange_year(wY, *args):
    """Record the chosen year and refresh the weekly graph."""
    global dYear
    dYear = str(wY)
    btnWeekYear.text = wY
    wGraphPlot(dYear, dMonth, dDay)
def weekly_month(self, *args):
    """Rebuild the weekly-view month dropdown from the recorded months."""
    dropdownWeekMonth.clear_widgets()
    cursor.execute('SELECT DISTINCT month FROM user_info')
    for row in cursor.fetchall():
        value = str(row[0])
        option = Button(text='[color=000000]' + value + '[/color]',
                        markup=True,
                        font_size='12sp',
                        background_normal="ffffff",
                        size_hint=(1, None),
                        height=50)
        option.bind(on_release=lambda option: dropdownWeekMonth.select(option.text))
        option.bind(on_release=partial(wChange_month, value))
        dropdownWeekMonth.add_widget(option)
    con.commit()
def wChange_month(wM, *args):
    """Record the chosen month and refresh the weekly graph."""
    global dMonth
    dMonth = str(wM)
    btnWeekMonth.text = wM
    wGraphPlot(dYear, dMonth, dDay)
#Monthly Widgets + buttons on Dropdown
def monthWidgets():
    """Attach the two selector buttons for the monthly view."""
    for widget in (btnMon1, btnMon2):
        root4.add_widget(widget)
def monthly_month(self,*args):
    """Populate the monthly-view first dropdown (attached to btnMon1).

    NOTE(review): despite the function name, the query selects DISTINCT
    *year* values; btnMon1 is reset to "Year v" in removeDWM, which is
    consistent with listing years, but mChange_month then stores the pick
    into the dMonth global — confirm whether the query or the global is
    the intended one.
    """
    dropdownMonth.clear_widgets()
    cursor.execute('SELECT DISTINCT year FROM user_info')
    month_values = cursor.fetchall()
    for month in month_values:
        month_t_btn = Button(text='[color=000000]'+str(month[0])+'[/color]',
                        markup=True,
                        font_size='12sp',
                        background_normal="ffffff",
                        size_hint=(1,None),
                        height=50)
        month_t_btn.bind(on_release=lambda month_t_btn: dropdownMonth.select(month_t_btn.text))
        month_t_btn.bind(on_release=partial(mChange_month, str(month[0]) ))
        dropdownMonth.add_widget(month_t_btn)
    con.commit()
def mChange_month(mM,*args):
    # NOTE(review): receives a value selected from a DISTINCT-year query
    # (see monthly_month) but stores it into the dMonth global — verify
    # whether dYear was intended here.
    global dMonth
    dMonth = str(mM)
    btnMon1.text = mM
    mGraph(dYear,dMonth)
def monthly_year(self,*args):
    """Populate the monthly-view second dropdown (attached to btnMon2).

    NOTE(review): despite the function name, the query selects DISTINCT
    *month* values; btnMon2 is reset to "Month v" in removeDWM, which is
    consistent with listing months, but mChange_year then stores the pick
    into the dYear global — confirm whether the query or the global is
    the intended one.
    """
    dropdownMonthYear.clear_widgets()
    cursor.execute('SELECT DISTINCT month FROM user_info')
    year_values = cursor.fetchall()
    for year in year_values:
        year_t_btn = Button(text='[color=000000]'+str(year[0])+'[/color]',
                        markup=True,
                        font_size='12sp',
                        background_normal="ffffff",
                        size_hint=(1,None),
                        height=50)
        year_t_btn.bind(on_release=lambda year_t_btn: dropdownMonthYear.select(year_t_btn.text))
        year_t_btn.bind(on_release=partial(mChange_year, str(year[0])))
        dropdownMonthYear.add_widget(year_t_btn)
    con.commit()
def mChange_year(mY,*args):
    # NOTE(review): receives a value selected from a DISTINCT-month query
    # (see monthly_year) but stores it into the dYear global — verify
    # whether dMonth was intended here.
    global dYear
    dYear = str(mY)
    btnMon2.text = mY
    mGraph(dYear,dMonth)
#Yearly Widgets + buttons on Dropdown
def annualWidgets():
    """Attach the year selector button for the annual view."""
    root4.add_widget(btnAnnualYear)
def yearly_year(self, *args):
    """Rebuild the annual-view year dropdown from the recorded years."""
    dropdownAnnual.clear_widgets()
    cursor.execute('SELECT DISTINCT year FROM user_info')
    for row in cursor.fetchall():
        value = str(row[0])
        option = Button(text='[color=000000]' + value + '[/color]',
                        markup=True,
                        font_size='12sp',
                        background_normal="ffffff",
                        size_hint=(1, None),
                        height=50)
        option.bind(on_release=lambda option: dropdownAnnual.select(option.text))
        option.bind(on_release=partial(yChange_year, value))
        dropdownAnnual.add_widget(option)
    con.commit()
def yChange_year(yY, *args):
    """Record the chosen year and refresh the annual graph."""
    global dYear
    dYear = str(yY)
    btnAnnualYear.text = yY
    aGraph(dYear)
def rWidgets(self,*args):
    """Tear down the stats widgets, then re-attach the set that matches the
    current mode: garden image for Control == '0', data table + graph for
    Control == '1'.

    Fix: the consultant_account lookups are now guarded by the row count,
    so an empty table no longer raises TypeError on fetchone()[0] (the
    same guard screen6 already uses).  The Control query also uses a bound
    parameter instead of string concatenation.
    """
    root4.remove_widget(petalG)
    root4.remove_widget(lblDescript)
    root4.remove_widget(scrollData)
    scrollData.remove_widget(gridData)
    boxStats.remove_widget(nGraph)
    t_consul = '0'
    cursor.execute("SELECT COUNT(*) FROM consultant_account")
    www = int(cursor.fetchone()[0])
    con.commit()
    if www != 0:
        cursor.execute("SELECT id from consultant_account")
        tmp_id = cursor.fetchone()[0]
        con.commit()
        cursor.execute("SELECT Control FROM consultant_account WHERE ID=?", (tmp_id,))
        t_consul = str(cursor.fetchone()[0])
        con.commit()
    if t_consul == '0':
        statsTitle.text = 'User Garden'
        root4.add_widget(petalG)
    if t_consul == '1':
        root4.add_widget(lblDescript)
        root4.add_widget(lblTemp)
        root4.add_widget(scrollData)
        scrollData.add_widget(gridData)
        # graph
        boxStats.add_widget(nGraph)
def removeDWM():
    """Detach every daily/weekly/monthly/annual selector widget and reset
    the selector button captions to their placeholder text."""
    # Annual selectors.
    root4.remove_widget(btnAnnualYear)
    dropdownAnnual.remove_widget(btnann1)
    btnAnnualYear.text = "Year v"
    # Weekly selectors.
    for parent, child in ((root4, btnWeekDay), (dropdownDay, btnwd1),
                          (root4, btnWeekYear), (dropdownYearDay, btnwy1),
                          (root4, btnWeekMonth), (dropdownYearMonth, btnwm1)):
        parent.remove_widget(child)
    btnWeekDay.text = "Day v"
    btnWeekYear.text = "Year v"
    btnWeekMonth.text = "Month v"
    # Monthly selectors.
    for parent, child in ((root4, btnMon1), (root4, btnMon2),
                          (dropdownMonth, btnmm1), (dropdownMonth, btnmm2),
                          (dropdownMonthYear, btnmy1), (dropdownMonthYear, btnmy2)):
        parent.remove_widget(child)
    btnMon1.text = "Year v"
    btnMon2.text = "Month v"
    # Daily selectors.
    for parent, child in ((root4, btnDayDay), (dropdownDay, btnd1),
                          (dropdownDay, btnd2), (root4, btnYearDay),
                          (dropdownYearDay, btndy1), (dropdownYearDay, btndy2),
                          (root4, btnMonthDay), (dropdownYearMonth, btnmy11),
                          (dropdownYearMonth, btnmy22)):
        parent.remove_widget(child)
    btnDayDay.text = "Day v"
    btnMonthDay.text = "Year v"
    btnYearDay.text = "Month v"
def removeDWM1(self, *args):
    """Event-handler wrapper around removeDWM()."""
    removeDWM()
def screen4():
    """Build the stats screen: chrome, graph-mode dropdown buttons, and
    either the garden image (user mode, Control == '0') or the data
    table/graph (consultant mode, Control == '1').

    Fix: consultant_account is only queried for id/Control when the table
    is non-empty, preventing a TypeError from fetchone() on a fresh
    database; screen6 already guards the same lookup.  The Control query
    uses a bound parameter instead of string concatenation.
    """
    sc4.add_widget(root4)
    root4.add_widget(frontBg4)
    root4.add_widget(statsTitle)
    root4.add_widget(b_sc4menu)
    root4.add_widget(btnHelpGraph)
    graphDown.add_widget(btnDaily)
    graphDown.add_widget(btnWeekly)
    graphDown.add_widget(btnMonthly)
    graphDown.add_widget(btnAnnual)
    root4.add_widget(graphButton)
    t_consul = '0'
    cursor.execute("SELECT COUNT(*) FROM consultant_account")
    www = int(cursor.fetchone()[0])
    con.commit()
    if www != 0:
        cursor.execute("SELECT id from consultant_account")
        tmp_id = cursor.fetchone()[0]
        con.commit()
        cursor.execute("SELECT Control FROM consultant_account WHERE ID=?", (tmp_id,))
        t_consul = str(cursor.fetchone()[0])
        con.commit()
    if t_consul == '0':
        statsTitle.text = 'User Garden'
        root4.add_widget(petalG)
    if t_consul == '1':
        root4.add_widget(lblDescript)
        root4.add_widget(lblTemp)
        root4.add_widget(scrollData)
        scrollData.add_widget(gridData)
        # graph
        boxStats.add_widget(nGraph)
#Screen5
def screen5(t_date,t_time,*args):
    """Build and show the detail view for one saved log entry.

    t_date -- 'YYYY-MM-DD' string identifying the entry's date.
    t_time -- time string (matches the stored '%I:%M %p' format)
              identifying the entry within that day.
    """
    #--sc5
    root5= FloatLayout(size_hint = (1.0, 1.0))
    frontBg5 = Image(pos= root.pos,
                     source="img/white.jpg",
                     allow_stretch=True,
                     keep_ratio=False)
    b_sc5menu = Button(text="[color=000000]T[/color]",
                        markup=True,
                        font_name='IconsSouthSt.ttf',
                        font_size='20sp',
                        pos_hint={'x':0.02,'center_y': 0.96},
                        background_normal="img/white.jpg",
                        size_hint=(.115,.08))
    recentTitle = Button(text="[color=000000]Logs History[/color]",
                       markup=True,
                       pos_hint={'x':0,'center_y': 0.96},
                       font_name='helv.otf',
                       background_normal="img/white.jpg",
                       background_down="img/white.jpg",
                       font_size='25sp',
                       size_hint=(1,.09))
    grid5_history=GridLayout(cols=1,
                       padding=10,
                       size_hint_y=None,
                       pos_hint={'center_x':.5, 'center_y':.48})
    # Let the grid grow with its children so the ScrollView can scroll it.
    grid5_history.bind(minimum_height=grid5_history.setter('height'))
    sv5_history=ScrollView(size_hint=(1,.75),
                          pos_hint={'center_x':.5, 'center_y':.5})
    sc5.add_widget(root5)
    root5.add_widget(frontBg5)
    root5.add_widget(recentTitle)
    root5.add_widget(b_sc5menu)
    root5.add_widget(sv5_history)
    sv5_history.add_widget(grid5_history)
    b_sc5menu.bind(on_press=callbackscmain)
    # added by joms
    grid5_history.clear_widgets()
    #con.commit()
    # Look up the entry text by its exact date components and time.
    dis_date = t_date.split('-')
    cursor.execute("SELECT info from user_info WHERE year=? AND month=? AND day=? AND time=?",(dis_date[0],dis_date[1],dis_date[2],t_time ) )
    wat = cursor.fetchall()[0][0]
    def formatItem(right):
        # Wrap the stored text to 50 columns for display.
        wrapped = fill(right, width=50)
        return '{0}'.format(wrapped)
    wat = formatItem(wat)
    btnn = Button(text="[color=000000]"+t_date+' '+t_time+"\n\n{0}[/color]".format(wat),
                 #background_normal='img/feedbord.png',
                 size_hint=(1,None),
                 font_size='13sp',
                 background_color=[0,0,0,0],
                 #font_name='helv.otf',
                 height=300,
                 markup=True)
    btnn2 = Button(text="",
                 size_hint=(1,None),
                 font_size='13sp',
                 background_color=[0,0,0,0],
                 #font_name='helv.otf',
                 height=200,
                 markup=True)
    btnn3 = Button(text="",
                 size_hint=(1,None),
                 font_size='13sp',
                 background_color=[0,0,0,0],
                 #font_name='helv.otf',
                 height=200,
                 markup=True)
    # Size the entry button roughly by text length; long entries also get
    # an extra spacer (btnn3) above them.
    if len(wat)>300:
        grid5_history.add_widget(btnn3)
    if len(wat)<300:
        btnn.height = 200
    if len(wat)<150:
        btnn.height = 150
    grid5_history.add_widget(btnn)
    grid5_history.add_widget(btnn2)
def callbacksc5(d, t, *args):
    """Swap to the detail screen for the log saved at date *d*, time *t*."""
    sm.transition = SwapTransition()
    sm.current = 'fifthscreen'
    screen5(d, t)
def getDB():
    """Fill the history feed with one button per saved log, newest first."""
    cursor.execute("SELECT * from user_info")
    rows = cursor.fetchall()
    cursor.execute("SELECT COUNT(*) from user_info")
    total = cursor.fetchone()

    def wrap_text(raw):
        # Wrap the stored text to 50 columns for the preview.
        return '{0}'.format(fill(raw, width=50))

    # Walk the rows from last to first so the newest entry is on top.
    for idx in range(total[0], 0, -1):
        row = rows[idx - 1]
        preview = wrap_text(str(row[5]))
        if len(preview) >= 40:
            preview = preview[:36] + '...'
        else:
            preview = preview + '....'
        entry_date = str(row[1]) + '-' + str(row[2]) + '-' + str(row[3])
        caption = entry_date + (' ' * 40) + str(row[4]) + '\n' + str(preview)
        entry_btn = Button(text="[color=000000]{0}[/color]".format(caption),
                           background_normal='img/feedbord.png',
                           size_hint=(1, None),
                           font_size='13sp',
                           height=100,
                           markup=True)
        entry_btn.bind(on_press=partial(callbacksc5, entry_date, row[4]))
        grid_history.add_widget(entry_btn)
def screenmain():
    """Compose the logs-history main screen and load the feed."""
    scmain.add_widget(rootmain)
    for widget in (frontBgmain, logsTitle, sv_history):
        rootmain.add_widget(widget)
    sv_history.add_widget(grid_history)
    rootmain.add_widget(b_scmainmenu)
    getDB()
def addbatt():
    """Show the status-bar widgets (clock, battery, labels) on screen 6."""
    for widget in (root6time, root6batt, root6lblbatt, root6lbldate):
        root6.add_widget(widget)
def removebatt():
    """Hide the status-bar widgets (clock, battery, labels) from screen 6."""
    for widget in (root6time, root6batt, root6lblbatt, root6lbldate):
        root6.remove_widget(widget)
def screen6():
    # Build the home screen: background, title, settings dropdown entries,
    # navigation buttons/labels, and the status bar.  The "Stats" tab is
    # relabelled "Garden" for ordinary users (Control == 0).
    sc6.add_widget(root6)
    root6.add_widget(frontBg6)
    root6.add_widget(root6Title)
    root6.add_widget(mainbutton)
    dropdown.add_widget(btnCP)
    dropdown.add_widget(btnColor)
    dropdown.add_widget(btnCons)
    dropdown.add_widget(btnD)
    dropdown.add_widget(btnA)
    dropdown.add_widget(btnHelp)
    dropdown.add_widget(btnLO)
    root6.add_widget(b_logs)
    root6.add_widget(b_home)
    root6.add_widget(b_write)
    root6.add_widget(b_stats)
    root6.add_widget(lblHome)
    root6.add_widget(lblStat)
    root6.add_widget(lblLogs)
    root6.add_widget(lblWrite)
    #change stat into Garden
    t_consul = 0
    www = 0
    cursor.execute("SELECT COUNT(*) FROM consultant_account")
    www = int(cursor.fetchone()[0])
    con.commit()
    # Only read Control when a consultant account exists.
    if www != 0:
        cursor.execute("SELECT Control FROM consultant_account")
        t_consul = int(cursor.fetchone()[0])
        con.commit()
    if t_consul == 0:
        lblStat.text = "[color=ffffff]Garden[/color]"
    if t_consul == 1:
        lblStat.text = "[color=ffffff]Stats[/color]"
    addbatt()
def screen3():
    """Assemble the write-log screen."""
    sc3.add_widget(root3)
    for widget in (frontBg3, b_sc3, input_sc3, titlewrite,
                   label_write, b_sc3menu):
        root3.add_widget(widget)
def popPin():
    """Prompt the user to set an initial PIN."""
    for widget in (lblPin, tbPin1, lblPin2, tbPin2, lblDum, buttonPopPin):
        bPopPin.add_widget(widget)
    popupPin.open()
def popGenderRemove1(self, *args):
    """Save gender as Male, close the gender popup, and ask for a PIN."""
    cursor.execute("SELECT id from user_account")
    account_id = cursor.fetchone()[0]
    con.commit()
    cursor.execute("UPDATE user_account SET gender=? WHERE id=?", ('Male', account_id))
    con.commit()
    for widget in (buttonPopGender1, buttonPopGender2, lblgender):
        bPopGender.remove_widget(widget)
    popupgender.dismiss()
    popPin()
def popGenderRemove2(self, *args):
    """Save gender as Female, close the gender popup, and ask for a PIN."""
    cursor.execute("SELECT id from user_account")
    account_id = cursor.fetchone()[0]
    con.commit()
    cursor.execute("UPDATE user_account SET gender=? WHERE id=?", ('Female', account_id))
    con.commit()
    for widget in (buttonPopGender1, buttonPopGender2, lblgender):
        bPopGender.remove_widget(widget)
    popupgender.dismiss()
    popPin()
def popColor(self, *args):
    """Open the color-scheme picker popup with all swatches attached."""
    for swatch in (buttonColorGreen, buttonColorRed, buttonColorBlue,
                   buttonColorOrange, buttonColorPurple, buttonColorPGreen,
                   buttonColorPRed, buttonColorPBlue, buttonColorPOrange,
                   buttonColorPPurple, buttonPopColor):
        bPopColor.add_widget(swatch)
    popupColor.open()
def popColorRemove():
    """Detach every swatch from the color picker so it can be reopened."""
    for swatch in (buttonColorGreen, buttonColorRed, buttonColorBlue,
                   buttonColorOrange, buttonColorPurple, buttonColorPGreen,
                   buttonColorPRed, buttonColorPBlue, buttonColorPOrange,
                   buttonColorPPurple, buttonPopColor):
        bPopColor.remove_widget(swatch)
def ConsultantMode(self,*args):
    """Toggle between User Mode and Consultant Mode.

    The menu button's own caption doubles as the current-state flag; the
    Control column in consultant_account is flipped to match ('0' = user,
    '1' = consultant) and a confirmation popup is shown.

    NOTE(review): buttonPopConsult.bind(...) runs on every toggle, so the
    dismiss handlers accumulate across calls — confirm whether duplicate
    bindings are harmless here.
    """
    if btnCons.text == '[color=ffffff]User Mode[/color]':
        cursor.execute("SELECT id from consultant_account")
        tmp_id = cursor.fetchone()[0]
        con.commit()
        cursor.execute("UPDATE consultant_account SET Control=? WHERE id=?",('0',tmp_id))
        con.commit()
        btnCons.text = '[color=ffffff]Consultant Mode[/color]'
        bPopConsult.add_widget(lblConsult)
        lblConsult.text="[color=000000]You're now in User Mode[/color]"
        bPopConsult.add_widget(buttonPopConsult)
        popupConsult.open()
        def consuldismiss(self,*args):
            # Tear the popup down so it can be rebuilt on the next toggle.
            bPopConsult.remove_widget(lblConsult)
            bPopConsult.remove_widget(buttonPopConsult)
            popupConsult.dismiss()
        buttonPopConsult.bind(on_press=consuldismiss)
    elif btnCons.text == '[color=ffffff]Consultant Mode[/color]':
        cursor.execute("SELECT id from consultant_account")
        tmp_id = cursor.fetchone()[0]
        con.commit()
        cursor.execute("UPDATE consultant_account SET Control=? WHERE id=?",('1',tmp_id))
        con.commit()
        btnCons.text = '[color=ffffff]User Mode[/color]'
        bPopConsult.add_widget(lblConsult)
        bPopConsult.add_widget(buttonPopConsult)
        popupConsult.open()
        def consuldismiss1(self,*args):
            # Tear the popup down so it can be rebuilt on the next toggle.
            bPopConsult.remove_widget(lblConsult)
            bPopConsult.remove_widget(buttonPopConsult)
            popupConsult.dismiss()
        buttonPopConsult.bind(on_press=consuldismiss1)
def MMHelp(self,*args):
    """Open the help popup: a carousel of tutorial slides plus a close button.

    NOTE(review): btnHelp1.bind is re-registered on every open, so the
    dismisshelp handlers accumulate across calls — confirm this is
    harmless (dismissing an already-dismissed popup is expected to be a
    no-op).
    """
    bPopHelp.add_widget(carouselHelp)
    carouselHelp.add_widget(lblHelp)
    carouselHelp.add_widget(tut1)
    carouselHelp.add_widget(tut2)
    carouselHelp.add_widget(tut3)
    carouselHelp.add_widget(tut4)
    carouselHelp.add_widget(btnHelp1)
    popuphelp.open()
    def dismisshelp(self,*args):
        # Detach the slides so the carousel can be rebuilt next time.
        bPopHelp.remove_widget(carouselHelp)
        carouselHelp.remove_widget(lblHelp)
        carouselHelp.remove_widget(tut1)
        carouselHelp.remove_widget(tut2)
        carouselHelp.remove_widget(tut3)
        carouselHelp.remove_widget(tut4)
        carouselHelp.remove_widget(btnHelp1)
        popuphelp.dismiss()
    btnHelp1.bind(on_press=dismisshelp)
def screen2():
    """Assemble the PIN-entry screen (keypad images + text box)."""
    popname()
    sc2.add_widget(root2)
    for widget in (frontBg2, txtInp2, passimg1, passimg2, passimg3,
                   passimg4, passimg5, passimg6, passimg7, passimg8,
                   passimg9, passimgclr, passimg0, passimgok, titlepin):
        root2.add_widget(widget)
def screen7():
    """Rebuild the user-garden screen from scratch (detach, then re-add)."""
    for widget in (frontBg7, userTitle, b_sc7menu, petalWhole):
        root7.remove_widget(widget)
    sc7.remove_widget(root7)
    sc7.add_widget(root7)
    root7.add_widget(frontBg7)
    root7.add_widget(userTitle)
    root7.add_widget(petalWhole)
    root7.add_widget(b_sc7menu)
#--EVENTS-----------------------
def _append_digit(digit):
    # Shared helper for the keypad handlers below: append one character to
    # the PIN text box.  Replaces ten copy-pasted one-line bodies.
    txtInp2.text += digit

def p1(self,*args):
    _append_digit("1")
def p2(self,*args):
    _append_digit("2")
def p3(self,*args):
    _append_digit("3")
def p4(self,*args):
    _append_digit("4")
def p5(self,*args):
    _append_digit("5")
def p6(self,*args):
    _append_digit("6")
def p7(self,*args):
    _append_digit("7")
def p8(self,*args):
    _append_digit("8")
def p9(self,*args):
    _append_digit("9")
def p0(self,*args):
    _append_digit("0")
def pclr(self,*args):
    # Clear the whole PIN entry.
    txtInp2.text = ""
def feedremove():
    """Clear both history feeds and detach the main-screen log widgets."""
    grid5_history.clear_widgets()
    sv5_history.clear_widgets()
    grid_history.clear_widgets()
    sv_history.clear_widgets()
    scmain.remove_widget(rootmain)
    for widget in (frontBgmain, logsTitle):
        rootmain.remove_widget(widget)
    sv_history.remove_widget(grid_history)
    rootmain.remove_widget(sv_history)
    rootmain.remove_widget(b_scmainmenu)
def feedadd():
    """Rebuild the history feed after a change (tear down, then reload)."""
    feedremove()
    rootmain.add_widget(sv_history)
    sv_history.add_widget(grid_history)
    getDB()
def popsave():
    """Show the "entry saved" confirmation popup."""
    for widget in (lblsave, buttonPopSave):
        bPopSave.add_widget(widget)
    popupsave.open()
def callback_proctext(self,*args):
    """Normalize, classify, and persist the journal entry from input_sc3.

    Improvements over the original: the polarity classifier runs once
    instead of twice, the date string is split once, and the whitespace
    regex is a raw string.  Empty/whitespace-only entries still pop an
    error instead of being saved.
    """
    thoughts_text = input_sc3.text
    thoughts_text = thoughts_text.lower()
    # Collapse whitespace runs, then drop everything except word
    # characters and basic punctuation.
    thoughts_text = re.sub(r'\s+', ' ', thoughts_text).strip()
    thoughts_text = re.sub(r"[^\w.,']", ' ', thoughts_text)
    if str(thoughts_text).isspace() == False and str(thoughts_text).strip() != '':
        date_now = datetime.datetime.now().strftime('%Y-%m-%d')
        time_now = datetime.datetime.now().strftime('%I:%M %p')
        year_s, month_s, day_s = date_now.split('-')
        # Classify once; element 0 is the positive score, element 1 the
        # negative score (the original called the classifier twice).
        polarity = naive_classifier.classify_polarity(thoughts_text)
        p_pos = polarity[0]
        p_neg = polarity[1]
        cursor.execute("INSERT INTO user_info(year,month,day,time,info,mood,pos,neg) VALUES(?,?,?,?,?,?,?,?)",
                       (year_s, month_s, day_s, time_now, thoughts_text, str(mood_value), p_pos, p_neg))
        con.commit()
        input_sc3.text = ""
        input_sc3.clear_widgets()
        popsave()
        feedremove()
    else:
        # Inline popup warning the user against saving an empty log.
        boxErrorWrite = BoxLayout(orientation="vertical")
        buttonErrorWrite = Button(text="CLOSE",
                                  background='img/blue.jpg',
                                  size_hint=(1,.5))
        lblErrorWrite2 = Label(text="[color=000000]CANNOT SAVE EMPTY LOG[/color]",
                               markup=True)
        boxErrorWrite.add_widget(lblErrorWrite2)
        boxErrorWrite.add_widget(buttonErrorWrite)
        popuperrorwrite = Popup(title='',
                                separator_color=[0,0,1,0.5],
                                background='img/white.jpg',
                                content=boxErrorWrite,
                                auto_dismiss=False,
                                size_hint=(0.6, 0.2))
        popuperrorwrite.open()
        def errorclick(self,*args):
            popuperrorwrite.dismiss()
        buttonErrorWrite.bind(on_press=errorclick)
def callbackmain(self,*args):
    # Verify the typed PIN against the stored password; on success, force
    # ordinary-user mode (Control='0') and build the main screens.
    cursor.execute("SELECT password from user_account")
    pwd = cursor.fetchall()[0]
    #password is 8888
    con.commit()
    #txtInp2.text = '8888'
    if txtInp2.text == pwd[0]:
        sm.current='sixthscreen'
        #always update into ordinary user
        cursor.execute("SELECT id from consultant_account")
        tmp_id = cursor.fetchone()[0]
        con.commit()
        cursor.execute("UPDATE consultant_account SET Control=? WHERE id=?",('0',tmp_id))
        con.commit()
        screen6()
        popup.open()
        screen3()
        screen4()
    else:
        # Wrong PIN: the error popup is raised separately by popError.
        pass
def popsc3():
    """Open the mood-selection popup (face buttons plus slider)."""
    bPop3.add_widget(lblmood)
    bPop3.add_widget(bPop33)
    for face in (bAngry, bSad, bHappy3, bHappy2, bHappy):
        bPop33.add_widget(face)
    bPop3.add_widget(tryslide)
    bPop3.add_widget(btnSlider)
    popup3.open()
def popexit(self, *args):
    """Open the exit-confirmation popup."""
    bPopExit.add_widget(lblexit)
    bPopExit.add_widget(bPopExit2)
    for choice in (buttonPopExit1, buttonPopExit2):
        bPopExit2.add_widget(choice)
    popupexit.open()
def popcp(self, *args):
    """Open the change-password popup with blank notice labels."""
    for widget in (lblCp1, tbCp1, lblCp2, tbCp2, lblCp3, tbCp3,
                   lblblank, lblnotice, buttonPopCpExit, buttonPopCp):
        bPopCp.add_widget(widget)
    lblnotice.text = ""
    lblblank.text = ""
    popupCp.open()
def popdev(self, *args):
    """Open the developers popup."""
    for widget in (imgBruno, imgJoms, lblDev2, lblDev1,
                   lbldevblank, buttonPopD):
        bPopD.add_widget(widget)
    popupDev.open()
def popabout(self, *args):
    """Open the about popup."""
    for widget in (lblabout, buttonPopAb):
        bPopAb.add_widget(widget)
    popupabout.open()
# --- "Invalid Password" popup (module-level singletons) ---------------
# Container, message label, close button, and the popup shell itself;
# popError/poperrordismiss attach and detach the children on demand.
bPopError = GridLayout(cols=1,padding=5,spacing = 5)
lblerror= Label(text="[color=000000]Invalid Password![/color]",
               # font_name='helv.otf',
                markup=True,halign='center',
                size_hint=(1,.5),
                font_size='10sp')
buttonPopError = Button(text="Close",
                        #font_name='helv.otf',
                        size_hint=(.5,.2),
                        background_normal="img/roundblue.jpg")
popuperror = Popup(background="img/white.jpg",
                   separator_color=[1,0,0,0.5],
                   content=bPopError,
                   title="Error",
                   auto_dismiss=False,title_color=[0,0,0,1],
                   size_hint=(0.5, 0.2))
#--devmode
# --- Consultant-mode confirmation popup (module-level singletons) -----
# The label text is rewritten by ConsultantMode depending on the
# direction of the toggle before the popup is opened.
bPopConsult = GridLayout(cols=1,padding=5,spacing = 5)
lblConsult= Label(text="[color=000000]You're now in Consultant Mode[/color]",
               # font_name='helv.otf',
                markup=True,halign='center',
                size_hint=(1,.5),
                font_size='10sp')
buttonPopConsult = Button(text="Close",
                        #font_name='helv.otf',
                        size_hint=(.5,.2),
                        background_normal="img/roundblue.jpg")
popupConsult = Popup(background="img/white.jpg",
                     separator_color=[1,0,0,0.5],
                     content=bPopConsult,
                     title=" ",
                     auto_dismiss=False,
                     size_hint=(0.5, 0.2))
def popError(self, *args):
    """Show the invalid-password popup when the typed PIN is wrong."""
    bPopError.add_widget(lblerror)
    bPopError.add_widget(buttonPopError)
    cursor.execute("SELECT password from user_account")
    stored = cursor.fetchall()[0]
    con.commit()
    if txtInp2.text != stored[0]:
        popuperror.open()
def callbacksc2(self, *args):
    """Jump to the PIN-entry screen."""
    sm.current = 'secondscreen'
    screen2()
def callbacksc3(self, *args):
    """Swap to the write-log screen with a fresh text box."""
    sm.transition = SwapTransition()
    sm.current = 'thirdscreen'
    input_sc3.text = ""
def callbacksc4(self,*args):
    # Swap to the stats screen and reset the period-selector widgets.
    sm.transition = SwapTransition()
    sm.current='fourthscreen'
    root4.add_widget(boxStats)
    #root4.add_widget(btnCont)
    # NOTE(review): this assigns a *local* wplot (no `global` statement),
    # so the new SmoothLinePlot is discarded when the function returns —
    # confirm whether `global wplot` was intended here.
    wplot = SmoothLinePlot(mode="points",color=rgb('adaa0a'))
    removeDWM()
def callbacksc7(self, *args):
    """Swap to the user-garden screen, rebuilding it first."""
    sm.transition = SwapTransition()
    sm.current = 'seventhscreen'
    screen7()
def mooddismiss():
    """Strip all mood-popup children so the popup can be rebuilt later."""
    popup3.remove_widget(bPop3)
    bPop3.remove_widget(bPop33)
    bPop3.remove_widget(lblmood)
    for face in (bHappy, bHappy2, bHappy3, bSad, bAngry):
        bPop33.remove_widget(face)
    bPop3.remove_widget(tryslide)
    bPop3.remove_widget(btnSlider)
def popexitdismiss(self, *args):
    """Close the exit popup and detach its children."""
    popupexit.dismiss()
    for choice in (buttonPopExit1, buttonPopExit2):
        bPopExit2.remove_widget(choice)
    bPopExit.remove_widget(bPopExit2)
    bPopExit.remove_widget(lblexit)
def popaboutdismiss(self, *args):
    """Close the about popup and detach its children."""
    popupabout.dismiss()
    for widget in (buttonPopAb, lblabout):
        bPopAb.remove_widget(widget)
def poperrordismiss(self, *args):
    """Close the invalid-password popup and clear the PIN box."""
    txtInp2.text = ''
    popuperror.dismiss()
    for widget in (buttonPopError, lblerror):
        bPopError.remove_widget(widget)
def popdevdismiss(self, *args):
    """Close the developers popup and detach its children."""
    popupDev.dismiss()
    for widget in (buttonPopD, lblDev1, lblDev2,
                   imgBruno, imgJoms, lbldevblank):
        bPopD.remove_widget(widget)
def popsavenotidismiss(self, *args):
    """Close the save-confirmation popup and detach its children."""
    popupsave.dismiss()
    for widget in (buttonPopSave, lblsave):
        bPopSave.remove_widget(widget)
def popcpdismiss(self, *args):
    """Close the change-password popup and reset its fields."""
    popupCp.dismiss()
    for box in (tbCp1, tbCp2, tbCp3):
        box.text = ""
    for widget in (buttonPopCp, buttonPopCpExit, lblCp1, lblCp2, lblCp3,
                   tbCp1, tbCp2, tbCp3, lblnotice, lblblank):
        bPopCp.remove_widget(widget)
def popnamedismiss(self,*args):
    """Persist the entered name (if any), close the name popup, and move
    on to the gender prompt.

    Fix: the INSERT now uses a bound parameter instead of string
    concatenation, so names containing quotes no longer break the SQL
    statement (and SQL injection is impossible).
    """
    if tbName.text in ['', ' ']:
        # Blank name: keep the popup open until something is typed.
        pass
    else:
        cursor.execute("INSERT INTO user_account(name) VALUES(?)",
                       (str(tbName.text),))
        con.commit()
        popupname.dismiss()
        bPopName.remove_widget(lblname)
        bPopName.remove_widget(tbName)
        bPopName.remove_widget(buttonPopName)
        popgender()
def savepoppin(self, *args):
    """Store the initial PIN from the setup popup, if the two boxes match
    and are non-blank; otherwise leave the popup open."""
    cursor.execute("SELECT id from user_account")
    tmp_pass = cursor.fetchone()[0]
    con.commit()
    if tbPin1.text == tbPin2.text:
        if tbPin1.text in ['',' ']:
            # Blank PIN: keep the popup open.
            pass
        else:
            cursor.execute("UPDATE user_account SET password=? WHERE id=?",(str(tbPin1.text),tmp_pass))
            con.commit()
            bPopPin.remove_widget(tbPin1)
            bPopPin.remove_widget(lblPin)
            bPopPin.remove_widget(lblPin2)
            bPopPin.remove_widget(tbPin2)
            bPopPin.remove_widget(lblDum)
            bPopPin.remove_widget(buttonPopPin)
            popupPin.dismiss()
    else:
        # Debug trace for mismatched PINs ("mali pass" = "wrong pass").
        print 'mali pass'
        pass
#Define Change Password Function
def popcpsave(self,*args):
    """Validate the change-password form and persist the new PIN.

    Requires: current password matches the stored one, the two new-PIN
    boxes agree, and the new PIN is longer than 3 characters.
    """
    cursor.execute("SELECT * FROM user_account")
    temp_pass = cursor.fetchall()
    con.commit()
    #print temp_pass[0][1]
    if tbCp1.text == temp_pass[0][1]:
        if tbCp2.text == tbCp3.text:
            # NOTE(review): operator precedence makes this parse as
            # (tbCp2.text and tbCp3.text == None) or len(...) <= 3; in
            # practice only the length test fires — confirm the intent
            # was simply "reject empty/short PINs".
            if tbCp2.text and tbCp3.text == None or len(tbCp2.text) <=3:
                pass
            else:
                cursor.execute("UPDATE user_account SET password=? WHERE id=?",(str(tbCp2.text),temp_pass[0][0] ))
                con.commit()
                #print 'oki doki'
                popupCp.dismiss()
                tbCp1.text=""
                tbCp2.text=""
                tbCp3.text=""
                bPopCp.remove_widget(buttonPopCp)
                bPopCp.remove_widget(buttonPopCpExit)
                bPopCp.remove_widget(lblCp1)
                bPopCp.remove_widget(lblCp2)
                bPopCp.remove_widget(lblCp3)
                bPopCp.remove_widget(tbCp1)
                bPopCp.remove_widget(tbCp2)
                bPopCp.remove_widget(tbCp3)
                bPopCp.remove_widget(lblnotice)
                bPopCp.remove_widget(lblblank)
        else:
            # New PINs disagree: surface the error in the notice labels.
            lblnotice.markup = True
            lblblank.markup = True
            lblnotice.text="[color=ff00ff][b]PINS DOES NOT MATCH![/b][/color]"
            lblblank.text="[color=ff00ff][b]ERROR:[/b][/color]"
def callbackhome(self,*args):
    # Return to the home screen: tear down the stats widgets, refresh the
    # status bar (clock, battery, date), and dismiss the save popup.
    sm.transition = SwapTransition()
    sm.current='sixthscreen'
    mooddismiss()
    root4.remove_widget(boxStats)
    root4.remove_widget(btnCont)
    root4.remove_widget(petalG)
    root4.remove_widget(lblDescript)
    root4.remove_widget(lblTemp)
    root4.remove_widget(scrollData)
    scrollData.remove_widget(gridData)
    boxStats.remove_widget(nGraph)
    # NOTE(review): boxStats was already removed above — presumably a
    # harmless duplicate; confirm.
    root4.remove_widget(boxStats)
    removebatt()
    root6time.text="[color=ffffff]"+datetime.datetime.now().strftime('%I:%M ')+"[/color]"
    root6batt.text="[color=ffffff][b]"+str(battery.status['percentage'])+"%[/b][/color]"
    root6lbldate.text="[color=ffffff]"+datetime.datetime.now().strftime('%A %d %B %Y')+"[/color]"
    addbatt()
    popupsave.dismiss()
    bPopSave.remove_widget(buttonPopSave)
    bPopSave.remove_widget(lblsave)
def callbacksc6(self, *args):
    """Swap back to the home screen."""
    sm.transition = SwapTransition()
    sm.current = 'sixthscreen'
def callbackscmain(self, *args):
    """Swap to the logs-history screen, rebuilding the feed first."""
    sm.transition = SwapTransition()
    sm.current = 'mainscreen'
    feedremove()
    screenmain()
def exitclose(self, *args):
    """Terminate the application."""
    App.get_running_app().stop()
#Define Mood Confirm Function
def moodconfirm(self, *args):
    """Close the mood popup once a mood has been chosen."""
    popup3.dismiss()
#color_schemes
def colorRed(self,*args):
color_val = 'red'
cursor.execute('SELECT COUNT(*) FROM color_tbl')
color_count = cursor.fetchone()[0]
if color_count == 0:
cursor.execute("INSERT INTO color_tbl(color) VALUES('"+color_val+"')")
con.commit()
elif color_count >= 1:
cursor.execute("UPDATE color_tbl SET color='"+color_val+"'")
con.commit()
btnhelpmeok.background_normal="img/red.jpg"
btnHelpGraph.background_normal="img/red.jpg"
buttonPop.background_normal="img/red.jpg"
titlewrite.background_down="img/red.jpg"
logsTitle.background_down="img/red.jpg"
recentTitle.background_down="img/red.jpg"
statsTitle.background_down="img/red.jpg"
root6Title.background_down="img/red.jpg"
root6Title.background_normal="img/red.jpg"
buttonPopExit1.background_normal="img/red.jpg"
buttonPopExit2.background_normal="img/red.jpg"
buttonPop.background_normal="img/red.jpg"
buttonPopAb.background_normal="img/red.jpg"
buttonPopD.background_normal="img/red.jpg"
buttonPopName.background_normal="img/red.jpg"
buttonPopCpExit.background_normal="img/red.jpg"
btnCP.background_normal="img/red.jpg"
btnCons.background_normal="img/red.jpg"
btnHelp.background_normal="img/red.jpg"
btnColor.background_normal="img/red.jpg"
btnD.background_normal="img/red.jpg"
btnA.background_normal="img/red.jpg"
btnLO.background_normal="img/red.jpg"
mainbutton.background_normal="img/red.jpg"
buttonPopName.background_normal="img/red.jpg"
btnDaily.background_normal="img/red.jpg"
btnWeekly.background_normal="img/red.jpg"
btnMonthly.background_normal="img/red.jpg"
btnAnnual.background_normal="img/red.jpg"
graphButton.background_normal="img/red.jpg"
b_sc4menu.background_normal="img/red.jpg"
statsTitle.background_normal="img/red.jpg"
btnd1.background_normal="img/red.jpg"
btnd2.background_normal="img/red.jpg"
btnDayDay.background_normal="img/red.jpg"
btndy1.background_normal="img/red.jpg"
btndy2.background_normal="img/red.jpg"
btnYearDay.background_normal="img/red.jpg"
btnmy11.background_normal="img/red.jpg"
btnmy22.background_normal="img/red.jpg"
btnMonthDay.background_normal="img/red.jpg"
btnwd1.background_normal="img/red.jpg"
btnWeekDay.background_normal="img/red.jpg"
btnwy1.background_normal="img/red.jpg"
btnWeekYear.background_normal="img/red.jpg"
btnwm1.background_normal="img/red.jpg"
btnWeekMonth.background_normal="img/red.jpg"
btnmm1.background_normal="img/red.jpg"
btnmm2.background_normal="img/red.jpg"
btnmy1.background_normal="img/red.jpg"
btnmy2.background_normal="img/red.jpg"
btnMon1.background_normal="img/red.jpg"
btnMon2.background_normal="img/red.jpg"
btnann1.background_normal="img/red.jpg"
btnAnnualYear.background_normal="img/red.jpg"
b_scmainmenu.background_normal="img/red.jpg"
logsTitle.background_normal="img/red.jpg"
b_sc5menu.background_normal="img/red.jpg"
recentTitle.background_normal="img/red.jpg"
b_sc3menu.background_normal="img/red.jpg"
buttonPopSave.background_normal="img/red.jpg"
b_sc3.background_normal="img/red.jpg"
titlewrite.background_normal="img/red.jpg"
btnSlider.background_normal="img/red.jpg"
buttonPopCp.background_normal="img/red.jpg"
b_home.text="[color=ff0000]D[/color]"
lblHome.text="[color=ff0000][b]Home[/b][/color]"
def colorBlue(self,*args):
color_val = 'blue'
cursor.execute('SELECT COUNT(*) FROM color_tbl')
color_count = cursor.fetchone()[0]
if color_count == 0:
cursor.execute("INSERT INTO color_tbl(color) VALUES('"+color_val+"')")
con.commit()
elif color_count >= 1:
cursor.execute("UPDATE color_tbl SET color='"+color_val+"'")
con.commit()
btnhelpmeok.background_normal="img/blue.jpg"
btnHelpGraph.background_normal="img/blue.jpg"
buttonPop.background_down="img/blue.jpg"
titlewrite.background_down="img/blue.jpg"
logsTitle.background_down="img/blue.jpg"
recentTitle.background_down="img/blue.jpg"
statsTitle.background_down="img/blue.jpg"
root6Title.background_down="img/blue.jpg"
root6Title.background_normal="img/blue.jpg"
buttonPopExit1.background_normal="img/blue.jpg"
buttonPopExit2.background_normal="img/blue.jpg"
buttonPop.background_normal="img/blue.jpg"
buttonPopAb.background_normal="img/blue.jpg"
buttonPopD.background_normal="img/blue.jpg"
buttonPopName.background_normal="img/blue.jpg"
buttonPopCpExit.background_normal="img/blue.jpg"
btnCP.background_normal="img/blue.jpg"
btnCons.background_normal="img/blue.jpg"
btnHelp.background_normal="img/blue.jpg"
btnColor.background_normal="img/blue.jpg"
btnD.background_normal="img/blue.jpg"
btnA.background_normal="img/blue.jpg"
btnLO.background_normal="img/blue.jpg"
mainbutton.background_normal="img/blue.jpg"
buttonPopName.background_normal="img/blue.jpg"
btnDaily.background_normal="img/blue.jpg"
btnWeekly.background_normal="img/blue.jpg"
btnMonthly.background_normal="img/blue.jpg"
btnAnnual.background_normal="img/blue.jpg"
graphButton.background_normal="img/blue.jpg"
b_sc4menu.background_normal="img/blue.jpg"
statsTitle.background_normal="img/blue.jpg"
btnd1.background_normal="img/blue.jpg"
btnd2.background_normal="img/blue.jpg"
btnDayDay.background_normal="img/blue.jpg"
btndy1.background_normal="img/blue.jpg"
btndy2.background_normal="img/blue.jpg"
btnYearDay.background_normal="img/blue.jpg"
btnmy11.background_normal="img/blue.jpg"
btnmy22.background_normal="img/blue.jpg"
btnMonthDay.background_normal="img/blue.jpg"
btnwd1.background_normal="img/blue.jpg"
btnWeekDay.background_normal="img/blue.jpg"
btnwy1.background_normal="img/blue.jpg"
btnWeekYear.background_normal="img/blue.jpg"
btnwm1.background_normal="img/blue.jpg"
btnWeekMonth.background_normal="img/blue.jpg"
btnmm1.background_normal="img/blue.jpg"
btnmm2.background_normal="img/blue.jpg"
btnmy1.background_normal="img/blue.jpg"
btnmy2.background_normal="img/blue.jpg"
btnMon1.background_normal="img/blue.jpg"
btnMon2.background_normal="img/blue.jpg"
btnann1.background_normal="img/blue.jpg"
btnAnnualYear.background_normal="img/blue.jpg"
b_scmainmenu.background_normal="img/blue.jpg"
logsTitle.background_normal="img/blue.jpg"
b_sc5menu.background_normal="img/blue.jpg"
recentTitle.background_normal="img/blue.jpg"
b_sc3menu.background_normal="img/blue.jpg"
buttonPopSave.background_normal="img/blue.jpg"
b_sc3.background_normal="img/blue.jpg"
titlewrite.background_normal="img/blue.jpg"
btnSlider.background_normal="img/blue.jpg"
buttonPopCp.background_normal="img/blue.jpg"
b_home.text="[color=3b5999]D[/color]"
lblHome.text="[color=3b5999][b]Home[/b][/color]"
def colorGreen(self,*args):
color_val = 'green'
cursor.execute('SELECT COUNT(*) FROM color_tbl')
color_count = cursor.fetchone()[0]
if color_count == 0:
cursor.execute("INSERT INTO color_tbl(color) VALUES('"+color_val+"')")
con.commit()
elif color_count >= 1:
cursor.execute("UPDATE color_tbl SET color='"+color_val+"'")
con.commit()
btnhelpmeok.background_normal="img/green.jpg"
btnHelpGraph.background_normal="img/green.jpg"
buttonPop.background_down="img/green.jpg"
titlewrite.background_down="img/green.jpg"
logsTitle.background_down="img/green.jpg"
recentTitle.background_down="img/green.jpg"
statsTitle.background_down="img/green.jpg"
root6Title.background_down="img/green.jpg"
root6Title.background_normal="img/green.jpg"
buttonPopExit1.background_normal="img/green.jpg"
buttonPopExit2.background_normal="img/green.jpg"
buttonPop.background_normal="img/green.jpg"
buttonPopAb.background_normal="img/green.jpg"
buttonPopD.background_normal="img/green.jpg"
buttonPopName.background_normal="img/green.jpg"
buttonPopCpExit.background_normal="img/green.jpg"
btnCP.background_normal="img/green.jpg"
btnCons.background_normal="img/green.jpg"
btnHelp.background_normal="img/green.jpg"
btnColor.background_normal="img/green.jpg"
btnD.background_normal="img/green.jpg"
btnA.background_normal="img/green.jpg"
btnLO.background_normal="img/green.jpg"
mainbutton.background_normal="img/green.jpg"
buttonPopName.background_normal="img/green.jpg"
btnDaily.background_normal="img/green.jpg"
btnWeekly.background_normal="img/green.jpg"
btnMonthly.background_normal="img/green.jpg"
btnAnnual.background_normal="img/green.jpg"
graphButton.background_normal="img/green.jpg"
b_sc4menu.background_normal="img/green.jpg"
statsTitle.background_normal="img/green.jpg"
btnd1.background_normal="img/green.jpg"
btnd2.background_normal="img/green.jpg"
btnDayDay.background_normal="img/green.jpg"
btndy1.background_normal="img/green.jpg"
btndy2.background_normal="img/green.jpg"
btnYearDay.background_normal="img/green.jpg"
btnmy11.background_normal="img/green.jpg"
btnmy22.background_normal="img/green.jpg"
btnMonthDay.background_normal="img/green.jpg"
btnwd1.background_normal="img/green.jpg"
btnWeekDay.background_normal="img/green.jpg"
btnwy1.background_normal="img/green.jpg"
btnWeekYear.background_normal="img/green.jpg"
btnwm1.background_normal="img/green.jpg"
btnWeekMonth.background_normal="img/green.jpg"
btnmm1.background_normal="img/green.jpg"
btnmm2.background_normal="img/green.jpg"
btnmy1.background_normal="img/green.jpg"
btnmy2.background_normal="img/green.jpg"
btnMon1.background_normal="img/green.jpg"
btnMon2.background_normal="img/green.jpg"
btnann1.background_normal="img/green.jpg"
btnAnnualYear.background_normal="img/green.jpg"
b_scmainmenu.background_normal="img/green.jpg"
logsTitle.background_normal="img/green.jpg"
b_sc5menu.background_normal="img/green.jpg"
recentTitle.background_normal="img/green.jpg"
b_sc3menu.background_normal="img/green.jpg"
buttonPopSave.background_normal="img/green.jpg"
b_sc3.background_normal="img/green.jpg"
titlewrite.background_normal="img/green.jpg"
btnSlider.background_normal="img/green.jpg"
buttonPopCp.background_normal="img/green.jpg"
b_home.text="[color=00ff00]D[/color]"
lblHome.text="[color=00ff00][b]Home[/b][/color]"
def colorOrange(self,*args):
color_val = 'orange'
cursor.execute('SELECT COUNT(*) FROM color_tbl')
color_count = cursor.fetchone()[0]
if color_count == 0:
cursor.execute("INSERT INTO color_tbl(color) VALUES('"+color_val+"')")
con.commit()
elif color_count >= 1:
cursor.execute("UPDATE color_tbl SET color='"+color_val+"'")
con.commit()
btnhelpmeok.background_normal="img/orange.jpg"
btnHelpGraph.background_normal="img/orange.jpg"
buttonPop.background_down="img/orange.jpg"
titlewrite.background_down="img/orange.jpg"
logsTitle.background_down="img/orange.jpg"
recentTitle.background_down="img/orange.jpg"
statsTitle.background_down="img/orange.jpg"
root6Title.background_down="img/orange.jpg"
root6Title.background_normal="img/orange.jpg"
buttonPopExit1.background_normal="img/orange.jpg"
buttonPopExit2.background_normal="img/orange.jpg"
buttonPop.background_normal="img/orange.jpg"
buttonPopAb.background_normal="img/orange.jpg"
buttonPopD.background_normal="img/orange.jpg"
buttonPopName.background_normal="img/orange.jpg"
buttonPopCpExit.background_normal="img/orange.jpg"
btnCP.background_normal="img/orange.jpg"
btnCons.background_normal="img/orange.jpg"
btnHelp.background_normal="img/orange.jpg"
btnColor.background_normal="img/orange.jpg"
btnD.background_normal="img/orange.jpg"
btnA.background_normal="img/orange.jpg"
btnLO.background_normal="img/orange.jpg"
mainbutton.background_normal="img/orange.jpg"
buttonPopName.background_normal="img/orange.jpg"
btnDaily.background_normal="img/orange.jpg"
btnWeekly.background_normal="img/orange.jpg"
btnMonthly.background_normal="img/orange.jpg"
btnAnnual.background_normal="img/orange.jpg"
graphButton.background_normal="img/orange.jpg"
b_sc4menu.background_normal="img/orange.jpg"
statsTitle.background_normal="img/orange.jpg"
btnd1.background_normal="img/orange.jpg"
btnd2.background_normal="img/orange.jpg"
btnDayDay.background_normal="img/orange.jpg"
btndy1.background_normal="img/orange.jpg"
btndy2.background_normal="img/orange.jpg"
btnYearDay.background_normal="img/orange.jpg"
btnmy11.background_normal="img/orange.jpg"
btnmy22.background_normal="img/orange.jpg"
btnMonthDay.background_normal="img/orange.jpg"
btnwd1.background_normal="img/orange.jpg"
btnWeekDay.background_normal="img/orange.jpg"
btnwy1.background_normal="img/orange.jpg"
btnWeekYear.background_normal="img/orange.jpg"
btnwm1.background_normal="img/orange.jpg"
btnWeekMonth.background_normal="img/orange.jpg"
btnmm1.background_normal="img/orange.jpg"
btnmm2.background_normal="img/orange.jpg"
btnmy1.background_normal="img/orange.jpg"
btnmy2.background_normal="img/orange.jpg"
btnMon1.background_normal="img/orange.jpg"
btnann1.background_normal="img/orange.jpg"
btnAnnualYear.background_normal="img/orange.jpg"
b_scmainmenu.background_normal="img/orange.jpg"
logsTitle.background_normal="img/orange.jpg"
b_sc5menu.background_normal="img/orange.jpg"
recentTitle.background_normal="img/orange.jpg"
b_sc3menu.background_normal="img/orange.jpg"
buttonPopSave.background_normal="img/orange.jpg"
b_sc3.background_normal="img/orange.jpg"
titlewrite.background_normal="img/orange.jpg"
btnSlider.background_normal="img/orange.jpg"
buttonPopCp.background_normal="img/orange.jpg"
b_home.text="[color=ff4500]D[/color]"
lblHome.text="[color=ff4500][b]Home[/b][/color]"
def colorPurple(self,*args):
color_val = 'purple'
cursor.execute('SELECT COUNT(*) FROM color_tbl')
color_count = cursor.fetchone()[0]
if color_count == 0:
cursor.execute("INSERT INTO color_tbl(color) VALUES('"+color_val+"')")
con.commit()
elif color_count >= 1:
cursor.execute("UPDATE color_tbl SET color='"+color_val+"'")
con.commit()
btnhelpmeok.background_normal="img/purple.jpg"
btnHelpGraph.background_normal="img/purple.jpg"
buttonPop.background_normal="img/purple.jpg"
titlewrite.background_down="img/purple.jpg"
logsTitle.background_down="img/purple.jpg"
recentTitle.background_down="img/purple.jpg"
statsTitle.background_down="img/purple.jpg"
root6Title.background_down="img/purple.jpg"
root6Title.background_normal="img/purple.jpg"
buttonPopExit1.background_normal="img/purple.jpg"
buttonPopExit2.background_normal="img/purple.jpg"
buttonPop.background_normal="img/purple.jpg"
buttonPopAb.background_normal="img/purple.jpg"
buttonPopD.background_normal="img/purple.jpg"
buttonPopName.background_normal="img/purple.jpg"
buttonPopCpExit.background_normal="img/purple.jpg"
btnCP.background_normal="img/purple.jpg"
btnCons.background_normal="img/purple.jpg"
btnHelp.background_normal="img/purple.jpg"
btnColor.background_normal="img/purple.jpg"
btnD.background_normal="img/purple.jpg"
btnA.background_normal="img/purple.jpg"
btnLO.background_normal="img/purple.jpg"
mainbutton.background_normal="img/purple.jpg"
buttonPopName.background_normal="img/purple.jpg"
btnDaily.background_normal="img/purple.jpg"
btnWeekly.background_normal="img/purple.jpg"
btnMonthly.background_normal="img/purple.jpg"
btnAnnual.background_normal="img/purple.jpg"
graphButton.background_normal="img/purple.jpg"
b_sc4menu.background_normal="img/purple.jpg"
statsTitle.background_normal="img/purple.jpg"
btnd1.background_normal="img/purple.jpg"
btnd2.background_normal="img/purple.jpg"
btnDayDay.background_normal="img/purple.jpg"
btndy1.background_normal="img/purple.jpg"
btndy2.background_normal="img/purple.jpg"
btnYearDay.background_normal="img/purple.jpg"
btnmy11.background_normal="img/purple.jpg"
btnmy22.background_normal="img/purple.jpg"
btnMonthDay.background_normal="img/purple.jpg"
btnwd1.background_normal="img/purple.jpg"
btnWeekDay.background_normal="img/purple.jpg"
btnwy1.background_normal="img/purple.jpg"
btnWeekYear.background_normal="img/purple.jpg"
btnwm1.background_normal="img/purple.jpg"
btnWeekMonth.background_normal="img/purple.jpg"
btnmm1.background_normal="img/purple.jpg"
btnmm2.background_normal="img/purple.jpg"
btnmy1.background_normal="img/purple.jpg"
btnmy2.background_normal="img/purple.jpg"
btnMon1.background_normal="img/purple.jpg"
btnann1.background_normal="img/purple.jpg"
btnAnnualYear.background_normal="img/purple.jpg"
b_scmainmenu.background_normal="img/purple.jpg"
logsTitle.background_normal="img/purple.jpg"
b_sc5menu.background_normal="img/purple.jpg"
recentTitle.background_normal="img/purple.jpg"
b_sc3menu.background_normal="img/purple.jpg"
buttonPopSave.background_normal="img/purple.jpg"
b_sc3.background_normal="img/purple.jpg"
titlewrite.background_normal="img/purple.jpg"
btnSlider.background_normal="img/purple.jpg"
buttonPopCp.background_normal="img/purple.jpg"
b_home.text="[color=ba55d3]D[/color]"
lblHome.text="[color=ba55d3][b]Home[/b][/color]"
def colorPRed(self,*args):
color_val = 'pred'
cursor.execute('SELECT COUNT(*) FROM color_tbl')
color_count = cursor.fetchone()[0]
if color_count == 0:
cursor.execute("INSERT INTO color_tbl(color) VALUES('"+color_val+"')")
con.commit()
elif color_count >= 1:
cursor.execute("UPDATE color_tbl SET color='"+color_val+"'")
con.commit()
btnhelpmeok.background_normal="img/pred.jpg"
btnHelpGraph.background_normal="img/pred.jpg"
buttonPop.background_down="img/pred.jpg"
titlewrite.background_down="img/pred.jpg"
logsTitle.background_down="img/pred.jpg"
recentTitle.background_down="img/pred.jpg"
statsTitle.background_down="img/pred.jpg"
root6Title.background_down="img/pred.jpg"
root6Title.background_normal="img/pred.jpg"
buttonPopExit1.background_normal="img/pred.jpg"
buttonPopExit2.background_normal="img/pred.jpg"
buttonPop.background_normal="img/pred.jpg"
buttonPopAb.background_normal="img/pred.jpg"
buttonPopD.background_normal="img/pred.jpg"
buttonPopName.background_normal="img/pred.jpg"
buttonPopCpExit.background_normal="img/pred.jpg"
btnCP.background_normal="img/pred.jpg"
btnCons.background_normal="img/pred.jpg"
btnHelp.background_normal="img/pred.jpg"
btnColor.background_normal="img/pred.jpg"
btnD.background_normal="img/pred.jpg"
btnA.background_normal="img/pred.jpg"
btnLO.background_normal="img/pred.jpg"
mainbutton.background_normal="img/pred.jpg"
buttonPopName.background_normal="img/pred.jpg"
btnDaily.background_normal="img/pred.jpg"
btnWeekly.background_normal="img/pred.jpg"
btnMonthly.background_normal="img/pred.jpg"
btnAnnual.background_normal="img/pred.jpg"
graphButton.background_normal="img/pred.jpg"
b_sc4menu.background_normal="img/pred.jpg"
statsTitle.background_normal="img/pred.jpg"
btnd1.background_normal="img/pred.jpg"
btnd2.background_normal="img/pred.jpg"
btnDayDay.background_normal="img/pred.jpg"
btndy1.background_normal="img/pred.jpg"
btndy2.background_normal="img/pred.jpg"
btnYearDay.background_normal="img/pred.jpg"
btnmy11.background_normal="img/pred.jpg"
btnmy22.background_normal="img/pred.jpg"
btnMonthDay.background_normal="img/pred.jpg"
btnwd1.background_normal="img/pred.jpg"
btnWeekDay.background_normal="img/pred.jpg"
btnwy1.background_normal="img/pred.jpg"
btnWeekYear.background_normal="img/pred.jpg"
btnwm1.background_normal="img/pred.jpg"
btnWeekMonth.background_normal="img/pred.jpg"
btnmm1.background_normal="img/pred.jpg"
btnmm2.background_normal="img/pred.jpg"
btnmy1.background_normal="img/pred.jpg"
btnmy2.background_normal="img/pred.jpg"
btnMon1.background_normal="img/pred.jpg"
btnMon2.background_normal="img/pred.jpg"
btnann1.background_normal="img/pred.jpg"
btnAnnualYear.background_normal="img/pred.jpg"
b_scmainmenu.background_normal="img/pred.jpg"
logsTitle.background_normal="img/pred.jpg"
b_sc5menu.background_normal="img/pred.jpg"
recentTitle.background_normal="img/pred.jpg"
b_sc3menu.background_normal="img/pred.jpg"
buttonPopSave.background_normal="img/pred.jpg"
b_sc3.background_normal="img/pred.jpg"
titlewrite.background_normal="img/pred.jpg"
btnSlider.background_normal="img/pred.jpg"
buttonPopCp.background_normal="img/pred.jpg"
b_home.text="[color=ffb4bb]D[/color]"
lblHome.text="[color=ffb4bb][b]Home[/b][/color]"
def colorPBlue(self,*args):
color_val = 'pblue'
cursor.execute('SELECT COUNT(*) FROM color_tbl')
color_count = cursor.fetchone()[0]
if color_count == 0:
cursor.execute("INSERT INTO color_tbl(color) VALUES('"+color_val+"')")
con.commit()
elif color_count >= 1:
cursor.execute("UPDATE color_tbl SET color='"+color_val+"'")
con.commit()
btnhelpmeok.background_normal="img/pblue.jpg"
btnHelpGraph.background_normal="img/pblue.jpg"
buttonPop.background_down="img/pblue.jpg"
titlewrite.background_down="img/pblue.jpg"
logsTitle.background_down="img/pblue.jpg"
recentTitle.background_down="img/pblue.jpg"
statsTitle.background_down="img/pblue.jpg"
root6Title.background_down="img/pblue.jpg"
root6Title.background_normal="img/pblue.jpg"
buttonPopExit1.background_normal="img/pblue.jpg"
buttonPopExit2.background_normal="img/pblue.jpg"
buttonPop.background_normal="img/pblue.jpg"
buttonPopAb.background_normal="img/pblue.jpg"
buttonPopD.background_normal="img/pblue.jpg"
buttonPopName.background_normal="img/pblue.jpg"
buttonPopCpExit.background_normal="img/pblue.jpg"
btnCP.background_normal="img/pblue.jpg"
btnCons.background_normal="img/pblue.jpg"
btnHelp.background_normal="img/pblue.jpg"
btnColor.background_normal="img/pblue.jpg"
btnD.background_normal="img/pblue.jpg"
btnA.background_normal="img/pblue.jpg"
btnLO.background_normal="img/pblue.jpg"
mainbutton.background_normal="img/pblue.jpg"
buttonPopName.background_normal="img/pblue.jpg"
btnDaily.background_normal="img/pblue.jpg"
btnWeekly.background_normal="img/pblue.jpg"
btnMonthly.background_normal="img/pblue.jpg"
btnAnnual.background_normal="img/pblue.jpg"
graphButton.background_normal="img/pblue.jpg"
b_sc4menu.background_normal="img/pblue.jpg"
statsTitle.background_normal="img/pblue.jpg"
btnd1.background_normal="img/pblue.jpg"
btnd2.background_normal="img/pblue.jpg"
btnDayDay.background_normal="img/pblue.jpg"
btndy1.background_normal="img/pblue.jpg"
btndy2.background_normal="img/pblue.jpg"
btnYearDay.background_normal="img/pblue.jpg"
btnmy11.background_normal="img/pblue.jpg"
btnmy22.background_normal="img/pblue.jpg"
btnMonthDay.background_normal="img/pblue.jpg"
btnwd1.background_normal="img/pblue.jpg"
btnWeekDay.background_normal="img/pblue.jpg"
btnwy1.background_normal="img/pblue.jpg"
btnWeekYear.background_normal="img/pblue.jpg"
btnwm1.background_normal="img/pblue.jpg"
btnWeekMonth.background_normal="img/pblue.jpg"
btnmm1.background_normal="img/pblue.jpg"
btnmm2.background_normal="img/pblue.jpg"
btnmy1.background_normal="img/pblue.jpg"
btnmy2.background_normal="img/pblue.jpg"
btnMon1.background_normal="img/pblue.jpg"
btnMon2.background_normal="img/pblue.jpg"
btnann1.background_normal="img/pblue.jpg"
btnAnnualYear.background_normal="img/pblue.jpg"
b_scmainmenu.background_normal="img/pblue.jpg"
logsTitle.background_normal="img/pblue.jpg"
b_sc5menu.background_normal="img/pblue.jpg"
recentTitle.background_normal="img/pblue.jpg"
b_sc3menu.background_normal="img/pblue.jpg"
buttonPopSave.background_normal="img/pblue.jpg"
b_sc3.background_normal="img/pblue.jpg"
titlewrite.background_normal="img/pblue.jpg"
btnSlider.background_normal="img/pblue.jpg"
buttonPopCp.background_normal="img/pblue.jpg"
b_home.text="[color=88cafd]D[/color]"
lblHome.text="[color=88cafd][b]Home[/b][/color]"
def colorPGreen(self,*args):
color_val = 'pgreen'
cursor.execute('SELECT COUNT(*) FROM color_tbl')
color_count = cursor.fetchone()[0]
if color_count == 0:
cursor.execute("INSERT INTO color_tbl(color) VALUES('"+color_val+"')")
con.commit()
elif color_count >= 1:
cursor.execute("UPDATE color_tbl SET color='"+color_val+"'")
con.commit()
btnhelpmeok.background_normal="img/pgreen.jpg"
btnHelpGraph.background_normal="img/pgreen.jpg"
buttonPop.background_down="img/pgreen.jpg"
titlewrite.background_down="img/pgreen.jpg"
logsTitle.background_down="img/pgreen.jpg"
recentTitle.background_down="img/pgreen.jpg"
statsTitle.background_down="img/pgreen.jpg"
root6Title.background_down="img/pgreen.jpg"
root6Title.background_normal="img/pgreen.jpg"
buttonPopExit1.background_normal="img/pgreen.jpg"
buttonPopExit2.background_normal="img/pgreen.jpg"
buttonPop.background_normal="img/pgreen.jpg"
buttonPopAb.background_normal="img/pgreen.jpg"
buttonPopD.background_normal="img/pgreen.jpg"
buttonPopName.background_normal="img/pgreen.jpg"
buttonPopCpExit.background_normal="img/pgreen.jpg"
btnCP.background_normal="img/pgreen.jpg"
btnCons.background_normal="img/pgreen.jpg"
btnHelp.background_normal="img/pgreen.jpg"
btnColor.background_normal="img/pgreen.jpg"
btnD.background_normal="img/pgreen.jpg"
btnA.background_normal="img/pgreen.jpg"
btnLO.background_normal="img/pgreen.jpg"
mainbutton.background_normal="img/pgreen.jpg"
buttonPopName.background_normal="img/pgreen.jpg"
btnDaily.background_normal="img/pgreen.jpg"
btnWeekly.background_normal="img/pgreen.jpg"
btnMonthly.background_normal="img/pgreen.jpg"
btnAnnual.background_normal="img/pgreen.jpg"
graphButton.background_normal="img/pgreen.jpg"
b_sc4menu.background_normal="img/pgreen.jpg"
statsTitle.background_normal="img/pgreen.jpg"
btnd1.background_normal="img/pgreen.jpg"
btnd2.background_normal="img/pgreen.jpg"
btnDayDay.background_normal="img/pgreen.jpg"
btndy1.background_normal="img/pgreen.jpg"
btndy2.background_normal="img/pgreen.jpg"
btnYearDay.background_normal="img/pgreen.jpg"
btnmy11.background_normal="img/pgreen.jpg"
btnmy22.background_normal="img/pgreen.jpg"
btnMonthDay.background_normal="img/pgreen.jpg"
btnwd1.background_normal="img/pgreen.jpg"
btnWeekDay.background_normal="img/pgreen.jpg"
btnwy1.background_normal="img/pgreen.jpg"
btnWeekYear.background_normal="img/pgreen.jpg"
btnwm1.background_normal="img/pgreen.jpg"
btnWeekMonth.background_normal="img/pgreen.jpg"
btnmm1.background_normal="img/pgreen.jpg"
btnmm2.background_normal="img/pgreen.jpg"
btnmy1.background_normal="img/pgreen.jpg"
btnmy2.background_normal="img/pgreen.jpg"
btnMon1.background_normal="img/pgreen.jpg"
btnMon2.background_normal="img/pgreen.jpg"
btnann1.background_normal="img/pgreen.jpg"
btnAnnualYear.background_normal="img/pgreen.jpg"
b_scmainmenu.background_normal="img/pgreen.jpg"
logsTitle.background_normal="img/pgreen.jpg"
b_sc5menu.background_normal="img/pgreen.jpg"
recentTitle.background_normal="img/pgreen.jpg"
b_sc3menu.background_normal="img/pgreen.jpg"
buttonPopSave.background_normal="img/pgreen.jpg"
b_sc3.background_normal="img/pgreen.jpg"
titlewrite.background_normal="img/pgreen.jpg"
btnSlider.background_normal="img/pgreen.jpg"
buttonPopCp.background_normal="img/pgreen.jpg"
b_home.text="[color=7ffe9a]D[/color]"
lblHome.text="[color=7ffe9a][b]Home[/b][/color]"
def colorPOrange(self,*args):
color_val = 'porange'
cursor.execute('SELECT COUNT(*) FROM color_tbl')
color_count = cursor.fetchone()[0]
if color_count == 0:
cursor.execute("INSERT INTO color_tbl(color) VALUES('"+color_val+"')")
con.commit()
elif color_count >= 1:
cursor.execute("UPDATE color_tbl SET color='"+color_val+"'")
con.commit()
btnhelpmeok.background_normal="img/porange.jpg"
btnHelpGraph.background_normal="img/porange.jpg"
buttonPop.background_down="img/porange.jpg"
titlewrite.background_down="img/porange.jpg"
logsTitle.background_down="img/porange.jpg"
recentTitle.background_down="img/porange.jpg"
statsTitle.background_down="img/porange.jpg"
root6Title.background_down="img/porange.jpg"
root6Title.background_normal="img/porange.jpg"
buttonPopExit1.background_normal="img/porange.jpg"
buttonPopExit2.background_normal="img/porange.jpg"
buttonPop.background_normal="img/porange.jpg"
buttonPopAb.background_normal="img/porange.jpg"
buttonPopD.background_normal="img/porange.jpg"
buttonPopName.background_normal="img/porange.jpg"
buttonPopCpExit.background_normal="img/porange.jpg"
btnCP.background_normal="img/porange.jpg"
btnCons.background_normal="img/porange.jpg"
btnHelp.background_normal="img/porange.jpg"
btnColor.background_normal="img/porange.jpg"
btnD.background_normal="img/porange.jpg"
btnA.background_normal="img/porange.jpg"
btnLO.background_normal="img/porange.jpg"
mainbutton.background_normal="img/porange.jpg"
buttonPopName.background_normal="img/porange.jpg"
btnDaily.background_normal="img/porange.jpg"
btnWeekly.background_normal="img/porange.jpg"
btnMonthly.background_normal="img/porange.jpg"
btnAnnual.background_normal="img/porange.jpg"
graphButton.background_normal="img/porange.jpg"
b_sc4menu.background_normal="img/porange.jpg"
statsTitle.background_normal="img/porange.jpg"
btnd1.background_normal="img/porange.jpg"
btnd2.background_normal="img/porange.jpg"
btnDayDay.background_normal="img/porange.jpg"
btndy1.background_normal="img/porange.jpg"
btndy2.background_normal="img/porange.jpg"
btnYearDay.background_normal="img/porange.jpg"
btnmy11.background_normal="img/porange.jpg"
btnmy22.background_normal="img/porange.jpg"
btnMonthDay.background_normal="img/porange.jpg"
btnwd1.background_normal="img/porange.jpg"
btnWeekDay.background_normal="img/porange.jpg"
btnwy1.background_normal="img/porange.jpg"
btnWeekYear.background_normal="img/porange.jpg"
btnwm1.background_normal="img/porange.jpg"
btnWeekMonth.background_normal="img/porange.jpg"
btnmm1.background_normal="img/porange.jpg"
btnmm2.background_normal="img/porange.jpg"
btnmy1.background_normal="img/porange.jpg"
btnmy2.background_normal="img/porange.jpg"
btnMon1.background_normal="img/porange.jpg"
btnann1.background_normal="img/porange.jpg"
btnAnnualYear.background_normal="img/porange.jpg"
b_scmainmenu.background_normal="img/porange.jpg"
logsTitle.background_normal="img/porange.jpg"
b_sc5menu.background_normal="img/porange.jpg"
recentTitle.background_normal="img/porange.jpg"
b_sc3menu.background_normal="img/porange.jpg"
buttonPopSave.background_normal="img/porange.jpg"
b_sc3.background_normal="img/porange.jpg"
titlewrite.background_normal="img/porange.jpg"
btnSlider.background_normal="img/porange.jpg"
buttonPopCp.background_normal="img/porange.jpg"
b_home.text="[color=fec47e]D[/color]"
lblHome.text="[color=fec47e][b]Home[/b][/color]"
def colorPPurple(self,*args):
color_val = 'ppurple'
cursor.execute('SELECT COUNT(*) FROM color_tbl')
color_count = cursor.fetchone()[0]
if color_count == 0:
cursor.execute("INSERT INTO color_tbl(color) VALUES('"+color_val+"')")
con.commit()
elif color_count >= 1:
cursor.execute("UPDATE color_tbl SET color='"+color_val+"'")
con.commit()
btnhelpmeok.background_normal="img/ppurple.jpg"
btnHelpGraph.background_normal="img/ppurple.jpg"
buttonPop.background_down="img/ppurple.jpg"
titlewrite.background_down="img/ppurple.jpg"
logsTitle.background_down="img/ppurple.jpg"
recentTitle.background_down="img/ppurple.jpg"
statsTitle.background_down="img/ppurple.jpg"
root6Title.background_down="img/ppurple.jpg"
root6Title.background_normal="img/ppurple.jpg"
buttonPopExit1.background_normal="img/ppurple.jpg"
buttonPopExit2.background_normal="img/ppurple.jpg"
buttonPop.background_normal="img/ppurple.jpg"
buttonPopAb.background_normal="img/ppurple.jpg"
buttonPopD.background_normal="img/ppurple.jpg"
buttonPopName.background_normal="img/ppurple.jpg"
buttonPopCpExit.background_normal="img/ppurple.jpg"
btnCP.background_normal="img/ppurple.jpg"
btnCons.background_normal="img/ppurple.jpg"
btnHelp.background_normal="img/ppurple.jpg"
btnColor.background_normal="img/ppurple.jpg"
btnD.background_normal="img/ppurple.jpg"
btnA.background_normal="img/ppurple.jpg"
btnLO.background_normal="img/ppurple.jpg"
mainbutton.background_normal="img/ppurple.jpg"
buttonPopName.background_normal="img/ppurple.jpg"
btnDaily.background_normal="img/ppurple.jpg"
btnWeekly.background_normal="img/ppurple.jpg"
btnMonthly.background_normal="img/ppurple.jpg"
btnAnnual.background_normal="img/ppurple.jpg"
graphButton.background_normal="img/ppurple.jpg"
b_sc4menu.background_normal="img/ppurple.jpg"
statsTitle.background_normal="img/ppurple.jpg"
btnd1.background_normal="img/ppurple.jpg"
btnd2.background_normal="img/ppurple.jpg"
btnDayDay.background_normal="img/ppurple.jpg"
btndy1.background_normal="img/ppurple.jpg"
btndy2.background_normal="img/ppurple.jpg"
btnYearDay.background_normal="img/ppurple.jpg"
btnmy11.background_normal="img/ppurple.jpg"
btnmy22.background_normal="img/ppurple.jpg"
btnMonthDay.background_normal="img/ppurple.jpg"
btnwd1.background_normal="img/ppurple.jpg"
btnWeekDay.background_normal="img/ppurple.jpg"
btnwy1.background_normal="img/ppurple.jpg"
btnWeekYear.background_normal="img/ppurple.jpg"
btnwm1.background_normal="img/ppurple.jpg"
btnWeekMonth.background_normal="img/ppurple.jpg"
btnmm1.background_normal="img/ppurple.jpg"
btnmm2.background_normal="img/ppurple.jpg"
btnmy1.background_normal="img/ppurple.jpg"
btnmy2.background_normal="img/ppurple.jpg"
btnMon1.background_normal="img/ppurple.jpg"
btnann1.background_normal="img/ppurple.jpg"
btnAnnualYear.background_normal="img/ppurple.jpg"
b_scmainmenu.background_normal="img/ppurple.jpg"
logsTitle.background_normal="img/ppurple.jpg"
b_sc5menu.background_normal="img/ppurple.jpg"
recentTitle.background_normal="img/ppurple.jpg"
b_sc3menu.background_normal="img/ppurple.jpg"
buttonPopSave.background_normal="img/ppurple.jpg"
b_sc3.background_normal="img/ppurple.jpg"
titlewrite.background_normal="img/ppurple.jpg"
btnSlider.background_normal="img/ppurple.jpg"
buttonPopCp.background_normal="img/ppurple.jpg"
b_home.text="[color=ba55d3]D[/color]"
lblHome.text="[color=ba55d3][b]Home[/b][/color]"
def colorClose(self,*args):
popupColor.dismiss()
popColorRemove()
if color_val =='red':
b_home.text="[color=ff0000]D[/color]"
lblHome.text="[color=ff0000][b]Home[/b][/color]"
elif color_val == 'blue':
b_home.text="[color=3b5999]D[/color]"
lblHome.text="[color=3b5999][b]Home[/b][/color]"
elif color_val == 'green':
b_home.text="[color=00ff00]D[/color]"
lblHome.text="[color=00ff00][b]Home[/b][/color]"
elif color_val == 'orange':
b_home.text="[color=ff4500]D[/color]"
lblHome.text="[color=ff4500][b]Home[/b][/color]"
elif color_val == 'purple':
b_home.text="[color=ba55d3]D[/color]"
lblHome.text="[color=ba55d3][b]Home[/b][/color]"
elif color_val == 'pred':
b_home.text="[color=ffb4bb]D[/color]"
lblHome.text="[color=ffb4bb][b]Home[/b][/color]"
elif color_val == 'pblue':
b_home.text="[color=88cafd]D[/color]"
lblHome.text="[color=88cafd][b]Home[/b][/color]"
elif color_val == 'pgreen':
b_home.text="[color=7ffe9a]D[/color]"
lblHome.text="[color=7ffe9a][b]Home[/b][/color]"
elif color_val == 'porange':
b_home.text="[color=fec47e]D[/color]"
lblHome.text="[color=fec47e][b]Home[/b][/color]"
elif color_val == 'ppurple':
b_home.text="[color=ba55d3]D[/color]"
lblHome.text="[color=ba55d3][b]Home[/b][/color]"
#--BINDINGS---------------------
root.bind(on_touch_down=callbacksc2) # button tap event
passimgok.bind(on_release=callbackmain)
passimgok.bind(on_release=popError)
passimg0.bind(on_press=p0)
passimg1.bind(on_press=p1)
passimg2.bind(on_press=p2)
passimg3.bind(on_press=p3)
passimg4.bind(on_press=p4)
passimg5.bind(on_press=p5)
passimg6.bind(on_press=p6)
passimg7.bind(on_press=p7)
passimg8.bind(on_press=p8)
passimg9.bind(on_press=p9)
passimgclr.bind(on_press=pclr)
buttonPop.bind(on_press=popup.dismiss)
b_write.bind(on_press=callbacksc3)
b_sc3menu.bind(on_release=callbackhome)
b_stats.bind(on_release=callbacksc4)
b_user.bind(on_press=callbacksc7)
b_logs.bind(on_release=callbackscmain)
b_scmainmenu.bind(on_release=callbackhome)
b_sc4menu.bind(on_release=callbackhome)
b_sc7menu.bind(on_release=callbackhome)
b_sc3.bind(on_press=callback_proctext)
#slider
btnSlider.bind(on_press=moodconfirm)
#logout
btnLO.bind(on_press=popexit)
buttonPopExit1.bind(on_press=exitclose)
buttonPopExit2.bind(on_press=popexitdismiss)
#about
buttonPopAb.bind(on_press=popaboutdismiss)
btnA.bind(on_press=popabout)
#error
buttonPopError.bind(on_press=poperrordismiss)
#devs
buttonPopD.bind(on_press=popdevdismiss)
btnD.bind(on_press=popdev)
#change pass
buttonPopCpExit.bind(on_press=popcpdismiss)
buttonPopCp.bind(on_press=popcpsave)
btnCP.bind(on_press=popcp)
btnColor.bind(on_press=popColor)
#dismissnote
buttonPopSave.bind(on_press=callbackhome)
#nameinput
buttonPopName.bind(on_press=popnamedismiss)
#save Button Pop Pin
buttonPopPin.bind(on_press=savepoppin)
#remove widgets
b_stats.bind(on_release=rWidgets)
#graph plot
btnWeekly.bind(on_release=weeklyGraphPlot)
btnDaily.bind(on_release=dailyGraphPlot)
btnMonthly.bind(on_release=monthlyGraphPlot)
btnAnnual.bind(on_release=annualGraphPlot)
b_stats.bind(on_release=startGraphPlot)
graphButton.bind(on_press=removeDWM1)
#Graph Buttons List
#Daily
btnDaily.bind(on_release=daily_month)
btnDaily.bind(on_release=daily_year)
btnDaily.bind(on_release=daily_day)
#weekly
btnWeekly.bind(on_release=weekly_month)
btnWeekly.bind(on_release=weekly_year)
btnWeekly.bind(on_release=weekly_day)
#monthly
btnMonthly.bind(on_release=monthly_month)
btnMonthly.bind(on_release=monthly_year)
#monthly
btnAnnualYear.bind(on_release=yearly_year)
#color
buttonColorRed.bind(on_press=colorRed)
buttonColorBlue.bind(on_press=colorBlue)
buttonColorGreen.bind(on_press=colorGreen)
buttonColorOrange.bind(on_press=colorOrange)
buttonColorPurple.bind(on_press=colorPurple)
buttonColorPRed.bind(on_press=colorPRed)
buttonColorPBlue.bind(on_press=colorPBlue)
buttonColorPGreen.bind(on_press=colorPGreen)
buttonColorPOrange.bind(on_press=colorPOrange)
buttonColorPPurple.bind(on_press=colorPPurple)
buttonPopColor.bind(on_press=colorClose)
#gender
buttonPopGender1.bind(on_press=popGenderRemove1)
buttonPopGender2.bind(on_press=popGenderRemove2)
# disable Back button on Android
Window.bind(on_keyboard=self.hook_keyboard)
#consultant Mode
btnCons.bind(on_release=ConsultantMode)
#help
btnHelp.bind(on_release=MMHelp)
return sm
if __name__ == "__main__": #run the class
MainApp().run()
con.commit()
| [
"noreply@github.com"
] | brunomarvss.noreply@github.com |
c3a805e35fb8e1e5293462c79a7b65bffc9c28a6 | 87f7d232723117ad4c72574898bda957d7adfe4d | /Unfinished Loop - Bug Fixing.py | 0a796fecfa5fa1b4dc2bd9f430211a66291eff95 | [] | no_license | Taras198700/Home_Work_S_S | 876f53c615fef440ef58b7d39cf896e1e778d5de | 081a7b17a67f803556027162b0058eccc1a27788 | refs/heads/master | 2022-12-16T20:30:11.888892 | 2020-09-09T18:13:16 | 2020-09-09T18:13:16 | 278,019,575 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 113 | py | def create_array(n):
res = []
i = 1
while i <= n:
res.append(i)
i += 1
return res | [
"homitsky_taras@ukr.net"
] | homitsky_taras@ukr.net |
717fdc2d540f242841bacdd7c2381591a288cc15 | 464f40c696af1028aee439c31938137f43f482f1 | /api/serializers.py | 9fda532152aa3dd7233279f1ccf9660a67302a36 | [] | no_license | MarkWengSTR/drf_todo_practice | d0062bbbc56fd9833a24b9439cd2e9bcb31b4a80 | 431c3173178683f50e5dd9df6eab038dc35452c6 | refs/heads/master | 2023-04-07T05:14:02.815232 | 2021-04-21T03:49:56 | 2021-04-21T03:49:56 | 360,025,275 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 184 | py | from rest_framework import serializers
from .models import Task
class TaskSerializerser(serializers.ModelSerializer):
class Meta:
model = Task
fields = '__all__'
| [
"bskin0330@gmail.com"
] | bskin0330@gmail.com |
24415f583157830a26bbdd97d7d14286cd332701 | 8b198614532b0fd6bc9861f4427299bcc2764c1e | /wsgi.py | 0b182b5796ca105033ef509ec1809d1c622e9bd6 | [] | no_license | summerandwinter/wxapppython | e9b0321b7b7c844eee39baf588b94c6a88f96351 | b345447260fcad01903771a38fbc66b7494c3389 | refs/heads/master | 2021-01-20T01:51:19.589571 | 2018-01-29T10:01:36 | 2018-01-29T10:01:36 | 89,334,998 | 27 | 8 | null | null | null | null | UTF-8 | Python | false | false | 1,144 | py | # coding: utf-8
from gevent import monkey
monkey.patch_all()
import os
import configparser
# 设置 Django 项目配置文件
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
import leancloud
from gevent.pywsgi import WSGIServer
from cloud import engine
cf = configparser.ConfigParser()
cf.read("config.conf")
os.environ["WX_APP_ID"] = cf.get("wx", "app_id")
os.environ["WX_APP_SECRET"] = cf.get("wx", "app_secret")
os.environ["WXA_APP_ID"] = cf.get("wxa", "app_id")
os.environ["WXA_APP_SECRET"] = cf.get("wxa", "app_secret")
os.environ["QINIU_ACCESS_KEY"] = cf.get("qiniu", "access_key")
os.environ["QINIU_SECRET_KEY"] = cf.get("qiniu", "secret_key")
os.environ["QINIU_ACCESS_URL"] = cf.get("qiniu", "access_url")
#import logging
#logging.basicConfig(level=logging.DEBUG)
APP_ID = os.environ['LC_APP_ID']
MASTER_KEY = os.environ['LC_APP_MASTER_KEY']
PORT = int(os.environ['LC_APP_PORT'])
leancloud.init(APP_ID, master_key=MASTER_KEY)
application = engine
if __name__ == '__main__':
# 只在本地开发环境执行的代码
server = WSGIServer(('localhost', PORT), application)
server.serve_forever()
| [
"xiaopihailaotou@gmail.com"
] | xiaopihailaotou@gmail.com |
a112cf2bb050b37acf7da7c8c2b2bbaca571d4d0 | 1a3333fda7f53f6feff0c00e6b623e02211d5b27 | /tests/buildPythonApplication/channels/root/pkgs/testPackage/src/hello/__main__.py | d1691dbc5968d2404c607e812794e10d293e8bac | [] | no_license | jD91mZM2/flox-nixexprs | 6a1b7ca40c6dc8e99ae01941d3cbf77c8c896fc3 | be50d9f2374719cdf0ac762e4442e4fdd77758c9 | refs/heads/master | 2023-03-01T17:43:18.550198 | 2020-11-27T14:24:22 | 2020-11-27T14:24:22 | 328,352,754 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 59 | py | from . import hello
if __name__ == '__main__':
hello()
| [
"contact@infinisil.com"
] | contact@infinisil.com |
44cea545571a791e10617e28cfcb824365481f53 | c5d4fe5f7d01edf095541171fe2610ff62da5104 | /Mission_to_Mars/app.py | 5875d052c0755302a450c5da2bd7dba8ff3fd363 | [] | no_license | thierzon/web-scraping-challenge | 37c24477584415a53a59dd6d59026e432409feaa | cc961977e618b535464e394adf4edc2f569dfbc1 | refs/heads/main | 2023-04-16T03:31:57.133064 | 2021-04-16T13:11:26 | 2021-04-16T13:11:26 | 347,530,493 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 891 | py | from flask import Flask, render_template, redirect
import pymongo
import scrape_mars
# Create an instance of Flask
app = Flask(__name__)
# Use PyMongo to establish Mongo connection
mongo = pymongo.MongoClient("mongodb://localhost:27017/mars_app")
# Route to render index.html template using data from Mongo
@app.route("/")
def home():
# Find one record of data from the mongo database
mars = mongo.db.mars.find_one()
# Return template and data
return render_template("index.html", mars=mars)
# Route that will trigger the scrape function
@app.route("/scrape")
def scrape():
# Run the scrape function
mars_data = scrape_mars.scrape()
# Update the Mongo database using update and upsert=True
mongo.db.mars.update({}, mars_data, upsert=True)
# Redirect back to home page
return redirect("/")
if __name__ == "__main__":
app.run(debug=True)
| [
"thierryzonneveld@gmail.com"
] | thierryzonneveld@gmail.com |
dc3187b27ca89c6c79e0c886c4bcc3dea7baa847 | 6f707329e5ef1bb56e74126a1ded82252013807b | /donation_site/urls.py | 51acd5a85b2f6bfe73b9b6b75c2d48992267333d | [] | no_license | DPBefumo/donation-site | dbe612745953805c521596bbd9c7ec7310dd4f5b | 5ae49a6d1d992b1c2b9f9b0ebb149382a70d1711 | refs/heads/main | 2022-12-25T12:06:29.106759 | 2020-10-07T20:18:23 | 2020-10-07T20:18:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 619 | py | from django.contrib import admin
from django.conf import settings
from django.urls import include, path
from core import views as core_views
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('', core_views.index, name='index'),
path('donate/', core_views.donate, name='donate'),
path('success/<str:args>', core_views.successPage, name='success'),
]
if settings.DEBUG:
import debug_toolbar
urlpatterns = [
path('__debug__/', include(debug_toolbar.urls)),
] + urlpatterns + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"DPBefumo@gmail.com"
] | DPBefumo@gmail.com |
73ddcb45b10df0f0c85f275e7a8190f7bd549bc2 | 677df89f5395f19b87b9f868da8c96f2c5ffbc5e | /code/pycs/client.py | 6afd66ec16bd58036a0f37d4f94cf7133e878ada | [] | no_license | vinnyzhaowenyu/mdnote | eefdffd8c526f2aa1893de3ec3891cc7452ea395 | d1f0835e31f46a6eeb9fe274778fdbac84f27570 | refs/heads/master | 2021-01-15T13:25:07.011643 | 2017-11-04T07:48:57 | 2017-11-04T07:48:57 | 92,654,000 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 535 | py | #!/usr/bin/env python
import socket
import os
ss=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
ss.connect(('127.0.0.1',8123))
#f=open('aa','wb')
ss.sendall('hello serverdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd')
os.system('sleep 1')
ss.send('EOF')
data=ss.recv(1024)
print "server dafu %s"%data
ss.close()
| [
"admin@zhaowenyu.com"
] | admin@zhaowenyu.com |
44a7c1943eb7b978a98e5947ac8a6925fbb719a5 | 5f27bc1a0460a078f6fe33a544f494a5dff7f452 | /script/puzzle_test_backup_D_20_1026/D_20_1020_vrepMoveit_jaco1.py | 9c282275c0404bbd785b9da6c406c8b27613031f | [] | no_license | A-Why-not-fork-repositories-Good-Luck/arm_move | 3e381f0310265f47da14beaac136c358fb318f92 | e2e6182cfd93df1935bd3b8e9158134964dc44fa | refs/heads/master | 2023-03-15T18:37:17.337770 | 2020-11-18T06:46:06 | 2020-11-18T06:46:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 32,748 | py | #!/usr/bin/env python
GRID_SIZE = 0.01
G2P_SIZE = 100
import rospy
import numpy as np
import tf
import matplotlib.pyplot as plt
import copy
import time
import D_20_1020_custom_function as CUF
import D_20_1020_client_function as CLF
from D_20_1020_VFHplus_change_radius import influence
from D_20_1020_envClass4altest import EnvInfo as EI
from D_20_1020_envClass4altest import CanInfo as CI
from arm_move.srv._box_info_srv import *
from arm_move.srv._arm_move_srv import *
from arm_move.srv._work_start_srv import *
from arm_move.srv._att_hand_box_srv import *
from arm_move.srv._arm_goalJoint_srv import *
import timeit
def go_home():
# 2020.08.05 SH
move_group_name = 'panda_arm'
home_joint = [-0.7912285295667355, -1.7449968666946676, 1.6255344777637362, -2.9980328554805484, 1.552371742049853, 1.345932931635115, 0.8050298552807971]
CLF.move_joints_client_rad(move_group_name, home_joint)
def go_ready():
# 2020.08.05 SH
move_group_name = 'panda_arm'
home_joint = [-1.6238, -1.6078, -0.2229, -2.6057, 1.4646, 1.4325, -0.2159]
CLF.move_joints_client_rad(move_group_name, home_joint)
def hand_open():
# 2020.08.05 SH
CLF.panda_gripper_open()
def pick_and_place(env, pick_pose, pick_object_name, place_pose):
print"\tPICK AND PLACE ACTION => rearrange", pick_object_name
env.pick(env.obs_pos, pick_pose, place_pose)
CLF.att_box_client('hand', pick_object_name)
env.go_ready()
env.place(env.obs_pos, place_pose)#, vrep_env.get_current_joint(joint_names_jaco))
CLF.det_box_client(pick_object_name, [0, 0, 0], [0, 0, 0, 0], [0, 0, 0], 'red')
CLF.add_mesh_client(pick_object_name, [place_pose[0], place_pose[1], 0.605], [0.0, 0.0, 0.0, 0.0], [0.001, 0.001, 0.001])
env.go_ready()
print"\tEND PICK AND PLACE ACTION"
#
# # ret_pick_pose = env.pick(env.obs_pos, pick_pose, place_pose)
# env.move_to([[ret_pick_pose[0][0] - 0.03, ret_pick_pose[0][1], ret_pick_pose[0][2]], ret_pick_pose[1]])
#
# env.move_to([[ret_pick_pose[0][0] + 0.05, ret_pick_pose[0][1], ret_pick_pose[0][2]], ret_pick_pose[1]])
#
#
# env.pre_place(env.obs_pos, place_pose, vrep_env.get_current_joint(joint_names_jaco))
# ret_place_pose = env.place(env.obs_pos, place_pose, vrep_env.get_current_joint(joint_names_jaco))
# env.move_to([[ret_place_pose[0][0] - 0.1, ret_place_pose[0][1], ret_place_pose[0][2]], ret_place_pose[1]])
#
# CLF.det_box_client(pick_object_name, [env.object_z, -sel_can_pos[1], sel_can_pos[0]], obstacle_info[env.ore_order[0]][1], obstacle_info[env.ore_order[0]][2], 'blue')
#
# env.move_to([[ret_place_pose[0][0] + 0.1, ret_place_pose[0][1], ret_place_pose[0][2]], ret_place_pose[1]])
# # CLF.add_box_client(obstacle_name[env.ore_order[0]], [env.object_z, -sel_can_pos[1], sel_can_pos[0]], obstacle_info[env.ore_order[0]][1], obstacle_info[env.ore_order[0]][2], 'blue')
def test_algorithm(method, data_in):
# method
# "where" : icra2020 "where to relocate?"
# "far" : farthest method
go_ready()
hand_open()
print "start with method:", method
print "\n***STEP 1*** : env setting"
obj_h = -0.0
obj_z = 0.605 + obj_h#+ obj_h/2.0
target_name = ['target']
target_info = []
target_info.append([[data_in[0][1], -data_in[0][0], obj_z], [0, 0, 0, 0], [0.001, 0.001, 0.001]]) # for the add_mesh
# target_info.append([[data_in[0][0], data_in[0][1], obj_z], [0, 0, 0, 0], [0.06, 0.06, 0.12]]) # for the add_box
# target_info[i][0][2] = target_info[i][0][2] + 0.04
# target_info[i][2][2] = target_info[i][2][2] + 0.08
# obstacle_name = []
# for i in range(len(data_in[1])):
obstacle_name = [str(i).zfill(2) for i in range(len(data_in[1]))]
# obstacle_name.append('obstacle'+str(i))
# print obstacle_name
# obstacle_name = ['obstacle0', 'obstacle1', 'obstacle2', 'obstacle3', 'obstacle4', 'obstacle5', 'obstacle6', 'obstacle7', 'obstacle8']
obstacle_info = []
# [[obj_pos.x, obj_pos.y, obj_pos.z], [obj_ori_q.x, obj_ori_q.y, obj_ori_q.z, obj_ori_q.w], [obj_scale.x, obj_scale.y, obj_scale.z]]
for i in range(len(obstacle_name)):
obstacle_info.append([[data_in[1][i][1], -data_in[1][i][0], obj_z], [0, 0, 0, 0], [0.001, 0.001, 0.001]]) # for the add_mesh
# obstacle_info.append([[data_in[1][i][0], data_in[1][i][1], obj_z], [0, 0, 0, 0], [0.06, 0.06, 0.12]]) # for the add_box
# obstacle_info[i][0][2] = obstacle_info[i][0][2] + 0.04
# obstacle_info[i][2][2] = obstacle_info[i][2][2] + 0.08
print "\tNo. of obstacles:", len(obstacle_name)
env_name = ['shelf_gazebo']#2020.10.21: puzzle test, 'Jaco_base', 'table_ls', 'table_rs', 'table_us', 'table_bs']
env_info = []
base_position = [0.8637, 0, 0.0 + obj_h]
base_quaternion = [0, 0, 0, 1]
base_scale = [0.001, 0.001, 0.001]
CLF.add_mesh_client('shelf_gazebo', base_position, base_quaternion, base_scale)
ws_pos = [0.8637+0.5*0.45+0.03, 0.0, 0.0 + obj_h]
ws_rot = [0.0, 0.0, 0.0, 0.0]
ws_scale = [0.45, 0.91, 0.0]
env_info.append([ws_pos, ws_rot, ws_scale])
# for i in range(len(env_name)):
# env_info.append(vrep_env.get_object_info(env_name[i]))
# if i > 1:
# env_info[i][2][0] = env_info[i][2][0]+0.01
# env_info[i][2][1] = env_info[i][2][1]+0.01
# env_info[i][2][2] = env_info[i][2][2]+0.01
for i in range(len(obstacle_info)):
CLF.add_mesh_client(obstacle_name[i], obstacle_info[i][0], obstacle_info[i][1], obstacle_info[i][2])
# CLF.add_box_client(obstacle_name[i], obstacle_info[i][0], obstacle_info[i][1], obstacle_info[i][2], 'red')
for i in range(len(target_info)):
CLF.add_mesh_client(target_name[i], target_info[i][0], target_info[i][1], target_info[i][2])
# CLF.add_box_client(target_name[i], target_info[i][0], target_info[i][1], target_info[i][2], 'green')
# for i in range(len(env_info)):
# # CLF.add_mesh_client(env_name[i], env_info[i][0], env_info[i][1], env_info[i][2])
# CLF.add_box_client(env_name[i], env_info[i][0], env_info[i][1], env_info[i][2], 'gray')
ws = env_info[0]
# print"ws info", env_info[0]
ws_w = int(round(ws[2][0]*100)) # x-axes in Rviz
ws_d = int(round(ws[2][1]*100)) # y-axes in Rviz
print "\tRviz ws width, depth:", ws_w, ws_d
# GRID_SIZE = 0.01
ws_zero_pos = [round(ws[0][0] - ws[2][0]*0.5, 2), round(ws[0][1] - ws[2][1]*0.5, 2)]
print "\tRviz ws cen pos:", ws[0]
print "\tRviz ws, zero pos:", ws_zero_pos
# ws_w, ws_d = 100, 100 # get table size in the v-rep
ws_cen = [-ws[0][1], ws[0][0]]
rob_pos = [0.0, 0.0]
OBJ_R = 0.035
env = EI(rob_pos, ws_w, ws_d, ws_cen, ws_zero_pos, grid_size=GRID_SIZE, wall_r=OBJ_R)
env.set_env(obstacle_name, obstacle_info, target_name, target_info)
env.update_env(env.obs_pos, env.obs_grid)
print "\trearrangement order:", env.ore_order
if len(env.ore_order) == 0:
print "end rearrangement"
pick_and_place(env, env.tar_pos, 'target', env.tar_pos)
time.sleep(1)
# CUF.draw_grid_info(env.grid_ori)
# plt.show()
space_err = 0
rearr_cnt = 0
# env.get_env(obs_r, tar_r, min_ore)
algorithm_start = timeit.default_timer()
env.get_max_can(env.grid_ori, bt_num=1, trial_num=1000) # We get "grid_max_can", "can_grid"
# env.get_env_case1()
# env.get_max_can_case1()
'''
Make object info!
Type : target, obstacle, candidate
Info : pos, grid, A, BT, b, ORC, ORE
'''
can_info = []
for i in range(len(env.can_pos)):
can_info.append((CI('candidate', env.can_pos[i], env.can_grid[i])))
# check env info got right
# if 1:
# print "\n# of obstacles", len(env.obs_pos), "\n# of candidates", len(env.can_pos)
'''
GET candidates info
'''
t_ore_order = copy.deepcopy(env.ore_order)
# for i in range(len(can_info)):
# print "can", i, ":", can_info[i].pos
# CUF.draw_grid_info(env.grid_ori)
# CUF.draw_grid_info(env.grid_del)
# CUF.draw_grid_info(env.grid_max_can)
# for c_i in range(len(can_info)):
# plt.text(can_info[c_i].grid[0], can_info[c_i].grid[1], 'Can' + str(c_i), fontsize=20, ha='center', bbox=dict(facecolor='pink', alpha=0.8))
# for o_i in range(len(env.obs_grid)):
# plt.text(env.obs_grid[o_i][0], env.obs_grid[o_i][1], 'Obs' + str(o_i), fontsize=20, ha='center', bbox=dict(facecolor='red', alpha=0.8))
# plt.show()
method = 'mine'
method = 'far'
method = 'deep'
while len(env.ore_order): # this while loop is for the algorithm
print"\n***STEP 2*** REARRANGE ORDER => :", env.ore_order
print"\tCheck C.A"
# Check C.A : just next step
t_can_info = []
in_can_info = copy.deepcopy(can_info)
in_obs_pos = copy.deepcopy(env.obs_pos)
in_obs_pos.remove(env.obs_pos[env.ore_order[0]])
CLF.del_box_client(obstacle_name[env.ore_order[0]])
t_can_info.append(env.get_can_A(in_can_info, in_obs_pos, env.tar_pos))
CLF.add_mesh_client(obstacle_name[env.ore_order[0]], obstacle_info[env.ore_order[0]][0], obstacle_info[env.ore_order[0]][1], obstacle_info[env.ore_order[0]][2])
# CLF.add_box_client(obstacle_name[env.ore_order[0]], obstacle_info[env.ore_order[0]][0], obstacle_info[env.ore_order[0]][1], obstacle_info[env.ore_order[0]][2], 'red')
# Check C.BT
in_can_info = copy.deepcopy(t_can_info[0])
in_can_info = env.init_BT(in_can_info) # init the BT value of candidates to '0'
in_obs_pos = copy.deepcopy(env.obs_pos)
for ore_i in range(len(env.ore_order)): # after rearrange all ORE
in_obs_pos.remove(env.obs_pos[env.ore_order[ore_i]])
CLF.del_box_client(obstacle_name[env.ore_order[ore_i]])
t_can_info[0] = env.get_can_BT(in_can_info, in_obs_pos, env.tar_pos)
for ore_i in range(len(env.ore_order)):
CLF.add_mesh_client(obstacle_name[env.ore_order[ore_i]], obstacle_info[env.ore_order[ore_i]][0], obstacle_info[env.ore_order[ore_i]][1], obstacle_info[env.ore_order[ore_i]][2])
# CLF.add_box_client(obstacle_name[env.ore_order[ore_i]], obstacle_info[env.ore_order[ore_i]][0], obstacle_info[env.ore_order[ore_i]][1], obstacle_info[env.ore_order[ore_i]][2], 'red')
# Check C.BO : BO : other ORE, just before target
in_can_info = copy.deepcopy(t_can_info[0])
in_obs_pos = copy.deepcopy(env.obs_pos)
for ore_i in range(len(env.ore_order)): # after rearrange all ORE
in_obs_pos.remove(env.obs_pos[env.ore_order[ore_i]])
CLF.del_box_client(obstacle_name[env.ore_order[ore_i]])
for j in range(len(env.ore_order)): # check other ORE just before target
if j > i:
t_can_info[0] = env.get_can_BT(in_can_info, in_obs_pos,env.obs_pos[env.ore_order[j]])
for ore_i in range(len(env.ore_order)):
CLF.add_mesh_client(obstacle_name[env.ore_order[ore_i]], obstacle_info[env.ore_order[ore_i]][0], obstacle_info[env.ore_order[ore_i]][1], obstacle_info[env.ore_order[ore_i]][2])
# CLF.add_box_client(obstacle_name[env.ore_order[ore_i]], obstacle_info[env.ore_order[ore_i]][0], obstacle_info[env.ore_order[ore_i]][1], obstacle_info[env.ore_order[ore_i]][2], 'red')
s_v = []
s_v_index = []
for i in range(1):
in_can_info = copy.deepcopy(t_can_info[i])
ret_can, ret_index = env.get_cf(in_can_info)
s_v.append(ret_can)
s_v_index.append(ret_index)
# print "\n step", i, " has # of cf pos:", len(t_cf[i]), "index", t_cf_index[i]
print"\n***STEP 3*** : find valid candidates"
print "\ts_v:", len(s_v[0]), "\n\ts_v_index:", len(s_v_index[0])
# for i in range(len(s_v[0])):
# print "s_v index:", [i], s_v_index[0][i]
# See the feasibile candidate
# for i in range(len(t_cf[0])):
# print "\n Our Cf pos:", i, t_cf[0][i].pos
# See if this case if case0 or case1
# print "t_cf:", t_cf, "order", env.ore_order
if len(s_v[0]) >= len(env.ore_order):
print "\n\tenough candidate spots"
t_b = []
for i in range(1):
in_obs_pos = copy.deepcopy(env.obs_pos)
for ore_i in range(i + 1):
in_obs_pos.remove(env.obs_pos[env.ore_order[ore_i]])
t_b.append(env.get_cf_b(s_v[i], in_obs_pos))
# print "\n step", i, " has cf b:", t_b[i]
# draw_figs = 1
# if draw_figs == 1:
# for c_i in range(len(can_info)):
# plt.text(can_info[c_i].grid[0], can_info[c_i].grid[1], 'Can' + str(c_i), fontsize=20, ha='center', bbox=dict(facecolor='pink', alpha=0.8))
# for o_i in range(len(env.obs_grid)):
# plt.text(env.obs_grid[o_i][0], env.obs_grid[o_i][1], 'Obs' + str(o_i), fontsize=20, ha='center', bbox=dict(facecolor='red', alpha=0.8))
#
# for step_i in range(1):
# step_grid = copy.deepcopy(env.grid_act)
# step_obs_grid = copy.deepcopy(env.obs_grid)
# for ore_i in range(step_i + 1):
# step_obs_grid.remove(env.obs_grid[env.ore_order[ore_i]])
# for i in range(len(step_obs_grid)):
# step_grid = CUF.obstacle_circle(step_grid, [round(step_obs_grid[i][0], 2), round(step_obs_grid[i][1], 2), env.obs_r[i]], 2)
# for ci in range(len(can_info)):
# xi, yi = can_info[ci].grid
# step_grid = CUF.obstacle_circle(step_grid, [xi, yi, 0.04], 30)
#
# step_grid = CUF.obstacle_circle(step_grid, [env.tar_grid[0], env.tar_grid[1], tar_r], 4) # target
#
# for cf_i in range(len(t_b[step_i])):
# xi = (t_cf[step_i][cf_i].pos[0] - env.ws_zero[0]) * G2P_SIZE
# yi = (t_cf[step_i][cf_i].pos[1] - env.ws_zero[1]) * G2P_SIZE
# step_grid = CUF.obstacle_circle(step_grid, [xi, yi, 0.04], 3)
#
# CUF.draw_grid_info(step_grid)
#
# for cf_i in range(len(t_b[step_i])):
# xi = (t_cf[step_i][cf_i].pos[0] - env.ws_zero[0]) * G2P_SIZE
# yi = (t_cf[step_i][cf_i].pos[1] - env.ws_zero[1]) * G2P_SIZE
# plt.text(xi, yi, 'b=' + str(t_b[step_i][cf_i]), fontsize=20, ha='center', bbox=dict(facecolor='pink', alpha=0.8))
# for ci in range(len(t_can_info[step_i])):
# plt.text(t_can_info[step_i][ci].grid[0], t_can_info[step_i][ci].grid[1] - 2.0, '[A, BT] :' + str([t_can_info[step_i][ci].A, t_can_info[step_i][ci].BT]), fontsize=10, ha='center', bbox=dict(facecolor='pink', alpha=0.8))
# for o_i in range(len(env.obs_grid)):
# plt.text(env.obs_grid[o_i][0], env.obs_grid[o_i][1], 'Obs' + str(o_i), fontsize=20, ha='center', bbox=dict(facecolor='red', alpha=0.8))
# plt.title('step' + str(step_i) + " obs: " + str(env.ore_order[step_i]) + " rearranged")
elif len(s_v[0]) < len(env.ore_order):
print "\n\tnot enough candidate spots"
# print "Since we meet condition: N(CF) < N(ORE) by", len(t_cf[0]), "<", len(env.ore_order), ",\nwe have to remove additional obstacles."
## step1 : "get t_cp", check candidates which have A = 0 and BT = 0
## This means that a candidate is not reachable and it does not block the target object
# Check A for this environment state
in_can_info = copy.deepcopy(can_info)
in_obs_pos = copy.deepcopy(env.obs_pos)
t_can_add = copy.deepcopy(env.get_can_A(in_can_info, in_obs_pos, env.tar_pos))
s_e = [] # s_e: extra candidate spots
in_can_info = copy.deepcopy(t_can_add)
ret_can, ret_index = env.get_cp(in_can_info)
print "\t# of OR'", len(ret_can)
t_s_e = ret_can
t_s_e_index = ret_index
# print "t_cp:", len(t_cp), "index", t_cp_index
# for i in range(len(t_cp)):
# print "\n Our Cp:", i, t_cp[i].pos
if len(t_s_e) == 0:
print "\tno possible extra candidate exist"
space_err = 1
break
# step2 : check c_ore for each cp and pick min of it
t_s_r = [] # s_r: candidate spot relocate plan
in_can_info = copy.deepcopy(t_s_e)
# tmp_order_time_start = timeit.default_timer()
# tmp_order_time_start2 = time.clock()
t_s_r = env.get_c_ore(in_can_info)
# tmp_order_time_end = timeit.default_timer()
# tmp_order_time_end2 = time.clock()
# order_time = order_time + tmp_order_time_end - tmp_order_time_start
# order_time2 = order_time2 + tmp_order_time_end2 - tmp_order_time_start2
# order_cnt = order_cnt + 100 * len(t_s_e)
# print "\n"
# for i in range(len(t_cp)):
# print "cp", t_cp[i].pos, "\nc_ore", c_ore[i]
s_r = []
s_e_index = []
print "\n"
for i in range(len(t_s_e)):
print "can", t_s_e_index[i], "grid:", t_s_e[i].grid, ", s_r:", t_s_r[i]
for i in range(len(t_s_e)):
if t_s_r[i] != []:
s_e.append(t_s_e[i])
s_r.append(t_s_r[i])
s_e_index.append(t_s_e_index[i])
# tmp_se = copy.deepcopy(s_e)
# tmp_sr = copy.deepcopy(s_r)
# emp_sr = []
# for i in range(len(s_e)):
# if s_r[i] == []:
# print "remove empty s_e", i
# emp_sr.append(i)
#
# print "tmp se:", tmp_se, "\ntmp sr", tmp_sr
# for i in range(len(emp_sr)):
#
# print "tmp_se[emp_sr[i]]", tmp_se[emp_sr[i]].pos
# print "tmp_sr[emp_sr[i]]", tmp_sr[emp_sr[i]]
# s_e.remove(tmp_se[emp_sr[i]])
# s_r.remove(tmp_sr[emp_sr[i]])
while len(s_e):
print "# of s_e:", len(s_e), s_r
print "\n"
for i in range(len(s_e)):
print "can", s_e_index[i], "pos:", s_e[i].pos, ", s_r:", s_r[i]
min_s_r = CUF.min_len_list(s_r)
print "\nmin sr:", min_s_r
#
# print "picked ci index:", t_cp.index(t_cp[c_ore.index(min_c_ore)])
# print "picked ci address:", copy.deepcopy(t_cp[c_ore.index(min_c_ore)]).pos
cp = copy.deepcopy(s_e[s_r.index(min_s_r)])
# print "selected cp pos", cp.pos
## step3 : "get t_cf", check candidates which have A = 1 and BT' = 0
## Check A for this environment state T' is t_cp_i
in_can_info = copy.deepcopy(can_info)
in_obs_pos = copy.deepcopy(env.obs_pos)
in_tar_pos = copy.deepcopy(cp.pos)
t_can_add = copy.deepcopy(env.get_can_A(in_can_info, in_obs_pos, env.tar_pos))
# Check C.BT for this environment state
in_can_info = copy.deepcopy(t_can_add)
in_can_info = env.init_BT(in_can_info) # init the BT value of candidates to '0'
in_obs_pos = copy.deepcopy(env.obs_pos)
sorted_min_s_r = copy.deepcopy(min_s_r)
sorted_min_s_r.sort(reverse=True)
print "sorted min_s_r:", sorted_min_s_r
if sorted_min_s_r[0] == len(env.obs_pos): # if OR' has o_t ! remove s_e
print "o_t is in OR'"
s_e.remove(s_e[s_r.index(min_s_r)])
s_e_index.remove(s_e_index[s_r.index(min_s_r)])
s_r.remove(s_r[s_r.index(min_s_r)])
else:
for ore_i in range(len(min_s_r)): # after rearrange all OR'
in_obs_pos.remove(in_obs_pos[sorted_min_s_r[ore_i]])
CLF.del_box_client(obstacle_name[sorted_min_s_r[ore_i]])
in_tar_pos = copy.deepcopy(cp.pos)
t_can_add = env.get_can_BT(in_can_info, in_obs_pos, in_tar_pos)
for ore_i in range(len(min_s_r)): # after rearrange all OR'
CLF.add_box_client(obstacle_name[sorted_min_s_r[ore_i]], obstacle_info[sorted_min_s_r[ore_i]][0], obstacle_info[sorted_min_s_r[ore_i]][1], obstacle_info[sorted_min_s_r[ore_i]][2], 'red')
# for i in range(len(t_can_add)):
# print "can", i, "A:", t_can_add[i].A, "B:", t_can_add[i].BT
s_e_v = []
s_v_index = []
in_can_info = copy.deepcopy(t_can_add)
ret_can, ret_index = env.get_cf(in_can_info)
s_e_v.append(ret_can)
s_v_index.append(ret_index)
print "s_e_v: ", s_e_v
for i in range(len(s_e_v[0])):
print s_e_v[0][i].grid
if len(s_e_v[0]) >= len(min_s_r) - 1:
print "this se is possible"
if len(min_s_r) == 1:
print "only one move needed"
# t_can_info = []
# for i in range(len(env.ore_order)):
# in_can_info = copy.deepcopy(can_info)
# in_obs_pos = copy.deepcopy(env.obs_pos)
# for ore_i in range(i + 1):
# if min_s_r[0] != env.ore_order[ore_i]:
# in_obs_pos.remove(env.obs_pos[env.ore_order[ore_i]])
# in_obs_pos.remove(env.obs_pos[min_s_r[0]])
# t_can_info.append(env.get_can_A(in_can_info, in_obs_pos, env.tar_pos))
s_v = [[s_e[s_r.index(min_s_r)]]]
s_v_index = [[s_e_index[s_r.index(min_s_r)]]]
# print "se v:", s_v, s_v[0], s_v[0][0], s_v[0][0].pos
# for i in range(len(env.ore_order)):
# add_can_info = copy.deepcopy(t_can_info[i])
# ret_can, ret_index = env.get_cf(add_can_info)
# s_v.append(ret_can)
# s_v_index.append(ret_index)
t_b = [[0]]
# for i in range(1):
# in_obs_pos = copy.deepcopy(env.obs_pos)
# for ore_i in range(i+1):
# in_obs_pos.remove(env.obs_pos[env.ore_order[ore_i]])
# t_b.append(env.get_cf_b(s_v[i], in_obs_pos))
# # print "\n step", i, " has cf b:", t_b[i]
break # for out s_e loop
else:
t_b = []
in_obs_pos = copy.deepcopy(env.obs_pos)
for ore_i in range(1):
in_obs_pos.remove(env.obs_pos[min_s_r[ore_i]])
t_b.append(env.get_cf_b(s_e_v[0], in_obs_pos))
s_v[0] = s_e_v[0]
break # for out s_e loop
else: # s_e[s_r.index(min_s_r)]
print "\nremove",
print "s_e:", s_e
print "s_r:", s_r
print "s_e_index:", s_e_index
s_e.remove(s_e[s_r.index(min_s_r)])
s_e_index.remove(s_e_index[s_r.index(min_s_r)])
s_r.remove(s_r[s_r.index(min_s_r)])
if len(s_e) == 0:
# print "no possible extra candidate exist"
break
env.ore_order = min_s_r
# draw_figs = 1
# if draw_figs == 1:
# for c_i in range(len(can_info)):
# plt.text(can_info[c_i].grid[0], can_info[c_i].grid[1], 'Can' + str(c_i), fontsize=20, ha='center', bbox=dict(facecolor='pink', alpha=0.8))
# for o_i in range(len(env.obs_grid)):
# plt.text(env.obs_grid[o_i][0], env.obs_grid[o_i][1], 'Obs' + str(o_i), fontsize=20, ha='center', bbox=dict(facecolor='red', alpha=0.8))
#
# step_i = 0
# step_grid = copy.deepcopy(env.grid_act)
# step_obs_grid = copy.deepcopy(env.obs_grid)
# step_obs_grid.remove(env.obs_grid[env.ore_order[0]])
# for i in range(len(step_obs_grid)):
# # print "i:", i, "step_obs_grid [i]:", step_obs_grid[i]
# step_grid = CUF.obstacle_circle(step_grid, [round(step_obs_grid[i][0], 2), round(step_obs_grid[i][1], 2), env.obs_r[i]], 2)
# for ci in range(len(can_info)):
# xi, yi = can_info[ci].grid
# step_grid = CUF.obstacle_circle(step_grid, [xi, yi, 0.04], 30)
#
# step_grid = CUF.obstacle_circle(step_grid, [env.tar_grid[0], env.tar_grid[1], tar_r], 4) # target
#
# for cf_i in range(len(t_b[step_i])):
# xi = (t_cf[step_i][cf_i].pos[0] - env.ws_zero[0]) * G2P_SIZE
# yi = (t_cf[step_i][cf_i].pos[1] - env.ws_zero[1]) * G2P_SIZE
# step_grid = CUF.obstacle_circle(step_grid, [xi, yi, 0.04], 3)
#
# CUF.draw_grid_info(step_grid)
#
# for cf_i in range(len(t_b[step_i])):
# xi = (t_cf[step_i][cf_i].pos[0] - env.ws_zero[0]) * G2P_SIZE
# yi = (t_cf[step_i][cf_i].pos[1] - env.ws_zero[1]) * G2P_SIZE
# plt.text(xi, yi, 'b=' + str(t_b[step_i][cf_i]), fontsize=20, ha='center', bbox=dict(facecolor='pink', alpha=0.8))
# for ci in range(len(t_can_info[step_i])):
# plt.text(t_can_info[step_i][ci].grid[0], t_can_info[step_i][ci].grid[1] - 2.0, '[A, BT] :' + str([t_can_info[step_i][ci].A, t_can_info[step_i][ci].BT]), fontsize=10, ha='center', bbox=dict(facecolor='pink', alpha=0.8))
# for o_i in range(len(env.obs_grid)):
# plt.text(env.obs_grid[o_i][0], env.obs_grid[o_i][1], 'Obs' + str(o_i), fontsize=20, ha='center', bbox=dict(facecolor='red', alpha=0.8))
# plt.title('step' + str(step_i) + " obs: " + str(env.ore_order[step_i]) + " rearranged")
if space_err:
print "no possible extra candidate exist"
break
# move obstacle to can(min(b))
# print "s_v", s_v
# print "s_v[0]", s_v[0]
# print "s_v[0][0]", s_v[0][0]
# print "s_v[0][0].pos", s_v[0][0].pos
print "\tt_b[0]", t_b[0]
find_b = copy.deepcopy(t_b[0])
# print "move to c_", find_b.index(min(find_b))
if method == 'far':
t_sel_can_index = [i for i in range(len(find_b))]
elif method == 'deep':
t_sel_can_index = [i for i in range(len(find_b))]
elif method == 'mine':
t_sel_can_index = [i for i in range(len(find_b)) if find_b[i] == min(find_b)]
t_sel_can_dist = []
# print "\ntar grid: ", env.tar_grid
# print "\ntar pos: ", env.tar_pos
print "\tt sel can index", t_sel_can_index
for i in range(len(t_sel_can_index)):
# print "t_cf grid x,y:", t_sel_can_index[i], t_cf[0][t_sel_can_index[i]].grid[0], t_cf[0][t_sel_can_index[i]].grid[1]
# print "t_cf pos x,y:", t_sel_can_index[i], s_v[0][t_sel_can_index[i]].pos[0], s_v[0][t_sel_can_index[i]].pos[1]
if method == 'deep':
t_sel_can_dist.append(np.sqrt((env.rob_pos[0] - s_v[0][t_sel_can_index[i]].pos[0]) ** 2 + (env.rob_pos[1] - s_v[0][t_sel_can_index[i]].pos[1]) ** 2))
else:
t_sel_can_dist.append(np.sqrt((env.tar_pos[0] - s_v[0][t_sel_can_index[i]].pos[0]) ** 2 + (env.tar_pos[1] - s_v[0][t_sel_can_index[i]].pos[1]) ** 2))
# print "t sel can dist", t_sel_can_dist
sel_can_index = t_sel_can_index[t_sel_can_dist.index(max(t_sel_can_dist))]
# print "sel can index", sel_can_index
sel_can_pos = can_info[s_v_index[0][sel_can_index]].pos
sel_can_grid = can_info[s_v_index[0][sel_can_index]].grid
sel_obs_pos = env.obs_pos[env.ore_order[0]]
sel_obs_grid = env.obs_grid[env.ore_order[0]]
env.obs_pos[env.ore_order[0]] = sel_can_pos
env.obs_grid[env.ore_order[0]] = sel_can_grid
can_info[s_v_index[0][sel_can_index]].pos = sel_obs_pos
can_info[s_v_index[0][sel_can_index]].grid = sel_obs_grid
# tmp_order_time_start = timeit.default_timer()
# tmp_order_time_start2 = time.clock()
# env.pick_n_place()
# CLF.add_box_client(obstacle_name[env.ore_order[0]], [env.object_z, -sel_can_pos[1], sel_can_pos[0]], obstacle_info[env.ore_order[0]][1], obstacle_info[env.ore_order[0]][2], 'blue')
pick_and_place(env, sel_obs_pos, obstacle_name[env.ore_order[0]], env.obs_pos[env.ore_order[0]])
# time.sleep(1)
# obstacle_info = []
# for i in range(len(obstacle_name)):
# obstacle_info.append(vrep_env.get_object_info(obstacle_name[i]))
# # obstacle_info[i][0][2] = obstacle_info[i][0][2] + 0.04
# # obstacle_info[i][2][2] = obstacle_info[i][2][2] + 0.08
# for i in range(len(obstacle_info)):
# CLF.add_box_client(obstacle_name[i], obstacle_info[i][0], obstacle_info[i][1], obstacle_info[i][2], 'red')
# env.set_env(obstacle_name, obstacle_info, target_name, target_info)
# home_joint = [3.1415927410125732, 4.537856101989746, 5.93411922454834, -0.6108652353286743, 1.7453292608261108, -0.5235987901687622]
#
# CLF.move_joints_client_rad('arm', home_joint)
env.update_env(env.obs_pos, env.obs_grid)
# tmp_order_time_end = timeit.default_timer()
# order_time = order_time + tmp_order_time_end - tmp_order_time_start
# order_time2 = order_time2 + tmp_order_time_end2 - tmp_order_time_start2
# order_cnt = order_cnt + 1
rearr_cnt = rearr_cnt + 1
if env.order_error_flag == 0:
print "\nretry for another environment"
space_err = 1
break
print "after move order is:", env.ore_order
# CUF.draw_grid_info(env.grid_ori)
# for c_i in range(len(can_info)):
# plt.text(can_info[c_i].grid[0], can_info[c_i].grid[1], 'Can' + str(c_i), fontsize=20, ha='center', bbox=dict(facecolor='pink', alpha=0.8))
# for o_i in range(len(env.obs_grid)):
# plt.text(env.obs_grid[o_i][0], env.obs_grid[o_i][1], 'Obs' + str(o_i), fontsize=20, ha='center', bbox=dict(facecolor='red', alpha=0.8))
if len(env.ore_order) == 0:
print "end rearrangement"
pick_and_place(env, env.tar_pos, 'target', env.tar_pos)
time.sleep(1)
# plt.title('rearrangement finished')
break
# else:
# plt.title('after rearrngement')
# plt.show()
pick_and_place(env, env.tar_pos, 'target', env.tar_pos)
time.sleep(1)
algorithm_end = timeit.default_timer()
tot_time = algorithm_end - algorithm_start
print "tot time:", tot_time
if __name__ == "__main__":
X = ['0.03', '-0.01', '0.36', '0.30', '-0.19', '-0.05', '-0.29', '0.22', '0.19', '0.14', '-0.12']
Y = ['1.22', '1.11', '1.04', '1.17', '1.06', '1.31', '1.17', '1.31', '1.06', '1.19', '1.13']
data_in = []
data_in.append([-0.17, 1.22])
obs_list = []
for i in range(len(X)):
obs_list.append([float(X[i]), float(Y[i])])
data_in.append(obs_list)
# print "data:", data_in
method = "where"
test_algorithm(method, data_in) | [
"welovehun91@gmail.com"
] | welovehun91@gmail.com |
26e63b09b2482b2c6ad1c3618519251ceac2ed9a | 4dc5f671d6f8e1d22a90b8244c58473aa440236b | /definitions.py | dc126bb2175829caf0262328d5499902f8a6223e | [] | no_license | nithinmk26/PythonStuff | 00f6c5094ee09aedde0691c05ba6c8ca5c5640a5 | d54927147bebb94e14529c77f414fb77148ca3ba | refs/heads/master | 2023-01-09T07:30:51.416535 | 2020-11-04T10:23:53 | 2020-11-04T10:23:53 | 309,967,890 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 164 | py | def print_name(name):
print(name)
print_name("nanda")
#print_twice("nithin")
def print_twice(name):
    """Print ``name`` on two lines by delegating to print_name twice."""
    for _ in range(2):
        print_name(name)
print_twice("nithin")
| [
"nithinmk26@gmail.com"
] | nithinmk26@gmail.com |
df90f052aa06361c8bda15da8154b401621d1aed | 50eb70ca3154f9a84f89a883718a87df63f78e80 | /mpServerProcess.py | aa49a8172d4fc14116a7062f5109b6281c5f3414 | [] | no_license | Rocia/Learning-multiprocessing | b0e5bd8b70afa7c3dce19feb9b188ede9869323f | 0b4dd7d1d7bf4831069b53adb1e8d970a24261d9 | refs/heads/master | 2020-06-28T01:22:44.012701 | 2017-07-14T07:32:01 | 2017-07-14T07:32:01 | 97,080,749 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 561 | py | from multiprocessing import Process, Manager
def A(d, l):
    """Child-process worker: mutate the shared dict and list in place.

    Writes a few demo key/value pairs into ``d`` and reverses ``l``; the
    Manager proxies make these mutations visible to the parent process.
    """
    for key, value in ((1, '2'), ('2', 3), (3.14, None),
                       ('alpha', None), ('beta', 'gamma')):
        d[key] = value
    l.reverse()
if __name__ == '__main__':
    # Manager provides process-shared proxy objects (dict / list).
    manager = Manager()
    DICTIONARY = manager.dict()
    LIST = manager.list(range(15))
    # Run A in a child process; the proxies let its mutations be seen here.
    process = Process(target=A, args=(DICTIONARY,LIST))
    process.start()
    process.join()
    # Both print the state AFTER the child mutated them.
    print (DICTIONARY)
    print (LIST)
'''
OUTPUT:-
{1: '2', '2': 3, 3.14: None, 'alpha': None, 'beta': 'gamma'}
[14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]
''' | [
"rocia.fernandes@gmail.com"
] | rocia.fernandes@gmail.com |
d53a6445ee0ad5ff6856c3aa7fb0d32ba99044a2 | 301912f90f6fbc0ebdfe0726a4900a74789a0707 | /tele-bot.py | a54604ea84c8c1853977a5a3f90d0c187cc90ce8 | [] | no_license | denyskrysko/python-bots | 51037374da09d08b6027fb855d951a86ace7215b | 8eebf065f0a98667de620ba1b99eb4407091fd5e | refs/heads/master | 2020-06-04T11:06:50.156392 | 2019-07-04T13:33:54 | 2019-07-04T13:33:54 | 191,995,562 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,476 | py | import time
import requests
from lxml import html
from bs4 import BeautifulSoup
import schedule
import datetime
import urllib.request
def parse(url):
    """Scrape a TMX Money quote page.

    Returns {'name': [...], 'value': [...]} where each entry is the
    (possibly empty) list of strings matched by the XPath query.  On any
    failure the exception is printed and None is returned implicitly.
    """
    try:
        page = requests.get(url)
        tree = html.fromstring(page.content)
        return {
            'name': tree.xpath('//div[@class="quote-ticker tickerLarge"]/text()'),
            'value': tree.xpath('//div[@class="quote-price priceLarge"]/span/text()'),
        }
    except Exception as e:
        print(e)
def telegram_bot_sendtext(bot_message):
    """Send ``bot_message`` to the hard-coded Telegram chat; return the JSON reply.

    SECURITY NOTE: the bot token is embedded in source -- it should be moved
    to configuration or an environment variable.
    """
    bot_token = '712175175:AAHXStBiBHKgCvtfv87hRfID2yc207j-dd4'
    bot_chatID = '-337657336'
    send_text = ('https://api.telegram.org/bot' + bot_token + '/sendMessage?chat_id='
                 + bot_chatID + '&parse_mode=Markdown&text=' + bot_message)
    return requests.get(send_text).json()
def report(symbol, target):
    """Fetch the quote for ``symbol`` and, once, alert via Telegram when the
    price reaches ``target``.

    Uses the module-global ``counter`` as a "fired once" flag.
    """
    # current_dt = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    try:
        Arr = parse('https://web.tmxmoney.com/quote.php?qm_symbol=' + symbol + '&locale=EN')
        # parse() returns lists; str(...).strip("['']") crudely unwraps them.
        message = "Heads up! " + str(Arr['name']).strip("['']") + " Stock level reached: " + \
                  ": $" + str(Arr['value']).strip("['']")
        value = str(Arr['value']).strip("['']")
        if float(value) >= float(target):
            global counter
            if counter == 0:
                telegram_bot_sendtext(message)
                print(" --- M3lvin stock tracking---> " + message)
                # NOTE(review): ``counter = + 1`` assigns +1 (probably meant
                # ``counter += 1``); guarded by the == 0 check the net effect
                # -- alert only once -- is the same.
                counter = + 1
    except Exception as e:
        print(e)
# Entry point - user input: the stock symbol to watch and the alert threshold.
stock_symbol = input("To get a quote enter the stock symbol: ")
stock_target = input("What is the Stock target? ")
counter = 0 # creating global variable for the report function (fired-once alert flag)
# schedule.every().day.at("16:50").do(report)
# schedule.every().day.at("13:30").do(report)
# Poll the quote every 5 seconds forever; schedule.run_pending() is a no-op
# unless one of the (commented-out) daily jobs above is enabled.
while True:
    schedule.run_pending()
    report(stock_symbol, stock_target)
    time.sleep(5)
| [
"denyskrysko@gmail.com"
] | denyskrysko@gmail.com |
fab2b30ed6fb8e6b0e82f7b5754c46de062f13c1 | 6cffd0ad5a0e15dc7bc5c0ec78b6eaece0ef9371 | /bstest.py | 83525ffd175a259b0ca1875d622be75c475e938f | [] | no_license | asura123456/untitled1 | 79979edd7f51307e00b10e3a474031fe70f709d5 | 08382bef16ef603955c86cc55aafaecf1282117f | refs/heads/master | 2021-03-03T14:12:00.514373 | 2020-03-19T09:34:03 | 2020-03-19T09:34:03 | 203,749,765 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 408 | py | __author__ = 'Administrator'
from bs4 import BeautifulSoup
import urllib.request
#reponse=urllib.request.urlopen('BeautifulSoup.html')
soup=BeautifulSoup(open('BeautifulSoup.html'),'lxml')
#for i in soup.find_all('a',id="link2"):
# print(i)
print(soup.select('p .sister'))
print(soup.select('head>title'))
print(soup.select('a[id="link3"]'))
print(soup.select('p href="http://example.com/tillie"')) | [
"maya0026@tom.com"
] | maya0026@tom.com |
3c693e431934b383adf4d7f0be6aacd476f5da73 | 9a78d88daeef20341da3b3ed326f7be633cb2b46 | /instagram/instagramScraper.py | db788f63dd38fd3becb00f541e0c67850be53b54 | [] | no_license | muhammedfatih/SocialScraper | 6f410e3f44495ec65f63d8c50734aa24f0278bc7 | a83f53b272fae7da33e400d8409d44578f22ccd9 | refs/heads/master | 2022-11-17T21:30:12.966814 | 2020-07-18T17:59:17 | 2020-07-18T17:59:17 | 280,710,515 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,425 | py | from urllib2 import urlopen
import re
import json
import sys
sys.path.append("models")
from instagramUser import instagramUser
class instagramScraper:
    """Scrape an Instagram profile page (Python 2 -- uses urllib2).

    fetch() pulls the embedded ``window._sharedData`` JSON out of the
    profile HTML and copies the interesting fields into an instagramUser.
    """
    INSTAGRAM_URL="https://www.instagram.com/"
    # NOTE(review): class-level userData is never used -- fetch() shadows it
    # with a local of the same name.
    userData=instagramUser("", "", "", 0, 0)
    def fetch(self, username):
        # Download the raw profile HTML.
        link = self.INSTAGRAM_URL + username
        urlStream = urlopen(link)
        urlContent = urlStream.read()
        # The page embeds its data as "window._sharedData = {...};</script>";
        # grab that span and strip the wrapper to leave pure JSON.
        encapsulatedJsonContentAsString=re.findall("window._sharedData = .*?;</script>", urlContent)
        jsonContentAsString=encapsulatedJsonContentAsString[0].replace("window._sharedData = ", "").replace(";</script>", "")
        jsonContent=json.loads(jsonContentAsString)
        userJsonContent=jsonContent["entry_data"]["ProfilePage"][0]["graphql"]["user"]
        # Build the user model; biography is re-encoded so printing it can't
        # crash on characters the console encoding lacks.
        userData=instagramUser(
            username
            , userJsonContent["biography"].encode(sys.stdout.encoding, errors='replace')
            , userJsonContent["business_category_name"]
            , userJsonContent["edge_follow"]["count"]
            , userJsonContent["edge_followed_by"]["count"]
        )
        # Attach each timeline photo (url, like count, unix timestamp).
        for node in userJsonContent["edge_owner_to_timeline_media"]["edges"]:
            userData.addPhoto(
                node["node"]["display_url"]
                , node["node"]["edge_liked_by"]["count"]
                , node["node"]["taken_at_timestamp"]
            )
return userData.__dict__ | [
"mfatihinanc@gmail.com"
] | mfatihinanc@gmail.com |
d9d33fe8b116cb7dc30454d84a7a1097e2401020 | ad1e55b9a67c798cf4b4ce41c76b26977f8b4e8d | /vendor-local/celery/tests/test_utils/test_datastructures.py | c06e4d732e8b0cdfa46db88f55237d3206f51bd9 | [
"BSD-3-Clause"
] | permissive | kumar303/rockit | 7a6ac84bb8c37e5f3b65d7dcecf9b9c549902cf5 | fc347b5b143835ddd77fd0c1ea4e6f2007a21972 | refs/heads/master | 2021-01-10T19:51:30.638073 | 2020-07-26T19:00:37 | 2020-07-26T19:00:37 | 4,219,328 | 0 | 2 | BSD-3-Clause | 2020-07-26T19:00:38 | 2012-05-03T22:03:24 | Python | UTF-8 | Python | false | false | 8,435 | py | from __future__ import absolute_import
from __future__ import with_statement
import sys
from celery.datastructures import (ExceptionInfo, LRUCache, LimitedSet,
AttributeDict, DictAttribute,
ConfigurationView, DependencyGraph)
from celery.tests.utils import Case, WhateverIO
class Object(object):
    """Empty attribute bag: the tests set arbitrary attributes on instances."""
class test_DictAttribute(Case):
    """Tests for DictAttribute: dict-style access backed by object attributes."""
    def test_get_set(self):
        # Item assignment must mirror onto the wrapped object's attributes.
        x = DictAttribute(Object())
        x["foo"] = "The quick brown fox"
        self.assertEqual(x["foo"], "The quick brown fox")
        self.assertEqual(x["foo"], x.obj.foo)
        self.assertEqual(x.get("foo"), "The quick brown fox")
        self.assertIsNone(x.get("bar"))
        with self.assertRaises(KeyError):
            x["bar"]
    def test_setdefault(self):
        # Second setdefault must not overwrite the stored value.
        x = DictAttribute(Object())
        self.assertEqual(x.setdefault("foo", "NEW"), "NEW")
        self.assertEqual(x.setdefault("foo", "XYZ"), "NEW")
    def test_contains(self):
        x = DictAttribute(Object())
        x["foo"] = 1
        self.assertIn("foo", x)
        self.assertNotIn("bar", x)
    def test_items(self):
        # Pre-existing attributes and later item writes both show up.
        obj = Object()
        obj.attr1 = 1
        x = DictAttribute(obj)
        x["attr2"] = 2
        self.assertDictEqual(dict(x.iteritems()),
                             dict(attr1=1, attr2=2))
        self.assertDictEqual(dict(x.items()),
                             dict(attr1=1, attr2=2))
class test_ConfigurationView(Case):
    """Tests for ConfigurationView: changes layered over a defaults dict."""
    def setUp(self):
        # "changed_key"/"both" live in changes; "default_key"/"both" in defaults.
        self.view = ConfigurationView({"changed_key": 1,
                                       "both": 2},
                                      [{"default_key": 1,
                                       "both": 1}])
    def test_setdefault(self):
        self.assertEqual(self.view.setdefault("both", 36), 2)
        self.assertEqual(self.view.setdefault("new", 36), 36)
    def test_get(self):
        self.assertEqual(self.view.get("both"), 2)
        # Sentinel object proves the default is returned untouched.
        sp = object()
        self.assertIs(self.view.get("nonexisting", sp), sp)
    def test_update(self):
        changes = dict(self.view.changes)
        self.view.update(a=1, b=2, c=3)
        self.assertDictEqual(self.view.changes,
                             dict(changes, a=1, b=2, c=3))
    def test_contains(self):
        # Membership must see both layers.
        self.assertIn("changed_key", self.view)
        self.assertIn("default_key", self.view)
        self.assertNotIn("new", self.view)
    def test_repr(self):
        self.assertIn("changed_key", repr(self.view))
        self.assertIn("default_key", repr(self.view))
    def test_iter(self):
        # The changed layer wins for "both" (2, not the default 1).
        expected = {"changed_key": 1,
                    "default_key": 1,
                    "both": 2}
        self.assertDictEqual(dict(self.view.items()), expected)
        self.assertItemsEqual(list(iter(self.view)),
                              expected.keys())
        self.assertItemsEqual(self.view.keys(), expected.keys())
        self.assertItemsEqual(self.view.values(), expected.values())
class test_ExceptionInfo(Case):
    """Tests for ExceptionInfo: wraps sys.exc_info() with a stored traceback."""
    def test_exception_info(self):
        # Raise and catch to obtain a real (type, value, traceback) triple.
        try:
            raise LookupError("The quick brown fox jumps...")
        except LookupError:
            exc_info = sys.exc_info()
        einfo = ExceptionInfo(exc_info)
        # str() of the wrapper is the formatted traceback text.
        self.assertEqual(str(einfo), einfo.traceback)
        self.assertIsInstance(einfo.exception, LookupError)
        self.assertTupleEqual(einfo.exception.args,
                              ("The quick brown fox jumps...", ))
        self.assertTrue(einfo.traceback)
        r = repr(einfo)
        self.assertTrue(r)
class test_LimitedSet(Case):
    """Tests for LimitedSet: a set that evicts oldest members past maxlen."""
    def test_add(self):
        # Adding a third member to a maxlen=2 set evicts the oldest ("foo").
        s = LimitedSet(maxlen=2)
        s.add("foo")
        s.add("bar")
        for n in "foo", "bar":
            self.assertIn(n, s)
        s.add("baz")
        for n in "bar", "baz":
            self.assertIn(n, s)
        self.assertNotIn("foo", s)
    def test_iter(self):
        s = LimitedSet(maxlen=2)
        items = "foo", "bar"
        for item in items:
            s.add(item)
        l = list(iter(s))
        for item in items:
            self.assertIn(item, l)
    def test_repr(self):
        s = LimitedSet(maxlen=2)
        items = "foo", "bar"
        for item in items:
            s.add(item)
        self.assertIn("LimitedSet(", repr(s))
    def test_clear(self):
        s = LimitedSet(maxlen=2)
        s.add("foo")
        s.add("bar")
        self.assertEqual(len(s), 2)
        s.clear()
        self.assertFalse(s)
    def test_update(self):
        # update() obeys the same eviction rules as add().
        s1 = LimitedSet(maxlen=2)
        s1.add("foo")
        s1.add("bar")
        s2 = LimitedSet(maxlen=2)
        s2.update(s1)
        self.assertItemsEqual(list(s2), ["foo", "bar"])
        s2.update(["bla"])
        self.assertItemsEqual(list(s2), ["bla", "bar"])
        s2.update(["do", "re"])
        self.assertItemsEqual(list(s2), ["do", "re"])
    def test_as_dict(self):
        s = LimitedSet(maxlen=2)
        s.add("foo")
        self.assertIsInstance(s.as_dict(), dict)
class test_LRUCache(Case):
    """Tests for LRUCache: bounded dict evicting least-recently-used keys."""
    def test_expires(self):
        # Inserting 2*limit keys leaves only the newest ``limit`` of them.
        limit = 100
        x = LRUCache(limit=limit)
        slots = list(xrange(limit * 2))
        for i in slots:
            x[i] = i
        self.assertListEqual(x.keys(), list(slots[limit:]))
    def test_least_recently_used(self):
        x = LRUCache(3)
        x[1], x[2], x[3] = 1, 2, 3
        self.assertEqual(x.keys(), [1, 2, 3])
        x[4], x[5] = 4, 5
        self.assertEqual(x.keys(), [3, 4, 5])
        # access 3, which makes it the last used key.
        x[3]
        x[6] = 6
        self.assertEqual(x.keys(), [5, 3, 6])
        x[7] = 7
        self.assertEqual(x.keys(), [3, 6, 7])
    def assertSafeIter(self, method, interval=0.01, size=10000):
        # Helper: iterate the cache while a background thread concurrently
        # pops entries, proving iteration doesn't crash mid-mutation.
        from threading import Thread, Event
        from time import sleep
        x = LRUCache(size)
        x.update(zip(xrange(size), xrange(size)))
        class Burglar(Thread):
            def __init__(self, cache):
                self.cache = cache
                self._is_shutdown = Event()
                self._is_stopped = Event()
                Thread.__init__(self)
            def run(self):
                # Keep stealing the oldest entry until empty or told to stop.
                while not self._is_shutdown.isSet():
                    try:
                        self.cache.data.popitem(last=False)
                    except KeyError:
                        break
                self._is_stopped.set()
            def stop(self):
                self._is_shutdown.set()
                self._is_stopped.wait()
                self.join(1e10)
        burglar = Burglar(x)
        burglar.start()
        try:
            for _ in getattr(x, method)():
                sleep(0.0001)
        finally:
            burglar.stop()
    def test_safe_to_remove_while_iteritems(self):
        self.assertSafeIter("iteritems")
    def test_safe_to_remove_while_keys(self):
        self.assertSafeIter("keys")
    def test_safe_to_remove_while_itervalues(self):
        self.assertSafeIter("itervalues")
    def test_items(self):
        c = LRUCache()
        c.update(a=1, b=2, c=3)
        self.assertTrue(c.items())
class test_AttributeDict(Case):
    """Tests for AttributeDict: dict whose keys double as attributes."""
    def test_getattr__setattr(self):
        x = AttributeDict({"foo": "bar"})
        self.assertEqual(x["foo"], "bar")
        # Missing keys surface as AttributeError on attribute access.
        with self.assertRaises(AttributeError):
            x.bar
        # Attribute assignment writes through to the mapping.
        x.bar = "foo"
        self.assertEqual(x["bar"], "foo")
class test_DependencyGraph(Case):
    """Tests for DependencyGraph: topological sort, edges and DOT output."""
    def graph1(self):
        # Fixture: A and B are roots, C depends on A, D depends on C and B.
        return DependencyGraph([
            ("A", []),
            ("B", []),
            ("C", ["A"]),
            ("D", ["C", "B"]),
        ])
    def test_repr(self):
        self.assertTrue(repr(self.graph1()))
    def test_topsort(self):
        order = self.graph1().topsort()
        # C must start before D
        self.assertLess(order.index("C"), order.index("D"))
        # and B must start before D
        self.assertLess(order.index("B"), order.index("D"))
        # and A must start before C
        self.assertLess(order.index("A"), order.index("C"))
    def test_edges(self):
        # Only nodes with incoming dependencies count as edges here.
        self.assertListEqual(list(self.graph1().edges()),
                             ["C", "D"])
    def test_items(self):
        self.assertDictEqual(dict(self.graph1().items()),
                             {"A": [], "B": [],
                              "C": ["A"], "D": ["C", "B"]})
    def test_to_dot(self):
        # to_dot writes graphviz text into the supplied file-like object.
        s = WhateverIO()
        self.graph1().to_dot(s)
        self.assertTrue(s.getvalue())
| [
"kumar.mcmillan@gmail.com"
] | kumar.mcmillan@gmail.com |
cde91956a9edb08f6b4f838f37d9d754e7956ca6 | d78c504f800a7fc305db5a1e84fe303f97a5f3f4 | /14-0 Weather.py | 98d1f1ca145a59c9c8ac73455b9184447d63b248 | [] | no_license | InnaKr/Exercises | e2c44dc06e1e561cd00e31d3715e503901c58e1f | d9d4531d3c7f8aebf1e1f9c0645843569e6f07c4 | refs/heads/master | 2020-05-18T19:52:10.336057 | 2019-06-30T15:59:05 | 2019-06-30T15:59:05 | 184,618,845 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,049 | py | import requests
# city = "Macerata"
# Prompt for a city, then query OpenWeatherMap until a valid city is given.
city = input('Введіть місто(En): ')
# OpenWeatherMap API key (embedded in source; better kept in configuration).
appid = "47212252e5b0293b1e53348aa194bdb3"
while True:
    try:
        # metric units, Ukrainian descriptions.
        res = requests.get("http://api.openweathermap.org/data/2.5/weather",
                           params={'q': city, 'units': 'metric', 'lang': 'ua', 'APPID': appid})
        data = res.json()
        print('Місто: ', data['name'],'('+str(data['sys']['country'])+')', '\n')
        print("* Хмарність:", data['weather'][0]['description'])
        print("* Температура повітря:", str(data['main']['temp']) + '°С')
        print("* Температура повітря від ", str(data['main']['temp_min'])+'°С', 'до', str(data['main']['temp_max'])+'°С')
        print('* Вологість: ', str(data['main']['humidity']) + '%', )
        print('* Вітер: ', str(data['wind']['speed']) + 'km/год', )
        break
    # Any failure (unknown city -> missing JSON keys) re-prompts the user.
    except Exception:
        print("Такого міста немає")
        city = input('Введіть місто(En): ')
| [
"49992989+InnaKr@users.noreply.github.com"
] | 49992989+InnaKr@users.noreply.github.com |
00afe15515e8406d7267839d7d8a4be3bccea3fa | 1dbbb05b30d27c6419b9f34eea3b9a47f92582a0 | /parlai/zoo/sea/bart_base.py | 44e3581dd73c1b7ad168a64f76a5a09e3c7c18f6 | [
"MIT"
] | permissive | facebookresearch/ParlAI | 815334323d0ebef51bf9837336fe3eef6fe1655d | e1d899edfb92471552bae153f59ad30aa7fca468 | refs/heads/main | 2023-08-31T22:20:45.918129 | 2023-08-14T19:39:56 | 2023-08-14T19:39:56 | 89,266,735 | 10,943 | 2,395 | MIT | 2023-09-13T23:07:40 | 2017-04-24T17:10:44 | Python | UTF-8 | Python | false | false | 741 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Vanila BART-Large 400m parameter model with no retrieval.
"""
from parlai.core.build_data import built, download_models, get_model_dir
import os
import os.path
def download(datapath):
    """Fetch the SEA ``bart_base`` model into the ParlAI model dir if absent."""
    model_type = 'bart_base'
    version = 'v1.0'
    ddir = os.path.join(get_model_dir(datapath), 'sea')
    # Only download when the versioned build marker is missing.
    if not built(os.path.join(ddir, model_type), version):
        download_models(
            {'datapath': datapath, 'model_type': model_type},
            [f'model_{version}.tgz'],
            'sea',
            version=version,
            use_model_type=True,
        )
| [
"noreply@github.com"
] | facebookresearch.noreply@github.com |
363f924942de71b7d00a8c1074ffc45f369e5ddd | d257d98b6f0116797d882191a85fbfc407b44a07 | /new submission/Core.py | 7e6cac5e3c020057298b1cb743c21e258730278b | [
"MIT"
] | permissive | AmrAshmawy2099/DL_Framework | dbe56d67dce78b5e679a10620f74a7ccc0b9e2d8 | a46e76936646e349f6e50852e1630362ae3d6d49 | refs/heads/main | 2023-02-20T00:14:29.866234 | 2021-01-25T11:17:13 | 2021-01-25T11:17:13 | 332,546,908 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,546 | py |
import numpy as np
import pandas as pd
#import matplotlib.pyplot as plt
from statistics import mean
import warnings
warnings.filterwarnings("error")
# foundation classes
###################################################
class SGD:
    """Plain stochastic-gradient-descent optimizer.

    Each parameter is updated in place as ``p.data -= alpha * p.grad.data``;
    gradients are (optionally) cleared after every step.
    """

    def __init__(self, parameters, alpha=0.1):
        self.parameters = parameters  # objects exposing .data and .grad.data
        self.alpha = alpha            # learning rate

    def zero(self):
        """Reset every parameter's gradient buffer to zero in place."""
        for param in self.parameters:
            param.grad.data *= 0

    def step(self, zero=True):
        """Apply one descent update; clear grads unless ``zero`` is False."""
        for param in self.parameters:
            param.data -= param.grad.data * self.alpha
            if zero:
                param.grad.data *= 0
class Tensor (object):
    """Minimal autograd tensor.

    Wraps a numpy array in ``self.data`` and, when ``autograd`` is True,
    records how it was produced (``creators`` + ``creation_op``) so that
    backward() can propagate gradients through the computation graph.
    ``children`` counts how many downstream nodes still owe this node a
    gradient, so gradients are only propagated upstream once all arrive.
    """
    def __init__(self,data,autograd=False, creators=None,creation_op=None,id=None):
        self.data = np.array(data)
        self.creators = creators
        self.creation_op = creation_op
        self.grad = None
        self.autograd = autograd
        self.children = {}
        # Random id used as the key in each creator's children table.
        if(id is None):
            id = np.random.randint(0,100000)
        self.id = id
        # Register this node as a pending-gradient child of its creators.
        if(creators is not None):
            for c in creators:
                if(self.id not in c.children):
                    c.children[self.id] = 1
                else:
                    c.children[self.id] += 1
    def all_children_grads_accounted_for(self):
        # True once every downstream node has sent its gradient back.
        for id,cnt in self.children.items():
            if(cnt != 0): return False
        return True
    def backward(self,grad=None, grad_origin=None):
        """Accumulate ``grad`` into self.grad and recurse into creators.

        grad defaults to ones (seed for a user-initiated backward call);
        grad_origin identifies which child node sent this gradient.
        """
        if(grad is None):
            grad = Tensor(np.ones_like(self.data))
        if(self.autograd):
            if(grad_origin is not None):
                if(self.children[grad_origin.id] == 0):raise Exception("cannot backprop more than once")
                else: self.children[grad_origin.id] -= 1
            if(self.grad is None):
                self.grad = grad
            else:
                self.grad += grad
            # Only continue upstream once all children have reported (or this
            # is the user-initiated seed call with no origin).
            if(self.creators is not None and(self.all_children_grads_accounted_for() or grad_origin is None)):
                if(self.creation_op == "add"):
                    # d(a+b): gradient passes through unchanged to both sides.
                    self.creators[0].backward(self.grad, self)
                    self.creators[1].backward(self.grad, self)
                if(self.creation_op == "sub"):
                    # d(a-b): +grad to the left operand, -grad to the right.
                    new = Tensor(self.grad.data)
                    self.creators[0].backward(new, self)
                    new = Tensor(self.grad.__neg__().data)
                    self.creators[1].backward(new, self)
                if(self.creation_op == "mul"):
                    # Product rule: each side receives grad * other side.
                    new = self.grad * self.creators[1]
                    self.creators[0].backward(new , self)
                    new = self.grad * self.creators[0]
                    self.creators[1].backward(new, self)
                if(self.creation_op == "mm"):
                    # Matrix multiply: dX = dOut @ W^T, dW = (dOut^T @ X)^T.
                    act = self.creators[0]
                    weights = self.creators[1]
                    new = self.grad.mm(weights.transpose())
                    act.backward(new)
                    new = self.grad.transpose().mm(act).transpose()
                    weights.backward(new)
                if(self.creation_op == "transpose"):
                    self.creators[0].backward(self.grad.transpose())
                if("sum" in self.creation_op):
                    # "sum_<dim>": broadcast the gradient back along that axis.
                    dim = int(self.creation_op.split("_")[1])
                    ds = self.creators[0].data.shape[dim]
                    self.creators[0].backward(self.grad.expand(dim,ds))
                if("expand" in self.creation_op):
                    # "expand_<dim>": collapse the gradient back along that axis.
                    dim = int(self.creation_op.split("_")[1])
                    self.creators[0].backward(self.grad.sum(dim))
                if(self.creation_op == "neg"):
                    self.creators[0].backward(self.grad.__neg__())
                if(self.creation_op == "sigmoid"):
                    # d(sigmoid) = s * (1 - s).
                    ones = Tensor(np.ones_like(self.grad.data))
                    self.creators[0].backward(self.grad * (self * (ones - self)))
                if(self.creation_op == "relu"):
                    # NOTE(review): ReLU's gradient should be grad * (x > 0);
                    # multiplying by the activation VALUE scales the gradient
                    # by x instead -- looks wrong, confirm intent.
                    self.creators[0].backward(self.grad * (self ))
                if(self.creation_op == "tanh"):
                    # d(tanh) = 1 - tanh^2.
                    ones = Tensor(np.ones_like(self.grad.data))
                    self.creators[0].backward(self.grad * (ones - (self * self)))
    def __neg__(self):
        if(self.autograd): return Tensor(self.data * -1,autograd=True, creators=[self],creation_op="neg")
        return Tensor(self.data * -1)
    def __add__(self, other):
        if(self.autograd and other.autograd):
            return Tensor(self.data + other.data, autograd=True,creators=[self,other], creation_op="add")
        return Tensor(self.data + other.data)
    def getdata(self):
        # Raw numpy payload.
        return (self.data)
    def __repr__(self):
        return str(self.data.__repr__())
    def __str__(self):
        return str(self.data.__str__())
    def __sub__(self, other):
        if(self.autograd and other.autograd):
            return Tensor(self.data - other.data,autograd=True,creators=[self,other],creation_op="sub")
        return Tensor(self.data - other.data)
    def __mul__(self, other):
        # NOTE(review): the module installs warnings.filterwarnings("error"),
        # so numpy overflow warnings surface here as RuntimeWarning exceptions;
        # the except path rescales ``other`` by /= 9875 and retries.
        # NOTE(review): when either operand has autograd=False this method
        # falls through and implicitly returns None, and the second return
        # inside ``try`` is unreachable -- both look unintended, confirm.
        if(self.autograd and other.autograd):
            try:
                return Tensor(self.data * other.data,autograd=True,creators=[self,other],creation_op="mul")
                return Tensor(self.data * other.data)
            except(RuntimeWarning):
                other.data/=9875
                if(self.autograd and other.autograd):
                    return Tensor(self.data * other.data,autograd=True,creators=[self,other],creation_op="mul")
                return Tensor(self.data * other.data)
    def sum(self, dim):
        # Reduce along ``dim``; the op string remembers the axis for backward.
        if(self.autograd):
            return Tensor(self.data.sum(dim),autograd=True,creators=[self],creation_op="sum_"+str(dim))
        return Tensor(self.data.sum(dim))
    def expand(self, dim,copies):
        # Inverse of sum: replicate the data ``copies`` times along ``dim``.
        trans_cmd = list(range(0,len(self.data.shape)))
        trans_cmd.insert(dim,len(self.data.shape))
        new_shape = list(self.data.shape) + [copies]
        new_data = self.data.repeat(copies).reshape(new_shape)
        new_data = new_data.transpose(trans_cmd)
        if(self.autograd):
            return Tensor(new_data,autograd=True,creators=[self],creation_op="expand_"+str(dim))
        return Tensor(new_data)
    def transpose(self):
        if(self.autograd):return Tensor(self.data.transpose(),autograd=True,creators=[self],creation_op="transpose")
        return Tensor(self.data.transpose())
    def mm(self, x):
        """Matrix product with another Tensor.

        On TypeError the autograd path prints both operand payloads/shapes
        for debugging, then falls through to the plain (graph-free) product,
        which will typically raise the same error again.
        """
        if(self.autograd):
            try:
                return Tensor(self.data.dot(x.data),autograd=True,creators=[self,x],creation_op="mm")
            except(TypeError):
                print(x.data)
                print(x.data.shape)
                print(self.data.shape)
        return Tensor(self.data.dot(x.data))
    def shape(self):
        # Shape of the underlying numpy array (method, not property).
        return self.data.shape
class Layer(object):
    """Base class for network components; tracks trainable parameters."""

    def __init__(self):
        # Every layer starts with an empty parameter list of its own.
        self.parameters = []

    def get_parameters(self):
        """Return the list of trainable parameters registered on this layer."""
        return self.parameters

    def change_Weights(self, para):
        # Default implementation only reports the current parameters;
        # subclasses with real weights override this.
        print(self.parameters)
        print(type(self.parameters))
class Linear(Layer):
    """Fully-connected layer: output = input @ W + bias."""

    def __init__(self, n_inputs, n_outputs):
        super().__init__()
        # He-style init: standard normal scaled by sqrt(2 / fan_in).
        W = np.random.randn(n_inputs, n_outputs)*np.sqrt(2.0/(n_inputs))
        self.weight = Tensor(W, autograd=True)
        self.bias = Tensor(np.zeros(n_outputs), autograd=True)
        self.parameters += [self.weight, self.bias]

    def forward(self, input):
        """Affine transform; the bias is broadcast across the batch axis."""
        projected = input.mm(self.weight)
        return projected + self.bias.expand(0, len(input.data))

    def change_Weights(self, para):
        """Replace the weight matrix with ``para``, keeping the current bias."""
        self.weight = Tensor(para, autograd=True)
        self.parameters = [self.weight, self.bias]
class MSELoss(Layer):
    """Sum-of-squared-errors loss, reduced along the first axis."""

    def __init__(self):
        super().__init__()

    def forward(self, pred, target):
        """Return sum((pred - target)^2) over axis 0."""
        diff = pred - target
        return (diff * diff).sum(0)
class Sequential(Layer):
    """Ordered container of layers plus a minimal SGD training loop.

    forward() threads the input through each layer in order.  train() is
    written to be called as ``model.train(target, data, ...)`` -- its first
    positional parameter plays the role of ``self``.
    """
    def __init__(self, layers=None):
        """Create a container.

        layers: optional list of Layer instances.  Fixed: the previous
        version used a mutable default (``layers=list()``), which made every
        Sequential built without an explicit list share ONE list -- adding a
        layer to one model silently added it to all of them.
        """
        super().__init__()
        self.layers = [] if layers is None else layers
    def add(self, layer):
        """Append a layer to the end of the pipeline."""
        self.layers.append(layer)
    def forward(self, input):
        """Feed ``input`` through every layer in order and return the result."""
        for layer in self.layers:
            input = layer.forward(input)
        return input
    def set_parameters(self, para):
        """Overwrite each layer's weights with ``para[i]`` (parallel lists)."""
        for i in range(len(self.layers)):
            self.layers[i].change_Weights(para[i])
    def get_parameters(self):
        """Collect the trainable parameters of all layers into one flat list."""
        params = list()
        for l in self.layers:
            params += l.get_parameters()
        return params
    def validate(self, data, target):
        """Return the mean MSE loss of THIS model over an iterable of inputs.

        Fixed: the previous version read a global ``model`` instead of
        ``self``, so it only worked when a global of that name existed.
        """
        criterion = MSELoss()
        loss_list = []
        for sample in data:
            pred = self.forward(sample)
            loss_list.append(criterion.forward(pred, target).getdata()[0])
        return mean(loss_list)
    def train(model,target,data,batch_no,alpha,validation_counter,validation_data,validation_target):
        """Fit ``model`` on (data, target) with SGD until the loss drops
        below 1e-3 or 100 epochs elapse.

        Returns [loss_list, model].  Training aborts early (printing
        "training reached overflow") once the loss stops improving.  The
        validation_* parameters are kept for interface compatibility; the
        in-loop validation hook is currently disabled.
        """
        criterion = MSELoss()
        optim = SGD(parameters=model.get_parameters(), alpha=alpha)
        loss_list = []
        # One warm-up update so loss_list is non-empty for the loop test.
        pred = model.forward(data)
        loss = criterion.forward(pred, target)
        loss.backward(Tensor(np.ones_like(loss.data)))
        optim.step()
        loss_list.append(loss.getdata()[0])
        count = 0
        while loss_list[-1] >= .001 and count < 100:
            for i in range(batch_no):
                pred = model.forward(data)
                loss = criterion.forward(pred, target)
                loss.backward(Tensor(np.ones_like(loss.data)))
                optim.step()
            loss_list.append(loss.getdata().sum(0))
            count += 1
            print(count)
            # Stop when the recent average is no better than the latest loss,
            # or the loss has been flat for three consecutive epochs.
            if len(loss_list) > 20:
                if (mean(loss_list[-10:-2]) <= loss_list[-1]
                        or (loss_list[-1] == loss_list[-2] and loss_list[-2] == loss_list[-3])):
                    print("training reached overflow")
                    return [loss_list, model]
        return [loss_list, model]
    def test(self, data):
        """Run a forward (inference) pass on ``data``."""
        return self.forward(data)
####################################################3
#activation functions syntax pass numpy array return a numpy array
#############################################
def hard_sigmoid(x):
    """Piecewise-linear sigmoid: 0 for x <= 0, 1 for x > 1, (x+1)/2 between.

    x: 1-D array-like of numbers; returns a float ndarray of the same length.
    (Note the jump at 0: values just above 0 map to just above 0.5.)
    """
    out = np.zeros(len(x))
    for i, v in enumerate(x):
        if v > 1:
            out[i] = 1
        elif v <= 0:
            out[i] = 0
        else:
            out[i] = (v + 1) / 2
    return out
def softmax_function(x):
    """Softmax over the whole array: exp(x) normalized to sum to 1.

    Note: not shifted by max(x), so very large inputs can overflow.
    """
    exps = np.exp(x)
    return exps / exps.sum()
def leaky_relu_function(x):
    """Scalar leaky ReLU with a fixed 0.01 negative slope."""
    return 0.01 * x if x < 0 else x
def parametrized_relu_function(a, x):
    """Scalar PReLU: negative inputs are scaled by the slope ``a``."""
    return a * x if x < 0 else x
def elu_function(x, a):
    """Scalar ELU: x for x >= 0, a * (exp(x) - 1) for x < 0."""
    return a * (np.exp(x) - 1) if x < 0 else x
def swish_function(x):
    """Swish / SiLU activation: x * sigmoid(x) = x / (1 + exp(-x)).

    Fixed: the original divided by (1 - exp(-x)), which is not the swish
    formula and divides by zero at x = 0.
    """
    return x / (1 + np.exp(-x))
#############################################
# activation functions that call tensor
####################################### #####
def sigmoid(self):
    """Elementwise logistic sigmoid of a Tensor; tracks the graph when
    ``self.autograd`` is set.

    NOTE(review): the except clause only fires because the module installs
    warnings.filterwarnings("error"), which turns numpy's overflow
    RuntimeWarning (exp of a large magnitude) into an exception.  The
    fallback returns a constant 0.5, although an overflow in exp(-x) means
    x is very negative and the true sigmoid is ~0 -- confirm this is
    intended.
    """
    try:
        if(self.autograd):
            return Tensor(1 / (1 + np.exp(-self.data)), autograd=True,creators=[self],creation_op="sigmoid")
        return Tensor(1 / (1 + np.exp(-self.data)))
    except(RuntimeWarning):
        if(self.autograd):
            return Tensor(np.array(.5), autograd=True,creators=[self],creation_op="sigmoid")
        return Tensor(np.array(.5))
def tanh(self):
    """Elementwise tanh of a Tensor; tracks the graph when autograd is on."""
    result = np.tanh(self.data)
    if self.autograd:
        return Tensor(result, autograd=True, creators=[self], creation_op="tanh")
    return Tensor(result)
def relu1(self):
if(self.autograd):
return Tensor( np.maximum(0, self.data),autograd=True,creators=[self],creation_op="relu")
return Tensor(np.maximum(0, self.data))
def leaky_relu(self):
if(self.autograd):
return Tensor(np.maximum(self.data *.001, self.data),autograd=True,creators=[self],creation_op="relu")
return Tensor(np.maximum( self.data*.01, self.data))
########################################################
#activation functions classes
########################################################
class Sigmoid(Layer):
def __init__(self):
super().__init__()
def forward(self, input):
return sigmoid(input)
class Tanh(Layer):
def __init__(self):
super().__init__()
def forward(self, input):
return tanh(input)
class Relu(Layer):
def __init__(self):
super().__init__()
def forward(self, input):
return relu1(input)
class LeakyRelu(Layer):
def __init__(self):
super().__init__()
def forward(self, input):
return leaky_relu(input)
##################################################
#main part of code
################################################
#load teat data and train data
train_file = pd.read_csv('train.csv')
train_file = train_file.sample(frac=1)
train_set = train_file[0:600]
test_set = train_file[600:700]
validat_set=train_file[500:600]
#creat Y_train which have only the label Column of the train.csv file
Y_train = train_set["label"]
Y_test = test_set["label"]
Y_valid=validat_set["label"]
# creat X_train which have all columns of the train.csv file Except 'label' column
#X_train = train_file.drop(labels = ["label"],axis = 1)
X_train = train_set.drop(["label"],axis = 1)
X_test = test_set.drop(["label"],axis = 1)
X_valid=validat_set.drop(["label"],axis = 1)
#print(X_train)
print('-----------------------------')
# Normalize the data
X_train = X_train / 255.0
X_test = X_test / 255.0
X_valid=X_valid/255.0
# Reshape image in 3 dimensions (height = 28px, width = 28px , canal = 1)
#X_train = X_train.values.reshape(-1,28,28,1)
#X_test = X_test.values.reshape(-1,28,28,1)
#X_valid = X_valid.values.reshape(-1,28,28,1)
print('--------------- --------------')
arr2=np.array(Y_train.values)
print(arr2.shape)
##arr1=np.array(X_train)
##arr5=np.array(X_valid)
##print(type(arr1))
##print(arr1[0].shape)
##print(np.array([[0,0],[0,1],[1,0],[1,1]]).shape)
##print(np.array([[0,0],[0,1],[1,0],[1,1]])[0].shape)
def transformer(x):
arr=[]
for i in x:
arr.append(np.array([i]))
return arr
arr1=np.array((X_train.values))
arr2=np.array(transformer(Y_train.values))
arr3=np.array((X_test.values))
arr4=np.array(transformer(Y_test.values))
arr5=np.array((X_valid.values))
arr6=np.array(transformer(Y_valid.values))
print((arr1[0]))
print(arr2)
##print(np.array(Y_valid.values).shape)
##print(arr6[0].shape)
##print(arr6[1].shape)
##print(np.array(Y_valid.values)[2].shape)
##
##print(np.array([[0],[1],[1],[1]]).shape)
##print(np.array([[0],[1],[1],[1]])[0].shape)
##print(np.array([[0],[1],[1],[1]])[1].shape)
##print(np.array([[0],[1],[1],[1]])[2].shape)
##
data = Tensor(arr1, autograd=True)
target = Tensor(arr2, autograd=True)
model = Sequential([Linear(784,400),Tanh(),Linear(400,90),Tanh(),Linear(90,1),Tanh()])
epoch_no=20
[list1,model]=model.train(target,data,epoch_no,1,50,Tensor(arr5),Tensor(arr6))
print(list1)
print("***********************************")
print(model.test(Tensor(arr3[0])))
print(arr4[0])
print(model.test(Tensor(arr3[1])).sum(0))
print(arr4[1])
print(model.test(Tensor(arr3[2])))
print(arr4[2])
print(model.test(Tensor(arr3[3])))
print(arr4[3])
| [
"noreply@github.com"
] | AmrAshmawy2099.noreply@github.com |
e038243caf03d7f65252cbc3b38256816376cb27 | db6f7200e7d62f499b500d7e26678835df2e5771 | /app/__init__.py | 8723ea03b093e4553a46f087b4720fff63cc3d5e | [] | no_license | melaniesarah/flask-microblog2 | 0583d820152e5493456346554597146c8236cf9e | 98b6ebf206624a5189a16617b427c2b51779d2cb | refs/heads/master | 2023-03-10T01:42:05.322797 | 2021-02-24T21:45:58 | 2021-02-24T21:45:58 | 296,149,326 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,042 | py | import logging
from logging.handlers import SMTPHandler, RotatingFileHandler
import os
from flask import Flask, request, current_app
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_login import LoginManager
from flask_mail import Mail
from flask_bootstrap import Bootstrap
from flask_moment import Moment
from flask_babel import Babel, lazy_gettext as _l
from config import Config
from elasticsearch import Elasticsearch
db = SQLAlchemy()
migrate = Migrate()
login = LoginManager()
login.login_view = 'auth.login'
login.login_message = _l('Please log in to access this page.')
mail = Mail()
bootstrap = Bootstrap()
moment = Moment()
babel = Babel()
def create_app(config_class=Config):
app = Flask(__name__)
app.config.from_object(config_class)
db.init_app(app)
migrate.init_app(app, db)
login.init_app(app)
mail.init_app(app)
bootstrap.init_app(app)
moment.init_app(app)
babel.init_app(app)
app.elasticsearch = Elasticsearch([app.config['ELASTICSEARCH_URL']]) if app.config['ELASTICSEARCH_URL'] else None
from app.errors import bp as errors_bp
app.register_blueprint(errors_bp)
from app.auth import bp as auth_bp
app.register_blueprint(auth_bp, url_prefix='/auth')
from app.main import bp as main_bp
app.register_blueprint(main_bp)
if not app.debug and not app.testing:
if app.config['MAIL_SERVER']:
auth = None
if app.config['MAIL_USERNAME'] or app.config['MAIL_PASSWORD']:
auth = (app.config['MAIL_USERNAME'], app.config['MAIL_PASSWORD'])
secure = None
if app.config['MAIL_USE_TLS']:
secure = ()
mail_handler = SMTPHandler(
mailhost=(app.config['MAIL_SERVER'], app.config['MAIL_PORT']),
fromaddr='no-reply@' + app.config['MAIL_SERVER'],
toaddrs=app.config['ADMINS'], subject='Microblog Failure',
credentials=auth, secure=secure)
mail_handler.setLevel(logging.ERROR)
app.logger.addHandler(mail_handler)
if app.config['LOG_TO_STDOUT']:
strem_handler = logging.StreamHandler()
stream_handler.setLevel(logging.INFO)
app.logger.addHandler(stream_handler)
else:
if not os.path.exists('logs'):
os.mkdir('logs')
file_handler = RotatingFileHandler('logs/microblog.log', maxBytes=10240,
backupCount=10)
file_handler.setFormatter(logging.Formatter(
'%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
file_handler.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
app.logger.setLevel(logging.INFO)
app.logger.info('Microblog startup')
return app
@babel.localeselector
def get_locale():
#return request.accept_languages.best_match(current_app.config['LANGUAGES'])
return 'es'
from app import models | [
"melaniesarah@gmail.com"
] | melaniesarah@gmail.com |
417a08e70bb7a9d9dd71117edfc2d81b6f20d424 | 0686243fad8eb4e17959d8819c62d183d71adf57 | /tasks/views.py | e18347d900ef9a8032594d1c89c71cbcefe9fb5c | [] | no_license | package-coder/REPO-CS50Django | c241034d1e164d8d79a2ebf5601dce0517b7db66 | 0b1acb77800cbc55f8a7ef608a498aa813253851 | refs/heads/main | 2023-07-25T08:18:35.750988 | 2021-08-29T11:10:28 | 2021-08-29T11:10:28 | 401,021,110 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,000 | py | from django.shortcuts import render
from django import forms
from django.http import HttpResponseRedirect
from django.urls import reverse
tasks = []
class TaskDAOForm(forms.Form):
task = forms.CharField(label="Task")
priority = forms.IntegerField(label="Priority", min_value=1, max_value=10)
# Create your views here.
def index(request):
if "tasks" not in request.session:
request.session["tasks"] = []
return render(request, "tasks/index.html", {
"tasks": request.session["tasks"]
})
def new(request):
if request.method == 'POST':
form = TaskDAOForm(request.POST)
if form.is_valid():
task = form.cleaned_data["task"]
request.session["tasks"] += [task]
return HttpResponseRedirect(reverse("tasks:index"))
else:
return render(request, "tasks/new.html", {
"form": form
})
return render(request, "tasks/new.html",{
"forms": TaskDAOForm()
}) | [
"79316343+package-coder@users.noreply.github.com"
] | 79316343+package-coder@users.noreply.github.com |
1e6a1bb4cbd846a1560ef7c7610b9ebcc4805c88 | 0e8b844a713bdb29e47ab445e55e0470194f3fda | /venv/bin/isort | c01f421efccde16d7e0aa853783f244265ba6e87 | [] | no_license | Linazzi83/aplicacao4 | 77666efd124192298cf3db06704a164a33dc451b | 2a1d20b3c3bb2da618b533b6bdb79f72802cdc80 | refs/heads/master | 2020-07-20T15:14:02.049933 | 2019-09-05T19:11:46 | 2019-09-05T19:11:46 | 206,665,730 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 233 | #!/home/san/Dev/app4/venv/bin/python3.7
# -*- coding: utf-8 -*-
import re
import sys
from isort.main import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"gedegoliu@gmail.com"
] | gedegoliu@gmail.com | |
da2e6602833e17793bb83c4b4c77acb468f4abe2 | 850435d1ba6bd7c7a22f1a866af7ab75c49471b3 | /and-the-award-goes-to-master/extract/util.py | 90359e96560404c14899ca93231e3a018f995033 | [
"MIT"
] | permissive | csaund/formal-models | fcc93c7f6df0e4bf2a1f988cd9a6785122b92ae0 | 85d5f217fbbc05f373d4822093373665c256a3ee | refs/heads/master | 2020-04-17T09:14:37.197142 | 2019-04-17T14:28:37 | 2019-04-17T14:28:37 | 166,450,395 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,877 | py | import re
import os
import csv
import requests
from bs4 import BeautifulSoup
def only_ascii(string):
""" Returns only ASCII encoded characters """
return "".join([c for c in string if ord(c) < 127])
def check_headers(filename, fieldnames):
""" Validates input fields to file fields. """
with open(filename, 'r') as f:
header = csv.reader(f).next()
overlap = set(header).intersection(fieldnames)
if len(header) != len(overlap):
raise Exception("Input fields must match file fields.")
def write_to_csv(filename, results, isList = True):
""" Writes list of dictionaries to CSV file. """
fieldnames = results[0].keys() if isList else results.keys()
fieldnames = sorted(fieldnames)
with open(filename, 'ab+') as f:
writer = csv.DictWriter(f, fieldnames=fieldnames)
if os.path.getsize(filename) == 0:
writer.writeheader()
else:
check_headers(filename, fieldnames)
if isList:
writer.writerows(results)
else:
writer.writerow(results)
def retrieve_clean_response(href):
raw_html = requests.get(href)
raw_html = re.sub(r'\[[0-9A-Z]+\]', '', raw_html.content) # removes footnotes
dom_tree = BeautifulSoup(raw_html, "lxml")
# why do we do this?
for tag in dom_tree.find_all('sup'):
tag.replaceWith('')
# why do we do this?
for tag in dom_tree.find_all('td'):
if 'rowspan' in tag.attrs.keys():
tag.name = 'th'
return dom_tree
def find_first_number(string):
try:
return re.findall(r'\b\d+\b', string)[0]
except:
return ""
def find_nth(haystack, needle, n):
""" TBD. """
start = haystack.find(needle)
while start >= 0 and n > 1:
start = haystack.find(needle, start+len(needle))
n -= 1
return start | [
"tmartinovska117@gmail.com"
] | tmartinovska117@gmail.com |
1bcf9326d84b2e3d2430d6b53b5c8bd59809b755 | 6a861335fc5d659abdbc8bd9163d924106271f8c | /addNewGTPRevScore.py | 09d4b50b18163530ac6a83b9fe21174683de55af | [] | no_license | krtin/ContrastiveSummaries | 2585b8c84f4d8aa5ffdbeffac65d91b226bf8c95 | fc6fad3ca6ba6b85074ebfa4cfb605eddbd68479 | refs/heads/master | 2020-03-28T06:46:06.244548 | 2018-09-07T21:10:10 | 2018-09-07T21:10:10 | 147,858,579 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,811 | py | import config
import pandas as pd
import pickle as pkl
import numpy as np
import os
import unicodedata
def is_number(s):
try:
float(s)
return True
except ValueError:
pass
try:
import unicodedata
unicodedata.numeric(s)
return True
except (TypeError, ValueError):
pass
return False
#get to the point smc scores
print('Reading SMC data and its probabilities')
smc_data = pd.read_csv(config.smc_final)
#print(len(smc_data))
prob_smc_data = pkl.load(open(config.new_gtp_smc_reverse_probfile, 'rb'))
#print(len(prob_smc_data))
if(len(smc_data)!=len(prob_smc_data)):
raise Exception("Number of generated probabilities and number of smcs are not equal")
else:
#print(smc_data.columns)
print('Adding GTP probabilities to smc data for INCORR padded token')
prob_smc_data = pd.DataFrame(prob_smc_data, columns=['counter', 'log_prob', 'avg_log_prob', 'gtp_target', 'gtp_output'])
#print(len(prob_smc_data))
#print(prob_smc_data['gtp_target'])
smc_data['new_rev_gtp_oov_words'] = prob_smc_data['gtp_target']==prob_smc_data['gtp_output']
smc_data['new_rev_gtp_log_prob'] = prob_smc_data['log_prob']
smc_data['new_rev_gtp_avg_log_prob'] = prob_smc_data['avg_log_prob']
#print(smc_data['gtp_log_prob'])
#print(prob_smc_data['log_prob'])
#print(smc_data['system'])
#print(prob_smc_data['gtp_target'])
smc_data.to_csv(config.smc_final, index=False)
print('Reading SMIC probability data')
probfiles = [f for f in os.listdir(config.new_gtp_smic_reverse_probdir) if os.path.isfile(os.path.join(config.new_gtp_smic_reverse_probdir, f))]
prob_smic_data = {}
for filename in probfiles:
filename_parts = filename.split('_')
if(len(filename_parts)==3):
filename_parts[2] = filename_parts[2].split('.')[0]
if(is_number(filename_parts[1]) and is_number(filename_parts[2])):
start = int(filename_parts[1])
end = int(filename_parts[2])
prob_smic_data[start] = pkl.load(open(os.path.join(config.new_gtp_smic_reverse_probdir, filename), 'rb'))
tmp_data = []
for key in sorted(prob_smic_data.iterkeys()):
#if(key>810000):
#tmp = pd.DataFrame(prob_smic_data[key], columns=['counter', 'gtp_log_prob', 'gtp_avg_log_prob', 'gtp_target', 'gtp_output'])
#print(tmp)
#print('Null count', len(tmp['counter'].isnull()))
#break
#print(key)
print(key, len(prob_smic_data[key]))
tmp_data.extend(prob_smic_data[key])
prob_smic_data = tmp_data
del tmp_data
print(len(prob_smic_data))
print('Reading SMIC data')
smic_data = pd.read_csv(config.smic_final)
#print(len(smic_data))
f = open(config.smicidfile_filtered_new_gtp, 'r')
smicids = f.read().split('\n')
f.close()
orig_len = len(smicids)
#prob_smic_data = prob_smic_data[0:orig_len]
smicids = smicids[0:len(prob_smic_data)]
cut_len = len(smicids)
print('Total available data: %d out of %d' % (cut_len, orig_len))
prob_smic_data = pd.DataFrame(prob_smic_data, columns=['counter', 'new_rev_gtp_log_prob', 'new_rev_gtp_avg_log_prob', 'gtp_target', 'gtp_output'])
prob_smic_data['smic_id'] = np.array(smicids).astype(int)
prob_smic_data['new_rev_oov_words'] = prob_smic_data['gtp_target']!=prob_smic_data['gtp_output']
del prob_smic_data['gtp_target']
del prob_smic_data['gtp_output']
del prob_smic_data['counter']
orig_smic_len = len(smic_data)
smic_data = pd.merge(smic_data, prob_smic_data, left_on='smic_id', right_on='smic_id', how='left')
prob_added_len = smic_data['new_rev_gtp_log_prob'].count()
if(prob_added_len!=(cut_len)):
raise Exception("Some ids are not matching during join")
if(len(smic_data)!=orig_smic_len):
raise Exception("Some issue in joining")
#print(smic_data)
smic_data.to_csv(config.smic_final, index=False)
| [
"kkumar15@CS.McGill.CA"
] | kkumar15@CS.McGill.CA |
7240d4ecab15682787443877a38f85b61f1f032b | 46a09c35dda8f75d9df6af8db52f6e826e1fc1e1 | /main.py | 773769696b9b9bc14d292f4fe478bf0fabe7fe80 | [] | no_license | stevebottos/stocktwits-scraper | c4c675da029859e578b609fc8f452f346b404285 | 04609b9a33a94a989f3476fcaa7e742fcb179efb | refs/heads/master | 2023-03-13T06:50:19.636431 | 2021-03-04T07:20:42 | 2021-03-04T07:20:42 | 291,401,654 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,788 | py | """
* Needed to enable gmails "less secure apps" to allow the email portion to work
"""
import smtplib
import ssl
from email.mime.text import MIMEText
import os
import secrets
import requests
import json
from datetime import datetime
import pickle
import re
import pandas as pd
import pathlib
PATH = pathlib.Path(__file__).parent.absolute()
USERS = [
"Newsfilter",
"fla",
"cctranscripts"
]
EMAIL_LIST = ["MARA",
"NAK",
"FNKO",
"STAF",
"EMAN",
"BNED",
"TTOO",
"IDEX",
"LAC",
"OXLC",
"RNET",
"APHA",
"IEX",
"AYRO",
"GEVO",
"LTHM",
"SOLO",
"BLIN",
"FAMI",
"GRIL",
"TGC"
]
MONTHS = {1 : ["January", "Jan"],
2 : ["February", "Feb"],
3 : ["March"],
4 : ["April"],
5 : ["May"],
6 : ["June"],
7 : ["July"],
8 : ["August", "Aug"],
9 : ["September", "Sep"],
10: ["October", "Oct"],
11: ["November", "Nov"],
12: ["December", "Dec"]}
def find_catalysts(filename, headlines, search_terms:list, after_day=None):
headlines_filtered = headlines.loc[(headlines["message"].str.contains(search_terms[0])) | (headlines["message"].str.contains(search_terms[1]))]
with open(filename, "w+") as f:
if after_day:
for _, row in headlines_filtered.iterrows():
row_split = row["message"].split(" ")
messages = []
for i, r in enumerate(row_split):
if r in search_terms:
possible_num = row_split[i+1].replace(",", "").replace(".","")
try:
num = int(possible_num)
if num > after_day and num != 2021:
rows = row["message"] = row["message"].replace("\n", " ")
if len(row["message"]) >= 200:
rows = [rows[:200] + "\n\t|", rows[200:]]
else:
rows = [rows]
if rows not in messages:
rows_as_str = " ".join(rows)
s = row["tickers"] + "\t|" + rows_as_str
f.write(s)
f.write("\n----------------------------------------------------------------------------------------------------------------\n")
messages.append(rows)
except:
pass
def scrape(users_of_interest, tickers_of_interest):
scraped_messages = []
url_str = "https://api.stocktwits.com/api/2/streams/user/{}.json"
for user in users_of_interest:
url = url_str.format(user)
response = requests.request("GET", url)
parsed = json.loads(response.text)
messages = parsed["messages"]
for message in messages:
message_body = message["body"]
for ticker in tickers_of_interest:
search_key = "".join(("$", ticker, " "))
if search_key in message_body:
scraped_messages.append(message_body)
return scraped_messages
def format_email_message(email_content):
return "\n---\n".join(email_content)
def send_email(email_content):
msg = MIMEText(email_content)
msg['Subject'] = "STOCKTWITS ALERT"
msg['From'] = msg['To'] = secrets.USER
context=ssl.create_default_context()
with smtplib.SMTP("smtp.gmail.com", port=587) as smtp:
smtp.starttls(context=context)
smtp.login(secrets.USER, secrets.PASS)
smtp.send_message(msg)
def save_list_as_pickle(lst,fname='todays_entries.pkl'):
with open(os.path.join(PATH, fname), 'wb+') as f:
pickle.dump(lst, f)
def load_list_from_pickle(fname='todays_entries.pkl'):
with open(os.path.join(PATH, fname), 'rb') as f:
return pickle.load(f)
today = datetime.now()
if today.time().hour == 6 and today.time().minute <=10:
todays_entries = load_list_from_pickle()
info_dict = {}
info_dict["tickers"] = []
info_dict["message"] = []
for entry in todays_entries:
tickers = re.findall(r'[$][A-Za-z]+', entry)
for ticker in tickers:
info_dict["tickers"].append(ticker)
info_dict["message"].append(entry)
headlines = pd.DataFrame.from_dict(info_dict)
# To filter out the bullshit
headlines = headlines.loc[~headlines["message"].str.contains("Why")]
headlines = headlines.loc[~headlines["message"].str.contains("Stocks Moving")]
headlines = headlines.loc[~headlines["message"].str.contains("gainers")]
headlines = headlines.loc[~headlines["message"].str.contains("Gainers")]
headlines = headlines.loc[~headlines["message"].str.contains("movers")]
headlines = headlines.loc[~headlines["message"].str.contains("Movers")]
headlines = headlines.loc[~headlines["message"].str.contains("Trading Higher")]
headlines = headlines.loc[~headlines["message"].str.contains("Market Update")]
headlines.to_csv(os.path.join(PATH, "records", str(today).split(" ")[0]+".csv"))
todays_entries = []
save_list_as_pickle(todays_entries)
# The catalysts document
try:
filename = os.path.join(PATH, "catalysts", str(today).split(" ")[0]+".txt")
if not os.path.exists(filename):
open(filename, 'w+').close()
with open(filename, "a+") as f:
find_catalysts(filename,
headlines,
search_terms=MONTHS[today.month],
after_day=today.day)
except:
pass
todays_entries = load_list_from_pickle()
tickers_of_interest = set(secrets.WATCHLIST + EMAIL_LIST)
email_raw = scrape(USERS, tickers_of_interest)
email_raw_filtered = [email for email in email_raw if email not in todays_entries]
if len(email_raw_filtered):
todays_entries = email_raw_filtered + todays_entries
save_list_as_pickle(todays_entries)
# Only send an email if it contains one of the special tickers we're watching
email_messages = []
for ticker in EMAIL_LIST:
for entry in email_raw_filtered:
search_key = "".join(("$", ticker, " "))
if search_key in entry:
email_messages.append(entry)
if len(email_messages) > 0:
email_formatted = format_email_message(email_messages)
send_email(email_formatted)
print("SUCCESS")
| [
"steve.bottos@hotmail.ca"
] | steve.bottos@hotmail.ca |
f3a8746b07f7a4ed5bae29cf9753647930f33964 | 2f99edadfdea111b06000e01a4d15d7a5ec3264f | /15_python包.py | 6ea8368d1eb2295cc9df8b4e779e34431a0bb50b | [] | no_license | chenweilong1022/python- | 00dd1ffe0f612d8c573e93a5fe1e43eb7715f115 | e7251ab791af65589cf42cf2570286212f4636b3 | refs/heads/master | 2020-05-07T17:33:44.648974 | 2019-04-17T10:18:50 | 2019-04-17T10:18:50 | 180,731,375 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 104 | py |
from test01.package01 import package01
from test01.package02 import package02
package01()
package02() | [
"陈伟龙"
] | 陈伟龙 |
6d6f0f9613d182bbd152627313f49791fd6e358f | 4fa6036d6f924105554ae02ffbd1cec23de80354 | /2020-09-09-blusa-camiseta-criterios-minimo-stock/code_low_demand_aux.py | 541fe5ff613f616a291e409bba74790936b4ff6c | [] | no_license | daryachyzhyk/Reports | a9ac7f4850a26c51dae5198a940ed786636f7acd | 93eb92dc04beb6b02ce97fb74496195d5647c77d | refs/heads/master | 2023-05-14T20:22:41.992352 | 2021-06-09T07:06:42 | 2021-06-09T07:06:42 | 277,542,689 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,516 | py | '''Two families: BLUSA and CAMISETA do not follow the rule of the minimum of stock that we establish on the level family-size. They have a good level of stock but the PS feedback is bad.
Investigate these families using another variables such as: color, print, basic, style, fabric, etc.
Use particular dates.
Compare % of sent units to available in stock.
'''
import os
import pandas as pd
import numpy as np
import pickle
from joblib import Parallel, delayed, parallel_backend
def opt_sum(opt, df_opt_dummy):
df_opt = pd.DataFrame([])
df_opt['opt_demanda'] = df_opt_dummy[opt] * df_opt_dummy['demanda']
df_opt['opt_stock_actual'] = df_opt_dummy[opt] * df_opt_dummy['stock_actual']
df_opt['demanda'] = df_opt_dummy['demanda']
df_opt['stock_actual'] = df_opt_dummy['stock_actual']
# df_opt['real_stock'] = df_opt_dummy['real_stock'] # TODO: delete
opt_sum = df_opt.sum()
return opt_sum
######################################################################################################################
# path
file_demanda = ('/var/lib/lookiero/stock/stock_tool/demanda_preprocessed.csv.gz')
file_product = ('/var/lib/lookiero/stock/stock_tool/productos_preprocessed.csv.gz')
file_stock = ('/var/lib/lookiero/stock/stock_tool/stock.csv.gz')
path_results = ('/home/darya/Documents/Reports/2020-09-09-blusa-camiseta-criterios-minimo-stock')
file_feedback = ('/home/darya/Documents/Reports/2020-09-09-blusa-camiseta-criterios-minimo-stock/Stock_Situacion.csv')
#######################################
# feedback
df_feedback = pd.read_csv(file_feedback, usecols=['Familia', 'Talla', 'Fecha', 'Stock NOK'])
df_feedback = df_feedback.rename(columns={'Familia': 'family_desc',
'Talla': 'size',
'Fecha': 'date',
'Stock NOK': 'stock_nok'})
df_feedback['stock_nok'] = df_feedback['stock_nok'].fillna(0)
df_feedback.loc[df_feedback['stock_nok'] != 0, 'stock_nok'] = 1
family_list = list(set(df_feedback['family_desc']))
date_list = list(set(df_feedback['date']))
#######################################
# demanda
query_demanda_text = 'date_ps_done in @date_list and family_desc in @family_list'
df_demanda_raw = pd.read_csv(file_demanda, usecols=['reference', 'date_ps_done', 'family_desc']).query(query_demanda_text)
reference_list = list(set(df_demanda_raw['reference'].to_list()))
df_demanda = df_demanda_raw.groupby(['date_ps_done', 'reference']).size().reset_index(name='demanda')
df_demanda = df_demanda.rename(columns={'date_ps_done': 'date'})
###################################
# stock
query_stock_text = 'date in @date_list and reference in @reference_list'
df_stock_raw = pd.read_csv(file_stock).query(query_stock_text)
# visible
df_stock = df_stock_raw.drop(df_stock_raw[(df_stock_raw['es'] == 0) & (df_stock_raw['fr'] == 0) &
(df_stock_raw['gb'] == 0) & (df_stock_raw['pt'] == 0) &
(df_stock_raw['be'] == 0) & (df_stock_raw['lu'] == 0) &
(df_stock_raw['it'] == 0)].index)
df_demanda_stock = pd.merge(df_demanda,
df_stock[['date', 'real_stock', 'reference']],
on=['date', 'reference'],
how='outer')
df_demanda_stock['real_stock'] = df_demanda_stock['real_stock'].fillna(0)
df_demanda_stock['demanda'] = df_demanda_stock['demanda'].fillna(0)
######################################
# add description of the product
# product
var_list_aux = ['reference', 'family_desc', 'size']
# TODO add 'composicion' , corte, grosor, ligero -> all fields are Nan
var_list_cat = ['clima',
# 'aventurera',
'basico',
'estilo_producto',
'fit',
'uso',
# 'pattern',
'has_pattern',
'composicion',
'origen',
'color_group',
'color_category',
'price_range_product',
# 'tejido',
'acabado',
# TODO: añadir premium
#'premium',
# 'corte',
# 'grosor',
# 'ligero'
]
# var_list_opt = []
# var_list = var_list_aux + var_list_cat + var_list_opt
var_list = var_list_aux + var_list_cat
query_product_text = 'reference in @reference_list'
df_product_raw = pd.read_csv(file_product, usecols=var_list).query(query_product_text)
df_product_raw = df_product_raw.drop_duplicates('reference', keep='last')
################################
df = pd.merge(df_demanda_stock,
df_product_raw,
on=['reference'],
how='outer')
df = df[df['family_desc'].isin(family_list)]
# stock actual
df['stock_actual'] = df['real_stock']
df.loc[df['stock_actual'] < df['demanda'], 'stock_actual'] = df['demanda']
date_family_size_list = list(zip(df['date'], df['family_desc'], df['size']))
# test
# date_family_size_list = list(zip(['2020-07-24', '2020-07-24'], ['VESTIDO', 'VESTIDO'], ['M', 'XXXL']))
# df_indicators = pd.DataFrame([])
mean_weight_relative_list = []
mean_weight_abs_list = []
date_family_size_var_valor_list = []
for date_family_size in date_family_size_list:
# date_family_size = date_family_size_list[0]
dt = date_family_size[0]
family = date_family_size[1]
sz = date_family_size[2]
print(dt)
print(family)
print(sz)
df_fam_sz_var = df[(df['date'] == dt) & (df['family_desc'] == family) & (df['size'] == sz)]
for var in var_list_cat:
# print(var)
if ~df_fam_sz_var[var].isnull().all():
# dummies
df_dummy = pd.get_dummies(df_fam_sz_var[var], columns=var)
# print(df_dummy.head())
var_group_aux = ['date', 'family_desc', 'size', 'demanda', 'real_stock', 'stock_actual']
df_opt_dummy = pd.concat([df_fam_sz_var[var_group_aux], df_dummy], axis=1)
var_opt_list = df_dummy.columns.to_list()
# for each option of the variable calculate distr_abs and distr_relativa
with parallel_backend('threading', n_jobs=6):
opt_sum_paral = Parallel()(
delayed(opt_sum)(opt, df_opt_dummy) for opt in var_opt_list)
df_gr = pd.DataFrame(opt_sum_paral)
# option
# df_opt = pd.DataFrame([])
# df_gr = pd.DataFrame([])
# for opt in var_opt_list:
# df_opt['opt_demanda'] = df_opt_dummy[opt] * df_opt_dummy['demanda']
# df_opt['opt_stock_actual'] = df_opt_dummy[opt] * df_opt_dummy['stock_actual']
# df_opt['demanda'] = df_opt_dummy['demanda']
# df_opt['stock_actual'] = df_opt_dummy['stock_actual']
# # df_opt['real_stock'] = df_opt_dummy['real_stock'] # TODO: delete
# df_gr = df_gr.append(df_opt.sum(), ignore_index=True)
df_gr['pct_demanda'] = df_gr['opt_demanda'] / df_gr['demanda']
# porcentaje de demanda de opcion de demanda de variable
df_gr['pct_stock'] = df_gr['opt_stock_actual'] / df_gr['stock_actual']
df_gr['pct_demanda_stock_actual'] = df_gr['opt_demanda'] / df_gr['opt_stock_actual']
df_gr['distr_relative'] = np.where((df_gr['pct_demanda'] == 0) | (df_gr['pct_demanda'] < df_gr['pct_stock']), 0, 1)
df_gr['distr_abs'] = np.where((df_gr['demanda'] == 0) | (df_gr['pct_demanda_stock_actual'] < 1), 0, 1)
# df_gr['mean_weight'] = df_gr['mean_weight']
date_family_size_var_valor_list.append((dt, family, sz, var,
(df_gr['distr_relative'] * df_gr['pct_demanda']).sum(),
(df_gr['distr_abs'] * df_gr['pct_demanda']).sum()))
# mean_weight_relative_list.append((df_gr['distr_relative'] * df_gr['pct_demanda']).sum())
# mean_weight_abs_list.append((df_gr['distr_abs'] * df_gr['pct_demanda']).sum())
df_indicators = pd.DataFrame(date_family_size_var_valor_list, columns=['date', 'family_desc', 'size', 'variable',
'mean_weight_relative', 'mean_weight_abs'])
df_indicators_label = pd.merge(df_indicators, df_feedback,
on=['date', 'family_desc', 'size'])
df_indicators_label_gr = df_indicators_label.groupby(['date', 'family_desc', 'size']).agg({'mean_weight_relative': 'mean',
'mean_weight_abs': 'mean',
'stock_nok': 'last'}).reset_index()
# save
df_indicators_label.to_csv(os.path.join(path_results, 'date_family_size_var_mean_weight_relat_abs_psfeedback.csv'), index=False)
df_indicators_label_gr.to_csv(os.path.join(path_results, 'date_family_size_mean_var_mean_weight_relat_abs_psfeedback.csv'), index=False)
# with open(os.path.join(path_results, 'var_list.txt'), "wb") as fp: # Pickling
# pickle.dump(var_group, fp)
# df_indicators = pd.DataFrame(date_family_size_list, columns=['date', 'family_desc', 'size'])
# df_indicators['mean_weight_relative'] = mean_weight_relative_list
# df_indicators['mean_weight_abs'] = mean_weight_abs_list
##########################################
# test
#
# df_test = df.copy()
#
# df = df_test.copy()
# var = 'fit'
# df_fam_sz_var = df[(df['family_desc'].isin(['VESTIDO', 'TOP', 'ABRIGO'])) &
# (df['date'] == '2020-07-24') &
# (df['size'] == 'XXXL')]
####### end test
#
#
# for col in var_group:
# print(col)
# columns = var_group_aux + [col]
# df_var = df[columns].copy()
#
#
# df_var[col + '_demanda'] = df_var[col] * df_var['demanda']
# df_var[col + '_stock_real'] = df_var[col] * df_var['real_stock']
# df_var[col + '_stock_actual'] = df_var[col] * df_var['stock_actual']
#
# df_gr = df_var.groupby(['date', 'family_desc', 'size']).sum().reset_index()
# # stock real, stock appeared in snapshots
# # stock actual, in case when number of items in snapshots is less then in stock_real,
#
# # porcentaje de demanda de opcion de demanda de variable
# df_gr[col + '_pct_demanda_demanda'] = df_gr[col + '_demanda'] / df_gr['demanda']
#
# # porcentaje de demanda de opcion de demanda de variable
# df_gr[col + '_pct_stock_stock'] = df_gr[col + '_stock_actual'] / df_gr['stock_actual']
#
# # percentage of variable option (option "holgado" of variable "fit") shipped of all real stock (snapshot),
# # could be more then 100%
# df_gr[col + '_pct_demanda_stock_real'] = df_gr[col + '_demanda'] / df_gr[col + '_stock_real']
#
# # percentage of variable option (option "holgado" of variable "fit") shipped of all actual stock (snapshot),
# # could be 100% maximum
# df_gr[col + '_pct_demanda_stock_actual'] = df_gr[col + '_demanda'] / df_gr[col + '_stock_actual']
#
# # percentage of option (option 'holgado') of variable ('fit') stock, could be 100% maximum
# df_gr[col + '_pct_varstock'] = df_gr[col + '_stock_actual'] / df_gr['stock_actual']
#
# # percentage of option (option 'holgado') shipped of variable ('fit') stock
#
# df_gr[col + '_pct_demanda_weight'] = df_gr[col + '_pct_demanda_stock_actual'] * df_gr[col + '_pct_varstock']
#
# # TODO: distr relativa, absoluta
# # distr relativa distr abs
# # =if (OR(demanda=0, demanda de stock actual > stock actual), 0, 1)
# df_gr[col + '_distr_relativa'] = np.where((df_gr[col + '_pct_demanda_demanda'] == 0) |
# (df_gr[col + '_pct_demanda_demanda'] < df_gr[col + '_pct_stock_stock']),
# 0, 1)
#
# df_gr[col + '_distr_abs'] = np.where((df_gr[col + '_demanda'] == 0) |
# (df_gr[col + '_pct_demanda_stock_actual'] < 1),
# 0, 1)
#
#
#
# df_gr = df_gr.fillna(0)
#
# df_gr = df_gr.replace(np.inf, 1.0)
#
# # TODO: añadir nuevas columnas
# df_var_pct = df_var_pct.merge(df_gr[['date', 'family_desc', 'size',
# col + '_stock_actual',
# col + '_pct_demanda_demanda',
# col + '_pct_stock_stock',
# col + '_pct_demanda_stock_real',
# col + '_pct_demanda_stock_actual',
# col + '_pct_varstock',
# col + '_pct_demanda_weight',
# col + '_distr_relativa',
# col + '_distr_abs']],
# on=['date', 'family_desc', 'size'],
# how='outer')
#
# # save as column
#
# df_temp = df_var_pct[['date', 'family_desc', 'size']]
# df_temp['varoption_pct_demanda_stock_real'] = df_var_pct[col + '_pct_demanda_stock_real']
# df_temp['varoption_pct_demanda_stock_actual'] = df_var_pct[col + '_pct_demanda_stock_actual']
#
# df_temp['varoption_pct_varstock'] = df_var_pct[col + '_pct_varstock']
# df_temp['varoption_pct_demanda_weight'] = df_var_pct[col + '_pct_demanda_weight']
#
# df_temp['varoption'] = col
# df_var_pct_col = df_var_pct_col.append(df_temp)
#
# # TODO df_var_pct_col change inf to 10, nan to 0
#
# df_var_pct = df_var_pct.fillna(0)
# df_var_pct_col = df_var_pct_col.fillna(0)
#
# df_var_pct_col = df_var_pct_col.replace(np.inf, 1.0)
#
# df_var_pct_ps = df_var_pct.merge(df_feedback,
# on=['date', 'family_desc', 'size'],
# how='outer')
#
# df_var_pct_col_ps = df_var_pct_col.merge(df_feedback,
# on=['date', 'family_desc', 'size'],
# how='outer')
#
# df_var_pct_ps = df_var_pct_ps.fillna(0)
# df_var_pct_ps = df_var_pct_ps.replace(np.inf, 1)
#
# # TODO merge df_var_pct_col with PS labels
# ##################################################################################################################
# # save
#
# # df_var_pct_ps.to_csv(os.path.join(path_results, 'date_family_size_var_pct_psfeedback.csv'), index=False)
# # df_var_pct_col_ps.to_csv(os.path.join(path_results, 'date_family_size_var_pct_col_psfeedback.csv'), index=False)
# #
# # aa = df_var_pct_ps.groupby(['family_desc']).agg({'stock_nok': 'mean'})
# #
# # with open(os.path.join(path_results, 'var_list.txt'), "wb") as fp: # Pickling
# # pickle.dump(var_group, fp)
#
#
# test = df[(df['family_desc']=='VESTIDO') & (df['date']=='2020-07-24') & (df['size']=='XXXL')]
#
# var_list_aux = ['reference', 'family_desc', 'size']
# var_group = set(df.columns.to_list()) - set(['date', 'reference', 'demanda', 'real_stock', 'family_desc', 'size', 'stock_actual'])
#
#
#
#
# # # eliminate good dates for CAMISETA and good date for BLUSA
# #
# # df = df.drop(df[(df['family_desc'] == 'CAMISETA') & (~df['date'].isin(date_list_camiseta))].index)
# #
# # df = df.drop(df[(df['family_desc'] == 'BLUSA') & (~df['date'].isin(date_list_blusa))].index)
#
#
# df_return = pd.DataFrame([])
# df_threshold = pd.DataFrame([])
#
# var_dummies = set(df.columns.to_list()) - set(['date', 'reference', 'demanda', 'real_stock', 'family_desc', 'size'])
#
# ###############################################
# ###############################################
#
# for var_name in var_list[2:]:
# print(var_name)
# # var_name = 'aventurera'
#
# df_var = df.groupby(['date', 'family_desc', var_name]).agg({'demanda': 'sum',
# 'real_stock': 'sum'}).reset_index()
#
# df_var['demanda_pct'] = df_var['demanda'] / df_var['real_stock'] * 100
#
#
# df_var['demanda_pct_w'] = df_var['demanda_pct'] / df_var['real_stock']
#
# df_var.loc[(df_var['demanda'] == 0) & (df_var['real_stock'] != 0), 'demanda_pct'] = 0
# df_var.loc[(df_var['demanda'] != 0) & (df_var['real_stock'] == 0), 'demanda_pct'] = 1
#
# threshold_min = 20
#
# threshold_max = 80
#
# threshols_days = 0.3
#
#
# df_var_thr = df_var[(df_var['demanda_pct'] < threshold_min) | (df_var['demanda_pct'] > threshold_max)]
# df_var_thr['var_name'] = var_name
# df_var_thr = df_var_thr.rename(columns={var_name: 'var_option'})
# df_threshold = df_threshold.append(df_var_thr)
#
#
# df_var_thr_min = df_var[df_var['demanda_pct'] < threshold_min]
#
# df_var_thr_max = df_var[df_var['demanda_pct'] > threshold_max]
#
#
#
#
#
#
# df_var_thr_min['n'] = 1
# df_var_thr_max['n'] = 1
# n_div = len(df['date'].unique())
#
# df_var_thr_min_fam = df_var_thr_min.groupby(['family_desc', var_name]).agg({'n': 'sum',
# 'demanda_pct': 'mean'}).reset_index()
# df_var_thr_min_fam['n'] = df_var_thr_min_fam['n'] / n_div
#
# df_var_thr_max_fam = df_var_thr_max.groupby(['family_desc', var_name]).agg({'n': 'sum',
# 'demanda_pct': 'mean'}).reset_index()
# df_var_thr_max_fam['n'] = df_var_thr_max_fam['n'] / n_div
#
# df_var_demand_low = df_var_thr_min_fam[df_var_thr_min_fam['n'] >= threshols_days]
#
#
# df_var_demand_high = df_var_thr_max_fam[df_var_thr_max_fam['n'] >= threshols_days]
#
# df_var_demand_low['var_type'] = var_name
# df_var_demand_low['problem_type'] = 'demand_low'
#
# df_var_demand_high['var_type'] = var_name
# df_var_demand_high['problem_type'] = 'demand_high'
#
# df_var_demand_low = df_var_demand_low.rename(columns={var_name: 'var_name'})
# df_var_demand_high = df_var_demand_high.rename(columns={var_name: 'var_name'})
#
# df_return = df_return.append(df_var_demand_low)
# df_return = df_return.append(df_var_demand_high)
#
# # df_return = df_return.rename(columns={var_name: 'var_name'})
#
# # save
# df_return.to_csv(os.path.join(path_results, 'blusa_camiseta_low_hight_demand_pct.csv'))
# df_threshold.to_csv(os.path.join(path_results, 'blusa_camiseta_threshold_pct_pct.csv'))
################ END ################
#####
# brand
# df_brand = df.groupby(['date', 'family_desc', 'brand']).agg({'demanda': 'sum',
# 'real_stock': 'sum'}).reset_index()
#
# df_brand['demanda_pct'] = df_brand['demanda'] / df_brand['real_stock'] * 100
#
#
# df_brand['demanda_pct_w'] = df_brand['demanda_pct'] / df_brand['real_stock']
#
# df_brand.loc[(df_brand['demanda'] == 0) & (df_brand['real_stock'] != 0), 'demanda_pct'] = 0
# df_brand.loc[(df_brand['demanda'] != 0) & (df_brand['real_stock'] == 0), 'demanda_pct'] = 1
#
# threshold_min = 20
#
# threshold_max = 80
#
#
# df_brand_thr = df_brand[(df_brand['demanda_pct'] < threshold_min) | (df_brand['demanda_pct'] > threshold_max)]
#
#
#
# df_brand_thr_min = df_brand[df_brand['demanda_pct'] < threshold_min]
#
# df_brand_thr_max = df_brand[df_brand['demanda_pct'] > threshold_max]
#
#
#
#
#
#
# df_brand_thr_min['n'] = 1
# df_brand_thr_max['n'] = 1
# n_div = len(df['date'].unique())
#
# df_brand_thr_min_fam = df_brand_thr_min.groupby(['family_desc', 'brand']).agg({'n': 'sum'}).reset_index()
# df_brand_thr_min_fam['n'] = df_brand_thr_min_fam['n'] / n_div
#
# df_brand_thr_max_fam = df_brand_thr_max.groupby(['family_desc', 'brand']).agg({'n': 'sum'}).reset_index()
# df_brand_thr_max_fam['n'] = df_brand_thr_max_fam['n'] / n_div
#
# df_brand_demand_low = df_brand_thr_min_fam[df_brand_thr_min_fam['n'] >= 0.5]
#
#
# df_brand_demand_high = df_brand_thr_max_fam[df_brand_thr_max_fam['n'] >= 0.5]
#
# df_brand_demand_low['var_type'] = 'brand'
# df_brand_demand_low['problem_type'] = 'demand_low'
#
# df_brand_demand_high['var_type'] = 'brand'
# df_brand_demand_high['problem_type'] = 'demand_high'
#
#
#
#
# df_return = df_return.append(df_brand_demand_low)
# df_return = df_return.append(df_brand_demand_high)
#
# df_return = df_return.rename(columns={'brand': 'var_name'})
| [
"d.chyzhyk@lookiero.com"
] | d.chyzhyk@lookiero.com |
1e28c69475afb7c2f2e67c3a0a62edb27df50580 | 179f33dc6856f32db8aceffdd1cf2278000fb321 | /blog/models.py | 48d317ba3d060c202d24ea634059d8fb6d1d60b7 | [] | no_license | alejandrapiedrasanta/progracomercial | 8c9826012f9a48a132276f275152a7fdbf065163 | 68c2c68379e178e54fa827d8c84d2f695b6a01aa | refs/heads/master | 2023-08-14T09:19:14.836187 | 2021-10-14T09:32:20 | 2021-10-14T09:32:20 | 411,798,479 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 677 | py | from django.db import models
# Createfrom django.db import models
from django.utils import timezone
class Publicacion(models.Model):
    """Blog post with author, title, body, and creation/publication times."""

    # Deleting the auth user cascades to their posts.
    autor = models.ForeignKey('auth.User', on_delete=models.CASCADE)
    titulo = models.CharField(max_length=200,help_text="Ingrese el titulo")
    texto = models.TextField()
    # Stamped at row creation (default=, so it can still be overridden).
    fecha_creacion = models.DateTimeField(
        default=timezone.now)
    # Null until publicar() is called.
    fecha_publicacion = models.DateTimeField(
        blank=True, null=True)

    def publicar(self):
        """Stamp the publication time and persist the post."""
        self.fecha_publicacion = timezone.now()
        self.save()

    def __str__(self):
        return self.titulo

    class Meta:
        # Correct Spanish plural in the admin ("Publicacions" otherwise).
        verbose_name_plural = 'Publicaciones'
"alesarti@mesoamericana.edu.gt"
] | alesarti@mesoamericana.edu.gt |
b92efc48c054ab584bde5eced15bde64256a245a | 3951ae64b6b7ada4f035a3dd835b23cf7e624ee0 | /advert/models.py | b7ee5f7904cd6c568b07570302a340d84dec2ef5 | [] | no_license | ichinda/Tenders | bc4123be28929f827d773c267d2fac2d93662ff5 | f13111b7aeccf90bf5e4104966f76936e83ef41e | refs/heads/master | 2022-03-06T17:26:31.351951 | 2019-09-20T03:45:48 | 2019-09-20T03:45:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 979 | py | from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from django.urls import reverse
from phonenumber_field.modelfields import PhoneNumberField
# Create your models here.
class Advert(models.Model):
    """Business listing: image, contact info, offered products, and hours."""

    image = models.ImageField(default= 'default_business.jpg', upload_to = 'business_pics')
    name = models.CharField(max_length=100)
    address = models.CharField(max_length=100)
    email = models.EmailField()
    # Products offered by this business (shared Item lookup table).
    products = models.ManyToManyField('Item')
    hours = models.CharField(max_length=100, default= '10:00AM – 08:00PM (Mon-Sat)')
    details = models.TextField()
    date_posted = models.DateTimeField(auto_now_add= True)
    # Deleting the user cascades to their adverts.
    author = models.ForeignKey(User, on_delete = models.CASCADE)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        """Canonical URL of this advert's detail page."""
        return reverse('advert-detail', kwargs= {'pk': self.pk})
class Item(models.Model):
    """A single product name that Advert rows reference via ManyToMany."""

    item = models.CharField(max_length=30)

    def __str__(self):
        return self.item
"agrawal.arpit14@gmail.com"
] | agrawal.arpit14@gmail.com |
1c0a243fae087ba9520b940a1940a5458e5d1a61 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_95/2103.py | a414f3ccb3b1e3142cf256e7d173e37982b1e31b | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,026 | py | #! /usr/bin/env python
import operator
from sys import stdin
in1 = "ejp mysljylc kd kxveddknmc re jsicpdrysirbcpc ypc rtcsra dkh wyfrepkym veddknkmkrkcdde kr kd eoya kw aej tysr re ujdr lkgc jvzq"
out1 = "our language is impossible to understandthere are twenty six factorial possibilitiesso it is okay if you want to just give upqz"
sample = "ejp mysljylc kd kxveddknmc re jsicpdrysi"
def getInput():
    """Read all of stdin and return its lines with newline characters removed."""
    raw = stdin.readlines()
    for x in range(0, len(raw)):
        # replace() removes every '\n'; readlines() yields at most one per line.
        raw[x] = raw[x].replace('\n', '')
    return raw
def makeMap(input_str, output_str):
    """Build a char->char translation dict from two aligned sample strings.

    Space characters in the input sample are skipped (space maps to itself
    implicitly and is handled by the caller).
    """
    return {src: dst
            for src, dst in zip(input_str, output_str)
            if src != " "}
def googler2english(input_str):
    """Translate a Googlerese string to English, preserving spaces.

    Raises KeyError for characters absent from the training samples,
    exactly like the original character-by-character loop.
    """
    translation = makeMap(in1, out1)
    return ''.join(ch if ch == ' ' else translation[ch]
                   for ch in input_str)
def main():
    """Code Jam driver: first stdin line is the case count T, then one
    Googlerese line per case (Python 2 — note the print statement)."""
    myinput = getInput()
    bound = int(myinput[0])
    for x in range(1, bound + 1):
        print "Case #%d: %s" % (x, googler2english(myinput[x]))

if __name__ == "__main__":
    main()
| [
"miliar1732@gmail.com"
] | miliar1732@gmail.com |
2fe4cec6defc2e66ddc4db17511c536f84514dd1 | ee6acbd5fcd0fcd16230e96a4a539de41a02c97e | /operators/special-resource-operator/python/pulumi_pulumi_kubernetes_crds_operators_special_resource_operator/sro/v1alpha1/__init__.py | 7e6f12156a258138619de35e038acddf3d969e0c | [
"Apache-2.0"
] | permissive | isabella232/pulumi-kubernetes-crds | 777e78137aaf6525a44b61a02dccf91bf0d87a14 | 372c4c0182f6b899af82d6edaad521aa14f22150 | refs/heads/master | 2023-03-15T04:29:16.039753 | 2020-12-30T19:35:54 | 2020-12-30T19:35:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 275 | py | # coding=utf-8
# *** WARNING: this file was generated by crd2pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
# Export this package's modules as members:
from .SpecialResource import *
from ._inputs import *
from . import outputs
| [
"albertzhong0@gmail.com"
] | albertzhong0@gmail.com |
f7c258330381012d574538f806f44342ee8b2f48 | 3d6f870af1af2bb608bfd404c80c3f8ed58d31b2 | /main.py | d7d437390c61e2d1294ee1a6c701de9cf33b46cb | [] | no_license | saness/tictactoe | 1cbd0b9fa8230f259c587b9194b7eefbdc029ab7 | 4f3ac33d8f0743d0887aa4f602dea311a69cc780 | refs/heads/master | 2022-04-25T16:50:30.106869 | 2020-04-10T06:53:32 | 2020-04-10T06:53:32 | 254,531,521 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,178 | py | board = [' ' for x in range(10)]
def insertLetter(letter, pos):
    """Place `letter` ('X' or 'O') at index `pos` (1-9) of the global board."""
    board[pos] = letter
def spaceIsFree(pos):
    """Return True if global board position `pos` is unoccupied."""
    return board[pos] == ' '
def printBoard(board):
    """Render the 3x3 grid from `board` (indices 1-9; index 0 unused)."""
    # One iteration per row; separators only between rows.
    for first in (1, 4, 7):
        print(' | | ')
        print(' ' + board[first] + '| ' + board[first + 1] + '| ' + board[first + 2])
        print(' | | ')
        if first != 7:
            print('----------- ')
def isWinner(bo, le):
    """Return True if letter `le` occupies a complete row, column, or diagonal.

    Board `bo` is indexed 1-9 (index 0 unused).

    BUG FIX: the original boolean expression listed only seven lines and
    omitted the top row (1, 2, 3), so a win across the top was never
    detected.
    """
    wins = ((1, 2, 3), (4, 5, 6), (7, 8, 9),   # rows
            (1, 4, 7), (2, 5, 8), (3, 6, 9),   # columns
            (1, 5, 9), (3, 5, 7))              # diagonals
    return any(bo[a] == le and bo[b] == le and bo[c] == le
               for a, b, c in wins)
def playerMove():
    """Prompt until the human enters a free position 1-9, then place an 'X'."""
    run = True
    while run:
        move = input('Please select a position to place an \'X\' (1-9):')
        try:
            move = int(move)
            if move > 0 and move < 10:
                if spaceIsFree(move):
                    run = False
                    insertLetter('X', move)
                else:
                    print("Sorry, this space is occupied")
            else:
                print('Please type a number within the range')
        except:
            # Non-numeric input: int() raised, re-prompt.
            print("Please type a number")
def selectRandom(li):
    """Return a uniformly random element of the non-empty list `li`."""
    import random
    # random.choice is the idiomatic equivalent of randrange + indexing.
    return random.choice(li)
def compMove():
    """Pick the computer's ('O') move on the global board.

    Priority: win now, block the opponent's win, random open corner,
    center, random open edge. Falls off the end (returning None) only
    when no square is free.
    """
    # All free squares; index 0 is the unused placeholder.
    possibleMoves = [x for x, letter in enumerate(board) if letter == ' ' and x != 0]
    move = 0

    # 1) Take a winning move for 'O'; 2) otherwise block 'X'.
    for let in ['O', 'X']:
        for i in possibleMoves:
            boardCopy = board[:]
            boardCopy[i] = let
            if isWinner(boardCopy, let):
                move = i
                return move

    # 3) Prefer an open corner.
    cornersOpen = []
    for i in possibleMoves:
        if i in [1, 3, 7, 9]:
            cornersOpen.append(i)
    if len(cornersOpen) > 0:
        move = selectRandom(cornersOpen)
        return move

    # 4) Then the center.
    if 5 in possibleMoves:
        move = 5
        return move

    # 5) Finally an open edge.
    edgesOpen = []
    for i in possibleMoves:
        if i in [2, 4, 6, 8]:
            edgesOpen.append(i)
    if len(edgesOpen) > 0:
        move = selectRandom(edgesOpen)
        return move
def isBoardFull(board):
    """True when at most one blank remains (index 0 is a permanent blank)."""
    return board.count(' ') <= 1
def main():
    """Game loop: human 'X' vs computer 'O' on the shared global board."""
    print("Welcome to tic tac toe")
    printBoard(board)
    while not(isBoardFull(board)):
        # Human's turn, unless the computer already won.
        if not(isWinner(board, "O")):
            playerMove()
            printBoard(board)
        else:
            print('Sorry O\'s won thi time')
            break
        # Computer's turn, unless the human already won.
        if not(isWinner(board, 'X')):
            move = compMove()
            if move == 0:
                # compMove() returns 0 only when it found no square.
                print('Tie Game')
            else:
                insertLetter('O', move)
                print('Computer placed an \'O\' in position', move, ':')
                printBoard(board)
        else:
            print('X\'S won this time')
            break
    if isBoardFull(board):
        print("Tie Game")

main()
| [
"sanishsuwal3@gmail.com"
] | sanishsuwal3@gmail.com |
5e1e1a8a01e9a4132bd94ac4745a7070a47d4718 | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /Gauss_v45r8/Gen/DecFiles/options/22114002.py | 7953c9feeb8c8d3d7f7c9d855b04a94363e3a510 | [] | no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 752 | py | # file /home/hep/ss4314/cmtuser/Gauss_v45r8/Gen/DecFiles/options/22114002.py generated: Fri, 27 Mar 2015 15:48:15
#
# Event Type: 22114002
#
# ASCII decay Descriptor: [D0 -> pi+ pi- mu+ mu-]cc
#
# Configure Gauss generation for event type 22114002
# ([D0 -> pi+ pi- mu+ mu-]cc per the descriptor above): Pythia production
# with EvtGen handling the signal decay.
from Configurables import Generation
Generation().EventType = 22114002
Generation().SampleGenerationTool = "SignalPlain"
from Configurables import SignalPlain
Generation().addTool( SignalPlain )
Generation().SignalPlain.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
# EvtGen decay table for the signal D0 -> pi pi mu mu channel.
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/D0_pipimumu=DecProdCut.dec"
# Require the signal daughters inside the LHCb acceptance.
Generation().SignalPlain.CutTool = "DaughtersInLHCb"
# PDG ids of the signal: D0 and anti-D0.
Generation().SignalPlain.SignalPIDList = [ 421,-421 ]
| [
"slavomirastefkova@b2pcx39016.desy.de"
] | slavomirastefkova@b2pcx39016.desy.de |
7ce855509a02fabb32794d93a57361f957f8535c | 67bde32c6aeb71880a10cc67d5c2266fceb9deb8 | /Python3-Patterns_Recipes_Idioms/saved_script/Ch 28 Fronting for an implementation.py | fe941f29091702deca207bc093ec11c1a0da3102 | [] | no_license | Sprinterzzj/Effective-Python | 981ea71eb028d5c782ab85de2e66c30f724a4ddc | 0af0e1d20741c6bce0951ea17c23534c4ccb2d9a | refs/heads/master | 2021-07-01T16:22:46.682452 | 2020-10-08T10:11:39 | 2020-10-08T10:11:39 | 180,560,855 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,435 | py | #!/usr/bin/env python
# coding: utf-8
# 代理
#
# 代理模式 用来**控制 implementation 的访问**.
# In[ ]:
# Simple demonstration of the Proxy pattern
class Implementation(object):
    """Concrete object whose access the Proxy below controls."""

    def f(self):
        print('***f***')

    def g(self):
        print('***g***')

    def h(self):
        print('***h***')
class Proxy(object):
    """Hand-written proxy: each call is forwarded explicitly to a private
    Implementation instance."""

    def __init__(self):
        self.__implementation = Implementation()

    # Pass method calls to the implementation
    def f(self):
        self.__implementation.f()

    def g(self):
        self.__implementation.g()

    def h(self):
        self.__implementation.h()
# In[3]:
# 当然我们可以用 Python 的特性
class Implementation(object):
    """Single-method implementation used by the __getattr__-based Proxy."""

    def f(self):
        print('***f***')
class Proxy(object):
    """Proxy that forwards every unknown attribute lookup automatically."""

    def __init__(self):
        self.__implementation = Implementation()

    def __getattr__(self, name):
        """Redirect attribute access to the wrapped implementation.

        Only called for names not found on Proxy itself.
        """
        return getattr(self.__implementation, name)
# State
#
# State pattern adds more implementations to Proxy. State 模式可以切换 Implementation 的种类
# In[4]:
class State_d(object):
    """State-pattern proxy: the backing implementation can be swapped at
    runtime via changeImp(), while attribute access keeps delegating."""

    def __init__(self, imp):
        self.__implementation = imp

    def changeImp(self, newImp):
        """Switch to a different implementation object."""
        self.__implementation = newImp

    def __getattr__(self, name):
        # Invoked only for attributes State_d itself does not define.
        return getattr(self.__implementation, name)
# In[ ]:
| [
"zhangzijie.dlut@gmail.com"
] | zhangzijie.dlut@gmail.com |
b248f7b6e4a7f92757f0a8c13236f489a28b112f | ace30d0a4b1452171123c46eb0f917e106a70225 | /filesystems/vnx_rootfs_lxc_ubuntu64-16.04-v025-openstack-compute/rootfs/usr/lib/python2.7/dist-packages/oslo_privsep/tests/test_comm.py | 72f7aefceab898b5a143045d7aa771a0fc759a9f | [
"Python-2.0"
] | permissive | juancarlosdiaztorres/Ansible-OpenStack | e98aa8c1c59b0c0040c05df292964520dd796f71 | c01951b33e278de9e769c2d0609c0be61d2cb26b | refs/heads/master | 2022-11-21T18:08:21.948330 | 2018-10-15T11:39:20 | 2018-10-15T11:39:20 | 152,568,204 | 0 | 3 | null | 2022-11-19T17:38:49 | 2018-10-11T09:45:48 | Python | UTF-8 | Python | false | false | 2,698 | py | # Copyright 2015 Rackspace Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import io

import six
from oslotest import base

from oslo_privsep import comm
class BufSock(object):
    """In-memory socket double: sendall() appends to a buffer, recv()
    reads from it sequentially, shutdown() closes it (recv then yields b'').

    Uses the stdlib io.BytesIO instead of six.BytesIO: six only aliased
    io.BytesIO here, so the compatibility shim is unnecessary.
    """

    def __init__(self):
        self.readpos = 0          # offset of the next byte recv() returns
        self.buf = io.BytesIO()

    def recv(self, bufsize):
        """Return up to `bufsize` bytes, or b'' after shutdown()."""
        if self.buf.closed:
            return b''
        self.buf.seek(self.readpos, 0)
        data = self.buf.read(bufsize)
        self.readpos += len(data)
        return data

    def sendall(self, data):
        """Append `data` at the end of the buffer."""
        self.buf.seek(0, 2)
        self.buf.write(data)

    def shutdown(self, _flag):
        """Close the buffer; subsequent recv() calls return b''."""
        self.buf.close()
class TestSerialization(base.BaseTestCase):
    """Round-trip tests: values pushed through comm.Serializer must come
    back unchanged from comm.Deserializer over a shared in-memory BufSock."""

    def setUp(self):
        super(TestSerialization, self).setUp()
        sock = BufSock()
        self.input = comm.Serializer(sock)
        self.output = iter(comm.Deserializer(sock))

    def send(self, data):
        """Serialize `data`, then decode and return the next message."""
        self.input.send(data)
        return next(self.output)

    def assertSendable(self, value):
        # The round-tripped value must compare equal to the original.
        self.assertEqual(value, self.send(value))

    def test_none(self):
        self.assertSendable(None)

    def test_bool(self):
        self.assertSendable(True)
        self.assertSendable(False)

    def test_int(self):
        self.assertSendable(42)
        self.assertSendable(-84)

    def test_bytes(self):
        data = b'\x00\x01\x02\xfd\xfe\xff'
        self.assertSendable(data)

    def test_unicode(self):
        data = u'\u4e09\u9df9'
        self.assertSendable(data)

    def test_tuple(self):
        self.assertSendable((1, 'foo'))

    def test_list(self):
        # NB! currently lists get converted to tuples by serialization.
        self.assertEqual((1, 'foo'), self.send([1, 'foo']))

    def test_dict(self):
        self.assertSendable(
            {
                'a': 'b',
                1: 2,
                None: None,
                (1, 2): (3, 4),
            }
        )

    def test_badobj(self):
        # Arbitrary objects are not serializable and must raise TypeError.
        class UnknownClass(object):
            pass
        obj = UnknownClass()
        self.assertRaises(TypeError, self.send, obj)

    def test_eof(self):
        # Closing the writer side ends the deserializer's iteration.
        self.input.close()
        self.assertRaises(StopIteration, next, self.output)
| [
"jcdiaztorres96@gmail.com"
] | jcdiaztorres96@gmail.com |
5fdaeecaf64cdc22e25b4913a33d91d5c7b28577 | 714306f0587cbe874a8663d80255799e91e3bff8 | /scripts/scrape.py | 4fecb8389f2a88602b52ad694bf9f8ef3d3d10ca | [] | no_license | carbonXIII/research_scraper | 65225e536512a43746f50bc38d0b007ee14ebb86 | 658b1ee616298366631a0bebe06d5b3ef3342310 | refs/heads/master | 2022-05-17T13:44:04.167376 | 2019-10-22T19:17:51 | 2019-10-22T19:17:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,404 | py | # Scraper to scrape some information from a Github repository (website and cloned repo)
# From Github:
# Description
# Issues
# Pull requests
# From Cloned Repo:
# .markdown files
# in-line comments
from shutil import rmtree
import os, sys
import bs4 as soup
import requests
from myutils import onerror, choose_subset, run_proc, clone, slurp, write_as_bytes
# File extensions scanned for in-line comments.
code_extensions = ['cpp','c','cxx','py','sh','js','java','glsl','hlsl']
# File extensions treated as markdown-style documentation.
markdown_extensions = ['md','rst']
# Line prefixes that mark a single-line comment.
comment_starters = ['//','#']
# Characters normalized to '_' when turning a path into a section key.
to_clean = ['.','/','-']
# Comment regions closer than this many lines are merged; each region also
# keeps this many lines of context before/after.
comment_merge_distance = 5
# Upper bounds on how many items of each kind are sampled per repository.
max_issues = 20
max_inline = 20
max_markdown = 20
# Always include README.* in the markdown sample even if not drawn.
force_readme = True
def parse_inline(qualified):
    """Extract commented regions from the source file at `qualified`.

    Lines whose first non-blank characters match one of comment_starters
    anchor a region of +/- comment_merge_distance lines; overlapping
    regions are merged. Returns the regions formatted as markdown fenced
    code blocks with '### Line a-b' headers.
    """
    with open(qualified, 'r') as g:
        try:
            lines = g.readlines()
        except Exception:
            # Undecodable/unreadable file: skip it with a note.
            print("can't read {}, skipping".format(qualified))
            lines = []
    comments = []
    for i, line in enumerate(lines):
        stripped = line.lstrip()
        for cs in comment_starters:
            if stripped.startswith(cs):
                if len(comments) and comments[-1][1] > i - comment_merge_distance:
                    # Close enough to the previous region: extend it.
                    stop = min(i + comment_merge_distance, len(lines))
                    comments[-1] = (comments[-1][0], stop)
                else:
                    start = max(i - comment_merge_distance, 0)
                    stop = min(i + comment_merge_distance, len(lines))
                    comments += [(start, stop)]
                break
    merged = []
    for start, stop in comments:
        merged += ['### Line ', str(start + 1), '-', str(stop), '\n']
        merged += ['```\n']
        for i in range(start, stop):
            # BUG FIX: the original reused the stale `line` variable from
            # the scan loop above, so the blank-line skip and the ```
            # escaping were applied to the wrong line while lines[i] was
            # appended unmodified.
            cur = lines[i]
            if len(cur.strip()) == 0:
                continue
            merged += [cur.replace('```', "'''")]
        merged += ['\n```\n']
    return ''.join(merged)
def parse_markdown(qualified):
    """Return the file at `qualified` wrapped as one fenced code block.

    Existing ``` fences inside the file are rewritten as ''' so they
    cannot terminate the outer fence. Operates on raw bytes.
    """
    with open(qualified, 'rb') as handle:
        raw = handle.read()
    sanitized = raw.replace(b'```', b"'''")
    return b'```' + sanitized + b'```'
def extract_from_repo(repo, out):
    """Sample markdown and code files from the cloned repo at `repo` and
    fill out["markdown"] / out["inline"] with their parsed contents,
    keyed by a '_'-normalized version of the file path."""
    candidates = {"markdown": [], "inline": []}
    _extract_from_repo(repo, candidates, path='/.')
    # Random subsets bounded by the module-level max_* limits.
    inline_ = choose_subset(max_inline, len(candidates["inline"]))
    markdown_ = choose_subset(max_markdown, len(candidates["markdown"]))
    for i in inline_:
        npath = candidates["inline"][i]
        print('Scraping',npath)
        qualified = repo + npath
        extension = npath.split('.')[-1]
        # Normalize '.', '/' and '-' to '_' and collapse repeats to build
        # a markdown-friendly section key.
        cleaned_path = npath
        for symbol in to_clean:
            cleaned_path = '_'.join(cleaned_path.split(symbol))
        while '__' in cleaned_path:
            cleaned_path = cleaned_path.replace('__', '_')
        merged = parse_inline(qualified)
        out["inline"][cleaned_path] = merged
    if force_readme:
        # Force the top-level README into the markdown sample if present.
        readme_names = ['/./README.' + ext for ext in markdown_extensions]
        for i,npath in enumerate(candidates["markdown"]):
            if npath in readme_names:
                if i not in markdown_:
                    markdown_ += [i]
    for i in markdown_:
        npath = candidates["markdown"][i]
        print('Scraping',npath)
        qualified = repo + npath
        extension = npath.split('.')[-1]
        # Same path normalization as for the inline sample above.
        cleaned_path = npath
        for symbol in to_clean:
            cleaned_path = '_'.join(cleaned_path.split(symbol))
        while '__' in cleaned_path:
            cleaned_path = cleaned_path.replace('__', '_')
        merged = parse_markdown(qualified)
        out["markdown"][cleaned_path] = merged
def _extract_from_repo(repo, out, path='/.'):
    """Recursively collect repo-relative paths of markdown and code files
    into out["markdown"] and out["inline"], classified by extension."""
    for f in os.listdir(repo + path):
        npath = path + '/' + f
        qualified = repo + npath
        extension = f.split('.')[-1]
        if os.path.isdir(qualified):
            _extract_from_repo(repo, out, npath)
        elif extension in markdown_extensions:
            out["markdown"] += [npath]
        elif extension in code_extensions:
            out["inline"] += [npath]
def count_issues(url):
    """Estimate the highest issue number that exists under `url`.

    Binary search by galloping: starting with a jump of 65536, advance
    `guess` whenever GET <url><guess+jump> answers 200, halving the jump
    each round (~17 HTTP requests total).
    """
    max_jump = 65536
    guess = 0
    while max_jump:
        nguess = guess + max_jump
        page = requests.get(url + str(nguess))
        if page.status_code is 200:
            # NOTE(review): `is 200` only works because CPython caches
            # small ints; this should be `== 200` (left as-is in this
            # documentation-only pass).
            guess = nguess
        max_jump = max_jump // 2
    return guess
def extract_from_url(url, out):
    """Scrape the GitHub project page at `url`: store the repository
    description in out["info"] and a sampled subset of issues/pull
    requests (title + comments) in out["issues"] / out["pull"]."""
    souped = slurp(url)[0]
    desc = souped.find('span', {'itemprop': 'about'}).getText()
    out["info"]["description"] = desc.strip() + '\n'
    issue_count = count_issues(url + '/issues/')
    to_scrape = choose_subset(max_issues, issue_count)
    for target in to_scrape:
        # Issue numbers are 1-based.
        target_url = url + '/issues/' + str(target + 1)
        # slurp() returns (soup, final_url); redirects distinguish issues
        # from pull requests in the final URL.
        page,target_url = slurp(target_url)
        if type(page) is not soup.BeautifulSoup:
            # slurp() returned a status code instead of parsed HTML.
            print(target_url,', status_code =',page,', skipping.')
            continue
        print('Scraping',target_url)
        # 'issues' or 'pull', taken from the resolved URL.
        issue_type = target_url.split('/')[-2]
        merged = []
        title = page.find('span', {'class': 'js-issue-title'}).getText()
        merged += ['Title:\n```\n', title, '\n```\n']
        comments = page.find_all('div', {'class': 'timeline-comment-group'})
        for comment in comments:
            author = comment.find('a', {'class': 'author'}).getText()
            text = comment.find('td', {'class': 'comment-body'}).getText()
            merged += ['Author:\n```\n',
                       author,
                       '\n```\n',
                       'Text:\n```\n',
                       text,
                       '\n```\n']
        merged = ''.join(merged)
        out[issue_type][str(target+1)] = merged
def write_summary(title, extracted, out):
    """Write `extracted` ({heading: {section_key: text}}) to file `out` as
    markdown: a linked table of contents followed by one section per entry.

    The file is opened in binary mode because markdown sections hold bytes
    while inline/issue sections hold str (encoded on write below).
    """
    with open(out, 'wb') as f:
        f_title = None  # noqa: placeholder comment removed
        write_as_bytes(f, '# ' + title + '\n')
        # Table of contents: one link per heading and per section.
        for heading in extracted:
            heading_adj = heading.capitalize()
            write_as_bytes(f, '[- {}](#{})\n\n'.format(heading_adj, heading_adj))
            for path in extracted[heading]:
                path_ = path.split('/')[-1]
                write_as_bytes(f, '* [{}](#{})\n\n'.format(path_, path))
        write_as_bytes(f, '<!-- toc -->\n\n')
        # Body: every heading and its sections.
        for heading in extracted:
            heading_adj = heading.capitalize()
            write_as_bytes(f, '# ' + str(heading_adj) + '\n')
            for path in extracted[heading]:
                write_as_bytes(f, '## ' + str(path) + '\n')
                to_write = extracted[heading][path]
                if type(to_write) is str:
                    # str sections must be encoded for the binary stream.
                    f.write(extracted[heading][path].encode())
                else:
                    f.write(to_write)
                write_as_bytes(f, '\n')
def go(url, out_file):
    """Scrape one GitHub repository: wipe and re-clone it into ./repo,
    extract docs/comments from the clone and issues from the website,
    then write everything to `out_file` as markdown."""
    repo = './repo'
    # Remove any previous clone; onerror handles read-only files.
    rmtree(repo, onerror=onerror)
    clone(url, repo)
    extracted = {"info": {}, "markdown": {}, "inline": {}, "issues": {}, "pull": {}}
    extract_from_repo(repo, extracted)
    extract_from_url(url, extracted)
    # Title is "owner/repo", the last two URL components.
    write_summary('/'.join(url.split('/')[-2:]), extracted, out_file)
if __name__ == '__main__':
    # Repo list file: one "label,url" line per repository, terminated by
    # an "END" sentinel line. Optional path as first CLI argument.
    repo_list = 'repos.txt' if len(sys.argv) < 2 else sys.argv[1]
    with open(repo_list, 'r') as f:
        while True:
            line = f.readline()
            # BUG FIX: readline() returns '' at EOF; without this check a
            # file missing the "END" sentinel looped forever.
            if not line or line.strip() == 'END':
                break
            if not line.strip():
                continue  # tolerate blank lines
            url = line.split(',')[1].strip()
            name = url.split('/')[-1]
            print('repo:', name)
            go(url, name + '.md')
| [
"r1ch4rds.j0rdan@gmail.com"
] | r1ch4rds.j0rdan@gmail.com |
626ad41b7c2f45201c36c890134ff3c5e306ba4b | 2e7656524d3bc32a5f256973afcb3dd35fc6b764 | /matrice.py | e8f55835f226cee96bdf2a3211155629cc12b883 | [] | no_license | bondac-ion/Matrice | fa047adf91e23c3691b98f05bda79e8971f7d482 | 891e0325a8c8e605ae18129272e3cc80aabee421 | refs/heads/main | 2023-08-15T04:00:37.106089 | 2021-10-06T19:03:29 | 2021-10-06T19:03:29 | 414,337,296 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 589 | py | a=[[2,11,54,3,1],
[34,6,55,9,22],
[1,2,3,4,5],
[9,7,6,45,0],
[12,23,34,45,56]]
# Row sums.
# BUG FIX: the original wrote `for i in a: print(..., i+1, ...)`, adding
# an int to a list row and raising TypeError; enumerate() supplies the
# 1-based row number instead.
for idx, row in enumerate(a):
    print("suma randului", idx + 1, "este", sum(row))

# Column sums.
for i in range(len(a)):
    col = [rind[i] for rind in a]
    print("suma coloanei", i + 1, "este", sum(col))

# Diagonals of the square matrix.
d_principala = []
d_secundara = []
for i in range(len(a)):
    for j in range(len(a[0])):
        if i == j:
            d_principala.append(a[i][j])
        if i + j == (len(a) - 1):
            # a[j][i] preserves the original's bottom-to-top order for
            # the secondary diagonal.
            d_secundara.append(a[j][i])
print("Diagonala principala-", d_principala)
print("Diagonala secundara-", d_secundara)
"noreply@github.com"
] | bondac-ion.noreply@github.com |
5a1a215fc88b1c2d5c7a9729d348862c15461931 | b64687833bbbd206d871e5b20c73e5bf363c4995 | /crocs.py | 2ea505fc464101c7b928b4bbcbb3e5e9cd5a0f07 | [
"Apache-2.0"
] | permissive | barkinet/crocs | 462225eee0975c9240ec25ca1275e0f9dc991e00 | 7ab44d1eb45aac7b24ab64601255d9fb38049040 | refs/heads/master | 2020-12-02T09:12:18.988446 | 2017-07-09T20:09:52 | 2017-07-09T20:09:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,547 | py | from random import choice, randint
from string import printable
import re
class RegexStr(object):
    """Literal text as a regex atom: matches exactly its own value."""

    def __init__(self, value):
        self.value = value

    def invalid_data(self):
        """Not implemented for literals (returns None)."""

    def valid_data(self):
        """A string guaranteed to match: the literal itself."""
        return self.value

    def __str__(self):
        # Escape regex metacharacters so the literal matches verbatim.
        return re.escape(self.value)
class RegexOperator(object):
    """Base node of the regex AST: argument coercion helpers, sample-data
    hooks, and a quick self-test (Python 2 print statements)."""
    # It may be interesting to have a base class Pattern
    # that implements common methods with Group and Include, Exclude.
    # Because these accept multiple arguments.

    def __init__(self):
        pass

    def invalid_data(self):
        pass

    def valid_data(self):
        pass

    def encargs(self, args):
        # Wrap plain strings as RegexStr literals; pass nodes through.
        return [RegexStr(ind) if isinstance(ind, str) else ind
                for ind in args]

    def encstr(self, regex):
        # Single-argument variant of encargs.
        regex = RegexStr(regex) if isinstance(
            regex, str) else regex
        return regex

    def test(self):
        """Search the generated sample with str(self) and dump the match."""
        regex = str(self)
        data = self.valid_data()
        # It has to be search in order to work with ConsumeNext.
        strc = re.search(regex, data)
        print 'Regex;', regex
        print 'Input:', data
        print 'Group dict:', strc.groupdict()
        print 'Group 0:', strc.group(0)
        print 'Groups:', strc.groups()

    def join(self):
        # Concatenate the string form of every child pattern.
        return ''.join(map(lambda ind: str(ind), self.args))

    def __str__(self):
        pass
class NamedGroup(RegexOperator):
    """
    Named groups.
    (?P<name>...)
    """

    def __init__(self, name, *args):
        # Sub-patterns; raw strings become RegexStr literals.
        self.args = self.encargs(args)
        self.name = name

    def invalid_data(self):
        pass

    def valid_data(self):
        # Concatenated matching samples of the sub-patterns.
        return ''.join(map(lambda ind: \
            ind.valid_data(), self.args))

    def __str__(self):
        return '(?P<%s>%s)' % (self.name, self.join())
class Group(RegexOperator):
    """
    A normal group.
    (abc).
    """

    def __init__(self, *args):
        self.args = self.encargs(args)

    def invalid_data(self):
        pass

    def valid_data(self):
        # Concatenated matching samples of the sub-patterns.
        return ''.join(map(lambda ind: \
            ind.valid_data(), self.args))

    def __str__(self):
        return '(%s)' % self.join()
class Times(RegexOperator):
    """
    Match n, m times.
    a{1, 3}

    Note: The * and + are emulated by
    Times(regex, 0) or Times(regex, 1)
    """

    # Upper bound for sample generation when max is unbounded ('').
    TEST_MAX = 10

    def __init__(self, regex, min=0, max=''):
        self.regex = self.encstr(regex)
        self.min = min
        self.max = max

    def invalid_data(self):
        pass

    def valid_data(self):
        # Repeat the sub-pattern's sample a random in-range number of
        # times (Python 2: xrange).
        count = randint(self.min, self.max
                        if self.max else self.TEST_MAX)
        data = ''.join((self.regex.valid_data()
                        for ind in xrange(count)))
        return data

    def __str__(self):
        return '%s{%s,%s}' % (self.regex,
                              self.min, self.max)
class ConsumeNext(RegexOperator):
    """
    Lookbehind assertion.
    (?<=...)
    """

    def __init__(self, regex0, regex1):
        # regex0 is asserted (not consumed); regex1 is matched after it.
        self.regex0 = self.encstr(regex0)
        self.regex1 = self.encstr(regex1)

    def invalid_data(self):
        pass

    def valid_data(self):
        # Sample must include the lookbehind text so re.search succeeds.
        return '%s%s' % (self.regex0.valid_data(),
                         self.regex1.valid_data())

    def __str__(self):
        return '(?<=%s)%s' % (self.regex0, self.regex1)
class ConsumeBack(RegexOperator):
    """
    Lookahead assertion.
    (?=...)
    """

    def __init__(self, regex0, regex1):
        # regex0 is matched; regex1 is asserted ahead without consuming.
        self.regex0 = self.encstr(regex0)
        self.regex1 = self.encstr(regex1)

    def invalid_data(self):
        pass

    def valid_data(self):
        # Sample includes the lookahead text so the assertion holds.
        return '%s%s' % (self.regex0.valid_data(),
                         self.regex1.valid_data())

    def __str__(self):
        return '%s(?=%s)' % (self.regex0, self.regex1)
class Seq(RegexOperator):
    """Character range `start-end`, e.g. a-z, for use inside a set."""

    def __init__(self, start, end):
        self.start = start
        self.end = end
        # NOTE(review): xrange stops before ord(end), so `end` itself is
        # missing from the generated sample even though the regex range
        # includes it (Python 2: xrange).
        self.seq = [chr(ind) for ind in xrange(
            ord(self.start), ord(self.end))]

    def valid_data(self):
        return ''.join(self.seq)

    def __str__(self):
        return '%s-%s' % (self.start, self.end)
class Include(RegexOperator):
    """
    A character set: [abc].
    """

    def __init__(self, *args):
        self.args = self.encargs(args)

    def invalid_data(self):
        # Generation of non-matching samples is not implemented.
        pass

    def valid_data(self):
        # Pick one character out of the union of all child alphabets.
        alphabet = ''.join(child.valid_data() for child in self.args)
        return choice(alphabet)

    def __str__(self):
        return '[%s]' % self.join()
class Exclude(RegexOperator):
    """
    A negated character set: [^abc].
    """

    def __init__(self, *args):
        self.args = self.encargs(args)

    def invalid_data(self):
        # Generation of non-matching samples is not implemented.
        pass

    def valid_data(self):
        # A valid sample is any printable character NOT in the excluded set.
        excluded = ''.join(child.valid_data() for child in self.args)
        allowed = [ch for ch in printable if ch not in excluded]
        return choice(allowed)

    def __str__(self):
        return '[^%s]' % self.join()
class X(RegexOperator):
    """
    The wildcard: the dot (.), matching any character.
    """

    TOKEN = '.'

    def __init__(self):
        pass

    def invalid_data(self):
        # No character fails to match the dot, so the invalid sample is empty.
        return ''

    def valid_data(self):
        # Any printable character matches the dot.
        return choice(printable)

    def __str__(self):
        return self.TOKEN
class Pattern(RegexOperator):
    """
    Top-level container that stitches sub-expressions into one pattern.
    """

    def __init__(self, *args):
        self.args = self.encargs(args)

    def invalid_data(self):
        # Generation of non-matching samples is not implemented.
        pass

    def valid_data(self):
        # Concatenate one valid sample per sub-expression.
        return ''.join(child.valid_data() for child in self.args)

    def __str__(self):
        return self.join()
| [
"ioliveira.id.uff.br"
] | ioliveira.id.uff.br |
a97d0b7b5c266a837d5caf3fefb00339c7d845dc | 8fcae139173f216eba1eaa01fd055e647d13fd4e | /.history/scraper_20191220144406.py | d08dde54a93939a73cdb07eb8e08d72519375f5e | [] | no_license | EnriqueGalindo/backend-web-scraper | 68fdea5430a0ffb69cc7fb0e0d9bcce525147e53 | 895d032f4528d88d68719838a45dae4078ebcc82 | refs/heads/master | 2020-11-27T14:02:59.989697 | 2019-12-21T19:47:34 | 2019-12-21T19:47:34 | 229,475,085 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,693 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Module docstring: One line description of what your program does.
There should be a blank line in between description above, and this
more detailed description. In this section you should put any caveats,
environment variable expectations, gotchas, and other notes about running
the program. Author tag (below) helps instructors keep track of who
wrote what, when grading.
"""
__author__ = "Enrique Galindo"
# Imports go at the top of your file, after the module docstring.
# One module per import line. These are for example only.
import sys
import requests
import re
def main(args):
    """Scrape a web page for URLs, e-mail addresses and phone numbers.

    args: command-line arguments; args[0] must be the page URL.
    Raises requests.HTTPError for non-2xx responses.
    """
    url = args[0]
    response = requests.get(url)
    response.raise_for_status()
    # All http/https URLs on the page (collected but not printed yet).
    url_list = re.findall(r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', response.text)
    # RFC-5322-style e-mail matcher.  Repaired: this assignment line was
    # corrupted in the source — the first character range of the
    # quoted-string alternative ([\x01-\x08\x0b...]) had been mangled.
    regex_email = r'''(?:[a-z0-9!#$%&‘*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&‘*+/=?^_`{|}~-]+)*|“(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*“)@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])'''
    # North-American phone numbers with an optional extension.
    regex_phone = r'''(1?\W*([2-9][0-8][0-9])\W*([2-9][0-9]{2})\W*([0-9]{4})(\se?x?t?(\d*))?)'''
    email_list = set(re.findall(regex_email, response.text))
    # Bug fix: the original referenced the undefined name `re_phone`
    # (NameError); the pattern is bound to `regex_phone` above.
    phone_list = set(re.findall(regex_phone, response.text))
    print(email_list)
if __name__ == '__main__':
    """Docstring goes here"""
    # Forward the command-line arguments (excluding the program name).
    main(sys.argv[1:])
"egalindo@protonmail.com"
] | egalindo@protonmail.com |
4795a56db26abe40c2a74585633599bdd4dbe46f | 87658453efcdbed898c9dd7fae77cd6c1c9dae79 | /scripts/run_multiple_tail_at_scale.py | 860b49357df4b2486ab955ce0f908ebec6f4a722 | [
"Apache-2.0"
] | permissive | matthelb/gryff | ada0fed962988bbcc32daff040de1029eb0d17f2 | faea8bf494c472d66f98c3701cd88b7c472c374f | refs/heads/master | 2021-02-05T23:52:38.728832 | 2020-04-01T20:01:30 | 2020-04-01T20:01:30 | 243,851,022 | 8 | 3 | null | null | null | null | UTF-8 | Python | false | false | 281 | py | import utils
import sys
from utils.experiment_util import *
def main():
    """Entry point: run the tail-at-scale experiment for one config file."""
    argv = sys.argv
    if len(argv) == 2:
        run_multiple_tail_at_scale(argv[1])
    else:
        # Wrong argument count: print usage and exit with an error status.
        sys.stderr.write('Usage: python3 %s <config_file>\n' % argv[0])
        sys.exit(1)
| [
"mlb452@cornell.edu"
] | mlb452@cornell.edu |
a0adbf0801f319434a3785fe01f994198732a1a1 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2864/60618/317011.py | a6f56f689fbf65afed239fae41b9105f1e30fc28 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,548 | py | class Solution:
def find(self, n, data):
if n == 1:
return data[0]
re = 0
d = dict()
for i in range(n):
d[data[i]] = data.count(data[i])
# 这两行可以好好学习一下!!!
sorted_key_list = sorted(d, reverse=True) # key从大到小
sorted_dict = sorted(d.items(), key=lambda x: x[0], reverse=True)
k = sorted_key_list
dd = []
for item in sorted_dict:
dd.append(item[1])
i = 0
while i < len(k):
if k[i] == k[i+1] + 1: # 下一个不能要了
if dd[i] * k[i] > dd[i + 1] * k[i + 1]:
re += dd[i] * k[i]
i += 1
if i == len(k) - 1:
break
if i == len(k) - 2:
re += dd[i + 1] * k[i + 1]
break
else:
re += dd[i + 1] * k[i + 1]
i += 1
if i == len(k) - 1:
break
if k[i] == k[i+1]+1:
i += 1
i += 1
else: # 下一个还能要
re += dd[i] * k[i]
i += 1
if i == len(k) - 1:
re += dd[i] * k[i]
break
return re
if __name__ == '__main__':
    # Read n and the n values from stdin, then print the best score.
    n = int(input())
    data = [int(a) for a in input().split()]
    s = Solution()
    re = s.find(n, data)
    print(re)
| [
"1069583789@qq.com"
] | 1069583789@qq.com |
fb100ad85682045d4052339d1143167421753289 | 3cfaa57f120c53694bdeb71c3785e95a6152f8e7 | /pi_libs/macros.py | 83e7f48ea48cf340b143c79a982f8e8ae9ff69f6 | [] | no_license | QUT-Motorsport/QUTMS_CANSimulation | 810593d82963717dd82e0c8ee549c06bdb4d851d | 6f3f7acaebceae90702a9cada113f1265dc51e1b | refs/heads/master | 2022-04-08T09:23:55.848072 | 2020-03-12T01:41:27 | 2020-03-12T01:41:27 | 246,234,740 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,400 | py | """
CAB202 Teensy Library (cab202_teensy)
macros.h rewritten to Python code
B.Talbot, September 2015
L.Buckingham, September 2017
M.Sadykov, November 2019
Queensland University of Technology
"""
"""
Setting data directions in a data direction register (DDR)
"""
def SET_INPUT(portddr, pin): (portddr) &= ~(1 << (pin))
def SET_OUTPUT(portddr, pin): (portddr) |= (1 << (pin))
"""
Setting, clearing, and reading bits in registers.
reg is the name of a register; pin is the index (0..7)
of the bit to set, clear or read.
(WRITE_BIT is a combination of CLEAR_BIT & SET_BIT)
"""
def SET_BIT(reg, pin): (reg) |= (1 << (pin))
def CLEAR_BIT(reg, pin): (reg) &= ~(1 << (pin))
def WRITE_BIT(reg, pin, value):
(reg) = (((reg) & ~(1 << (pin))) | ((value) << (pin)))
def BIT_VALUE(reg, pin): (((reg) >> (pin)) & 1)
def BIT_IS_SET(reg, pin): (BIT_VALUE((reg),(pin))==1)
"""
Rudimentary math macros
"""
def ABS(x):
if ((x) >= 0):
return (x)
else:
return -(x)
def SIGN(x):
if (((x) > 0) - ((x) < 0)):
return True
else:
return False
"""
Define definitions as in docs.
"""
HIGH = 1
LOW = 0
"""
Bit Write
"""
def BV(bit):
return (1 << (bit))
| [
"mattsadov@gmail.com"
] | mattsadov@gmail.com |
07d35f66b6c9afb14eb49238a555cec69a67ef2c | 5dc77586e3e0f9de1f032fd2ca68494d8e58928f | /contrib/great_expectations_geospatial_expectations/great_expectations_geospatial_expectations/expectations/expect_column_values_geometry_to_be_within_shape.py | 4fb1ffed1fe1ffab5fafb06d1d68eb5f5526a0d0 | [
"Apache-2.0"
] | permissive | great-expectations/great_expectations | dd7c22e6277d6b08bee3ff38a015e6e8cd434df6 | b0290e2fd2aa05aec6d7d8871b91cb4478e9501d | refs/heads/develop | 2023-09-04T09:30:26.395518 | 2023-09-02T00:00:13 | 2023-09-02T00:00:13 | 103,071,520 | 8,931 | 1,535 | Apache-2.0 | 2023-09-14T19:57:16 | 2017-09-11T00:18:46 | Python | UTF-8 | Python | false | false | 12,685 | py | from typing import Optional
import pandas as pd
import pygeos as geos
from great_expectations.core.expectation_configuration import ExpectationConfiguration
from great_expectations.execution_engine import PandasExecutionEngine
from great_expectations.expectations.expectation import ColumnMapExpectation
from great_expectations.expectations.metrics import (
ColumnMapMetricProvider,
column_condition_partial,
)
# This class defines a Metric to support your Expectation.
# For most ColumnMapExpectations, the main business logic for calculation will live in this class.
class ColumnValuesGeometryWithinShape(ColumnMapMetricProvider):
    """Map metric: is each geometry in the column contained by a reference shape?"""

    # This is the id string that will be used to reference your metric.
    condition_metric_name = "column_values.geometry.within_shape"
    condition_value_keys = ("shape", "shape_format", "column_shape_format", "properly")

    # This method implements the core logic for the PandasExecutionEngine
    @column_condition_partial(engine=PandasExecutionEngine)
    def _pandas(cls, column, **kwargs):
        reference = kwargs.get("shape")
        reference_format = kwargs.get("shape_format")
        column_format = kwargs.get("column_shape_format")
        properly = kwargs.get("properly")

        # Parse the reference shape.  Any failure inside the try (even an
        # unsupported format) is surfaced as the same generic Exception.
        if reference is not None:
            try:
                if reference_format == "wkt":
                    reference_geoms = geos.from_wkt(reference)
                elif reference_format == "wkb":
                    reference_geoms = geos.from_wkb(reference)
                elif reference_format == "geojson":
                    reference_geoms = geos.from_geojson(reference)
                else:
                    raise NotImplementedError(
                        "Shape constructor method not implemented. Must be in WKT, WKB, or GeoJSON format."
                    )
            except Exception:
                raise Exception("A valid reference shape was not given.")
        else:
            raise Exception("A shape must be provided for this method.")

        # Load the column into a pygeos Geometry vector from a numpy array
        # (a pandas Series is not supported by the pygeos constructors).
        if column_format == "wkt":
            column_geoms = geos.from_wkt(column.to_numpy(), on_invalid="ignore")
        elif column_format == "wkb":
            column_geoms = geos.from_wkb(column.to_numpy(), on_invalid="ignore")
        else:
            raise NotImplementedError("Column values shape format not implemented.")

        # An array of reference shapes is merged into one Polygon/Multipolygon.
        reference_geoms = geos.union_all(reference_geoms)

        # Prepare the geometries for repeated predicate evaluation.
        geos.prepare(reference_geoms)
        geos.prepare(column_geoms)

        predicate = geos.contains_properly if properly else geos.contains
        return pd.Series(predicate(reference_geoms, column_geoms))
# This method defines the business logic for evaluating your metric when using a SqlAlchemyExecutionEngine
# @column_condition_partial(engine=SqlAlchemyExecutionEngine)
# def _sqlalchemy(cls, column, _dialect, **kwargs):
# raise NotImplementedError
# This method defines the business logic for evaluating your metric when using a SparkDFExecutionEngine
# @column_condition_partial(engine=SparkDFExecutionEngine)
# def _spark(cls, column, **kwargs):
# raise NotImplementedError
# This class defines the Expectation itself
class ExpectColumnValuesGeometryToBeWithinShape(ColumnMapExpectation):
    """Expect that column values as geometries are within a given reference shape.

    expect_column_values_geometry_to_be_within_shape is a \
    [Column Map Expectation](https://docs.greatexpectations.io/docs/guides/expectations/creating_custom_expectations/how_to_create_custom_column_map_expectations).

    Args:
        column (str): \
            The column name. \
            Column values must be provided in WKT or WKB format, which are common formats for GIS Database formats. \
            WKT can be accessed through the ST_AsText() or ST_AsBinary() functions in queries for PostGIS and MSSQL.

    Keyword Args:
        shape (str or list of str): \
            The reference geometry
        shape_format (str): \
            Geometry format for 'shape' string(s). Can be provided as 'Well Known Text' (WKT), 'Well Known Binary' (WKB), or as GeoJSON. \
            Must be one of: [wkt, wkb, geojson]. Default: wkt
        column_shape_format (str): \
            Geometry format for 'column'. Column values must be provided in WKT or WKB format, which are common formats for GIS Database formats. \
            WKT can be accessed through the ST_AsText() or ST_AsBinary() functions in queries for PostGIS and MSSQL.
        properly (boolean): \
            Whether the 'column' values should be properly within in the reference 'shape'. \
            The method allows for shapes to be 'properly contained' within the reference, meaning no points of a given geometry can touch the boundary of the reference. \
            See the pygeos docs for reference. Default: False

    Returns:
        An [ExpectationSuiteValidationResult](https://docs.greatexpectations.io/docs/terms/validation_result)

    Notes:
        * Convention is (X Y Z) for points, which would map to (Longitude Latitude Elevation) for geospatial cases.
        * Any convention can be followed as long as the test and reference shapes are consistent.
        * The reference shape allows for an array, but will union (merge) all the shapes into 1 and check the contains condition.
    """

    # These examples will be shown in the public gallery.
    # They will also be executed as unit tests for your Expectation.
    examples = [
        {
            "data": {
                "points_only": [
                    "POINT(1 1)",
                    "POINT(2 2)",
                    "POINT(6 4)",
                    "POINT(3 9)",
                    "POINT(8 9.999)",
                ],
                "points_and_lines": [
                    "POINT(1 1)",
                    "POINT(2 2)",
                    "POINT(6 4)",
                    "POINT(3 9)",
                    "LINESTRING(5 5, 8 10)",
                ],
            },
            "tests": [
                {
                    "title": "positive_test_with_points",
                    "exact_match_out": False,
                    "include_in_gallery": True,
                    "in": {
                        "column": "points_only",
                        "shape": "POLYGON ((0 0, 0 10, 10 10, 10 0, 0 0))",
                        "shape_format": "wkt",
                        "properly": False,
                    },
                    "out": {
                        "success": True,
                    },
                },
                {
                    "title": "positive_test_with_points_and_lines",
                    "exact_match_out": False,
                    "include_in_gallery": True,
                    "in": {
                        "column": "points_and_lines",
                        "shape": "POLYGON ((0 0, 0 10, 10 10, 10 0, 0 0))",
                        "shape_format": "wkt",
                        "properly": False,
                    },
                    "out": {
                        "success": True,
                    },
                },
                {
                    "title": "positive_test_with_points_wkb_reference_shape",
                    "exact_match_out": False,
                    "include_in_gallery": True,
                    "in": {
                        "column": "points_only",
                        "shape": "010300000001000000050000000000000000000000000000000000000000000000000000000000000000002440000000000000244000000000000024400000000000002440000000000000000000000000000000000000000000000000",
                        "shape_format": "wkb",
                        "properly": False,
                    },
                    "out": {
                        "success": True,
                    },
                },
                {
                    "title": "positive_test_with_points_geojson_reference_shape",
                    "exact_match_out": False,
                    "include_in_gallery": True,
                    "in": {
                        "column": "points_only",
                        "shape": '{"type":"Polygon","coordinates":[[[0.0,0.0],[0.0,10.0],[10.0,10.0],[10.0,0.0],[0.0,0.0]]]}',
                        "shape_format": "geojson",
                        "properly": False,
                    },
                    "out": {
                        "success": True,
                    },
                },
                {
                    "title": "negative_test_with_points",
                    "exact_match_out": False,
                    "include_in_gallery": True,
                    "in": {
                        "column": "points_only",
                        "shape": "POLYGON ((0 0, 0 7.5, 7.5 7.5, 7.5 0, 0 0))",
                        "shape_format": "wkt",
                        "properly": True,
                    },
                    "out": {
                        "success": False,
                    },
                },
                {
                    "title": "negative_test_with_points_and_lines_not_properly_contained",
                    "exact_match_out": False,
                    "include_in_gallery": True,
                    "in": {
                        "column": "points_and_lines",
                        "shape": "POLYGON ((0 0, 0 10, 10 10, 10 0, 0 0))",
                        "shape_format": "wkt",
                        "properly": True,
                        "mostly": 1,
                    },
                    "out": {
                        "success": False,
                    },
                },
            ],
        }
    ]

    # This is the id string of the Metric used by this Expectation.
    # For most Expectations, it will be the same as the `condition_metric_name` defined in your Metric class above.
    map_metric = "column_values.geometry.within_shape"

    # This is a list of parameter names that can affect whether the Expectation evaluates to True or False
    success_keys = (
        "mostly",
        "shape",
        "shape_format",
        "column_shape_format",
        "properly",
    )

    # This dictionary contains default values for any parameters that should have default values
    default_kwarg_values = {
        "mostly": 1,
        "shape_format": "wkt",
        "column_shape_format": "wkt",
        "properly": False,
    }

    def validate_configuration(self, configuration: Optional[ExpectationConfiguration]):
        """
        Validates that a configuration has been set, and sets a configuration if it has yet to be set. Ensures that
        necessary configuration arguments have been provided for the validation of the expectation.

        Args:
            configuration (OPTIONAL[ExpectationConfiguration]): \
                An optional Expectation Configuration entry that will be used to configure the expectation
        Returns:
            True if the configuration has been validated successfully. Otherwise, raises an exception
        """

        super().validate_configuration(configuration)
        configuration = configuration or self.configuration

        # # Check other things in configuration.kwargs and raise Exceptions if needed
        # try:
        #     assert (
        #         ...
        #     ), "message"
        #     assert (
        #         ...
        #     ), "message"
        # except AssertionError as e:
        #     raise InvalidExpectationConfigurationError(str(e))

        return True

    # This object contains metadata for display in the public Gallery
    library_metadata = {
        "tags": [
            "geospatial",
            "hackathon-2022",
        ],  # Tags for this Expectation in the Gallery
        "contributors": [  # Github handles for all contributors to this Expectation.
            "@pjdobson",  # Don't forget to add your github handle here!
        ],
        "requirements": ["pygeos"],
    }
if __name__ == "__main__":
ExpectColumnValuesGeometryToBeWithinShape().print_diagnostic_checklist()
| [
"noreply@github.com"
] | great-expectations.noreply@github.com |
07009bf78c01bc832ea59222c306f561654e552d | ba2efb956f472e508cac8a41921d67083136e1c8 | /test.py | d54d89821dc811294a6f2b518dd498f232324426 | [] | no_license | AHBruns/Syrio | 1599c9f97b8022dd63c297e7ea6a4e88abd5cf86 | 881512ee5e2ee52ad70de5326f80c9e100736adb | refs/heads/master | 2020-05-09T15:55:12.831283 | 2019-04-14T03:45:44 | 2019-04-14T03:45:44 | 181,250,041 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 119 | py | import sqlite3
def t(s, c):
try:
c.execute(s)
except sqlite3.OperationalError as ex:
print(ex) | [
"alex.h.bruns@gmail.com"
] | alex.h.bruns@gmail.com |
e1918394a57db35a46a6856e38ebedd667af34e4 | d21112887ed1ec675b7b519cc991fc47bfa11735 | /SaleML_PreDjango/Predicting/urls.py | fc440ad929f7717a684452088ecfe3d8b3a0a1bb | [] | no_license | SydNS/DjangoML-model | 8c9ab65075b896ff129a872b087cdcd9dfc87e83 | c15474b136d592e182e707f6a73269685c3e62ad | refs/heads/master | 2023-03-02T13:27:33.809869 | 2021-02-06T09:57:34 | 2021-02-06T09:57:34 | 336,550,706 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 196 | py | from django.conf.urls import url
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('products', views.product_describe_view, name='product_add'),
]
| [
"root@localhost.localdomain"
] | root@localhost.localdomain |
24c369e604c97d4f7e16c18d93c67302fca9be3b | 47789957e8dd6ba944bd4c04341cf8add431d64f | /shoppingcart/home/migrations/0005_address.py | 0b765715896903f72bb9986d4f645cf9f59811fb | [] | no_license | binnib/EshopApp | 8c93c518d09b5dbe9e3e9f357e9a68fa2d2d4585 | 4846b7f48e4224f2a0aee8942e9c758a9f890081 | refs/heads/main | 2023-03-28T17:04:48.063283 | 2021-04-12T05:52:37 | 2021-04-12T05:52:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,121 | py | # Generated by Django 3.0.6 on 2021-03-31 07:22
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('home', '0004_delete_address'),
]
operations = [
migrations.CreateModel(
name='Address',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('address_details', models.CharField(max_length=250)),
('state', models.CharField(max_length=100)),
('city', models.CharField(max_length=100)),
('country', models.CharField(max_length=100)),
('pincode', models.CharField(max_length=10)),
('landmark', models.CharField(max_length=50)),
('resident_type', models.CharField(max_length=10)),
('is_active', models.BooleanField(default=False)),
('customer', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='home.Customer')),
],
),
]
| [
"maheshsharma171226@gmail.com"
] | maheshsharma171226@gmail.com |
1565de3abac24dee338faefa2cd4b094f35f2ecd | 7963f09b4002249e73496c6cbf271fd6921b3d22 | /emulator_6502/instructions/sbc.py | 31e549a6575d30d766fd1bf37990f233dd92938b | [] | no_license | thales-angelino/py6502emulator | 6df908fc02f29b41fad550c8b773723a7b63c414 | 1cea28489d51d77d2dec731ab98a6fe8a515a2a8 | refs/heads/master | 2023-03-19T14:46:17.393466 | 2021-03-08T04:10:45 | 2021-03-08T04:10:45 | 345,754,473 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,553 | py | SBC_IMMEDIATE_OPCODE = 0xe9
SBC_ZEROPAGE_OPCODE = 0xe5
SBC_ZEROPAGEX_OPCODE = 0xf5
SBC_ABSOLUTE_OPCODE = 0xed
SBC_ABSOLUTEX_OPCODE = 0xfd
SBC_ABSOLUTEY_OPCODE = 0xf9
SBC_INDIRECTX_OPCODE = 0xe1
SBC_INDIRECTY_OPCODE = 0xf1
class SBCImmediate(object):
    """SBC (SuBtract with Carry) instruction, immediate addressing mode."""

    def __init__(self):
        super(SBCImmediate, self).__init__()

    def run(self, cpu):
        """Fetch the immediate operand, trace state, then subtract."""
        operand = cpu.immediate()
        print("SBC memory byte read: %s" % hex(operand))
        print("SBC register A read: %s" % hex(cpu.a))
        print("SBC processor status Carry read: %s" % hex(cpu.processor_status['carry']))
        cpu.sbc(operand)
class SBCZeroPage(object):
    """SBC (SuBtract with Carry) instruction, zero-page addressing mode."""

    def __init__(self):
        super(SBCZeroPage, self).__init__()

    def run(self, cpu):
        """Fetch the zero-page operand, trace state, then subtract."""
        operand = cpu.zero_page()
        print("SBC zero page byte read: %s" % hex(operand))
        print("SBC register A read: %s" % hex(cpu.a))
        print("SBC processor status Carry read: %s" % hex(cpu.processor_status['carry']))
        cpu.sbc(operand)
class SBCZeroPageX(object):
    """SBC (SuBtract with Carry) instruction, zero-page,X addressing mode."""

    def __init__(self):
        super(SBCZeroPageX, self).__init__()

    def run(self, cpu):
        """Fetch the zero-page,X operand, trace state, then subtract."""
        operand = cpu.zero_page_x()
        print("SBC zero page X byte read: %s" % hex(operand))
        print("SBC register A read: %s" % hex(cpu.a))
        print("SBC processor status carry read: %s" % hex(cpu.processor_status['carry']))
        cpu.sbc(operand)
class SBCAbsolute(object):
    """SBC (SuBtract with Carry) instruction, absolute addressing mode."""

    def __init__(self):
        super(SBCAbsolute, self).__init__()

    def run(self, cpu):
        """Fetch the absolute-addressed operand, trace state, then subtract."""
        operand = cpu.absolute()
        print("SBC absolute byte read: %s" % hex(operand))
        print("SBC register A read: %s" % hex(cpu.a))
        print("SBC processor status carry read: %s" % hex(cpu.processor_status['carry']))
        cpu.sbc(operand)
class SBCAbsoluteX(object):
    """SBC (SuBtract with Carry) instruction, absolute,X addressing mode."""

    def __init__(self):
        super(SBCAbsoluteX, self).__init__()

    def run(self, cpu):
        """Fetch the absolute,X operand, trace state, then subtract."""
        operand = cpu.absolute_x()
        print("SBC absolute x byte read: %s" % hex(operand))
        print("SBC register A read: %s" % hex(cpu.a))
        print("SBC processor status carry read: %s" % hex(cpu.processor_status['carry']))
        cpu.sbc(operand)
class SBCAbsoluteY(object):
    """SBC (SuBtract with Carry) instruction, absolute,Y addressing mode."""

    def __init__(self):
        super(SBCAbsoluteY, self).__init__()

    def run(self, cpu):
        """Fetch the absolute,Y operand, trace state, then subtract."""
        operand = cpu.absolute_y()
        print("SBC absolute Y byte read: %s" % hex(operand))
        print("SBC register A read: %s" % hex(cpu.a))
        print("SBC processor status carry read: %s" % hex(cpu.processor_status['carry']))
        cpu.sbc(operand)
class SBCIndirectX(object):
    """SBC (SuBtract with Carry) instruction, (indirect,X) addressing mode."""

    def __init__(self):
        super(SBCIndirectX, self).__init__()

    def run(self, cpu):
        """Fetch the (indirect,X) operand, trace state, then subtract."""
        operand = cpu.indirect_x()
        print("SBC indirect X byte read: %s" % hex(operand))
        print("SBC register A read: %s" % hex(cpu.a))
        print("SBC processor status carry read: %s" % hex(cpu.processor_status['carry']))
        cpu.sbc(operand)
class SBCIndirectY(object):
    """SBC (SuBtract with Carry) instruction, (indirect),Y addressing mode."""

    def __init__(self):
        super(SBCIndirectY, self).__init__()

    def run(self, cpu):
        """Fetch the (indirect),Y operand, trace state, then subtract."""
        operand = cpu.indirect_y()
        print("SBC indirect Y byte read: %s" % hex(operand))
        print("SBC register A read: %s" % hex(cpu.a))
        print("SBC processor status Carry read: %s" % hex(cpu.processor_status['carry']))
        cpu.sbc(operand)
"thales.angelino@gmail.com"
] | thales.angelino@gmail.com |
5ee11a1a3c24fe548935f32270ff881fa53eb1a8 | 8460f7bdc6dd01c2a4f095a9af5b3766474afb4d | /util/Vis.py | 00ae504b8892e6a281301b4bdf9bb0e87591f7b0 | [] | no_license | ggqshr/ClassificationPic | 36b38890f821549cb22dffd3e99e9fa8ebe16719 | ddcd877184e368a823b08801d67c88645541ee3a | refs/heads/master | 2020-05-03T20:44:05.650053 | 2019-04-07T06:43:46 | 2019-04-07T06:43:46 | 178,809,450 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,464 | py | import visdom
import numpy as np
import time
import torchvision as tv
class Visualizer():
    """
    Thin wrapper around visdom's basic operations.  The native visdom
    interface remains available through `self.vis.function`.
    """

    def __init__(self, env='default', **kwargs):
        # NOTE(review): re-import is redundant here — visdom is already
        # imported at module level.
        import visdom
        self.vis = visdom.Visdom(env=env, use_incoming_socket=False, **kwargs)

        # Index of the next point for each named series (the x coordinate).
        # e.g. ('loss', 23) means the 23rd point of the 'loss' curve.
        self.index = {}
        self.log_text = ''

    def reinit(self, env='default', **kwargs):
        """
        Reconfigure visdom (new environment / connection options).
        """
        self.vis = visdom.Visdom(env=env, use_incoming_socket=False, **kwargs)
        return self

    def plot_many(self, d):
        """
        Plot several series in one call.
        @params d: dict (name,value) i.e. ('loss',0.11)
        """
        for k, v in d.items():
            self.plot(k, v)

    def img_many(self, d):
        # Display several images in one call; d maps window name -> image.
        for k, v in d.items():
            self.img(k, v)

    def plot(self, name, y):
        """
        Append one point to the named line chart.
        self.plot('loss',1.00)
        """
        x = self.index.get(name, 0)
        # First point creates the window; later points append to it.
        self.vis.line(Y=np.array([y]), X=np.array([x]),
                      win=name,
                      opts=dict(title=name),
                      update=None if x == 0 else 'append'
                      )
        self.index[name] = x + 1

    def img(self, name, img_):
        """
        Show a single image tensor in a visdom window.
        self.img('input_img',t.Tensor(64,64))
        """
        # Promote 2-D (H, W) tensors to 3-D (1, H, W) as visdom expects.
        if len(img_.size()) < 3:
            img_ = img_.cpu().unsqueeze(0)
        self.vis.image(img_.cpu(),
                       win=name,
                       opts=dict(title=name)
                       )

    def img_grid_many(self, d):
        # Display several image grids in one call.
        for k, v in d.items():
            self.img_grid(k, v)

    def img_grid(self, name, input_3d):
        """
        Turn a batch of images into one grid image, i.e. an input of
        shape (36, 64, 64) becomes a 6*6 grid with 64*64 cells.
        """
        self.img(name, tv.utils.make_grid(
            input_3d.cpu()[0].unsqueeze(1).clamp(max=1, min=0)))

    def log(self, info, win='log_text'):
        """
        Append a timestamped line to a persistent text window.
        self.log({'loss':1,'lr':0.0001})
        """
        self.log_text += ('[{time}] {info} <br>'.format(
            time=time.strftime('%m%d_%H%M%S'),
            info=info))
        self.vis.text(self.log_text, win=win)

    def __getattr__(self, name):
        # Delegate unknown attributes to the underlying visdom client.
        return getattr(self.vis, name)
| [
"ggq18663278150@sian.com"
] | ggq18663278150@sian.com |
c9aa9c599cdb6264bd9acab064e97e527515437f | 0a4d3723384ef0a6a858f01d79292ce73a46cd8b | /frontend/urls.py | 286bf72ee53476aac3e84eb4719abbef526f6fec | [] | no_license | albus12138/NKTC-Website-Django | f32594e7de7397b1e70ccaa6e0dafbd2ff2caaf6 | 3c96ae272f226b33927bbdd0b05057127824a6ef | refs/heads/master | 2021-01-23T16:26:51.251044 | 2015-08-06T08:28:37 | 2015-08-06T08:28:37 | 39,332,441 | 0 | 1 | null | 2015-08-06T08:28:37 | 2015-07-19T12:07:38 | JavaScript | UTF-8 | Python | false | false | 653 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf.urls import patterns, url
from frontend.views import index, main_menu, secondary_menu, secondary_menu_all, search, content
# URL routes for the public-facing pages (old-style Django `patterns`).
urlpatterns = patterns('',
                       url(r'^$', index, name='index'),
                       url(r'^s/(?P<main>\w+)/$', main_menu, name="main_menu"),
                       url(r'^s/(?P<main>\w+)/(?P<secondary>\w+)/$', secondary_menu, name="secondary_menu"),
                       url(r'^a/(?P<main>\w+)/(?P<secondary>\w+)/$', secondary_menu_all, name="secondary_menu_all"),
                       url(r'^search/$', search, name="search"),
                       url(r'^s/(?P<main>\w+)/(?P<secondary>\w+)/(?P<id>\d+)/$', content, name="content")
                       )
"albus.zly@gmail.com"
] | albus.zly@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.