hexsha stringlengths 40 40 | size int64 5 2.06M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 248 | max_stars_repo_name stringlengths 5 125 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 248 | max_issues_repo_name stringlengths 5 125 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringdate 2015-01-01 00:00:47 2022-03-31 23:42:18 ⌀ | max_issues_repo_issues_event_max_datetime stringdate 2015-01-01 17:43:30 2022-03-31 23:59:58 ⌀ | max_forks_repo_path stringlengths 3 248 | max_forks_repo_name stringlengths 5 125 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 5 2.06M | avg_line_length float64 1 1.02M | max_line_length int64 3 1.03M | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6e1d05dba9a266286addc73ec4950cdeada8c0b4 | 1,581 | py | Python | config.py | juanjtov/Twitter_PNL_PUBLIC | 473eea0e7b030c8358aa86f6d3ff9d787c94abe6 | [
"MIT"
] | null | null | null | config.py | juanjtov/Twitter_PNL_PUBLIC | 473eea0e7b030c8358aa86f6d3ff9d787c94abe6 | [
"MIT"
] | null | null | null | config.py | juanjtov/Twitter_PNL_PUBLIC | 473eea0e7b030c8358aa86f6d3ff9d787c94abe6 | [
"MIT"
] | null | null | null | import os
class Credentials:
    """Twitter API credentials, read from environment variables at import time."""
    API_KEY = os.getenv('API_KEY')
    API_SECRET_KEY = os.getenv('API_SECRET_KEY')
    # OAuth access tokens
    ACCESS_TOKEN = os.getenv('ACCESS_TOKEN')
    # NOTE(review): the env var name 'ACCESS_SECRET_TOKEN' does not match the
    # attribute name ACCESS_TOKEN_SECRET -- confirm which variable name the
    # deployment environment actually sets before "fixing" either side.
    ACCESS_TOKEN_SECRET = os.getenv('ACCESS_SECRET_TOKEN')
class Settings:
    """Stream filter settings and MySQL column definitions.

    The TABLE_ATTRIBUTES* strings are column lists meant to be spliced into
    CREATE TABLE statements elsewhere; they are runtime SQL fragments, so
    their exact content matters.
    """
    # Keyword used to filter the Twitter stream.
    TRACK_WORDS = 'Technology'
    # Name of the main tweet table.
    TABLE_NAME = "twttechnology"
    # Columns for the main tweet table (id is the auto-increment PK).
    TABLE_ATTRIBUTES = "id INTEGER PRIMARY KEY AUTO_INCREMENT, id_tweet VARCHAR(255), created_at DATETIME, text VARCHAR(255), \
    polarity INT, subjectivity INT, user_created_at VARCHAR(255), \
    user_location VARCHAR(255), user_description VARCHAR(255), \
    user_followers_count INT, longitude DOUBLE, latitude DOUBLE, \
    retweet_count INT, favorite_count INT"
    # Same columns plus word_id, for a per-keyword table.
    TABLE_ATTRIBUTES_2 = "id INTEGER PRIMARY KEY AUTO_INCREMENT, \
    word_id INTEGER, id_tweet VARCHAR(255), created_at DATETIME, text VARCHAR(255), \
    polarity INT, subjectivity INT, user_created_at VARCHAR(255), \
    user_location VARCHAR(255), user_description VARCHAR(255), \
    user_followers_count INT, longitude DOUBLE, latitude DOUBLE, \
    retweet_count INT, favorite_count INT"
class Dbsettings:
    """MySQL connection parameters for the two project databases.

    All values come from environment variables; any missing variable
    yields None.
    """
    # Primary database.
    HOST = os.getenv('MYSQL_HOST')
    USER = os.getenv('MYSQL_USER')
    PASSWORD = os.getenv('MYSQL_PASSWORD')
    DATABASE = os.getenv('MYSQL_DB')
    PORT = os.getenv('MYSQL_PORT')
    # Second database, used by the NLP tech project.
    HOST2 = os.getenv('MYSQL_HOST_2')
    USER2 = os.getenv('MYSQL_USER_2')
    PASSWORD2 = os.getenv('MYSQL_PASSWORD_2')
    DATABASE2 = os.getenv('MYSQL_DB_2')
    PORT2 = os.getenv('MYSQL_PORT_2')
| 39.525 | 127 | 0.697027 |
6e1de2b972d3bacd17bc4fe230cc40342951d8ec | 130 | py | Python | code/helpers/__init__.py | briandesilva/discovery-of-physics-from-data | b79c34317f049c9b47aaf2cc4c54c5ec7219f3d7 | [
"MIT"
] | 11 | 2020-07-02T01:48:27.000Z | 2022-03-29T18:23:32.000Z | code/helpers/__init__.py | briandesilva/discovery-of-physics-from-data | b79c34317f049c9b47aaf2cc4c54c5ec7219f3d7 | [
"MIT"
] | null | null | null | code/helpers/__init__.py | briandesilva/discovery-of-physics-from-data | b79c34317f049c9b47aaf2cc4c54c5ec7219f3d7 | [
"MIT"
] | 3 | 2020-11-21T09:11:21.000Z | 2022-03-29T18:23:58.000Z | from .library import *
from .differentiation import *
from .sindy_ball import SINDyBall
from .tests import *
from .utils import *
| 21.666667 | 33 | 0.776923 |
6e1fd593ca8661737d9d161ba6774b763dcdbb57 | 341 | py | Python | users/models.py | diogor/desafio-backend | 4264a843503cc51f635bcfb31a009d53ebe671d8 | [
"MIT"
] | null | null | null | users/models.py | diogor/desafio-backend | 4264a843503cc51f635bcfb31a009d53ebe671d8 | [
"MIT"
] | null | null | null | users/models.py | diogor/desafio-backend | 4264a843503cc51f635bcfb31a009d53ebe671d8 | [
"MIT"
] | null | null | null | from django.db import models
from django.contrib.auth.models import AbstractUser
class User(AbstractUser):
    """Custom user model with a role field distinguishing admins from players."""
    # Role constants (stored values).
    ADMIN = 'admin'
    PLAYER = 'player'
    # (value, human-readable label) pairs for the `type` field.
    TYPE_CHOICES = (
        (ADMIN, "Admin"),
        (PLAYER, "Player")
    )
    # Role of the account; new accounts default to player.
    # max_length=6 fits the longest choice value ('player').
    type = models.CharField(choices=TYPE_CHOICES, max_length=6,
                            default=PLAYER)
| 21.3125 | 63 | 0.609971 |
6e1ff72ebc4c23799d24fd64dfc337c27cbb1d44 | 151 | py | Python | python/glob/glob1.py | jtraver/dev | c7cd2181594510a8fa27e7325566ed2d79371624 | [
"MIT"
] | null | null | null | python/glob/glob1.py | jtraver/dev | c7cd2181594510a8fa27e7325566ed2d79371624 | [
"MIT"
] | null | null | null | python/glob/glob1.py | jtraver/dev | c7cd2181594510a8fa27e7325566ed2d79371624 | [
"MIT"
] | null | null | null | #!/usr/bin/python
import glob
def main():
    """Print every Python file found one directory level up.

    Globs ``../*/*.py`` relative to the current working directory and
    prints each match prefixed with "pyfile ".
    """
    pyfiles = glob.glob("../*/*.py")
    for pyfile in pyfiles:
        # Bug fix: this was a Python 2 print statement (`print "..." % x`),
        # which is a SyntaxError under Python 3. The parenthesized form
        # behaves identically on both Python 2 and 3.
        print("pyfile %s" % pyfile)


main()
| 13.727273 | 36 | 0.569536 |
6e201007363380e4d643bfc71a7961525d34bdc2 | 4,073 | py | Python | email_scrapper/readers/gmail_reader.py | datmellow/email-scrapper | 614e99a4b33f3a0d3d85d5eb9c359818991673a6 | [
"MIT"
] | 2 | 2018-01-07T23:12:28.000Z | 2018-01-10T00:58:17.000Z | email_scrapper/readers/gmail_reader.py | LucasCoderT/email-scrapper | 614e99a4b33f3a0d3d85d5eb9c359818991673a6 | [
"MIT"
] | null | null | null | email_scrapper/readers/gmail_reader.py | LucasCoderT/email-scrapper | 614e99a4b33f3a0d3d85d5eb9c359818991673a6 | [
"MIT"
] | 1 | 2019-12-09T17:01:08.000Z | 2019-12-09T17:01:08.000Z | import base64
import datetime
import email
import logging
import os
import typing
from email.message import Message
from googleapiclient import errors
from email_scrapper.models import Stores
from email_scrapper.readers.base_reader import BaseReader
logger = logging.getLogger(__name__)
class GmailReader(BaseReader):
    """Reads order emails from a Gmail account through the Gmail API."""

    # OAuth scope: read-only access is sufficient for searching and reading.
    SCOPES = ['https://www.googleapis.com/auth/gmail.readonly']

    def __init__(self, service, user_id: str = "me", user_email: str = None, email_mapping: dict = None,
                 date_from: datetime.datetime = None):
        """
        Parameters
        ----------
        service:
            The Gmail API service resource (e.g. from ``googleapiclient.discovery.build``).
        user_id: str
            Gmail user id; "me" refers to the authenticated account.
        user_email: str
            Email address of the account being read.
        email_mapping: dict
            Mapping of :class:`Stores` to the sender address to search from.
        date_from: datetime.datetime
            Lower bound for the email search window (handled by BaseReader).
        """
        super().__init__(date_from=date_from, user_email=user_email, email_mapping=email_mapping)
        self.service = service
        self.user_id = user_id

    @classmethod
    def authenticate_with_browser(cls, credentials_json: dict = None, date_from: datetime.datetime = None):
        """Log in to Gmail interactively through the browser.

        Requires either a ``credentials.json`` file on disk or a
        ``credentials_json`` dict. The obtained token is cached in
        ``token.pickle`` and reused on later runs.

        Returns
        -------
        GmailReader

        Raises
        ------
        ImportError
            If the Google auth client libraries are not installed.
        """
        # Bug fix: the try/except previously wrapped the entire method and
        # raised BaseException, which escapes `except Exception` handlers and
        # could misreport unrelated ImportErrors raised deeper in the flow.
        # Guard only the optional imports, and raise the accurate type.
        try:
            from google_auth_oauthlib.flow import InstalledAppFlow
            from googleapiclient.discovery import build
            import pickle
        except ImportError as err:
            raise ImportError("Google Auth library not found") from err

        creds = None
        # Reuse a previously cached token if one exists.
        if os.path.exists('token.pickle'):
            with open('token.pickle', 'rb') as token:
                creds = pickle.load(token)
        if not creds or not creds.valid:
            if credentials_json:
                flow = InstalledAppFlow.from_client_config(credentials_json, GmailReader.SCOPES)
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    'credentials.json', GmailReader.SCOPES)
            # Opens the user's browser for the OAuth consent flow.
            creds = flow.run_local_server(port=0)
            # Save the credentials for the next run
            with open('token.pickle', 'wb') as token:
                pickle.dump(creds, token)
        service = build('gmail', 'v1', credentials=creds)
        response = service.users().getProfile(userId="me").execute()
        return cls(service, user_id="me", user_email=response.get("emailAddress"), date_from=date_from)

    def _get_search_date_range(self):
        """Return the search cutoff date formatted for Gmail queries (YYYY-MM-DD)."""
        return self.search_date_range.strftime("%Y-%m-%d")

    def _get_email_details(self, message) -> Message:
        """Fetch one message by id in raw form and parse it into an email Message."""
        response = self.service.users().messages().get(userId=self.user_id, id=message['id'], format="raw").execute()
        # The raw payload is URL-safe base64; decode to bytes, then parse MIME.
        msg_str = base64.urlsafe_b64decode(response['raw'].encode('ASCII'))
        mime_msg = email.message_from_bytes(msg_str)
        return mime_msg

    def _get_search_query(self, store: Stores, subject: str = None):
        """Build the Gmail search query for a store (``subject`` is currently unused)."""
        return f"from:{self._get_store_email(store)} after:{self._get_search_date_range()}"

    def read_store_emails(self, store: Stores, subject: str = None) -> typing.Generator[str, None, None]:
        """Yield parsed email messages for the given store, following pagination.

        HTTP errors from the API are printed and terminate the generator
        early (preserving the original best-effort behavior).
        """
        query = self._get_search_query(store, subject)
        try:
            response = self.service.users().messages().list(userId=self.user_id,
                                                            q=query).execute()
            if 'messages' in response:
                for message in response['messages']:
                    yield self._get_email_details(message)
            # Follow nextPageToken until the API stops returning one.
            while 'nextPageToken' in response:
                page_token = response['nextPageToken']
                response = self.service.users().messages().list(userId=self.user_id, q=query,
                                                                pageToken=page_token).execute()
                for message in response['messages']:
                    yield self._get_email_details(message)
        except errors.HttpError as error:
            print('An error occurred: %s' % error)
| 41.141414 | 117 | 0.615762 |
6e20db78562463b54a9444adfca80573369e9be8 | 2,026 | py | Python | python/rail-fence-cipher/rail_fence_cipher.py | rootulp/exercism | 312a053ad1d375752acf0fce062ee7b9c643a149 | [
"MIT"
] | 41 | 2015-02-09T18:08:45.000Z | 2022-03-06T15:23:32.000Z | python/rail-fence-cipher/rail_fence_cipher.py | DucChuyenSoftwareEngineer/exercism | fb7820a1ba162b888a39f1b86cbe5d3ca3b15d4f | [
"MIT"
] | 21 | 2019-12-28T17:47:06.000Z | 2021-02-27T19:43:00.000Z | python/rail-fence-cipher/rail_fence_cipher.py | DucChuyenSoftwareEngineer/exercism | fb7820a1ba162b888a39f1b86cbe5d3ca3b15d4f | [
"MIT"
] | 18 | 2016-04-29T14:35:12.000Z | 2021-06-23T07:32:29.000Z | class Rails:
    def __init__(self, num_rails):
        """Create `num_rails` empty rails for the rail-fence cipher."""
        self.num_rails = num_rails
        # One independent list per rail (not [[]] * n, which would alias).
        self.rails = [[] for _ in range(num_rails)]
def populate_rails_linear(self, message, rail_lengths):
message_list = list(message)
for rail in self.linear_iterator(rail_lengths):
rail.append(message_list.pop(0))
def populate_rails_zig_zag(self, message):
message_list = list(message)
for rail in self.zig_zag_iterator(message):
rail.append(message_list.pop(0))
def to_string_linear(self):
return ''.join([data for rail in self.rails for data in rail])
    def to_string_zig_zag(self, message):
        """Read characters back off the rails in zig-zag order.

        Each visited rail surrenders its front character (pop(0)), so the
        rails are consumed as a side effect; `message` only determines how
        many steps the zig-zag walk takes.
        """
        return ''.join([rail.pop(0) for rail in
                        self.zig_zag_iterator(message)])
def linear_iterator(self, rail_lengths):
for index in range(len(self.rails)):
for rail_length in range(rail_lengths[index]):
yield self.rails[index]
    def zig_zag_iterator(self, message):
        """Yield the rail visited at each zig-zag step, one per character.

        The walk starts at the top rail moving down; `direction` flips the
        heading at the outer rails. Note the ordering is deliberate: the
        heading is updated *before* the index is moved, so the bounce at a
        boundary takes effect on the very next step.
        """
        index = 0
        increasing = True
        for _ in message:
            yield self.rails[index]
            increasing = self.direction(index, increasing)
            index = self.increment_index(index, increasing)
def increment_index(self, index, increasing):
if increasing:
return index + 1
else:
return index - 1
def direction(self, index, increasing):
if index == 0:
return True
elif index == self.num_rails - 1:
return False
else:
return increasing
def encode(message, num_rails):
    """Rail-fence encode: write `message` in a zig-zag over `num_rails`
    rails, then read the rails off top to bottom."""
    fence = Rails(num_rails)
    fence.populate_rails_zig_zag(message)
    return fence.to_string_linear()
def decode(message, num_rails):
    """Rail-fence decode: run a throwaway zig-zag pass to learn how many
    characters land on each rail, refill fresh rails linearly with the
    ciphertext, then read the characters back off in zig-zag order."""
    probe = Rails(num_rails)
    probe.populate_rails_zig_zag(message)
    lengths = [len(rail) for rail in probe.rails]
    fence = Rails(num_rails)
    fence.populate_rails_linear(message, lengths)
    return fence.to_string_zig_zag(message)
| 30.69697 | 70 | 0.638203 |
6e218f16003cae78a4d29f7eb9e696aa4c77eb3e | 187 | py | Python | ClassCode/P2/HW - Copy.py | tsyet12/ClassCode | db1db97f71a6f31769d58739c6687863bc6b88c4 | [
"MIT"
] | null | null | null | ClassCode/P2/HW - Copy.py | tsyet12/ClassCode | db1db97f71a6f31769d58739c6687863bc6b88c4 | [
"MIT"
] | null | null | null | ClassCode/P2/HW - Copy.py | tsyet12/ClassCode | db1db97f71a6f31769d58739c6687863bc6b88c4 | [
"MIT"
] | null | null | null | a=[1,2,3]
# Second vector, added element-wise to `a` (defined above).
b = [1, 1, 1]
#d={1:"ONE", 2:"TWO", 3:"THREE", 4:"FOUR", 5:"FIVE", 6:"SIX"}
# Element-wise sum of the two three-element lists.
f = [x + y for x, y in zip(a, b)]
# Spell out the first element when it is 1 or 2; otherwise leave it numeric.
if f[0] == 1:
    f[0] = "ONE"
elif f[0] == 2:
    f[0] = "TWO"
print(f)
| 11.6875 | 61 | 0.417112 |
6e2255b8f77a18ad6776515831039d97cfa15e3a | 748 | py | Python | Advanced_algorithm/oj_test/test04.py | mndream/MyOJ | ee92fb657475d998e6c201f222cb20bcbc2bfd64 | [
"Apache-2.0"
] | 1 | 2018-12-27T08:06:38.000Z | 2018-12-27T08:06:38.000Z | Advanced_algorithm/oj_test/test04.py | mndream/MyPythonOJ | ee92fb657475d998e6c201f222cb20bcbc2bfd64 | [
"Apache-2.0"
] | null | null | null | Advanced_algorithm/oj_test/test04.py | mndream/MyPythonOJ | ee92fb657475d998e6c201f222cb20bcbc2bfd64 | [
"Apache-2.0"
] | null | null | null | '''
A+B for Input-Output Practice (IV)
Description
Your task is to calculate the sum of some integers.
Input
Input contains multiple test cases. Each test case contains an integer N,
and then N integers follow in the same line.
A test case starting with 0 terminates the input and this test case is not to be processed.
Output
For each group of input integers you should output their sum in one line,
with one line of output for each line in input.
Sample Input
4 1 2 3 4
5 1 2 3 4 5
0
Sample Output
10
15
'''
# Keep reading lines until a line whose first number is 0 terminates input.
while True:
    values = list(map(int, input().split()))
    # split() with no argument splits on any whitespace run (spaces,
    # newlines, tabs); split(" ") would break on consecutive spaces.
    n = values[0]
    if n == 0:  # sentinel: a leading 0 ends processing, unprocessed
        break
    # Sum the n integers following the count. Using the built-in sum()
    # instead of a manual loop also stops shadowing the builtin `sum`.
    print(sum(values[1 : n + 1]))
6e22c62fbf96771a37ae5b157b23776e81cda2c5 | 2,421 | py | Python | pre-processing/obtain_audio_spectrogram.py | GeWu-Lab/OGM-GE_CVPR2022 | 08b3f2498dd3e89f57fe9a12b5bf0c162eba1fbf | [
"MIT"
] | 4 | 2022-03-06T17:57:24.000Z | 2022-03-24T04:26:32.000Z | pre-processing/obtain_audio_spectrogram.py | GeWu-Lab/OGM-GE_CVPR2022 | 08b3f2498dd3e89f57fe9a12b5bf0c162eba1fbf | [
"MIT"
] | null | null | null | pre-processing/obtain_audio_spectrogram.py | GeWu-Lab/OGM-GE_CVPR2022 | 08b3f2498dd3e89f57fe9a12b5bf0c162eba1fbf | [
"MIT"
] | 1 | 2022-03-31T08:12:15.000Z | 2022-03-31T08:12:15.000Z | import multiprocessing
import os
import os.path
import pickle
import librosa
import numpy as np
from scipy import signal
def audio_extract(path, audio_name, audio_path, sr=16000):
    """Compute a normalised log-spectrogram of an audio file and pickle it.

    Parameters
    ----------
    path : str
        Directory in which to write the resulting ``<audio_name>.pkl``.
    audio_name : str
        Base name (without extension) for the output file.
    audio_path : str
        Path of the audio file to load.
    sr : int
        NOTE(review): this parameter is never used -- librosa.load is
        called with its default sample rate; confirm whether resampling
        to 16 kHz was intended.
    """
    save_path = path
    samples, samplerate = librosa.load(audio_path)
    # Tile the signal and truncate to a fixed 160000 samples, then clip
    # amplitudes into [-1, 1].
    resamples = np.tile(samples, 10)[:160000]
    resamples[resamples > 1.] = 1.
    resamples[resamples < -1.] = -1.
    frequencies, times, spectrogram = signal.spectrogram(resamples, samplerate, nperseg=512, noverlap=353)
    # Log-compress (epsilon avoids log(0)) and z-score normalise.
    spectrogram = np.log(spectrogram + 1e-7)
    mean = np.mean(spectrogram)
    std = np.std(spectrogram)
    spectrogram = np.divide(spectrogram - mean, std + 1e-9)
    # Expected shape for this fixed input length / STFT configuration.
    assert spectrogram.shape == (257, 1004)
    save_name = os.path.join(save_path, audio_name + '.pkl')
    print(save_name)
    with open(save_name, 'wb') as fid:
        pickle.dump(spectrogram, fid)
class Consumer(multiprocessing.Process):
    """Worker process that drains (save_dir, name, path) jobs from a
    JoinableQueue, running audio_extract on each.

    A ``None`` job is a poison pill: the worker marks it done and exits.
    """
    def __init__(self, task_queue):
        multiprocessing.Process.__init__(self)
        self.task_queue = task_queue
    def run(self):
        """Process queue items until a poison pill (None) is received."""
        proc_name = self.name
        while True:
            next_task = self.task_queue.get()
            if next_task is None:
                # Poison pill means shutdown
                print('{}: Exiting'.format(proc_name))
                self.task_queue.task_done()
                break
            # print(next_task)
            audio_extract(next_task[0], next_task[1], next_task[2])
            # Mark the job complete so tasks.join() in the producer can return.
            self.task_queue.task_done()
if __name__ == '__main__':
    # Queue shared between this producer process and the workers.
    tasks = multiprocessing.JoinableQueue()
    # Start one consumer process per CPU core.
    num_consumers = multiprocessing.cpu_count()
    print('Creating {} consumers'.format(num_consumers))
    consumers = [
        Consumer(tasks)
        for i in range(num_consumers)
    ]
    for w in consumers:
        w.start()
    # Output directory for the pickled spectrograms (created if missing).
    save_dir = '/home/xiaokang_peng/data/AVE_av/audio_spec'
    if not os.path.exists(save_dir):
        os.mkdir(save_dir)
    # Enqueue one job per file found in the source audio directory.
    path_origin = '/home/xiaokang_peng/data/AVE_av/audio'
    audios = os.listdir(path_origin)
    for audio in audios:
        audio_name = audio
        audio_path = os.path.join(path_origin, audio)
        # audio_name[:-4] strips the 4-character extension for the output name.
        tasks.put([save_dir, audio_name[:-4], audio_path])
    # Add a poison pill for each consumer so every worker shuts down.
    for i in range(num_consumers):
        tasks.put(None)
    # Block until every queued job has been marked done.
    tasks.join()
    print("ok")
| 28.482353 | 106 | 0.646014 |
6e237945177ee47426cc1fcc873291dbba403f32 | 3,317 | py | Python | src/protean/core/event_handler.py | mpsiva89/protean | 315fa56da3f64178bbbf0edf1995af46d5eb3da7 | [
"BSD-3-Clause"
] | null | null | null | src/protean/core/event_handler.py | mpsiva89/protean | 315fa56da3f64178bbbf0edf1995af46d5eb3da7 | [
"BSD-3-Clause"
] | null | null | null | src/protean/core/event_handler.py | mpsiva89/protean | 315fa56da3f64178bbbf0edf1995af46d5eb3da7 | [
"BSD-3-Clause"
] | null | null | null | import inspect
import logging
from protean.container import Element, OptionsMixin
from protean.core.event import BaseEvent
from protean.exceptions import IncorrectUsageError
from protean.utils import DomainObjects, derive_element_class, fully_qualified_name
from protean.utils.mixins import HandlerMixin
logger = logging.getLogger(__name__)
class BaseEventHandler(Element, HandlerMixin, OptionsMixin):
"""Base Event Handler to be inherited by all event handlers"""
element_type = DomainObjects.EVENT_HANDLER
class Meta:
abstract = True
@classmethod
def _default_options(cls):
aggregate_cls = (
getattr(cls.meta_, "aggregate_cls")
if hasattr(cls.meta_, "aggregate_cls")
else None
)
return [
("aggregate_cls", None),
("stream_name", aggregate_cls.meta_.stream_name if aggregate_cls else None),
("source_stream", None),
]
def __new__(cls, *args, **kwargs):
if cls is BaseEventHandler:
raise TypeError("BaseEventHandler cannot be instantiated")
return super().__new__(cls)
def event_handler_factory(element_cls, **opts):
element_cls = derive_element_class(element_cls, BaseEventHandler, **opts)
if not (element_cls.meta_.aggregate_cls or element_cls.meta_.stream_name):
raise IncorrectUsageError(
{
"_entity": [
f"Event Handler `{element_cls.__name__}` needs to be associated with an aggregate or a stream"
]
}
)
# Iterate through methods marked as `@handle` and construct a handler map
#
# Also, if `_target_cls` is an event, associate it with the event handler's
# aggregate or stream
methods = inspect.getmembers(element_cls, predicate=inspect.isroutine)
for method_name, method in methods:
if not (
method_name.startswith("__") and method_name.endswith("__")
) and hasattr(method, "_target_cls"):
# `_handlers` is a dictionary mapping the event to the handler method.
if method._target_cls == "$any":
# This replaces any existing `$any` handler, by design. An Event Handler
# can have only one `$any` handler method.
element_cls._handlers["$any"] = {method}
else:
element_cls._handlers[fully_qualified_name(method._target_cls)].add(
method
)
# Associate Event with the handler's stream
if inspect.isclass(method._target_cls) and issubclass(
method._target_cls, BaseEvent
):
# Order of preference:
# 1. Stream name defined in event
# 2. Stream name defined for the event handler
# 3. Stream name derived from aggregate
stream_name = element_cls.meta_.stream_name or (
element_cls.meta_.aggregate_cls.meta_.stream_name
if element_cls.meta_.aggregate_cls
else None
)
method._target_cls.meta_.stream_name = (
method._target_cls.meta_.stream_name or stream_name
)
return element_cls
| 36.855556 | 114 | 0.619234 |
6e246664f07a32e8eef7dfd24b7f3cda19fa9734 | 7,508 | py | Python | read_prepare_data.py | jlu-ilr-hydro/IPCC-Repots-Focus-Overview | bf631975eb6c3ea2cf2f8fe9382e3361ad700a6e | [
"Apache-2.0"
] | null | null | null | read_prepare_data.py | jlu-ilr-hydro/IPCC-Repots-Focus-Overview | bf631975eb6c3ea2cf2f8fe9382e3361ad700a6e | [
"Apache-2.0"
] | null | null | null | read_prepare_data.py | jlu-ilr-hydro/IPCC-Repots-Focus-Overview | bf631975eb6c3ea2cf2f8fe9382e3361ad700a6e | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Fri Sep 17 10:12:26 2021
@author: Florian Jehn
"""
import os
import pandas as pd
import numpy as np
def read_ipcc_counts_temp():
"""reads all counts of temperatures for all reports and makes on df"""
files = os.listdir(os.getcwd()+os.sep+"Results"+ os.sep + "temperatures")
all_df = pd.DataFrame()
for file in files:
file_df = pd.read_csv("Results" + os.sep + "temperatures" + os.sep + file, sep=";", index_col=0)
file_df.columns = [file[:-4]]
all_df = pd.concat([all_df, file_df], axis=1)
return all_df.transpose()
def read_ipcc_counts_rfc():
"""reads all counts of reasons of concern for all reports and makes on df"""
files = os.listdir(os.getcwd()+os.sep+"Results"+ os.sep + "reasons_for_concern")
all_df = pd.DataFrame()
for file in files:
file_df = pd.read_csv("Results" + os.sep + "reasons_for_concern" + os.sep + file, sep=";", index_col=0)
file_df.columns = [file[:-4]]
all_df = pd.concat([all_df, file_df], axis=1)
return all_df.transpose()
def read_false_positive():
"""reads in all the counted false/true positive rates for the temperatres in the
IPCC and calculates a true positive rate for each entry"""
files = os.listdir(os.getcwd()+os.sep+"Results"+ os.sep + "false_positive_check_files")
all_df = pd.DataFrame()
for file in files:
# only read those files that contains the counting results
if "results" not in file:
continue
file_df = pd.read_csv("Results" + os.sep + "false_positive_check_files" + os.sep + file, sep=",", index_col=0)
# calculate the true positive rate
file_df["True Positive Rate [%]"] = (file_df["n true positive"]/(file_df["n true positive"]+file_df["n false positive"]))*100
# Arange the df for seaborn
file_df["Temperature [°C]"] = file_df.index
file_df.reset_index(inplace=True, drop=True)
all_df = pd.concat([all_df, file_df])
return all_df
def scale_counts(ipcc_counts):
"""scale the counts by overall sum"""
sums = ipcc_counts.sum(axis=1)
for col in ipcc_counts:
ipcc_counts[col] = ipcc_counts[col]/sums*100
return ipcc_counts
def read_meta():
"""reads in the meta data of the reports"""
meta = pd.read_csv("Reports" + os.sep + "meta_data_reports.tsv", sep="\t")
meta["Year"] = meta["Year"].astype("str")
return meta
def group_temps(ipcc_counts):
"""groups the temperatures into three categories"""
ipcc_counts["0.5°C - 2°C"] = ipcc_counts[" 0.5°C"] + ipcc_counts[" 1°C"] + ipcc_counts[" 1.5°C"] +ipcc_counts[" 2°C"]
ipcc_counts["2.5°C - 4°C"] = ipcc_counts[" 2.5°C"] + ipcc_counts[" 3°C"] + ipcc_counts[" 3.5°C"] +ipcc_counts[" 4°C"]
ipcc_counts["≥ 4.5°C"] = ipcc_counts[" 4.5°C"] + ipcc_counts[" 5°C"] + ipcc_counts[" 5.5°C"] +ipcc_counts[" 6°C"] +ipcc_counts[" 6.5°C"] + ipcc_counts[" 7°C"] + ipcc_counts[" 7.5°C"] +ipcc_counts[" 8°C"] + ipcc_counts[" 8.5°C"] + ipcc_counts[" 9°C"] + ipcc_counts[" 9.5°C"] +ipcc_counts[" 10°C"]
return ipcc_counts.iloc[:,20:]
def merge_counts_meta(ipcc_counts, meta):
"""merges the df with the counted temperatures/rfcs with the metadata"""
return pd.merge(meta, ipcc_counts, right_index=True, left_on="count_names")
def lookup_names():
""""Returns lookup dict for different files names to merge them"""
lookup_dict = {
"IPCC_AR6_WGI_Full_Report":"counts_IPCC_AR6_WGI_Full_Report_parsed",
"SROCC_FullReport_FINAL":"counts_SROCC_FullReport_FINAL_parsed",
"210714-IPCCJ7230-SRCCL-Complete-BOOK-HRES":"counts_210714-IPCCJ7230-SRCCL-Complete-BOOK-HRES_parsed",
"SR15_Full_Report_Low_Res":"counts_SR15_Full_Report_Low_Res_parsed",
"SYR_AR5_FINAL_full":"counts_SYR_AR5_FINAL_full_wcover_parsed",
"ipcc_wg3_ar5_full":"counts_ipcc_wg3_ar5_full_parsed",
"WGIIAR5-PartA_FINAL":"counts_WGIIAR5-PartA_FINAL_parsed",
"WGIIAR5-PartB_FINAL":"counts_WGIIAR5-PartB_FINAL_parsed",
"WG1AR5_all_final":"counts_WG1AR5_all_final_parsed",
"SREX_Full_Report-1":"counts_SREX_Full_Report-1_parsed",
"SRREN_Full_Report-1":"counts_SRREN_Full_Report-1_parsed",
"ar4_syr_full_report":"counts_ar4_syr_full_report_parsed",
"ar4_wg2_full_report":"counts_ar4_wg2_full_report_parsed",
"ar4_wg1_full_report-1":"counts_ar4_wg1_full_report-1_parsed",
"ar4_wg3_full_report-1":"counts_ar4_wg3_full_report-1_parsed",
"sroc_full-1":"counts_sroc_full-1_parsed",
"srccs_wholereport-1":"counts_srccs_wholereport-1_parsed",
"SYR_TAR_full_report":"counts_SYR_TAR_full_report_parsed",
"WGII_TAR_full_report-2":"counts_WGII_TAR_full_report-2_parsed",
"WGI_TAR_full_report":"counts_WGI_TAR_full_report_parsed",
"WGIII_TAR_full_report":"counts_WGIII_TAR_full_report_parsed",
"srl-en-1":"counts_srl-en-1_parsed",
"srtt-en-1":"counts_srtt-en-1_parsedd",
"emissions_scenarios-1":"counts_emissions_scenarios-1_parsed",
"av-en-1":"counts_av-en-1_parsed",
"The-Regional-Impact":"counts_The-Regional-Impact_parsed",
"2nd-assessment-en-1":"counts_2nd-assessment-en-1_parsed",
"ipcc_sar_wg_III_full_report":"counts_ipcc_sar_wg_III_full_report_parsed",
"ipcc_sar_wg_II_full_report":"counts_ipcc_sar_wg_II_full_report_parsed",
"ipcc_sar_wg_I_full_report":"counts_ipcc_sar_wg_I_full_report_parsed",
"climate_change_1994-2":"counts_climate_change_1994-2_parsed",
# "ipcc-technical-guidelines-1994n-1":"", # could not read in, but also contains no temp mentions
"ipcc_wg_I_1992_suppl_report_full_report":"counts_ipcc_wg_I_1992_suppl_report_full_report_parsed",
"ipcc_wg_II_1992_suppl_report_full_report":"counts_ipcc_wg_II_1992_suppl_report_full_report_parsed",
"ipcc_90_92_assessments_far_full_report":"counts_ipcc_90_92_assessments_far_full_report_parsed",
"ipcc_far_wg_III_full_report":"counts_ipcc_far_wg_III_full_report_parsed",
"ipcc_far_wg_II_full_report":"counts_ipcc_far_wg_II_full_report_parsed",
"ipcc_far_wg_I_full_report":"counts_ipcc_far_wg_I_full_report_parsed",
}
return lookup_dict
def create_temp_keys():
"""Creates a list of strings for all temperatures the paper looked at"""
temps = []
for i,temp in enumerate(np.arange(0.5,10.1,0.5)):
if i % 2 != 0:
temps.append(" "+str(int(temp))+"°C")
else:
temps.append(" "+str(temp)+"°C" )
return temps
def combine_all_raw_strings():
"""combines all raw strings into one big file to search through"""
reports = [file for file in os.listdir(os.getcwd() + os.sep + "Raw IPCC Strings") if file[-4:] == ".csv" ]
all_reports = " "
for report in reports:
print("Starting with " + report)
report_df = pd.read_csv(os.getcwd() + os.sep + "Raw IPCC Strings" + os.sep + report, sep="\t", usecols=[0])
report_list = report_df[report_df.columns[0]].tolist()
report_str = " ".join([str(item) for item in report_list])
all_reports += report_str
with open(os.getcwd() + os.sep + "Raw IPCC Strings" + os.sep + "all_ipcc_strings.csv", 'w', encoding='utf-8') as f:
# this file is not included in the repository, as it is too large for Github
f.write(all_reports)
if __name__ == "__main__":
combine_all_raw_strings()
| 48.128205 | 300 | 0.683404 |
6e25342e23a32ed5b961b03bb3584a54058a2d5c | 156 | py | Python | tests/test_get_filesize.py | zevaverbach/zev | 7330718f4eee28695fe57fb1107e506e6b0c9e4e | [
"MIT"
] | null | null | null | tests/test_get_filesize.py | zevaverbach/zev | 7330718f4eee28695fe57fb1107e506e6b0c9e4e | [
"MIT"
] | 1 | 2019-07-20T09:26:46.000Z | 2019-07-20T09:26:46.000Z | tests/test_get_filesize.py | zevaverbach/zev | 7330718f4eee28695fe57fb1107e506e6b0c9e4e | [
"MIT"
] | null | null | null | from pytest import fixture
from zev.get_filesize import get_filesize
def test_get_filesize(empty_filepath):
    # `empty_filepath` is a pytest fixture (injected by name); an empty
    # file must report a size of 0 bytes.
    assert get_filesize(empty_filepath) == 0
| 19.5 | 44 | 0.814103 |
6e253d478e601785b1142f2b0dc902543e75cdbc | 179 | py | Python | part1/03.py | jbaltop/57_Challenges | fa66ac584fc02761803fbd5692b737a73bd57983 | [
"MIT"
] | 31 | 2017-10-08T15:57:07.000Z | 2021-06-16T11:55:05.000Z | part1/03.py | jbaltop/57_Challenges | fa66ac584fc02761803fbd5692b737a73bd57983 | [
"MIT"
] | 1 | 2021-04-30T20:39:01.000Z | 2021-04-30T20:39:01.000Z | part1/03.py | jbaltop/57_Challenges | fa66ac584fc02761803fbd5692b737a73bd57983 | [
"MIT"
] | 7 | 2017-10-16T17:13:36.000Z | 2019-07-03T16:24:01.000Z | def main():
quote = input("What is the quote?\n")
person = input("Who said it?\n")
speech = "\n" + person + " says, " + '"' + quote + '"'
print(speech)
main()
| 16.272727 | 58 | 0.502793 |
6e265824cd5b4d3d09aa3a85134608484df9ae21 | 1,151 | py | Python | Integertask.py | Ainara12/Programing-Scripting-problems | 1017c1a8a3aeabc040886f9bdab35b252e7e08ea | [
"MIT"
] | null | null | null | Integertask.py | Ainara12/Programing-Scripting-problems | 1017c1a8a3aeabc040886f9bdab35b252e7e08ea | [
"MIT"
] | null | null | null | Integertask.py | Ainara12/Programing-Scripting-problems | 1017c1a8a3aeabc040886f9bdab35b252e7e08ea | [
"MIT"
] | null | null | null | #This program calculates the successive values of the following
# calculation: Next value by taking the positive integer added by user
# and if it is even divide it by 2, if it is odd, multiply by
#3 and add 1.Program ends if current value is 1.
#First: I created variable "pnumber" which will be the positive integer entered by the user.
# Read the starting value from the user.
pnumber = int(input("Enter a positive integer here:"))
# Collatz iteration: print the current value each step; halve it when
# even, replace it with 3n + 1 when odd; stop once the value reaches 1.
while pnumber > 0:
    if pnumber == 1:  # the sequence reached 1 -> print it and stop
        print(pnumber)
        break
    if pnumber % 2 == 0:  # even: divide by 2
        print(pnumber)
        # Bug fix: was `/`, which turned every value into a float
        # (8.0, 4.0, ...); floor division keeps the sequence integral.
        pnumber = pnumber // 2
    else:  # odd: multiply by 3 and add 1
        print(pnumber)
        pnumber = pnumber * 3 + 1
# Non-positive input: report it and fall through to the farewell message.
# Bug fix: this was `while ... :` with an unconditional break (an `if` in
# disguise) containing a Python 2 print statement, a SyntaxError on Py3.
if pnumber < 0:
    print(pnumber, "is not a positive integer.")
print("Thank you so much for using my program")
6e2666a6e406e4ebd7fe6e6904bdb4696b8d2f47 | 404 | py | Python | has33.py | CombatPompano81/Python-Snippets-Galore | c2fb9c6ebef0477895749db9f2aa0f87132a72d6 | [
"Apache-2.0"
] | null | null | null | has33.py | CombatPompano81/Python-Snippets-Galore | c2fb9c6ebef0477895749db9f2aa0f87132a72d6 | [
"Apache-2.0"
] | null | null | null | has33.py | CombatPompano81/Python-Snippets-Galore | c2fb9c6ebef0477895749db9f2aa0f87132a72d6 | [
"Apache-2.0"
] | null | null | null | # main function
def has33(nums):
    """Print and return whether `nums` contains two adjacent 3s.

    Prints "True" or "False" exactly as before (print(bool) emits the
    same text as print('True')/print('False')).

    Bug fix: the original did ``return print(...)``, which always
    returned None; the boolean is now returned so callers can use it.
    """
    found = any(a == 3 and b == 3 for a, b in zip(nums, nums[1:]))
    print(found)
    return found
# Demo: exercise has33 on a few sample lists (results are printed).
has33([1, 3, 3])
has33([3, 1, 3])
has33([3, 3, 3])
has33([1, 3, 1, 3])
| 22.444444 | 75 | 0.569307 |
6e26eeb7a1d51ccae528791cb9b9b4c924ad57bd | 914 | py | Python | proj/urls.py | vitali-rebkavets-itechart/students-lab | 574ad0249ee40b799a2e8faaced3661915bee756 | [
"MIT"
] | null | null | null | proj/urls.py | vitali-rebkavets-itechart/students-lab | 574ad0249ee40b799a2e8faaced3661915bee756 | [
"MIT"
] | 26 | 2019-05-21T13:24:59.000Z | 2019-06-13T10:24:29.000Z | proj/urls.py | vitali-r/students-lab | 574ad0249ee40b799a2e8faaced3661915bee756 | [
"MIT"
] | 2 | 2019-05-21T12:55:23.000Z | 2019-05-21T14:31:14.000Z | from django.contrib import admin
from django.urls import path, include
from django.conf.urls.static import static
from django.conf import settings
from products.views import (products, index,
products_detail)
from rest_framework_jwt.views import refresh_jwt_token
from users.views import ObtainCustomJSONWebToken
# URL patterns exposed under the /api/ prefix (see urlpatterns below).
apipatterns = [
    path('', include('products.urls')),
]
# Project-level routing table; static() appends MEDIA_URL serving for
# uploaded files in development.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('api/', include((apipatterns, 'api'), namespace='api')),
    path('', index, name='index'),
    path('products/', products, name='products'),
    path('products/<int:product_id>/', products_detail, name='products_detail'),
    path('', include('users.urls'), name='users'),
    path('sign-in/', ObtainCustomJSONWebToken.as_view()),
    path('api/sign-in/refresh', refresh_jwt_token)
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 35.153846 | 80 | 0.705689 |
6e2726ca9cbe233a3e8bac00017eecef8153cd91 | 17,692 | py | Python | survos2/frontend/plugins/objects.py | DiamondLightSource/SuRVoS2 | 42bacfb6a5cc267f38ca1337e51a443eae1a9d2b | [
"MIT"
] | 4 | 2017-10-10T14:47:16.000Z | 2022-01-14T05:57:50.000Z | survos2/frontend/plugins/objects.py | DiamondLightSource/SuRVoS2 | 42bacfb6a5cc267f38ca1337e51a443eae1a9d2b | [
"MIT"
] | 1 | 2022-01-11T21:11:12.000Z | 2022-01-12T08:22:34.000Z | survos2/frontend/plugins/objects.py | DiamondLightSource/SuRVoS2 | 42bacfb6a5cc267f38ca1337e51a443eae1a9d2b | [
"MIT"
] | 2 | 2018-03-06T06:31:29.000Z | 2019-03-04T03:33:18.000Z | from survos2.config import Config
import numpy as np
from numpy.lib.function_base import flip
from qtpy import QtWidgets
from qtpy.QtWidgets import QPushButton, QRadioButton
from survos2.frontend.components.base import *
from survos2.frontend.components.entity import (
SmallVolWidget,
TableWidget,
setup_entity_table,
setup_bb_table,
)
from survos2.frontend.components.icon_buttons import IconButton
from survos2.frontend.control import Launcher
from survos2.frontend.plugins.base import *
from survos2.frontend.plugins.plugins_components import MultiSourceComboBox
from survos2.frontend.utils import FileWidget
from survos2.improc.utils import DatasetManager
from survos2.model import DataModel
from survos2.server.state import cfg
from survos2.frontend.plugins.features import FeatureComboBox
from survos2.frontend.plugins.annotations import LevelComboBox
from survos2.entity.patches import PatchWorkflow, organize_entities, make_patches
class ObjectComboBox(LazyComboBox):
    """Lazy combo box listing the "points" and "boxes" object sets that
    exist in the current workspace."""

    def __init__(self, full=False, header=(None, "None"), parent=None):
        self.full = full
        super().__init__(header=header, parent=parent)

    def fill(self):
        result = Launcher.g.run("objects", "existing", workspace=True, full=self.full)
        logger.debug(f"Result of objects existing: {result}")
        if not result:
            return
        for fid, info in result.items():
            # Only point and box sets are listed here.
            if info["kind"] in ("points", "boxes"):
                self.addItem(fid, info["name"])
@register_plugin
class ObjectsPlugin(Plugin):
    """Plugin tab that lists the object sets (points / boxes / patches)
    stored in the current workspace and lets the user create new ones.

    Each object set is shown as an ObjectsCard; the cards are kept in
    ``self.existing_objects`` keyed by the server-side object id.
    """

    __icon__ = "fa.picture-o"
    __pname__ = "objects"
    __views__ = ["slice_viewer"]
    __tab__ = "objects"

    def __init__(self, parent=None):
        super().__init__(parent=parent)
        self.vbox = VBox(self, spacing=10)
        # The combo doubles as an "Add objects" menu: selecting an entry
        # triggers add_objects().
        self.objects_combo = ComboBox()
        self.vbox.addWidget(self.objects_combo)
        self.existing_objects = {}
        self.objects_layout = VBox(margin=0, spacing=5)
        self.objects_combo.currentIndexChanged.connect(self.add_objects)
        self.vbox.addLayout(self.objects_layout)
        self._populate_objects()

    def _populate_objects(self):
        """Fill the combo with the object kinds available on the server,
        grouped under disabled category-header rows."""
        self.objects_params = {}
        self.objects_combo.clear()
        self.objects_combo.addItem("Add objects")
        params = dict(
            workspace=DataModel.g.current_session + "@" + DataModel.g.current_workspace
        )
        result = Launcher.g.run("objects", "available", **params)
        logger.debug(f"objects available: {result}")
        if result:
            all_categories = sorted(set(p["category"] for p in result))
            for i, category in enumerate(all_categories):
                self.objects_combo.addItem(category)
                # Category rows are headers only and must not be selectable.
                # NOTE(review): the index arithmetic assumes categories and
                # their entries are appended in exactly this order — confirm.
                self.objects_combo.model().item(
                    i + len(self.objects_params) + 1
                ).setEnabled(False)
                for f in [p for p in result if p["category"] == category]:
                    self.objects_params[f["name"]] = f["params"]
                    self.objects_combo.addItem(f["name"])

    def add_objects(self, idx):
        """Combo-selection slot: create a blank object set of the chosen
        kind on the server and add a card for it.

        idx is the selected combo index; 0 ("Add objects") and -1 (no
        selection) are ignored.
        """
        logger.debug(f"Add objects with idx {idx}")
        if idx == 0 or idx == -1:
            return
        # self.objects_combo.setCurrentIndex(0)
        # NOTE(review): order == 1 is taken to mean the "boxes" kind; this
        # depends on the combo layout built in _populate_objects — confirm.
        order = idx - 2
        if order == 1:
            params = dict(
                order=order,
                workspace=DataModel.g.current_session
                + "@"
                + DataModel.g.current_workspace,
                fullname="survos2/entity/blank_boxes.csv",
            )
        else:
            params = dict(
                order=order,
                workspace=DataModel.g.current_session
                + "@"
                + DataModel.g.current_workspace,
                fullname="survos2/entity/blank_entities.csv",
            )
        result = Launcher.g.run("objects", "create", **params)
        if result:
            objectsid = result["id"]
            objectsname = result["name"]
            objectsfullname = result["fullname"]
            objectstype = result["kind"]
            self._add_objects_widget(
                objectsid, objectsname, objectsfullname, objectstype, True
            )

    def _add_objects_widget(
        self, objectsid, objectsname, objectsfullname, objectstype, expand=False
    ):
        """Create an ObjectsCard for an object set, store the CSV path in the
        dataset metadata, register the card, and return it."""
        logger.debug(
            f"Add objects {objectsid} {objectsname} {objectsfullname} {objectstype}"
        )
        widget = ObjectsCard(objectsid, objectsname, objectsfullname, objectstype)
        widget.showContent(expand)
        self.objects_layout.addWidget(widget)
        src = DataModel.g.dataset_uri(objectsid, group="objects")
        with DatasetManager(src, out=None, dtype="uint32", fillvalue=0) as DM:
            src_dataset = DM.sources[0]
            src_dataset.set_metadata("fullname", objectsfullname)
        self.existing_objects[objectsid] = widget
        return widget

    def clear(self):
        """Remove every card from the panel."""
        for objects in list(self.existing_objects.keys()):
            self.existing_objects.pop(objects).setParent(None)
        self.existing_objects = {}

    def setup(self):
        """Synchronise the panel with the server: drop cards whose object
        set no longer exists and add cards for newly appeared ones."""
        self._populate_objects()
        params = dict(
            workspace=DataModel.g.current_session + "@" + DataModel.g.current_workspace
        )
        result = Launcher.g.run("objects", "existing", **params)
        logger.debug(f"objects result {result}")
        if result:
            # Remove objects that no longer exist in the server
            for objects in list(self.existing_objects.keys()):
                if objects not in result:
                    self.existing_objects.pop(objects).setParent(None)
            # Populate with new entity if any
            for entity in sorted(result):
                if entity in self.existing_objects:
                    continue
                entity_params = result[entity]
                objectsid = entity_params.pop("id", entity)
                objectsname = entity_params.pop("name", entity)
                objectsfullname = entity_params.pop("fullname", entity)
                objectstype = entity_params.pop("kind", entity)
                logger.debug(f"Entity type: {objectstype}")
                if objectstype != "unknown":
                    widget = self._add_objects_widget(
                        objectsid, objectsname, objectsfullname, objectstype
                    )
                    # NOTE(review): `params` is the workspace dict, which never
                    # contains "fullname", so update_params() is a no-op here —
                    # possibly `entity_params` was intended.
                    widget.update_params(params)
                    self.existing_objects[objectsid] = widget
                else:
                    logger.debug(
                        "+ Skipping loading entity: {}, {}, {}".format(
                            objectsid, objectsname, objectstype
                        )
                    )
class ObjectsCard(Card):
    """Card widget for a single object set (points / boxes / patches).

    Shows the CSV file picker and the scale/offset/crop parameters, and —
    for "patches" sets — additional controls to build entity masks and
    training patches. Loaded entities are listed in an embedded table.
    """

    def __init__(
        self, objectsid, objectsname, objectsfullname, objectstype, parent=None
    ):
        super().__init__(
            title=objectsname,
            collapsible=True,
            removable=True,
            editable=True,
            parent=parent,
        )
        self.objectsid = objectsid
        self.objectsname = objectsname
        self.object_scale = 1.0
        self.objectsfullname = objectsfullname
        self.objectstype = objectstype
        self.widgets = {}
        self.filewidget = FileWidget(extensions="*.csv", save=False)
        self.filewidget.path.setText(self.objectsfullname)
        self.add_row(self.filewidget)
        self.filewidget.path_updated.connect(self.load_data)
        # NOTE(review): compute_btn is created but never added to a row.
        self.compute_btn = PushButton("Compute")
        self.view_btn = PushButton("View", accent=True)
        self.get_btn = PushButton("Get", accent=True)
        self._add_param("scale", title="Scale: ", type="Float", default=1)
        self._add_param("offset", title="Offset: ", type="FloatOrVector", default=0)
        self._add_param(
            "crop_start", title="Crop Start: ", type="FloatOrVector", default=0
        )
        self._add_param(
            "crop_end", title="Crop End: ", type="FloatOrVector", default=9000
        )
        self.flipxy_checkbox = CheckBox(checked=True)
        self.add_row(HWidgets(None, self.flipxy_checkbox, Spacing(35)))
        self.add_row(HWidgets(None, self.view_btn, self.get_btn, Spacing(35)))
        self.view_btn.clicked.connect(self.view_objects)
        self.get_btn.clicked.connect(self.get_objects)
        # Initial defaults; the widget values are read in get_objects().
        # (The original also copied the widget values into cfg here, but
        # immediately overwrote them with these literals — that dead code
        # has been removed.)
        cfg.object_scale = 1.0
        cfg.object_offset = (0, 0, 0)
        cfg.object_crop_start = (0, 0, 0)
        cfg.object_crop_end = (1e9, 1e9, 1e9)
        if self.objectstype == "patches":
            self._add_annotations_source()
            self.entity_mask_bvol_size = LineEdit3D(default=64, parse=int)
            self._add_feature_source()
            self.make_entity_mask_btn = PushButton("Make entity mask", accent=True)
            self.make_entity_mask_btn.clicked.connect(self.make_entity_mask)
            self.make_patches_btn = PushButton("Make patches", accent=True)
            self.make_patches_btn.clicked.connect(self.make_patches)
            self.add_row(HWidgets(None, self.entity_mask_bvol_size, self.make_entity_mask_btn, Spacing(35)))
            self.add_row(HWidgets(None, self.make_patches_btn, Spacing(35)))
        self.table_control = TableWidget()
        self.add_row(self.table_control.w, max_height=500)
        cfg.entity_table = self.table_control

    def _add_param(self, name, title=None, type="String", default=None):
        """Add a labelled line-edit row and register it in self.widgets.

        Only "Int", "Float", "FloatOrVector" and "IntOrVector" are
        supported; any other type is silently ignored.
        """
        if type == "Int":
            p = LineEdit(default=default, parse=int)
        elif type == "Float":
            p = LineEdit(default=default, parse=float)
        elif type == "FloatOrVector":
            p = LineEdit3D(default=default, parse=float)
        elif type == "IntOrVector":
            p = LineEdit3D(default=default, parse=int)
        else:
            p = None
        if title is None:
            title = name
        if p:
            self.widgets[name] = p
            self.add_row(HWidgets(None, title, p, Spacing(35)))

    def load_data(self, path):
        """File-picker slot: remember the newly selected CSV path."""
        self.objectsfullname = path
        logger.debug(f"Setting objectsfullname: {self.objectsfullname}")

    def card_deleted(self):
        """Remove the object set from the workspace and drop this card."""
        params = dict(objects_id=self.objectsid, workspace=True)
        result = Launcher.g.run("objects", "remove", **params)
        # Guard against a failed/empty server response.
        if result and result["done"]:
            self.setParent(None)
            self.table_control = None

    def _add_annotations_source(self):
        """Add the annotation-level selector row (patches sets only)."""
        self.annotations_source = LevelComboBox(full=True)
        self.annotations_source.fill()
        self.annotations_source.setMaximumWidth(250)
        widget = HWidgets(
            "Annotation:", self.annotations_source, Spacing(35), stretch=1
        )
        self.add_row(widget)

    def card_title_edited(self, newtitle):
        """Rename the object set on the server; return True on success."""
        logger.debug(f"Edited entity title {newtitle}")
        params = dict(objects_id=self.objectsid, new_name=newtitle, workspace=True)
        result = Launcher.g.run("objects", "rename", **params)
        # Guard against a failed/empty server response.
        return bool(result and result["done"])

    def view_objects(self):
        """Ask the viewer (via the client event queue) to display this set."""
        logger.debug(f"Transferring objects {self.objectsid} to viewer")
        cfg.ppw.clientEvent.emit(
            {
                "source": "objects",
                "data": "view_objects",
                "objects_id": self.objectsid,
                "flipxy": self.flipxy_checkbox.value(),
            }
        )

    def update_params(self, params):
        """Refresh the cached CSV path from a server metadata dict."""
        if "fullname" in params:
            self.objectsfullname = params["fullname"]

    def _add_feature_source(self):
        """Add the feature selector row (patches sets only)."""
        self.feature_source = FeatureComboBox()
        self.feature_source.fill()
        self.feature_source.setMaximumWidth(250)
        widget = HWidgets("Feature:", self.feature_source, Spacing(35), stretch=1)
        self.add_row(widget)

    def get_objects(self):
        """Load the CSV with the current scale/offset/crop settings, update
        the server-side metadata, and populate the entity table."""
        cfg.object_scale = self.widgets["scale"].value()
        cfg.object_offset = self.widgets["offset"].value()
        cfg.object_crop_start = self.widgets["crop_start"].value()
        cfg.object_crop_end = self.widgets["crop_end"].value()
        dst = DataModel.g.dataset_uri(self.objectsid, group="objects")
        logger.debug(f"objectsfullname: {self.objectsfullname}")
        params = dict(
            dst=dst,
            fullname=self.objectsfullname,
            scale=cfg.object_scale,
            offset=cfg.object_offset,
            crop_start=cfg.object_crop_start,
            crop_end=cfg.object_crop_end,
        )
        logger.debug(f"Getting objects with params {params}")
        result = Launcher.g.run("objects", "update_metadata", workspace=True, **params)
        if self.objectstype == "points":
            tabledata, self.entities_df = setup_entity_table(
                self.objectsfullname,
                scale=cfg.object_scale,
                offset=cfg.object_offset,
                crop_start=cfg.object_crop_start,
                crop_end=cfg.object_crop_end,
                flipxy=self.flipxy_checkbox.value()
            )
        elif self.objectstype == "boxes":
            tabledata, self.entities_df = setup_bb_table(
                self.objectsfullname,
                scale=cfg.object_scale,
                offset=cfg.object_offset,
                crop_start=cfg.object_crop_start,
                crop_end=cfg.object_crop_end,
                flipxy=self.flipxy_checkbox.value()
            )
        elif self.objectstype == "patches":
            tabledata, self.entities_df = setup_entity_table(
                self.objectsfullname,
                scale=cfg.object_scale,
                offset=cfg.object_offset,
                crop_start=cfg.object_crop_start,
                crop_end=cfg.object_crop_end,
                flipxy=self.flipxy_checkbox.value()
            )
        cfg.tabledata = tabledata
        logger.debug(f"Loaded tabledata {tabledata}")
        # BUG FIX: set_data was previously called twice with the same payload.
        self.table_control.set_data(tabledata)
        self.collapse()
        self.expand()

    def make_entity_mask(self):
        """Rasterise the loaded entities into a mask volume and store it as
        a new raw feature in the workspace."""
        src = DataModel.g.dataset_uri(self.feature_source.value(), group="features")
        with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
            src_array = DM.sources[0][:]
        entity_arr = np.array(self.entities_df)
        bvol_dim = self.entity_mask_bvol_size.value()
        # NOTE(review): the entity coordinates are offset by the bounding
        # volume size before mask generation — presumably to anchor each
        # volume; confirm against make_entity_mask's contract.
        entity_arr[:, 0] -= bvol_dim[0]
        entity_arr[:, 1] -= bvol_dim[1]
        entity_arr[:, 2] -= bvol_dim[2]
        from survos2.entity.entities import make_entity_mask
        gold_mask = make_entity_mask(
            src_array, entity_arr, flipxy=True, bvol_dim=bvol_dim
        )[0]
        # Create a new raw feature to hold the generated mask.
        params = dict(feature_type="raw", workspace=True)
        result = Launcher.g.run("features", "create", **params)
        if result:
            fid = result["id"]
            ftype = result["kind"]
            fname = result["name"]
            logger.debug(f"Created new object in workspace {fid}, {ftype}, {fname}")
            dst = DataModel.g.dataset_uri(fid, group="features")
            with DatasetManager(dst, out=dst, dtype="float32", fillvalue=0) as DM:
                DM.out[:] = gold_mask
            cfg.ppw.clientEvent.emit(
                {"source": "objects_plugin", "data": "refresh", "value": None}
            )

    def make_patches(self):
        """Generate training patches around the loaded entities using the
        selected feature volume and annotation level, then ask the GUI to
        display them."""
        src = DataModel.g.dataset_uri(self.feature_source.value(), group="features")
        with DatasetManager(src, out=None, dtype="float32", fillvalue=0) as DM:
            src_array = DM.sources[0][:]
        objects_scale = 1.0
        entity_meta = {
            "0": {
                "name": "class1",
                "size": np.array((15, 15, 15)) * objects_scale,
                "core_radius": np.array((7, 7, 7)) * objects_scale,
            },
        }
        entity_arr = np.array(self.entities_df)
        combined_clustered_pts, classwise_entities = organize_entities(
            src_array, entity_arr, entity_meta, plot_all=False
        )
        wparams = {}
        wparams["entities_offset"] = (0, 0, 0)
        wparams["entity_meta"] = entity_meta
        wparams["workflow_name"] = "Make_Patches"
        wparams["proj"] = DataModel.g.current_workspace
        wf = PatchWorkflow(
            [src_array], combined_clustered_pts, classwise_entities, src_array, wparams, combined_clustered_pts
        )
        src = DataModel.g.dataset_uri(self.annotations_source.value().rsplit("/", 1)[-1], group="annotations")
        with DatasetManager(src, out=None, dtype="uint16", fillvalue=0) as DM:
            src_dataset = DM.sources[0]
            # Keep only the low 4 bits of the annotation volume before
            # extracting labels (see the unique() log below).
            anno_level = src_dataset[:] & 15
            logger.debug(f"Obtained annotation level with labels {np.unique(anno_level)}")
            logger.debug(f"Making patches in path {src_dataset._path}")
            # Note: this calls the module-level make_patches() imported from
            # survos2.entity.patches, not this method.
            train_v_density = make_patches(wf, entity_arr, src_dataset._path,
                                           proposal_vol=(anno_level > 0) * 1.0,
                                           padding=self.entity_mask_bvol_size.value(), num_augs=0, max_vols=-1)
        self.patches = train_v_density
        cfg.ppw.clientEvent.emit(
            {"source": "panel_gui", "data": "view_patches", "patches_fullname": train_v_density}
        )
| 37.562633 | 111 | 0.603154 |
6e27e9a98e0663d5f4593b8e13414810400eac10 | 1,248 | py | Python | src/calc_orientation.py | ouyang-lab/CAPC | e0fcc698da833b9195315d6769bd076646323289 | [
"Apache-2.0"
] | 5 | 2020-08-24T16:18:45.000Z | 2021-07-07T16:54:32.000Z | src/calc_orientation.py | ouyang-lab/CAPC | e0fcc698da833b9195315d6769bd076646323289 | [
"Apache-2.0"
] | null | null | null | src/calc_orientation.py | ouyang-lab/CAPC | e0fcc698da833b9195315d6769bd076646323289 | [
"Apache-2.0"
] | 1 | 2020-12-09T04:15:59.000Z | 2020-12-09T04:15:59.000Z | import sys
import gzip
import numpy as np
if __name__ == "__main__":
f_names = sys.argv[1:]
max_value = 100000
bin_size = 50
threshold = 0.01
data = []
total_bins = (max_value/bin_size)+1
for no, f_name in enumerate(f_names):
#prefix = f_name.split("/")[-1].replace(".txt.gz", "")
d = np.zeros(total_bins)
with gzip.open(f_name, "rb") as f:
for line in f:
row = line.strip("\r\n").split("\t")
size, count = (int(row[0]), int(row[1]))
if size < max_value:
s = size/bin_size
d[s] += count
else:
d[max_value/bin_size] += count
d = d[::-1].cumsum()
data.append(d)
data = np.array(data)
current_size = max_value
for no, d in enumerate(data.T):
p = d/d.sum()
if np.all(abs(p-0.25)<=threshold):
current_size = (total_bins-no)*bin_size
else:
break
print "Orientation Size (+/-%s): %s" % (threshold, current_size)
for no, d in enumerate(data.T):
p = d/d.sum()
print "\t".join(map(str, [(total_bins-no)*bin_size]+p.tolist()))
| 23.54717 | 72 | 0.491186 |
6e28319339ecb10a654afec47c04531f1e4fc2e5 | 5,459 | py | Python | tests/benchmark/preprocess_img/preproc.py | mpascucci/AST-image-processing | 54111e874237f0c146760d514eea96131177878a | [
"ECL-2.0",
"Apache-2.0"
] | 6 | 2020-11-24T15:55:35.000Z | 2021-12-31T11:52:56.000Z | tests/benchmark/preprocess_img/preproc.py | mpascucci/AST-image-processing | 54111e874237f0c146760d514eea96131177878a | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2020-11-24T15:46:15.000Z | 2020-11-24T15:46:15.000Z | tests/benchmark/preprocess_img/preproc.py | mpascucci/AST-image-processing | 54111e874237f0c146760d514eea96131177878a | [
"ECL-2.0",
"Apache-2.0"
] | 3 | 2021-02-04T10:08:43.000Z | 2022-02-21T02:00:47.000Z | from tqdm import tqdm
import os
import glob
import pickle
import numpy as np
from imageio import imread, imwrite
import astimp
from multiprocessing import Pool, cpu_count
from functools import partial
class ErrorInPreproc(Exception):
    """Raised by PreprocResults.get_by_name for images whose preprocessing
    failed (i.e. images that have a non-INFO entry in the error log)."""
    pass
class Dataset():
    """A collection of files stored in a single base folder."""

    def __init__(self, base_path, glob_patterns=('*.jpg', '*.JPG', '*.png', "*.PNG")):
        """base_path : folder where the files are stored.
        glob_patterns : patterns used to select files (e.g. ['*.jpg'])."""
        assert os.path.exists(base_path), "input folder '{}' not found".format(base_path)
        self.base_path = base_path
        # Collect matches pattern by pattern, preserving pattern order.
        matches = []
        for pat in glob_patterns:
            matches.extend(glob.glob(os.path.join(base_path, pat)))
        self.paths = matches
        # Base file names without extension.
        self.names = [os.path.basename(p).split('.')[0] for p in self.paths]
class PreprocResults():
    """Access to preprocessed, pickled AST images stored in one folder.

    Pickles carry the same base name as the source image
    ("234_SLR.jpg" <-> "234_SLR.jpg.pickle"); an optional "error_log.txt"
    in the same folder maps failed images to their error text.
    """

    def __init__(self, pickles_folder):
        """pickles_folder : folder containing *.pickle files and, optionally,
        an error_log.txt with "<name>,<error text>" lines."""
        if not os.path.exists(pickles_folder):
            raise FileNotFoundError("{} does not exist".format(pickles_folder))
        self.pf = pickles_folder
        self.ds = Dataset(self.pf, glob_patterns=("*.pickle",))
        self.names = self.ds.names
        # BUG FIX: self.errors is now always a dict (it used to be an empty
        # *list* when no error log was present, exposing two different types).
        self.errors = {}
        errorlog_path = os.path.join(pickles_folder, "error_log.txt")
        if os.path.exists(errorlog_path):
            with open(errorlog_path, 'r') as f:
                lines = f.readlines()
            self.errors = {line.split(',')[0]: line.split(',')[1]
                           for line in lines}

    def get_by_name(self, name):
        """Load a pickle by name.

        Pickles have the same name as the image they come from:
        234_SLR_ESBL.jpg <-> 234_SLR_ESBL.jpg.pickle

        Raises ErrorInPreproc if the name has a non-INFO entry in the error
        log, FileNotFoundError if the pickle does not exist.
        """
        # BUG FIX: the original compared a *list* against the string 'INFO'
        # (`split(" ") != 'INFO'`), which is always True, so the INFO
        # exemption never applied. Compare the first token instead.
        if name in self.errors and self.errors[name].split(" ")[0] != 'INFO':
            raise ErrorInPreproc(self.errors[name].strip())
        path = os.path.join(self.pf, name + '.pickle')
        if not os.path.exists(path):
            raise FileNotFoundError("Pickle {} not found.".format(path))
        with open(path, 'rb') as f:
            p = pickle.load(f)
        return p

    def __getitem__(self, name):
        return self.get_by_name(name)

    def get_all(self):
        """Load all pickles in the input folder."""
        output = []
        for path in tqdm(self.ds.paths, desc="Loading pickles"):
            with open(path, 'rb') as f:
                p = pickle.load(f)
            output.append(p)
        return output
def preprocess_one_image(path):
    """Run the astimp AST analysis on one image and bundle the results
    (analysis object, crop, circles, pellets, labels, inhibitions) in a dict."""
    image = np.array(imread(path))
    ast = astimp.AST(image)
    crop = ast.crop
    circles = ast.circles
    pellets = ast.pellets
    labels = ast.labels_text
    # NOTE: astimp does not create the preprocessing object when no
    # pellets were found, hence the guard on `circles`.
    preproc = ast.preproc if len(circles) != 0 else None
    return {
        "ast": ast,
        "preproc": preproc,
        "circles": circles,
        "pellets": pellets,
        "labels": labels,
        "crop": crop,
        "fname": os.path.basename(path),
        "inhibitions": ast.inhibitions,
    }
def pickle_one_preproc(idx, output_path, image_paths, error_list,
                       skip_existing=False, mute=True):
    """Preprocess image_paths[idx] and pickle the result into output_path.

    Failures are recorded as "<file name>, <error text>" in error_list[idx]
    (the caller pre-allocates one slot per image); images without pellets
    get an "INFO : ..." entry instead. Always returns None.
    """
    if mute:
        def log_function(msg):
            # no-op stand-in for tqdm.write when muted
            return msg
    else:
        log_function = tqdm.write
    path = image_paths[idx]
    # Compute fname before the try block: the except handler needs it.
    fname = os.path.basename(path)  # file name from path
    try:
        ofpath = os.path.join(output_path, f"{fname}.pickle")  # output file path
        if skip_existing and os.path.exists(ofpath):
            # skip if the output file already exists
            return None
        # WARNING for an unknown reason the pickle call must be inside this function
        pobj = preprocess_one_image(path)
        with open(ofpath, 'wb') as f:
            pickle.dump(pobj, f)
        if len(pobj['circles']) == 0:
            # image processed, but no pellet found
            error_list[idx] = "INFO : {}, No pellets found".format(fname)
            log_function("No pellet found in {}".format(fname))
    except Exception as e:
        ex_text = ', '.join(str(arg) for arg in e.args)
        error_list[idx] = "{}, {}".format(fname, ex_text)
        # BUG FIX: report the number of entries recorded so far;
        # len(error_list) was the fixed, pre-allocated list length.
        n_recorded = sum(1 for err in error_list if err)
        log_function("Failed images: {} - {}".format(n_recorded, ex_text))
    return None
def preprocess(img_paths, output_path, skip_existing=False, parallel=True):
    """Preprocess images and pickle each preproc object into output_path.

    img_paths : a list of paths of the image files.
    Returns the list of non-empty error entries produced by
    pickle_one_preproc ("<fname>, <error text>" / "INFO : ..." strings).
    """
    if not os.path.exists(output_path):
        os.mkdir(output_path)
    n_images = len(img_paths)
    if parallel:
        jobs = cpu_count()
        print("Running in parallel on {} processes".format(jobs))
        # BUG FIX: a plain Python list is copied into each worker process,
        # so errors recorded by the workers never reached the parent and
        # the returned list was always empty. A Manager list is shared
        # across processes.
        from multiprocessing import Manager
        with Manager() as manager:
            errors = manager.list([""] * n_images)
            f = partial(pickle_one_preproc,
                        image_paths=img_paths,
                        output_path=output_path,
                        error_list=errors,
                        skip_existing=skip_existing
                        )
            with Pool(jobs) as p:
                list(tqdm(p.imap(f, range(n_images)), total=n_images))
            errors = [e for e in errors if e != ""]
    else:
        errors = [""] * n_images
        for idx in tqdm(range(n_images), desc="Preprocessing"):
            pickle_one_preproc(idx, output_path, img_paths, errors,
                               skip_existing, mute=False)
        # BUG FIX (consistency): the sequential branch previously returned
        # the raw pre-allocated list, empty slots included.
        errors = [e for e in errors if e != ""]
    return errors
| 31.923977 | 98 | 0.596263 |
6e28b70b57732d2994e0b212e99122e11d61d96f | 1,024 | py | Python | src/main.py | Evelkos/PAM-and-CLARA | 26fbb8d2d4a7924ce1d0d504c4b23bac38238c69 | [
"MIT"
] | null | null | null | src/main.py | Evelkos/PAM-and-CLARA | 26fbb8d2d4a7924ce1d0d504c4b23bac38238c69 | [
"MIT"
] | null | null | null | src/main.py | Evelkos/PAM-and-CLARA | 26fbb8d2d4a7924ce1d0d504c4b23bac38238c69 | [
"MIT"
] | null | null | null | from clustering_algorithms import CLARA, PAM, get_initial_points
from data_loaders import load_data
from timer import Timer
from visualizers import plot_data
# Dataset to cluster; uncomment one of the alternatives to use it instead.
# FILENAME = "datasets/artificial/sizes3.arff"
FILENAME = "datasets/artificial/zelnik4.arff"
# FILENAME = "datasets/artificial/xclara.arff"
# FILENAME = "datasets/real-world/glass.arff"
def run_clara(data, points):
    """Cluster `points` with CLARA (one cluster per class in `data`) and
    return the result of get_result_df()."""
    clusterer = CLARA(points, len(data["classes"]), labels=data["classes"])
    clusterer.run()
    return clusterer.get_result_df()
def run_pam(data, points):
    """Cluster `points` with PAM (one cluster per class in `data`) and
    return the result of get_result_df()."""
    clusterer = PAM(points, len(data["classes"]), labels=data["classes"])
    clusterer.run()
    return clusterer.get_result_df()
if __name__ == "__main__":
    data = load_data(FILENAME)
    # plot_data(data["df"], data["classes"], data["class_column"])
    # Build the initial points from the coordinate columns.
    points = get_initial_points(data["df"], data["coordinates_columns"])
    # result = run_clara(data, points)
    result = run_pam(data, points)
    # Plot the clustering result coloured by the assigned "cluster" column.
    plot_data(
        result, data["classes"], "cluster", attributes_names=data["coordinates_columns"]
    )
| 30.117647 | 88 | 0.709961 |
6e2927924bc2223cbcdf3f80649b9ddc5b016ea6 | 1,143 | py | Python | module/test.py | yuxy000/PythonSyntax | efbfddbd62d88fa6768035d0155c9e8d17cb5670 | [
"MIT"
] | null | null | null | module/test.py | yuxy000/PythonSyntax | efbfddbd62d88fa6768035d0155c9e8d17cb5670 | [
"MIT"
] | null | null | null | module/test.py | yuxy000/PythonSyntax | efbfddbd62d88fa6768035d0155c9e8d17cb5670 | [
"MIT"
] | null | null | null | from module import support
from module import fibo
import sys
support.print_func("Runoob")
fibo.fib(1000)
print(fibo.fib2(100))
print(fibo.__name__)
# 把模块中的一个函数赋给一个本地的名称
fib = fibo.fib
fib(10)
"""
from…import 语句
Python的from语句让你从模块中导入一个指定的部分到当前命名空间中,语法如下:
from modname import name1[, name2[, ... nameN]]
例如,要导入模块 fibo 的 fib 函数,使用如下语句:
>>> from fibo import fib, fib2
>>> fib(500)
1 1 2 3 5 8 13 21 34 55 89 144 233 377
这个声明不会把整个fibo模块导入到当前的命名空间中,它只会将fibo里的fib函数引入进来。
From…import* 语句
把一个模块的所有内容全都导入到当前的命名空间也是可行的,只需使用如下声明:
from modname import *
这提供了一个简单的方法来导入一个模块中的所有项目。然而这种声明不该被过多地使用。
"""
"""
__name__属性
一个模块被另一个程序第一次引入时,其主程序将运行。如果我们想在模块被引入时,模块中的某一程序块不执行,我们可以用__name__属性来使该程序块仅在该模块自身运行时执行。
#!/usr/bin/python3
# Filename: using_name.py
if __name__ == '__main__':
print('程序自身在运行')
else:
print('我来自另一模块')
运行输出如下:
$ python using_name.py
程序自身在运行
$ python
>>> import using_name
我来自另一模块
>>>
说明: 每个模块都有一个__name__属性,当其值是'__main__'时,表明该模块自身在运行,否则是被引入。
"""
"""
内置的函数 dir() 可以找到模块内定义的所有名称。以一个字符串列表的形式返回:
"""
print(dir(fibo))
print(dir(sys))
# 如果没有给定参数,那么 dir() 函数会罗列出当前定义的所有名称
print(dir())
| 19.05 | 85 | 0.726159 |
6e2a9766e0a79f77304a55be682d4bc167bde209 | 4,459 | py | Python | src/utils.py | zimonitrome/AbstractionNet | a037b696ccac015936d60026cb1ac4ebafc68371 | [
"MIT"
] | null | null | null | src/utils.py | zimonitrome/AbstractionNet | a037b696ccac015936d60026cb1ac4ebafc68371 | [
"MIT"
] | null | null | null | src/utils.py | zimonitrome/AbstractionNet | a037b696ccac015936d60026cb1ac4ebafc68371 | [
"MIT"
] | null | null | null | import torch
from einops import rearrange
import svgwrite
###########################################
# Normalization / Standardization functions
###########################################
def normalize_functional(tensor: torch.Tensor, mean: list, std: list):
    """
    Channel-wise standardization: subtract `mean` and divide by `std`
    along the channel dimension (dim -3).
    [... C H W] -> [... C H W]
    """
    device = tensor.device
    mu = torch.tensor(mean).view(-1, 1, 1).to(device)
    sigma = torch.tensor(std).view(-1, 1, 1).to(device)
    return tensor.sub(mu).div(sigma)
def unnormalize_functional(tensor: torch.Tensor, mean: list, std: list):
    """
    Inverse of normalize_functional: multiply by `std` and add `mean`
    along the channel dimension (dim -3), then clip to [0, 1].
    [... C H W] -> [... C H W]
    """
    device = tensor.device
    mu = torch.tensor(mean).view(-1, 1, 1).to(device)
    sigma = torch.tensor(std).view(-1, 1, 1).to(device)
    restored = tensor.mul(sigma).add(mu)
    return restored.clamp(0, 1)
def unnormalize_to(x, x_min, x_max):
    """
    Linearly rescale x onto [x_min, x_max]: 0 maps to x_min, 1 maps to x_max.
    """
    span = x_max - x_min
    return x_min + x * span
############################
# Image convertion functions
############################
def rgba_to_rgb(rgba: torch.Tensor):
    """
    Drop the alpha channel by premultiplying: the first C-1 channels are
    multiplied by the last (alpha) channel.
    [... 4 H W] -> [... 3 H W]
    """
    color = rgba[..., :-1, :, :]
    alpha = rgba[..., -1:, :, :]
    return color * alpha
def rgb_to_rgba(rgb: torch.Tensor, fill: float = 1.0):
    """
    Append an alpha channel filled with `fill` (1.0 by default).
    [... 3 H W] -> [... 4 H W]
    """
    alpha = torch.full_like(rgb[..., :1, :, :], fill_value=fill)
    return torch.cat([rgb, alpha], dim=-3)
###########################################
# Alpha compositing/decompositing functions
###########################################
def alpha_composite(base, added, eps=1e-8):
    """
    Alpha-blend `added` over `base`; both tensors use their last channel
    (dim -3) as alpha. `eps` avoids division by zero where the blended
    alpha is 0.
    [... C H W], [... C H W] -> [... C H W]
    """
    base_color = base[..., :-1, :, :]
    base_alpha = base[..., -1:, :, :]
    top_color = added[..., :-1, :, :]
    top_alpha = added[..., -1:, :, :]
    # https://en.wikipedia.org/wiki/Alpha_compositing#Alpha_blending
    out_alpha = (1 - top_alpha) * base_alpha + top_alpha
    out_color = ((1 - top_alpha) * base_alpha * base_color + top_alpha * top_color) / (out_alpha + eps)
    return torch.concat([out_color, out_alpha], dim=-3)
def alpha_composite_multiple(images_tensor):
    """
    Composite a stack of N images (stacked on dim -4) into a single image
    by repeatedly alpha-blending each layer over the running result.
    Assumes the last channel is an alpha channel.
    [... N C H W] -> [... C H W]
    """
    # torch.movedim replaces the former einops.rearrange call
    # ("... N C H W -> N ... C H W"), removing the extra dependency.
    layers = images_tensor.movedim(-4, 0)
    # Start with the first (bottom) image, then blend the rest on top.
    composited = layers[0]
    for layer in layers[1:]:
        composited = alpha_composite(composited, layer)
    return composited
def get_visible_mask(shapes):
    """
    Given N rendered layers (stacked on dim -4, last channel = alpha) that
    would be composited first to last, return per-layer masks of the alpha
    actually visible in the final composition.
    [... N C H W] -> [... N 1 H W]
    """
    # torch.movedim replaces the former einops.rearrange calls, removing
    # the extra dependency. Iterate layers top-most first.
    layers = shapes.movedim(-4, 0).flip(0)
    # Running total coverage from the layers already processed (above).
    accumulated_alpha = torch.zeros_like(layers[0, ..., 0, :, :])
    masks = torch.zeros_like(layers[..., 0, :, :])
    for i, layer in enumerate(layers):
        layer_alpha = layer[..., -1, :, :]
        # Fraction of this layer's alpha not hidden by the layers above it:
        # alpha_b = alpha - accumulated * alpha  (from "a over b" blending,
        # alpha_0 = (1 - alpha_a) * alpha_b + alpha_a).
        masks[i] = layer_alpha - accumulated_alpha * layer_alpha
        accumulated_alpha = (1 - layer_alpha) * accumulated_alpha + layer_alpha
    # Restore bottom-to-top order and layout [... N 1 H W].
    return masks.flip(0).movedim(0, -3).unsqueeze(-3)
6e2c7487821c1b466bfeb152a868353bd01ba3f7 | 3,742 | py | Python | CellMQ.py | edjuaro/cell-migration-quantification | b6479cc8525a1ac8bdaf0abfc66dec57de0be21e | [
"MIT"
] | null | null | null | CellMQ.py | edjuaro/cell-migration-quantification | b6479cc8525a1ac8bdaf0abfc66dec57de0be21e | [
"MIT"
] | null | null | null | CellMQ.py | edjuaro/cell-migration-quantification | b6479cc8525a1ac8bdaf0abfc66dec57de0be21e | [
"MIT"
] | null | null | null | import cv2
import numpy as np
from skimage import draw
from skimage import io
# Read image
im_in = cv2.imread("analyses/MDA231_stopper_1_c3.tif", cv2.IMREAD_GRAYSCALE);
# Inverse binary threshold:
# pixels above 20 become 0, pixels at or below 20 become 255.
# (The original comment mentioned 220, which did not match the code.)
th, im_th = cv2.threshold(im_in, 20, 255, cv2.THRESH_BINARY_INV);
# Copy the thresholded image.
im_floodfill = im_th.copy()
# Mask used for flood filling.
# Notice the size needs to be 2 pixels larger than the image.
h, w = im_th.shape[:2]
mask = np.zeros((h+2, w+2), np.uint8)
# Floodfill from point (0, 0)
cv2.floodFill(im_floodfill, mask, (0,0), 255);
# Invert floodfilled image
im_floodfill_inv = cv2.bitwise_not(im_floodfill)
# Combine the two images to get the foreground with holes filled.
im_out = im_th | im_floodfill_inv
io.imsave(fname='temp_output.png', arr=im_out)
# im_out_inv = cv2.bitwise_not(im_out)
# Build a small structuring element, then erode/dilate to clean the mask.
k_size = 2
k_half = k_size/2
kernel = np.ones((k_size,k_size),np.uint8)
# NOTE(review): the kernel is created with np.ones and the circle
# coordinates are then set to 1 again, so this is a no-op — np.zeros was
# probably intended. Also `shape=im_th.shape` refers to the full image,
# not the (k_size, k_size) kernel — confirm.
coords = draw.circle(k_half, k_half, k_half, shape=im_th.shape)
kernel[coords] = 1
erosion = cv2.erode(im_out,kernel,iterations = 1)
# Dilate the inverted erosion, then invert the result back.
dilation = cv2.dilate(cv2.bitwise_not(erosion),kernel,iterations = 1)
dilation = cv2.bitwise_not(dilation)
# io.imshow(dilation)
io.imsave(fname='mask.png', arr=dilation)
# Display images.
# io.imsave(fname='mask.png', arr=im_out)
# # mostly from http://nickc1.github.io/python,/matlab/2016/05/17/Standard-Deviation-(Filters)-in-Matlab-and-Python.html
# import cv2
# from skimage import draw
# from skimage import io
# filename = 'analyses/MDA231_stopper_1_c3.tif'
# plate = io.imread(filename,as_grey=True)
# image = plate
# #io.imshow(image)
# # io.imsave(fname='temp_output.png', arr=image)
# import numpy as np
# # img = cv2.imread('....') # Read in the image
# sobelx = cv2.Sobel(image,cv2.CV_64F,1,0) # Find x and y gradients
# sobely = cv2.Sobel(image,cv2.CV_64F,0,1)
# # Find magnitude and angle
# I2 = np.sqrt(sobelx**2.0 + sobely**2.0)
# # angle = np.arctan2(sobely, sobelx) * (180 / np.pi)
# # io.imshow(I2)
# # io.imsave(fname='temp_output.png', arr=I2)
# from scipy.ndimage.filters import uniform_filter
# import numpy as np
# def window_stdev(X, window_size):
# c1 = uniform_filter(X, window_size, mode='reflect')
# c2 = uniform_filter(X*X, window_size, mode='reflect')
# return np.sqrt(c2 - c1*c1)
# # x = np.arange(16).reshape(4,4).astype('float')
# kernel_size = 3
# I1 = window_stdev(I2,kernel_size)*np.sqrt(kernel_size**2/(kernel_size**2 - 1))
# # io.imshow(I1)
# # io.imsave(fname='temp_output.png', arr=I1)
# from scipy.signal import medfilt2d
# I1 = medfilt2d(I1, kernel_size=3)
# # io.imshow(I1)
# # io.imsave(fname='temp_output.png', arr=I1)
# import numpy as np
# from skimage.morphology import reconstruction
# from skimage.exposure import rescale_intensity
# # image = rescale_intensity(I1, in_range=(50, 200))
# image = I1
# seed = np.copy(image)
# seed[1:-1, 1:-1] = image.max()
# mask = image
# filled = reconstruction(seed, mask, method='erosion')
# io.imsave(fname='temp_output.png', arr=filled)
# # kernel = np.zeros((80,80),np.uint8)
# # coords = draw.circle(40, 40, 40, shape=image.shape)
# # kernel[coords] = 1
# # erosion = cv2.erode(I1,kernel,iterations = 1)
# # # io.imshow(erosion)
# # # # kernel = np.ones((40,40),np.uint8)
# # # # erosion = cv2.erode(I1,kernel,iterations = 1)
# # # # io.imshow(erosion)
# # # io.imsave(fname='temp_output.png', arr=erosion)
# # from skimage.morphology import reconstruction
# # fill = reconstruction(I1, erosion, method='erosion')
# # # io.imshow(fill)
# # # io.imsave(fname='temp_output.png', arr=fill)
# # dilation = cv2.dilate(fill,kernel,iterations = 1)
# # # io.imshow(dilation)
# # io.imsave(fname='temp_output.png', arr=dilation) | 27.925373 | 120 | 0.69829 |
6e2d9335521cea1ce24ba509b262882641d75542 | 1,344 | py | Python | test/unit/messages/bloxroute/test_txs_message.py | dolphinridercrypto/bxcommon | 8f70557c1dbff785a5dd3fcdf91176066e085c3a | [
"MIT"
] | 12 | 2019-11-06T17:39:10.000Z | 2022-03-01T11:26:19.000Z | test/unit/messages/bloxroute/test_txs_message.py | dolphinridercrypto/bxcommon | 8f70557c1dbff785a5dd3fcdf91176066e085c3a | [
"MIT"
] | 8 | 2019-11-06T21:31:11.000Z | 2021-06-02T00:46:50.000Z | test/unit/messages/bloxroute/test_txs_message.py | dolphinridercrypto/bxcommon | 8f70557c1dbff785a5dd3fcdf91176066e085c3a | [
"MIT"
] | 5 | 2019-11-14T18:08:11.000Z | 2022-02-08T09:36:22.000Z | from bxcommon.test_utils.abstract_test_case import AbstractTestCase
from bxcommon.messages.bloxroute.txs_message import TxsMessage
from bxcommon.models.transaction_info import TransactionInfo
from bxcommon.test_utils import helpers
from bxcommon.utils.object_hash import Sha256Hash
class TxsMessageTests(AbstractTestCase):

    def test_txs_with_short_ids_message(self):
        """A TxsMessage must survive a serialize/parse round trip."""
        originals = [
            TransactionInfo(Sha256Hash(helpers.generate_bytearray(32)),
                            helpers.generate_bytearray(contents_length),
                            short_id)
            for contents_length, short_id in ((200, 111), (300, 222), (400, 333))
        ]
        serialized = TxsMessage(txs=originals).rawbytes()
        self.assertTrue(serialized)
        parsed = TxsMessage(buf=serialized)
        self.assertTrue(parsed)
        recovered = parsed.get_txs()
        self.assertEqual(len(recovered), len(originals))
        # Every field of every transaction must come back unchanged.
        for original, recovered_tx in zip(originals, recovered):
            self.assertEqual(recovered_tx.short_id, original.short_id)
            self.assertEqual(recovered_tx.contents, original.contents)
            self.assertEqual(recovered_tx.hash, original.hash)
| 38.4 | 110 | 0.738095 |
6e2e387eef5e879a3d06801f9f8eb44b9b39bb68 | 712 | py | Python | CursoEmVideo/Aula16 - Tuplas.py | caique-santana/CursoEmVideo-Curso_Python3 | 86bb67bbbf348544e1135d8657672d4e33fa70e2 | [
"MIT"
] | 1 | 2020-04-15T00:49:02.000Z | 2020-04-15T00:49:02.000Z | CursoEmVideo/Aula16 - Tuplas.py | caique-santana/CursoEmVideo-Curso_Python3 | 86bb67bbbf348544e1135d8657672d4e33fa70e2 | [
"MIT"
] | null | null | null | CursoEmVideo/Aula16 - Tuplas.py | caique-santana/CursoEmVideo-Curso_Python3 | 86bb67bbbf348544e1135d8657672d4e33fa70e2 | [
"MIT"
] | null | null | null | lanche = ('Hambúrguer', 'Suco', 'Pizza', 'Pudim', 'Batata Frita')
# Tuples are immutable: an item assignment such as
# lanche[1] = 'Refrigerante' would raise a TypeError.
print(len(lanche))
print(sorted(lanche))  # sorted() returns a new list; the tuple is untouched
print(lanche)
print(lanche[-3:])
for comida in lanche:
    print(f'Eu vou comer {comida}')
for cont in range(0, len(lanche)):
    print(f'Eu vou comer {lanche[cont]} na posição {cont}')
for pos, comida in enumerate(lanche):
    print(f'Eu Vou comer {comida} na posição {pos}')
print('Comi pra caramba!')
a = (2, 5, 4)
b = (5, 8, 1, 2)
c = b + a  # concatenation builds a new 7-element tuple
print(c)
print(c.index(5, 1))  # first index of 5, searching from position 1
print(f'o tamanho de "c" é {len(c)}')
print(f'Tem {c.count(5)} números 5')
pessoa = ('Gustavo', 39, 'M', 99.88)
# Bug fix: the original deleted `pessoa` and then printed it, which raises
# NameError.  Print the tuple first, then delete the name.
print(pessoa)
del pessoa
| 22.25 | 65 | 0.644663 |
6e2ec7ad4cbde5fb55995e9127da176c9b74eb60 | 167 | py | Python | app/config.py | akabbeke/sd44_server | 7755567c7b273a5ac23b2aacc52477dd4a11d290 | [
"MIT"
] | null | null | null | app/config.py | akabbeke/sd44_server | 7755567c7b273a5ac23b2aacc52477dd4a11d290 | [
"MIT"
] | null | null | null | app/config.py | akabbeke/sd44_server | 7755567c7b273a5ac23b2aacc52477dd4a11d290 | [
"MIT"
] | null | null | null | import yaml
import os

# Resolve the config file relative to this module so loading works no matter
# what the current working directory is.
config_file = os.path.join(os.path.dirname(__file__), "config/config.yml")

with open(config_file, 'r') as stream:
    # safe_load only builds plain Python objects; yaml.load without an
    # explicit Loader can construct arbitrary objects from the YAML stream
    # and has been deprecated since PyYAML 5.1.
    CONFIG = yaml.safe_load(stream)
6e2f62475e9654f761ab72ca7f65f8bb7603adef | 921 | py | Python | python/projects/jenkins_config_xml_parser/main.py | zhaoace/codecraft | bf06267e86bd7386714911b0df4aa0ca0a91d882 | [
"Unlicense"
] | null | null | null | python/projects/jenkins_config_xml_parser/main.py | zhaoace/codecraft | bf06267e86bd7386714911b0df4aa0ca0a91d882 | [
"Unlicense"
] | null | null | null | python/projects/jenkins_config_xml_parser/main.py | zhaoace/codecraft | bf06267e86bd7386714911b0df4aa0ca0a91d882 | [
"Unlicense"
] | null | null | null | import xml.etree.ElementTree as ET
# NOTE(review): this script uses Python 2 print statements; it will not run
# under Python 3 as written.
# Parse a Jenkins job config.xml and inspect its string parameters.
tree = ET.parse('/Users/zhaoli/workspace/splunk/playground/var/lib/jenkins/jobs/Splunk/jobs/develop/jobs/platform/jobs/cli/jobs/trigger_cli_linux/config.xml')
root = tree.getroot()
# SPs = root.findall("properties/hudson.model.ParametersDefinitionProperty/parameterDefinitions/[hudson.model.StringParameterDefinition]")
# Select only the StringParameterDefinition elements whose <name> is 'branch'.
SPs = root.findall("properties/hudson.model.ParametersDefinitionProperty/parameterDefinitions/hudson.model.StringParameterDefinition/[name='branch']")
print "***"
print dir(SPs)
print "***"
# Dump each matching parameter definition for inspection.
for s in SPs:
    print "-----"
    # print s.tag, ":", s.text
    ET.dump(s)
# Build a new 'version' string parameter element (constructed here but not
# attached to the parsed tree).
spd = ET.Element("hudson.model.StringParameterDefinition")
name = ET.SubElement(spd, 'name')
name.text="version"
description=ET.SubElement(spd, 'description')
description.text="The product version"
defaultValue=ET.SubElement(spd, 'defaultValue')
defaultValue.text=""
ET.dump(spd)
tree. | 27.909091 | 158 | 0.761129 |
6e2fe086028f0377c018ceee95df734b7ae1f811 | 986 | py | Python | BLAST/make_fasta.py | cdiaza/bootcamp | 2fda661a44930f70ac8ef15218cc99d099fc4019 | [
"MIT"
] | 1 | 2021-01-16T20:39:41.000Z | 2021-01-16T20:39:41.000Z | BLAST/make_fasta.py | cdiaza/bootcamp | 2fda661a44930f70ac8ef15218cc99d099fc4019 | [
"MIT"
] | null | null | null | BLAST/make_fasta.py | cdiaza/bootcamp | 2fda661a44930f70ac8ef15218cc99d099fc4019 | [
"MIT"
] | 1 | 2021-01-16T20:31:17.000Z | 2021-01-16T20:31:17.000Z | import random
def format_fasta(title, sequence):
    """Format a sequence as a FASTA record.

    Input:
        title - String - Title of the sequence
        sequence - String - Actual sequence
    Output:
        String - header line, sequence wrapped at 70 characters per line,
        followed by a blank line.
    """
    fasta_width = 70  # Number of characters in one line
    # Ceiling division (never below one line).  The previous count,
    # 1 + len(sequence) // width, produced a spurious empty line whenever
    # the length was an exact multiple of the width.
    n_lines = max(1, -(-len(sequence) // fasta_width))
    lines = [sequence[i * fasta_width:(i + 1) * fasta_width]
             for i in range(n_lines)]
    return f"> {title}\n" + "\n".join(lines) + "\n\n"
bases = "actg"  # Alphabet used for the random sequences.

# Write random sequences of increasing length into a FASTA file.
with open("random_sequences.fa", "w") as out_file:
    for seq_len in range(1, 25):  # sequences of different lengths
        for attempt in range(10):  # several sequences per length
            header = f"length_{seq_len} run_{attempt}"
            seq = "".join(random.choices(bases, k=seq_len))
            out_file.write(format_fasta(header, seq))
| 29.878788 | 81 | 0.631846 |
6e3054f23fea6a6c7c56f18a768f57df2c3c07ac | 1,604 | py | Python | unittesting/utils/output_panel.py | guillermooo/UnitTesting | 04802c56d5ccea44043a241050d6fe331c6ff694 | [
"MIT"
] | null | null | null | unittesting/utils/output_panel.py | guillermooo/UnitTesting | 04802c56d5ccea44043a241050d6fe331c6ff694 | [
"MIT"
] | null | null | null | unittesting/utils/output_panel.py | guillermooo/UnitTesting | 04802c56d5ccea44043a241050d6fe331c6ff694 | [
"MIT"
] | null | null | null | import sublime
import os
class OutputPanel:
    """Thin wrapper around a Sublime Text output panel.

    Exposes a file-like interface (write/writeln/flush) so the panel can be
    used as an output sink.
    """

    def __init__(
        self, name, file_regex='', line_regex='', base_dir=None,
        word_wrap=False, line_numbers=False, gutter=False,
        scroll_past_end=False, syntax='Packages/Text/Plain text.tmLanguage'
    ):
        self.name = name
        self.window = sublime.active_window()
        self.output_view = self.window.get_output_panel(name)
        # default to the current file directory
        if not base_dir and self.window.active_view() and \
                self.window.active_view().file_name():
            base_dir = os.path.dirname(self.window.active_view().file_name())
        # Push the display options into the view's settings.
        settings = self.output_view.settings()
        settings.set("result_file_regex", file_regex)
        settings.set("result_line_regex", line_regex)
        settings.set("result_base_dir", base_dir)
        settings.set("word_wrap", word_wrap)
        settings.set("line_numbers", line_numbers)
        settings.set("gutter", gutter)
        settings.set("scroll_past_end", scroll_past_end)
        settings.set("syntax", syntax)
        self.closed = False

    def write(self, s):
        # Lift read-only so 'append' can insert text, then scroll to the end.
        # NOTE(review): the trailing comma below makes this line a 1-tuple
        # expression; harmless but probably unintended.
        self.output_view.set_read_only(False)
        self.output_view.run_command('append', {'characters': s}),
        self.output_view.set_read_only(True)
        self.output_view.show(self.output_view.size())

    def writeln(self, s):
        self.write(s + "\n")

    def flush(self):
        # No buffering is done; present only for file-like compatibility.
        pass

    def show(self):
        self.window.run_command("show_panel", {"panel": "output." + self.name})

    def close(self):
        # Only marks the panel closed; the underlying view is left alone.
        self.closed = True
        pass
| 32.08 | 79 | 0.639651 |
6e3246c7687554b238139dfec4bd2b58d1c2ba17 | 673 | py | Python | main.py | jon-choi/hillsbarber | 346e9cbe5de7c5bf8a9136e71981b058323784a1 | [
"Apache-2.0"
] | null | null | null | main.py | jon-choi/hillsbarber | 346e9cbe5de7c5bf8a9136e71981b058323784a1 | [
"Apache-2.0"
] | null | null | null | main.py | jon-choi/hillsbarber | 346e9cbe5de7c5bf8a9136e71981b058323784a1 | [
"Apache-2.0"
] | null | null | null | from flask import Flask, render_template
app = Flask(__name__)
app.config['DEBUG'] = True

# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.


@app.route('/')
def hello(name=None):
    """Return a friendly HTTP greeting."""
    return render_template('template.html', name=name, text="Jinja Flask")
    # return render_template('bootstrap_cover.html', name=name)


# Disabled /rates endpoint kept for reference:
# @app.route('/rates')
# def helloRates(name='rates'):
# return render_template('template.html',name=name)


@app.errorhandler(404)
def page_not_found(e):
    """Return a custom 404 error."""
    return 'Sorry, nothing at this URL.', 404
| 29.26087 | 76 | 0.708767 |
6e330bec332cbcb5e47190df3547281fe5168a28 | 903 | py | Python | tests/test_echo_server_contextvar.py | rednafi/think-async | 3642afc0d8661b10affd953ce3b239f3e6b3009b | [
"MIT"
] | 87 | 2021-04-14T09:51:30.000Z | 2022-03-24T10:38:41.000Z | tests/test_echo_server_contextvar.py | rednafi/think-async | 3642afc0d8661b10affd953ce3b239f3e6b3009b | [
"MIT"
] | 3 | 2021-06-27T18:06:11.000Z | 2022-03-24T19:56:38.000Z | tests/test_echo_server_contextvar.py | rednafi/think-async | 3642afc0d8661b10affd953ce3b239f3e6b3009b | [
"MIT"
] | 4 | 2021-05-12T01:36:14.000Z | 2022-01-28T04:06:12.000Z | from unittest.mock import Mock, patch
import pytest
import patterns.echo_server_contextvar as main
@patch.object(main, "client_addr_var", Mock())
def test_render_goodbye(capsys):
    """render_goodbye() must produce the goodbye banner on stdout only."""
    # Call 'render_goodbye' with the client-address contextvar mocked out.
    goodbye_string = main.render_goodbye()
    print(goodbye_string)
    # Assert.
    out, err = capsys.readouterr()
    assert err == ""
    assert "Good bye, client @" in out
@pytest.mark.asyncio
@patch("patterns.echo_server_contextvar.asyncio.start_server", autospec=True)
@patch("patterns.echo_server_contextvar.asyncio.sleep", autospec=True)
async def test_server(mock_asyncio_sleep, mock_asyncio_start_server):
    """server() should sleep stop_after times and start exactly one server."""
    # Decorators apply bottom-up, so the sleep mock is the first parameter.
    stop_after = 5
    # Call 'server()'.
    await main.server(stop_after=stop_after)
    # Assert.
    assert mock_asyncio_sleep.call_count == stop_after
    # The server is expected to listen with the app's handler on port 8081.
    args = main.handle_request, "127.0.0.1", 8081
    mock_asyncio_start_server.assert_called_once_with(*args)
| 25.8 | 77 | 0.743079 |
6e3355f7d36e6d39cee7c23d5acd90666f7629a8 | 693 | py | Python | test.py | riquedev/SSLProxies24Feed | 93ab23a6794ae7f40002eb464a9c443afe44db86 | [
"MIT"
] | null | null | null | test.py | riquedev/SSLProxies24Feed | 93ab23a6794ae7f40002eb464a9c443afe44db86 | [
"MIT"
] | 1 | 2017-09-15T13:27:09.000Z | 2017-09-15T14:43:28.000Z | test.py | riquedev/SSLProxies24Feed | 93ab23a6794ae7f40002eb464a9c443afe44db86 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: rique_dev (rique_dev@hotmail.com)
from SSLProxies24.Feed import Feed
from SSLProxies24.Check import CheckProxy
import time
import gc
# Fetch the proxy list from the feed.
prx = Feed().PROXY_LIST
# Create the checker.
chk = CheckProxy()
# Start validating the list (presumably asynchronous -- the sleep below
# waits for progress; TODO confirm against CheckProxy).
chk.validatelist(prx)
# Enable garbage collection.
gc.enable()
time.sleep(30)
# Progress counters.
print('Sucesso: '+str(chk.getsucesscount()))
print('Falhas: '+str(chk.getfailcount()))
print('Total de Proxys: '+str(chk.getproxycount()))
print('Restam: '+str(chk.getproxycount()-(chk.getsucesscount()+chk.getfailcount())))
# Validated proxy list.
print(chk.getproxylist())
del prx
del chk
print('Classes eliminadas.')
exit(0) | 19.25 | 84 | 0.730159 |
6e33da3d320ddccf5c2863568bc4b5fb0505e125 | 577 | py | Python | euler.py | user3719431/tna_lab1 | 183c34d927c39f502fea7d6a81f2945104d7b75b | [
"MIT"
] | null | null | null | euler.py | user3719431/tna_lab1 | 183c34d927c39f502fea7d6a81f2945104d7b75b | [
"MIT"
] | null | null | null | euler.py | user3719431/tna_lab1 | 183c34d927c39f502fea7d6a81f2945104d7b75b | [
"MIT"
] | null | null | null | import math as m
def yakobi(a, n, k=1):
    """Return k times the Jacobi symbol (a/n).

    n must be a positive odd integer.  The previous version discarded the
    results of its recursive calls and contained a broken expression
    (``(n - 1)(a - 1)`` calls an int), so it is replaced with the standard
    iterative binary algorithm.

    Raises:
        ValueError: if n is not a positive odd integer.
    """
    if n <= 0 or n % 2 == 0:
        raise ValueError('n must be a positive odd integer')
    result = k
    a %= n
    while a != 0:
        # Factor out twos: (2/n) = -1 iff n is congruent to 3 or 5 mod 8.
        while a % 2 == 0:
            a //= 2
            if n % 8 in (3, 5):
                result = -result
        # Quadratic reciprocity: flip the sign iff both are 3 mod 4.
        a, n = n, a
        if a % 4 == 3 and n % 4 == 3:
            result = -result
        a %= n
    # (a/n) = 0 whenever gcd(a, n) != 1.
    return result if n == 1 else 0


def euler_test(p, x):
    """Euler's criterion: check x**((p-1)/2) == (x/p) modulo p.

    Returns True when p behaves like an odd prime for the base x (an Euler
    probable-prime test), False otherwise.
    """
    residue = pow(x, (p - 1) // 2, p)  # modular pow; // keeps it an int
    if residue == p - 1:
        residue = -1                   # p - 1 represents -1 modulo p
    return residue == yakobi(x, p, 1)
6e34180a8de5ed1a630ffd86a9a830130bbd1076 | 3,787 | py | Python | src/b2d/hud_b2d.py | VgTajdd/neuroevolver | 248c96b25ad936e15cfffc7a4223926db83ad540 | [
"MIT"
] | null | null | null | src/b2d/hud_b2d.py | VgTajdd/neuroevolver | 248c96b25ad936e15cfffc7a4223926db83ad540 | [
"MIT"
] | null | null | null | src/b2d/hud_b2d.py | VgTajdd/neuroevolver | 248c96b25ad936e15cfffc7a4223926db83ad540 | [
"MIT"
] | null | null | null | ## ========================================================================= ##
## Copyright (c) 2019 Agustin Durand Diaz. ##
## This code is licensed under the MIT license. ##
## hud_b2d.py ##
## ========================================================================= ##
from core.hud_base import HudBase
from enums import ScreenType, SimulationType
from core.utils import getPathWithoutExtension, existsFile, getImageSize
import settings
class HudB2D(HudBase):
    """Base HUD for the Box2D simulation screens."""

    def __init__(self, width, height):
        HudBase.__init__(self, width, height)

    def init(self):
        # Static widgets: FPS readout, screen title and a back button.
        self.showFPS()
        self.addLabel((80, 30), (150, 30), 'Box2D')
        self.addButton((725, 40), (100, 50), 'Back', self.gotoMetamap)

    def gotoMetamap(self):
        # Button callback: switch the active screen to the meta map.
        self.m_manager.gotoScreen(ScreenType.META_MAP)
class HudB2DNEATDIP(HudB2D):
    """HUD for the NEAT double-inverted-pendulum (DIP) screen."""

    def __init__(self, width, height, params):
        # params: dict read below for 'isTraining', 'currentStep' (while
        # training) and 'genomePath' (while not training).
        self.params = params
        HudB2D.__init__(self, width, height)

    def init(self):
        self.showFPS()
        self.addLabel((75, 15), (150, 30), 'NEAT DIP')
        if 'isTraining' in self.params and self.params['isTraining']:
            # Training mode: show progress as current/total evolving steps.
            self.addLabel((75, 45), (150, 30),
                str(self.params['currentStep']) + "/" + str(settings.NEAT_DIP_EVOLVING_STEPS))
        else:
            # Playback mode: show the genome network image, capped at 450 px
            # per side while keeping the original size below that limit.
            imgPath = self.params['genomePath']
            imgPath = getPathWithoutExtension(imgPath) + '.png'
            if existsFile(imgPath):
                size = getImageSize(imgPath)
                w, h = size
                if size[0] > 450:
                    w = 450
                if size[1] > 450:
                    h = 450
                self.addImage(((w/2) + 30, (h/2) + 30), (w, h), imgPath)
        self.addButton((770, 15), (60, 30), 'Back', self.gotoMetamap, alpha = 200)
        self.addButton((670, 15), (60, 30), 'Reset', self.resetDIP, alpha = 200)

    def resetDIP(self):
        # Restart the DIP simulation from scratch.
        self.m_manager.gotoScreen(ScreenType.SIMULATION, {'simulationType': SimulationType.NEAT_B2D_DIP})
class HudB2DNEATTIP(HudB2D):
    """HUD for the NEAT triple-inverted-pendulum (TIP) screen."""

    def __init__(self, width, height, params):
        # params: dict read below for 'isTraining' and 'currentStep'.
        self.params = params
        HudB2D.__init__(self, width, height)

    def init(self):
        self.showFPS()
        self.addLabel((75, 15), (150, 30), 'NEAT TIP')
        if 'isTraining' in self.params and self.params['isTraining']:
            # Training mode: show progress as current/total evolving steps.
            self.addLabel((75, 45), (150, 30),
                str(self.params['currentStep']) + "/" + str(settings.NEAT_TIP_EVOLVING_STEPS))
        else:
            # Playback mode: show the fixed network image if present.
            imgPath = 'net_neat_tip.png'
            if existsFile(imgPath):
                size = getImageSize(imgPath)
                self.addImage(((size[0]/2) + 30, (size[1]/2) + 30), size, imgPath)
        self.addButton((770, 15), (60, 30), 'Back', self.gotoMetamap, alpha = 200)
class HudB2DNEATWalker(HudB2D):
    """HUD for the NEAT walker simulation screen."""

    def __init__(self, width, height, params):
        # params: dict read below for 'isTraining' and 'currentStep'.
        self.params = params
        HudB2D.__init__(self, width, height)

    def init(self):
        self.showFPS()
        self.addLabel((75, 15), (150, 30), 'NEAT Walker')
        if 'isTraining' in self.params and self.params['isTraining']:
            # Training mode: show progress as current/total evolving steps.
            self.addLabel((75, 45), (150, 30),
                str(self.params['currentStep']) + "/" + str(settings.NEAT_WALKER_EVOLVING_STEPS))
        else:
            # Playback mode: show the fixed network image if present.
            imgPath = 'net_neat_walker.png'
            if existsFile(imgPath):
                size = getImageSize(imgPath)
                self.addImage(((size[0]/2) + 30, (size[1]/2) + 30), size, imgPath)
        self.addButton((770, 15), (60, 30), 'Back', self.gotoMetamap, alpha = 200)
6e358277ee18f33ce73fddfacb850dc985cb0977 | 1,958 | py | Python | grblc/search/gcn/parser/combine.py | youngsm/adsgrb | a89b56b371888deb67788a9f5a91300b281784a6 | [
"MIT"
] | null | null | null | grblc/search/gcn/parser/combine.py | youngsm/adsgrb | a89b56b371888deb67788a9f5a91300b281784a6 | [
"MIT"
] | null | null | null | grblc/search/gcn/parser/combine.py | youngsm/adsgrb | a89b56b371888deb67788a9f5a91300b281784a6 | [
"MIT"
] | null | null | null | def get_final_txt(grb, tables, sentences, output_path):
"""
Combine the data from [grb]_final_sentences.txt and [grb]_final_tables.txt.
If a piece of data in tables and another piece in sentecnes are originially
from the same GCN. Put them in the same GCN in [grb]_final.txt.
"""
# Avoid modifying the data for the later use.
tables = tables.copy()
sentences = sentences.copy()
# Open up the file.
file = open(f"{output_path}{grb}/{grb}_final.txt", 'w')
# Loop through the sentences and for each sentence, check if there is any table
# that are originially from the same GCN.
for sentence in sentences:
# The number of the GCN.
num = sentence['number']
# The final string that we dumps into the text file.
result = "=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=\n\n"
result += f"GCN Number: {sentence['number']}\n\n"
result += f"SENTENCE DATA:\n\n{sentence['sentences']}\n\n"
# The variable to help check how many tables are from the same GCN.
table_with_the_same_number = 0
# Loop through the tables to see if there are any tables in the same GCN.
for idx, table in enumerate(tables):
# If we find any tables in the same GCN.
if table['number'] == num:
if table_with_the_same_number == 0:
result += "TABLE DATA:\n\n"
table_with_the_same_number += 1
result += '\n'.join(table['table']) + '\n\n'
tables.pop(idx)
file.write(result)
# Write the remaining tables to the text file.
for table in tables:
result = "=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=\n\n"
result += f"GCN Number: {table['number']}\n"
result += "TABLE DATA:\n\n" + '\n'.join(table['table']) + '\n\n'
file.write(result)
| 36.943396 | 88 | 0.550051 |
6e35f3a7bd64997a4e302cd1d8e7454d8298b774 | 972 | py | Python | hardware/headband.py | davidji/roundbot | 2ca34a83c9feb3331f1b818106f06b3182c4970e | [
"Apache-2.0"
] | null | null | null | hardware/headband.py | davidji/roundbot | 2ca34a83c9feb3331f1b818106f06b3182c4970e | [
"Apache-2.0"
] | null | null | null | hardware/headband.py | davidji/roundbot | 2ca34a83c9feb3331f1b818106f06b3182c4970e | [
"Apache-2.0"
] | null | null | null | from solid import *
from solid.utils import *
import util
from util import *
from math import pi
def headband(r1=64.0, r2=85.0, t=3.0, w=12.0):
    """Build the solidpython model of a headband.

    Parameters (presumably millimetres -- TODO confirm): r1 inner radius,
    r2 radius of the rear ear arcs, t wall thickness, w band width.
    """
    # One comb tooth: a 1x1 square capped with a half circle, placed at the
    # inner radius.
    combe = right(r1-t/2)(linear_extrude(1)(square([1,1], center=True) + left(0.5)(circle(d=1))))
    combe_spacing = 3.0 # mm
    combe_count = pi*r1/combe_spacing
    # Replicate the tooth around the front half of the band.
    combes = union()(*[ rotate([0,0, i*180.0/combe_count])(combe) for i in range(-int(combe_count/2), int(combe_count/2))])
    def arcshell(r, ends):
        # Thin ring segment of thickness t/3 at radius r between the given
        # start/end angles (degrees).
        start, end = ends
        return (arc(rad=r+t/6, start_degrees = start, end_degrees=end) -
                arc(rad=r-t/6, start_degrees = start, end_degrees=end))
    # Front arc plus two offset rear arcs, thickened by offset() and
    # extruded to the band width, with the comb teeth unioned in.
    return (linear_extrude(w)(
        offset(r=t/3)(
            arcshell(r1, (-90, 90)) +
            forward(r2 - r1)(arcshell(r2, (-130, -90))) +
            back(r2 - r1)(arcshell(r2, (90, 130))))) +
        combes)
def export_scad():
    # Render the headband model to an OpenSCAD file via the project helper.
    util.save('headband', headband())

if __name__ == '__main__':
    export_scad()
| 31.354839 | 123 | 0.588477 |
6e362218fdee0a3ed3f2a33dd6f1acddc1fd9111 | 106 | py | Python | native_shortuuid/apps.py | foundertherapy/django-nativeshortuuidfield | 47e5a5d5c0f4caedbadb88ed6ac279f513ae522a | [
"MIT"
] | 5 | 2020-09-30T00:21:05.000Z | 2022-01-10T08:56:47.000Z | native_shortuuid/apps.py | foundertherapy/django-nativeshortuuidfield | 47e5a5d5c0f4caedbadb88ed6ac279f513ae522a | [
"MIT"
] | 1 | 2020-03-11T15:39:44.000Z | 2020-03-11T15:39:44.000Z | native_shortuuid/apps.py | foundertherapy/django-nativeshortuuidfield | 47e5a5d5c0f4caedbadb88ed6ac279f513ae522a | [
"MIT"
] | 1 | 2021-03-03T12:49:52.000Z | 2021-03-03T12:49:52.000Z | from django.apps import AppConfig
class NativeShortuuidConfig(AppConfig):
    """Django application configuration for the native_shortuuid package."""
    name = 'native_shortuuid'
| 17.666667 | 39 | 0.792453 |
6e364089d40bdc8f90fe2c5aa5081ef11b937f59 | 3,482 | py | Python | climlab/dynamics/meridional_advection_diffusion.py | nfeldl/climlab | 2cabb49e2c3f54c1795f24338ef5ee44e49fc7e7 | [
"BSD-3-Clause",
"MIT"
] | 160 | 2015-02-25T15:56:37.000Z | 2022-03-14T23:51:23.000Z | climlab/dynamics/meridional_advection_diffusion.py | nfeldl/climlab | 2cabb49e2c3f54c1795f24338ef5ee44e49fc7e7 | [
"BSD-3-Clause",
"MIT"
] | 137 | 2015-12-18T17:39:31.000Z | 2022-02-04T20:50:53.000Z | climlab/dynamics/meridional_advection_diffusion.py | nfeldl/climlab | 2cabb49e2c3f54c1795f24338ef5ee44e49fc7e7 | [
"BSD-3-Clause",
"MIT"
] | 54 | 2015-04-28T05:57:39.000Z | 2022-02-17T08:15:11.000Z | r"""General solver of the 1D meridional advection-diffusion equation on the sphere:
.. math::
\frac{\partial}{\partial t} \psi(\phi,t) &= -\frac{1}{a \cos\phi} \frac{\partial}{\partial \phi} \left[ \cos\phi ~ F(\phi,t) \right] \\
F &= U(\phi) \psi(\phi) -\frac{K(\phi)}{a} ~ \frac{\partial \psi}{\partial \phi}
for a state variable :math:`\psi(\phi,t)`, arbitrary diffusivity :math:`K(\phi)`
in units of :math:`x^2 ~ t^{-1}`, and advecting velocity :math:`U(\phi)`.
:math:`\phi` is latitude and :math:`a` is the Earth's radius (in meters).
:math:`K` and :math:`U` can be scalars,
or optionally vector *specified at grid cell boundaries*
(so their lengths must be exactly 1 greater than the length of :math:`\phi`).
:math:`K` and :math:`U` can be modified by the user at any time
(e.g., after each timestep, if they depend on other state variables).
A fully implicit timestep is used for computational efficiency. Thus the computed
tendency :math:`\frac{\partial \psi}{\partial t}` will depend on the timestep.
In addition to the tendency over the implicit timestep,
the solver also calculates several diagnostics from the updated state:
- ``diffusive_flux`` given by :math:`-\frac{K(\phi)}{a} ~ \frac{\partial \psi}{\partial \phi}` in units of :math:`[\psi]~[x]`/s
- ``advective_flux`` given by :math:`U(\phi) \psi(\phi)` (same units)
- ``total_flux``, the sum of advective, diffusive and prescribed fluxes
- ``flux_convergence`` (or instantanous scalar tendency) given by the right hand side of the first equation above, in units of :math:`[\psi]`/s
Non-uniform grid spacing is supported.
The state variable :math:`\psi` may be multi-dimensional, but the diffusion
will operate along the latitude dimension only.
"""
from __future__ import division
import numpy as np
from .advection_diffusion import AdvectionDiffusion, Diffusion
from climlab import constants as const
class MeridionalAdvectionDiffusion(AdvectionDiffusion):
    """A parent class for meridional advection-diffusion processes.
    """
    def __init__(self,
                 K=0.,
                 U=0.,
                 use_banded_solver=False,
                 prescribed_flux=0.,
                 **kwargs):
        super(MeridionalAdvectionDiffusion, self).__init__(K=K, U=U,
            diffusion_axis='lat', use_banded_solver=use_banded_solver, **kwargs)
        # Conversion of delta from degrees (grid units) to physical length units
        phi_stag = np.deg2rad(self.lat_bounds)  # latitude at cell bounds, radians
        phi = np.deg2rad(self.lat)              # latitude at cell centers, radians
        self._Xcenter[...,:] = phi*const.a      # arc length x = a * phi
        self._Xbounds[...,:] = phi_stag*const.a
        # cos(latitude) weights implement the spherical metric factor.
        self._weight_bounds[...,:] = np.cos(phi_stag)
        self._weight_center[...,:] = np.cos(phi)
        # Now properly compute the weighted advection-diffusion matrix
        # (re-assigning K and U after the geometry is set -- presumably the
        # parent rebuilds its operator on assignment; verify upstream).
        self.prescribed_flux = prescribed_flux
        self.K = K
        self.U = U
class MeridionalDiffusion(MeridionalAdvectionDiffusion):
    """A parent class for meridional diffusion-only processes,
    with advection set to zero.
    Otherwise identical to the parent class.
    """
    def __init__(self,
                 K=0.,
                 use_banded_solver=False,
                 prescribed_flux=0.,
                 **kwargs):
        # Just initialize the AdvectionDiffusion class with U=0
        super(MeridionalDiffusion, self).__init__(
            U=0.,
            K=K,
            prescribed_flux=prescribed_flux,
            use_banded_solver=use_banded_solver, **kwargs)
| 42.463415 | 143 | 0.661401 |
6e369cedee85dd513db727dff183f7bdbc8263b5 | 1,624 | py | Python | gnes/service/grpc.py | micro-pixel/gnes | 388d1ba718ec04eedaaff3ce34da43689c197ee7 | [
"Apache-2.0"
] | 1 | 2019-10-23T03:41:57.000Z | 2019-10-23T03:41:57.000Z | gnes/service/grpc.py | cmy9068/gnes | 44a54be4c80108ac65b2450b4af8deded6da3339 | [
"Apache-2.0"
] | null | null | null | gnes/service/grpc.py | cmy9068/gnes | 44a54be4c80108ac65b2450b4af8deded6da3339 | [
"Apache-2.0"
] | 1 | 2020-10-28T15:07:36.000Z | 2020-10-28T15:07:36.000Z | # Tencent is pleased to support the open source community by making GNES available.
#
# Copyright (C) 2019 THL A29 Limited, a Tencent company. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import grpc
from .base import BaseService as BS, MessageHandler
from ..helper import PathImporter
from ..proto import gnes_pb2
class GRPCService(BS):
    """Service that forwards every incoming message to a remote gRPC stub."""

    handler = MessageHandler(BS.handler)

    def post_init(self):
        # Open an insecure channel to the configured endpoint, raising the
        # send/receive message-size limits to args.max_message_size.
        self.channel = grpc.insecure_channel(
            '%s:%s' % (self.args.grpc_host, self.args.grpc_port),
            options=[('grpc.max_send_message_length', self.args.max_message_size),
                     ('grpc.max_receive_message_length', self.args.max_message_size)])
        # Load the generated pb2 / pb2_grpc modules from user-supplied paths.
        m = PathImporter.add_modules(self.args.pb2_path, self.args.pb2_grpc_path)
        # build stub
        self.stub = getattr(m, self.args.stub_name)(self.channel)

    def close(self):
        # Release the channel before the base-class shutdown.
        self.channel.close()
        super().close()

    @handler.register(NotImplementedError)
    def _handler_default(self, msg: 'gnes_pb2.Message'):
        # Fallback handler: delegate the message to the configured RPC and
        # yield its response.
        yield getattr(self.stub, self.args.api_name)(msg)
| 36.088889 | 86 | 0.711823 |
6e37060290900c339e29bf4d74171d48cbea8c69 | 3,508 | py | Python | dhost/logs/models.py | dhost-project/dhost | ca6a4a76a737174b24165e20edeb1d1019a9424b | [
"MIT"
] | null | null | null | dhost/logs/models.py | dhost-project/dhost | ca6a4a76a737174b24165e20edeb1d1019a9424b | [
"MIT"
] | 67 | 2021-07-06T11:50:25.000Z | 2021-10-14T13:45:51.000Z | dhost/logs/models.py | dhost-project/dhost | ca6a4a76a737174b24165e20edeb1d1019a9424b | [
"MIT"
] | null | null | null | import uuid
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils import timezone
from django.utils.timesince import timesince
from django.utils.translation import gettext_lazy as _
from dhost.dapps.models import Dapp
def get_obj_model(obj):
    # Return the ContentType row describing obj's model class.
    return ContentType.objects.get_for_model(obj)
class DappLogManager(models.Manager):
    """Manager providing a shortcut for recording dapp log entries."""

    def log_action(self, obj, dapp, action_flag, user=None):
        # Create a log entry referring to `obj` through the generic
        # content_type / object_id pair.
        return self.create(
            user=user,
            content_type=get_obj_model(obj),
            object_id=obj.pk,
            action_flag=action_flag,
            dapp=dapp,
        )
class ActionFlags(models.TextChoices):
    """Loggable dapp-related events: (stored value, human-readable label)."""

    OTHER = "other", _("Other")
    DAPP_ADDITION = "dapp_add", _("Dapp created")
    DAPP_CHANGE = "dapp_change", _("Dapp updated")
    BUNDLE_ADDITION = "bundle_add", _("Bundle added")
    BUNDLE_DELETION = "bundle_del", _("Bundle removed")
    AUTO_DEPLOY_START = "auto_deploy_start", _("Auto deployment started")
    DEPLOY_START = "deploy_start", _("Deployment started")
    DEPLOY_SUCCESS = "deploy_success", _("Deployment successful")
    DEPLOY_FAIL = "deploy_fail", _("Deployment failed")
    BUILD_OPTIONS_ADDITION = "build_opt_add", _("Build options created")
    BUILD_OPTIONS_CHANGE = "build_opt_change", _("Build options updated")
    BUILD_OPTIONS_DELETION = "build_opt_del", _("Build options removed")
    AUTO_BUILD_START = "auto_build_start", _("Auto build started")
    BUILD_START = "build_start", _("Build started")
    BUILD_SUCCESS = "build_success", _("Build successful")
    BUILD_FAIL = "build_fail", _("Build failed")
    ENV_VAR_ADDITION = "env_var_add", _("New environment variable")
    ENV_VAR_CHANGE = "env_var_change", _("Environment variable updated")
    ENV_VAR_DELETION = "env_var_del", _("Environment variable removed")
    GITHUB_OPTIONS_ADDITION = "github_opt_add", _("Github options created")
    GITHUB_OPTIONS_CHANGE = "github_opt_change", _("Github options changed")
    GITHUB_OPTIONS_DELETION = "github_opt_del", _("Github options removed")
class DappLog(models.Model):
    """Audit-log entry attached to a dapp."""

    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
    # The acting user; SET_NULL keeps the log row if the account is deleted.
    user = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
    )
    # Deleting the dapp cascades to its log entries.
    dapp = models.ForeignKey(
        Dapp,
        on_delete=models.CASCADE,
        related_name="logs",
        related_query_name="logs",
    )
    # Generic reference (content_type + object_id) to the touched object.
    content_type = models.ForeignKey(
        ContentType,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
    )
    object_id = models.TextField(null=True, blank=True)
    action_flag = models.CharField(
        max_length=20,
        choices=ActionFlags.choices,
        default=ActionFlags.OTHER,
    )
    change_message = models.TextField(blank=True)
    action_time = models.DateTimeField(default=timezone.now, editable=False)
    objects = DappLogManager()

    class Meta:
        verbose_name = _("Dapp log entry")
        verbose_name_plural = _("Dapp log entries")
        ordering = ["-action_time"]

    def __str__(self):
        data = {
            "user": self.user,
            "dapp": self.dapp,
            "action_flag": self.action_flag,
            "timesince": self.timesince(),
        }
        return "{user} {dapp} {action_flag} {timesince} ago".format(**data)

    def timesince(self, now=None):
        # Human-readable age of the entry.  The bare name `timesince` here
        # resolves to the imported django.utils helper, not to this method.
        return timesince(self.action_time, now)
| 35.434343 | 79 | 0.679019 |
6e397c403213c314186ad9c8dc4d66123671cfea | 620 | py | Python | Day14/main.py | dloibl/AOC2021 | 80672a7ee8ebc1a7970c155e4e15e0ed2351e085 | [
"MIT"
] | null | null | null | Day14/main.py | dloibl/AOC2021 | 80672a7ee8ebc1a7970c155e4e15e0ed2351e085 | [
"MIT"
] | null | null | null | Day14/main.py | dloibl/AOC2021 | 80672a7ee8ebc1a7970c155e4e15e0ed2351e085 | [
"MIT"
] | null | null | null | data = open("input.txt", "r").readlines()
# Advent of Code 2021, day 14 (part 1): polymer pair insertion.
polymer = data[0]
pair_insertion = {}
# Rules start after the blank separator line; each reads "AB -> C".
for line in data[2:]:
    token, replacement = line.strip().split(" -> ")
    pair_insertion[token] = replacement

result = [i for i in polymer.strip()]
for step in range(0, 10):
    expanded = []  # renamed from `next`, which shadowed the builtin
    for i, si in enumerate(result):
        expanded.append(si)
        if i < len(result) - 1:
            # Insert the rule's output between each adjacent pair.
            expanded.append(pair_insertion[result[i] + result[i + 1]])
    result = expanded

# Count every element present in the final polymer.  The previous version
# only counted characters appearing as a rule output, which can miss
# template elements that no rule ever inserts.
count = [result.count(a) for a in set(result)]
print("The answer of part 1 is", max(count) - min(count))
| 23.846154 | 63 | 0.596774 |
6e399f9876b8a0c8affd85f404dc546dcab1961f | 1,199 | py | Python | raster/migrations/0006_auto_20141016_0522.py | bpneumann/django-raster | 74daf9d396f2332a2cd83723b7330e6b10d73b1c | [
"BSD-3-Clause"
] | null | null | null | raster/migrations/0006_auto_20141016_0522.py | bpneumann/django-raster | 74daf9d396f2332a2cd83723b7330e6b10d73b1c | [
"BSD-3-Clause"
] | null | null | null | raster/migrations/0006_auto_20141016_0522.py | bpneumann/django-raster | 74daf9d396f2332a2cd83723b7330e6b10d73b1c | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add tile index columns (tilex/tiley/tilez) to RasterTile and make
    `level` nullable and indexed."""

    dependencies = [
        ('raster', '0005_auto_20141014_0955'),
    ]
    operations = [
        migrations.AddField(
            model_name='rastertile',
            name='tilex',
            field=models.IntegerField(null=True, db_index=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='rastertile',
            name='tiley',
            field=models.IntegerField(null=True, db_index=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='rastertile',
            name='tilez',
            # Zoom levels are restricted to the 1..18 range.
            field=models.IntegerField(db_index=True, null=True, choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7), (8, 8), (9, 9), (10, 10), (11, 11), (12, 12), (13, 13), (14, 14), (15, 15), (16, 16), (17, 17), (18, 18)]),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='rastertile',
            name='level',
            field=models.IntegerField(null=True, db_index=True),
        ),
    ]
| 31.552632 | 236 | 0.539616 |
6e3ac431c3e1e4eb2271fa87cec379de652a2355 | 588 | py | Python | tests/tests/test_analysis/test_utils.py | klavinslab/coral | 17f59591211562a59a051f474cd6cecba4829df9 | [
"MIT"
] | 34 | 2015-12-26T22:13:51.000Z | 2021-11-17T11:46:37.000Z | tests/tests/test_analysis/test_utils.py | klavinslab/coral | 17f59591211562a59a051f474cd6cecba4829df9 | [
"MIT"
] | 13 | 2015-09-11T23:27:51.000Z | 2018-06-25T20:44:28.000Z | tests/tests/test_analysis/test_utils.py | klavinslab/coral | 17f59591211562a59a051f474cd6cecba4829df9 | [
"MIT"
] | 14 | 2015-10-08T17:08:48.000Z | 2022-02-22T04:25:54.000Z | '''
Tests for utils submodule of the analysis module.
'''
from nose.tools import assert_equal, assert_raises
from coral import analysis, DNA, RNA, Peptide
def test_utils():
    """Check sequence_type() classification for DNA, RNA and peptide inputs,
    and that a plain string is rejected."""
    dna_seq = DNA('ATAGCGATACGAT')
    rna_seq = RNA('AUGCGAUAGCGAU')
    pep_seq = Peptide('msvkkkpvqg')
    plain_str = 'msvkkkpvgq'
    for expected, seq in (('dna', dna_seq), ('rna', rna_seq), ('peptide', pep_seq)):
        assert_equal(analysis.utils.sequence_type(seq), expected)
    assert_raises(Exception, analysis.utils.sequence_type, plain_str)
| 29.4 | 71 | 0.748299 |
6e3b1af1bee45ddc7a412b33a2fead806c9ec302 | 1,765 | py | Python | djangorecipebook/templating.py | tkhyn/djangorecipebook | 2cbb3d46631630e2c7a3c511b504de2088aac115 | [
"MIT"
] | null | null | null | djangorecipebook/templating.py | tkhyn/djangorecipebook | 2cbb3d46631630e2c7a3c511b504de2088aac115 | [
"MIT"
] | null | null | null | djangorecipebook/templating.py | tkhyn/djangorecipebook | 2cbb3d46631630e2c7a3c511b504de2088aac115 | [
"MIT"
] | null | null | null | """
Carry out template-based replacements in project files
"""
import os
import sys
from string import Template
def replace_name(path, mapping):
    """
    Substitute ``${...}`` placeholders in the last component of *path*.

    When the file/directory name contains a template marker the entry is
    renamed on disk and the new path is returned; otherwise the original
    path is returned untouched.
    """
    directory, name = os.path.split(path)
    if '${' not in name:
        return path
    renamed = os.path.join(directory, Template(name).substitute(mapping))
    os.rename(path, renamed)
    return renamed
def replace_ctnt(f, mapping):
    """
    Substitute ``${...}`` placeholders inside the file's content.

    Non-files (directories, missing paths) are silently skipped.  Any
    failure while templating is reported on stderr and re-raised.
    """
    if not os.path.isfile(f):
        return
    try:
        # 'r+' lets us read the template and rewrite the file in place; the
        # context manager guarantees the handle is closed even on error.
        # (The previous version closed the handle in a ``finally`` clause,
        # which raised NameError when ``open()`` itself failed.)
        with open(f, 'r+') as t_file:
            t = Template(t_file.read())
            t_file.seek(0)
            t_file.write(t.substitute(mapping))
            t_file.truncate()
    except Exception as e:
        sys.stderr.write("""
ERROR: while running template engine on file %s
""" % f)
        raise e
def process(path, mapping):
    """
    Apply both templating operations (name, then content) to one path.
    """
    renamed = replace_name(path, mapping)
    replace_ctnt(renamed, mapping)
def process_tree(directory, mapping):
    """
    Performs all templating operations on the directory and its children
    """
    directory = replace_name(directory, mapping)
    for dirpath, dirnames, filenames in os.walk(directory):
        for f in filenames:
            process(os.path.join(dirpath, f), mapping)
        # Rename template-named sub-directories so os.walk descends into the
        # renamed entries.
        for d in dirnames:
            # NOTE(review): removing from ``dirnames`` while iterating it
            # skips every other entry, and ``replace_name`` returns a full
            # path while os.walk expects bare directory names in this list --
            # both look like latent bugs; confirm the intended behaviour.
            dirnames.remove(d)
            dirnames.append(replace_name(os.path.join(dirpath, d), mapping))
| 25.214286 | 77 | 0.607932 |
6e3c23f713b7a54ba361ed5b6913012fed253e5e | 1,747 | py | Python | toHash.py | ElTarget/- | fcf774386514a7f070be25d643be7bbf1a92af1e | [
"MIT"
] | 1 | 2022-02-22T02:39:52.000Z | 2022-02-22T02:39:52.000Z | toHash.py | ElTarget/- | fcf774386514a7f070be25d643be7bbf1a92af1e | [
"MIT"
] | 1 | 2022-03-08T04:46:17.000Z | 2022-03-08T04:46:17.000Z | toHash.py | ElTarget/get_malware_bazaar | fcf774386514a7f070be25d643be7bbf1a92af1e | [
"MIT"
] | null | null | null | import hashlib
import os
# Compute the MD5 digest of a string
def str2md5(content=None):
    """Return the hex MD5 digest of *content*; '' for empty/None input."""
    if not content:
        return ''
    return hashlib.md5(content.encode()).hexdigest()
# Compute the SHA-256 digest of a string
def str2sha256(content=None):
    """Return the hex SHA-256 digest of *content*; '' for empty/None input."""
    if not content:
        return ''
    return hashlib.sha256(content.encode()).hexdigest()
# Compute the MD5 digest of a file
def file2md5(filename):
    """Return the hex MD5 digest of the file, or '' when it is missing
    or unreadable (the error is printed, mirroring the other helpers)."""
    hash_value = ''
    if os.path.exists(filename):
        try:
            with open(filename, 'rb') as handle:
                hash_value = hashlib.md5(handle.read()).hexdigest()
        except Exception as e:
            print(e)
    return hash_value
def file2sha256(filename):
    """Return the hex SHA-256 digest of the file, or '' when it is missing
    or unreadable (the error is printed, mirroring the other helpers)."""
    hash_value = ''
    if os.path.exists(filename):
        try:
            with open(filename, 'rb') as handle:
                hash_value = hashlib.sha256(handle.read()).hexdigest()
        except Exception as e:
            print(e)
    return hash_value
def file2sha1(filename):
    """Return the hex SHA-1 digest of the file, or '' when it is missing
    or unreadable (the error is printed, mirroring the other helpers)."""
    hash_value = ''
    if os.path.exists(filename):
        try:
            with open(filename, 'rb') as handle:
                hash_value = hashlib.sha1(handle.read()).hexdigest()
        except Exception as e:
            print(e)
    return hash_value
def file2sha3(filename):
    """Return the hex SHA3-384 digest of the file (note: specifically the
    384-bit variant), or '' when the file is missing or unreadable."""
    hash_value = ''
    if os.path.exists(filename):
        try:
            with open(filename, 'rb') as handle:
                hash_value = hashlib.sha3_384(handle.read()).hexdigest()
        except Exception as e:
            print(e)
    return hash_value
| 23.293333 | 46 | 0.567258 |
6e3d50e4fe09a809ba48df4ba35365fe114afae0 | 609 | py | Python | final/VolleyballClubHouse/backend/Python/fb_post_scraper.py | Sabalone87/wp1092 | 3da36f3f3ae7ebc175bf0b015838de2928b3b5b9 | [
"MIT"
] | null | null | null | final/VolleyballClubHouse/backend/Python/fb_post_scraper.py | Sabalone87/wp1092 | 3da36f3f3ae7ebc175bf0b015838de2928b3b5b9 | [
"MIT"
] | null | null | null | final/VolleyballClubHouse/backend/Python/fb_post_scraper.py | Sabalone87/wp1092 | 3da36f3f3ae7ebc175bf0b015838de2928b3b5b9 | [
"MIT"
] | null | null | null | import os
import sys
from dotenv import load_dotenv
from facebook_scraper import get_posts
load_dotenv()
print ("hi")
result = []
# Scrape one page of posts from the Facebook group configured through the
# environment (FacebookGroupId / FacebookUser / FacebookPassword).
for post in get_posts(group=os.environ.get("FacebookGroupId"), pages=1,
                      credentials=(os.environ.get("FacebookUser"), os.environ.get("FacebookPassword"))):
    # keep only the fields the consumer needs
    result.append({
        "post_id": post["post_id"],
        "text": post["text"],
        "user_id": post["user_id"],
        "username": post["username"],
        "time": post["time"]
    })
    print ({ "post_id": post['post_id'] })
    # print (post)
print (result)
# Flush so a parent process reading our stdout sees the output immediately
# (this lives under backend/Python -- presumably spawned by Node; confirm).
sys.stdout.flush()
6e3ec2b42c30f989802844d030b6a4725567d1ae | 442 | py | Python | config.py | benperove/oneliner.sh | 0c6eb25f2dd32cdd5cc275ef5849b5e12c76e9db | [
"Apache-2.0"
] | 4 | 2019-02-15T01:35:17.000Z | 2020-07-08T17:47:33.000Z | config.py | benperove/oneliner.sh | 0c6eb25f2dd32cdd5cc275ef5849b5e12c76e9db | [
"Apache-2.0"
] | 1 | 2019-05-24T21:00:37.000Z | 2019-05-24T21:00:37.000Z | config.py | benperove/oneliner.sh | 0c6eb25f2dd32cdd5cc275ef5849b5e12c76e9db | [
"Apache-2.0"
] | 1 | 2020-04-10T08:03:16.000Z | 2020-04-10T08:03:16.000Z | import os
# GitHub OAuth2 login endpoints
SITE = 'https://api.github.com'
CALLBACK = 'https://oneliner.sh/oauth2'
AUTHORIZE_URL = 'https://github.com/login/oauth/authorize'
TOKEN_URL = 'https://github.com/login/oauth/access_token'
SCOPE = 'user'
# Redis connection settings (host must be provided via the environment)
REDIS_HOST = os.environ['REDIS_HOST']
#REDIS_HOST = 'localhost'
REDIS_PORT = 6379
REDIS_DB = 0
DATA_DIR = 'oneliners'
DEBUG = True
# Application paths
SUBMISSION_PATH = 'incoming'
| 26 | 61 | 0.68552 |
6e3f3c737da2c1c4948a6562ab3459af248d21f6 | 214 | py | Python | npt/utils/__init__.py | chbrandt/gpt-neanias | aa7c2e88972f9af280b7f02ee11170df6c967b55 | [
"MIT"
] | 2 | 2020-09-28T08:22:54.000Z | 2020-09-28T13:17:25.000Z | npt/utils/__init__.py | chbrandt/gpt-neanias | aa7c2e88972f9af280b7f02ee11170df6c967b55 | [
"MIT"
] | null | null | null | npt/utils/__init__.py | chbrandt/gpt-neanias | aa7c2e88972f9af280b7f02ee11170df6c967b55 | [
"MIT"
] | null | null | null | import json
from npt import log
from . import tmpdir
def read_geojson(filename):
    """
    Parse a GeoJSON file and return the decoded JSON object.
    """
    with open(filename, 'r') as handle:
        return json.load(handle)
| 14.266667 | 35 | 0.621495 |
6e3fe2c168f62972f11479c2284c380956d44257 | 6,351 | py | Python | apps/user/tests/user/test_users_crud.py | magocod/django_chat | 9c7f82a3fdaa7a8f2f34062d8803b4f33f8c07b7 | [
"MIT"
] | 1 | 2019-10-01T01:39:37.000Z | 2019-10-01T01:39:37.000Z | apps/user/tests/user/test_users_crud.py | magocod/django_chat | 9c7f82a3fdaa7a8f2f34062d8803b4f33f8c07b7 | [
"MIT"
] | 18 | 2019-12-14T15:09:56.000Z | 2022-01-02T16:22:41.000Z | apps/user/tests/user/test_users_crud.py | magocod/django_chat | 9c7f82a3fdaa7a8f2f34062d8803b4f33f8c07b7 | [
"MIT"
] | 1 | 2020-02-10T18:00:16.000Z | 2020-02-10T18:00:16.000Z | """
Tests for user creation and the user CRUD endpoints.
"""
# import json
from typing import Any, Dict
import pytest
from django.contrib.auth import get_user_model
from apps.user.serializers import UserHeavySerializer
# from django.contrib.auth.models import User
User = get_user_model()
pytestmark = [pytest.mark.django_db, pytest.mark.users_views]
@pytest.mark.users_crud
def test_create_user(admin_client):
    """
    An admin can create a regular user; the response mirrors the stored user.
    """
    data: Dict[str, Any] = {
        "username": "NEW",
        "email": "newemail@gmail.com",
        "password": "123",
        "first_name": "name",
        "last_name": "name2",
        "is_staff": False,
    }
    response = admin_client.post("/api/users/", data)
    serializer = UserHeavySerializer(User.objects.get(id=response.data["id"]),)
    assert response.status_code == 201
    assert serializer.data == response.data
@pytest.mark.users_crud
def test_not_allowed_to_create_user(user_client, public_client):
    """
    User creation is forbidden for regular users (403) and anonymous (401).
    """
    data: Dict[str, Any] = {
        "username": "NEW",
        "email": "newemail@gmail.com",
        "password": "123",
        "first_name": "name",
        "last_name": "name2",
        "is_staff": False,
    }
    response = user_client.post("/api/users/", data)
    assert response.status_code == 403
    response = public_client.post("/api/users/", data)
    assert response.status_code == 401
@pytest.mark.users_crud
def test_not_create_superuser(admin_client):
    """
    Requesting ``is_superuser=True`` is ignored: the user is created (201)
    but without superuser rights.
    """
    data: Dict[str, Any] = {
        "username": "superuser",
        "email": "newsuperemail@gmail.com",
        "password": "123",
        "first_name": "name",
        "last_name": "name2",
        "is_staff": True,
        "is_superuser": True,
    }
    response = admin_client.post("/api/users/", data)
    serializer = UserHeavySerializer(User.objects.get(id=response.data["id"]),)
    assert response.status_code == 201
    assert response.data == serializer.data
    assert not response.data["is_superuser"]
@pytest.mark.users_crud
def test_create_error_params(admin_client):
    """
    Posting malformed/missing fields yields a 400 validation error.
    """
    data: Dict[str, Any] = {
        "names": "NEW_USER",
        "email": "newemail@gmail.com",
    }
    response = admin_client.post("/api/users/", data)
    assert response.status_code == 400
@pytest.mark.users_crud
def test_create_error_duplicate(admin_client):
    """
    Creating the same user twice yields 400 on the duplicate request.
    """
    data: Dict[str, Any] = {
        "username": "NEW",
        "email": "newemail@gmail.com",
        "password": "123",
        "first_name": "name",
        "last_name": "name2",
        "is_staff": False,
    }
    admin_client.post("/api/users/", data)
    response = admin_client.post("/api/users/", data)
    assert response.status_code == 400
@pytest.mark.users_crud
def test_get_user(admin_client):
    """
    An admin can fetch a user by id; the payload matches the serializer.
    """
    response = admin_client.get("/api/user/" + str(1) + "/")
    serializer = UserHeavySerializer(User.objects.get(id=1))
    assert response.status_code == 200
    assert serializer.data == response.data
@pytest.mark.users_crud
def test_get_user_not_found(admin_client):
    """
    Fetching a non-existent user id returns 404.
    """
    response = admin_client.get("/api/user/" + str(1000) + "/")
    assert response.status_code == 404
@pytest.mark.users_crud
def test_update_user(admin_client):
    """
    PUT with valid fields updates the user and echoes the new data.
    """
    oldvalues = UserHeavySerializer(User.objects.get(id=1))
    newdata: Dict[str, Any] = {
        "username": "NEW",
        "first_name": "new name",
        "last_name": "new name2",
        "email": "update_email@django.com",
    }
    response = admin_client.put("/api/user/" + str(1) + "/", newdata)
    newvalues = UserHeavySerializer(User.objects.get(id=1))
    # assert response.data == 'yeah'
    assert response.status_code == 200
    assert newvalues.data != oldvalues.data
    assert newvalues.data == response.data
@pytest.mark.users_crud
def test_error_params_update_user(admin_client):
    """
    PUT with unknown fields returns 400 and leaves the user unchanged.
    """
    oldvalues = UserHeavySerializer(User.objects.get(id=1))
    newdata: Dict[str, Any] = {
        "usernames": "NEW",
        "first_namesss": "new name",
    }
    response = admin_client.put("/api/user/" + str(1) + "/", newdata)
    newvalues = UserHeavySerializer(User.objects.get(id=1))
    assert response.status_code == 400
    assert newvalues.data == oldvalues.data
@pytest.mark.users_crud
def test_delete_user(admin_client):
    """
    An admin can delete a regular user (204 No Content).
    """
    response = admin_client.delete("/api/user/" + str(2) + "/")
    assert response.status_code == 204
@pytest.mark.users_crud
def test_not_allowed_to_delete_user(user_client, public_client):
    """
    Deletion is forbidden for regular users (403) and anonymous (401).
    """
    response = user_client.delete("/api/user/" + str(4) + "/")
    assert response.status_code == 403
    response = public_client.delete("/api/user/" + str(4) + "/")
    assert response.status_code == 401
@pytest.mark.users_crud
def test_user_does_not_delete_himself(admin_client):
    """
    An admin cannot delete their own account (400 with explanatory message).
    """
    response = admin_client.delete("/api/user/" + str(1) + "/")
    assert response.status_code == 400
    assert response.data == "can't delete himself"
@pytest.mark.users_crud
def test_not_delete_superuser(admin_client):
    """
    Superusers cannot be deleted (400 with explanatory message).
    """
    response = admin_client.delete("/api/user/" + str(3) + "/")
    assert response.status_code == 400
    assert response.data == "super users cannot be deleted"
@pytest.mark.users_crud
def test_delete_admin_user(admin_client, staff_client):
    """
    Staff users cannot delete admin users (403); admins can (204).
    """
    response = staff_client.delete("/api/user/" + str(5) + "/")
    assert response.status_code == 403
    # response = staff_client.delete('/api/user/' + str(4) + '/')
    # assert response.status_code == 403
    # assert response.data == 'user cannot delete administrators'
    response = admin_client.delete("/api/user/" + str(5) + "/")
    assert response.status_code == 204
@pytest.mark.users_crud
def test_create_user_unique_email(admin_client):
    """
    Creating a user with an email already in use returns 400 and does not
    add a duplicate row.
    """
    repeat_email = "admin@django.com"
    data: Dict[str, Any] = {
        "username": "NEW",
        "email": repeat_email,
        "password": "123",
        "first_name": "name",
        "last_name": "name2",
        "is_staff": False,
    }
    response = admin_client.post("/api/users/", data)
    # assert response.data == 'yeah'
    assert User.objects.filter(email__exact=repeat_email).count() == 1
    assert response.status_code == 400
| 26.135802 | 79 | 0.623366 |
6e4153ef83e21bf087ec6ed89dceeb002c6fc185 | 319 | py | Python | examples/pybullet/examples/signedDistanceField.py | frk2/bullet3 | 225d823e4dc3f952c6c39920c3f87390383e0602 | [
"Zlib"
] | 27 | 2018-05-21T14:28:10.000Z | 2021-12-31T03:12:35.000Z | examples/pybullet/examples/signedDistanceField.py | frk2/bullet3 | 225d823e4dc3f952c6c39920c3f87390383e0602 | [
"Zlib"
] | 1 | 2018-11-19T19:07:47.000Z | 2018-11-19T19:07:47.000Z | examples/pybullet/examples/signedDistanceField.py | frk2/bullet3 | 225d823e4dc3f952c6c39920c3f87390383e0602 | [
"Zlib"
] | 13 | 2019-11-08T12:48:44.000Z | 2022-01-04T04:13:33.000Z | import pybullet as p
import pybullet
import time
# Connect to the GUI simulation server and build a small demo scene.
p.connect(p.GUI)
p.loadURDF("toys/concave_box.urdf")
p.setGravity(0,0,-10)
# Drop a row of 1 cm spheres into the concave box.
for i in range (10):
    p.loadURDF("sphere_1cm.urdf",[i*0.02,0,0.5])
p.loadURDF("duck_vhacd.urdf")
timeStep = 1./240.
p.setTimeStep(timeStep)
# Step the simulation in (roughly) real time until interrupted.
while (1):
    p.stepSimulation()
    time.sleep(timeStep)
| 21.266667 | 45 | 0.727273 |
6e415d21c97c8bf5b7c0199061ba4f235f80c0f3 | 2,472 | py | Python | Old/TitleTable.py | StephanM87/Sofie-Herrmann-Praktikum | 3fa7e715061e35aade8eb93756c30ebf10971059 | [
"MIT"
] | null | null | null | Old/TitleTable.py | StephanM87/Sofie-Herrmann-Praktikum | 3fa7e715061e35aade8eb93756c30ebf10971059 | [
"MIT"
] | 2 | 2021-10-04T08:22:40.000Z | 2021-10-05T13:30:02.000Z | Old/TitleTable.py | StephanM87/Sofie-Herrmann-Praktikum | 3fa7e715061e35aade8eb93756c30ebf10971059 | [
"MIT"
] | null | null | null | from pylatex import Document, Tabular, Section, NoEscape, Command, MultiRow
from Old.BioCatHubDatenmodell import DataModel
# Demo/default values used to fill the PDF tables built by PdfLibrary below.
first_name = "some firstname"
last_name = "some lastname"
e_mail = "some@adress.com"
institution = "some institution"
vessel_type = "some vessel"
volume = int(42)
vol_unit = "mol/l"
# extra condition rows: list of one-entry dicts {label: value}
add_attributes = [{"Sektor": "Kruzifix"}, {"Bereich": "Eisheiligen"}]
temp = int(42)
temp_unit = "°C"
ph_value = int(7)
buffer = "some buffer"
class PdfLibrary (Document):
    """Render a BioCatHub data model as a PDF report with user, vessel and
    condition tables (output file: ``Gesamt_Test.pdf``)."""

    def __init__(self, data_model):
        # NOTE: Document.__init__ is intentionally not called here;
        # create_pdf() builds a fresh Document instance of its own.
        self.biocathub_model = data_model

    def create_pdf(self):
        """Build the LaTeX document from the stored model and compile it
        with pdflatex (tex sources are kept for inspection)."""
        geometry_options = {
            "margin": "2cm",
            "includeheadfoot": True
        }
        doc = Document(page_numbers=True, geometry_options=geometry_options)
        doc.preamble.append(Command("title", self.biocathub_model["title"]))
        doc.append(NoEscape(r"\maketitle"))
        with doc.create(Section("User:")):
            with doc.create(Tabular("|c|c|")) as table:
                table.add_hline()
                table.add_row(["First Name", first_name])
                table.add_hline()
                table.add_row(["Last Name", last_name])
                table.add_hline()
                table.add_row(["E-Mail", e_mail])
                table.add_hline()
                table.add_row(["Institution", institution])
                table.add_hline()
        with doc.create(Section("Vessel:")):
            with doc.create(Tabular("|c|c|")) as table2:
                # Use the model handed to the constructor instead of the
                # imported global, and take the single key of each one-entry
                # dict (the previous ``key = list(i.keys())`` passed the whole
                # key list, which then failed as a dict lookup).
                for i in self.biocathub_model["vessel"]:
                    key = list(i.keys())[0]
                    table2.add_row([key, i[key]])
                    table2.add_hline()
        with doc.create(Section("Condition:")):
            with doc.create(Tabular("|c|c|")) as table3:
                table3.add_hline()
                table3.add_row(["Temperature", temp])
                table3.add_hline()
                table3.add_row(["Unit", temp_unit])
                table3.add_hline()
                table3.add_row(["pH", ph_value])
                table3.add_hline()
                table3.add_row(["Buffer", buffer])
                table3.add_hline()
                for i in add_attributes:
                    key = list(i.keys())[0]
                    table3.add_row([key, i[key]])
                    table3.add_hline()
        doc.generate_pdf("Gesamt_Test",
                         compiler="pdflatex", clean_tex=False)
# Build the demo report from the imported data model.
doc = PdfLibrary(DataModel)
doc.create_pdf()
| 34.333333 | 76 | 0.552589 |
6e41787cb64edb79c7312a9c056163a1f57400e3 | 535 | py | Python | Lab2/la2_4.py | ThomCruz/ImageAnalysisLab | 6a524696ecf4aab96336931d22ead8e8c9ec9e30 | [
"MIT"
] | null | null | null | Lab2/la2_4.py | ThomCruz/ImageAnalysisLab | 6a524696ecf4aab96336931d22ead8e8c9ec9e30 | [
"MIT"
] | null | null | null | Lab2/la2_4.py | ThomCruz/ImageAnalysisLab | 6a524696ecf4aab96336931d22ead8e8c9ec9e30 | [
"MIT"
] | null | null | null | import cv2
import numpy as np
import matplotlib.pyplot as plt
# Load the image; flag 0 tells OpenCV to read it as single-channel grayscale.
pic = cv2.imread('image2.png',0)
#pic = imageio.imread('img/parrot.jpg')
# Luminance conversion with the 0.299/0.587/0.114 RGB weights.
# NOTE(review): the dot product assumes a 3-channel last axis, but the image
# above was already loaded as 2-D grayscale -- confirm the intended input.
gray = lambda rgb : np.dot(rgb[... , :3] , [0.299 , 0.587, 0.114])
gray = gray(pic)
'''
log transform
-> s = c*log(1+r)
So, we calculate constant c to estimate s
-> c = (L-1)/log(1+|I_max|)
'''
max_ = np.max(gray)
def log_transform():
    # s = c*log(1+r) with c = 255/log(1+max intensity)
    return (255/np.log(1+max_)) * np.log(1+gray)
plt.figure(figsize = (5,5))
plt.imshow(log_transform(), cmap = plt.get_cmap(name = 'gray'))
plt.axis('off');
| 20.576923 | 67 | 0.637383 |
6e41cc5519a39b51f1547eae6ffa40cae08fd9e3 | 493 | py | Python | rabbit_mq_examples/new_task.py | audip/rabbitmq | f151dea427afa2a08a76fcdccf6fb99e6a81380f | [
"Apache-2.0"
] | null | null | null | rabbit_mq_examples/new_task.py | audip/rabbitmq | f151dea427afa2a08a76fcdccf6fb99e6a81380f | [
"Apache-2.0"
] | null | null | null | rabbit_mq_examples/new_task.py | audip/rabbitmq | f151dea427afa2a08a76fcdccf6fb99e6a81380f | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
import pika
import sys
# Python 2 script: publish 30 numbered work items to the "task_queue" queue.
connection = pika.BlockingConnection(pika.ConnectionParameters('localhost'))
channel = connection.channel()
# durable=True keeps the queue across broker restarts
channel.queue_declare(queue='task_queue', durable=True)
message = ''.join(sys.argv[1:]) or 'Hello World!'
# NOTE(review): the CLI-derived message above is immediately overwritten in
# the loop below -- confirm whether the argv message is still intended.
for i in range(30):
    message = str(i)+' '+i*'.'
    # delivery_mode=2 marks the message as persistent
    channel.basic_publish(exchange='', routing_key='task_queue',body=message,properties=pika.BasicProperties(delivery_mode=2,))
    print " [x] Sent " + message
connection.close()
| 25.947368 | 127 | 0.730223 |
6e45ae2f0c35533b4360de6c8858cfc005287327 | 4,100 | py | Python | metafilter/model/__init__.py | exhuma/metafilter | dfbc01877a3020f7fe58b9fda3e14ed073684f25 | [
"BSD-3-Clause"
] | null | null | null | metafilter/model/__init__.py | exhuma/metafilter | dfbc01877a3020f7fe58b9fda3e14ed073684f25 | [
"BSD-3-Clause"
] | null | null | null | metafilter/model/__init__.py | exhuma/metafilter | dfbc01877a3020f7fe58b9fda3e14ed073684f25 | [
"BSD-3-Clause"
] | null | null | null | from ConfigParser import SafeConfigParser
from cStringIO import StringIO
import sqlalchemy
from sqlalchemy import create_engine
from sqlalchemy import MetaData
from sqlalchemy.orm import sessionmaker
from os.path import sep
from hashlib import md5
from datetime import datetime, timedelta
import re
import logging
import functools
# Characters that are not valid inside an ltree label (everything except
# alphanumerics and the path separator); masked to "_" by uri_to_ltree().
NON_LTREE = re.compile(r'[^a-zA-Z0-9/]')
LOG = logging.getLogger(__name__)
CONFIG = None  # populated by the config-file search at the bottom of this module
metadata = MetaData()       # shared SQLAlchemy metadata, bound by set_dsn()
Session = sessionmaker()    # shared session factory, bound by set_dsn()
def loadconfig(filename):
    """
    Read the metafilter INI file at *filename*, bind the database engine to
    the configured DSN and return the populated parser.

    Raises:
        ValueError: when the ``[database] dsn`` option is missing or empty.
    """
    default_settings = StringIO("""\
[cli_logging]
error_log=
""")
    parser = SafeConfigParser()
    parser.readfp(default_settings)
    parser.read(filename)
    dsn = parser.get('database', 'dsn', None)
    if not dsn:
        raise ValueError('No DSN found in the config file! This is required!')
    set_dsn(dsn)
    return parser
class memoized(object):
    """Decorator that caches a function's return value each time it is called.
    If called later with the same arguments, the cached value is returned, and
    not re-evaluated.  Cached entries expire five minutes after they were
    stored.
    """
    def __init__(self, func):
        self.func = func
        self.cache = {}  # args tuple -> (value, storage timestamp)
    def __call__(self, *args):
        # anything stored before this instant is considered stale
        obsoletion = datetime.now() - timedelta(seconds=60*5)
        if args in self.cache and self.cache[args][1] < obsoletion:
            # value too old. Remove it from the cache
            LOG.debug("Removing obsolete value for args %r from cache." % (args,))
            del(self.cache[args])
        try:
            output = self.cache[args][0]
            LOG.debug("Cache hit for args %r." % (args,))
            return output
        except KeyError:
            LOG.debug("Initialising cache for args %r." % (args,))
            value = self.func(*args)
            if isinstance(value, sqlalchemy.orm.query.Query):
                # materialise SQLAlchemy queries so the cached result does
                # not depend on a live session
                result = value.all()
                self.cache[args] = (result, datetime.now())
                return result
            else:
                self.cache[args] = (value, datetime.now())
                return value
        except TypeError:
            # uncachable -- for instance, passing a list as an argument.
            # Better to not cache than to blow up entirely.
            LOG.warning("Uncachable function call for args %r" % (args,))
            return self.func(*args)
    def __repr__(self):
        """Return the function's docstring."""
        return self.func.__doc__
    def __get__(self, obj, objtype):
        """Support instance methods."""
        return functools.partial(self.__call__, obj)
def uri_depth(uri):
    """Return the number of path components in *uri* (0 for empty input);
    a trailing separator is ignored."""
    if not uri:
        return 0
    trimmed = uri[:-1] if uri.endswith(sep) else uri
    return len(trimmed.split(sep))
def file_md5(path):
    """
    Return the hex MD5 digest of the file at *path*, read in 1 KiB chunks.
    """
    digest = md5()
    with open(path, "rb") as fptr:
        for chunk in iter(lambda: fptr.read(1024), b''):
            digest.update(chunk)
    return digest.hexdigest()
def uri_to_ltree(uri):
    """Convert a filesystem URI into a PostgreSQL ltree label path rooted
    at ``ROOT`` (empty or "/" maps to just ``ROOT``)."""
    if not uri or uri == "/":
        return "ROOT"
    if uri.endswith(sep):
        uri = uri[:-1]
    ltree = "ROOT%s%s" % (sep, uri[1:]) if uri.startswith(sep) else uri
    # ltree uses "." as its own separator: mask invalid characters (incl.
    # existing dots) with "_", then map path separators onto dots
    return NON_LTREE.sub("_", ltree).replace(sep, ".")
def set_dsn(dsn):
    """Create the SQLAlchemy engine for *dsn* and bind it to the module's
    shared metadata and session factory."""
    engine = create_engine(dsn)
    metadata.bind = engine
    Session.bind = engine
from metafilter.model.nodes import Node
from metafilter.model.queries import Query
from metafilter.model.tags import Tag
#
# Parse the config file
#
from os.path import join, exists, expanduser
from os import getcwd
# Search order: CWD first, then the per-user dir, then the system-wide config.
paths = [
    join(getcwd(), 'config.ini'),
    join(expanduser("~"), '.metafilter', 'config.ini'),
    join('/', 'etc', 'metafilter', 'config.ini'),
]
for path in paths:
    if not exists(path):
        continue
    LOG.debug('Reading config from %s' % path)
    # NOTE(review): there is no ``break`` here, so when several files exist
    # the LAST one found (/etc/...) effectively wins, which contradicts the
    # advertised search order -- confirm whether a break is intended.
    CONFIG = loadconfig(path)
if not CONFIG:
    LOG.error('Unable to open config file (search order: %s)' % (', '.join(paths)))
| 26.973684 | 83 | 0.621463 |
6e46d398600e4b5a657c138522f24f0eef1938e9 | 3,067 | py | Python | manager/base.py | monocleface/viewer | 8ab47a9e846bd2716fe0208c34f33565513fc3f6 | [
"Apache-2.0"
] | 6 | 2020-02-28T21:18:16.000Z | 2020-03-13T16:45:57.000Z | manager/base.py | monocleface/viewer | 8ab47a9e846bd2716fe0208c34f33565513fc3f6 | [
"Apache-2.0"
] | 6 | 2020-02-28T12:42:52.000Z | 2020-03-16T03:49:09.000Z | manager/base.py | monocleface/viewer | 8ab47a9e846bd2716fe0208c34f33565513fc3f6 | [
"Apache-2.0"
] | 6 | 2020-03-05T13:04:25.000Z | 2020-03-13T16:46:03.000Z | from pathlib import Path
from typing import Union
import yaml
class Config(object):
    """Basic Config Class: load a YAML experiment file and prepare the
    checkpoint directory tree."""
    def __init__(self, cfg_yaml_path:str, root:str=".", data_path:str="./data"):
        r"""
        Configuration of Settings
        Args:
            root: root path of project, default="."
            data_path: data path that contains data directories
            cfg_yaml_path: argument file path(`str`)
        It will create a directory tree automatically from `cfg_yaml_path`:
        ```
        checkpoints
        └── data_type
            └── eval_type
                ├── exp_arg1
                │   ├── exp1_summary
                │   ├── model_type + attr_type1 <-weights
                │   ├── model_type + attr_type2
                │   └── model_type + attr_type3
                ├── exp_arg2
                └── exp_arg3
        ```
        The `cfg_yaml_path` file should look like below.
        ```yaml
        # configure.yaml
        type:
            data_type: mnist
            eval_type: roar
            model_type: resnet18
            attr_type: ["vanillagrad", "gradcam"]
        ...
        ```
        NOTE(review): check_type_args() reads ``self.conf["type_args"]``, so
        the YAML apparently needs a top-level ``type_args`` key, while the
        example above shows ``type:`` -- confirm which spelling is correct.
        """
        self.prj_path = Path(root)
        self.data_path = Path(data_path)
        with open(cfg_yaml_path, mode="r") as f:
            conf = yaml.load(f, Loader=yaml.FullLoader)
        # vars(self).update(conf)
        # merge every top-level YAML key directly into the instance namespace
        self.__dict__.update(conf)
        self.check_type_args()
    def check_type_args(self):
        r"""
        Check arguments and create experiment path
        Raises:
            KeyError: when one of the required type keys is missing.
        """
        type_args = self.conf["type_args"]
        check_types = ["data_type", "eval_type", "model_type", "attr_type"]
        for c_type in check_types:
            if not (c_type in type_args):
                raise KeyError(f"Configure file dosen't have {c_type}, check your argument file")
        self.exp_path = self.prj_path / "checkpoints" / type_args["data_type"] / type_args["eval_type"]
        self.check_dir_exist(self.exp_path)
    def check_dir_exist(self, path:Union[str, Path], file:bool=False):
        r"""
        Check that a path exists; create it when it does not.
        Args:
            path: `str` or `pathlib.Path` type
            file: if True, will create a file, not a directory path
        """
        if not isinstance(path, Path):
            path = Path(path)
        if file:
            if not path.exists():
                path.touch()
                print(f"Given path doesn't exists, created {path}")
        else:
            if not path.exists():
                path.mkdir(parents=True)
                print(f"Given path doesn't exists, created {path}")
    @property
    def conf(self):
        # the whole instance namespace doubles as the configuration mapping
        return self.__dict__
class Checkpoints(object):
    """Model Checkpoint Manager"""
    def __init__(self, cfg):
        r"""
        Save details about model weights and summaries

        ``cfg`` is presumably a Config instance describing the experiment
        paths -- confirm; it is currently unused (implementation stub).
        """
    def save_model(self):
        r"""
        Save model weights
        """
        # TODO: implementation stub
    def save_summary(self):
        r"""
        Save training stats
        """
        # TODO: implementation stub
| 29.209524 | 103 | 0.538637 |
6e486d2de9698c2208f5c29100b107e8de344209 | 307 | py | Python | 007 - Intro List Comprehension.py/016 - Maior.py | rodrigoviannini/meus_Primeiros_Codigos | 828dec1c4ce06889efd491145e631c30a45e858f | [
"MIT"
] | 2 | 2021-07-22T23:26:54.000Z | 2021-07-22T23:27:27.000Z | 007 - Intro List Comprehension.py/016 - Maior.py | rodrigoviannini/meus_Primeiros_Codigos | 828dec1c4ce06889efd491145e631c30a45e858f | [
"MIT"
] | null | null | null | 007 - Intro List Comprehension.py/016 - Maior.py | rodrigoviannini/meus_Primeiros_Codigos | 828dec1c4ce06889efd491145e631c30a45e858f | [
"MIT"
] | null | null | null | """
Nested list comprehension.
GOAL: Find the largest value(s) of a list and print them as another list.
"""
listaGenerica = [1, 2, 3, 4, 1, 2, 3, 4, 10, 10, 10, 5, 3, -4]
# Keep every occurrence of the maximum value. A single O(n) max() pass
# replaces the original quadratic "x >= every y" nested comprehension
# while producing exactly the same list.
maior = max(listaGenerica)
listaMaior = [x for x in listaGenerica if x == maior]
print(listaMaior)
6e487df26dabde97ea3f1c6bd9a631bd068d4b7f | 357 | py | Python | thehardway/practice3.py | sunquan9301/pythonLearn | f10760a4e32c3ac267e39d835c08f45800d081b6 | [
"Apache-2.0"
] | null | null | null | thehardway/practice3.py | sunquan9301/pythonLearn | f10760a4e32c3ac267e39d835c08f45800d081b6 | [
"Apache-2.0"
] | null | null | null | thehardway/practice3.py | sunquan9301/pythonLearn | f10760a4e32c3ac267e39d835c08f45800d081b6 | [
"Apache-2.0"
def main():
    """Print the lines of the local file ``demo1`` and demo addOne()."""
    # age = input("How old are you?")
    # print("I am %s year old" % age)
    file = open("demo1")  # NOTE: raises FileNotFoundError when demo1 is absent
    lines = file.readlines()
    print("lines",lines)
    for i in range(len(lines)):
        print(lines[i])
    file.close()
    c,d = addOne(1,2)
    print(c,d)
def addOne(a, b):
    """Return both arguments incremented by one, as a tuple."""
    return tuple(value + 1 for value in (a, b))
if __name__ == '__main__':
main()
| 17 | 37 | 0.535014 |
6e4b454f9d9a661e964992d4f53efcc35fd88de8 | 651 | py | Python | ipt/td1/3.3-nbracines.py | lucas8/MPSI | edefa2155071910d95633acf87b9f3a9d34f67d3 | [
"MIT"
] | null | null | null | ipt/td1/3.3-nbracines.py | lucas8/MPSI | edefa2155071910d95633acf87b9f3a9d34f67d3 | [
"MIT"
] | null | null | null | ipt/td1/3.3-nbracines.py | lucas8/MPSI | edefa2155071910d95633acf87b9f3a9d34f67d3 | [
"MIT"
] | null | null | null | #!/usr/bin/python3
def nbracines(a, b, c):
    """Print how many distinct real roots the trinomial aX^2 + bX + c has.

    A discriminant within 1e-10 of zero is treated as a double root.
    Prints an error (and returns early) when a == 0.
    """
    if a == 0:
        print("Le coefficient dominant est nul, ce n'est pas un trinome !")
        return
    d = b*b - 4*a*c
    if abs(d) < 1e-10:
        k, d = 1, 0
    elif d < 0:
        k = 0
    else:
        k = 2
    print("Le polynome " + str(a) + "X^2 + " + str(b) + "X + " + str(c)
          + " admet " + str(k) + " racines distinctes (det = " + str(d) + ")")
# Interactive entry point: read the three coefficients, report the root
# count, then run two fixed demo calls.
a = float(input("Entrez le coefficient dominant du trinome : "))
b = float(input("Entrez le coefficient d'ordre 1 du trinome : "))
c = float(input("Entrez la constante du trinome : "))
nbracines(a, b, c)
nbracines(0, 3, 1)
nbracines(1, 0.2, 0.01)
| 28.304348 | 140 | 0.537634 |
6e4cf85303623618f7fb5038daec890f74903ee3 | 2,641 | py | Python | ILSpy.ConvertedToPython/TreeNodes/Analyzer/AnalyzedTypeExtensionMethodsTreeNode.py | exyi/ILSpy | 17ddfa01ff4915c4ca8461c56fb7d04d25fc591e | [
"MIT"
] | 1 | 2021-04-26T19:46:09.000Z | 2021-04-26T19:46:09.000Z | ILSpy.ConvertedToPython/TreeNodes/Analyzer/AnalyzedTypeExtensionMethodsTreeNode.py | exyi/ILSpy | 17ddfa01ff4915c4ca8461c56fb7d04d25fc591e | [
"MIT"
] | null | null | null | ILSpy.ConvertedToPython/TreeNodes/Analyzer/AnalyzedTypeExtensionMethodsTreeNode.py | exyi/ILSpy | 17ddfa01ff4915c4ca8461c56fb7d04d25fc591e | [
"MIT"
] | null | null | null | # Copyright (c) 2011 AlphaSierraPapa for the SharpDevelop Team
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this
# software and associated documentation files (the "Software"), to deal in the Software
# without restriction, including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
# to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
# FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from System import *
from System.Collections.Generic import *
from System.Linq import *
from System.Threading import *
from Mono.Cecil import *
class AnalyzedTypeExtensionMethodsTreeNode(AnalyzerSearchTreeNode):
    # Machine-converted from the original C# ILSpy analyzer node; the .NET
    # naming and explicit enumerator loops are kept as produced.
    def __init__(self, analyzedType):
        if analyzedType == None:
            raise ArgumentNullException("analyzedType")
        self._analyzedType = analyzedType
    def get_Text(self):
        # label shown for this node in the analyzer tree
        return "Extension Methods"
    Text = property(fget=get_Text)
    def FetchChildren(self, ct):
        # scan the relevant scope for extension methods targeting the type
        analyzer = ScopedWhereUsedAnalyzer[AnalyzerTreeNode](self._analyzedType, FindReferencesInType)
        return analyzer.PerformAnalysis(ct).OrderBy()
    def FindReferencesInType(self, type):
        # NOTE(review): several conversion artifacts here -- the guard scans
        # types WITHOUT the ExtensionAttribute (looks inverted), the created
        # nodes are never yielded/returned, and ``self._Language`` is not
        # defined anywhere in this class; confirm against the C# original.
        if not self.HasExtensionAttribute(type):
            enumerator = type.Methods.GetEnumerator()
            while enumerator.MoveNext():
                method = enumerator.Current
                if method.IsStatic and self.HasExtensionAttribute(method):
                    if method.HasParameters and method.Parameters[0].ParameterType.Resolve() == self._analyzedType:
                        node = AnalyzedMethodTreeNode(method)
                        node.Language = self._Language
    def HasExtensionAttribute(self, p):
        # True when the member carries
        # System.Runtime.CompilerServices.ExtensionAttribute
        if p.HasCustomAttributes:
            enumerator = p.CustomAttributes.GetEnumerator()
            while enumerator.MoveNext():
                ca = enumerator.Current
                t = ca.AttributeType
                if t.Name == "ExtensionAttribute" and t.Namespace == "System.Runtime.CompilerServices":
                    return True
        return False
    def CanShow(type):
        # show on all types except static classes
        return not (type.IsAbstract and type.IsSealed)
    CanShow = staticmethod(CanShow)
6e4d07aff129e622dcf6a63b48636b52ecc07cc1 | 74 | py | Python | python/828.unique-letter-string.py | stavanmehta/leetcode | 1224e43ce29430c840e65daae3b343182e24709c | [
"Apache-2.0"
] | null | null | null | python/828.unique-letter-string.py | stavanmehta/leetcode | 1224e43ce29430c840e65daae3b343182e24709c | [
"Apache-2.0"
] | null | null | null | python/828.unique-letter-string.py | stavanmehta/leetcode | 1224e43ce29430c840e65daae3b343182e24709c | [
"Apache-2.0"
class Solution:
    def uniqueLetterString(self, S: str) -> int:
        """Return the sum, over all substrings of S, of the number of
        characters that occur exactly once in that substring (LeetCode 828).

        For each occurrence of a character its contribution equals the
        number of substrings in which it is the unique occurrence:
        (distance to previous occurrence) * (distance to next occurrence).
        Runs in O(len(S)) time.
        """
        positions = {}
        for idx, ch in enumerate(S):
            positions.setdefault(ch, []).append(idx)
        total = 0
        for indices in positions.values():
            # sentinel bounds stand in for "before the string" / "after it"
            bounded = [-1] + indices + [len(S)]
            for k in range(1, len(bounded) - 1):
                total += (bounded[k] - bounded[k - 1]) * (bounded[k + 1] - bounded[k])
        return total
| 18.5 | 48 | 0.608108 |
6e4d8cf0e65920064f1566f74432415d41b6c22a | 3,187 | py | Python | src/detector.py | omelchert/LEPM_1DFD | aa5ba78e557cdcd0a10a16065c0f17119f51ab78 | [
"BSD-3-Clause"
] | null | null | null | src/detector.py | omelchert/LEPM_1DFD | aa5ba78e557cdcd0a10a16065c0f17119f51ab78 | [
"BSD-3-Clause"
] | null | null | null | src/detector.py | omelchert/LEPM_1DFD | aa5ba78e557cdcd0a10a16065c0f17119f51ab78 | [
"BSD-3-Clause"
] | null | null | null | import sys
import numpy as np
class PiezoTransducer(object):
    """Piezoelectric transducer data structure.

    Models the sensing layer of a piezoelectric transducer inside a 1D
    finite-difference domain and records the voltage it produces over time.
    (Python 2 source: note the tuple parameter in __init__.)
    """
    def __init__(self,Glob,(zMin,zMax),h=1.):
        """initial instance of PiezoTransducer class

        Args:
           Glob (data structure): data structure holding domain configuration
           (zMin, zMax) (tuple, floats): boundary locations of sensing layer
           h (float): effective piezoelectric parameter

        Note:
           in case of mechanically free setup with open-circuit boundary
           condition, the effective piezoelectric parameter reads

           h = d / (eT sD),

           wherein d is the piezoelectric strain constant, eT is the dielectric
           coefficient, and sD is the mechanical compliance (see Refs. [1,2]).

        Refs:
           [1] PVDF piezoelectric polymer
               Ueberschlag, P.
               Sensor Review, 21 (2001) 118-126

           [2] PVDF piezoelectric polymers: characterization and application
               to thermal energy harvesting
               Gusarov, B.
               Universite Grenoble Alpes (2015)
        """
        self.dz = Glob.dz  # spatial mesh width, taken from the domain config
        # map the physical layer boundaries onto grid indices (clip low end to 1)
        self.zIdMin = max(1,Glob._z2i(zMin))
        self.zIdMax = Glob._z2i(zMax)
        # electric field inside the sensing layer, one value per grid cell
        self.E = np.zeros(self.zIdMax-self.zIdMin)
        self.h = h    # effective piezoelectric parameter
        self.t = []   # sampling instants
        self.U = []   # potential difference across transducer per sample
        self.Us = []  # acoustic stress at the upper layer boundary per sample

    def measure(self,n,dt,u,tau):
        """method implementing measurement at time instant

        Implements finite-difference approximation to state equation for
        direct piezoelectric effect.

        Args:
           n (int): current time step
           dt (float): increment between consecutive time steps
           u (numpy array, ndim=1): velocity profile
           tau (numpy array, ndim=1): acoustic stress profile
        """
        C = dt/self.dz  # time-to-space step ratio of the update scheme
        E0 = self.E
        h = self.h
        zL, zH = self.zIdMin, self.zIdMax
        # evolve electric field within transducer
        self.E[:] = E0[:] - h*C*(u[zL-1:zH-1]-u[zL:zH])
        # determine potential difference across transducer
        dU = -np.trapz(self.E,dx=self.dz)
        self.t.append(n*dt)
        self.U.append(dU)
        self.Us.append(tau[self.zIdMax])

    def dumpField(self,fName=None):
        """method writing field configuration to file

        Args:
           fName (str, optional): optional output file-path. If none is given,
              sys.stdout is used instead
        """
        # NOTE(review): the handle is never closed when fName is given
        fStream = open(fName,'w') if fName else sys.stdout
        fStream.write("# (z) (E) \n")
        for i in range(len(self.E)):
            fStream.write("%lf %lf\n"%(self.dz*(self.zIdMin+i),self.E[i]))

    def dumpSignal(self,fName=None):
        """method writing transducer response to file

        Args:
           fName (str, optional): optional output file-path. If none is given,
              sys.stdout is used instead
        """
        # NOTE(review): the handle is never closed when fName is given
        fStream = open(fName,'w') if fName else sys.stdout
        fStream.write("# (t) (p) \n")
        for i in range(len(self.U)):
            fStream.write("%lf %lf\n"%(self.t[i],self.U[i]))
# EOF: detector.py
| 32.85567 | 81 | 0.569187 |
6e4dee90bdd936152cb862e03942c4be61d9a3e5 | 249 | py | Python | 2.datatype/1.number_typecasting.py | Tazri/Python | f7ca625800229c8a7e20b64810d6e162ccb6b09f | [
"DOC"
] | null | null | null | 2.datatype/1.number_typecasting.py | Tazri/Python | f7ca625800229c8a7e20b64810d6e162ccb6b09f | [
"DOC"
] | null | null | null | 2.datatype/1.number_typecasting.py | Tazri/Python | f7ca625800229c8a7e20b64810d6e162ccb6b09f | [
"DOC"
] | null | null | null | number_int = int("32");
# Cast literals to the remaining numeric types (number_int is created above).
number_float = float(32)
number_complex = complex(3222342332432435435345324435324523423)

# Show each value together with its runtime type.
print(type(number_int), ": ", number_int)
print(type(number_float), ": ", number_float)
print(type(number_complex),": ",number_complex); | 35.571429 | 64 | 0.767068 |
6e50868796b7b8940a4c3451e490a815d749a818 | 338 | py | Python | tests/test_kitty.py | raphaelavergud/CatApp | cf0a68e2c78307684e167a748e72e068c25a6089 | [
"MIT"
] | null | null | null | tests/test_kitty.py | raphaelavergud/CatApp | cf0a68e2c78307684e167a748e72e068c25a6089 | [
"MIT"
] | null | null | null | tests/test_kitty.py | raphaelavergud/CatApp | cf0a68e2c78307684e167a748e72e068c25a6089 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import unittest
import kitty
from kitty import uncomfy_checker
import mock
class TestProgram(unittest.TestCase):
    """Unit tests for the kitty module."""

    def test_comfy(self):
        """uncomfy_checker should report a comfortable cat."""
        # Patch the attribute on the module and call through the module.
        # The previous code assigned kitty.uncomfy_checker = mock.Mock(...)
        # but then called the name bound by 'from kitty import
        # uncomfy_checker', so the mock was never actually exercised, and
        # the module attribute stayed mocked after the test.  patch.object
        # both targets the right name and restores the original afterwards.
        with mock.patch.object(kitty, 'uncomfy_checker',
                               return_value='comfortable'):
            self.assertEqual(kitty.uncomfy_checker(), 'comfortable')
# Allow running this test module directly: python test_kitty.py
if __name__ == '__main__':
    unittest.main()
6e52ebb7ea5d298c9a93f221c29b566a183e81fe | 251 | py | Python | latest/modules/physics/control/control_plots-5.py | sympy/sympy_doc | ec4f28eed09d5acb9e55874e82cc86c74e762c0d | [
"BSD-3-Clause"
] | 20 | 2015-01-28T01:08:13.000Z | 2021-12-19T04:03:28.000Z | latest/modules/physics/control/control_plots-5.py | sympy/sympy_doc | ec4f28eed09d5acb9e55874e82cc86c74e762c0d | [
"BSD-3-Clause"
] | 31 | 2015-01-27T07:16:19.000Z | 2021-11-15T10:58:15.000Z | latest/modules/physics/control/control_plots-5.py | sympy/sympy_doc | ec4f28eed09d5acb9e55874e82cc86c74e762c0d | [
"BSD-3-Clause"
] | 38 | 2015-01-08T18:48:27.000Z | 2021-12-02T13:19:43.000Z | from sympy.abc import s
from sympy.physics.control.lti import TransferFunction
from sympy.physics.control.control_plots import ramp_response_plot
# Example transfer function: G(s) = s / ((s + 4) * (s + 8)).
tf1 = TransferFunction(s, (s+4)*(s+8), s)
# Plot its ramp response up to t = 2 (opens a matplotlib window).
ramp_response_plot(tf1, upper_limit=2) # doctest: +SKIP
| 41.833333 | 67 | 0.780876 |
6e52fb33dd28eee7b106bc48ba5c34f08261ca0b | 2,309 | py | Python | src/pynorare/__main__.py | concepticon/pynorare | 3cf5ea2d1597c5acc84963f781ff49d96b4d7e02 | [
"MIT"
] | null | null | null | src/pynorare/__main__.py | concepticon/pynorare | 3cf5ea2d1597c5acc84963f781ff49d96b4d7e02 | [
"MIT"
] | 5 | 2020-07-20T11:05:07.000Z | 2022-03-11T15:51:52.000Z | src/pynorare/__main__.py | concepticon/pynorare | 3cf5ea2d1597c5acc84963f781ff49d96b4d7e02 | [
"MIT"
] | null | null | null | """
Main command line interface to the pynorare package.
"""
import sys
import pathlib
import contextlib
from cldfcatalog import Config, Catalog
from clldutils.clilib import register_subcommands, get_parser_and_subparsers, ParserError, PathType
from clldutils.loglib import Logging
from pyconcepticon import Concepticon
from pynorare import NoRaRe
import pynorare.commands
def main(args=None, catch_all=False, parsed_args=None):
    """Entry point of the ``norare`` command-line interface.

    Args:
        args: optional list of argv tokens to parse (defaults to sys.argv).
        catch_all: when True, swallow unexpected exceptions, print them and
            return 1 instead of letting them propagate.
        parsed_args: optional pre-parsed namespace, bypassing argument
            parsing (used by tests).

    Returns:
        int exit code of the invoked subcommand (0 on success).
    """
    try: # pragma: no cover
        # default the --repos option to the configured concepticon clone
        repos = Config.from_file().get_clone('concepticon')
    except KeyError: # pragma: no cover
        repos = pathlib.Path('.')
    parser, subparsers = get_parser_and_subparsers('norare')
    parser.add_argument(
        '--repos',
        help="clone of concepticon/concepticon-data",
        default=repos,
        type=PathType(type='dir'))
    parser.add_argument(
        '--repos-version',
        help="version of repository data. Requires a git clone!",
        default=None)
    parser.add_argument(
        '--norarepo',
        default=pathlib.Path('.'),
        type=PathType(type='dir'))
    # every module in pynorare.commands becomes a subcommand
    register_subcommands(subparsers, pynorare.commands)

    args = parsed_args or parser.parse_args(args=args)

    if not hasattr(args, "main"): # pragma: no cover
        # no subcommand was given: show usage and signal failure
        parser.print_help()
        return 1

    with contextlib.ExitStack() as stack:
        stack.enter_context(Logging(args.log, level=args.log_level))
        if args.repos_version: # pragma: no cover
            # If a specific version of the data is to be used, we make
            # use of a Catalog as context manager:
            stack.enter_context(Catalog(args.repos, tag=args.repos_version))
        # expose the APIs on the namespace for the subcommand to use
        args.repos = Concepticon(args.repos)
        args.api = NoRaRe(args.norarepo, concepticon=args.repos)
        args.log.info('norare at {0}'.format(args.repos.repos))
        try:
            return args.main(args) or 0
        except KeyboardInterrupt: # pragma: no cover
            return 0
        except ParserError as e: # pragma: no cover
            # bad subcommand arguments: print the error, then show the
            # subcommand's help text
            print(e)
            return main([args._command, '-h'])
        except Exception as e: # pragma: no cover
            if catch_all: # pragma: no cover
                print(e)
                return 1
            raise
# Run the CLI when executed as a script.
if __name__ == '__main__': # pragma: no cover
    sys.exit(main() or 0)
| 32.985714 | 99 | 0.644435 |
6e535cb6e52945115eb6d7ac8b6103b52efc86b8 | 92 | py | Python | app_kasir/apps.py | rizkyarwn/projectkasir | 6524a052bcb52534524db1c5fba05d31a0f0d801 | [
"MIT"
] | 2 | 2018-06-28T10:52:47.000Z | 2018-06-28T10:52:48.000Z | app_kasir/apps.py | rizkyarwn/projectkasir | 6524a052bcb52534524db1c5fba05d31a0f0d801 | [
"MIT"
] | null | null | null | app_kasir/apps.py | rizkyarwn/projectkasir | 6524a052bcb52534524db1c5fba05d31a0f0d801 | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class AppKasirConfig(AppConfig):
    """Django application configuration for the app_kasir app."""
    # dotted path Django uses to register this application
    name = 'app_kasir'
| 15.333333 | 33 | 0.76087 |
6e536b50d4b1d1ed9120b0881d839d4c283289b4 | 2,472 | py | Python | evaluator.py | kavinyao/SKBPR | 305aeb846ee89234d8eae3b73452c2fdad2496b4 | [
"MIT"
] | null | null | null | evaluator.py | kavinyao/SKBPR | 305aeb846ee89234d8eae3b73452c2fdad2496b4 | [
"MIT"
] | null | null | null | evaluator.py | kavinyao/SKBPR | 305aeb846ee89234d8eae3b73452c2fdad2496b4 | [
"MIT"
] | 1 | 2018-09-29T08:31:40.000Z | 2018-09-29T08:31:40.000Z | """
Evaluate recommendations.
"""
import config
from collections import defaultdict
class ConfusionMatrixEvaluator(object):
    """Evaluate result's precision and recall.

    Accumulates per-query precision/recall within a round, per-round
    averages within an experiment, and per-recommender averages across
    experiments.  (Python 2 source: print statements, dict.iteritems.)
    """
    def __init__(self):
        self.experiment_reset()
        self.reset()

    def experiment_reset(self):
        """Clear across-experiment statistics (keyed by recommender name)."""
        self.exp_results = defaultdict(list)

    def reset(self):
        """Clear the per-round averages collected so far."""
        self.results = []

    def round_start(self):
        """Reset data for new round."""
        self.precisions = []
        self.recalls = []

    def evaluate(self, actual_result, test_result):
        """
        @param actual_result the set of actual products
        @param test_result the set of calculated products
        """
        # size of the intersection, as float so the divisions stay exact
        correct_count = float(len(actual_result & test_result))
        # precision: fraction of recommended products that are correct
        precision = correct_count / len(test_result) if test_result else 0.0
        self.precisions.append(precision)
        # recall: fraction of actual products that were recommended
        recall = correct_count / len(actual_result) if actual_result else 0.0
        self.recalls.append(recall)

    def round_end(self):
        """Average this round's per-query scores and store the pair."""
        avg_precision = sum(self.precisions) / len(self.precisions)
        avg_recall = sum(self.recalls) / len(self.recalls)
        self.results.append((avg_precision, avg_recall))

    def _avg_result(self, results, print_each=False, scale='Round'):
        """Return (avg precision, avg recall) over a list of result pairs,
        optionally printing each entry prefixed with *scale*."""
        all_precision = 0.0
        all_recall = 0.0
        for i, result in enumerate(results, 1):
            precision, recall = result
            if print_each:
                print '%s %2d: precision: %.4f | recall: %.4f' % (scale, i, precision, recall)
            all_precision += precision
            all_recall += recall

        n = len(results)
        return all_precision/n, all_recall/n

    def summary(self, recommender, print_result):
        """Average the finished rounds and file them under *recommender*."""
        avg_precision, avg_recall = self._avg_result(self.results, config.verbose)
        self.exp_results[str(recommender)].append((avg_precision, avg_recall))

        if print_result:
            print '>Average: precision: %.4f | recall: %.4f' % (avg_precision, avg_recall)

    def grand_summary(self):
        """Print per-recommender averages, best precision first."""
        statistics = []
        for recommender, results in self.exp_results.iteritems():
            avg_precision, avg_recall = self._avg_result(results)
            statistics.append((recommender, avg_precision, avg_recall))

        # sort by average precision, descending
        statistics.sort(key=lambda t:t[1], reverse=True)
        for recommender, precision, recall in statistics:
            print 'Precision: %.4f | Recall %.4f -- %s' % (precision, recall, recommender)
| 35.314286 | 94 | 0.640372 |
6e53df58b8e50b1065505ed5b573aa01243270d1 | 12,263 | py | Python | yolov3_deepsort.py | h-enes-simsek/deep_sort_pytorch | 0a9ede55e53355c19455197cc8daa60336c652bb | [
"MIT"
] | 1 | 2021-02-28T15:22:43.000Z | 2021-02-28T15:22:43.000Z | yolov3_deepsort.py | h-enes-simsek/deep_sort_pytorch | 0a9ede55e53355c19455197cc8daa60336c652bb | [
"MIT"
] | null | null | null | yolov3_deepsort.py | h-enes-simsek/deep_sort_pytorch | 0a9ede55e53355c19455197cc8daa60336c652bb | [
"MIT"
] | null | null | null | import os
import cv2
import time
import argparse
import torch
import warnings
import numpy as np
from detector import build_detector
from deep_sort import build_tracker
from utils.draw import draw_boxes
from utils.parser import get_config
from utils.log import get_logger
from utils.io import write_results
from numpy import loadtxt #gt.txt yi almak için
class VideoTracker(object):
    """Run YOLOv3 detection plus DeepSORT tracking over a video or webcam.

    Use as a context manager: __enter__ opens the input source, the output
    writer and (optionally) the ground-truth file; run() processes frames.
    """

    def __init__(self, cfg, args, video_path):
        """Store configuration and build the detector and the tracker.

        Args:
            cfg: merged detector / deep-sort configuration object.
            args: parsed command-line arguments (see parse_args).
            video_path: path of the input video file (unused for webcam).
        """
        self.cfg = cfg
        self.args = args
        self.video_path = video_path
        self.logger = get_logger("root")

        use_cuda = args.use_cuda and torch.cuda.is_available()
        if not use_cuda:
            warnings.warn("Running in cpu mode which maybe very slow!", UserWarning)

        if args.display:
            cv2.namedWindow("test", cv2.WINDOW_NORMAL)
            cv2.resizeWindow("test", args.display_width, args.display_height)

        if args.cam != -1:
            print("Using webcam " + str(args.cam))
            self.vdo = cv2.VideoCapture(args.cam)
        else:
            # the video file itself is opened later, in __enter__
            self.vdo = cv2.VideoCapture()
        self.detector = build_detector(cfg, use_cuda=use_cuda)
        self.deepsort = build_tracker(cfg, use_cuda=use_cuda)
        self.class_names = self.detector.class_names

    def __enter__(self):
        """Open the input source, the output writer and optional gt data."""
        if self.args.cam != -1:
            ret, frame = self.vdo.read()
            assert ret, "Error: Camera error"
            # NOTE(review): frame.shape is (height, width, channels), so
            # width and height look swapped here - confirm.
            self.im_width = frame.shape[0]
            self.im_height = frame.shape[1]

        else:
            assert os.path.isfile(self.video_path), "Path error"
            self.vdo.open(self.video_path)
            self.im_width = int(self.vdo.get(cv2.CAP_PROP_FRAME_WIDTH))
            self.im_height = int(self.vdo.get(cv2.CAP_PROP_FRAME_HEIGHT))
            assert self.vdo.isOpened()

        if self.args.save_path:
            os.makedirs(self.args.save_path, exist_ok=True)

            # path of saved video and results
            self.save_video_path = os.path.join(self.args.save_path, "results.avi")
            self.save_results_path = os.path.join(self.args.save_path, "results.txt")

            # create video writer
            fourcc = cv2.VideoWriter_fourcc(*'MJPG')
            self.writer = cv2.VideoWriter(self.save_video_path, fourcc, 20, (self.im_width, self.im_height))

            # logging
            self.logger.info("Save results to {}".format(self.args.save_path))

        # if detections are to be read from the ground-truth file
        if self.args.gt:
            # gt.txt is expected in a gt/ folder next to the video
            gtFolder = self.video_path + "/../gt/gt.txt"
            gt = loadtxt(gtFolder, delimiter=",")

            def sortwithFrame(elem):
                # elem[0] is the frame number of the annotation row
                return elem[0]

            # sort list with key
            gt_sorted = sorted(gt,key=sortwithFrame)

            #-----------------------------
            # drop the rows whose object_type (column 7) is not 1
            def filterType(param):
                if (param[7]==1):
                    return True
                else:
                    return False

            gt_filtered = list(filter(filterType, gt_sorted))
            #-------------------------------
            # drop the rows whose not_ignored flag (column 6) is 0
            def filterIgnore(param):
                if (param[6]==1):
                    return True
                else:
                    return False

            gt_filtered2 = list(filter(filterIgnore, gt_filtered))

            self.gt = np.array(gt_filtered2)

        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        """Report any exception raised in the with-block (not suppressed)."""
        if exc_type:
            print(exc_type, exc_value, exc_traceback)

    # Because the function inside deep_sort did not work correctly, I adapted
    # it and rewrote it here as a method.
    # input: frame image, bbox matrix in xywh format (shape=#ofDetections,4)
    # output: the xyxy-format equivalent of the xywh-format matrix
    def my_xywh_to_xyxy(self,ori_img, bbox_xywh):
        x,y,w,h = bbox_xywh[:,0],bbox_xywh[:,1],bbox_xywh[:,2],bbox_xywh[:,3]
        # reshape every column to (N, 1) so they can be concatenated below
        x = x.reshape((x.size,1))
        y = y.reshape((y.size,1))
        w = w.reshape((w.size,1))
        h = h.reshape((h.size,1))

        # get the frame dimensions
        height, width = ori_img.shape[:2]
        # corners from the center point, clipped to the frame
        x1 = np.maximum(np.int_(x-w/2),0)
        x2 = np.minimum(np.int_(x+w/2),width-1)
        y1 = np.maximum(np.int_(y-h/2),0)
        y2 = np.minimum(np.int_(y+h/2),height-1)
        arr = np.concatenate((x1,y1,x2,y2),axis=1)
        return arr

    # top-left(x,y,w,h) >> center(x,y,w,h) converter
    def my_tlwh_to_xywh(self,ori_img, bbox_tlwh):
        x,y,w,h = bbox_tlwh[:,0],bbox_tlwh[:,1],bbox_tlwh[:,2],bbox_tlwh[:,3]
        x = x.reshape((x.size,1))
        y = y.reshape((y.size,1))
        w = w.reshape((w.size,1))
        h = h.reshape((h.size,1))

        # get the frame dimensions
        height, width = ori_img.shape[:2]
        # shift the top-left corner to the box center, clipped to the frame
        x1 = np.minimum(np.int_(x+w/2),width-1)
        y1 = np.minimum(np.int_(y+h/2),height-1)
        arr = np.concatenate((x1,y1,w,h),axis=1)
        return arr

    # top-left(xy)wh >> xyxy converter
    # In gt the data are given as top-left xy; YOLO produces its data as
    # xywh (with xy being the center point).
    def my_tlwh_to_xyxy(self,ori_img, bbox_tlwh):
        x,y,w,h = bbox_tlwh[:,0],bbox_tlwh[:,1],bbox_tlwh[:,2],bbox_tlwh[:,3]
        x = x.reshape((x.size,1))
        y = y.reshape((y.size,1))
        w = w.reshape((w.size,1))
        h = h.reshape((h.size,1))

        # get the frame dimensions
        height, width = ori_img.shape[:2]
        x1 = np.maximum(np.int_(x),0)
        x2 = np.minimum(np.int_(x+w),width-1)
        y1 = np.maximum(np.int_(y),0)
        y2 = np.minimum(np.int_(y+h),height-1)
        arr = np.concatenate((x1,y1,x2,y2),axis=1)
        return arr

    def run(self):
        """Main loop: read frames, detect people, track, draw and save."""
        results = []
        idx_frame = 0
        while self.vdo.grab():
            idx_frame += 1
            # honor the frame sampling interval
            if idx_frame % self.args.frame_interval:
                continue

            start = time.time()
            _, ori_im = self.vdo.retrieve()
            im = cv2.cvtColor(ori_im, cv2.COLOR_BGR2RGB)
            #print(im.shape) # (video_height, video_width, 3)

            # do detection
            bbox_xywh, cls_conf, cls_ids = self.detector(im) #bbox_xywh, confidence, labels

            # section that reads the ground truths from gt.txt and feeds
            # them to the tracker instead of the YOLO detections
            if (self.args.gt): # if --gt was passed on the command line
                if(idx_frame == 1 or idx_frame == 2 or idx_frame == 3): # for three frames the gt data replaces YOLO
                    gt_curr_frame = self.gt[self.gt[:,0]==idx_frame].astype('float64') # rows of the filtered gt data for the current frame
                    gt_curr_frame = gt_curr_frame[:,2:6] # take the tlwh-format columns
                    #print(gt_curr_frame)
                    #print(self.my_tlwh_to_xywh(im, gt_curr_frame))
                    bbox_xywh = self.my_tlwh_to_xywh(im, gt_curr_frame) # gt bboxes instead of the YOLO ones
                    cls_conf = np.ones((bbox_xywh.shape[0],), dtype=int) # replaces the YOLO confidence scores (all scores 1)
                    cls_ids = np.zeros(bbox_xywh.shape[0]) # every bbox gets YOLO class id 0, i.e. person
                    ori_im = draw_boxes(ori_im, self.my_tlwh_to_xyxy(im,gt_curr_frame)) # draw the gt bboxes
                    print("yolo yerine gt kullanıldı, frame: ",idx_frame)

            # for testing: deliberately feed wrong boxes to study the effect
            # of the initial data on the tracker
            """
            bbox_xywh = np.array([[100,200,400.1,600.1],[500,600.1,600.1,800.1]]) #test amaçlı bilerek yanlış vermek için
            cls_conf = np.ones((bbox_xywh.shape[0],), dtype=int) #test amaçlı bilerek yanlış vermek için
            cls_ids = np.zeros(bbox_xywh.shape[0]) #test amaçlı bilerek yanlış vermek için
            ori_im = draw_boxes(ori_im, bbox_xywh)
            """

            """
            labels = ["person", "bicycle", "car", "motorbike", "aeroplane", "bus", "train", "truck",
            "boat", "traffic light", "fire hydrant", "stop sign", "parking meter", "bench",
            "bird", "cat", "dog", "horse", "sheep", "cow", "elephant", "bear", "zebra", "giraffe",
            "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee", "skis", "snowboard",
            "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard",
            "tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana",
            "apple", "sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake",
            "chair", "sofa", "pottedplant", "bed", "diningtable", "toilet", "tvmonitor", "laptop", "mouse",
            "remote", "keyboard", "cell phone", "microwave", "oven", "toaster", "sink", "refrigerator",
            "book", "clock", "vase", "scissors", "teddy bear", "hair drier", "toothbrush"]
            """

            # select person class 0-people 22-zebra 20-elephant
            #mask = (cls_ids == 20) + (cls_ids == 22)
            mask = cls_ids == 0

            bbox_xywh = bbox_xywh[mask]
            # bbox dilation just in case bbox too small, delete this line if using a better pedestrian detector
            bbox_xywh[:, 3:] *= 1.2
            cls_conf = cls_conf[mask]

            # do tracking
            outputs = self.deepsort.update(bbox_xywh, cls_conf, im) #im.shape = (video_height, video_width, 3)

            #print(bbox_xywh) # number_of_detection, 4
            #print(cls_conf) # number_of_detection,

            # draw boxes for visualization
            if len(outputs) > 0:
                bbox_tlwh = []
                bbox_xyxy = outputs[:, :4]
                identities = outputs[:, -1]

                # my own code that draws the raw detections on the screen
                #ori_im = draw_boxes(ori_im, self.my_xywh_to_xyxy(im,bbox_xywh))
                # original code that draws the confirmed tracks on the screen
                ori_im = draw_boxes(ori_im, bbox_xyxy, identities)

                for bb_xyxy in bbox_xyxy:
                    bbox_tlwh.append(self.deepsort._xyxy_to_tlwh(bb_xyxy))

                results.append((idx_frame - 1, bbox_tlwh, identities))

            end = time.time()

            if self.args.display:
                cv2.imshow("test", ori_im)
                cv2.waitKey(1)

            if self.args.save_path:
                self.writer.write(ori_im)

            # save results (the file is rewritten every processed frame)
            write_results(self.save_results_path, results, 'mot')

            # logging
            self.logger.info("time: {:.03f}s, fps: {:.03f}, detection numbers: {}, tracking numbers: {}" \
                             .format(end - start, 1 / (end - start), bbox_xywh.shape[0], len(outputs)))
def parse_args():
    """Build and parse the command-line arguments for the tracker.

    Returns:
        argparse.Namespace with: VIDEO_PATH (positional input video),
        detector / deep-sort config paths, display options, the --gt flag
        (feed ground-truth boxes instead of YOLO for the first frames),
        the frame sampling interval, output path, CUDA toggle and the
        webcam id (``cam``, -1 meaning "read VIDEO_PATH instead").
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("VIDEO_PATH", type=str)
    parser.add_argument("--config_detection", type=str, default="./configs/yolov3.yaml")
    parser.add_argument("--config_deepsort", type=str, default="./configs/deep_sort.yaml")
    parser.add_argument("--display", action="store_true")
    # use detections read from the ground-truth file instead of YOLO
    parser.add_argument("--gt", action="store_true")
    parser.add_argument("--frame_interval", type=int, default=1)
    parser.add_argument("--display_width", type=int, default=800)
    parser.add_argument("--display_height", type=int, default=600)
    parser.add_argument("--save_path", type=str, default="./output/")
    parser.add_argument("--cpu", dest="use_cuda", action="store_false", default=True)
    # Default was the string "-1", which argparse only coerced through
    # type=int because string defaults are re-parsed; use a real int.
    parser.add_argument("--camera", action="store", dest="cam", type=int, default=-1)
    return parser.parse_args()
if __name__ == "__main__":
    args = parse_args()
    cfg = get_config()
    # merge detector and tracker settings into a single config object
    cfg.merge_from_file(args.config_detection)
    cfg.merge_from_file(args.config_deepsort)

    # the context manager opens the video source / writer and cleans up
    with VideoTracker(cfg, args, video_path=args.VIDEO_PATH) as vdo_trk:
        vdo_trk.run()
| 42.432526 | 149 | 0.572698 |
280906641aae735ca1d3dbc649fdb86d59c81472 | 1,172 | py | Python | aerosandbox/numpy/array.py | askprash/AeroSandbox | 9e82966a25ced9ce96ca29bae45a4420278f0f1d | [
"MIT"
] | null | null | null | aerosandbox/numpy/array.py | askprash/AeroSandbox | 9e82966a25ced9ce96ca29bae45a4420278f0f1d | [
"MIT"
] | null | null | null | aerosandbox/numpy/array.py | askprash/AeroSandbox | 9e82966a25ced9ce96ca29bae45a4420278f0f1d | [
"MIT"
] | 1 | 2021-09-11T03:28:45.000Z | 2021-09-11T03:28:45.000Z | import numpy as onp
import casadi as cas
def array(object, dtype=None):
    """Convert *object* to a NumPy ndarray if possible, else a CasADi matrix.

    Drop-in analogue of ``numpy.array()`` for code that mixes plain numbers
    with CasADi symbolic expressions.

    Args:
        object: array-like of numbers and/or CasADi expressions.
        dtype: passed through to ``numpy.array`` on the NumPy path.

    Returns:
        ``numpy.ndarray`` when every element is NumPy-compatible, otherwise
        a CasADi matrix built by stacking the rows of *object*.
    """
    try:
        a = onp.array(object, dtype=dtype)
        if a.dtype == "O":
            # dtype "O" means NumPy fell back to an object array, i.e. some
            # element (e.g. a CasADi MX/SX) is not numeric; take the CasADi
            # path below instead.
            raise Exception
        return a
    except Exception:  # If this occurs, it needs to be a CasADi type.
        # (The original caught (AttributeError, Exception); Exception alone
        # is the identical catch set.)
        # Stack each row horizontally, then stack the rows vertically.
        def make_row(row):
            try:
                return cas.horzcat(*row)
            except Exception:  # Not iterable, or it's a CasADi MX type.
                return row

        return cas.vertcat(
            *[
                make_row(row)
                for row in object
            ]
        )
def length(array) -> int:
    """
    Returns the length of an 1D-array-like object.

    Args:
        array: the object to measure; may be a sequence, a NumPy array, a
            CasADi matrix (anything carrying a ``shape``), or a bare scalar.
            (The parameter name shadows the sibling ``array`` function for
            backwards compatibility.)

    Returns:
        int: ``len(array)`` when defined, the size of the first axis for
        shape-carrying objects, and 1 for scalars.
    """
    try:
        return len(array)
    except TypeError:  # array has no function len() -> either float, int, or CasADi type
        try:
            if len(array.shape) >= 1:
                return array.shape[0]
            else:
                # 0-dimensional (scalar-like) object: fall through to 1
                raise AttributeError
        except AttributeError:  # array has no attribute shape -> either float or int
            return 1
| 25.478261 | 89 | 0.529863 |
280a3ff7069c05f2fa4cfad162456023976a914d | 181 | py | Python | states/__init__.py | EemeliSyynimaa/Pore | 1eca9aa7163f1d31ae84c862790693eb3c904433 | [
"MIT"
] | null | null | null | states/__init__.py | EemeliSyynimaa/Pore | 1eca9aa7163f1d31ae84c862790693eb3c904433 | [
"MIT"
] | null | null | null | states/__init__.py | EemeliSyynimaa/Pore | 1eca9aa7163f1d31ae84c862790693eb3c904433 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
__author__ = 'eeneku'

from main_menu import MainMenu
from world_map import WorldMap
from local_map import LocalMap

# __all__ entries must be name strings; listing the classes themselves
# makes 'from states import *' fail with a TypeError.
__all__ = ['MainMenu', 'WorldMap', 'LocalMap']
280b4458ea5ac2ac7597da9b972198f9e0db4a04 | 2,502 | py | Python | instagram/migrations/0001_initial.py | Maxwel5/photo-app-instagram | 8635346a5115dcc7e282791bd646f0a7f9dd2917 | [
"MIT"
] | null | null | null | instagram/migrations/0001_initial.py | Maxwel5/photo-app-instagram | 8635346a5115dcc7e282791bd646f0a7f9dd2917 | [
"MIT"
] | 8 | 2020-06-06T00:09:24.000Z | 2022-02-10T10:48:10.000Z | instagram/migrations/0001_initial.py | Maxwel5/photo-app-instagram | 8635346a5115dcc7e282791bd646f0a7f9dd2917 | [
"MIT"
] | null | null | null | # Generated by Django 2.2.5 on 2019-10-22 07:03
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial schema for the instagram app: Instagram, Profile, Image
    and Comments models (auto-generated by Django 2.2.5 makemigrations)."""

    # first migration of this app
    initial = True

    dependencies = [
        # the configured user model must exist before the FK/M2M fields below
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Instagram',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=20)),
                ('email', models.EmailField(max_length=254)),
            ],
        ),
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('photo', models.ImageField(blank=True, upload_to='')),
                ('bio', models.CharField(max_length=120)),
            ],
            options={
                'ordering': ['photo'],
            },
        ),
        migrations.CreateModel(
            name='Image',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image_image', models.ImageField(default='images/default.jpeg', upload_to='images/')),
                ('name', models.CharField(max_length=100)),
                ('caption', models.CharField(max_length=150)),
                ('comments', models.TextField()),
                ('pub_date', models.DateTimeField(default=django.utils.timezone.now)),
                ('like', models.ManyToManyField(blank=True, related_name='likes', to=settings.AUTH_USER_MODEL)),
                ('profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='instagram.Profile')),
            ],
        ),
        migrations.CreateModel(
            name='Comments',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('comments', models.TextField()),
                ('image', models.ForeignKey(default=1, on_delete=django.db.models.deletion.PROTECT, related_name='image_comments', to='instagram.Image')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 41.7 | 154 | 0.584333 |
280b7ce2e2cb3f65d56ba5e4705455b1cbb3bb0e | 3,283 | py | Python | capspayment/api_payin.py | agorapay/python-sdk | c5b7fd6894f95e6862446248b26c16253c8fd4f4 | [
"MIT"
] | null | null | null | capspayment/api_payin.py | agorapay/python-sdk | c5b7fd6894f95e6862446248b26c16253c8fd4f4 | [
"MIT"
] | null | null | null | capspayment/api_payin.py | agorapay/python-sdk | c5b7fd6894f95e6862446248b26c16253c8fd4f4 | [
"MIT"
] | null | null | null | """
Payin API
"""
from dataclasses import dataclass
from typing import Union
from api_payin_model import (
PayinAdjustPaymentRequest,
PayinCancelRequest,
PayinCancelResponse,
PayinCaptureRequest,
PayinCaptureResponse,
PayinMandateRequest,
PayinMandateResponse,
PayinOrderDetailsRequest,
PayinOrderDetailsResponse,
PayinPaymentDetailsRequest,
PayinPaymentDetailsResponse,
PayinPaymentIframeRequest,
PayinPaymentIframeResponse,
PayinPaymentMethodsRequest,
PayinPaymentMethodsResponse,
PayinPaymentRequest,
PayinPaymentResponse,
PayinRefundRequest,
PayinRefundResponse,
PayinTicketRequest,
PayinTicketResponse,
)
from base import BaseRequest
from model import Response
@dataclass
class ApiPayin(BaseRequest):
    """Payin API requests.

    Thin wrappers around BaseRequest.request(); each method posts/gets a
    typed payload to one /payin endpoint and returns either the typed
    response or a generic error Response.
    """
    def payment(
        self, payload: PayinPaymentRequest
    ) -> Union[PayinPaymentResponse, Response]:
        """Submit a payment"""
        return self.request("POST", "/payin/payment", payload)

    def payment_details(
        self, payload: PayinPaymentDetailsRequest
    ) -> Union[PayinPaymentDetailsResponse, Response]:
        """Submit additionnal payment details"""
        return self.request("POST", "/payin/paymentDetails", payload)

    def payment_methods(
        self, payload: PayinPaymentMethodsRequest
    ) -> Union[PayinPaymentMethodsResponse, Response]:
        """Submit an order/get payment methods"""
        return self.request("POST", "/payin/paymentMethods", payload)

    def capture(
        self, payload: PayinCaptureRequest
    ) -> Union[PayinCaptureResponse, Response]:
        """Capture a transaction/order"""
        return self.request("POST", "/payin/capture", payload)

    def cancel(
        self, payload: PayinCancelRequest
    ) -> Union[PayinCancelResponse, Response]:
        """Cancel a transaction/order"""
        return self.request("POST", "/payin/cancel", payload)

    def order_details(
        self, payload: PayinOrderDetailsRequest
    ) -> Union[PayinOrderDetailsResponse, Response]:
        """Get all the order details"""
        return self.request("GET", "/payin/orderDetails", payload)

    def adjust_payment(self, payload: PayinAdjustPaymentRequest) -> Response:
        """Adjust the amount of the payment/change the breakdown of the payment"""
        return self.request("POST", "/payin/adjustPayment", payload)

    def payment_iframe(
        self, payload: PayinPaymentIframeRequest
    ) -> Union[PayinPaymentIframeResponse, Response]:
        """Submit an order/get an authent code"""
        return self.request("POST", "/payin/paymentIframe", payload)

    def refund(
        self, payload: PayinRefundRequest
    ) -> Union[PayinRefundResponse, Response]:
        """Refund a transaction/order"""
        return self.request("POST", "/payin/refund", payload)

    def mandate(
        self, payload: PayinMandateRequest
    ) -> Union[PayinMandateResponse, Response]:
        """Get signed mandate file"""
        return self.request("GET", "/payin/mandate", payload)

    def ticket(
        self, payload: PayinTicketRequest
    ) -> Union[PayinTicketResponse, Response]:
        """Get card payment ticket"""
        return self.request("GET", "/payin/ticket", payload)
| 32.186275 | 82 | 0.687786 |
280b8063834de2658f477b63373426eabdf7a4f6 | 5,516 | py | Python | tests/test_api.py | HealthByRo/fdadb | e020a902ca20cebd5999bc2dbc530375ab0922fb | [
"MIT"
] | 1 | 2020-06-11T04:44:22.000Z | 2020-06-11T04:44:22.000Z | tests/test_api.py | HealthByRo/fdadb | e020a902ca20cebd5999bc2dbc530375ab0922fb | [
"MIT"
] | 8 | 2018-11-26T09:22:14.000Z | 2019-10-23T13:17:44.000Z | tests/test_api.py | HealthByRo/fdadb | e020a902ca20cebd5999bc2dbc530375ab0922fb | [
"MIT"
] | null | null | null | from rest_framework.reverse import reverse
from rest_framework.test import APITestCase
from fdadb.models import MedicationName, MedicationNDC, MedicationStrength
class APITests(APITestCase):
    """API tests for the fdadb medication endpoints (names, strengths, NDCs)."""

    def setUp(self):
        # Fixture: 4 medication names x 3 strengths x 2 manufacturers,
        # with deterministic NDC codes of the form name[:5] + strength + manufacturer.
        for name in ("DrugName", "OtherDrugName", "DruuuugName", "NamedDrug"):
            medication_name = MedicationName.objects.create(
                name=name, active_substances=[name + " Substance 1", name + " Substance 2"]
            )
            for strength in (1, 2, 3):
                medication_strength = MedicationStrength.objects.create(
                    medication_name=medication_name,
                    strength={
                        name + " Substance 1": {"strength": strength, "unit": "mg/l"},
                        name + " Substance 2": {"strength": strength + 5, "unit": "mg/l"},
                    },
                )
                for manufacturer in ("M1", "M2"):
                    MedicationNDC.objects.create(
                        medication_strength=medication_strength,
                        ndc=name[:5] + str(strength) + manufacturer,
                        manufacturer=manufacturer,
                    )

    def test_names_api(self):
        """List all medication names, then filter them with ?q=."""
        url = reverse("fdadb-medications-names")
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data["count"], 4)
        self.assertEqual(response.data["results"][0]["name"], "DrugName")
        self.assertEqual(
            response.data["results"][0]["active_substances"], ["DrugName Substance 1", "DrugName Substance 2"]
        )

        # prefix search should narrow the result to the single match
        response = self.client.get(url + "?q=Druuu")
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data["count"], 1)
        self.assertEqual(response.data["results"][0]["name"], "DruuuugName")
        self.assertEqual(
            response.data["results"][0]["active_substances"], ["DruuuugName Substance 1", "DruuuugName Substance 2"]
        )

    def test_strengths_api(self):
        """List the strengths of one medication, then filter with ?q=."""
        url = reverse("fdadb-medications-strengths", kwargs={"medication_name": "NamedDrug"})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data["count"], 3)
        self.assertEqual(response.data["results"][0]["name"], "NamedDrug")
        self.assertEqual(
            response.data["results"][0]["active_substances"], ["NamedDrug Substance 1", "NamedDrug Substance 2"]
        )
        self.assertEqual(
            response.data["results"][0]["strength"],
            {
                "NamedDrug Substance 1": {"strength": 1, "unit": "mg/l"},
                "NamedDrug Substance 2": {"strength": 6, "unit": "mg/l"},
            },
        )

        # searching for "3" should return only the strength-3 entry
        response = self.client.get(url + "?q=3")
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data["count"], 1)
        self.assertEqual(response.data["results"][0]["name"], "NamedDrug")
        self.assertEqual(
            response.data["results"][0]["active_substances"], ["NamedDrug Substance 1", "NamedDrug Substance 2"]
        )
        self.assertEqual(
            response.data["results"][0]["strength"],
            {
                "NamedDrug Substance 1": {"strength": 3, "unit": "mg/l"},
                "NamedDrug Substance 2": {"strength": 8, "unit": "mg/l"},
            },
        )

    def test_ndcs_api(self):
        """List the NDCs of one strength, then filter by manufacturer."""
        strength = MedicationStrength.objects.filter(medication_name__name="OtherDrugName").first()
        url = reverse("fdadb-medications-ndcs", kwargs={"medication_name": "OtherDrugName", "strength_id": strength.pk})
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data["count"], 2)
        self.assertEqual(response.data["results"][0]["name"], "OtherDrugName")
        self.assertEqual(
            response.data["results"][0]["active_substances"], ["OtherDrugName Substance 1", "OtherDrugName Substance 2"]
        )
        self.assertEqual(
            response.data["results"][0]["strength"],
            {
                "OtherDrugName Substance 1": {"strength": 1, "unit": "mg/l"},
                "OtherDrugName Substance 2": {"strength": 6, "unit": "mg/l"},
            },
        )
        self.assertEqual(response.data["results"][0]["manufacturer"], "M1")
        self.assertEqual(response.data["results"][0]["ndc"], "Other1M1")

        # case-insensitive manufacturer search should return only the M2 entry
        strength = MedicationStrength.objects.filter(medication_name__name="OtherDrugName").first()
        url = reverse("fdadb-medications-ndcs", kwargs={"medication_name": "OtherDrugName", "strength_id": strength.pk})
        response = self.client.get(url + "?q=m2")
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.data["count"], 1)
        self.assertEqual(response.data["results"][0]["name"], "OtherDrugName")
        self.assertEqual(
            response.data["results"][0]["active_substances"], ["OtherDrugName Substance 1", "OtherDrugName Substance 2"]
        )
        self.assertEqual(
            response.data["results"][0]["strength"],
            {
                "OtherDrugName Substance 1": {"strength": 1, "unit": "mg/l"},
                "OtherDrugName Substance 2": {"strength": 6, "unit": "mg/l"},
            },
        )
        self.assertEqual(response.data["results"][0]["manufacturer"], "M2")
        self.assertEqual(response.data["results"][0]["ndc"], "Other1M2")
| 45.966667 | 120 | 0.583575 |
280c4e3ff6e2c8be5af4beb5882bf9b9cd5ee1c7 | 3,626 | py | Python | script/gen_canonical_combining_class.py | CyberZHG/UChar | e59ee5e3ad166288380407df6d5e6c0fe20681cf | [
"MIT"
] | 1 | 2020-07-15T16:16:20.000Z | 2020-07-15T16:16:20.000Z | script/gen_canonical_combining_class.py | CyberZHG/UChar | e59ee5e3ad166288380407df6d5e6c0fe20681cf | [
"MIT"
] | null | null | null | script/gen_canonical_combining_class.py | CyberZHG/UChar | e59ee5e3ad166288380407df6d5e6c0fe20681cf | [
"MIT"
] | 1 | 2020-06-01T01:15:29.000Z | 2020-06-01T01:15:29.000Z | #!/usr/bin/env python
""" Copyright 2020 Zhao HG
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
def _write_c_array(writer, name, values, tail='\n};\n'):
    """Write ``const int32_t <name>[] = { ... };`` with eight values per line.

    ``values`` are pre-formatted strings; ``tail`` closes the array (the
    second array in the generated file is followed by an extra blank line).
    The two array-emitting loops below were previously duplicated inline;
    this helper emits byte-identical output.
    """
    writer.write('\nconst int32_t ' + name + '[] = {')
    for i, value in enumerate(values):
        if i == 0:
            writer.write('\n    ')
        elif i % 8 == 0:
            writer.write(',\n    ')
        else:
            writer.write(', ')
        writer.write(value)
    writer.write(tail)


# Scan UnicodeData.txt and record, for each run of identical canonical
# combining classes (field 3), the code point (field 0) that starts the run.
with open('UnicodeData.txt', 'r') as reader:
    last, indices, canonicals, classes = '', [], [], {}
    for line in reader:
        parts = line.strip().split(';')
        if parts[3] != last:
            last = parts[3]
            indices.append(parts[0])
            canonicals.append(parts[3])
            # One representative starting code point per class (later runs
            # overwrite earlier ones) -- used for the generated unit test.
            classes[parts[3]] = parts[0]

# Append the declarations to the public header.
with open('include/unicode_data.h', 'a') as writer:
    writer.write('/** The total number of indices used to store the canonical combing class. */\n')
    writer.write('const int32_t CANONICAL_COMBINING_NUM = {};\n'.format(len(indices)))
    writer.write('/** The indices of the first character that have a different type. */\n')
    writer.write('extern const int32_t CANONICAL_COMBINING_INDEX[];\n')
    writer.write('/** The canonical combining class data. */\n')
    writer.write('extern const int32_t CANONICAL_COMBINING_CLASS[];\n\n')

# Emit the data arrays into the implementation file.
with open('src/canonical_combining_class.cpp', 'w') as writer:
    with open('copyright.txt', 'r') as reader:
        writer.write(reader.read())
    writer.write('#include "unicode_data.h"\n\n')
    writer.write('namespace unicode {\n\n')
    _write_c_array(writer, 'CANONICAL_COMBINING_INDEX', ['0x' + index for index in indices])
    _write_c_array(writer, 'CANONICAL_COMBINING_CLASS', canonicals, tail='\n};\n\n')
    writer.write('} // namespace unicode\n')

# Generate a unit test asserting one representative code point per class.
with open('tests/test_canonical_combining_class_gen.cpp', 'w') as writer:
    with open('copyright.txt', 'r') as reader:
        writer.write(reader.read())
    writer.write('#include "test.h"\n')
    writer.write('#include "unicode_char.h"\n\n')
    writer.write('namespace test {\n\n')
    writer.write('class CanonicalCombiningClassGenTest : public UnitTest {};\n\n')
    writer.write('__TEST_U(CanonicalCombiningClassGenTest, test_classes) {\n')
    for canonical, code in classes.items():
        writer.write('    __ASSERT_EQ({}, unicode::getCanonicalCombiningClass({}));\n'.format(
            canonical, '0x' + code
        ))
    writer.write('}\n\n')
    writer.write('} // namespace test\n')
| 40.741573 | 99 | 0.660232 |
280cef3837d316af797287a2c5c707f3a00a10c1 | 3,676 | py | Python | server.py | Timothylock/twillio-buzzer-connector | 9ac7e4763a5eee7d04daa054841e17332c0bac13 | [
"Apache-2.0"
] | null | null | null | server.py | Timothylock/twillio-buzzer-connector | 9ac7e4763a5eee7d04daa054841e17332c0bac13 | [
"Apache-2.0"
] | null | null | null | server.py | Timothylock/twillio-buzzer-connector | 9ac7e4763a5eee7d04daa054841e17332c0bac13 | [
"Apache-2.0"
] | null | null | null | from flask import Flask, request
from twilio.twiml.voice_response import VoiceResponse, Gather
import datetime
import os
import json
import http.client
app = Flask(__name__)
allowUntil = datetime.datetime.now()
# Fetch env vars
whitelisted_numbers = os.environ['WHITELISTED_NUMBERS'].split(",") # Numbers allowed to dial into the system
forward_number = os.environ['FORWARD_NUMBER'] # Number that will be forwarded to if not whitelisted
forward_number_from = os.environ['FORWARD_NUMBER_FROM'] # Number that will be forwarded to if not whitelisted
buzzcode = os.environ['BUZZCODE'] # Digits to dial to let them in
minutes = int(os.environ['MINUTES']) # Number of minutes to unlock the system
slack_path = os.environ['SLACK_PATH'] # Slack path for slack message
say_message = os.environ['SAY_MESSAGE'] # The message to be said to the dialer
# Buzzer
##########################################################################
@app.route("/buzzer/webhook", methods=['GET', 'POST'])
def voice():
"""Respond to incoming phone calls"""
resp = VoiceResponse()
incoming_number = request.values['From']
# If an unknown number, filter out robo callers and forward to cell
if incoming_number not in whitelisted_numbers:
gather = Gather(num_digits=1, action='/buzzer/forward')
gather.say('Press 1 to continue')
resp.append(gather)
return str(resp)
# Tell the user a nice message that they are not permitted to enter
if not allowed_to_buzz():
resp.say("The system cannot let you in. Did you dial the right buzzcode?")
send_message("A visitor was just rejected as the buzzer system was not unlocked")
return str(resp)
# Otherwise, unlock the door
resp.say(say_message, language='zh-CN')
resp.play(digits=buzzcode)
send_message("A visitor was just let in")
return str(resp)
@app.route("/buzzer/forward", methods=['GET', 'POST'])
def forward():
    """Bridge a non-whitelisted caller through to the configured phone number."""
    caller = request.values['From']
    send_message("About to forward a call from " + str(caller))

    resp = VoiceResponse()
    resp.say("Please note your call may be recorded for the benefit of both parties")
    resp.dial(forward_number, caller_id=forward_number_from)
    return str(resp)
@app.route("/buzzer/state", methods=['POST'])
def change_state():
    """Lock or unlock the buzzer.

    Expects a JSON body like ``{"active": "true"}`` (string values, per the
    existing API contract).  "true" unlocks the door for the next ``minutes``
    minutes; "false" locks it immediately.
    """
    global allowUntil
    # get_json(silent=True) returns None on a missing/invalid JSON body
    # instead of raising, so malformed requests get a clean 400 rather
    # than a 500 (the original indexed request.json directly).
    c = request.get_json(silent=True)
    if c is None:
        return "request body must be JSON", 400
    if "active" not in c:
        return "missing \"active\" field", 400
    if c["active"] == "true":
        allowUntil = datetime.datetime.now() + datetime.timedelta(minutes=minutes)
    if c["active"] == "false":
        allowUntil = datetime.datetime.now()
    return "OK", 200
@app.route("/buzzer/state", methods=['GET'])
def status():
    """Report whether the buzzer will currently let visitors in."""
    is_active = "true" if allowed_to_buzz() else "false"
    return json.dumps({"is_active": is_active}), 200
def allowed_to_buzz():
    """Return True while the unlock window set via /buzzer/state is still open."""
    # Read-only access to the module-level expiry; no `global` needed.
    return datetime.datetime.now() < allowUntil
def send_message(message):
    """Post ``message`` to Slack via the configured incoming-webhook path.

    Failures are logged and swallowed on purpose: notification problems
    must never break call handling.
    """
    conn = None
    try:
        conn = http.client.HTTPSConnection("hooks.slack.com")
        # json.dumps escapes quotes/backslashes; the original hand-built
        # string produced invalid JSON for messages containing '"'.
        payload = json.dumps({"text": message})
        headers = {
            'content-type': "application/json",
        }
        conn.request("POST", slack_path, payload, headers)
        conn.getresponse()
    except Exception as exc:  # was a bare except: keep best-effort, but log why
        print("error sending message:", exc)
    finally:
        if conn is not None:
            conn.close()
if __name__ == "__main__":
    # Run the Flask development server on all interfaces, port 8080.
    app.run(host='0.0.0.0', port=8080)
| 33.418182 | 121 | 0.639554 |
280f1650f5bc3fd7a59f3f2ae253341d13e12350 | 5,738 | py | Python | App/GUI_Pages/LoginPage.py | TUIASI-AC-enaki/Shopping_Application | d6c6f446618937347f9c78fe3b969bc2c2ef9331 | [
"Apache-2.0"
] | null | null | null | App/GUI_Pages/LoginPage.py | TUIASI-AC-enaki/Shopping_Application | d6c6f446618937347f9c78fe3b969bc2c2ef9331 | [
"Apache-2.0"
] | null | null | null | App/GUI_Pages/LoginPage.py | TUIASI-AC-enaki/Shopping_Application | d6c6f446618937347f9c78fe3b969bc2c2ef9331 | [
"Apache-2.0"
] | null | null | null | import tkinter as tk
from tkinter import font as tkfont, ttk
import logging as log
import sys
from cx_Oracle import DatabaseError
from GUI_Pages.BasicPage import TitlePage
from Utilities.Cipher import Cipher, get_hash
# Log everything (DEBUG and up) to stdout with timestamps and level names.
FORMAT = '[%(asctime)s] [%(levelname)s] : %(message)s'
log.basicConfig(stream=sys.stdout, level=log.DEBUG, format=FORMAT)
class LoginPage(TitlePage):
    """Login screen: collects credentials, authenticates via the
    ``auth_pkg.login`` stored procedure, and unlocks UI features according
    to the authenticated user's level."""
    def __init__(self, parent, controller):
        super().__init__(parent, controller)
        self.init()
    def init(self):
        """Build the login form: username/password entries plus Login and
        Sign Up buttons, centered on a gold background."""
        width_label = 10
        width_entry = 25
        text_font = tkfont.Font(family='Helvetica', size=13)
        button_font = tkfont.Font(family='Helvetica', size=10)
        login_frame = tk.Frame(master=self, bg='gold')
        login_frame.pack(side=tk.TOP, fill=tk.BOTH, expand=True)
        # Single stretchable grid cell keeps the LabelFrame centered on resize.
        login_frame.grid_rowconfigure(0, weight=1)
        login_frame.grid_columnconfigure(0, weight=1)
        login_label_frame = tk.LabelFrame(login_frame, bg='gray80')
        login_label_frame.grid(row=0, column=0)
        tk.Label(login_label_frame, text='username', font=text_font, bg=login_label_frame['bg'], fg='red',
                 width=width_label).grid(row=0, column=0, padx=5, pady=10)
        self.username_entry = tk.Entry(login_label_frame, width=width_entry)
        self.username_entry.grid(row=0, column=1,)
        tk.Label(login_label_frame, text='password', font=text_font, bg=login_label_frame['bg'], fg='red',
                 width=width_label).grid(row=1, column=0, padx=5, pady=10)
        # show="*" masks the typed password on screen.
        self.password_entry = tk.Entry(login_label_frame, show="*", width=width_entry)
        self.password_entry.grid(row=1, column=1, padx=5, pady=10)
        self.login_button = tk.Button(login_label_frame, text='Login', font=button_font, command=self.on_login, bg='green', fg='white')
        self.login_button.grid(row=2, column=1, padx=5, pady=5)
        self.sign_up_button = tk.Button(login_label_frame, text='Sign Up', font=button_font, command=self.on_sign_up, bg='blue', fg='white')
        self.sign_up_button.grid(row=2, column=0, padx=5, pady=5)
    def set_states(self, user_level):
        """Enable/disable admin-only widgets according to ``user_level``.

        'admin' keeps everything enabled.  Everyone else first has the
        advanced options and every insert/update/delete frame disabled;
        'admin_shop' then re-enables the shop/product frames and
        'admin_ship' re-enables the shipping frames.
        """
        if user_level == 'admin':
            return
        else:
            self.controller.set_state(self.controller.frames['HomePage'].advanced_options_button)
            self.controller.set_state(self.controller.frames['ShopPage'].insert_frame)
            self.controller.set_state(self.controller.frames['ShopPage'].update_frame)
            self.controller.set_state(self.controller.frames['ShopPage'].delete_frame)
            self.controller.set_state(self.controller.frames['ProductPage'].insert_frame)
            self.controller.set_state(self.controller.frames['ProductPage'].update_frame)
            self.controller.set_state(self.controller.frames['ProductPage'].delete_frame)
            self.controller.set_state(self.controller.frames['ShippingPage'].insert_frame)
            self.controller.set_state(self.controller.frames['ShippingPage'].update_frame)
            self.controller.set_state(self.controller.frames['ShippingPage'].delete_frame)
        if user_level == 'admin_shop':
            self.controller.set_state(self.controller.frames['ShopPage'].insert_frame, 'normal')
            self.controller.set_state(self.controller.frames['ShopPage'].update_frame, 'normal')
            self.controller.set_state(self.controller.frames['ShopPage'].delete_frame, 'normal')
            self.controller.set_state(self.controller.frames['ProductPage'].insert_frame, 'normal')
            self.controller.set_state(self.controller.frames['ProductPage'].update_frame, 'normal')
            self.controller.set_state(self.controller.frames['ProductPage'].delete_frame, 'normal')
        if user_level == 'admin_ship':
            self.controller.set_state(self.controller.frames['ShippingPage'].insert_frame, 'normal')
            self.controller.set_state(self.controller.frames['ShippingPage'].update_frame, 'normal')
            self.controller.set_state(self.controller.frames['ShippingPage'].delete_frame, 'normal')
    def on_login(self):
        """Authenticate the entered credentials and switch to the home page.

        The encrypt/decrypt round-trip below only demonstrates the transport
        cipher; what is actually sent to the database is the password hash.
        """
        username = self.username_entry.get()
        password = self.password_entry.get()
        #-------Use encryption when sending data across internet
        pass_encrypted = Cipher.encrypt(password)
        log.info("Password encrypted: {}".format(pass_encrypted.decode()))
        password = Cipher.decrypt(pass_encrypted)
        log.info("Password decrypted: {}".format(password))
        #end encryption and decryption part
        # --------Get hash of password
        password = get_hash(password)
        log.info("Password Hash: {}".format(password))
        try:
            user_account_var = self.controller.get_complex_type_var('AUTH_PKG.USER_ACCOUNT')
            self.controller.run_procedure('auth_pkg.login', [username, password, user_account_var])
        except DatabaseError as e:
            log.info("Login Failed Incorect username as password")
            # Error 20100 is what auth_pkg.login raises for bad credentials
            # (any other database error is silently ignored here).
            if e.args[0].code == 20100:
                from tkinter import messagebox
                messagebox.showinfo("Login Failed", "Wrong username or password")
            return
        # Successful login: cache the user record and rebuild the UI for it.
        user_info = self.controller.get_dict_from_oracle_object(user_account_var)
        self.controller.user_info = user_info
        self.controller.re_create_frames()
        self.set_states(user_info['user_level'])
        self.controller.frames["HomePage"].home_page_welcome_label_var.set("Welcome {}".format(user_info['first_name']))
        self.controller.frames["HomePage"].populate_the_table_with_all_values()
        self.controller.show_frame("HomePage")
    def on_sign_up(self):
        """Navigate to the account-creation page."""
        self.controller.show_frame("SignUpPage")
2810be0978f433319136f58db93ce028bbbb9a9c | 8,151 | py | Python | cosmos/ingestion/ingest/process/hierarchy_extractor/bert_hierarchy_extractor/train/bert_extractor_trainer.py | ilmcconnell/Cosmos | 84245034727c30e20ffddee9e02c7e96f3aa115e | [
"Apache-2.0"
] | 30 | 2019-03-14T08:24:34.000Z | 2022-03-09T06:05:44.000Z | cosmos/ingestion/ingest/process/hierarchy_extractor/bert_hierarchy_extractor/train/bert_extractor_trainer.py | ilmcconnell/Cosmos | 84245034727c30e20ffddee9e02c7e96f3aa115e | [
"Apache-2.0"
] | 78 | 2019-02-07T22:14:48.000Z | 2022-03-09T05:59:18.000Z | cosmos/ingestion/ingest/process/hierarchy_extractor/bert_hierarchy_extractor/train/bert_extractor_trainer.py | ilmcconnell/Cosmos | 84245034727c30e20ffddee9e02c7e96f3aa115e | [
"Apache-2.0"
] | 11 | 2019-03-02T01:20:06.000Z | 2022-03-25T07:25:46.000Z | from bert_hierarchy_extractor.datasets.train_dataset import TrainHierarchyExtractionDataset
from bert_hierarchy_extractor.datasets.utils import cudafy
from bert_hierarchy_extractor.logging.utils import log_metrics
import numpy as np
from torch.utils.data import DataLoader
from transformers import AdamW, get_linear_schedule_with_warmup
import torch
import time
from tqdm import tqdm
from comet_ml import Experiment
def placeholder_num_correct(x, y, print_result=False):
result = torch.argmax(x, dim=1)
result = result.view(-1)
y2 = y.view(-1)
mask = (y2 != -1)
y2 = y2[mask]
result = result[mask]
if print_result:
print('*************')
y1mask = (y[0] != -1)
print(y[0][y1mask])
print('-------------')
rez = torch.argmax(x[0], dim=0)
print(rez[y1mask])
print('**************')
total_correct = (result == y2).sum().detach().cpu().numpy()
total = result.shape[0]
return total_correct, total
class BertExtractorTrainer:
    """Fine-tunes a BERT hierarchy-extraction model with a class-balanced loss.

    Handles data loading, class-weight computation, gradient accumulation,
    linear LR warmup/decay, periodic validation, and best-checkpoint saving
    (via ``save_pretrained('best')``).

    NOTE(review): validation currently reuses the *training* dataset (the
    real validation dataset is commented out below) and ``save_min`` is
    accepted but unused -- the save metric is always minimized.  Confirm
    before relying on either.
    """

    def __init__(
        self,
        experiment: Experiment,
        model,
        data_path: str,
        base_model: str,
        bsz: int,
        num_workers: int,
        lr: float,
        weight_decay: float,
        warmup_updates: int,
        max_updates: int,
        accumulation_steps: int,
        validate_interval: int,
        save_metric: str,
        save_min: bool,
        device: str,
        seed=1,
        num_correct=placeholder_num_correct,
    ):
        """
        :param experiment: comet_ml Experiment used for metric logging
        :param model: Initialized model
        :param data_path: Path to dataset
        :param base_model: Path to base model
        :param bsz: Batch size
        :param num_workers: Num workers available
        :param lr: Learning rate
        :param weight_decay: weight decay
        :param warmup_updates: number of updates to warm up the learning rate
        :param max_updates: max number of optimizer updates
        :param accumulation_steps: Number of batches to accumulate loss over before running an update
        :param validate_interval: num updates between validations
        :param save_metric: metric to use to save best model
        :param save_min: Whether we're looking to minimize or maximize the save metric
        :param device: torch device string, e.g. "cuda" or "cpu"
        :param seed: Random seed for iteration
        :param num_correct: callable(logits, labels) -> (n_correct, n_total)
        """
        torch.manual_seed(seed)
        self.experiment = experiment
        self.device = device
        print(device)
        self.model = model.to(device)
        self.max_accumulation = accumulation_steps
        print("Loading training dataset")
        self.train_dataset = TrainHierarchyExtractionDataset(data_path)
        # Class-balanced loss weights from per-class counts ("effective
        # number of samples" weighting with beta = 0.9999).  The label map
        # contains one non-class entry, hence the -1.
        num_classes = len(self.train_dataset.label_map) - 1
        class_counts = np.zeros(num_classes)
        for i in range(len(self.train_dataset)):
            _, labels = self.train_dataset[i]
            for cl in labels:
                class_counts[cl] += 1
        effective_num = 1.0 - np.power(0.9999, class_counts)
        weights = (1.0 - 0.9999) / np.array(effective_num)
        weights = weights / np.sum(weights * num_classes)
        self.weights = torch.FloatTensor(weights).to(device)
        print(self.weights)
        #print("Loading validation dataset")
        #self.val_dataset = TrainHierarchyExtractionDataset(data_path, base_model, "val")
        self.train_dataloader = DataLoader(
            self.train_dataset,
            batch_size=bsz,
            num_workers=num_workers,
            pin_memory=True,
            shuffle=True,
            collate_fn=TrainHierarchyExtractionDataset.collate,
        )
        # NOTE(review): iterates the training set until a real validation
        # split is wired in (see commented-out lines above).
        self.val_dataloader = DataLoader(
            self.train_dataset,
            batch_size=bsz,
            num_workers=num_workers,
            pin_memory=True,
            shuffle=True,
            collate_fn=TrainHierarchyExtractionDataset.collate,
        )
        self.bsz = bsz
        # BUG FIX: the ``weight_decay`` argument was previously ignored in
        # favor of a hard-coded 0.01.
        self.optimizer = AdamW(model.parameters(), lr=lr, weight_decay=weight_decay)
        self.scheduler = get_linear_schedule_with_warmup(
            self.optimizer,
            num_warmup_steps=warmup_updates,
            num_training_steps=max_updates,
        )
        self.max_updates = max_updates
        self.validate_interval = validate_interval
        self.num_correct = num_correct
        self.save_metric = save_metric
        # Best-so-far value of ``save_metric``; lower is treated as better.
        self.current_best_metric = float('inf')

    def validate(self, validate_cap=None, best_save_metric=None):
        """Evaluate on up to ``validate_cap`` batches of the validation loader.

        Returns a dict with ``val_loss``, ``val_accuracy`` and
        ``val_per_sample_loss``.  When ``best_save_metric`` is given and the
        new value does not exceed the best seen so far, the model is
        checkpointed to ``best/``.
        """
        self.model.eval()
        val_cap = validate_cap if validate_cap is not None else len(self.val_dataloader)
        with tqdm(total=val_cap) as pbar:
            total_loss = 0
            total_correct = 0
            total_instances = 0
            for ind, batch in enumerate(self.val_dataloader):
                if ind > val_cap:
                    break
                xs, labels = cudafy(batch)
                loss, logits = self.model(xs, labels=labels, weights=self.weights)
                # Dump predictions for the first few batches for inspection.
                nc, t = self.num_correct(logits, labels, print_result=True if ind < 5 else False)
                total_correct += nc
                total_instances += t
                total_loss += loss.detach().cpu().numpy()
                pbar.update(1)
        loss_per_sample = total_loss / val_cap / self.bsz
        accuracy = total_correct / total_instances
        metrics = {}
        metrics["val_loss"] = loss_per_sample
        metrics["val_accuracy"] = accuracy
        # NOTE(review): despite its name this logs the *total* loss; kept
        # unchanged for continuity of previously logged metrics.
        metrics["val_per_sample_loss"] = total_loss
        if best_save_metric is not None:
            if metrics[best_save_metric] <= self.current_best_metric:
                self.model.save_pretrained('best')
                self.current_best_metric = metrics[best_save_metric]
        return metrics

    def train(self):
        """Run the main training loop with gradient accumulation.

        Losses are summed over ``accumulation_steps`` batches before a single
        backward/clip/step, validation runs every ``validate_interval``
        updates, and the final weights are saved to ``last.pt``.
        """
        start_time = time.time()
        # Verify forward pass using validation loop
        metrics = self.validate(validate_cap=5)
        self.model.train()
        with tqdm(total=self.max_updates, desc='Number of updates') as pbar:
            total_updates = 1
            val_updates = 1
            while total_updates < self.max_updates:
                accumulation_steps = 0
                accumulation_loss = None
                for batch in self.train_dataloader:
                    xs, labels = cudafy(batch)
                    loss, _ = self.model(xs, labels=labels, weights=self.weights)
                    if accumulation_loss is None:
                        accumulation_steps += 1
                        accumulation_loss = loss
                    elif accumulation_steps > self.max_accumulation:
                        # Flush the accumulated loss: one optimizer update.
                        self.optimizer.zero_grad()
                        accumulation_loss.backward()
                        torch.nn.utils.clip_grad_norm_(self.model.parameters(), 1.0)
                        self.optimizer.step()
                        self.scheduler.step()
                        pbar.update(1)
                        total_updates += 1
                        accumulation_steps = 0
                        accumulation_loss = loss
                        l = loss.detach().cpu().numpy()
                        metrics = {}
                        metrics["train_update_loss"] = l
                        metrics["train_per_sample_loss"] = l / self.bsz
                        # TODO: Accuracy, f1, etc metrics
                        log_metrics(self.experiment, metrics, total_updates)
                        if total_updates % self.validate_interval == 0:
                            metrics = self.validate(validate_cap=100, best_save_metric=self.save_metric)
                            # BUG FIX: validate() switches the model to eval
                            # mode; restore training mode (dropout etc.) as
                            # is already done after the initial validate().
                            self.model.train()
                            val_updates += 1
                            log_metrics(self.experiment, metrics, val_updates)
                    else:
                        accumulation_steps += 1
                        accumulation_loss += loss
        metrics = self.validate(validate_cap=1000)
        print(f"Final validation metrics: {metrics}")
        torch.save(self.model.state_dict(), 'last.pt')
        val_updates += 1
        log_metrics(self.experiment, metrics, val_updates)
        end_time = time.time()
        total_time = end_time - start_time
        print(f"Total train time: {total_time}")
2811f691c9df0cfa06acd32c5b53be25799129d1 | 786 | py | Python | lvl2.py | choxner/python-challenge | 3c726936027087bc38f830a758549dd68467af52 | [
"Apache-2.0"
] | null | null | null | lvl2.py | choxner/python-challenge | 3c726936027087bc38f830a758549dd68467af52 | [
"Apache-2.0"
] | null | null | null | lvl2.py | choxner/python-challenge | 3c726936027087bc38f830a758549dd68467af52 | [
"Apache-2.0"
] | null | null | null |
# Level 2 of pythonchallenge.com!
# Challenge: within the source code of this level, there is a
# set of jumbled characters. Within these characters, find the
# letters and join them together to find the correct url.
from solution_framework import Solution
import requests
# to view source code
import re
# to use regular expressions
# Fetch the page hosting this challenge; the jumbled character block lives
# in the third "<!--"-delimited segment of the HTML source.
res = requests.get("http://www.pythonchallenge.com/pc/def/ocr.html")
res.raise_for_status()  # fail loudly on HTTP errors instead of parsing junk
text_to_search = res.text.split("<!--")[2]
# \w matches letters, digits and '_'; the rare characters we want are the
# letters, so drop the underscores and join everything that remains.
# (Replaces the original manual append loop with a single join.)
url_result = "".join(ch for ch in re.findall(r'\w', text_to_search) if ch != '_')
Solution(url_result)
2812d4c9e6e9c407e500296b0bda22c042be6c3e | 1,444 | py | Python | saleor/social/migrations/0001_initial.py | autobotasia/saleor | e03e9f6ab1bddac308a6609d6b576a87e90ae655 | [
"CC-BY-4.0"
] | 1 | 2022-02-19T13:27:40.000Z | 2022-02-19T13:27:40.000Z | saleor/social/migrations/0001_initial.py | autobotasia/saleor | e03e9f6ab1bddac308a6609d6b576a87e90ae655 | [
"CC-BY-4.0"
] | null | null | null | saleor/social/migrations/0001_initial.py | autobotasia/saleor | e03e9f6ab1bddac308a6609d6b576a87e90ae655 | [
"CC-BY-4.0"
] | 2 | 2021-12-03T16:59:37.000Z | 2022-02-19T13:05:42.000Z | # Generated by Django 3.1.7 on 2021-05-10 07:38
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import saleor.core.utils.json_serializer
class Migration(migrations.Migration):
    """Auto-generated initial migration for the ``social`` app.

    Creates the ``Social`` model: a per-(user, store) record with a
    ``follow`` flag and the project's standard JSON metadata fields.
    """
    # First migration of this app, so it has no predecessor within ``social``.
    initial = True
    dependencies = [
        ('store', '0001_initial'),
        # Resolves AUTH_USER_MODEL correctly even if the user model is swapped.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Social',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('private_metadata', models.JSONField(blank=True, default=dict, encoder=saleor.core.utils.json_serializer.CustomJsonEncoder, null=True)),
                ('metadata', models.JSONField(blank=True, default=dict, encoder=saleor.core.utils.json_serializer.CustomJsonEncoder, null=True)),
                ('follow', models.BooleanField(default=True)),
                # Both FKs use SET_NULL so deleting a store/user keeps the row.
                ('store', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='socials', to='store.store')),
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'ordering': ('pk',),
                'permissions': (('manage_socials', 'Manage social.'),),
            },
        ),
    ]
| 41.257143 | 156 | 0.640582 |
28137bb29b2acdc147558b677e97f5e615bea160 | 2,900 | py | Python | adduser.py | Vignesh424/Face-Recognition-Attendance-Python | 5d9c33b64bd41918edc55290a320f73bc4afa4e5 | [
"Apache-2.0"
] | null | null | null | adduser.py | Vignesh424/Face-Recognition-Attendance-Python | 5d9c33b64bd41918edc55290a320f73bc4afa4e5 | [
"Apache-2.0"
] | null | null | null | adduser.py | Vignesh424/Face-Recognition-Attendance-Python | 5d9c33b64bd41918edc55290a320f73bc4afa4e5 | [
"Apache-2.0"
] | null | null | null | import cv2
import os
import sqlite3
import dlib
import re,time
from playsound import playsound
import pyttsx3
cam = cv2.VideoCapture(0)
cam.set(3, 640) # set video width
cam.set(4, 480) # set video height
face_detector = cv2.CascadeClassifier('C:/Users/ACER/Desktop/PROJECT ALL RESOURCE/PROJECT ALL RESOURCE/Face recognition/HaarCascade/haarcascade_frontalface_default.xml')
detector = dlib.get_frontal_face_detector()
# init function to get an engine instance for the speech synthesis
engine1 = pyttsx3.init()
engine2 = pyttsx3.init()
# For each person, enter one numeric face id
detector = dlib.get_frontal_face_detector()
regex = '^\w+([\.-]?\w+)*@\w+([\.-]?\w+)*(\.\w{2,3})+$'
Id =int(input("Enter ID:"))
fullname = input("Enter FullName : ")
email=input("Enter Email:")
match = re.match(regex,email)
if match == None:
print('Invalid Email')
raise ValueError('Invalid Email')
rollno = int(input("Enter Roll Number : "))
print("\n [INFO] Initializing face capture. Look the camera and wait ...")
# say method on the engine that passing input text to be spoken
playsound('sound.mp3')
engine1.say('User Added Successfully')
# run and wait method, it processes the voice commands.
engine2.runAndWait()
connects = sqlite3.connect("C:/Users/ACER/Desktop/PROJECT ALL RESOURCE/PROJECT ALL RESOURCE/Face recognition/sqlite3/Studentdb.db")# connecting to the database
c = connects.cursor()
c.execute('CREATE TABLE IF NOT EXISTS Student (ID INT NOT NULL UNIQUE PRIMARY KEY, FULLNAME TEXT NOT NULL, EMAIL NOT NULL, ROLLNO INT UNIQUE NOT NULL , STATUS TEXT DATE TIMESTAMP)')
c.execute("INSERT INTO Student(ID, FULLNAME, EMAIL,ROLLNO) VALUES(?,?,?,?)",(Id,fullname,email,rollno))
print('Record entered successfully')
connects.commit()# commiting into the database
c.close()
connects.close()# closing the connection
# Initialize individual sampling face count
count = 0
while(True):
ret, img = cam.read()
img = cv2.flip(img,1) # flip video image vertically
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
faces = face_detector.detectMultiScale(gray, 1.3, 5)
for (x,y,w,h) in faces:
cv2.rectangle(img, (x,y), (x+w,y+h), (255,0,0), 2)
count += 1
# Save the captured image into the datasets folder
cv2.imwrite("dataset/User." + str(Id) + '.' + str(count) + ".jpg", gray[y:y+h,x:x+w])
cv2.imshow('image', img)
k = cv2.waitKey(100) & 0xff # Press 'ESC' for exiting video
if k == 27:
break
elif count >= 30: # Take 30 face sample and stop video
playsound('sound.mp3')
engine2.say('DataSets Captured Successfully')
# run and wait method, it processes the voice commands.
engine2.runAndWait()
break
# Doing a bit of cleanup
print("\n [INFO] Exiting Program and cleanup stuff")
cam.release()
cv2.destroyAllWindows()
| 43.283582 | 182 | 0.686207 |
2814523dd67ea38c542b435dab46056033dd5d9d | 4,847 | py | Python | web/app/djrq/model/ampache/__init__.py | bmillham/djrq2 | c84283b75a7c15da1902ebfc32b7d75159c09e20 | [
"MIT"
] | 1 | 2016-11-23T20:50:00.000Z | 2016-11-23T20:50:00.000Z | web/app/djrq/model/ampache/__init__.py | bmillham/djrq2 | c84283b75a7c15da1902ebfc32b7d75159c09e20 | [
"MIT"
] | 15 | 2017-01-15T04:18:40.000Z | 2017-02-25T04:13:06.000Z | web/app/djrq/model/ampache/__init__.py | bmillham/djrq2 | c84283b75a7c15da1902ebfc32b7d75159c09e20 | [
"MIT"
] | null | null | null | from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import *
from sqlalchemy import *
from sqlalchemy.sql import func, or_
from sqlalchemy.types import TIMESTAMP
from sqlalchemy.ext.hybrid import hybrid_property
from time import time
import markupsafe
from sqlalchemy.ext.associationproxy import association_proxy
#from auth import *
Base = declarative_base()
#metadata = Base.metadata
#session = StackedObjectProxy()
#database_type = "MySQL"
class User(Base):
    """Ampache user account row.

    ``password`` is a write-through synonym: assigning a plain value stores
    its SHA-512 hex digest in the ``password`` column.

    NOTE(review): ``hashlib.update`` requires ``bytes`` under Python 3, so
    ``_set_password``/``authenticate`` as written only work with byte-string
    inputs (or under Python 2) -- confirm before porting.
    """

    __tablename__ = 'user'
    id = Column(Integer, primary_key=True)
    username = Column(String(255))
    fullname = Column(String(255))
    email = Column(String(255))
    website = Column(String(255))
    apikey = Column(String(255))
    _password = Column('password', String(128), nullable=False)
    access = Column(Integer())
    disabled = Column(Integer())
    last_seen = Column(Integer())
    create_date = Column(Integer())
    validation = Column(String(255))

    def _set_password(self, value):
        """Store the SHA-512 digest of ``value``; ``None`` clears the password."""
        if value is None:
            self._password = None
            return
        import hashlib
        encoder = hashlib.new('sha512')
        encoder.update(value)
        self._password = encoder.hexdigest()

    password = synonym('_password', descriptor=property(lambda self: self._password, _set_password))

    @classmethod
    def authenticate(cls, identifier, password=None, force=False):
        """Return ``(user.id, user)`` on success, else ``None``.

        ``force=True`` skips the password check entirely (trusted callers
        only, e.g. an already-validated session).
        """
        if not force and not password:
            return None
        try:
            user = cls.get(identifier)
        except Exception:  # BUG FIX: was a bare except (also caught SystemExit etc.)
            return None
        if force:
            return user.id, user
        import hashlib
        encoder = hashlib.new('sha512')
        encoder.update(password)
        if user.password is None or user.password != encoder.hexdigest():
            return None
        return user.id, user
class Account(Base):
    """Account with group membership and, through groups, permissions.

    NOTE(review): ``lookup``/``authenticate`` rely on a module-level
    ``session`` whose initialization is commented out at the bottom of this
    file -- confirm how it is wired at runtime.
    """

    __tablename__ = 'accounts'
    __repr__ = lambda self: "Account(%s, '%s')" % (self.id, self.name)

    id = Column(String(32), primary_key=True)
    name = Column(Unicode(255), nullable=False)
    _password = Column('password', String(128))

    def _set_password(self, value):
        """Store the SHA-512 digest of ``value``; ``None`` clears the password."""
        if value is None:
            self._password = None
            return
        import hashlib
        encoder = hashlib.new('sha512')
        encoder.update(value)
        self._password = encoder.hexdigest()

    password = synonym('_password', descriptor=property(lambda self: self._password, _set_password))

    # Plain group IDs, proxied through the ``_groups`` relation backref.
    groups = association_proxy('_groups', 'id')

    @property
    def permissions(self):
        """The union of permissions granted through all of this account's groups."""
        perms = []
        for group in self._groups:
            for perm in group.permissions:
                perms.append(perm)
        return set(perms)

    @classmethod
    def lookup(cls, identifier):
        """Fetch the account whose primary key equals ``identifier``."""
        user = session.query(cls).filter(cls.id==identifier).one()
        return user

    @classmethod
    def authenticate(cls, identifier, password=None, force=False):
        """Return ``(account.id, account)`` on success, else ``None``.

        ``force=True`` skips the password check (trusted callers only).
        """
        if not force and not password:
            return None
        try:
            #user = cls.get(identifier)
            user = session.query(cls).filter(cls.name==identifier).one()
        except Exception:  # BUG FIX: was a bare except (also caught SystemExit etc.)
            return None
        if force:
            return user.id, user
        import hashlib
        encoder = hashlib.new('sha512')
        encoder.update(password)
        if user.password is None or user.password != encoder.hexdigest():
            return None
        return user.id, user
# Many-to-many association between accounts and groups (no payload columns).
account_groups = Table('account_groups', Base.metadata,
    Column('account_id', String(32), ForeignKey('accounts.id')),
    Column('group_id', Unicode(32), ForeignKey('groups.id'))
)
class Group(Base):
    """A named group of accounts; permissions are granted to groups."""

    __tablename__ = 'groups'
    id = Column(String(32), primary_key=True)
    description = Column(Unicode(255))
    members = relation(Account, secondary=account_groups, backref='_groups')
    # Plain permission IDs, proxied through the ``_permissions`` backref.
    permissions = association_proxy('_permissions', 'id')

    def __repr__(self):
        # BUG FIX: the original lambda referenced self.name, which Group does
        # not define (AttributeError on any repr); report the description.
        return "Group(%s, %r)" % (self.id, self.description)

    def __str__(self):
        return str(self.id)

    def __unicode__(self):
        return self.id
# Many-to-many association between groups and permissions (no payload columns).
group_permissions = Table('group_perms', Base.metadata,
    Column('group_id', Unicode(32), ForeignKey('groups.id')),
    Column('permission_id', Unicode(32), ForeignKey('permissions.id'))
)
class Permission(Base):
    """A single named capability, granted to accounts via group membership."""
    __tablename__ = 'permissions'
    __repr__ = lambda self: "Permission(%s)" % (self.id, )
    __str__ = lambda self: str(self.id)
    __unicode__ = lambda self: self.id
    id = Column(String(32), primary_key=True)
    description = Column(Unicode(255))
    # The `_permissions` backref here feeds Group.permissions above.
    groups = relation(Group, secondary=group_permissions, backref='_permissions')
#def ready(sessionmaker):
# global session
# session = sessionmaker
# request.environ['catalogs'] = session.query(SiteOptions).limit(1).one()
| 29.023952 | 100 | 0.637508 |
2814df1e327e7a389483fc7f28c047ef76e86e37 | 8,753 | py | Python | conet/datasets/duke_oct_flat_sp.py | steermomo/conet | 21d60fcb4ab9a01a00aa4d9cd0bdee79ea35cc4b | [
"MIT"
] | null | null | null | conet/datasets/duke_oct_flat_sp.py | steermomo/conet | 21d60fcb4ab9a01a00aa4d9cd0bdee79ea35cc4b | [
"MIT"
] | null | null | null | conet/datasets/duke_oct_flat_sp.py | steermomo/conet | 21d60fcb4ab9a01a00aa4d9cd0bdee79ea35cc4b | [
"MIT"
] | 1 | 2020-05-18T10:05:24.000Z | 2020-05-18T10:05:24.000Z | import multiprocessing as mp
# mp.set_start_method('spawn')
import math
import os
import pickle
import random
from glob import glob
from os import path
import albumentations as alb
import cv2
import numpy as np
import skimage
import torch
import imageio
from albumentations.pytorch import ToTensorV2
from skimage.color import gray2rgb
from torch.utils.data import Dataset
from conet.config import get_cfg
# https://github.com/albumentations-team/albumentations/pull/511
# Fix grid distortion bug. #511
# GridDistortion bug fix (see albumentations PR #511 linked above).
# Train-time geometric/size augmentation; always ends in a fixed 224x512 crop.
train_size_aug = alb.Compose([
    # alb.RandomSizedCrop(min_max_height=(300, 500)),
    alb.PadIfNeeded(min_height=100, min_width=600, border_mode=cv2.BORDER_REFLECT101),
    alb.Rotate(limit=6),
    alb.RandomScale(scale_limit=0.05,),
    alb.ElasticTransform(),
    # alb.GridDistortion(p=1, num_steps=20, distort_limit=0.5),
    # alb.GridDistortion(num_steps=10, p=1),
    # alb.OneOf([
    #     alb.OpticalDistortion(),
    # ]),
    # alb.MaskDropout(image_fill_value=0, mask_fill_value=-1,p=0.3),
    alb.HorizontalFlip(),
    # alb.VerticalFlip(),
    # alb.RandomBrightness(limit=0.01),
    alb.PadIfNeeded(min_height=224, min_width=512, border_mode=cv2.BORDER_REFLECT101),
    alb.RandomCrop(224, 512),
    # alb.Normalize(),
    # alb.pytorch.ToTensor(),
    # ToTensorV2()
])
# Train-time photometric augmentation + normalisation, applied to the image only.
train_content_aug = alb.Compose([
    # alb.MedianBlur(3),
    # alb.GaussianBlur(3),
    alb.RGBShift(r_shift_limit=5, g_shift_limit=5, b_shift_limit=5),
    alb.RandomBrightnessContrast(brightness_limit=0.05),
    alb.Normalize(),
    # ToTensorV2()
])
# Deterministic validation-time geometry: pad then centre-crop to 224x512.
val_aug = alb.Compose([
    # alb.PadIfNeeded(512, border_mode=cv2.BORDER_REFLECT101),
    # alb.Normalize(),
    # alb.Resize(512, 512),
    alb.PadIfNeeded(min_height=224, min_width=512, border_mode=cv2.BORDER_REFLECT101),
    alb.CenterCrop(224, 512),
    # ToTensorV2(),
])
# Validation-time content transform: normalisation only.
val_c_aug = alb.Compose([
    alb.Normalize(),
    # ToTensorV2()
])
# train_aug_f = alb.Compose([
# # alb.RandomSizedCrop(min_max_height=(300, 500)),
# alb.RandomScale(),
# # alb.HorizontalFlip(),
# alb.VerticalFlip(),
# alb.RandomBrightness(limit=0.01),
# alb.Rotate(limit=30),
# # 224 548
# alb.PadIfNeeded(min_height=224, min_width=548, border_mode=cv2.BORDER_REFLECT101),
# alb.RandomCrop(224, 512),
# alb.Normalize(),
# # alb.pytorch.ToTensor(),
# ToTensorV2()
# ])
# val_aug_f = alb.Compose([
# alb.PadIfNeeded(min_height=224, min_width=512, border_mode=cv2.BORDER_REFLECT101),
# alb.Normalize(),
# # alb.Resize(512, 512),
# alb.CenterCrop(224, 512),
# ToTensorV2(),
# ])
class DukeOctFlatSPDataset(Dataset):
    """Duke DME OCT dataset of flattened B-scans with superpixel soft labels.

    Each item pairs a grayscale scan (replicated to RGB and normalised), a
    per-pixel hard label map (-1 = ignore) and a multi-channel soft-label map.
    Subjects with id < 6 form the train split, the rest the val split; every
    sample is eagerly loaded into RAM at construction time.
    """
    def __init__(self, split='train', n_seg=0):
        cfg = get_cfg()
        self.cfg = cfg
        # One data sub-directory per superpixel count `n_seg`.
        self.data_dir = path.join(cfg.dme_flatten_sp, str(n_seg))
        print(f'Load data from {self.data_dir}')
        # with open(path.join(cfg.data_dir, 'split.dp'), 'rb') as infile:
        #     self.d_split = pickle.load(infile)
        self.split = split
        data_files = glob(path.join(self.data_dir, '*.jpg'))
        # img_bname = ['_'.join(path.basename(x).split('_')[:-1]) for x in img_files]
        data_bnames = [path.basename(x).split('.')[0] for x in data_files]
        # self.data_bnames = data_bnames
        # File names look like '<prefix>_<subjectId>_...'; split train/val by subject.
        subject_ids = [int(x.split('_')[1]) for x in data_bnames]
        if split == 'train':
            self.bnames = [data_bnames[i] for i in range(len(data_files)) if subject_ids[i] < 6]
        else:
            self.bnames = [data_bnames[i] for i in range(len(data_files)) if subject_ids[i] >= 6]
        if split == 'train':
            self.b_aug = train_size_aug
            self.c_aug = train_content_aug
        elif split == 'val':
            self.b_aug = val_aug
            self.c_aug = val_c_aug
        else:
            raise NotImplementedError
        # Eagerly cache every (path, image, label, soft-label) tuple in memory.
        self.cache = []
        for idx in range(len(self)):
            bname = self.bnames[idx]
            img_fp = path.join(self.data_dir, f'{bname}.jpg')
            label_fp = path.join(self.data_dir, f'{bname}_label.npy')
            softlabel_fp = path.join(self.data_dir, f'{bname}_softlabel.npy')
            img = imageio.imread(img_fp)
            label = np.load(label_fp)
            softlabel = np.load(softlabel_fp)
            self.cache.append((img_fp, img, label, softlabel))

    def __len__(self):
        """Number of samples in the selected split."""
        # return len(self.d_basefp)
        return len(self.bnames)

    def __getitem__(self, idx):
        """Return one augmented sample as a dict of tensors (+ source path)."""
        # carr = np.load(path.join(self.data_dir, self.d_basefp[idx]))
        # carr = np.load(self.bnames[idx])
        # if idx in self.cache.keys():
        #     img_fp, img, label, soft_label = self.cache[idx]
        # else:
        #     bname = self.bnames[idx]
        #     img_fp = path.join(self.data_dir, f'{bname}.jpg')
        #     label_fp = path.join(self.data_dir, f'{bname}_label.npy')
        #     softlabel_fp = path.join(self.data_dir, f'{bname}_softlabel.npy')
        #     img = imageio.imread(img_fp)
        #     label = np.load(label_fp)
        #     softlabel = np.load(softlabel_fp)
        #     self.cache[idx] = (img_fp, img, label, softlabel)
        img_fp, img, label, softlabel = self.cache[idx]
        # Copy so augmentation never mutates the shared cache entries.
        img_fp, img, label, softlabel = img_fp, img.copy(), label.copy(), softlabel.copy()
        # img = gray2rgb(img)
        # if self.split == 'train':
        #     auged = train_aug_f(image=img, mask=label)
        # else:
        #     auged = val_aug_f(image=img, mask=label)
        # auged['fname'] = img_fp
        # auged['softlabel'] = torch.tensor(0.)
        # return auged
        # img = np.transpose(img, (1, 2, 0))
        # Stack image + soft labels channel-wise so geometry is applied to both.
        softlabel = np.transpose(softlabel, (1, 2, 0))
        img = np.expand_dims(img, axis=-1)
        img_a = np.concatenate([img, softlabel], axis=-1)
        # img = gray2rgb(img)
        # grid_distortion may not support negative values, so remap -1 -> 255
        # for the duration of the geometric augmentation.
        label[label == -1] = 255
        auged = self.b_aug(image=img_a, mask=label)
        img = auged['image']
        label = auged['mask']
        label[label == 255] = -1
        # Split the stacked result back into image (ch 0) and soft labels (rest).
        softlabel = img[:, :, 1:]
        image = img[:, :, 0]
        # print(image.shape, image.max(), image.min())
        image = np.clip(image, 0, 255).astype('uint8')
        # image = skimage.img_as_ubyte(image)
        image = gray2rgb(image)
        image = self.c_aug(image=image)['image']  # normalise
        # image = alb.Normalize()(image)['image']
        # Channels-first for PyTorch.
        image = np.transpose(image, (2, 0, 1))
        softlabel = np.transpose(softlabel, (2, 0, 1))
        # Pixels with label -1 are excluded from the loss.
        loss_mask = (label !=-1).astype("float")
        image = torch.from_numpy(image)
        softlabel = torch.from_numpy(softlabel).float()
        label = torch.from_numpy(label)
        loss_mask = torch.from_numpy(loss_mask)
        # img = auged['image']
        # print(img.shape)
        return {
            'image': image,
            'softlabel': softlabel,
            'mask': label,
            'fname': img_fp,
            'loss_mask': loss_mask
        }
if __name__ == "__main__":
    # Debug visualisation: dump augmented (image | colourised mask) pairs to disk
    # so augmentation behaviour can be inspected by eye.
    from skimage import segmentation, color, filters, exposure
    import skimage
    import os
    from os import path
    import imageio
    from matplotlib import pyplot as plt
    from torch.utils.data import DataLoader
    import random
    # Fixed seeds for reproducible augmentation output.
    np.random.seed(42)
    random.seed(42)
    save_dir = '/data1/hangli/oct/debug'
    os.makedirs(save_dir, exist_ok=True)
    cmap = plt.cm.get_cmap('jet')
    n_seg = 1200
    training_dataset = DukeOctFlatSPDataset(split='train', n_seg=n_seg)
    # val_dataset = DukeOctFlatSPDataset(split='val', n_seg=n_seg)
    data_loader = DataLoader(training_dataset, batch_size=16, shuffle=False, num_workers=8, pin_memory=False)
    # val_loader = DataLoader(val_dataset, batch_size=4, shuffle=False, num_workers=2, pin_memory=True)
    for t in range(40):
        for bidx, batch in enumerate(data_loader):
            data = batch['image']
            target = batch['mask']
            for b_i in range(len(data)):
                img = data[b_i]
                # Tensor (C,H,W) -> numpy (H,W,C), rescaled to [0,1] for img_as_ubyte.
                img = img.permute(1, 2, 0).cpu().numpy()
                img = (img - img.min()) / (img.max() - img.min())
                img = skimage.img_as_ubyte(img)
                mask = target[b_i]
                # mask_color = cmap(mask)
                mask_color = color.label2rgb(mask.cpu().numpy())
                mask_color = skimage.img_as_ubyte(mask_color)
                print(img.shape, mask_color.shape)
                # Image and mask side by side in one file.
                save_img = np.hstack((img, mask_color))
                p = path.join(save_dir, f'{t}_{bidx}_{b_i}.jpg')
                print(f'=> {p}')
                imageio.imwrite(p, save_img)
| 30.498258 | 109 | 0.595224 |
28162dcf4efa8e10ec1ddcb4eb91bfa4cb4b0d83 | 107 | py | Python | Chapter 01/fraction-type.py | arifmudi/Applying-Math-with-Python | abeb6b0a9bcfa8b21092b9793d4e691cf5a146bf | [
"MIT"
] | 34 | 2020-07-23T14:42:42.000Z | 2022-03-18T07:00:17.000Z | Chapter 01/fraction-type.py | arifmudi/Applying-Math-with-Python | abeb6b0a9bcfa8b21092b9793d4e691cf5a146bf | [
"MIT"
] | null | null | null | Chapter 01/fraction-type.py | arifmudi/Applying-Math-with-Python | abeb6b0a9bcfa8b21092b9793d4e691cf5a146bf | [
"MIT"
] | 31 | 2020-07-22T11:09:33.000Z | 2022-03-15T16:59:53.000Z |
from fractions import Fraction

# Exact rational arithmetic: 1/3 * 1/7 == 1/21, with no floating-point error.
num1 = Fraction(1, 3)
num2 = Fraction(1, 7)
# Bind the product instead of discarding it: outside a REPL, a bare expression
# statement computes the value and silently throws it away.
product = num1 * num2  # Fraction(1, 21)
| 17.833333 | 30 | 0.691589 |
281720b5fdc07905c3eb03b6c213540b162d5693 | 1,109 | py | Python | tests/config/test_project.py | gaborbernat/toxn | 1ecb1121b3e3dc30b892b0254cb5566048b5d2e7 | [
"MIT"
] | 4 | 2018-04-15T15:12:32.000Z | 2019-06-03T12:41:06.000Z | tests/config/test_project.py | gaborbernat/tox3 | 1ecb1121b3e3dc30b892b0254cb5566048b5d2e7 | [
"MIT"
] | 3 | 2018-03-15T11:06:30.000Z | 2018-04-15T15:17:29.000Z | tests/config/test_project.py | gaborbernat/tox3 | 1ecb1121b3e3dc30b892b0254cb5566048b5d2e7 | [
"MIT"
] | 1 | 2019-09-25T19:53:09.000Z | 2019-09-25T19:53:09.000Z | from io import StringIO
from pathlib import Path
import pytest
from toxn.config import from_toml
@pytest.mark.asyncio
async def test_load_from_io():
    """An in-memory TOML stream parses into build + project config with no filename."""
    toml_stream = StringIO("""
[build-system]
requires = ['setuptools >= 38.2.4']
build-backend = 'setuptools:build_meta'
[tool.toxn]
default_tasks = ['py36']
""")
    build, project, filename = await from_toml(toml_stream)
    assert filename is None
    assert project == {'default_tasks': ['py36']}
    assert build.requires == ['setuptools >= 38.2.4']
    assert build.backend == 'setuptools:build_meta'
@pytest.mark.asyncio
async def test_load_from_path(tmpdir):
    """A TOML file on disk parses into the same config, plus the file's own path."""
    filename: Path = Path(tmpdir) / 'test.toml'
    filename.write_text("""
[build-system]
requires = ['setuptools >= 38.2.4']
build-backend = 'setuptools:build_meta'
[tool.toxn]
default_tasks = ['py36']
""")
    build, project, config_path = await from_toml(filename)
    assert config_path == filename
    assert project == {'default_tasks': ['py36']}
    assert build.requires == ['setuptools >= 38.2.4']
    assert build.backend == 'setuptools:build_meta'
| 25.790698 | 59 | 0.6844 |
2819b274258b4f59c03325199e582718bece2d5e | 536 | py | Python | edinet_baseline_hourly_module/edinet_models/pyEMIS/ConsumptionModels/__init__.py | BeeGroup-cimne/module_edinet | 0cda52e9d6222a681f85567e9bf0f7e5885ebf5e | [
"MIT"
] | null | null | null | edinet_baseline_hourly_module/edinet_models/pyEMIS/ConsumptionModels/__init__.py | BeeGroup-cimne/module_edinet | 0cda52e9d6222a681f85567e9bf0f7e5885ebf5e | [
"MIT"
] | 13 | 2021-03-25T22:24:38.000Z | 2022-03-12T00:56:45.000Z | edinet_baseline_hourly_module/edinet_models/pyEMIS/ConsumptionModels/__init__.py | BeeGroup-cimne/module_edinet | 0cda52e9d6222a681f85567e9bf0f7e5885ebf5e | [
"MIT"
] | 1 | 2019-03-13T09:49:56.000Z | 2019-03-13T09:49:56.000Z | from constantMonthlyModel import ConstantMonthlyModel
from constantModel import ConstantModel
from twoParameterModel import TwoParameterModel
from threeParameterModel import ThreeParameterModel
from anyModel import AnyModelFactory
from schoolModel import SchoolModel, SchoolModelFactory
from recurrentModel import RecurrentModel, RecurrentModelFactory
from weeklyModel import WeeklyModel, WeeklyModelFactory
from monthlyModel import MonthlyModel, MonthlyModelFactory
from nanModel import NanModel
from profile import ConsumptionProfile
| 44.666667 | 64 | 0.902985 |
281aa6d325487ceb00b0753134cf1290afd8b2fd | 326 | py | Python | tinder_config_ex.py | nathan-149/tinderbot | 0413fbbba0219faf4415d75fd4f23518951b03a0 | [
"MIT"
] | 18 | 2020-06-30T18:31:44.000Z | 2021-12-17T05:04:58.000Z | tinder_config_ex.py | havzor1231/Tinder-Bot | 189524d7c80921a47b06262bd3cd42abaad7a85d | [
"MIT"
] | 2 | 2020-07-21T07:55:48.000Z | 2020-11-20T10:02:23.000Z | tinder_config_ex.py | havzor1231/Tinder-Bot | 189524d7c80921a47b06262bd3cd42abaad7a85d | [
"MIT"
] | 9 | 2020-07-12T08:00:00.000Z | 2022-03-24T03:29:40.000Z | host = 'https://api.gotinder.com'
#leave tinder_token empty if you don't use phone verification
tinder_token = "0bb19e55-5f12-4a23-99df-8e258631105b"
# SECURITY NOTE(review): a real-looking token is committed in this example file;
# rotate it and keep the live value out of version control.
# Your real config file should simply be named "config.py"
# Just insert your fb_username and fb_password in string format
# and the fb_auth_token.py module will do the rest!
| 40.75 | 63 | 0.785276 |
281c7adc874167e64dc0db3f96ec79ad8d491740 | 1,958 | py | Python | simone/test.py | ross/simone | cfee8eaa04a7ddd235f735fa6c07adac28b4c6a4 | [
"MIT"
] | null | null | null | simone/test.py | ross/simone | cfee8eaa04a7ddd235f735fa6c07adac28b4c6a4 | [
"MIT"
] | 1 | 2021-11-04T13:47:28.000Z | 2021-11-04T13:47:28.000Z | simone/test.py | ross/simone | cfee8eaa04a7ddd235f735fa6c07adac28b4c6a4 | [
"MIT"
] | 1 | 2021-10-20T14:44:19.000Z | 2021-10-20T14:44:19.000Z | from django.test.runner import DiscoverRunner
from io import StringIO
from logging import StreamHandler, getLogger
from unittest import TextTestRunner, TextTestResult
class SimoneTestRunner(TextTestRunner):
    """Text test runner that unconditionally buffers stdout/stderr during tests."""

    def __init__(self, *args, **kwargs):
        # Force buffering regardless of what the caller passed, so per-test
        # output is captured rather than interleaved with runner output.
        super().__init__(*args, **{**kwargs, 'buffer': True})
class SimoneTestResult(TextTestResult):
    """Test result that captures root-logger output per test and appends it to
    failure tracebacks, while suppressing unittest's output mirroring."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Per-test log capture buffer, swapped onto the root logger in startTest.
        self._stream = StringIO()
        self._stream_handlers = [StreamHandler(self._stream)]
        self._root_logger = getLogger()
        self._original_handlers = self._root_logger.handlers

    def startTest(self, test):
        """Reset the capture buffer and route root logging into it."""
        # seek(0) before truncate(0): truncate alone leaves the write position
        # untouched, so subsequent writes would be NUL-padded to the old offset.
        self._stream.seek(0)
        self._stream.truncate(0)
        self._root_logger.handlers = self._stream_handlers
        return super().startTest(test)

    def stopTest(self, test):
        """Restore the original root-logger handlers."""
        self._root_logger.handlers = self._original_handlers
        # Bug fix: this previously called super().startTest(test), which
        # double-counted testsRun and skipped unittest's stop-test cleanup.
        return super().stopTest(test)

    # i don't like overriding a _ property, but otherwise we'd have to
    # reimplement ~3 add* methods, one of which is non-trivial, which seems
    # more likely to be flakey. Essentially we don't want to mirror the output
    # during the test runs when _restoreStdout is called so this effectively
    # disables the property that gets set to cause that to happen.
    @property
    def _mirrorOutput(self):
        return False

    @_mirrorOutput.setter
    def _mirrorOutput(self, val):
        pass

    # same here :-(. This is so that we can include any logging for the failed
    # tests.
    def _exc_info_to_string(self, err, test):
        return (
            super()._exc_info_to_string(err, test)
            + '\nLogging:\n'
            + self._stream.getvalue()
        )
class SimoneRunner(DiscoverRunner):
    """Django test runner wired to Simone's buffered runner and result classes."""

    test_runner = SimoneTestRunner

    def get_resultclass(self):
        # Only step in when Django expresses no preference of its own
        # (e.g. no --debug-sql / --pdb style flags selected).
        resultclass = super().get_resultclass()
        return SimoneTestResult if resultclass is None else resultclass
| 31.079365 | 78 | 0.670582 |
281e79df5e1dd65bfbeca11e0d9ea108af82bb30 | 18 | py | Python | library/tutorial/__init__.py | gottaegbert/penter | 8cbb6be3c4bf67c7c69fa70e597bfbc3be4f0a2d | [
"MIT"
] | 13 | 2020-01-04T07:37:38.000Z | 2021-08-31T05:19:58.000Z | library/tutorial/__init__.py | gottaegbert/penter | 8cbb6be3c4bf67c7c69fa70e597bfbc3be4f0a2d | [
"MIT"
] | 3 | 2020-06-05T22:42:53.000Z | 2020-08-24T07:18:54.000Z | library/tutorial/__init__.py | gottaegbert/penter | 8cbb6be3c4bf67c7c69fa70e597bfbc3be4f0a2d | [
"MIT"
] | 9 | 2020-10-19T04:53:06.000Z | 2021-08-31T05:20:01.000Z | __all__ = ["fibo"] | 18 | 18 | 0.611111 |
2820ef5bc2fdcf7913515a4a45ac8b19c189a6ce | 1,340 | py | Python | longest path in matrix.py | buhuhaha/python | 4ff72ac711f0948ae5bcb0886d68e8df77fe515b | [
"MIT"
] | null | null | null | longest path in matrix.py | buhuhaha/python | 4ff72ac711f0948ae5bcb0886d68e8df77fe515b | [
"MIT"
] | null | null | null | longest path in matrix.py | buhuhaha/python | 4ff72ac711f0948ae5bcb0886d68e8df77fe515b | [
"MIT"
] | null | null | null |
row = [-1, -1, -1, 0, 0, 1, 1, 1]
col = [-1, 0, 1, -1, 1, -1, 0, 1]
def isValid(x, y, mat):
    """Return True when (x, y) lies inside the bounds of the 2-D grid `mat`."""
    rows = len(mat)
    cols = len(mat[0])
    return (0 <= x < rows) and (0 <= y < cols)
def findMaxLength(mat, x, y, previous):
    """Length of the longest consecutive-character path continuing into (x, y).

    `previous` is the character on the cell the path arrives from; (x, y) must
    hold exactly the next character in the alphabet, otherwise the path cannot
    enter it and 0 is returned.  Counts (x, y) itself, so a matching dead end
    yields 1.
    """
    # Bounds + continuity check, inlined so this function no longer depends on
    # the module-level mutable globals `row`/`col` or the `isValid` helper.
    if not (0 <= x < len(mat) and 0 <= y < len(mat[0])) \
            or chr(ord(previous) + 1) != mat[x][y]:
        return 0
    # The 8 king-move neighbour offsets (same set row/col encode at module level).
    offsets = ((-1, -1), (-1, 0), (-1, 1), (0, -1),
               (0, 1), (1, -1), (1, 0), (1, 1))
    max_len = 0
    for dx, dy in offsets:
        length = findMaxLength(mat, x + dx, y + dy, mat[x][y])
        max_len = max(max_len, 1 + length)
    return max_len
def findMaximumLength(mat, ch):
    """Longest consecutive-character path in `mat` that starts on character `ch`."""
    if not mat or not len(mat):
        return 0
    rows, cols = len(mat), len(mat[0])
    best = 0
    for x in range(rows):
        for y in range(cols):
            if mat[x][y] != ch:
                continue
            # Try to extend the path from every neighbour of this start cell.
            for dx, dy in zip(row, col):
                best = max(best, 1 + findMaxLength(mat, x + dx, y + dy, ch))
    return best
if __name__ == '__main__':
    # Demo: longest run of consecutive letters (C, D, E, ...) starting at 'C'.
    mat = [
        ['D', 'E', 'H', 'X', 'B'],
        ['A', 'O', 'G', 'P', 'E'],
        ['D', 'D', 'C', 'F', 'D'],
        ['E', 'B', 'E', 'A', 'S'],
        ['C', 'D', 'Y', 'E', 'N']
    ]
    ch = 'C'
    print("The length of the longest path with consecutive characters starting from "
"character", ch, "is", findMaximumLength(mat, ch)) | 20.30303 | 85 | 0.435075 |
28226ec9ea67dad00950fa1852a66dbf14540c2c | 4,653 | py | Python | AnimalProfile/session/batchAnimals.py | AtMostafa/AnimalProfile | 866f55659b80291f840ecacd090afada5f4de674 | [
"MIT"
] | null | null | null | AnimalProfile/session/batchAnimals.py | AtMostafa/AnimalProfile | 866f55659b80291f840ecacd090afada5f4de674 | [
"MIT"
] | null | null | null | AnimalProfile/session/batchAnimals.py | AtMostafa/AnimalProfile | 866f55659b80291f840ecacd090afada5f4de674 | [
"MIT"
] | null | null | null | __all__ = ('get_session_list',
'get_animal_list',
'get_event',
'get_tag_pattern',
'get_pattern_animalList',
'get_current_animals')
import datetime
import logging
from .. import Root
from .. import File
from .. import Profile
from ..Profile import EventProfile
from .singleAnimal import *
def get_session_list(root: Root,
                     animalList: list = None,
                     profile: Profile = None):
    """Collect all sessions matching `profile` across the animals in `animalList`.

    A None/empty `animalList` means every animal under `root`; a None `profile`
    means an unconstrained (match-all) profile.
    """
    if profile is None:
        profile = Profile(root=root)
    if animalList is None or animalList == '' or animalList == []:
        animalList = root.get_all_animals()
    collected = Profile(root=root)
    for animal in animalList:
        collected += File(root, animal).get_profile_session_list(profile)
    return collected
def get_animal_list(root: Root, profile: Profile = None):
    """Return the sorted animals with at least one session matching `profile`."""
    if profile is None:
        profile = Profile(root=root)
    sessions = get_session_list(root, animalList=None, profile=profile).Sessions
    # An animal id is the session-name prefix plus a 3-character animal number.
    id_length = len(profile._prefix) + 3
    return sorted({session[:id_length] for session in sessions})
def get_event(root: Root,
              profile1: Profile,
              profile2: Profile,
              badAnimals: list = None):
    """
    Find the animals that match both profile1 and profile2 IN SUCCESSION,
    i.e. the animals for which the conditions changed between two sessions.

    Animals listed in `badAnimals` are excluded.  Returns an EventProfile
    holding, per animal, the two session lists around the change.
    """
    if badAnimals is None:
        badAnimals = []
    animalList1 = get_animal_list(root, profile1)
    animalList2 = get_animal_list(root, profile2)
    animalList0 = set(animalList1).intersection(set(animalList2))
    animalList0 = [animal for animal in animalList0 if animal not in badAnimals]  # remove bad animals from animalList0
    animalList0.sort()
    eventProfile = EventProfile(profile1, profile2)
    for animal in animalList0:
        sessionProfile1 = get_session_list(root, animalList=[animal], profile=profile1)
        sessionProfile2 = get_session_list(root, animalList=[animal], profile=profile2)
        sessionTotal = get_session_list(root, animalList=[animal], profile=root.get_profile())
        try:
            index = sessionTotal.Sessions.index(sessionProfile1.Sessions[-1])
            if sessionProfile2.Sessions[0] == sessionTotal.Sessions[index + 1]:
                # Two profiles succeed each other, meaning the event happened.
                eventProfile.append(sessionProfile1.Sessions, sessionProfile2.Sessions)
        except (ValueError, IndexError):
            # ValueError: profile1's last session is not in the full list;
            # IndexError: a session list is empty or there is no next session.
            # Either way the two profiles are not successive for this animal.
            # (Previously `except Exception: pass`, which also hid real errors.)
            pass
    return eventProfile
def get_tag_pattern(root: Root,
                    animalList: list = None,
                    tagPattern: str = '*'):
    """Merge the `tagPattern`-matched session lists of every animal in `animalList`.

    A None/empty `animalList` means every animal under `root`.
    """
    if animalList is None or animalList == []:
        animalList = root.get_all_animals()
    merged = root.get_profile()
    for animal in animalList:
        merged += File(root, animal).get_pattern_session_list(tagPattern=tagPattern)
    return merged
def get_pattern_animalList(root: Root, tagPattern: str):
    """Return the sorted animals with at least one session matching `tagPattern`."""
    sessions = get_tag_pattern(root, animalList=None, tagPattern=tagPattern).Sessions
    # An animal id is the session-name prefix plus a 3-character animal number.
    id_length = len(root.prefix) + 3
    return sorted({session[:id_length] for session in sessions})
def get_current_animals(root: Root, days_passed: int = 4):
    """Return the animals whose most recent session is at most `days_passed` days old."""
    now = datetime.datetime.now()
    all_animals = root.get_all_animals()
    if all_animals == []:
        logging.warning('No animal found!')
        return []
    recent = []
    for animal in all_animals:
        tag = File(root, animal)
        sessions = tag.get_all_sessions()
        if not sessions:
            continue
        last_seen = tag.get_session_date(sessions[-1])
        if (now - last_seen).days <= days_passed:
            recent.append(animal)
    return recent
| 33.47482 | 119 | 0.663873 |
282403dbaa1f17f6e0d6f80a9faabdc5990009bd | 10,747 | py | Python | IsaacAgent.py | dholmdahl/connect4-1 | cdcd92ee30f45e89a9f01ebc87a8b6d797cc4a81 | [
"MIT"
] | null | null | null | IsaacAgent.py | dholmdahl/connect4-1 | cdcd92ee30f45e89a9f01ebc87a8b6d797cc4a81 | [
"MIT"
] | null | null | null | IsaacAgent.py | dholmdahl/connect4-1 | cdcd92ee30f45e89a9f01ebc87a8b6d797cc4a81 | [
"MIT"
] | null | null | null | from random import choice
from copy import deepcopy
from game_data import GameData
from agents import Agent
import numpy as np
import random
import pickle
import pandas as pd
class IsaacAgent(Agent):
    """Connect-4 agent: depth-limited minimax with a positional heuristic.

    Boards are flat 42-char lists (7 columns x 6 rows), ' ' = empty,
    'R' = red (maximising), 'B' = blue (minimising).
    """
    def __init__(self, max_time=2, max_depth=300):
        # NOTE(review): max_time is stored but never used in this class.
        self.max_time = max_time
        self.max_depth = max_depth
        # self.heuristic = [
        #     [0], [0], [0], [0], [0], [0], [0],
        #     [0], [0], [0], [0], [0], [0], [0],
        #     [0], [0], [0], [0], [0], [0], [0],
        #     [0], [0], [0], [0], [0], [0], [0],  # ...
        #     [0], [0], [-1], [-1], [-1], [0], [0],  # odd player
        #     [0], [1, -1], [0], [0], [0], [1, -1], [0]  # even player
        # ]
        # Per-square score contributions: positive entries reward 'R' on the
        # square, negative entries penalise 'B' there (see evaluate()).
        self.heuristic = [
            [0], [0], [0], [0], [0], [0], [0],
            [0], [0], [1, -1], [2, -2], [1, -1], [0], [0],
            [0], [0], [1, -2], [2, -2], [1, -2], [0], [0],
            [0], [0], [3, -2], [3, -2], [3, -2], [0], [0],
            [0], [0], [2, -3], [2, -3], [2, -3], [0], [0],
            [0], [1, -1], [3, -3], [4, -4], [3, -3], [1, -1], [0]
        ]
        self.game_data = None
        # NOTE(review): the file handle is never closed, and the loaded model is
        # only referenced from commented-out code in evaluate().
        self.model = pickle.load(open("./c4model.sav", 'rb'))

    def get_name(self) -> str:
        """Display name of this agent."""
        return "IsaacAgent"

    def get_move(self, game_data) -> int:
        """Pick a column (0-6) for the current position via minimax search."""
        self.game_data = game_data
        # Convert the game's 2-D numeric board into the flat 'R'/'B'/' ' layout
        # this class works with (rows reversed, then the whole thing reversed).
        rows_reversed_connect4_board = []
        for row in list(game_data.game_board):
            rows_reversed_connect4_board.append(row[::-1])
        connect4_board = list(np.concatenate(rows_reversed_connect4_board).flat)[::-1]
        for sn, sv in enumerate(connect4_board):
            if sv == 0:
                connect4_board[sn] = ' '
            elif sv == 1:
                connect4_board[sn] = 'R'
            else:
                connect4_board[sn] = 'B'
        # self.print_board(connect4_board)
        turn = self.player(connect4_board)
        actions = self.actions(connect4_board)
        # Random fallback so a move always exists even if no action improves.
        best_action = random.choice(actions)
        if turn == 'R':
            # max player: keep the action with the highest min-value reply.
            local_best_min_v = -float('inf')
            for action in actions:
                # NOTE: current_depth is a shared counter reset per root action,
                # then incremented across the whole subtree -- it bounds total
                # expansions, not path depth.
                self.current_depth = 0
                min_v = self.min_value(self.result(connect4_board, action))
                # print(f"Action: {action + 1}, Min Value: {min_v}")
                if min_v > local_best_min_v:
                    local_best_min_v = min_v
                    best_action = action
        else:
            # min player: keep the action with the lowest max-value reply.
            local_best_max_v = float('inf')
            for action in actions:
                self.current_depth = 0
                max_v = self.max_value(self.result(connect4_board, action))
                # print(f"Action: {action + 1}, Max Value: {max_v}")
                if max_v < local_best_max_v:
                    local_best_max_v = max_v
                    best_action = action
        return best_action

    def print_board(self, board):
        """Pretty-print the flat board, one 7-square row at a time (debug aid)."""
        for l in range(0, 42, 7):
            row = ''.join([board[l + i] + '|' for i in range(7)])
            print(row[:13])
            print('-+-+-+-+-+-+-')

    def player(self, board):
        """Side to move: 'R' moves first, so 'B' is up when R has more pieces."""
        return 'B' if board.count('R') > board.count('B') else 'R'

    def is_tie(self, board):
        """True when no empty squares remain."""
        return len([sq for sq in board if sq == ' ']) == 0

    def utility(self, board):
        """Terminal score: 0 for a full board, otherwise -1000 when it is R's
        turn (B made the last, winning move) and +1000 when it is B's turn."""
        return 0 if self.is_tie(board) else -1000 if self.player(board) == "R" else 1000

    def terminal(self, board):
        """True when the position has four-in-a-row for either side, or is full."""
        # use modulo 7 to detect new row
        row = 0
        for sq in range(42):
            if sq % 7 == 0:
                row += 1
            # Squares left before the row wraps / before the column bottom.
            distance_to_new_row = 7 * row - (sq + 1)
            distance_to_column_end = [i for i in range(6) if (sq + 1) + i * 7 > 35][0]
            if board[sq] == ' ':
                continue
            # 4 horizontally
            if distance_to_new_row >= 3 and board[sq] == board[sq + 1] and board[sq] == board[sq + 2] and board[sq] == board[sq + 3]:
                return True
            # 4 vertically
            elif distance_to_column_end > 2 and board[sq] == board[sq + 7] and board[sq] == board[sq + 14] and board[sq] == board[sq + 21]:
                return True
            # 4 diagonally
            elif distance_to_new_row >= 3 and distance_to_column_end >= 2 and sq + 24 < len(board) and board[sq] == board[sq + 8] and board[sq] == board[sq + 16] and board[sq] == board[sq + 24]:
                return True
            elif distance_to_new_row >= 3 and distance_to_column_end <= 2 and 0 <= sq - 18 < len(board) and board[sq] == board[sq - 6] and board[sq] == board[sq - 12] and board[sq] == board[sq - 18]:
                return True
        return self.is_tie(board)

    def actions(self, board):
        """Playable columns: those whose top square (indices 0-6) is empty."""
        return [sn for sn in range(7) if board[sn] == ' ']

    def result(self, board, action):
        """Board copy after the side to move drops a piece in column `action`.

        Scans the column bottom-up and fills the first empty square.
        """
        result = board[:]
        for r in range(6):
            current_sq = board[action + 35 - r * 7]
            if current_sq == ' ':
                result[action + 35 - r * 7] = self.player(board)
                break
        return result

    def count_two_in_row(self, board, player):
        """Count `player` squares that start a 2-in-a-row in any direction."""
        two_in_row = 0
        row = 0
        for sq in range(42):
            if sq % 7 == 0:
                row += 1
            distance_to_new_row = 7 * row - (sq + 1)
            distance_to_column_end = [i for i in range(6) if (sq + 1) + i * 7 > 35][0]
            if board[sq] != player or board[sq].isdigit() or board[sq] == ' ':
                continue
            # 4 horizontally
            if distance_to_new_row >= 3 and board[sq] == board[sq + 1]:
                two_in_row += 1
            # 4 vertically
            elif distance_to_column_end > 2 and board[sq] == board[sq + 7]:
                two_in_row += 1
            # 4 diagonally
            elif distance_to_new_row >= 3 and distance_to_column_end >= 2 and sq + 8 < len(board) and board[sq] == board[sq + 8]:
                two_in_row += 1
            elif distance_to_new_row >= 3 and distance_to_column_end <= 2 and 0 <= sq - 6 < len(board) and board[sq] == board[sq - 6]:
                two_in_row += 1
        return two_in_row

    def count_three_in_row(self, board, player):
        """Count `player` squares that start a 3-in-a-row in any direction."""
        three_in_row = 0
        row = 0
        for sq in range(42):
            if sq % 7 == 0:
                row += 1
            distance_to_new_row = 7 * row - (sq + 1)
            distance_to_column_end = [i for i in range(6) if (sq + 1) + i * 7 > 35][0]
            if board[sq] != player or board[sq].isdigit() or board[sq] == ' ':
                continue
            # 4 horizontally
            if distance_to_new_row >= 3 and board[sq] == board[sq + 1] and board[sq] == board[sq + 2]:
                three_in_row += 1
            # 4 vertically
            elif distance_to_column_end > 2 and board[sq] == board[sq + 7] and board[sq] == board[sq + 14]:
                three_in_row += 1
            # 4 diagonally
            elif distance_to_new_row >= 3 and distance_to_column_end >= 2 and sq + 16 < len(board) and board[sq] == board[sq + 8] and board[sq] == board[sq + 16]:
                three_in_row += 1
            elif distance_to_new_row >= 3 and distance_to_column_end <= 2 and 0 <= sq - 12 < len(board) and board[sq] == board[sq - 6] and board[sq] == board[sq - 12]:
                three_in_row += 1
        return three_in_row

    def evaluate(self, board):
        """
        Static evaluation of a non-terminal position (positive favours 'R').

        Heuristic:
        - Squares value:
        [0, 0, -1, -1, -1, 0, 0,
        0, 0, 2, 2, 2, 0, 0,
        0, 0, -2, -2, -2, 0, 0,
        0, 0, 3, 3, 3, 0, 0,
        0, 0, -3, -3, -3, 0, 0,
        0, 0, 1, 1, 1, 0, 0]
        - Include win squares of each player and where they are located.
        Heuristic based off Odd-Even strategy: https://www.youtube.com/watch?v=YqqcNjQMX18
        """
        total_score = 0
        # Sum per-square contributions from self.heuristic (positive entries
        # count for 'R' on the square, negative entries for 'B').
        for vn, values in enumerate(self.heuristic):
            for value in values:
                if value < 0 and board[vn] == 'B':
                    total_score += value
                elif value > 0 and board[vn] == 'R':
                    total_score += value
        # three_in_row_modifier = 10
        # total_score += self.count_three_in_row(board, 'R') * three_in_row_modifier
        # total_score -= self.count_three_in_row(board, 'B') * three_in_row_modifier
        # total_score += self.count_two_in_row(board, 'R') * three_in_row_modifier
        # total_score -= self.count_two_in_row(board, 'B') * three_in_row_modifier
        # divisor = 5
        # for i in range(7):
        #     action_result = self.result(board, i)
        #     if self.terminal(action_result):
        #         total_score += self.utility(action_result) / divisor
        # print(total_score)
        # multiplier = 2
        # r_win_states = 0
        # b_win_states = 0
        # for i in range(7):
        #     action_result = self.result(board, i)
        #     if self.terminal(action_result):
        #         if self.utility(action_result) == 1000:
        #             r_win_states += 1
        #         else:
        #             b_win_states += 1
        # total_score += r_win_states * multiplier
        # total_score -= b_win_states * multiplier
        # if r_win_states >= 2:
        #     total_score += 400
        # elif b_win_states >= 2:
        #     total_score -= 400
        # print(f"Red Win States: {r_win_states}, Blue Win States: {b_win_states}")
        # multiplier = 30
        # conv_data = []
        # for sq in board:
        #     if sq.isdigit() or sq == ' ':
        #         conv_data.append(0)
        #     elif sq == 'R':
        #         conv_data.append(1)
        #     else:
        #         conv_data.append(-1)
        # c4_board = pd.Series(conv_data, index=[f"pos_{sn + 1}" for sn, sv in enumerate(board)])
        # total_score += self.model.predict([c4_board])[0][0]
        return total_score

    def min_value(self, board):
        """Minimax value when the minimising side ('B') is to move.

        Falls back to the static evaluation once the shared expansion counter
        exceeds max_depth.
        """
        if self.terminal(board):
            return self.utility(board)
        if self.current_depth > self.max_depth:
            return self.evaluate(board)
        self.current_depth += 1
        v = float('inf')
        for action in self.actions(board):
            max_v = self.max_value(self.result(board, action))
            v = min(v, max_v)
        return v

    def max_value(self, board):
        """Minimax value when the maximising side ('R') is to move.

        Falls back to the static evaluation once the shared expansion counter
        exceeds max_depth.
        """
        if self.terminal(board):
            return self.utility(board)
        if self.current_depth > self.max_depth:
            return self.evaluate(board)
        self.current_depth += 1
        v = -float('inf')
        for action in self.actions(board):
            min_v = self.min_value(self.result(board, action))
            v = max(v, min_v)
        return v
| 33.902208 | 199 | 0.499209 |
28254fc9a86cfb17a27b879bc1d9e02d48b17b76 | 1,288 | py | Python | HTTPServer.py | dannyb648/HTTPServer | e7877646d2ee890229d5db67055abed2f3a91812 | [
"MIT"
] | null | null | null | HTTPServer.py | dannyb648/HTTPServer | e7877646d2ee890229d5db67055abed2f3a91812 | [
"MIT"
] | null | null | null | HTTPServer.py | dannyb648/HTTPServer | e7877646d2ee890229d5db67055abed2f3a91812 | [
"MIT"
] | null | null | null | from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from SocketServer import ThreadingMixIn
from os import curdir, sep
import mimetypes
import os
import threading
import urlparse
# TCP port the HTTP server listens on.
PORT_NUMBER = 8080
# Human-readable server version printed at startup.
VERSION_NUMBER = '1.0.0'
class Handler(BaseHTTPRequestHandler):
    """Serves static files from the server's current working directory."""

    def do_GET(self):
        """Handle GET: map the request path onto a file under the cwd and send it."""
        #Parse path into dictionary and process
        url = urlparse.urlparse(self.path)
        url_dict = urlparse.parse_qs(url.query)
        if self.path == '/':
            self.path = "index.html"
            self.respond('text/html')
            return
        # `self.path` is untrusted client input: reject anything that would
        # escape the served directory (e.g. /../../etc/passwd).
        target = os.path.normpath(curdir + sep + url.path.lstrip('/'))
        if not os.path.abspath(target).startswith(os.path.abspath(curdir)):
            self.send_error(403, 'Forbidden: %s' % self.path)
            return
        mimetype = mimetypes.guess_type(self.path)[0]
        if mimetype is None:
            # Unknown extension: fall back to a safe generic type instead of
            # sending a 'Content-type: None' header.
            mimetype = 'application/octet-stream'
        try:
            self.respond(mimetype)
        except IOError:
            self.send_error(404, 'File Not Found: %s' % self.path)
        return

    def respond(self, mimetype):
        """Send the file at `self.path` (relative to cwd) with the given MIME type."""
        #Open the static file requested and send it
        # Binary mode so images and other non-text assets are served unmodified.
        f = open(curdir + sep + self.path, 'rb')
        try:
            self.send_response(200)
            self.send_header('Content-type', mimetype)
            self.end_headers()
            self.wfile.write(f.read())
        finally:
            # Always release the handle, even if the client disconnects mid-write.
            f.close()
        return
class ThreadedHTTPServer(ThreadingMixIn, HTTPServer):
    """HTTPServer that handles each request in a separate thread."""
if __name__ == '__main__':
    # Bind to every interface on PORT_NUMBER and serve until interrupted.
    # (Python 2 syntax: BaseHTTPServer/SocketServer and print statements.)
    server = ThreadedHTTPServer(('', PORT_NUMBER), Handler)
    print 'Starting Server on port ' + str(PORT_NUMBER)
    print 'Version Code: ' + VERSION_NUMBER
    print 'Author @dannyb648 | danbeglin.co.uk'
    server.serve_forever()
| 24.769231 | 61 | 0.736801 |
2826bae5797a9d9d95a636c0a99581f2619ca237 | 5,872 | py | Python | algorand-oracle-smart-contracts/src/algorand_oracle.py | damees/algorand-oracle | f7f078f9d153341d1ba546ff66e8afbf2685f114 | [
"MIT"
] | null | null | null | algorand-oracle-smart-contracts/src/algorand_oracle.py | damees/algorand-oracle | f7f078f9d153341d1ba546ff66e8afbf2685f114 | [
"MIT"
] | null | null | null | algorand-oracle-smart-contracts/src/algorand_oracle.py | damees/algorand-oracle | f7f078f9d153341d1ba546ff66e8afbf2685f114 | [
"MIT"
] | null | null | null | from pyteal import *
# Local-state key marking an account as a contract admin (1 = admin).
ADMIN_KEY = Bytes("admin")
# Local-state key marking an account as whitelisted (1 = allowed to use the app).
WHITELISTED_KEY = Bytes("whitelisted")
# Local-state key holding how many oracle requests an account may still make.
REQUESTS_BALANCE_KEY = Bytes("requests_balance")
# Purchase bounds, in microAlgos (1 Algo = 1,000,000 microAlgos).
MAX_BUY_AMOUNT = Int(1000000000)
MIN_BUY_AMOUNT = Int(10000000)
# Address that receives the payment leg when clients buy requests.
REQUESTS_SELLER = Addr("N5ICVTFKS7RJJHGWWM5QXG2L3BV3GEF6N37D2ZF73O4PCBZCXP4HV3K7CY")
# Transaction note identifying this app (exact usage not visible in this chunk).
MARKET_EXCHANGE_NOTE = Bytes("algo-oracle-app-4")
def approval_program():
    """Build the PyTeal approval program for the oracle application.

    The program routes on the first application argument:
    set_admin, whitelist, allocate_requests, buy_requests and
    get_market_exchange_rate, plus the standard lifecycle branches
    (create, delete, update, close-out, opt-in).
    """
    # On creation: no extra args allowed; the creator becomes an admin
    # in its own local state.
    on_creation = Seq(
        [
            Assert(Txn.application_args.length() == Int(0)),
            App.localPut(Int(0), ADMIN_KEY, Int(1)),
            Return(Int(1))
        ]
    )
    # Sender's admin flag (account index 0 is always the sender).
    is_contract_admin = App.localGet(Int(0), ADMIN_KEY)
    # set/remove an admin for this contract
    admin_status = Btoi(Txn.application_args[2])
    set_admin = Seq(
        [
            Assert(
                And(
                    is_contract_admin,
                    Txn.application_args.length() == Int(3),
                    Txn.accounts.length() == Int(1),
                )
            ),
            App.localPut(Int(1), ADMIN_KEY, admin_status),
            Return(Int(1)),
        ]
    )
    # Opt-in: initialize the caller as not-yet-whitelisted.
    register = Seq(
        [
            App.localPut(Int(0), WHITELISTED_KEY, Int(0)), Return(Int(1))
        ]
    )
    # Depending on what you do, you should always consider implementing a whitelisting to
    # control who access your app. This will allow you to process offchain validation before
    # allowing an account to call you app.
    # You may also consider case by case whitelisting to allow access to specific business methods.
    whitelist = Seq(
        [
            Assert(
                And(
                    is_contract_admin,
                    Txn.application_args.length() == Int(2),
                    Txn.accounts.length() == Int(1)
                )
            ),
            App.localPut(Int(1), WHITELISTED_KEY, Int(1)),
            Return(Int(1))
        ]
    )
    # This should be added to the checklist of business methods.
    is_whitelisted = App.localGet(Int(0), WHITELISTED_KEY)
    # An admin can increase the request balance of a user.
    requests_amount = Btoi(Txn.application_args[1])
    allocate_requests = Seq(
        [
            Assert(
                And(
                    is_contract_admin, # Sent by admin
                    Txn.application_args.length() == Int(3), # receiver and amount are provided
                    Txn.accounts.length() == Int(1),
                    App.localGet(Int(1), WHITELISTED_KEY), # receiver is whitelisted
                )
            ),
            App.localPut(
                Int(1),
                REQUESTS_BALANCE_KEY,
                App.localGet(Int(1), REQUESTS_BALANCE_KEY) + requests_amount
            ),
            Return(Int(1))
        ]
    )
    # a client can buy requests
    buy_requests = Seq(
        [
            Assert(
                And(
                    is_whitelisted,
                    Global.group_size() == Int(2), # buying requests must be done using an atomic transfer
                    Gtxn[0].type_enum() == TxnType.Payment, # the first transaction must be a payment...
                    Gtxn[0].receiver() == REQUESTS_SELLER, # ...to our address
                    Gtxn[0].amount() >= MIN_BUY_AMOUNT, # we don't sell for less than 10...
                    Gtxn[0].amount() <= MAX_BUY_AMOUNT, # ...or more than 1000 ALGO
                    Txn.group_index() == Int(1), # call to the contract is the second transaction
                    Txn.application_args.length() == Int(2),
                    Txn.accounts.length() == Int(1) # the address which will use the requests must be provided
                )
            ),
            # Credit 1 request per 0.1 ALGO (100000 microAlgos) paid.
            App.localPut(
                Int(1),
                REQUESTS_BALANCE_KEY,
                App.localGet(Int(1), REQUESTS_BALANCE_KEY) + (Gtxn[0].amount() / Int(100000)),
            ),
            Return(Int(1))
        ]
    )
    # Spend one request from the caller's balance to ask for a rate.
    market_exchange_rate_request = Seq(
        [
            Assert(
                And(
                    is_whitelisted,
                    Txn.note() == MARKET_EXCHANGE_NOTE,
                    Txn.application_args.length() == Int(4),
                    Txn.accounts.length() == Int(0),
                    App.localGet(Int(0), REQUESTS_BALANCE_KEY) >= Int(1)
                )
            ),
            App.localPut(
                Int(0),
                REQUESTS_BALANCE_KEY,
                App.localGet(Int(0), REQUESTS_BALANCE_KEY) - Int(1),
            ),
            Return(Int(1))
        ]
    )
    # Implement other oracle methods...
    program = Cond(
        [Txn.application_id() == Int(0), on_creation],
        [Txn.on_completion() == OnComplete.DeleteApplication, Return(is_contract_admin)],
        [Txn.on_completion() == OnComplete.UpdateApplication, Return(is_contract_admin)],
        [Txn.on_completion() == OnComplete.CloseOut, Return(Int(1))],
        [Txn.on_completion() == OnComplete.OptIn, register],
        [Txn.application_args[0] == Bytes("set_admin"), set_admin],
        [Txn.application_args[0] == Bytes("whitelist"), whitelist],
        [Txn.application_args[0] == Bytes("allocate_requests"), allocate_requests],
        [Txn.application_args[0] == Bytes("buy_requests"), buy_requests],
        [Txn.application_args[0] == Bytes("get_market_exchange_rate"), market_exchange_rate_request]
    )
    return program
def clear_state_program():
    """Clear-state program: always approve clearing local state."""
    always_approve = Seq(
        [
            Return(Int(1))
        ]
    )
    return always_approve
if __name__ == "__main__":
    # Compile both programs to TEAL (application mode, AVM v5) and write
    # them next to this script.
    with open("algorand_oracle_approval.teal", "w") as f:
        compiled = compileTeal(approval_program(), mode=Mode.Application, version=5)
        f.write(compiled)
    with open("algorand_oracle_clear_state.teal", "w") as f:
        compiled = compileTeal(clear_state_program(), mode=Mode.Application, version=5)
        f.write(compiled)
| 35.161677 | 111 | 0.547854 |
28271eebbca12a80c721021d335930842259d168 | 20,198 | py | Python | custom_components/shelly/__init__.py | astrandb/ShellyForHASS | f404d3007a26945f310a801c6c7d196d7fa1fe23 | [
"MIT"
] | null | null | null | custom_components/shelly/__init__.py | astrandb/ShellyForHASS | f404d3007a26945f310a801c6c7d196d7fa1fe23 | [
"MIT"
] | null | null | null | custom_components/shelly/__init__.py | astrandb/ShellyForHASS | f404d3007a26945f310a801c6c7d196d7fa1fe23 | [
"MIT"
] | null | null | null | """
Support for Shelly smart home devices.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/shelly/
"""
# pylint: disable=broad-except, bare-except, invalid-name, import-error
from datetime import timedelta
import logging
import time
import asyncio
import voluptuous as vol
from homeassistant.const import (
CONF_DEVICES, CONF_DISCOVERY, CONF_ID, CONF_NAME, CONF_PASSWORD,
CONF_SCAN_INTERVAL, CONF_USERNAME, EVENT_HOMEASSISTANT_STOP)
from homeassistant import config_entries
from homeassistant.helpers import discovery
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.script import Script
from homeassistant.util import slugify
from .const import *
from .configuration_schema import CONFIG_SCHEMA
REQUIREMENTS = ['pyShelly==0.1.16']  # pip requirement installed by HASS
_LOGGER = logging.getLogger(__name__)
__version__ = "0.1.6.b6"
VERSION = __version__
# Module-level registries shared between the integration and its platforms.
BLOCKS = {}
DEVICES = {}
BLOCK_SENSORS = []   # "<block id>-<attr>" keys already exposed as sensors
DEVICE_SENSORS = []  # "<device id>-<attr>" keys already exposed as sensors
#def _get_block_key(block):
#    key = block.id
#    if not key in BLOCKS:
#        BLOCKS[key] = block
#    return key
def get_block_from_hass(hass, discovery_info):
    """Look up a previously registered block by its discovery payload.

    Returns None when the payload carries no block id.
    """
    if SHELLY_BLOCK_ID not in discovery_info:
        return None
    block_key = discovery_info[SHELLY_BLOCK_ID]
    return hass.data[SHELLY_BLOCKS][block_key]
def _dev_key(dev):
key = dev.id + "-" + dev.device_type
if dev.device_sub_type is not None:
key += "-" + dev.device_sub_type
return key
#def _get_device_key(dev):
# key = _dev_key(dev)
# if not key in DEVICES:
# DEVICES[key] = dev
# return key
def get_device_from_hass(hass, discovery_info):
    """Look up a previously registered device by its discovery payload."""
    return hass.data[SHELLY_DEVICES][discovery_info[SHELLY_DEVICE_ID]]
async def async_setup(hass, config):
    """Set up this integration using yaml."""
    if DOMAIN not in config:
        # Nothing configured under "shelly:"; config-entry setup only.
        return True
    # Stash the raw yaml config; async_setup_entry reads it back later.
    hass.data[DOMAIN] = config
    # Import the yaml configuration into a config entry.
    hass.async_create_task(
        hass.config_entries.flow.async_init(
            DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data={}
        )
    )
    return True
async def async_setup_entry(hass, config_entry):
    """Setup Shelly component"""
    _LOGGER.info("Starting shelly, %s", __version__)
    config = hass.data[DOMAIN]
    conf = config.get(DOMAIN, {})
    #todo!
    # Expose the configuration and shared registries to the platforms.
    hass.data[SHELLY_CONFIG] = conf
    hass.data[SHELLY_DEVICES] = DEVICES
    hass.data[SHELLY_BLOCKS] = BLOCKS
    # Migrate deprecated wifi_sensor/uptime_sensor options into the
    # generic sensors list.
    if conf.get(CONF_WIFI_SENSOR) is not None:
        _LOGGER.warning("wifi_sensor is deprecated, use rssi in sensors instead.")
        if conf.get(CONF_WIFI_SENSOR) and SENSOR_RSSI not in conf[CONF_SENSORS]:
            conf[CONF_SENSORS].append(SENSOR_RSSI)
    if conf.get(CONF_UPTIME_SENSOR) is not None:
        _LOGGER.warning("uptime_sensor is deprecated, use uptime in sensors instead.")
        if conf.get(CONF_UPTIME_SENSOR) and SENSOR_UPTIME not in conf[CONF_SENSORS]:
            conf[CONF_SENSORS].append(SENSOR_UPTIME)
    # The instance starts pyShelly and wires the discovery callbacks.
    hass.data["SHELLY_INSTANCE"] = ShellyInstance(hass, config_entry, conf)
    #def update_status_information():
    #    pys.update_status_information()
    #for _, block in pys.blocks.items():
    #    block.update_status_information()
    #async def update_domain_callback(_now):
    #    """Update the Shelly status information"""
    #    await hass.async_add_executor_job(update_status_information)
    #if conf.get(CONF_ADDITIONAL_INFO):
    #    hass.helpers.event.async_track_time_interval(
    #        update_domain_callback, update_interval)
    return True
class ShellyInstance():
    """Runtime wrapper around one pyShelly session for a config entry.

    Starts pyShelly, listens for discovered blocks/devices and forwards
    each one to the matching Home Assistant platform.
    """
    def __init__(self, hass, config_entry, conf):
        self.hass = hass
        self.config_entry = config_entry
        # platform name -> asyncio.Event set once the platform is loaded
        self.platforms = {}
        self.pys = None
        self.conf = conf
        self.discover = conf.get(CONF_DISCOVERY)
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self._stop)
        hass.loop.create_task(
            self.start_up()
        )
    async def start_up(self):
        """Create and configure the pyShelly instance and start discovery."""
        conf = self.conf
        if conf.get(CONF_LOCAL_PY_SHELLY):
            _LOGGER.info("Loading local pyShelly")
            #pylint: disable=no-name-in-module
            from .pyShelly import pyShelly
        else:
            from pyShelly import pyShelly
        additional_info = conf.get(CONF_ADDITIONAL_INFO)
        update_interval = conf.get(CONF_SCAN_INTERVAL)
        self.pys = pys = pyShelly()
        _LOGGER.info("pyShelly, %s", pys.version())
        pys.cb_block_added.append(self._block_added)
        pys.cb_device_added.append(self._device_added)
        pys.cb_device_removed.append(self._device_removed)
        pys.username = conf.get(CONF_USERNAME)
        pys.password = conf.get(CONF_PASSWORD)
        pys.cloud_auth_key = conf.get(CONF_CLOUD_AUTH_KEY)
        pys.cloud_server = conf.get(CONF_CLOUD_SEREVR)
        pys.tmpl_name = conf.get(CONF_TMPL_NAME, pys.tmpl_name)
        if additional_info:
            pys.update_status_interval = update_interval
        pys.only_device_id = conf.get(CONF_ONLY_DEVICE_ID)
        pys.igmp_fix_enabled = conf.get(CONF_IGMPFIX)
        pys.mdns_enabled = conf.get(CONF_MDNS)
        pys.host_ip = conf.get(CONF_HOST_IP, '')
        pys.start()
        pys.discover()
        discover_by_ip = conf.get(CONF_DISCOVER_BY_IP)
        for ip_addr in discover_by_ip:
            pys.add_device_by_ip(ip_addr, 'IP-addr')
        if conf.get(CONF_VERSION):
            # Diagnostic entities showing integration/library versions.
            attr = {'version': VERSION, 'pyShellyVersion': pys.version()}
            self._add_device("sensor", attr)
            fake_block = {
                'id': "694908",
                'fake_block': True,
                'info_values': {'temperature': 5},
                'cb_updated': [],
            }
            attr = {'sensor_type': 'temperature', 'itm': fake_block}
            # Fixed: pass the sensor descriptor (sensor_type + itm) the
            # sensor platform expects, not the raw fake block dict.
            self._add_device("sensor", attr)
    async def _stop(self, _):
        """Stop Shelly."""
        _LOGGER.info("Shutting down Shelly")
        self.pys.close()
    def _get_specific_config_root(self, key, *ids):
        """Per-device setting with fall-back to the integration-wide value."""
        item = self._get_specific_config(key, None, *ids)
        if item is None:
            item = self.conf.get(key)
        return item
    def _find_device_config(self, device_id):
        """Return the yaml device entry matching device_id, or None."""
        device_conf_list = self.conf.get(CONF_DEVICES)
        for item in device_conf_list:
            if item[CONF_ID].upper() == device_id:
                return item
        return None
    def _get_device_config(self, device_id, id_2=None):
        """Get config for device."""
        item = self._find_device_config(device_id)
        if item is None and id_2 is not None:
            item = self._find_device_config(id_2)
        if item is None:
            return {}
        return item
    def _get_specific_config(self, key, default, *ids):
        """First per-device value of *key* among *ids*, else *default*."""
        for device_id in ids:
            item = self._find_device_config(device_id)
            if item is not None and key in item:
                return item[key]
        return default
    def _get_sensor_config(self, *ids):
        """Sensor list for a device, falling back to the global list."""
        sensors = self._get_specific_config(CONF_SENSORS, None, *ids)
        if sensors is None:
            sensors = self.conf.get(CONF_SENSORS)
        # Fixed: check for a missing list *before* the membership test;
        # previously "SENSOR_ALL in None" raised TypeError and the
        # None-guard below it was unreachable.
        if sensors is None:
            return {}
        if SENSOR_ALL in sensors:
            return [*SENSOR_TYPES.keys()]
        return sensors
    def _add_device(self, platform, dev):
        self.hass.add_job(self._async_add_device(platform, dev))
    async def _async_add_device(self, platform, dev):
        """Lazily load *platform*, then dispatch the new entity to it."""
        if platform not in self.platforms:
            self.platforms[platform] = asyncio.Event()
            await self.hass.config_entries.async_forward_entry_setup(
                self.config_entry, platform)
            self.platforms[platform].set()
        await self.platforms[platform].wait()
        async_dispatcher_send(self.hass, "shelly_new_" + platform, dev, self)
    def _block_updated(self, block):
        """React to block status updates: firmware switch + info sensors."""
        hass_data = block.hass_data
        if hass_data['discover']:
            if hass_data['allow_upgrade_switch']:
                has_update = block.info_values.get('has_firmware_update', False)
                update_switch = getattr(block, 'firmware_switch', None)
                if has_update:
                    if update_switch is None:
                        attr = {'firmware': True, 'block': block}
                        self._add_device("switch", attr)
                elif update_switch is not None:
                    update_switch.remove()
            for key, _value in block.info_values.items():
                ukey = block.id + '-' + key
                if ukey not in BLOCK_SENSORS:
                    BLOCK_SENSORS.append(ukey)
                    for sensor in hass_data['sensor_cfg']:
                        if SENSOR_TYPES[sensor].get('attr') == key:
                            attr = {'sensor_type': key, 'itm': block}
                            self._add_device("sensor", attr)
    def _block_added(self, block):
        self.hass.add_job(self._async_block_added(block))
    async def _async_block_added(self, block):
        """Attach HASS metadata to a newly discovered block."""
        block.cb_updated.append(self._block_updated)
        discover_block = self.discover \
            or self._get_device_config(block.id) != {}
        block.hass_data = {
            'allow_upgrade_switch':
                self._get_specific_config_root(CONF_UPGRADE_SWITCH, block.id),
            'sensor_cfg': self._get_sensor_config(block.id),
            'discover': discover_block
        }
        if block.unavailable_after_sec is None:
            block.unavailable_after_sec \
                = self._get_specific_config_root(CONF_UNAVALABLE_AFTER_SEC,
                                                 block.id)
    def _device_added(self, dev, _code):
        self.hass.add_job(self._async_device_added(dev, _code))
    async def _async_device_added(self, dev, _code):
        """Route a discovered device to the matching HA platform."""
        device_config = self._get_device_config(dev.id, dev.block.id)
        if not self.discover and device_config == {}:
            return
        if dev.device_type == "ROLLER":
            self._add_device("cover", dev)
        if dev.device_type == "RELAY":
            if device_config.get(CONF_LIGHT_SWITCH):
                self._add_device("light", dev)
            else:
                self._add_device("switch", dev)
        elif dev.device_type == 'POWERMETER':
            sensor_cfg = self._get_sensor_config(dev.id, dev.block.id)
            if SENSOR_POWER in sensor_cfg:
                self._add_device("sensor", dev)
        elif dev.device_type == 'SWITCH':
            sensor_cfg = self._get_sensor_config(dev.id, dev.block.id)
            if SENSOR_SWITCH in sensor_cfg:
                self._add_device("sensor", dev)
        elif dev.device_type in ["SENSOR"]:
            self._add_device("sensor", dev)
        elif dev.device_type in ["LIGHT", "DIMMER"]:
            self._add_device("light", dev)
    def _device_removed(self, dev, _code):
        """Remove the HA entity belonging to a vanished device."""
        dev.shelly_device.remove()
class ShellyBlock(Entity):
    """Base class for Shelly entities"""
    def __init__(self, block, instance, prefix=""):
        conf = instance.conf
        id_prefix = conf.get(CONF_OBJECT_ID_PREFIX)
        # Default unique id / entity id from block type + id; a per-device
        # entity_id override from the configuration takes precedence.
        self._unique_id = slugify(id_prefix + "_" + block.type + "_" +
                                  block.id + prefix)
        self.entity_id = "." + self._unique_id
        entity_id = instance._get_specific_config(CONF_ENTITY_ID , None, block.id)
        if entity_id is not None:
            self.entity_id = "." + slugify(id_prefix + "_" + entity_id + prefix)
            self._unique_id += "_" + slugify(entity_id)
        #self._name = None
        #block.type_name()
        #if conf.get(CONF_SHOW_ID_IN_NAME):
        #    self._name += " [" + block.id + "]"
        # A plain dict stands in for a real pyShelly block in debug mode.
        self.fake_block = isinstance(block, dict) #:'fake_block' in block
        self._show_id_in_name = conf.get(CONF_SHOW_ID_IN_NAME)
        self._block = block
        self.hass = instance.hass
        self.instance = instance
        # Subscribe to block updates so HA state stays in sync.
        self._block.cb_updated.append(self._updated)
        block.shelly_device = self
        self._name = instance._get_specific_config(CONF_NAME, None, block.id)
        self._name_ext = None
        self._is_removed = False
        self.hass.add_job(self.setup_device(block))
    async def setup_device(self, block):
        # Register the block in the HA device registry.
        dev_reg = await self.hass.helpers.device_registry.async_get_registry()
        dev_reg.async_get_or_create(
            config_entry_id=self.entity_id,
            identifiers={(DOMAIN, block.id)},
            manufacturer="Shelly",
            name=block.friendly_name(),
            model=block.type_name(),
            sw_version="0.0.1",
        )
    @property
    def name(self):
        """Return the display name of this device."""
        if self.fake_block:
            name = 'Fake'
        # NOTE(review): the branch below always reassigns "name", so the
        # 'Fake' value above never survives — confirm intended behavior.
        if self._name is None:
            name = self._block.friendly_name()
        else:
            name = self._name
        if self._name_ext:
            name += ' - ' + self._name_ext
        if self._show_id_in_name:
            name += " [" + self._block.id + "]"
        return name
    def _updated(self, _block):
        """Receive events when the switch state changed (by mobile,
        switch etc)"""
        if self.entity_id is not None and not self._is_removed:
            self.schedule_update_ha_state(True)
    @property
    def device_state_attributes(self):
        """Show state attributes in HASS"""
        if self.fake_block:
            return {}
        attrs = {'ip_address': self._block.ip_addr,
                 'shelly_type': self._block.type_name(),
                 'shelly_id': self._block.id,
                 'discovery': self._block.discovery_src
                }
        room = self._block.room_name()
        if room:
            attrs['room'] = room
        if self._block.info_values is not None:
            for key, value in self._block.info_values.items():
                attrs[key] = value
        return attrs
    @property
    def device_info(self):
        # Link the entity to the registered device via the block id.
        return {
            'identifiers': {
                (DOMAIN, self._block.id)
            }
            # 'name': self.name,
            # 'manufacturer': "Shelly",
            # 'model': self._block.type,
            # 'sw_version': '0.0.1',
            # #'via_device': (hue.DOMAIN, self.api.bridgeid),
        }
    def remove(self):
        # Mark removed first so late callbacks do not schedule updates.
        self._is_removed = True
        self.hass.add_job(self.async_remove)
class ShellyDevice(Entity):
    """Base class for Shelly entities"""
    def __init__(self, dev, instance):
        conf = instance.conf
        id_prefix = conf.get(CONF_OBJECT_ID_PREFIX)
        # Default ids from device type + id; per-device entity_id override
        # from the configuration takes precedence.
        self._unique_id = id_prefix + "_" + dev.type + "_" + dev.id
        self.entity_id = "." + slugify(self._unique_id)
        entity_id = instance._get_specific_config(CONF_ENTITY_ID,
                                                  None, dev.id, dev.block.id)
        if entity_id is not None:
            self.entity_id = "." + slugify(id_prefix + "_" + entity_id)
            self._unique_id += "_" + slugify(entity_id)
        self._show_id_in_name = conf.get(CONF_SHOW_ID_IN_NAME)
        #self._name = dev.type_name()
        #if conf.get(CONF_SHOW_ID_IN_NAME):
        #    self._name += " [" + dev.id + "]" # 'Test' #light.name
        self._dev = dev
        self.hass = instance.hass
        self.instance = instance
        # Subscribe to device updates so HA state stays in sync.
        self._dev.cb_updated.append(self._updated)
        dev.shelly_device = self
        self._name = instance._get_specific_config(CONF_NAME, None,
                                                   dev.id, dev.block.id)
        self._sensor_conf = instance._get_sensor_config(dev.id, dev.block.id)
        self._is_removed = False
    def _updated(self, _block):
        """Receive events when the switch state changed (by mobile,
        switch etc)"""
        if self.entity_id is not None and not self._is_removed:
            self.schedule_update_ha_state(True)
        # Register a sensor once per (device, attribute) pair.
        if self._dev.info_values is not None:
            for key, _value in self._dev.info_values.items():
                ukey = self._dev.id + '-' + key
                if not ukey in DEVICE_SENSORS:
                    DEVICE_SENSORS.append(ukey)
                    for sensor in self._sensor_conf:
                        if SENSOR_TYPES[sensor].get('attr') == key:
                            # NOTE(review): attr/conf are built but the
                            # load_platform call is commented out, so this
                            # branch currently has no effect — confirm.
                            attr = {'sensor_type':key,
                                    'itm':self._dev}
                            conf = self.hass.data[SHELLY_CONFIG]
                            #discovery.load_platform(self.hass, 'sensor',
                            #                        DOMAIN, attr, conf)
    @property
    def name(self):
        """Return the display name of this device."""
        if self._name is None:
            name = self._dev.friendly_name()
        else:
            name = self._name
        if self._show_id_in_name:
            name += " [" + self._dev.id + "]"
        return name
    @property
    def device_state_attributes(self):
        """Show state attributes in HASS"""
        attrs = {'ip_address': self._dev.ip_addr,
                 'shelly_type': self._dev.type_name(),
                 'shelly_id': self._dev.id,
                 'discovery': self._dev.discovery_src
                }
        room = self._dev.room_name()
        if room:
            attrs['room'] = room
        # Merge block-level, device-level and sensor values, in that
        # order (later sources overwrite duplicate keys).
        if self._dev.block.info_values is not None:
            for key, value in self._dev.block.info_values.items():
                attrs[key] = value
        if self._dev.info_values is not None:
            for key, value in self._dev.info_values.items():
                attrs[key] = value
        if self._dev.sensor_values is not None:
            for key, value in self._dev.sensor_values.items():
                attrs[key] = value
        return attrs
    @property
    def device_info(self):
        # Link the entity to the registered device via the block id.
        return {
            'identifiers': {
                # Serial numbers are unique identifiers within a specific domain
                (DOMAIN, self._dev.block.id)
            },
            # 'name': self._dev.block.friendly_name(),
            # 'manufacturer': "Shelly",
            # 'model': self._dev.block.type_name(),
            # 'sw_version': '0.0.1',
            #'via_device': (hue.DOMAIN, self.api.bridgeid),
        }
    @property
    def unique_id(self):
        """Return the ID of this device."""
        return self._unique_id
    @property
    def available(self):
        """Return true if switch is available."""
        return self._dev.available()
    def remove(self):
        # Mark removed first so late callbacks do not schedule updates.
        self._is_removed = True
        self.hass.add_job(self.async_remove)
    @property
    def should_poll(self):
        """No polling needed."""
        return False
| 36.003565 | 87 | 0.576839 |
28274273d3b6e8ded8878a57fe78503427048f15 | 664 | py | Python | examples/petstore/migrations/versions/36745fa33987_remove_unique_constraint.py | fastack-dev/fastack-migrate | 1e9d3b3b1d25bec000432026b975053e5350e3da | [
"MIT"
] | 1 | 2021-12-23T03:20:57.000Z | 2021-12-23T03:20:57.000Z | examples/petstore/migrations/versions/36745fa33987_remove_unique_constraint.py | fastack-dev/fastack-migrate | 1e9d3b3b1d25bec000432026b975053e5350e3da | [
"MIT"
] | 1 | 2022-02-09T08:10:30.000Z | 2022-02-09T08:10:30.000Z | examples/petstore/migrations/versions/36745fa33987_remove_unique_constraint.py | fastack-dev/fastack-migrate | 1e9d3b3b1d25bec000432026b975053e5350e3da | [
"MIT"
] | null | null | null | """remove unique constraint
Revision ID: 36745fa33987
Revises: 6b7ad8fd60f9
Create Date: 2022-01-06 08:31:55.141039
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = "36745fa33987"
down_revision = "6b7ad8fd60f9"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint("species_name_key", "species", type_="unique")
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_unique_constraint("species_name_key", "species", ["name"])
# ### end Alembic commands ###
| 24.592593 | 72 | 0.701807 |
282798301fe62d89dc92c6e1905920362da8011c | 564 | py | Python | addons/website_event_track_live_quiz/controllers/track_live_quiz.py | SHIVJITH/Odoo_Machine_Test | 310497a9872db7844b521e6dab5f7a9f61d365a4 | [
"Apache-2.0"
] | null | null | null | addons/website_event_track_live_quiz/controllers/track_live_quiz.py | SHIVJITH/Odoo_Machine_Test | 310497a9872db7844b521e6dab5f7a9f61d365a4 | [
"Apache-2.0"
] | null | null | null | addons/website_event_track_live_quiz/controllers/track_live_quiz.py | SHIVJITH/Odoo_Machine_Test | 310497a9872db7844b521e6dab5f7a9f61d365a4 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.addons.website_event_track_live.controllers.track_live import EventTrackLiveController
class EventTrackLiveQuizController(EventTrackLiveController):
def _prepare_track_suggestion_values(self, track, track_suggestion):
res = super(EventTrackLiveQuizController, self)._prepare_track_suggestion_values(track, track_suggestion)
res['current_track']['show_quiz'] = bool(track.quiz_id) and not track.is_quiz_completed
return res
| 43.384615 | 113 | 0.79078 |
28289fb87251908aef9d071f6be27139338d9810 | 2,640 | py | Python | strongholds-and-followers/retainer/retainer.py | kbsletten/AvraeAliases | a392881dcddccc2155d10fd4d231f1be53c54bda | [
"MIT"
] | null | null | null | strongholds-and-followers/retainer/retainer.py | kbsletten/AvraeAliases | a392881dcddccc2155d10fd4d231f1be53c54bda | [
"MIT"
] | null | null | null | strongholds-and-followers/retainer/retainer.py | kbsletten/AvraeAliases | a392881dcddccc2155d10fd4d231f1be53c54bda | [
"MIT"
] | 1 | 2022-03-17T18:06:25.000Z | 2022-03-17T18:06:25.000Z | embed
<drac2>
GVARS = load_json(get_gvar("c1ee7d0f-750d-4f92-8d87-70fa22c07a81"))
CLASSES = [load_json(get_gvar(gvar)) for gvar in GVARS]
DISPLAY = {
"acrobatics": "Acrobatics",
"animalhandling": "Animal Handling",
"athletics": "Athletics",
"arcana": "Arcana",
"deception": "Deception",
"dex": "Dexterity",
"dexterity": "Dexterity",
"cha": "Charisma",
"charisma": "Charisma",
"con": "Constitution",
"constitution": "Constitution",
"history": "History",
"investigation": "Investigation",
"insight": "Insight",
"int": "Intelligence",
"intelligence": "Intelligence",
"intimidation": "Intimidation",
"medicine": "Medicine",
"nature": "Nature",
"perception": "Perception",
"performance": "Performance",
"persuasion": "Persuasion",
"religion": "Religion",
"sleightofhand": "Sleight of Hand",
"survival": "Survival",
"stealth": "Stealth",
"str": "Strength",
"strength": "Strength",
"wis": "Wisdom",
"wisdom": "Wisdom"
}
char = character()
ret_name = get("_retainerName")
ret_class = get("_retainerClass")
ret_level = int(get("_retainerLevel", 0))
ret_hp = char.get_cc("Retainer HP") if char and char.cc_exists("Retainer HP") else 0
title = f"{char.name} doesn't have a retainer!"
if ret_name and ret_class and ret_level:
title = f"{char.name} has {ret_name} a level {ret_level} {ret_class} retainer!"
cl_info = [c for c in CLASSES if c["name"] == ret_class]
cl_info = cl_info[0] if cl_info else None
fields = ""
if cl_info:
fields += f"""-f "HP|{ret_hp}/{ret_level}|inline" """
fields += f"""-f "AC|{cl_info["ac"]}|inline" """
fields += f"""-f "Primary Ability|{DISPLAY[cl_info["primary"]]}|inline" """
fields += f"""-f "Saves|{", ".join(DISPLAY[x] for x in cl_info["saves"])}|inline" """
fields += f"""-f "Skills|{", ".join(DISPLAY[x] for x in cl_info["skills"])}|inline" """
attack_text = [node for node in cl_info["attack"]["automation"] if node["type"] == "text"]
fields += f"""-f "{cl_info["attack"]["name"]}|{attack_text[0]["text"] if attack_text else ""}" """
for action in cl_info["actions"]:
if ret_level < action["level"]:
continue
attack_text = [node for node in action["attack"]["automation"] if node["type"] == "text"]
fields += f"""-f "{action["attack"]["name"]} ({action["cc_max"]}/Day)|{attack_text[0]["text"] if attack_text else ""}
{char.cc_str(action["cc"]) if char and action["cc"] and char.cc_exists(action["cc"]) else ""}" """
</drac2>
-title "{{title}}"
{{fields}}
-footer "!retainer | kbsletten#5710"
-color <color> -thumb {{get("_retainerImage")}}
| 36.164384 | 122 | 0.625 |
2829cb6a0e893f3f47a265e061c7b3ffa93b9eea | 8,351 | py | Python | bloomberg_functions.py | sophierubin1224/strategy_draft | 410206e5679865ffa25506e733c13b5b03416586 | [
"MIT"
] | null | null | null | bloomberg_functions.py | sophierubin1224/strategy_draft | 410206e5679865ffa25506e733c13b5b03416586 | [
"MIT"
] | null | null | null | bloomberg_functions.py | sophierubin1224/strategy_draft | 410206e5679865ffa25506e733c13b5b03416586 | [
"MIT"
] | 4 | 2021-04-12T23:30:14.000Z | 2021-04-13T13:19:15.000Z | ################################################################################
##### For Bloomberg ------------------------------------------------------------
##### Can't use this if you're on a Mac :(
################################################################################
from __future__ import print_function
from __future__ import absolute_import
from optparse import OptionParser
import os
import platform as plat
import sys
if sys.version_info >= (3, 8) and plat.system().lower() == "windows":
# pylint: disable=no-member
with os.add_dll_directory(os.getenv('BLPAPI_LIBDIR')):
import blpapi
else:
import blpapi
from utils import date_to_str
import pandas as pd
def parseCmdLine():
    """Parse host/port command-line options for the Bloomberg session.

    Returns the optparse options object exposing ``host`` (default
    "localhost") and ``port`` (default 8194).
    """
    parser = OptionParser(description="Retrieve reference data.")
    parser.add_option("-a", "--ip",
                      dest="host",
                      metavar="ipAddress",
                      default="localhost",
                      help="server name or IP (default: %default)")
    parser.add_option("-p",
                      dest="port",
                      type="int",
                      metavar="tcpPort",
                      default=8194,
                      help="server port (default: %default)")
    options, _args = parser.parse_args()
    return options
def req_historical_data(bbg_identifier, startDate, endDate):
    """Fetch daily OHLC/VWAP history for *bbg_identifier* as a DataFrame.

    Results are cached in bbg_data/<identifier>.csv; Bloomberg is only
    queried for the date range missing from the cache.

    NOTE(review): exits the whole process via sys.exit on an inverted
    date range instead of raising — confirm this is intended.
    """
    # Recast start & end dates in Bloomberg's format
    startDate = date_to_str(startDate, "%Y%m%d")
    endDate = date_to_str(endDate, "%Y%m%d")
    if(pd.to_datetime(startDate) >= pd.to_datetime(endDate)):
        sys.exit(
            "in req_historical_data in 'bloomberg_functions.py': " + \
            "specified startDate is later than endDate!"
        )
    # First, check to see if there is already a local .p data file with the
    # data you need for bbg_identifier. If it's not there, create it.
    if not os.path.isdir("bbg_data"):
        os.makedirs("bbg_data")
        print("created the 'bbg_data' folder.")
    if (bbg_identifier + ".csv") in os.listdir("bbg_data"):
        old_bbg_data = pd.read_csv("bbg_data/" + bbg_identifier + ".csv")
        first_old = pd.to_datetime(min(old_bbg_data['Date'])).date()
        last_old = pd.to_datetime(max(old_bbg_data['Date'])).date()
        first_new = pd.to_datetime(startDate).date()
        last_new = pd.to_datetime(endDate).date()
        if first_old <= first_new and last_old >= last_new:
            # Don't need to make a query; have all data we need.
            histdata = old_bbg_data[[
                (pd.to_datetime(x).date() <= last_new) & (
                    pd.to_datetime(x).date() >= first_new
                ) for x in old_bbg_data['Date']
            ]]
            histdata.reset_index(drop=True, inplace=True)
            return histdata
        if first_old > first_new and last_old < last_new:
            # do nothing for now, just requery the bigger dataset. Can refine
            # this case later.
            print(
                "overwriting old data with date range: " + startDate + \
                " to " + endDate
            )
        else:
            # Shrink the query window to just the dates missing from cache.
            if first_new < first_old:
                endDate = date_to_str(first_old, "%Y%m%d")
            else:
                startDate = date_to_str(last_old, "%Y%m%d")
            print(startDate)  # NOTE(review): debug print left in place
    options = parseCmdLine()
    # Fill SessionOptions
    sessionOptions = blpapi.SessionOptions()
    sessionOptions.setServerHost(options.host)
    sessionOptions.setServerPort(options.port)
    print("Connecting to %s:%s" % (options.host, options.port))
    # Create a Session
    session = blpapi.Session(sessionOptions)
    # Start a Session
    if not session.start():
        print("Failed to start session.")
        return
    try:
        # Open service to get historical data from
        if not session.openService("//blp/refdata"):
            print("Failed to open //blp/refdata")
            return
        # Obtain previously opened service
        refDataService = session.getService("//blp/refdata")
        # Create and fill the request for the historical data
        request = refDataService.createRequest("HistoricalDataRequest")
        request.getElement("securities").appendValue(bbg_identifier)
        request.getElement("fields").appendValue("OPEN")
        request.getElement("fields").appendValue("HIGH")
        request.getElement("fields").appendValue("LOW")
        request.getElement("fields").appendValue("PX_LAST")
        request.getElement("fields").appendValue("EQY_WEIGHTED_AVG_PX")
        request.set("periodicityAdjustment", "ACTUAL")
        request.set("periodicitySelection", "DAILY")
        request.set("startDate", startDate)
        request.set("endDate", endDate)
        request.set("maxDataPoints", 1400) # Don't adjust please :)
        print("Sending Request:", request)
        # Send the request
        session.sendRequest(request)
        # Process received events
        while (True):
            # We provide timeout to give the chance for Ctrl+C handling:
            ev = session.nextEvent(500)
            for msg in ev:
                if str(msg.messageType()) == "HistoricalDataResponse":
                    histdata = []
                    for fd in msg.getElement("securityData").getElement(
                            "fieldData").values():
                        histdata.append([fd.getElementAsString("date"), \
                            fd.getElementAsFloat("OPEN"),
                            fd.getElementAsFloat(
                                "HIGH"),
                            fd.getElementAsFloat("LOW"), \
                            fd.getElementAsFloat("PX_LAST"), \
                            fd.getElementAsFloat(
                                "EQY_WEIGHTED_AVG_PX")])
                    histdata = pd.DataFrame(histdata, columns=["Date",
                        "Open",
                        "High", "Low",
                        "Close", "VWAP"])
            if ev.eventType() == blpapi.Event.RESPONSE:
                # Response completely received, so we could exit
                if 'old_bbg_data' in locals():
                    # Merge the freshly fetched rows with the cached file.
                    histdata = pd.concat([histdata, old_bbg_data], axis=0)
                    histdata = histdata.drop_duplicates('Date')
                    histdata = histdata.sort_values('Date')
                    histdata.reset_index(drop=True, inplace=True)
                pd.DataFrame.to_csv(
                    histdata, "bbg_data/" + bbg_identifier + ".csv", index=False
                )
                return histdata
    finally:
        # Stop the session
        session.stop()
__copyright__ = """
Copyright 2012. Bloomberg Finance L.P.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions: The above
copyright notice and this permission notice shall be included in all copies
or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""
####### End of Bloomberg Section -----------------------------------------------
################################################################################
| 41.341584 | 81 | 0.544246 |
282a2f90de76609dae1135f6d1faa97131c57a5d | 3,767 | py | Python | backend/histocat/worker/segmentation/processors/acquisition_processor.py | BodenmillerGroup/histocat-web | c598cd07506febf0b7c209626d4eb869761f2e62 | [
"MIT"
] | 4 | 2021-06-14T15:19:25.000Z | 2022-02-09T13:17:39.000Z | backend/histocat/worker/segmentation/processors/acquisition_processor.py | BodenmillerGroup/histocat-web | c598cd07506febf0b7c209626d4eb869761f2e62 | [
"MIT"
] | null | null | null | backend/histocat/worker/segmentation/processors/acquisition_processor.py | BodenmillerGroup/histocat-web | c598cd07506febf0b7c209626d4eb869761f2e62 | [
"MIT"
] | 1 | 2022-02-09T13:17:41.000Z | 2022-02-09T13:17:41.000Z | import os
from typing import Sequence, Union
import numpy as np
import tifffile
from deepcell.applications import Mesmer
from imctools.io.ometiff.ometiffparser import OmeTiffParser
from skimage import measure
from sqlalchemy.orm import Session
from histocat.core.acquisition import service as acquisition_service
from histocat.core.dataset.models import DatasetModel
from histocat.core.errors import SegmentationError
from histocat.core.segmentation.dto import SegmentationSubmissionDto
from histocat.core.utils import timeit
def normalize_by_minmax(img: Sequence[Union[np.ndarray, np.ndarray]]):
    """Rescale each channel of a (C, H, W) stack into [0, 1].

    The minimum and maximum are taken per channel (over the H and W axes),
    ignoring NaNs, so every channel is stretched independently.
    """
    lo = np.nanmin(img, axis=(1, 2), keepdims=True)
    hi = np.nanmax(img, axis=(1, 2), keepdims=True)
    return (img - lo) / (hi - lo)
def normalize_by_zscore(img: Sequence[Union[np.ndarray, np.ndarray]]):
    """Standardize each channel of a (C, H, W) stack to zero mean, unit std.

    Mean and standard deviation are computed per channel (over the H and W
    axes) with NaNs ignored.
    """
    mu = np.nanmean(img, axis=(1, 2), keepdims=True)
    sigma = np.nanstd(img, axis=(1, 2), keepdims=True)
    return (img - mu) / sigma
@timeit
def process_acquisition(
    db: Session, acquisition_id: int, params: SegmentationSubmissionDto, model, dataset: DatasetModel
):
    """Segment one acquisition with the Mesmer model and collect per-cell features.

    Args:
        db: Active SQLAlchemy session used to look up the acquisition.
        acquisition_id: Primary key of the acquisition to process.
        params: Segmentation settings (nuclear/cytoplasm channel lists,
            normalization choice, compartment, pre/post-processing options).
        model: Pre-loaded model handed to the Mesmer application.
        dataset: Dataset whose ``location`` directory receives the mask file.

    Returns:
        Dict with the acquisition record, the written mask path, per-object
        labels and centroids, and per-channel mean intensities.

    Raises:
        SegmentationError: If no acquisition exists with ``acquisition_id``.
    """
    acquisition = acquisition_service.get_by_id(db, acquisition_id)
    if not acquisition:
        raise SegmentationError(f"Acquisition id:{acquisition_id} not found")

    # Read the raw OME-TIFF image stack for this acquisition from disk.
    parser = OmeTiffParser(acquisition.location)
    acquisition_data = parser.get_acquisition_data()

    # Channel stacks for the markers selected as nuclear / cytoplasmic input.
    # Presumably shaped (channels, height, width) — the axis=(1, 2)/axis=0
    # reductions below rely on that layout; TODO confirm against imctools.
    nuclei_channels = acquisition_data.get_image_stack_by_names(params.nuclei_channels)
    cytoplasm_channels = acquisition_data.get_image_stack_by_names(params.cytoplasm_channels)

    # Optional per-channel normalization before the channels are averaged;
    # any other value of channels_normalization leaves the data untouched.
    if params.preprocessing.channels_normalization == "minmax":
        nuclei_channels = normalize_by_minmax(nuclei_channels)
        cytoplasm_channels = normalize_by_minmax(cytoplasm_channels)
    elif params.preprocessing.channels_normalization == "zscore":
        nuclei_channels = normalize_by_zscore(nuclei_channels)
        cytoplasm_channels = normalize_by_zscore(cytoplasm_channels)

    # Collapse each marker group to a single plane by NaN-safe averaging.
    nuclei_channels = np.nanmean(nuclei_channels, axis=0)
    cytoplasm_channels = np.nanmean(cytoplasm_channels, axis=0)

    # Combined together and expand to 4D
    im = np.stack((nuclei_channels, cytoplasm_channels), axis=-1)
    im = np.expand_dims(im, 0)

    app = Mesmer(model)
    # image_mpp presumably means microns-per-pixel, fixed at 1.0 — TODO confirm.
    segmentation_predictions = app.predict(
        im,
        batch_size=1,
        image_mpp=1.0,
        compartment=params.compartment,
        preprocess_kwargs=params.preprocessing.dict(),
        postprocess_kwargs_whole_cell=params.postprocessing.dict(),
        postprocess_kwargs_nuclear=params.postprocessing.dict(),
    )

    # Persist the label mask next to the dataset. Assumes the acquisition file
    # name ends in "ome.tiff" — TODO confirm for all upstream writers.
    mask_filename = os.path.basename(acquisition.location).replace("ome.tiff", "mask.tiff")
    tifffile.imwrite(os.path.join(dataset.location, mask_filename), segmentation_predictions[0, :, :, 0])

    output = {
        "acquisition": acquisition,
        "mask_location": os.path.join(dataset.location, mask_filename),
        "object_numbers": None,
        "centroids_x": None,
        "centroids_y": None,
        "mean_intensities": {},
        "channel_names": acquisition_data.channel_names,
    }

    # Per-channel region statistics over the predicted label mask.
    for c in acquisition_data.channel_names:
        d = measure.regionprops_table(
            label_image=segmentation_predictions[0, :, :, 0],
            intensity_image=acquisition_data.get_image_by_name(c),
            properties=("label", "centroid", "mean_intensity"),
        )
        # Labels/centroids come from the mask alone, so they are identical
        # for every channel — record them only once, on the first pass.
        if output["object_numbers"] is None:
            output["object_numbers"] = d.get("label")
            output["centroids_x"] = d.get("centroid-1")
            output["centroids_y"] = d.get("centroid-0")
        output["mean_intensities"][c] = d.get("mean_intensity")

    return output
| 38.835052 | 105 | 0.723653 |
282c0f9d07c95149eb897900350f51bc9e832909 | 9,800 | py | Python | Fractals.py | do-it-for-coffee/fractals | 6050dc72ddeed45aefafed489e07a40ee8d8dc1d | [
"Apache-2.0"
] | null | null | null | Fractals.py | do-it-for-coffee/fractals | 6050dc72ddeed45aefafed489e07a40ee8d8dc1d | [
"Apache-2.0"
] | null | null | null | Fractals.py | do-it-for-coffee/fractals | 6050dc72ddeed45aefafed489e07a40ee8d8dc1d | [
"Apache-2.0"
] | null | null | null | import math
import numpy as np
import random
import os
from PIL import Image
import pyttsx3
class TopError(Exception):
    """Raised when the image center's imaginary coordinate is not below `top`."""
    pass
class OddResolutionError(Exception):
    """Raised when either image dimension is odd (resolution is halved to find the center pixel)."""
    pass
class Fractal:
    '''
    Makes images of the Mandelbrot set given a center coordinate and the
    imaginary coordinate of the top row of pixels.
    '''

    # If the image directory doesn't exist, create it.
    IMAGE_DIR = os.path.dirname(os.path.realpath(__file__))
    IMAGE_DIR = os.path.join(IMAGE_DIR, 'PNG')
    if not os.path.exists(IMAGE_DIR):
        os.makedirs(IMAGE_DIR)

    # Template for progress reports (printed and, optionally, spoken).
    P_STRING = 'Your {cr}+{ci}i image is {p} percent complete.'

    def __init__(self):
        '''
        Initiate the TTS engine. Renders take a while, so spoken progress
        updates let you follow along without watching the console.
        '''
        self.tts = pyttsx3.init()

    def mandelbrot(self, image_size, colors, center=None, top=1.6,
                   magnification=1, divergence_iterations=1600, speak=True,
                   image_number=None):
        '''
        Render a Mandelbrot-set image and write it to the PNG directory.

        Args:
            image_size: (width, height) in pixels; both must be even.
            colors: dict with RGB triples under 'm' (set members) and
                'd' (divergent points). The dict is no longer mutated.
            center: (real, imaginary) coordinate at the image center;
                defaults to (-0.66, 0), which frames the whole set.
            top: imaginary coordinate of the top pixel row; together with
                `center` this fixes the zoom level.
            magnification: number of subpixel samples per pixel
                (anti-aliasing / brightness smoothing).
            divergence_iterations: iteration budget before a point is
                declared a member of the set.
            speak: if True, progress is spoken via TTS as well as printed.
            image_number: optional sequence number prefixed to the file name.

        Raises:
            OddResolutionError: if either image dimension is odd.
            TopError: if the center's imaginary part is >= `top`.
        '''
        # Speech toggle.
        self.SPEAK = speak

        # The resolution is halved to find the center pixel, so both
        # dimensions must be even.
        if image_size[0] % 2 == 1 or image_size[1] % 2 == 1:
            raise OddResolutionError

        # This center with the default top places the whole Mandelbrot
        # visualisation centered in the image.
        if center is None:
            center = (-0.66, 0)

        # The top row must lie strictly above the image center.
        if center[1] >= top:
            raise TopError

        self.MAGNIFICATION = magnification
        self.DIVERGENCE_ITERATIONS = divergence_iterations
        # 255 brightness gradations for points that eventually diverge: the
        # longer a point survives before escaping, the brighter its color.
        self.DIVERGENCE_LEVELS = [self.DIVERGENCE_ITERATIONS // x
                                  for x in reversed(range(1, 256))]

        # Fix: build a fresh dict of (1, 3) arrays instead of mutating the
        # caller's `colors` argument in place.
        colors = {key: np.array(value).reshape(1, 3)
                  for key, value in colors.items()}

        # Image geometry.
        self.IMAGE_SIZE = image_size
        self.CENTER = center
        self.TOP = top
        self.CENTER_PIXEL = (int(self.IMAGE_SIZE[0] / 2),
                             int(self.IMAGE_SIZE[1] / 2))
        image_array = np.zeros((self.IMAGE_SIZE[1], self.IMAGE_SIZE[0], 3),
                               dtype=np.uint8)

        # Progress bookkeeping; announce the 0% mark up front.
        pixels_done = 0
        p_complete = 0
        self._report_progress(p_complete)

        total_pixels = self.IMAGE_SIZE[0] * self.IMAGE_SIZE[1]
        for pixel_y in range(self.IMAGE_SIZE[1]):
            for pixel_x in range(self.IMAGE_SIZE[0]):
                image_array[pixel_y, pixel_x] = self._pixel_color(
                    pixel_x, pixel_y, colors)

                # Report progress at each new multiple of 10 percent.
                # (Fixed an off-by-one: the old code incremented the counter
                # and then added 1 again when computing the fraction.)
                pixels_done += 1
                milestone = int(pixels_done / total_pixels * 100) // 10 * 10
                if milestone > p_complete:
                    p_complete = milestone
                    self._report_progress(p_complete)

        # Finally, write the image.
        IMAGE_F = 'mandelbrot center=' + str(self.CENTER[0]) + '+' + \
                  str(self.CENTER[1]) + 'i, top=' + str(self.TOP) + 'i, ' + \
                  str(self.IMAGE_SIZE[0]) + '×' + str(self.IMAGE_SIZE[1]) + \
                  '.png'
        if image_number is not None:
            IMAGE_F = "%04d" % image_number + ' ' + IMAGE_F
        IMAGE_PATH = os.path.join(self.IMAGE_DIR, IMAGE_F)
        self.write_image(image_array, IMAGE_PATH)

    def _report_progress(self, p_complete):
        '''
        Print, and optionally speak, the percent-complete message.
        '''
        message = self.P_STRING.format(cr=self.CENTER[0],
                                       ci=self.CENTER[1],
                                       p=p_complete)
        print(message)
        if self.SPEAK:
            self.tts.say(message)
            self.tts.runAndWait()

    def _pixel_color(self, pixel_x, pixel_y, colors):
        '''
        Compute the RGB color of one pixel by testing MAGNIFICATION subpixel
        samples and averaging their colors: set members contribute the 'm'
        color, divergent samples the 'd' color scaled by how long they
        survived before escaping.
        '''
        tally = {'mandelbrot': 0, 'divergence': 0}
        # One brightness multiplier per divergent subpixel.
        divergence_multipliers = []

        # `sample` no longer shadows the `magnification` parameter.
        for sample in range(1, self.MAGNIFICATION + 1):
            # Subpixel float coordinates.
            x = pixel_x + sample / self.MAGNIFICATION
            y = pixel_y + sample / self.MAGNIFICATION
            real, imaginary = self.pixels_to_coordinates(x, y)

            if not self.coordinates_good(real, imaginary):
                # Already outside the radius-2 circle that contains the set:
                # divergent at the dimmest brightness, no iteration needed.
                tally['divergence'] += 1
                divergence_multipliers.append(0)
                continue

            # Iterate the equation to test for divergence.
            c = real + imaginary * 1j
            z = None
            diverges = False
            for d_i in range(self.DIVERGENCE_ITERATIONS):
                z = self.next_mandelbrot(z, c)
                if not self.coordinates_good(z.real, z.imag):
                    tally['divergence'] += 1
                    # Brightness grows with the number of iterations the
                    # point survived before escaping the circle.
                    d_min = min(level for level in self.DIVERGENCE_LEVELS
                                if d_i + 1 <= level)
                    divergence_multipliers.append(
                        self.DIVERGENCE_LEVELS.index(d_min))
                    diverges = True
                    break
            if not diverges:
                # Never escaped within the budget: count for the set.
                tally['mandelbrot'] += 1

        # Average one RGB row per subpixel sample. All divergent rows share a
        # single brightness — the mean of the collected multipliers — hoisted
        # out of the loop (the old code recomputed np.average per row).
        color = np.empty((0, 3))
        for _ in range(tally['mandelbrot']):
            color = np.append(color, colors['m'], axis=0)
        if tally['divergence'] > 0:
            divergence_multiplier = (np.average(divergence_multipliers) /
                                     len(self.DIVERGENCE_LEVELS))
            for _ in range(tally['divergence']):
                color = np.append(color, colors['d'] * divergence_multiplier,
                                  axis=0)
        return np.average(color, axis=0)

    def write_image(self, image_array, IMAGE_PATH):
        '''
        Write the image array to a PNG file at IMAGE_PATH.
        '''
        image = Image.fromarray(image_array)
        image.save(IMAGE_PATH)

    def next_mandelbrot(self, z, c):
        '''
        One iteration of z -> z**2 + c; z=None starts the sequence at 0.
        '''
        if z is None:
            z = complex(0 + 0j)
        return z**2 + c

    def pixels_to_coordinates(self, pixel_x, pixel_y):
        '''
        Map a pixel (or subpixel) position to a complex-plane coordinate.

        NOTE(review): the imaginary part subtracts CENTER[1] rather than
        adding it, which mirrors the imaginary axis for off-axis centers;
        preserved as-is since the set is symmetric about the real axis.
        '''
        unit_per_pixel = (self.TOP - self.CENTER[1]) / (self.IMAGE_SIZE[1] / 2)
        real = (pixel_x - self.CENTER_PIXEL[0]) * unit_per_pixel + \
               self.CENTER[0]
        imaginary = (pixel_y - self.CENTER_PIXEL[1]) * unit_per_pixel - \
                    self.CENTER[1]
        return (real, imaginary)

    def coordinates_good(self, real, imaginary):
        '''
        Return True if (real, imaginary) lies inside (or on) the circle of
        radius 2 centered at the origin — the region containing the set.
        '''
        return math.sqrt(real**2 + imaginary**2) <= 2
if __name__ == '__main__':
pass
| 35.507246 | 94 | 0.521429 |