max_stars_repo_path stringlengths 4 286 | max_stars_repo_name stringlengths 5 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.03M | content_cleaned stringlengths 6 1.03M | language stringclasses 111 values | language_score float64 0.03 1 | comments stringlengths 0 556k | edu_score float64 0.32 5.03 | edu_int_score int64 0 5 |
|---|---|---|---|---|---|---|---|---|---|---|
flaskblog/models.py | JayeshLocharla/DigiBus | 2 | 6621151 | <reponame>JayeshLocharla/DigiBus
from datetime import datetime
from flaskblog import db, login_manager
from flask_login import UserMixin
now = datetime.now()
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
fullname = db.Column(db.String(30), nullable=False)
username = db.Column(db.String(20), unique=True, nullable=False)
email = db.Column(db.String(120), unique=True, nullable=False)
image_file = db.Column(db.String(20), nullable=False, default = 'default1.jpg')
password = db.Column(db.String(60), nullable=False)
passes = db.relationship('Pass', backref='author', lazy=True)
wallet = db.Column(db.Integer, nullable=False, default=0)
def __repr__(self):
return f"User('{self.fullname}','{self.username}','{self.email}','{self.image_file}', '{self.wallet})"
class Pass(db.Model):
id = db.Column(db.Integer, primary_key=True)
city = db.Column(db.String(100), nullable=False)
source = db.Column(db.String(100), nullable=False)
dest = db.Column(db.String(100), nullable=False)
date = db.Column(db.DateTime, nullable=False, default=datetime.now)
price = db.Column(db.Integer, nullable=False, default=200)
pass_type = db.Column(db.String(100), nullable=False)
booking_date = db.Column(db.DateTime, nullable=False,default=datetime.now)
expiry = db.Column(db.DateTime, nullable=False, default=datetime.now)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
def __repr__(self):
return f"User Pass('{self.source}', '{self.dest}', '{self.date}', '{self.user_id}', '{self.id}')"
def init_db():
db.create_all()
db.session.commit()
if __name__ == '__main__':
init_db()
| from datetime import datetime
from flaskblog import db, login_manager
from flask_login import UserMixin
now = datetime.now()
@login_manager.user_loader
def load_user(user_id):
return User.query.get(int(user_id))
class User(db.Model, UserMixin):
id = db.Column(db.Integer, primary_key=True)
fullname = db.Column(db.String(30), nullable=False)
username = db.Column(db.String(20), unique=True, nullable=False)
email = db.Column(db.String(120), unique=True, nullable=False)
image_file = db.Column(db.String(20), nullable=False, default = 'default1.jpg')
password = db.Column(db.String(60), nullable=False)
passes = db.relationship('Pass', backref='author', lazy=True)
wallet = db.Column(db.Integer, nullable=False, default=0)
def __repr__(self):
return f"User('{self.fullname}','{self.username}','{self.email}','{self.image_file}', '{self.wallet})"
class Pass(db.Model):
id = db.Column(db.Integer, primary_key=True)
city = db.Column(db.String(100), nullable=False)
source = db.Column(db.String(100), nullable=False)
dest = db.Column(db.String(100), nullable=False)
date = db.Column(db.DateTime, nullable=False, default=datetime.now)
price = db.Column(db.Integer, nullable=False, default=200)
pass_type = db.Column(db.String(100), nullable=False)
booking_date = db.Column(db.DateTime, nullable=False,default=datetime.now)
expiry = db.Column(db.DateTime, nullable=False, default=datetime.now)
user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
def __repr__(self):
return f"User Pass('{self.source}', '{self.dest}', '{self.date}', '{self.user_id}', '{self.id}')"
def init_db():
db.create_all()
db.session.commit()
if __name__ == '__main__':
init_db() | none | 1 | 2.602054 | 3 | |
parentheses/1021_remove_outmost_parentheses.py | MartinMa28/Algorithms_review | 0 | 6621152 | <filename>parentheses/1021_remove_outmost_parentheses.py
from functools import reduce
class Solution:
def removeOutermostParentheses(self, S: str) -> str:
stack = []
primi_splits = []
for idx, s in enumerate(S):
if s == '(':
stack.append(idx)
elif s == ')':
start_idx = stack.pop()
if len(stack) == 0:
primi_splits.append(S[start_idx: idx + 1])
return str(reduce(lambda x, y: x + y, map(lambda x: x[1:-1], primi_splits)))
if __name__ == "__main__":
solu = Solution()
S = '(()())(())'
print(solu.removeOutermostParentheses(S))
| <filename>parentheses/1021_remove_outmost_parentheses.py
from functools import reduce
class Solution:
def removeOutermostParentheses(self, S: str) -> str:
stack = []
primi_splits = []
for idx, s in enumerate(S):
if s == '(':
stack.append(idx)
elif s == ')':
start_idx = stack.pop()
if len(stack) == 0:
primi_splits.append(S[start_idx: idx + 1])
return str(reduce(lambda x, y: x + y, map(lambda x: x[1:-1], primi_splits)))
if __name__ == "__main__":
solu = Solution()
S = '(()())(())'
print(solu.removeOutermostParentheses(S))
| none | 1 | 3.185186 | 3 | |
streamkov/metamarkov.py | bhtucker/streamkov | 1 | 6621153 | <filename>streamkov/metamarkov.py<gh_stars>1-10
# -*- coding: utf-8 -*-
"""
metamarkov
~~~~~~~~~~
Module for combining and drawing from multiple
"""
from streamkov.markov import MarkovGenerator
from functools import reduce
import random
class MetaMarkov(MarkovGenerator):
"""
Combine multiple MarkovGenerators to draw
"""
def __init__(self, *markov_generators):
self.markov_generators = markov_generators
original_lists = [c.word_list for c in markov_generators]
words = reduce(
lambda a, b: a.union(b),
[set(mk.word_list) for mk in markov_generators]
)
self.word_list = list(words)
self.word_index = {v: ix for ix, v in enumerate(self.word_list)}
for child_generator in self.markov_generators:
child_generator.index_mapper = make_mapper(child_generator, self)
self.word_states = {
ix: MetaWordState(self, word=v)
for ix, v in enumerate(self.word_list)
}
self.initial_state = MetaWordState(self)
def receive(self, word):
raise NotImplementedError('The MetaMarkov is generate-only')
class MetaWordState(object):
"""
Information and methods for transitioning from a word
"""
def __init__(self, metamarkov, word=None):
self.component_states = []
for component in metamarkov.markov_generators:
component_state = _get_component_state(component, word)
if not component_state:
continue
component_state.mapper = component.index_mapper
self.component_states.append(component_state)
def draw(self):
choices = reduce(
lambda a, b: a + b,
[c.mapped_adjacencies for c in self.component_states]
)
assert all([c is not None for c in choices])
return random.choice(choices)
def is_stop_word(self):
return not any([c.adjacencies for c in self.component_states])
def make_mapper(child_mg, parent_mm):
# returns a function mapping from a child's index to the master index
return lambda v: parent_mm.word_index.get(
child_mg.word_list[v]
)
def _get_component_state(component, word):
if not word:
return component.initial_state
component_idx = component.word_index.get(word)
if not component_idx:
return
return component.word_states[component_idx]
| <filename>streamkov/metamarkov.py<gh_stars>1-10
# -*- coding: utf-8 -*-
"""
metamarkov
~~~~~~~~~~
Module for combining and drawing from multiple
"""
from streamkov.markov import MarkovGenerator
from functools import reduce
import random
class MetaMarkov(MarkovGenerator):
"""
Combine multiple MarkovGenerators to draw
"""
def __init__(self, *markov_generators):
self.markov_generators = markov_generators
original_lists = [c.word_list for c in markov_generators]
words = reduce(
lambda a, b: a.union(b),
[set(mk.word_list) for mk in markov_generators]
)
self.word_list = list(words)
self.word_index = {v: ix for ix, v in enumerate(self.word_list)}
for child_generator in self.markov_generators:
child_generator.index_mapper = make_mapper(child_generator, self)
self.word_states = {
ix: MetaWordState(self, word=v)
for ix, v in enumerate(self.word_list)
}
self.initial_state = MetaWordState(self)
def receive(self, word):
raise NotImplementedError('The MetaMarkov is generate-only')
class MetaWordState(object):
"""
Information and methods for transitioning from a word
"""
def __init__(self, metamarkov, word=None):
self.component_states = []
for component in metamarkov.markov_generators:
component_state = _get_component_state(component, word)
if not component_state:
continue
component_state.mapper = component.index_mapper
self.component_states.append(component_state)
def draw(self):
choices = reduce(
lambda a, b: a + b,
[c.mapped_adjacencies for c in self.component_states]
)
assert all([c is not None for c in choices])
return random.choice(choices)
def is_stop_word(self):
return not any([c.adjacencies for c in self.component_states])
def make_mapper(child_mg, parent_mm):
# returns a function mapping from a child's index to the master index
return lambda v: parent_mm.word_index.get(
child_mg.word_list[v]
)
def _get_component_state(component, word):
if not word:
return component.initial_state
component_idx = component.word_index.get(word)
if not component_idx:
return
return component.word_states[component_idx]
| en | 0.744429 | # -*- coding: utf-8 -*- metamarkov ~~~~~~~~~~ Module for combining and drawing from multiple Combine multiple MarkovGenerators to draw Information and methods for transitioning from a word # returns a function mapping from a child's index to the master index | 2.662938 | 3 |
destiny/manifest.py | HazelTheWitch/destiny.py | 0 | 6621154 | from typing import TYPE_CHECKING, Union, List, Any, Dict
import sqlite3
import requests
import zipfile
import os
import json
from .errors import *
if TYPE_CHECKING:
from .destiny import *
__all__ = [
'Manifest'
]
class Manifest:
"""Handles interactions with the Destiny 2 manifest."""
def __init__(self, application: 'DestinyApplication') -> None:
self.application = application
self.manifests = {
'en': '',
'fr': '',
'es': '',
'de': '',
'it': '',
'ja': '',
'pt-br': '',
'es-mx': '',
'ru': '',
'pl': '',
'zh-cht': ''
}
def decodeHash(self, hash: Union[int, str], definition: str, locale: str) -> List[Dict[Any, Any]]:
"""
Decode a given hash from the manifest.
:param hash: the hash to decode
:param definition: the defition to look up within
:param locale: the locale to use to look up
:return: the json response from the manifest
"""
if locale not in self.manifests:
raise InvalidLocaleError(locale)
if not self.manifests[locale]:
self.update(locale)
identifier = 'id'
if definition == 'DestinyHistoricalStatsDefinition':
identifier = 'key'
con = sqlite3.connect(self.manifests[locale])
cur = con.cursor()
cur.execute(f'SELECT json FROM {definition} WHERE {identifier} = {hash};')
results = cur.fetchall()
return list(map(lambda jsonStr: json.loads(jsonStr[0]), results))
def update(self, locale: str) -> None:
"""
Update the manifest from bungie.net.
:param locale: the locale to update
"""
if locale not in self.manifests:
raise InvalidLocaleError(locale)
url = self.application.destiny2.getDestinyManifest()['mobileWorldContentPaths'][locale]
fp = url.split('/')[-1]
self._download('https://www.bungie.net' + url, 'manifestZip')
zipRef = zipfile.ZipFile('manifestZip', 'r')
zipRef.extractall('.')
zipRef.close()
os.remove('manifestZip')
self.manifests[locale] = fp
@staticmethod
def _download(url: str, name: str) -> None:
with requests.get(url, stream=True) as resp:
resp.raise_for_status()
with open(name, 'wb') as f:
for chunk in resp.iter_content(chunk_size=8192):
f.write(chunk)
| from typing import TYPE_CHECKING, Union, List, Any, Dict
import sqlite3
import requests
import zipfile
import os
import json
from .errors import *
if TYPE_CHECKING:
from .destiny import *
__all__ = [
'Manifest'
]
class Manifest:
"""Handles interactions with the Destiny 2 manifest."""
def __init__(self, application: 'DestinyApplication') -> None:
self.application = application
self.manifests = {
'en': '',
'fr': '',
'es': '',
'de': '',
'it': '',
'ja': '',
'pt-br': '',
'es-mx': '',
'ru': '',
'pl': '',
'zh-cht': ''
}
def decodeHash(self, hash: Union[int, str], definition: str, locale: str) -> List[Dict[Any, Any]]:
"""
Decode a given hash from the manifest.
:param hash: the hash to decode
:param definition: the defition to look up within
:param locale: the locale to use to look up
:return: the json response from the manifest
"""
if locale not in self.manifests:
raise InvalidLocaleError(locale)
if not self.manifests[locale]:
self.update(locale)
identifier = 'id'
if definition == 'DestinyHistoricalStatsDefinition':
identifier = 'key'
con = sqlite3.connect(self.manifests[locale])
cur = con.cursor()
cur.execute(f'SELECT json FROM {definition} WHERE {identifier} = {hash};')
results = cur.fetchall()
return list(map(lambda jsonStr: json.loads(jsonStr[0]), results))
def update(self, locale: str) -> None:
"""
Update the manifest from bungie.net.
:param locale: the locale to update
"""
if locale not in self.manifests:
raise InvalidLocaleError(locale)
url = self.application.destiny2.getDestinyManifest()['mobileWorldContentPaths'][locale]
fp = url.split('/')[-1]
self._download('https://www.bungie.net' + url, 'manifestZip')
zipRef = zipfile.ZipFile('manifestZip', 'r')
zipRef.extractall('.')
zipRef.close()
os.remove('manifestZip')
self.manifests[locale] = fp
@staticmethod
def _download(url: str, name: str) -> None:
with requests.get(url, stream=True) as resp:
resp.raise_for_status()
with open(name, 'wb') as f:
for chunk in resp.iter_content(chunk_size=8192):
f.write(chunk)
| en | 0.829362 | Handles interactions with the Destiny 2 manifest. Decode a given hash from the manifest. :param hash: the hash to decode :param definition: the defition to look up within :param locale: the locale to use to look up :return: the json response from the manifest Update the manifest from bungie.net. :param locale: the locale to update | 2.725769 | 3 |
runway/lookups/handlers/__init__.py | onicagroup/runway | 134 | 6621155 | """Runway lookup handlers."""
from . import cfn, ecr, env, random_string, ssm, var
__all__ = ["cfn", "ecr", "env", "random_string", "ssm", "var"]
| """Runway lookup handlers."""
from . import cfn, ecr, env, random_string, ssm, var
__all__ = ["cfn", "ecr", "env", "random_string", "ssm", "var"]
| en | 0.713416 | Runway lookup handlers. | 1.246985 | 1 |
GUI_Web.py | KangFrank/Python_Start | 2 | 6621156 | #!usr/bin/env python3
#-*-coding:utf-8 -*-
#Filename:GUI_Web.py
#Write the first GUI program
from tkinter import *
class Application(Frame):
def __init__(self,master=None):
Frame.__init__(self,master)
self.pack()
self.createWidgets()
def createWidgets(self):
self.hellolabel=Label(self,text='Hello,world!')
self.hellolabel.pack()
self.quitbutton=Button(self,text='Intel_Quit',command=self.quit)
self.quitbutton.pack()
def hello(self):
name=self.nameInput.get() or 'world'
messagebox.showinfo('Message','Hello, %s'%name)
'''
app=Application()
app.master.title('Hello World')
app.mainloop
'''
#Add the function of input
from tkinter import *
import tkinter.messagebox as messagebox
class Application1(Frame):
def __init__(self,master=None):
Frame.__init__(self,master)
self.pack()
self.createWidgets()
def createWidgets(self):
self.nameInput=Entry(self)
self.nameInput.pack()
self.alertbutton=Button(self,text='Hello',command=self.hello)
self.alertbutton.pack()
def hello(self):
name=self.nameInput.get() or 'world'
messagebox.showinfo('Message','Hello, %s'%name)
'''
app1=Application1()
app1.master.title('Hello World')
app1.mainloop()
'''
#TCP/IP web network communications
import socket
s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
#establish the connection with sina
s.connect(('www.sina.com',80))
#send data
s.send(b'GET/HTTP/1.1\r\nHost:www.sian.com.cn\r\nConnection:close\r\n\r\n')
#receive data
buffer=[]
while True:
#Each time can only receive no more than 1k data
d=s.recv(1024)
if d:
buffer.append(d)
else:
break
data=b''.join(buffer)
s.close
#print the HTTP, seprate the head and the content
header,html=data.split(b'\r\n\r\n',1)
print(header.decode('utf-8'))
#Write the received data into file
with open('sina.html','wb') as f:
f.write(html)
| #!usr/bin/env python3
#-*-coding:utf-8 -*-
#Filename:GUI_Web.py
#Write the first GUI program
from tkinter import *
class Application(Frame):
def __init__(self,master=None):
Frame.__init__(self,master)
self.pack()
self.createWidgets()
def createWidgets(self):
self.hellolabel=Label(self,text='Hello,world!')
self.hellolabel.pack()
self.quitbutton=Button(self,text='Intel_Quit',command=self.quit)
self.quitbutton.pack()
def hello(self):
name=self.nameInput.get() or 'world'
messagebox.showinfo('Message','Hello, %s'%name)
'''
app=Application()
app.master.title('Hello World')
app.mainloop
'''
#Add the function of input
from tkinter import *
import tkinter.messagebox as messagebox
class Application1(Frame):
def __init__(self,master=None):
Frame.__init__(self,master)
self.pack()
self.createWidgets()
def createWidgets(self):
self.nameInput=Entry(self)
self.nameInput.pack()
self.alertbutton=Button(self,text='Hello',command=self.hello)
self.alertbutton.pack()
def hello(self):
name=self.nameInput.get() or 'world'
messagebox.showinfo('Message','Hello, %s'%name)
'''
app1=Application1()
app1.master.title('Hello World')
app1.mainloop()
'''
#TCP/IP web network communications
import socket
s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
#establish the connection with sina
s.connect(('www.sina.com',80))
#send data
s.send(b'GET/HTTP/1.1\r\nHost:www.sian.com.cn\r\nConnection:close\r\n\r\n')
#receive data
buffer=[]
while True:
#Each time can only receive no more than 1k data
d=s.recv(1024)
if d:
buffer.append(d)
else:
break
data=b''.join(buffer)
s.close
#print the HTTP, seprate the head and the content
header,html=data.split(b'\r\n\r\n',1)
print(header.decode('utf-8'))
#Write the received data into file
with open('sina.html','wb') as f:
f.write(html)
| en | 0.696241 | #!usr/bin/env python3 #-*-coding:utf-8 -*- #Filename:GUI_Web.py #Write the first GUI program app=Application()
app.master.title('Hello World')
app.mainloop #Add the function of input app1=Application1()
app1.master.title('Hello World')
app1.mainloop() #TCP/IP web network communications #establish the connection with sina #send data #receive data #Each time can only receive no more than 1k data #print the HTTP, seprate the head and the content #Write the received data into file | 3.770838 | 4 |
api/controller.py | datalogics/circulation | 0 | 6621157 | <filename>api/controller.py
from nose.tools import set_trace
import json
import logging
import sys
import urllib
import datetime
from wsgiref.handlers import format_date_time
from time import mktime
from lxml import etree
from sqlalchemy.orm import eagerload
from functools import wraps
import flask
from flask import (
make_response,
Response,
redirect,
)
from flask.ext.babel import lazy_gettext as _
from core.app_server import (
entry_response,
feed_response,
cdn_url_for,
url_for,
load_lending_policy,
load_facets_from_request,
load_pagination_from_request,
ComplaintController,
HeartbeatController,
URNLookupController,
)
from core.external_search import (
ExternalSearchIndex,
DummyExternalSearchIndex,
)
from core.facets import FacetConfig
from core.log import LogConfiguration
from core.lane import (
Facets,
Pagination,
Lane,
LaneList,
)
from core.model import (
get_one,
get_one_or_create,
production_session,
Admin,
Annotation,
CachedFeed,
CirculationEvent,
Collection,
Complaint,
ConfigurationSetting,
DataSource,
DeliveryMechanism,
ExternalIntegration,
Hold,
Identifier,
Library,
LicensePool,
Loan,
LicensePoolDeliveryMechanism,
PatronProfileStorage,
Representation,
Session,
Work,
)
from core.opds import (
AcquisitionFeed,
)
from core.util.opds_writer import (
OPDSFeed,
)
from core.opensearch import OpenSearchDocument
from core.user_profile import ProfileController as CoreProfileController
from core.util.flask_util import (
problem,
)
from core.util.authentication_for_opds import AuthenticationForOPDSDocument
from core.util.problem_detail import ProblemDetail
from core.util.http import (
RemoteIntegrationException,
)
from circulation_exceptions import *
from opds import (
CirculationManagerAnnotator,
CirculationManagerLoanAndHoldAnnotator,
)
from annotations import (
AnnotationWriter,
AnnotationParser,
)
from problem_details import *
from authenticator import (
Authenticator,
OAuthController,
)
from config import (
Configuration,
CannotLoadConfiguration,
)
from lanes import (
make_lanes,
ContributorLane,
RecommendationLane,
RelatedBooksLane,
SeriesLane,
)
from adobe_vendor_id import (
AdobeVendorIDController,
DeviceManagementProtocolController,
AuthdataUtility,
)
from axis import Axis360API
from overdrive import OverdriveAPI
from bibliotheca import BibliothecaAPI
from circulation import CirculationAPI
from novelist import (
NoveListAPI,
MockNoveListAPI,
)
from base_controller import BaseCirculationManagerController
from testing import MockCirculationAPI
from services import ServiceStatus
from core.analytics import Analytics
class CirculationManager(object):
def __init__(self, _db, lanes=None, testing=False):
self.log = logging.getLogger("Circulation manager web app")
self._db = _db
if not testing:
try:
self.config = Configuration.load(_db)
except CannotLoadConfiguration, e:
self.log.error("Could not load configuration file: %s" % e)
sys.exit()
self.testing = testing
self.site_configuration_last_update = (
Configuration.site_configuration_last_update(self._db, timeout=0)
)
self.lane_descriptions = lanes
self.setup_one_time_controllers()
self.load_settings()
def reload_settings_if_changed(self):
"""If the site configuration has been updated, reload the
CirculationManager's configuration from the database.
"""
last_update = Configuration.site_configuration_last_update(self._db)
if last_update > self.site_configuration_last_update:
self.load_settings()
self.site_configuration_last_update = last_update
def load_settings(self):
"""Load all necessary configuration settings and external
integrations from the database.
This is called once when the CirculationManager is
initialized. It may also be called later to reload the site
configuration after changes are made in the administrative
interface.
"""
LogConfiguration.initialize(self._db)
self.analytics = Analytics(self._db)
self.auth = Authenticator(self._db, self.analytics)
self.setup_external_search()
# Track the Lane configuration for each library by mapping its
# short name to the top-level lane.
new_top_level_lanes = {}
# Create a CirculationAPI for each library.
new_circulation_apis = {}
new_adobe_device_management = None
for library in self._db.query(Library):
lanes = make_lanes(self._db, library, self.lane_descriptions)
new_top_level_lanes[library.id] = (
self.create_top_level_lane(
self._db, library, lanes
)
)
new_circulation_apis[library.id] = self.setup_circulation(
library, self.analytics
)
authdata = self.setup_adobe_vendor_id(self._db, library)
if authdata and not new_adobe_device_management:
# There's at least one library on this system that
# wants Vendor IDs. This means we need to advertise support
# for the Device Management Protocol.
new_adobe_device_management = DeviceManagementProtocolController(self)
self.adobe_device_management = new_adobe_device_management
self.top_level_lanes = new_top_level_lanes
self.circulation_apis = new_circulation_apis
self.lending_policy = load_lending_policy(
Configuration.policy('lending', {})
)
self.patron_web_client_url = ConfigurationSetting.sitewide(
self._db, Configuration.PATRON_WEB_CLIENT_URL).value
self.setup_configuration_dependent_controllers()
self.authentication_for_opds_documents = {}
@property
def external_search(self):
"""Retrieve or create a connection to the search interface.
This is created lazily so that a failure to connect only
affects searches, not the rest of the circulation manager.
"""
if not self.__external_search:
self.setup_external_search()
return self.__external_search
def setup_external_search(self):
try:
self.__external_search = self.setup_search()
self.external_search_initialization_exception = None
except CannotLoadConfiguration, e:
self.log.error(
"Exception loading search configuration: %s", e
)
self.__external_search = None
self.external_search_initialization_exception = e
return self.__external_search
def create_top_level_lane(self, _db, library, lanelist):
name = 'All Books'
return Lane(
_db,
library, name,
display_name=name,
parent=None,
sublanes=lanelist.lanes,
include_all=False,
languages=None,
searchable=True,
invisible=True
)
def cdn_url_for(self, view, *args, **kwargs):
return cdn_url_for(view, *args, **kwargs)
def url_for(self, view, *args, **kwargs):
kwargs['_external'] = True
return url_for(view, *args, **kwargs)
def log_lanes(self, lanelist=None, level=0):
"""Output information about the lane layout."""
lanelist = lanelist or self.top_level_lane.sublanes
for lane in lanelist:
self.log.debug("%s%r", "-" * level, lane)
if lane.sublanes:
self.log_lanes(lane.sublanes, level+1)
def setup_search(self):
"""Set up a search client."""
if self.testing:
return DummyExternalSearchIndex()
else:
search = ExternalSearchIndex(self._db)
if not search:
self.log.warn("No external search server configured.")
return None
return search
def setup_circulation(self, library, analytics):
"""Set up the Circulation object."""
if self.testing:
cls = MockCirculationAPI
else:
cls = CirculationAPI
return cls(self._db, library, analytics)
def setup_one_time_controllers(self):
"""Set up all the controllers that will be used by the web app.
This method will be called only once, no matter how many times the
site configuration changes.
"""
self.index_controller = IndexController(self)
self.opds_feeds = OPDSFeedController(self)
self.loans = LoanController(self)
self.annotations = AnnotationController(self)
self.urn_lookup = URNLookupController(self._db)
self.work_controller = WorkController(self)
self.analytics_controller = AnalyticsController(self)
self.profiles = ProfileController(self)
self.heartbeat = HeartbeatController()
self.service_status = ServiceStatusController(self)
def setup_configuration_dependent_controllers(self):
"""Set up all the controllers that depend on the
current site configuration.
This method will be called fresh every time the site
configuration changes.
"""
self.oauth_controller = OAuthController(self.auth)
def setup_adobe_vendor_id(self, _db, library):
"""If this Library has an Adobe Vendor ID integration,
configure the controller for it.
:return: An Authdata object for `library`, if one could be created.
"""
short_client_token_initialization_exceptions = dict()
adobe = ExternalIntegration.lookup(
_db, ExternalIntegration.ADOBE_VENDOR_ID,
ExternalIntegration.DRM_GOAL, library=library
)
warning = (
'Adobe Vendor ID controller is disabled due to missing or'
' incomplete configuration. This is probably nothing to'
' worry about.'
)
new_adobe_vendor_id = None
if adobe:
# Relatively few libraries will have this setup.
vendor_id = adobe.username
node_value = adobe.password
if vendor_id and node_value:
if new_adobe_vendor_id:
self.log.warn(
"Multiple libraries define an Adobe Vendor ID integration. This is not supported and the last library seen will take precedence."
)
new_adobe_vendor_id = AdobeVendorIDController(
_db,
library,
vendor_id,
node_value,
self.auth
)
else:
self.log.warn("Adobe Vendor ID controller is disabled due to missing or incomplete configuration. This is probably nothing to worry about.")
self.adobe_vendor_id = new_adobe_vendor_id
# But almost all libraries will have a Short Client Token
# setup. We're not setting anything up here, but this is useful
# information for the calling code to have so it knows
# whether or not we should support the Device Management Protocol.
registry = ExternalIntegration.lookup(
_db, ExternalIntegration.OPDS_REGISTRATION,
ExternalIntegration.DISCOVERY_GOAL, library=library
)
authdata = None
if registry:
try:
authdata = AuthdataUtility.from_config(library, _db)
except CannotLoadConfiguration, e:
short_client_token_initialization_exceptions[library.id] = e
self.log.error(
"Short Client Token configuration for %s is present but not working. This may be cause for concern. Original error: %s",
library.name, e
)
self.short_client_token_initialization_exceptions = short_client_token_initialization_exceptions
return authdata
def annotator(self, lane, *args, **kwargs):
"""Create an appropriate OPDS annotator for the given lane."""
if lane:
library = lane.library
else:
library = flask.request.library
return CirculationManagerAnnotator(
self.circulation_apis[library.id], lane, library,
top_level_title='All Books', *args, **kwargs
)
@property
def authentication_for_opds_document(self):
"""Make sure the current request's library has an Authentication For
OPDS document in the cache, then return the cached version.
"""
name = flask.request.library.short_name
if name not in self.authentication_for_opds_documents:
self.authentication_for_opds_documents[name] = self.auth.create_authentication_document()
return self.authentication_for_opds_documents[name]
@property
def public_key_integration_document(self):
site_id = ConfigurationSetting.sitewide(self._db, Configuration.BASE_URL_KEY).value
document = dict(id=site_id)
public_key_dict = dict()
public_key = ConfigurationSetting.sitewide(self._db, Configuration.PUBLIC_KEY).value
if public_key:
public_key_dict['type'] = 'RSA'
public_key_dict['value'] = public_key
document['public_key'] = public_key_dict
return json.dumps(document)
class CirculationManagerController(BaseCirculationManagerController):
@property
def circulation(self):
"""Return the appropriate CirculationAPI for the request Library."""
library_id = flask.request.library.id
return self.manager.circulation_apis[library_id]
def load_lane(self, language_key, name):
"""Turn user input into a Lane object."""
library_id = flask.request.library.id
top_level_lane = self.manager.top_level_lanes[library_id]
if language_key is None and name is None:
return top_level_lane
lanelist = top_level_lane.sublanes
if not language_key in lanelist.by_languages:
return NO_SUCH_LANE.detailed(
_("Unrecognized language key: %(language_key)s", language_key=language_key)
)
if name:
name = name.replace("__", "/")
lanes = lanelist.by_languages[language_key]
if not name:
defaults = [x for x in lanes.values() if x.default_for_language]
if len(defaults) == 1:
# This language has one, and only one, default lane.
return defaults[0]
if name not in lanes:
return NO_SUCH_LANE.detailed(
_("No such lane: %(lane_name)s", lane_name=name)
)
return lanes[name]
def load_work(self, library, identifier_type, identifier):
pools = self.load_licensepools(library, identifier_type, identifier)
if isinstance(pools, ProblemDetail):
return pools
# We know there is at least one LicensePool, and all LicensePools
# for an Identifier have the same Work.
return pools[0].work
def load_licensepools(self, library, identifier_type, identifier):
"""Turn user input into one or more LicensePool objects.
:param library: The LicensePools must be associated with one of this
Library's Collections.
:param identifier_type: A type of identifier, e.g. "ISBN"
:param identifier: An identifier string, used with `identifier_type`
to look up an Identifier.
"""
_db = Session.object_session(library)
pools = _db.query(LicensePool).join(LicensePool.collection).join(
LicensePool.identifier).join(Collection.libraries).filter(
Identifier.type==identifier_type
).filter(
Identifier.identifier==identifier
).filter(
Library.id==library.id
).all()
if not pools:
return NO_LICENSES.detailed(
_("The item you're asking about (%s/%s) isn't in this collection.") % (
identifier_type, identifier
)
)
return pools
def load_licensepool(self, license_pool_id):
"""Turns user input into a LicensePool"""
license_pool = get_one(self._db, LicensePool, id=license_pool_id)
if not license_pool:
return INVALID_INPUT.detailed(
_("License Pool #%d does not exist.") % license_pool_id
)
return license_pool
def load_licensepooldelivery(self, pool, mechanism_id):
"""Turn user input into a LicensePoolDeliveryMechanism object."""
mechanism = get_one(
self._db, LicensePoolDeliveryMechanism,
data_source=pool.data_source, identifier=pool.identifier,
delivery_mechanism_id=mechanism_id, on_multiple='interchangeable'
)
return mechanism or BAD_DELIVERY_MECHANISM
def apply_borrowing_policy(self, patron, license_pool):
if isinstance(patron, ProblemDetail):
return patron
if not patron.can_borrow(license_pool.work, self.manager.lending_policy):
return FORBIDDEN_BY_POLICY.detailed(
_("Library policy prohibits us from lending you this book."),
status_code=451
)
if (not patron.library.allow_holds and
license_pool.licenses_available == 0 and
not license_pool.open_access
):
return FORBIDDEN_BY_POLICY.detailed(
_("Library policy prohibits the placement of holds."),
status_code=403
)
return None
class IndexController(CirculationManagerController):
    """Redirect the patron to the appropriate feed."""

    def __call__(self):
        library_short_name = flask.request.library.short_name
        policy = Configuration.root_lane_policy()
        if policy:
            # Patrons of different types get different root lanes, so we
            # must authenticate the patron before deciding where to send
            # them.
            return self.appropriate_index_for_patron_type()
        # The simple case: the app is equally open to all clients.
        return redirect(self.cdn_url_for('acquisition_groups', library_short_name=library_short_name))

    def authentication_document(self):
        """Serve this library's Authentication For OPDS document."""
        headers = {"Content-Type": AuthenticationForOPDSDocument.MEDIA_TYPE}
        return Response(
            self.manager.authentication_for_opds_document, 200, headers
        )

    def authenticated_patron_root_lane(self):
        """Find the root lane, if any, configured for the authenticated
        patron's external type.

        May instead return a ProblemDetail or Response produced during
        authentication.
        """
        patron = self.authenticated_patron_from_request()
        if isinstance(patron, (ProblemDetail, Response)):
            return patron
        policy = Configuration.root_lane_policy()
        lane_info = policy.get(patron.external_type)
        if lane_info is None:
            return None
        lang_key, name = lane_info
        return self.load_lane(lang_key, name)

    def appropriate_index_for_patron_type(self):
        """Redirect to the grouped feed for this patron's root lane."""
        library_short_name = flask.request.library.short_name
        root_lane = self.authenticated_patron_root_lane()
        if isinstance(root_lane, (ProblemDetail, Response)):
            return root_lane
        kwargs = dict(library_short_name=library_short_name)
        if root_lane is not None:
            # Scope the redirect to the patron's root lane.
            kwargs.update(
                languages=root_lane.language_key,
                lane_name=root_lane.url_name,
            )
        return redirect(self.cdn_url_for('acquisition_groups', **kwargs))

    def public_key_document(self):
        """Serves a sitewide public key document"""
        return Response(
            self.manager.public_key_integration_document,
            200,
            {'Content-Type': 'application/opds+json'},
        )
class OPDSFeedController(CirculationManagerController):
    """Serve grouped, paginated, and search OPDS feeds for lanes."""

    def groups(self, languages, lane_name):
        """Build or retrieve a grouped acquisition feed."""
        lane = self.load_lane(languages, lane_name)
        if isinstance(lane, ProblemDetail):
            return lane
        library_short_name = flask.request.library.short_name
        url = self.cdn_url_for(
            "acquisition_groups",
            languages=languages,
            lane_name=lane_name,
            library_short_name=library_short_name,
        )
        annotator = self.manager.annotator(lane)
        feed = AcquisitionFeed.groups(
            self._db, lane.display_name, url, lane, annotator
        )
        return feed_response(feed.content)

    def feed(self, languages, lane_name):
        """Build or retrieve a paginated acquisition feed."""
        lane = self.load_lane(languages, lane_name)
        if isinstance(lane, ProblemDetail):
            return lane
        library_short_name = flask.request.library.short_name
        url = self.cdn_url_for(
            "feed",
            languages=languages,
            lane_name=lane_name,
            library_short_name=library_short_name,
        )
        annotator = self.manager.annotator(lane)
        facets = load_facets_from_request()
        if isinstance(facets, ProblemDetail):
            return facets
        pagination = load_pagination_from_request()
        if isinstance(pagination, ProblemDetail):
            return pagination
        feed = AcquisitionFeed.page(
            self._db, lane.display_name, url, lane,
            annotator=annotator, facets=facets, pagination=pagination,
        )
        return feed_response(feed.content)

    def search(self, languages, lane_name):
        """Run a search within a lane, or serve the OpenSearch form."""
        lane = self.load_lane(languages, lane_name)
        if isinstance(lane, ProblemDetail):
            return lane
        query = flask.request.args.get('q')
        library_short_name = flask.request.library.short_name
        search_url = self.url_for(
            'lane_search',
            languages=languages,
            lane_name=lane_name,
            library_short_name=library_short_name,
        )
        if not query:
            # No query string was given -- describe how to search this
            # lane instead of searching it.
            return OpenSearchDocument.for_lane(lane, search_url)
        pagination = load_pagination_from_request(
            default_size=Pagination.DEFAULT_SEARCH_SIZE
        )
        if isinstance(pagination, ProblemDetail):
            return pagination
        # Run a search.
        search_url += "?q=" + urllib.quote(query.encode("utf8"))
        annotator = self.manager.annotator(lane)
        os_info = OpenSearchDocument.search_info(lane)
        results_feed = AcquisitionFeed.search(
            _db=self._db,
            title=os_info['name'],
            url=search_url,
            lane=lane,
            search_engine=self.manager.external_search,
            query=query,
            annotator=annotator,
            pagination=pagination,
        )
        return feed_response(results_feed)
class LoanController(CirculationManagerController):
    """Handle loans, holds, fulfillment and revocation for the
    authenticated patron.
    """
    def get_patron_circ_objects(self, object_class, patron, license_pools):
        """Fetch this patron's circulation objects (Loans or Holds) on the
        given LicensePools.

        :param object_class: Loan or Hold.
        """
        pool_ids = [pool.id for pool in license_pools]
        return self._db.query(object_class).filter(
            object_class.patron_id==patron.id,
            object_class.license_pool_id.in_(pool_ids)
        ).options(eagerload(object_class.license_pool)).all()
    def get_patron_loan(self, patron, license_pools):
        """Return (loan, license_pool) for the patron's first loan on any of
        the given pools, or (None, None) if there is none.
        """
        loans = self.get_patron_circ_objects(Loan, patron, license_pools)
        if loans:
            loan = loans[0]
            return loan, loan.license_pool
        return None, None
    def get_patron_hold(self, patron, license_pools):
        """Return (hold, license_pool) for the patron's first hold on any of
        the given pools, or (None, None) if there is none.
        """
        holds = self.get_patron_circ_objects(Hold, patron, license_pools)
        if holds:
            hold = holds[0]
            return hold, hold.license_pool
        return None, None
    def sync(self):
        """Serve an OPDS feed of the patron's current loans and holds."""
        if flask.request.method=='HEAD':
            return Response()
        patron = flask.request.patron
        # First synchronize our local list of loans and holds with all
        # third-party loan providers.
        if patron.authorization_identifier:
            header = self.authorization_header()
            credential = self.manager.auth.get_credential_from_header(header)
            try:
                self.circulation.sync_bookshelf(patron, credential)
            except Exception, e:
                # If anything goes wrong, omit the sync step and just
                # display the current active loans, as we understand them.
                self.manager.log.error(
                    "ERROR DURING SYNC for %s: %r", patron.id, e, exc_info=e
                )
        # Then make the feed.
        feed = CirculationManagerLoanAndHoldAnnotator.active_loans_for(
            self.circulation, patron)
        return feed_response(feed, cache_for=None)
    def borrow(self, identifier_type, identifier, mechanism_id=None):
        """Create a new loan or hold for a book.
        Return an OPDS Acquisition feed that includes a link of rel
        "http://opds-spec.org/acquisition", which can be used to fetch the
        book or the license file.

        :param mechanism_id: Optional ID of the specific delivery
            mechanism the client wants.
        """
        patron = flask.request.patron
        library = flask.request.library
        result = self.best_lendable_pool(
            library, patron, identifier_type, identifier, mechanism_id
        )
        if not result:
            # No LicensePools were found and no ProblemDetail
            # was returned. Send a generic ProblemDetail.
            return NO_LICENSES.detailed(
                _("I've never heard of this work.")
            )
        if isinstance(result, ProblemDetail):
            return result
        pool, mechanism = result
        header = self.authorization_header()
        credential = self.manager.auth.get_credential_from_header(header)
        problem_doc = None
        # Map each circulation failure mode to an appropriate
        # ProblemDetail. Some are returned immediately; the rest fall
        # through to the common `if problem_doc` check below.
        try:
            loan, hold, is_new = self.circulation.borrow(
                patron, credential, pool, mechanism
            )
        except NoOpenAccessDownload, e:
            problem_doc = NO_LICENSES.detailed(
                _("Couldn't find an open-access download link for this book."),
                status_code=404
            )
        except PatronAuthorizationFailedException, e:
            problem_doc = INVALID_CREDENTIALS
        except PatronLoanLimitReached, e:
            problem_doc = LOAN_LIMIT_REACHED.with_debug(str(e))
        except PatronHoldLimitReached, e:
            problem_doc = e.as_problem_detail_document()
        except DeliveryMechanismError, e:
            return BAD_DELIVERY_MECHANISM.with_debug(
                str(e), status_code=e.status_code
            )
        except OutstandingFines, e:
            problem_doc = OUTSTANDING_FINES.detailed(
                _("You must pay your $%(fine_amount).2f outstanding fines before you can borrow more books.", fine_amount=patron.fines)
            )
        except AuthorizationExpired, e:
            return e.as_problem_detail_document(debug=False)
        except AuthorizationBlocked, e:
            return e.as_problem_detail_document(debug=False)
        except CannotLoan, e:
            problem_doc = CHECKOUT_FAILED.with_debug(str(e))
        except CannotHold, e:
            problem_doc = HOLD_FAILED.with_debug(str(e))
        except CannotRenew, e:
            problem_doc = RENEW_FAILED.with_debug(str(e))
        except NotFoundOnRemote, e:
            problem_doc = NOT_FOUND_ON_REMOTE
        except CirculationException, e:
            # Generic circulation error.
            problem_doc = CHECKOUT_FAILED.with_debug(str(e))
        if problem_doc:
            return problem_doc
        # At this point we have either a loan or a hold. If a loan, serve
        # a feed that tells the patron how to fulfill the loan. If a hold,
        # serve a feed that talks about the hold.
        if loan:
            feed = CirculationManagerLoanAndHoldAnnotator.single_loan_feed(
                self.circulation, loan)
        elif hold:
            feed = CirculationManagerLoanAndHoldAnnotator.single_hold_feed(
                self.circulation, hold)
        else:
            # This should never happen -- we should have sent a more specific
            # error earlier.
            return HOLD_FAILED
        if isinstance(feed, OPDSFeed):
            content = unicode(feed)
        else:
            content = etree.tostring(feed)
        # 201 Created for a brand-new loan/hold, 200 for an existing one.
        if is_new:
            status_code = 201
        else:
            status_code = 200
        headers = { "Content-Type" : OPDSFeed.ACQUISITION_FEED_TYPE }
        return Response(content, status_code, headers)
    def best_lendable_pool(self, library, patron, identifier_type, identifier,
                           mechanism_id):
        """Of the available LicensePools for the given Identifier, return the
        one that's the best candidate for loaning out right now.

        :return: A (LicensePool, LicensePoolDeliveryMechanism-or-None)
            2-tuple on success; a ProblemDetail (possibly None) on failure.
        """
        # Turn source + identifier into a set of LicensePools
        pools = self.load_licensepools(
            library, identifier_type, identifier
        )
        if isinstance(pools, ProblemDetail):
            # Something went wrong.
            return pools
        best = None
        mechanism = None
        problem_doc = None
        existing_loans = self._db.query(Loan).filter(
            Loan.license_pool_id.in_([lp.id for lp in pools]),
            Loan.patron==patron
        ).all()
        if existing_loans:
            return ALREADY_CHECKED_OUT
        # We found a number of LicensePools. Try to locate one that
        # we can actually loan to the patron.
        for pool in pools:
            problem_doc = self.apply_borrowing_policy(patron, pool)
            if problem_doc:
                # As a matter of policy, the patron is not allowed to borrow
                # this book.
                continue
            # Beyond this point we know that site policy does not prohibit
            # us from lending this pool to this patron.
            if mechanism_id:
                # But the patron has requested a license pool that
                # supports a specific delivery mechanism. This pool
                # must offer that mechanism.
                mechanism = self.load_licensepooldelivery(pool, mechanism_id)
                if isinstance(mechanism, ProblemDetail):
                    problem_doc = mechanism
                    continue
            # Beyond this point we have a license pool that we can
            # actually loan or put on hold.
            # But there might be many such LicensePools, and we want
            # to pick the one that will get the book to the patron
            # with the shortest wait.
            if (not best
                or pool.licenses_available > best.licenses_available
                or pool.patrons_in_hold_queue < best.patrons_in_hold_queue):
                best = pool
        if not best:
            # We were unable to find any LicensePool that fit the
            # criteria. Return the last ProblemDetail seen (may be None).
            return problem_doc
        return best, mechanism
    def fulfill(self, license_pool_id, mechanism_id=None, do_get=None):
        """Fulfill a book that has already been checked out.
        If successful, this will serve the patron a downloadable copy of
        the book, or a DRM license file which can be used to get the
        book). Alternatively, for a streaming delivery mechanism it may
        serve an OPDS entry with a link to a third-party web page that
        streams the content.

        :param do_get: HTTP GET function, injectable for testing; defaults
            to Representation.simple_http_get.
        """
        do_get = do_get or Representation.simple_http_get
        patron = flask.request.patron
        header = self.authorization_header()
        credential = self.manager.auth.get_credential_from_header(header)
        # Turn source + identifier into a LicensePool.
        pool = self.load_licensepool(license_pool_id)
        if isinstance(pool, ProblemDetail):
            return pool
        loan, loan_license_pool = self.get_patron_loan(patron, [pool])
        if not loan or not loan_license_pool:
            return NO_ACTIVE_LOAN.detailed(
                _("You have no active loan for this title.")
            )
        # Find the LicensePoolDeliveryMechanism they asked for.
        mechanism = None
        if mechanism_id:
            mechanism = self.load_licensepooldelivery(
                loan_license_pool, mechanism_id
            )
            if isinstance(mechanism, ProblemDetail):
                return mechanism
        if not mechanism:
            # See if the loan already has a mechanism set. We can use that.
            if loan and loan.fulfillment:
                mechanism = loan.fulfillment
            else:
                return BAD_DELIVERY_MECHANISM.detailed(
                    _("You must specify a delivery mechanism to fulfill this loan.")
                )
        try:
            fulfillment = self.circulation.fulfill(
                patron, credential, loan.license_pool, mechanism
            )
        except DeliveryMechanismConflict, e:
            # NOTE(review): `e.message` is a deprecated BaseException
            # attribute -- consider str(e).
            return DELIVERY_CONFLICT.detailed(e.message)
        except NoActiveLoan, e:
            return NO_ACTIVE_LOAN.detailed(
                _('Can\'t fulfill loan because you have no active loan for this book.'),
                status_code=e.status_code
            )
        except CannotFulfill, e:
            return CANNOT_FULFILL.with_debug(
                str(e), status_code=e.status_code
            )
        except FormatNotAvailable, e:
            return NO_ACCEPTABLE_FORMAT.with_debug(
                str(e), status_code=e.status_code
            )
        except DeliveryMechanismError, e:
            return BAD_DELIVERY_MECHANISM.with_debug(
                str(e), status_code=e.status_code
            )
        headers = dict()
        encoding_header = dict()
        # Enki DRM-free content must be requested with deflate encoding.
        if (fulfillment.data_source_name == DataSource.ENKI
            and mechanism.delivery_mechanism.drm_scheme_media_type == DeliveryMechanism.NO_DRM):
            encoding_header["Accept-Encoding"] = "deflate"
        if mechanism.delivery_mechanism.is_streaming:
            # If this is a streaming delivery mechanism, create an OPDS entry
            # with a fulfillment link to the streaming reader url.
            feed = CirculationManagerLoanAndHoldAnnotator.single_fulfillment_feed(
                self.circulation, loan, fulfillment)
            if isinstance(feed, OPDSFeed):
                content = unicode(feed)
            else:
                content = etree.tostring(feed)
            status_code = 200
            headers["Content-Type"] = OPDSFeed.ACQUISITION_FEED_TYPE
        else:
            content = fulfillment.content
            if fulfillment.content_link:
                # If we have a link to the content on a remote server, web clients may not
                # be able to access it if the remote server does not support CORS requests.
                # We need to fetch the content and return it instead of redirecting to it.
                try:
                    status_code, headers, content = do_get(fulfillment.content_link, headers=encoding_header)
                    headers = dict(headers)
                except RemoteIntegrationException, e:
                    return e.as_problem_detail_document(debug=False)
            else:
                status_code = 200
            if fulfillment.content_type:
                headers['Content-Type'] = fulfillment.content_type
        return Response(content, status_code, headers)
    def revoke(self, license_pool_id):
        """Return a loan or release a hold on the given LicensePool."""
        patron = flask.request.patron
        pool = self.load_licensepool(license_pool_id)
        if isinstance(pool, ProblemDetail):
            return pool
        loan, _ignore = self.get_patron_loan(patron, [pool])
        if loan:
            hold = None
        else:
            hold, _ignore = self.get_patron_hold(patron, [pool])
        if not loan and not hold:
            if not pool.work:
                title = 'this book'
            else:
                title = '"%s"' % pool.work.title
            return NO_ACTIVE_LOAN_OR_HOLD.detailed(
                _('Can\'t revoke because you have no active loan or hold for "%(title)s".', title=title),
                status_code=404
            )
        header = self.authorization_header()
        credential = self.manager.auth.get_credential_from_header(header)
        if loan:
            try:
                self.circulation.revoke_loan(patron, credential, pool)
            except RemoteRefusedReturn, e:
                title = _("Loan deleted locally but remote refused. Loan is likely to show up again on next sync.")
                return COULD_NOT_MIRROR_TO_REMOTE.detailed(title, status_code=503)
            except CannotReturn, e:
                title = _("Loan deleted locally but remote failed.")
                return COULD_NOT_MIRROR_TO_REMOTE.detailed(title, 503).with_debug(str(e))
        elif hold:
            if not self.circulation.can_revoke_hold(pool, hold):
                title = _("Cannot release a hold once it enters reserved state.")
                return CANNOT_RELEASE_HOLD.detailed(title, 400)
            try:
                self.circulation.release_hold(patron, credential, pool)
            except CannotReleaseHold, e:
                title = _("Hold released locally but remote failed.")
                return CANNOT_RELEASE_HOLD.detailed(title, 503).with_debug(str(e))
        # On success, serve the bare entry for the work.
        work = pool.work
        annotator = self.manager.annotator(None)
        return entry_response(
            AcquisitionFeed.single_entry(self._db, work, annotator)
        )
    def detail(self, identifier_type, identifier):
        """Serve (GET) or revoke (DELETE) the patron's loan or hold on a
        given identifier.
        """
        if flask.request.method=='DELETE':
            # NOTE(review): `revoke_loan_or_hold` is not defined on this
            # class -- only `revoke(license_pool_id)` is. A DELETE here
            # looks like it would raise AttributeError; confirm against the
            # route configuration.
            return self.revoke_loan_or_hold(identifier_type, identifier)
        patron = flask.request.patron
        library = flask.request.library
        pools = self.load_licensepools(library, identifier_type, identifier)
        if isinstance(pools, ProblemDetail):
            return pools
        loan, pool = self.get_patron_loan(patron, pools)
        if loan:
            hold = None
        else:
            hold, pool = self.get_patron_hold(patron, pools)
        if not loan and not hold:
            return NO_ACTIVE_LOAN_OR_HOLD.detailed(
                _('You have no active loan or hold for "%(title)s".', title=pool.work.title),
                status_code=404
            )
        if flask.request.method=='GET':
            if loan:
                feed = CirculationManagerLoanAndHoldAnnotator.single_loan_feed(
                    self.circulation, loan)
            else:
                feed = CirculationManagerLoanAndHoldAnnotator.single_hold_feed(
                    self.circulation, hold)
            feed = unicode(feed)
            return feed_response(feed, None)
class AnnotationController(CirculationManagerController):
    """Serve and accept patron annotations (W3C Web Annotation protocol)."""

    def container(self, identifier=None, accept_post=True):
        """Serve the patron's annotation container, or accept a new
        annotation via POST.
        """
        headers = dict()
        if accept_post:
            headers['Allow'] = 'GET,HEAD,OPTIONS,POST'
            headers['Accept-Post'] = AnnotationWriter.CONTENT_TYPE
        else:
            headers['Allow'] = 'GET,HEAD,OPTIONS'
        method = flask.request.method
        if method == 'HEAD':
            return Response(status=200, headers=headers)
        patron = flask.request.patron
        if method == 'GET':
            headers['Link'] = [
                '<http://www.w3.org/ns/ldp#BasicContainer>; rel="type"',
                '<http://www.w3.org/TR/annotation-protocol/>; rel="http://www.w3.org/ns/ldp#constrainedBy"',
            ]
            headers['Content-Type'] = AnnotationWriter.CONTENT_TYPE
            container, timestamp = AnnotationWriter.annotation_container_for(
                patron, identifier=identifier
            )
            if timestamp:
                etag = 'W/"%s"' % timestamp
                headers['Last-Modified'] = format_date_time(
                    mktime(timestamp.timetuple())
                )
            else:
                # No annotations yet; serve a weak empty ETag.
                etag = 'W/""'
            headers['ETag'] = etag
            return Response(
                json.dumps(container), status=200, headers=headers
            )
        # Anything else is a POST of a new annotation.
        annotation = AnnotationParser.parse(
            self._db, flask.request.data, patron
        )
        if isinstance(annotation, ProblemDetail):
            return annotation
        headers['Link'] = '<http://www.w3.org/ns/ldp#Resource>; rel="type"'
        headers['Content-Type'] = AnnotationWriter.CONTENT_TYPE
        body = json.dumps(AnnotationWriter.detail(annotation))
        return Response(body, 200, headers)

    def container_for_work(self, identifier_type, identifier):
        """Serve the read-only annotation container for a single work."""
        id_obj, _ = Identifier.for_foreign_id(
            self._db, identifier_type, identifier
        )
        return self.container(identifier=id_obj, accept_post=False)

    def detail(self, annotation_id):
        """Serve (GET) or deactivate (DELETE) a single annotation."""
        headers = {'Allow': 'GET,HEAD,OPTIONS,DELETE'}
        method = flask.request.method
        if method == 'HEAD':
            return Response(status=200, headers=headers)
        patron = flask.request.patron
        annotation = get_one(
            self._db, Annotation,
            patron=patron,
            id=annotation_id,
            active=True,
        )
        if not annotation:
            return NO_ANNOTATION
        if method == 'DELETE':
            annotation.set_inactive()
            return Response()
        headers['Link'] = '<http://www.w3.org/ns/ldp#Resource>; rel="type"'
        headers['Content-Type'] = AnnotationWriter.CONTENT_TYPE
        body = json.dumps(AnnotationWriter.detail(annotation))
        return Response(body, 200, headers)
class WorkController(CirculationManagerController):
    """Serve feeds and entries centered on a single Work: permalinks,
    related books, recommendations, series and contributor feeds, and
    problem reports.
    """
    def _lane_details(self, languages, audiences):
        """Split comma-separated URL path components into lists.

        Audience values are URL-encoded in the path and must be unquoted.
        """
        if languages:
            languages = languages.split(',')
        if audiences:
            audiences = [urllib.unquote_plus(a) for a in audiences.split(',')]
        return languages, audiences
    def contributor(self, contributor_name, languages, audiences):
        """Serve a feed of books written by a particular author"""
        library = flask.request.library
        if not contributor_name:
            return NO_SUCH_LANE.detailed(_("No contributor provided"))
        languages, audiences = self._lane_details(languages, audiences)
        lane = ContributorLane(
            self._db, library, contributor_name, languages=languages, audiences=audiences
        )
        annotator = self.manager.annotator(lane)
        facets = load_facets_from_request()
        if isinstance(facets, ProblemDetail):
            return facets
        pagination = load_pagination_from_request()
        if isinstance(pagination, ProblemDetail):
            return pagination
        url = annotator.feed_url(
            lane,
            facets=facets,
            pagination=pagination,
        )
        feed = AcquisitionFeed.page(
            self._db, lane.display_name, url, lane,
            facets=facets, pagination=pagination,
            annotator=annotator, cache_type=CachedFeed.CONTRIBUTOR_TYPE
        )
        return feed_response(unicode(feed.content))
    def permalink(self, identifier_type, identifier):
        """Serve an entry for a single book.
        This does not include any loan or hold-specific information for
        the authenticated patron.
        This is different from the /works lookup protocol, in that it
        returns a single entry while the /works lookup protocol returns a
        feed containing any number of entries.
        """
        library = flask.request.library
        work = self.load_work(library, identifier_type, identifier)
        if isinstance(work, ProblemDetail):
            return work
        annotator = self.manager.annotator(None)
        return entry_response(
            AcquisitionFeed.single_entry(self._db, work, annotator)
        )
    def related(self, identifier_type, identifier, novelist_api=None):
        """Serve a groups feed of books related to a given book."""
        library = flask.request.library
        work = self.load_work(library, identifier_type, identifier)
        if isinstance(work, ProblemDetail):
            return work
        try:
            lane_name = "Books Related to %s by %s" % (
                work.title, work.author
            )
            lane = RelatedBooksLane(
                self._db, library, work, lane_name, novelist_api=novelist_api
            )
        except ValueError, e:
            # No related books were found.
            return NO_SUCH_LANE.detailed(e.message)
        annotator = self.manager.annotator(lane)
        facets = load_facets_from_request()
        if isinstance(facets, ProblemDetail):
            return facets
        pagination = load_pagination_from_request()
        if isinstance(pagination, ProblemDetail):
            return pagination
        url = annotator.feed_url(
            lane,
            facets=facets,
            pagination=pagination,
        )
        feed = AcquisitionFeed.groups(
            self._db, lane.DISPLAY_NAME, url, lane, annotator=annotator
        )
        return feed_response(unicode(feed.content))
    def recommendations(self, identifier_type, identifier, novelist_api=None):
        """Serve a feed of recommendations related to a given book."""
        library = flask.request.library
        work = self.load_work(library, identifier_type, identifier)
        if isinstance(work, ProblemDetail):
            return work
        lane_name = "Recommendations for %s by %s" % (work.title, work.author)
        try:
            lane = RecommendationLane(
                self._db, library, work, lane_name, novelist_api=novelist_api
            )
        except ValueError, e:
            # NoveList isn't configured.
            return NO_SUCH_LANE.detailed(_("Recommendations not available"))
        annotator = self.manager.annotator(lane)
        facets = load_facets_from_request()
        if isinstance(facets, ProblemDetail):
            return facets
        pagination = load_pagination_from_request()
        if isinstance(pagination, ProblemDetail):
            return pagination
        url = annotator.feed_url(
            lane,
            facets=facets,
            pagination=pagination,
        )
        feed = AcquisitionFeed.page(
            self._db, lane.DISPLAY_NAME, url, lane,
            facets=facets, pagination=pagination,
            annotator=annotator, cache_type=CachedFeed.RECOMMENDATIONS_TYPE
        )
        return feed_response(unicode(feed.content))
    def report(self, identifier_type, identifier):
        """Report a problem with a book."""
        # TODO: We don't have a reliable way of knowing whether the
        # complaint is being lodged against the work or against a
        # specific LicensePool.
        # Turn source + identifier into a set of LicensePools
        library = flask.request.library
        pools = self.load_licensepools(library, identifier_type, identifier)
        if isinstance(pools, ProblemDetail):
            # Something went wrong.
            return pools
        if flask.request.method == 'GET':
            # Return a list of valid URIs to use as the type of a problem detail
            # document.
            data = "\n".join(Complaint.VALID_TYPES)
            return Response(data, 200, {"Content-Type" : "text/uri-list"})
        data = flask.request.data
        controller = ComplaintController()
        return controller.register(pools[0], data)
    def series(self, series_name, languages, audiences):
        """Serve a feed of books in the same series as a given book."""
        library = flask.request.library
        if not series_name:
            return NO_SUCH_LANE.detailed(_("No series provided"))
        languages, audiences = self._lane_details(languages, audiences)
        lane = SeriesLane(self._db, library, series_name=series_name,
                          languages=languages, audiences=audiences
        )
        annotator = self.manager.annotator(lane)
        # In addition to the orderings enabled for this library, a
        # series collection may be ordered by series position, and is
        # ordered that way by default.
        facet_config = FacetConfig.from_library(library)
        facet_config.set_default_facet(
            Facets.ORDER_FACET_GROUP_NAME, Facets.ORDER_SERIES_POSITION
        )
        facets = load_facets_from_request(facet_config=facet_config)
        if isinstance(facets, ProblemDetail):
            return facets
        pagination = load_pagination_from_request()
        if isinstance(pagination, ProblemDetail):
            return pagination
        url = annotator.feed_url(
            lane,
            facets=facets,
            pagination=pagination,
        )
        feed = AcquisitionFeed.page(
            self._db, lane.display_name, url, lane,
            facets=facets, pagination=pagination,
            annotator=annotator, cache_type=CachedFeed.SERIES_TYPE
        )
        return feed_response(unicode(feed.content))
class ProfileController(CirculationManagerController):
    """Implement the User Profile Management Protocol."""

    @property
    def _controller(self):
        """Instantiate a CoreProfileController that actually does the work.
        """
        patron = self.authenticated_patron_from_request()
        return CoreProfileController(PatronProfileStorage(patron))

    def protocol(self):
        """Handle a UPMP request."""
        controller = self._controller
        request = flask.request
        if request.method == 'GET':
            result = controller.get()
        else:
            result = controller.put(request.headers, request.data)
        if isinstance(result, ProblemDetail):
            return result
        return make_response(*result)
class AnalyticsController(CirculationManagerController):
    """Record client-reported circulation events."""

    def track_event(self, identifier_type, identifier, event_type):
        # TODO: It usually doesn't matter, but there should be
        # a way to distinguish between different LicensePools for the
        # same book.
        if event_type not in CirculationEvent.CLIENT_EVENTS:
            return INVALID_ANALYTICS_EVENT_TYPE
        library = flask.request.library
        pools = self.load_licensepools(library, identifier_type, identifier)
        if isinstance(pools, ProblemDetail):
            return pools
        self.manager.analytics.collect_event(
            library, pools[0], event_type, datetime.datetime.utcnow()
        )
        return Response({}, 200)
class ServiceStatusController(CirculationManagerController):
    """Render a minimal HTML page showing the response times of the
    circulation services backing the requested library.
    """
    # HTML skeleton; %(statuses)s is replaced with a run of <li> items.
    template = """<!DOCTYPE HTML>
<html lang="en" class="">
<head>
<meta charset="utf8">
</head>
<body>
<ul>
%(statuses)s
</ul>
</body>
</html>
"""
    def __call__(self):
        """Time the loan-related operations for this library's circulation
        API and render the results as an HTML list.
        """
        library = flask.request.library
        circulation = self.manager.circulation_apis[library.id]
        service_status = ServiceStatus(circulation)
        timings = service_status.loans_status(response=True)
        statuses = []
        # Sort by service name so the page is stable between reloads.
        for k, v in sorted(timings.items()):
            statuses.append(" <li><b>%s</b>: %s</li>" % (k, v))
        doc = self.template % dict(statuses="\n".join(statuses))
        return Response(doc, 200, {"Content-Type": "text/html"})
# api/controller.py
from nose.tools import set_trace
import json
import logging
import sys
import urllib
import datetime
from wsgiref.handlers import format_date_time
from time import mktime
from lxml import etree
from sqlalchemy.orm import eagerload
from functools import wraps
import flask
from flask import (
make_response,
Response,
redirect,
)
from flask.ext.babel import lazy_gettext as _
from core.app_server import (
entry_response,
feed_response,
cdn_url_for,
url_for,
load_lending_policy,
load_facets_from_request,
load_pagination_from_request,
ComplaintController,
HeartbeatController,
URNLookupController,
)
from core.external_search import (
ExternalSearchIndex,
DummyExternalSearchIndex,
)
from core.facets import FacetConfig
from core.log import LogConfiguration
from core.lane import (
Facets,
Pagination,
Lane,
LaneList,
)
from core.model import (
get_one,
get_one_or_create,
production_session,
Admin,
Annotation,
CachedFeed,
CirculationEvent,
Collection,
Complaint,
ConfigurationSetting,
DataSource,
DeliveryMechanism,
ExternalIntegration,
Hold,
Identifier,
Library,
LicensePool,
Loan,
LicensePoolDeliveryMechanism,
PatronProfileStorage,
Representation,
Session,
Work,
)
from core.opds import (
AcquisitionFeed,
)
from core.util.opds_writer import (
OPDSFeed,
)
from core.opensearch import OpenSearchDocument
from core.user_profile import ProfileController as CoreProfileController
from core.util.flask_util import (
problem,
)
from core.util.authentication_for_opds import AuthenticationForOPDSDocument
from core.util.problem_detail import ProblemDetail
from core.util.http import (
RemoteIntegrationException,
)
from circulation_exceptions import *
from opds import (
CirculationManagerAnnotator,
CirculationManagerLoanAndHoldAnnotator,
)
from annotations import (
AnnotationWriter,
AnnotationParser,
)
from problem_details import *
from authenticator import (
Authenticator,
OAuthController,
)
from config import (
Configuration,
CannotLoadConfiguration,
)
from lanes import (
make_lanes,
ContributorLane,
RecommendationLane,
RelatedBooksLane,
SeriesLane,
)
from adobe_vendor_id import (
AdobeVendorIDController,
DeviceManagementProtocolController,
AuthdataUtility,
)
from axis import Axis360API
from overdrive import OverdriveAPI
from bibliotheca import BibliothecaAPI
from circulation import CirculationAPI
from novelist import (
NoveListAPI,
MockNoveListAPI,
)
from base_controller import BaseCirculationManagerController
from testing import MockCirculationAPI
from services import ServiceStatus
from core.analytics import Analytics
class CirculationManager(object):
    def __init__(self, _db, lanes=None, testing=False):
        """Set up the circulation manager web app.

        :param _db: A database session.
        :param lanes: Optional lane descriptions used when building each
            library's lane hierarchy in load_settings.
        :param testing: If True, skip loading the site configuration file
            (setup_circulation / setup_search then use mock objects).
        """
        self.log = logging.getLogger("Circulation manager web app")
        self._db = _db
        if not testing:
            try:
                self.config = Configuration.load(_db)
            except CannotLoadConfiguration, e:
                # Without a configuration there is nothing useful we can
                # do; bail out of the process.
                self.log.error("Could not load configuration file: %s" % e)
                sys.exit()
        self.testing = testing
        # Record the configuration timestamp so reload_settings_if_changed
        # can detect later updates. timeout=0 forces a fresh read.
        self.site_configuration_last_update = (
            Configuration.site_configuration_last_update(self._db, timeout=0)
        )
        self.lane_descriptions = lanes
        # One-time controllers first; load_settings may be called again
        # later when the site configuration changes.
        self.setup_one_time_controllers()
        self.load_settings()
def reload_settings_if_changed(self):
"""If the site configuration has been updated, reload the
CirculationManager's configuration from the database.
"""
last_update = Configuration.site_configuration_last_update(self._db)
if last_update > self.site_configuration_last_update:
self.load_settings()
self.site_configuration_last_update = last_update
    def load_settings(self):
        """Load all necessary configuration settings and external
        integrations from the database.
        This is called once when the CirculationManager is
        initialized. It may also be called later to reload the site
        configuration after changes are made in the administrative
        interface.
        """
        LogConfiguration.initialize(self._db)
        self.analytics = Analytics(self._db)
        self.auth = Authenticator(self._db, self.analytics)
        self.setup_external_search()
        # Track the Lane configuration for each library by mapping its
        # database ID to the top-level lane.
        new_top_level_lanes = {}
        # Create a CirculationAPI for each library.
        new_circulation_apis = {}
        new_adobe_device_management = None
        for library in self._db.query(Library):
            lanes = make_lanes(self._db, library, self.lane_descriptions)
            new_top_level_lanes[library.id] = (
                self.create_top_level_lane(
                    self._db, library, lanes
                )
            )
            new_circulation_apis[library.id] = self.setup_circulation(
                library, self.analytics
            )
            authdata = self.setup_adobe_vendor_id(self._db, library)
            if authdata and not new_adobe_device_management:
                # There's at least one library on this system that
                # wants Vendor IDs. This means we need to advertise support
                # for the Device Management Protocol.
                new_adobe_device_management = DeviceManagementProtocolController(self)
        # Swap in the freshly built structures only after the loop so a
        # reload never leaves the manager in a half-built state.
        self.adobe_device_management = new_adobe_device_management
        self.top_level_lanes = new_top_level_lanes
        self.circulation_apis = new_circulation_apis
        self.lending_policy = load_lending_policy(
            Configuration.policy('lending', {})
        )
        self.patron_web_client_url = ConfigurationSetting.sitewide(
            self._db, Configuration.PATRON_WEB_CLIENT_URL).value
        self.setup_configuration_dependent_controllers()
        # Cached Authentication For OPDS documents, invalidated on reload.
        self.authentication_for_opds_documents = {}
@property
def external_search(self):
    """Retrieve or create a connection to the search interface.

    Created lazily, so a failure to connect only affects searches,
    not the rest of the circulation manager.
    """
    search = self.__external_search
    if search:
        return search
    self.setup_external_search()
    return self.__external_search
def setup_external_search(self):
    """(Re)create the search index client.

    On a configuration failure, the client is set to None and the
    exception is recorded rather than raised.
    """
    try:
        self.__external_search = self.setup_search()
        self.external_search_initialization_exception = None
    except CannotLoadConfiguration as e:
        self.log.error(
            "Exception loading search configuration: %s", e
        )
        self.external_search_initialization_exception = e
        self.__external_search = None
    return self.__external_search
def create_top_level_lane(self, _db, library, lanelist):
    """Wrap a library's lanes in a single invisible, searchable
    top-level Lane called 'All Books'.
    """
    display = 'All Books'
    return Lane(
        _db, library, display,
        display_name=display,
        parent=None,
        sublanes=lanelist.lanes,
        include_all=False,
        languages=None,
        searchable=True,
        invisible=True,
    )
def cdn_url_for(self, view, *args, **kwargs):
    # Thin wrapper around the module-level cdn_url_for helper; kept
    # as a method so subclasses can override URL generation.
    return cdn_url_for(view, *args, **kwargs)
def url_for(self, view, *args, **kwargs):
    """Generate an absolute URL for `view` (forces _external=True)."""
    kwargs.update(_external=True)
    return url_for(view, *args, **kwargs)
def log_lanes(self, lanelist=None, level=0):
    """Output information about the lane layout.

    :param lanelist: Lanes to log; defaults to the top-level lane's
        sublanes.
    :param level: Recursion depth, used to indent the debug output.
    """
    # NOTE(review): the visible code only ever sets
    # `self.top_level_lanes` (a dict keyed by library id);
    # `self.top_level_lane` is not assigned anywhere in view. Confirm
    # this attribute exists before relying on the default argument.
    lanelist = lanelist or self.top_level_lane.sublanes
    for lane in lanelist:
        self.log.debug("%s%r", "-" * level, lane)
        if lane.sublanes:
            self.log_lanes(lane.sublanes, level+1)
def setup_search(self):
    """Set up a search client, or None if none is configured."""
    if self.testing:
        # Tests get an in-memory stand-in.
        return DummyExternalSearchIndex()
    search = ExternalSearchIndex(self._db)
    if search:
        return search
    self.log.warn("No external search server configured.")
    return None
def setup_circulation(self, library, analytics):
    """Set up the Circulation object for one library."""
    api_class = MockCirculationAPI if self.testing else CirculationAPI
    return api_class(self._db, library, analytics)
def setup_one_time_controllers(self):
    """Set up all the controllers that will be used by the web app.

    This method will be called only once, no matter how many times the
    site configuration changes.
    """
    self.index_controller = IndexController(self)
    self.opds_feeds = OPDSFeedController(self)
    self.loans = LoanController(self)
    self.annotations = AnnotationController(self)
    # The URN lookup controller takes the session directly rather
    # than the manager.
    self.urn_lookup = URNLookupController(self._db)
    self.work_controller = WorkController(self)
    self.analytics_controller = AnalyticsController(self)
    self.profiles = ProfileController(self)
    self.heartbeat = HeartbeatController()
    self.service_status = ServiceStatusController(self)
def setup_configuration_dependent_controllers(self):
    """Set up all the controllers that depend on the
    current site configuration.

    This method will be called fresh every time the site
    configuration changes.
    """
    # The OAuth controller wraps self.auth, which load_settings()
    # rebuilds on every reload -- so this controller must be rebuilt too.
    self.oauth_controller = OAuthController(self.auth)
def setup_adobe_vendor_id(self, _db, library):
    """If this Library has an Adobe Vendor ID integration,
    configure the controller for it.

    Also records, in self.short_client_token_initialization_exceptions,
    any problem with this library's Short Client Token configuration.

    :param _db: A database session.
    :param library: The Library whose DRM integrations to inspect.
    :return: An Authdata object for `library`, if one could be created.
    """
    short_client_token_initialization_exceptions = dict()
    adobe = ExternalIntegration.lookup(
        _db, ExternalIntegration.ADOBE_VENDOR_ID,
        ExternalIntegration.DRM_GOAL, library=library
    )
    warning = (
        'Adobe Vendor ID controller is disabled due to missing or'
        ' incomplete configuration. This is probably nothing to'
        ' worry about.'
    )
    new_adobe_vendor_id = None
    if adobe:
        # Relatively few libraries will have this setup.
        vendor_id = adobe.username
        node_value = adobe.password
        if vendor_id and node_value:
            # (The original code also warned here when
            # new_adobe_vendor_id was already set, but the variable is
            # assigned None just above and this is the first read, so
            # that branch was unreachable and has been removed.)
            new_adobe_vendor_id = AdobeVendorIDController(
                _db,
                library,
                vendor_id,
                node_value,
                self.auth
            )
        else:
            # Use the prebuilt message; the original duplicated the
            # same text as an inline literal and never used `warning`.
            self.log.warn(warning)
    self.adobe_vendor_id = new_adobe_vendor_id
    # But almost all libraries will have a Short Client Token
    # setup. We're not setting anything up here, but this is useful
    # information for the calling code to have so it knows
    # whether or not we should support the Device Management Protocol.
    registry = ExternalIntegration.lookup(
        _db, ExternalIntegration.OPDS_REGISTRATION,
        ExternalIntegration.DISCOVERY_GOAL, library=library
    )
    authdata = None
    if registry:
        try:
            authdata = AuthdataUtility.from_config(library, _db)
        except CannotLoadConfiguration as e:
            short_client_token_initialization_exceptions[library.id] = e
            self.log.error(
                "Short Client Token configuration for %s is present but not working. This may be cause for concern. Original error: %s",
                library.name, e
            )
    self.short_client_token_initialization_exceptions = short_client_token_initialization_exceptions
    return authdata
def annotator(self, lane, *args, **kwargs):
    """Create an appropriate OPDS annotator for the given lane."""
    # With no lane, fall back to the library of the current request.
    library = lane.library if lane else flask.request.library
    return CirculationManagerAnnotator(
        self.circulation_apis[library.id], lane, library,
        top_level_title='All Books', *args, **kwargs
    )
@property
def authentication_for_opds_document(self):
    """Make sure the current request's library has an Authentication For
    OPDS document in the cache, then return the cached version.
    """
    cache = self.authentication_for_opds_documents
    key = flask.request.library.short_name
    if key not in cache:
        cache[key] = self.auth.create_authentication_document()
    return cache[key]
@property
def public_key_integration_document(self):
    """JSON document advertising this site's public RSA key, if any."""
    site_id = ConfigurationSetting.sitewide(
        self._db, Configuration.BASE_URL_KEY).value
    document = dict(id=site_id)
    public_key = ConfigurationSetting.sitewide(
        self._db, Configuration.PUBLIC_KEY).value
    if public_key:
        # Only advertise a key when one is actually configured.
        document['public_key'] = dict(type='RSA', value=public_key)
    return json.dumps(document)
class CirculationManagerController(BaseCirculationManagerController):
    """Shared lookup helpers for the circulation manager's controllers."""

    @property
    def circulation(self):
        """Return the appropriate CirculationAPI for the request Library."""
        library_id = flask.request.library.id
        return self.manager.circulation_apis[library_id]

    def load_lane(self, language_key, name):
        """Turn user input into a Lane object.

        :param language_key: Key into the top-level lane's by_languages
            mapping, or None (with `name` also None) for the top-level lane.
        :param name: Name of a lane within that language group; None
            selects the group's default lane.
        :return: A Lane, or a NO_SUCH_LANE ProblemDetail.
        """
        library_id = flask.request.library.id
        top_level_lane = self.manager.top_level_lanes[library_id]
        if language_key is None and name is None:
            return top_level_lane
        lanelist = top_level_lane.sublanes
        # Idiom fix: `x not in y` rather than `not x in y`.
        if language_key not in lanelist.by_languages:
            return NO_SUCH_LANE.detailed(
                _("Unrecognized language key: %(language_key)s", language_key=language_key)
            )
        if name:
            # Double underscores in URLs stand for slashes in lane names.
            name = name.replace("__", "/")
        lanes = lanelist.by_languages[language_key]
        if not name:
            defaults = [x for x in lanes.values() if x.default_for_language]
            if len(defaults) == 1:
                # This language has one, and only one, default lane.
                return defaults[0]
        if name not in lanes:
            return NO_SUCH_LANE.detailed(
                _("No such lane: %(lane_name)s", lane_name=name)
            )
        return lanes[name]

    def load_work(self, library, identifier_type, identifier):
        """Turn user input into a Work, via its LicensePools.

        :return: A Work, or a ProblemDetail if no LicensePools matched.
        """
        pools = self.load_licensepools(library, identifier_type, identifier)
        if isinstance(pools, ProblemDetail):
            return pools
        # We know there is at least one LicensePool, and all LicensePools
        # for an Identifier have the same Work.
        return pools[0].work

    def load_licensepools(self, library, identifier_type, identifier):
        """Turn user input into one or more LicensePool objects.

        :param library: The LicensePools must be associated with one of this
            Library's Collections.
        :param identifier_type: A type of identifier, e.g. "ISBN"
        :param identifier: An identifier string, used with `identifier_type`
            to look up an Identifier.
        :return: A non-empty list of LicensePools, or a NO_LICENSES
            ProblemDetail.
        """
        _db = Session.object_session(library)
        pools = _db.query(LicensePool).join(LicensePool.collection).join(
            LicensePool.identifier).join(Collection.libraries).filter(
                Identifier.type==identifier_type
            ).filter(
                Identifier.identifier==identifier
            ).filter(
                Library.id==library.id
            ).all()
        if not pools:
            return NO_LICENSES.detailed(
                _("The item you're asking about (%s/%s) isn't in this collection.") % (
                    identifier_type, identifier
                )
            )
        return pools

    def load_licensepool(self, license_pool_id):
        """Turn a database ID into a LicensePool, or an INVALID_INPUT
        ProblemDetail if there is no such pool.
        """
        license_pool = get_one(self._db, LicensePool, id=license_pool_id)
        if not license_pool:
            return INVALID_INPUT.detailed(
                _("License Pool #%d does not exist.") % license_pool_id
            )
        return license_pool

    def load_licensepooldelivery(self, pool, mechanism_id):
        """Turn user input into a LicensePoolDeliveryMechanism object.

        :return: The mechanism, or BAD_DELIVERY_MECHANISM if none matched.
        """
        mechanism = get_one(
            self._db, LicensePoolDeliveryMechanism,
            data_source=pool.data_source, identifier=pool.identifier,
            delivery_mechanism_id=mechanism_id, on_multiple='interchangeable'
        )
        return mechanism or BAD_DELIVERY_MECHANISM

    def apply_borrowing_policy(self, patron, license_pool):
        """Check site and library policy before a borrow/hold.

        :return: A FORBIDDEN_BY_POLICY ProblemDetail if policy forbids
            the transaction, otherwise None. (451 = unavailable for
            legal/policy reasons; 403 for the no-holds case.)
        """
        if isinstance(patron, ProblemDetail):
            return patron
        if not patron.can_borrow(license_pool.work, self.manager.lending_policy):
            return FORBIDDEN_BY_POLICY.detailed(
                _("Library policy prohibits us from lending you this book."),
                status_code=451
            )
        if (not patron.library.allow_holds and
            license_pool.licenses_available == 0 and
            not license_pool.open_access
        ):
            return FORBIDDEN_BY_POLICY.detailed(
                _("Library policy prohibits the placement of holds."),
                status_code=403
            )
        return None
class IndexController(CirculationManagerController):
    """Redirect the patron to the appropriate feed."""

    def __call__(self):
        # The simple case: the app is equally open to all clients.
        short_name = flask.request.library.short_name
        if not Configuration.root_lane_policy():
            return redirect(
                self.cdn_url_for(
                    'acquisition_groups', library_short_name=short_name
                )
            )
        # The more complex case. We must authorize the patron, check
        # their type, and redirect them to an appropriate feed.
        return self.appropriate_index_for_patron_type()

    def authentication_document(self):
        """Serve this library's Authentication For OPDS document."""
        headers = {
            "Content-Type": AuthenticationForOPDSDocument.MEDIA_TYPE,
        }
        return Response(
            self.manager.authentication_for_opds_document, 200, headers
        )

    def authenticated_patron_root_lane(self):
        """Return the root lane configured for the authenticated patron's
        external type, None if the policy has no entry for that type, or
        a ProblemDetail/Response if authentication failed.
        """
        patron = self.authenticated_patron_from_request()
        if isinstance(patron, (ProblemDetail, Response)):
            return patron
        lane_info = Configuration.root_lane_policy().get(patron.external_type)
        if lane_info is None:
            return None
        lang_key, name = lane_info
        return self.load_lane(lang_key, name)

    def appropriate_index_for_patron_type(self):
        """Redirect to the grouped feed matching the patron's root lane."""
        short_name = flask.request.library.short_name
        root_lane = self.authenticated_patron_root_lane()
        if isinstance(root_lane, (ProblemDetail, Response)):
            return root_lane
        kwargs = dict(library_short_name=short_name)
        if root_lane is not None:
            kwargs.update(
                languages=root_lane.language_key,
                lane_name=root_lane.url_name,
            )
        return redirect(self.cdn_url_for('acquisition_groups', **kwargs))

    def public_key_document(self):
        """Serves a sitewide public key document"""
        headers = {'Content-Type': 'application/opds+json'}
        return Response(
            self.manager.public_key_integration_document, 200, headers
        )
class OPDSFeedController(CirculationManagerController):
    """Serve grouped, paginated and search-result OPDS feeds."""

    def groups(self, languages, lane_name):
        """Build or retrieve a grouped acquisition feed."""
        lane = self.load_lane(languages, lane_name)
        if isinstance(lane, ProblemDetail):
            return lane
        url = self.cdn_url_for(
            "acquisition_groups", languages=languages, lane_name=lane_name,
            library_short_name=flask.request.library.short_name,
        )
        feed = AcquisitionFeed.groups(
            self._db, lane.display_name, url, lane,
            self.manager.annotator(lane)
        )
        return feed_response(feed.content)

    def feed(self, languages, lane_name):
        """Build or retrieve a paginated acquisition feed."""
        lane = self.load_lane(languages, lane_name)
        if isinstance(lane, ProblemDetail):
            return lane
        facets = load_facets_from_request()
        if isinstance(facets, ProblemDetail):
            return facets
        pagination = load_pagination_from_request()
        if isinstance(pagination, ProblemDetail):
            return pagination
        url = self.cdn_url_for(
            "feed", languages=languages, lane_name=lane_name,
            library_short_name=flask.request.library.short_name,
        )
        feed = AcquisitionFeed.page(
            self._db, lane.display_name, url, lane,
            annotator=self.manager.annotator(lane),
            facets=facets,
            pagination=pagination,
        )
        return feed_response(feed.content)

    def search(self, languages, lane_name):
        """Serve an OpenSearch description, or search results for a lane."""
        lane = self.load_lane(languages, lane_name)
        if isinstance(lane, ProblemDetail):
            return lane
        this_url = self.url_for(
            'lane_search', languages=languages, lane_name=lane_name,
            library_short_name=flask.request.library.short_name,
        )
        query = flask.request.args.get('q')
        if not query:
            # Send the search form
            return OpenSearchDocument.for_lane(lane, this_url)
        pagination = load_pagination_from_request(
            default_size=Pagination.DEFAULT_SEARCH_SIZE
        )
        if isinstance(pagination, ProblemDetail):
            return pagination
        # Run a search.
        this_url += "?q=" + urllib.quote(query.encode("utf8"))
        info = OpenSearchDocument.search_info(lane)
        opds_feed = AcquisitionFeed.search(
            _db=self._db, title=info['name'],
            url=this_url, lane=lane, search_engine=self.manager.external_search,
            query=query, annotator=self.manager.annotator(lane),
            pagination=pagination,
        )
        return feed_response(opds_feed)
class LoanController(CirculationManagerController):
    """Handle requests concerning a patron's loans and holds."""

    def get_patron_circ_objects(self, object_class, patron, license_pools):
        """Fetch the patron's rows of `object_class` that reference any of
        the given LicensePools.

        :param object_class: Loan or Hold.
        :return: A list of `object_class` instances with license_pool
            eagerly loaded.
        """
        pool_ids = [pool.id for pool in license_pools]
        return self._db.query(object_class).filter(
            object_class.patron_id==patron.id,
            object_class.license_pool_id.in_(pool_ids)
        ).options(eagerload(object_class.license_pool)).all()

    def get_patron_loan(self, patron, license_pools):
        """Return (loan, license_pool) for the patron's first loan on any of
        `license_pools`, or (None, None) if there is none.
        """
        loans = self.get_patron_circ_objects(Loan, patron, license_pools)
        if loans:
            loan = loans[0]
            return loan, loan.license_pool
        return None, None

    def get_patron_hold(self, patron, license_pools):
        """Return (hold, license_pool) for the patron's first hold on any of
        `license_pools`, or (None, None) if there is none.
        """
        holds = self.get_patron_circ_objects(Hold, patron, license_pools)
        if holds:
            hold = holds[0]
            return hold, hold.license_pool
        return None, None

    def sync(self):
        """Serve a feed of the patron's current loans and holds, syncing
        with third-party providers first when credentials allow it.
        """
        if flask.request.method=='HEAD':
            return Response()
        patron = flask.request.patron
        # First synchronize our local list of loans and holds with all
        # third-party loan providers.
        if patron.authorization_identifier:
            header = self.authorization_header()
            credential = self.manager.auth.get_credential_from_header(header)
            try:
                self.circulation.sync_bookshelf(patron, credential)
            except Exception as e:
                # If anything goes wrong, omit the sync step and just
                # display the current active loans, as we understand them.
                self.manager.log.error(
                    "ERROR DURING SYNC for %s: %r", patron.id, e, exc_info=e
                )
        # Then make the feed.
        feed = CirculationManagerLoanAndHoldAnnotator.active_loans_for(
            self.circulation, patron)
        return feed_response(feed, cache_for=None)

    def borrow(self, identifier_type, identifier, mechanism_id=None):
        """Create a new loan or hold for a book.

        Return an OPDS Acquisition feed that includes a link of rel
        "http://opds-spec.org/acquisition", which can be used to fetch the
        book or the license file.
        """
        patron = flask.request.patron
        library = flask.request.library
        result = self.best_lendable_pool(
            library, patron, identifier_type, identifier, mechanism_id
        )
        if not result:
            # No LicensePools were found and no ProblemDetail
            # was returned. Send a generic ProblemDetail.
            return NO_LICENSES.detailed(
                _("I've never heard of this work.")
            )
        if isinstance(result, ProblemDetail):
            return result
        pool, mechanism = result
        header = self.authorization_header()
        credential = self.manager.auth.get_credential_from_header(header)
        problem_doc = None
        # Map each circulation failure mode onto the right problem
        # detail document. Some failures return immediately; the rest
        # fall through to the common `if problem_doc` check below.
        try:
            loan, hold, is_new = self.circulation.borrow(
                patron, credential, pool, mechanism
            )
        except NoOpenAccessDownload as e:
            problem_doc = NO_LICENSES.detailed(
                _("Couldn't find an open-access download link for this book."),
                status_code=404
            )
        except PatronAuthorizationFailedException as e:
            problem_doc = INVALID_CREDENTIALS
        except PatronLoanLimitReached as e:
            problem_doc = LOAN_LIMIT_REACHED.with_debug(str(e))
        except PatronHoldLimitReached as e:
            problem_doc = e.as_problem_detail_document()
        except DeliveryMechanismError as e:
            return BAD_DELIVERY_MECHANISM.with_debug(
                str(e), status_code=e.status_code
            )
        except OutstandingFines as e:
            problem_doc = OUTSTANDING_FINES.detailed(
                _("You must pay your $%(fine_amount).2f outstanding fines before you can borrow more books.", fine_amount=patron.fines)
            )
        except AuthorizationExpired as e:
            return e.as_problem_detail_document(debug=False)
        except AuthorizationBlocked as e:
            return e.as_problem_detail_document(debug=False)
        except CannotLoan as e:
            problem_doc = CHECKOUT_FAILED.with_debug(str(e))
        except CannotHold as e:
            problem_doc = HOLD_FAILED.with_debug(str(e))
        except CannotRenew as e:
            problem_doc = RENEW_FAILED.with_debug(str(e))
        except NotFoundOnRemote as e:
            problem_doc = NOT_FOUND_ON_REMOTE
        except CirculationException as e:
            # Generic circulation error.
            problem_doc = CHECKOUT_FAILED.with_debug(str(e))
        if problem_doc:
            return problem_doc
        # At this point we have either a loan or a hold. If a loan, serve
        # a feed that tells the patron how to fulfill the loan. If a hold,
        # serve a feed that talks about the hold.
        if loan:
            feed = CirculationManagerLoanAndHoldAnnotator.single_loan_feed(
                self.circulation, loan)
        elif hold:
            feed = CirculationManagerLoanAndHoldAnnotator.single_hold_feed(
                self.circulation, hold)
        else:
            # This should never happen -- we should have sent a more specific
            # error earlier.
            return HOLD_FAILED
        if isinstance(feed, OPDSFeed):
            content = unicode(feed)
        else:
            content = etree.tostring(feed)
        # 201 for a newly created loan/hold, 200 for a pre-existing one.
        if is_new:
            status_code = 201
        else:
            status_code = 200
        headers = { "Content-Type" : OPDSFeed.ACQUISITION_FEED_TYPE }
        return Response(content, status_code, headers)

    def best_lendable_pool(self, library, patron, identifier_type, identifier,
                           mechanism_id):
        """Of the available LicensePools for the given Identifier, return the
        one that's the best candidate for loaning out right now.

        :return: (pool, mechanism) on success; a ProblemDetail when
            nothing is lendable; None when no pools matched at all.
        """
        # Turn source + identifier into a set of LicensePools
        pools = self.load_licensepools(
            library, identifier_type, identifier
        )
        if isinstance(pools, ProblemDetail):
            # Something went wrong.
            return pools
        best = None
        mechanism = None
        problem_doc = None
        existing_loans = self._db.query(Loan).filter(
            Loan.license_pool_id.in_([lp.id for lp in pools]),
            Loan.patron==patron
        ).all()
        if existing_loans:
            return ALREADY_CHECKED_OUT
        # We found a number of LicensePools. Try to locate one that
        # we can actually loan to the patron.
        for pool in pools:
            problem_doc = self.apply_borrowing_policy(patron, pool)
            if problem_doc:
                # As a matter of policy, the patron is not allowed to borrow
                # this book.
                continue
            # Beyond this point we know that site policy does not prohibit
            # us from lending this pool to this patron.
            if mechanism_id:
                # But the patron has requested a license pool that
                # supports a specific delivery mechanism. This pool
                # must offer that mechanism.
                mechanism = self.load_licensepooldelivery(pool, mechanism_id)
                if isinstance(mechanism, ProblemDetail):
                    problem_doc = mechanism
                    continue
            # Beyond this point we have a license pool that we can
            # actually loan or put on hold.
            # But there might be many such LicensePools, and we want
            # to pick the one that will get the book to the patron
            # with the shortest wait.
            if (not best
                or pool.licenses_available > best.licenses_available
                or pool.patrons_in_hold_queue < best.patrons_in_hold_queue):
                best = pool
        if not best:
            # We were unable to find any LicensePool that fit the
            # criteria. Return the last problem document seen (if any).
            return problem_doc
        return best, mechanism

    def fulfill(self, license_pool_id, mechanism_id=None, do_get=None):
        """Fulfill a book that has already been checked out.

        If successful, this will serve the patron a downloadable copy of
        the book, or a DRM license file which can be used to get the
        book). Alternatively, for a streaming delivery mechanism it may
        serve an OPDS entry with a link to a third-party web page that
        streams the content.

        :param do_get: HTTP GET callable, injectable for testing;
            defaults to Representation.simple_http_get.
        """
        do_get = do_get or Representation.simple_http_get
        patron = flask.request.patron
        header = self.authorization_header()
        credential = self.manager.auth.get_credential_from_header(header)
        # Turn source + identifier into a LicensePool.
        pool = self.load_licensepool(license_pool_id)
        if isinstance(pool, ProblemDetail):
            return pool
        loan, loan_license_pool = self.get_patron_loan(patron, [pool])
        if not loan or not loan_license_pool:
            return NO_ACTIVE_LOAN.detailed(
                _("You have no active loan for this title.")
            )
        # Find the LicensePoolDeliveryMechanism they asked for.
        mechanism = None
        if mechanism_id:
            mechanism = self.load_licensepooldelivery(
                loan_license_pool, mechanism_id
            )
            if isinstance(mechanism, ProblemDetail):
                return mechanism
        if not mechanism:
            # See if the loan already has a mechanism set. We can use that.
            if loan and loan.fulfillment:
                mechanism = loan.fulfillment
            else:
                return BAD_DELIVERY_MECHANISM.detailed(
                    _("You must specify a delivery mechanism to fulfill this loan.")
                )
        try:
            fulfillment = self.circulation.fulfill(
                patron, credential, loan.license_pool, mechanism
            )
        except DeliveryMechanismConflict as e:
            return DELIVERY_CONFLICT.detailed(e.message)
        except NoActiveLoan as e:
            return NO_ACTIVE_LOAN.detailed(
                _('Can\'t fulfill loan because you have no active loan for this book.'),
                status_code=e.status_code
            )
        except CannotFulfill as e:
            return CANNOT_FULFILL.with_debug(
                str(e), status_code=e.status_code
            )
        except FormatNotAvailable as e:
            return NO_ACCEPTABLE_FORMAT.with_debug(
                str(e), status_code=e.status_code
            )
        except DeliveryMechanismError as e:
            return BAD_DELIVERY_MECHANISM.with_debug(
                str(e), status_code=e.status_code
            )
        headers = dict()
        encoding_header = dict()
        if (fulfillment.data_source_name == DataSource.ENKI
            and mechanism.delivery_mechanism.drm_scheme_media_type == DeliveryMechanism.NO_DRM):
            encoding_header["Accept-Encoding"] = "deflate"
        if mechanism.delivery_mechanism.is_streaming:
            # If this is a streaming delivery mechanism, create an OPDS entry
            # with a fulfillment link to the streaming reader url.
            feed = CirculationManagerLoanAndHoldAnnotator.single_fulfillment_feed(
                self.circulation, loan, fulfillment)
            if isinstance(feed, OPDSFeed):
                content = unicode(feed)
            else:
                content = etree.tostring(feed)
            status_code = 200
            headers["Content-Type"] = OPDSFeed.ACQUISITION_FEED_TYPE
        else:
            content = fulfillment.content
            if fulfillment.content_link:
                # If we have a link to the content on a remote server, web clients may not
                # be able to access it if the remote server does not support CORS requests.
                # We need to fetch the content and return it instead of redirecting to it.
                try:
                    status_code, headers, content = do_get(fulfillment.content_link, headers=encoding_header)
                    headers = dict(headers)
                except RemoteIntegrationException as e:
                    return e.as_problem_detail_document(debug=False)
            else:
                status_code = 200
            if fulfillment.content_type:
                headers['Content-Type'] = fulfillment.content_type
        return Response(content, status_code, headers)

    def revoke(self, license_pool_id):
        """Revoke the patron's loan, or release their hold, on a book."""
        patron = flask.request.patron
        pool = self.load_licensepool(license_pool_id)
        if isinstance(pool, ProblemDetail):
            return pool
        loan, _ignore = self.get_patron_loan(patron, [pool])
        if loan:
            hold = None
        else:
            hold, _ignore = self.get_patron_hold(patron, [pool])
        if not loan and not hold:
            if not pool.work:
                title = 'this book'
            else:
                title = '"%s"' % pool.work.title
            return NO_ACTIVE_LOAN_OR_HOLD.detailed(
                _('Can\'t revoke because you have no active loan or hold for "%(title)s".', title=title),
                status_code=404
            )
        header = self.authorization_header()
        credential = self.manager.auth.get_credential_from_header(header)
        if loan:
            try:
                self.circulation.revoke_loan(patron, credential, pool)
            except RemoteRefusedReturn as e:
                title = _("Loan deleted locally but remote refused. Loan is likely to show up again on next sync.")
                return COULD_NOT_MIRROR_TO_REMOTE.detailed(title, status_code=503)
            except CannotReturn as e:
                title = _("Loan deleted locally but remote failed.")
                return COULD_NOT_MIRROR_TO_REMOTE.detailed(title, 503).with_debug(str(e))
        elif hold:
            if not self.circulation.can_revoke_hold(pool, hold):
                title = _("Cannot release a hold once it enters reserved state.")
                return CANNOT_RELEASE_HOLD.detailed(title, 400)
            try:
                self.circulation.release_hold(patron, credential, pool)
            except CannotReleaseHold as e:
                title = _("Hold released locally but remote failed.")
                return CANNOT_RELEASE_HOLD.detailed(title, 503).with_debug(str(e))
        # Serve a fresh OPDS entry for the work, sans loan/hold links.
        work = pool.work
        annotator = self.manager.annotator(None)
        return entry_response(
            AcquisitionFeed.single_entry(self._db, work, annotator)
        )

    def detail(self, identifier_type, identifier):
        """Serve an OPDS entry describing the patron's loan or hold on a
        book, or (on DELETE) revoke it.
        """
        if flask.request.method=='DELETE':
            # NOTE(review): revoke_loan_or_hold is not defined in the
            # visible code (this class defines `revoke`, keyed by
            # license pool id) -- confirm it exists elsewhere.
            return self.revoke_loan_or_hold(identifier_type, identifier)
        patron = flask.request.patron
        library = flask.request.library
        pools = self.load_licensepools(library, identifier_type, identifier)
        if isinstance(pools, ProblemDetail):
            return pools
        loan, pool = self.get_patron_loan(patron, pools)
        if loan:
            hold = None
        else:
            hold, pool = self.get_patron_hold(patron, pools)
        if not loan and not hold:
            # Bug fix: at this point `pool` is None -- both lookups
            # returned (None, None) -- so the original dereference of
            # pool.work.title raised AttributeError. Take the title
            # from the LicensePools we already loaded instead.
            work = pools[0].work
            title = work.title if work else None
            return NO_ACTIVE_LOAN_OR_HOLD.detailed(
                _('You have no active loan or hold for "%(title)s".', title=title),
                status_code=404
            )
        if flask.request.method=='GET':
            if loan:
                feed = CirculationManagerLoanAndHoldAnnotator.single_loan_feed(
                    self.circulation, loan)
            else:
                feed = CirculationManagerLoanAndHoldAnnotator.single_hold_feed(
                    self.circulation, hold)
            feed = unicode(feed)
            return feed_response(feed, None)
class AnnotationController(CirculationManagerController):
    # Implements a W3C Web Annotation Protocol endpoint for patron
    # annotations (e.g. reading position), backed by LDP containers.

    def container(self, identifier=None, accept_post=True):
        """Serve the patron's annotation container (GET), or add an
        annotation to it (POST, when `accept_post` is True).

        :param identifier: If given, restrict the container to
            annotations on this Identifier.
        :param accept_post: Whether POST is advertised and handled.
        """
        headers = dict()
        if accept_post:
            headers['Allow'] = 'GET,HEAD,OPTIONS,POST'
            headers['Accept-Post'] = AnnotationWriter.CONTENT_TYPE
        else:
            headers['Allow'] = 'GET,HEAD,OPTIONS'
        if flask.request.method=='HEAD':
            return Response(status=200, headers=headers)
        patron = flask.request.patron
        if flask.request.method == 'GET':
            headers['Link'] = ['<http://www.w3.org/ns/ldp#BasicContainer>; rel="type"',
                '<http://www.w3.org/TR/annotation-protocol/>; rel="http://www.w3.org/ns/ldp#constrainedBy"']
            headers['Content-Type'] = AnnotationWriter.CONTENT_TYPE
            container, timestamp = AnnotationWriter.annotation_container_for(patron, identifier=identifier)
            # Weak ETag derived from the container's last-modified time;
            # falls back to an empty tag when no timestamp is available.
            etag = 'W/""'
            if timestamp:
                etag = 'W/"%s"' % timestamp
                headers['Last-Modified'] = format_date_time(mktime(timestamp.timetuple()))
            headers['ETag'] = etag
            content = json.dumps(container)
            return Response(content, status=200, headers=headers)
        # POST: parse the request body as a new annotation for this patron.
        data = flask.request.data
        annotation = AnnotationParser.parse(self._db, data, patron)
        if isinstance(annotation, ProblemDetail):
            return annotation
        content = json.dumps(AnnotationWriter.detail(annotation))
        status_code = 200
        headers['Link'] = '<http://www.w3.org/ns/ldp#Resource>; rel="type"'
        headers['Content-Type'] = AnnotationWriter.CONTENT_TYPE
        return Response(content, status_code, headers)

    def container_for_work(self, identifier_type, identifier):
        """Serve a read-only annotation container scoped to one work."""
        id_obj, ignore = Identifier.for_foreign_id(
            self._db, identifier_type, identifier)
        return self.container(identifier=id_obj, accept_post=False)

    def detail(self, annotation_id):
        """Serve a single annotation (GET), or deactivate it (DELETE)."""
        headers = dict()
        headers['Allow'] = 'GET,HEAD,OPTIONS,DELETE'
        if flask.request.method=='HEAD':
            return Response(status=200, headers=headers)
        patron = flask.request.patron
        # Only the patron's own active annotations are visible.
        annotation = get_one(
            self._db, Annotation,
            patron=patron,
            id=annotation_id,
            active=True)
        if not annotation:
            return NO_ANNOTATION
        if flask.request.method == 'DELETE':
            # Soft delete: the annotation is marked inactive, not removed.
            annotation.set_inactive()
            return Response()
        content = json.dumps(AnnotationWriter.detail(annotation))
        status_code = 200
        headers['Link'] = '<http://www.w3.org/ns/ldp#Resource>; rel="type"'
        headers['Content-Type'] = AnnotationWriter.CONTENT_TYPE
        return Response(content, status_code, headers)
class WorkController(CirculationManagerController):
def _lane_details(self, languages, audiences):
if languages:
languages = languages.split(',')
if audiences:
audiences = [urllib.unquote_plus(a) for a in audiences.split(',')]
return languages, audiences
def contributor(self, contributor_name, languages, audiences):
"""Serve a feed of books written by a particular author"""
library = flask.request.library
if not contributor_name:
return NO_SUCH_LANE.detailed(_("No contributor provided"))
languages, audiences = self._lane_details(languages, audiences)
lane = ContributorLane(
self._db, library, contributor_name, languages=languages, audiences=audiences
)
annotator = self.manager.annotator(lane)
facets = load_facets_from_request()
if isinstance(facets, ProblemDetail):
return facets
pagination = load_pagination_from_request()
if isinstance(pagination, ProblemDetail):
return pagination
url = annotator.feed_url(
lane,
facets=facets,
pagination=pagination,
)
feed = AcquisitionFeed.page(
self._db, lane.display_name, url, lane,
facets=facets, pagination=pagination,
annotator=annotator, cache_type=CachedFeed.CONTRIBUTOR_TYPE
)
return feed_response(unicode(feed.content))
def permalink(self, identifier_type, identifier):
"""Serve an entry for a single book.
This does not include any loan or hold-specific information for
the authenticated patron.
This is different from the /works lookup protocol, in that it
returns a single entry while the /works lookup protocol returns a
feed containing any number of entries.
"""
library = flask.request.library
work = self.load_work(library, identifier_type, identifier)
if isinstance(work, ProblemDetail):
return work
annotator = self.manager.annotator(None)
return entry_response(
AcquisitionFeed.single_entry(self._db, work, annotator)
)
def related(self, identifier_type, identifier, novelist_api=None):
"""Serve a groups feed of books related to a given book."""
library = flask.request.library
work = self.load_work(library, identifier_type, identifier)
if isinstance(work, ProblemDetail):
return work
try:
lane_name = "Books Related to %s by %s" % (
work.title, work.author
)
lane = RelatedBooksLane(
self._db, library, work, lane_name, novelist_api=novelist_api
)
except ValueError, e:
# No related books were found.
return NO_SUCH_LANE.detailed(e.message)
annotator = self.manager.annotator(lane)
facets = load_facets_from_request()
if isinstance(facets, ProblemDetail):
return facets
pagination = load_pagination_from_request()
if isinstance(pagination, ProblemDetail):
return pagination
url = annotator.feed_url(
lane,
facets=facets,
pagination=pagination,
)
feed = AcquisitionFeed.groups(
self._db, lane.DISPLAY_NAME, url, lane, annotator=annotator
)
return feed_response(unicode(feed.content))
def recommendations(self, identifier_type, identifier, novelist_api=None):
"""Serve a feed of recommendations related to a given book."""
library = flask.request.library
work = self.load_work(library, identifier_type, identifier)
if isinstance(work, ProblemDetail):
return work
lane_name = "Recommendations for %s by %s" % (work.title, work.author)
try:
lane = RecommendationLane(
self._db, library, work, lane_name, novelist_api=novelist_api
)
except ValueError, e:
# NoveList isn't configured.
return NO_SUCH_LANE.detailed(_("Recommendations not available"))
annotator = self.manager.annotator(lane)
facets = load_facets_from_request()
if isinstance(facets, ProblemDetail):
return facets
pagination = load_pagination_from_request()
if isinstance(pagination, ProblemDetail):
return pagination
url = annotator.feed_url(
lane,
facets=facets,
pagination=pagination,
)
feed = AcquisitionFeed.page(
self._db, lane.DISPLAY_NAME, url, lane,
facets=facets, pagination=pagination,
annotator=annotator, cache_type=CachedFeed.RECOMMENDATIONS_TYPE
)
return feed_response(unicode(feed.content))
def report(self, identifier_type, identifier):
"""Report a problem with a book."""
# TODO: We don't have a reliable way of knowing whether the
# complaing is being lodged against the work or against a
# specific LicensePool.
# Turn source + identifier into a set of LicensePools
library = flask.request.library
pools = self.load_licensepools(library, identifier_type, identifier)
if isinstance(pools, ProblemDetail):
# Something went wrong.
return pools
if flask.request.method == 'GET':
# Return a list of valid URIs to use as the type of a problem detail
# document.
data = "\n".join(Complaint.VALID_TYPES)
return Response(data, 200, {"Content-Type" : "text/uri-list"})
data = flask.request.data
controller = ComplaintController()
return controller.register(pools[0], data)
def series(self, series_name, languages, audiences):
    """Serve a feed of books in the same series as a given book.

    :param series_name: Name of the series; required.
    :param languages: Language restriction passed to `_lane_details`.
    :param audiences: Audience restriction passed to `_lane_details`.
    :return: An OPDS feed response, or a ProblemDetail on failure.
    """
    library = flask.request.library
    if not series_name:
        return NO_SUCH_LANE.detailed(_("No series provided"))
    languages, audiences = self._lane_details(languages, audiences)
    lane = SeriesLane(self._db, library, series_name=series_name,
        languages=languages, audiences=audiences
    )
    annotator = self.manager.annotator(lane)
    # In addition to the orderings enabled for this library, a
    # series collection may be ordered by series position, and is
    # ordered that way by default.
    facet_config = FacetConfig.from_library(library)
    facet_config.set_default_facet(
        Facets.ORDER_FACET_GROUP_NAME, Facets.ORDER_SERIES_POSITION
    )
    facets = load_facets_from_request(facet_config=facet_config)
    if isinstance(facets, ProblemDetail):
        return facets
    pagination = load_pagination_from_request()
    if isinstance(pagination, ProblemDetail):
        return pagination
    url = annotator.feed_url(
        lane,
        facets=facets,
        pagination=pagination,
    )
    feed = AcquisitionFeed.page(
        self._db, lane.display_name, url, lane,
        facets=facets, pagination=pagination,
        annotator=annotator, cache_type=CachedFeed.SERIES_TYPE
    )
    # `unicode` indicates this module targets Python 2.
    return feed_response(unicode(feed.content))
class ProfileController(CirculationManagerController):
    """Implement the User Profile Management Protocol."""

    @property
    def _controller(self):
        """Build a CoreProfileController bound to the requesting patron."""
        return CoreProfileController(
            PatronProfileStorage(self.authenticated_patron_from_request())
        )

    def protocol(self):
        """Handle a UPMP request: GET reads the profile, anything else writes it."""
        controller = self._controller
        if flask.request.method != 'GET':
            result = controller.put(flask.request.headers, flask.request.data)
        else:
            result = controller.get()
        # Problem documents are returned as-is; successful results are
        # unpacked into a Flask response.
        return result if isinstance(result, ProblemDetail) else make_response(*result)
class AnalyticsController(CirculationManagerController):
    """Record client-side circulation events for analytics."""

    def track_event(self, identifier_type, identifier, event_type):
        """Record one analytics event against the identified book."""
        # TODO: It usually doesn't matter, but there should be
        # a way to distinguish between different LicensePools for the
        # same book.
        if event_type not in CirculationEvent.CLIENT_EVENTS:
            return INVALID_ANALYTICS_EVENT_TYPE
        library = flask.request.library
        pools = self.load_licensepools(library, identifier_type, identifier)
        if isinstance(pools, ProblemDetail):
            # Identifier lookup failed; return the problem document.
            return pools
        self.manager.analytics.collect_event(
            library, pools[0], event_type, datetime.datetime.utcnow()
        )
        return Response({}, 200)
class ServiceStatusController(CirculationManagerController):
    # Minimal HTML shell; %(statuses)s is replaced with <li> rows in __call__.
    template = """<!DOCTYPE HTML>
<html lang="en" class="">
<head>
<meta charset="utf8">
</head>
<body>
<ul>
%(statuses)s
</ul>
</body>
</html>
"""
    def __call__(self):
        """Render an HTML page of loan-service timing checks for the
        current library's circulation API."""
        library = flask.request.library
        circulation = self.manager.circulation_apis[library.id]
        service_status = ServiceStatus(circulation)
        timings = service_status.loans_status(response=True)
        statuses = []
        # Sort by check name so the page is stable across refreshes.
        for k, v in sorted(timings.items()):
            statuses.append(" <li><b>%s</b>: %s</li>" % (k, v))
        doc = self.template % dict(statuses="\n".join(statuses))
        return Response(doc, 200, {"Content-Type": "text/html"})
| en | 0.900869 | If the site configuration has been updated, reload the CirculationManager's configuration from the database. Load all necessary configuration settings and external integrations from the database. This is called once when the CirculationManager is initialized. It may also be called later to reload the site configuration after changes are made in the administrative interface. # Track the Lane configuration for each library by mapping its # short name to the top-level lane. # Create a CirculationAPI for each library. # There's at least one library on this system that # wants Vendor IDs. This means we need to advertise support # for the Device Management Protocol. Retrieve or create a connection to the search interface. This is created lazily so that a failure to connect only affects searches, not the rest of the circulation manager. Output information about the lane layout. Set up a search client. Set up the Circulation object. Set up all the controllers that will be used by the web app. This method will be called only once, no matter how many times the site configuration changes. Set up all the controllers that depend on the current site configuration. This method will be called fresh every time the site configuration changes. If this Library has an Adobe Vendor ID integration, configure the controller for it. :return: An Authdata object for `library`, if one could be created. # Relatively few libraries will have this setup. # But almost all libraries will have a Short Client Token # setup. We're not setting anything up here, but this is useful # information for the calling code to have so it knows # whether or not we should support the Device Management Protocol. Create an appropriate OPDS annotator for the given lane. Make sure the current request's library has an Authentication For OPDS document in the cache, then return the cached version. Return the appropriate CirculationAPI for the request Library. Turn user input into a Lane object. 
# This language has one, and only one, default lane. # We know there is at least one LicensePool, and all LicensePools # for an Identifier have the same Work. Turn user input into one or more LicensePool objects. :param library: The LicensePools must be associated with one of this Library's Collections. :param identifier_type: A type of identifier, e.g. "ISBN" :param identifier: An identifier string, used with `identifier_type` to look up an Identifier. Turns user input into a LicensePool #%d does not exist.") % license_pool_id Turn user input into a LicensePoolDeliveryMechanism object. Redirect the patron to the appropriate feed. # The simple case: the app is equally open to all clients. # The more complex case. We must authorize the patron, check # their type, and redirect them to an appropriate feed. Serve this library's Authentication For OPDS document. Serves a sitewide public key document Build or retrieve a grouped acquisition feed. Build or retrieve a paginated acquisition feed. # Send the search form # Run a search. # First synchronize our local list of loans and holds with all # third-party loan providers. # If anything goes wrong, omit the sync step and just # display the current active loans, as we understand them. # Then make the feed. Create a new loan or hold for a book. Return an OPDS Acquisition feed that includes a link of rel "http://opds-spec.org/acquisition", which can be used to fetch the book or the license file. # No LicensePools were found and no ProblemDetail # was returned. Send a generic ProblemDetail. # Generic circulation error. # At this point we have either a loan or a hold. If a loan, serve # a feed that tells the patron how to fulfill the loan. If a hold, # serve a feed that talks about the hold. # This should never happen -- we should have sent a more specific # error earlier. Of the available LicensePools for the given Identifier, return the one that's the best candidate for loaning out right now. 
# Turn source + identifier into a set of LicensePools # Something went wrong. # We found a number of LicensePools. Try to locate one that # we can actually loan to the patron. # As a matter of policy, the patron is not allowed to borrow # this book. # Beyond this point we know that site policy does not prohibit # us from lending this pool to this patron. # But the patron has requested a license pool that # supports a specific delivery mechanism. This pool # must offer that mechanism. # Beyond this point we have a license pool that we can # actually loan or put on hold. # But there might be many such LicensePools, and we want # to pick the one that will get the book to the patron # with the shortest wait. # We were unable to find any LicensePool that fit the # criteria. Fulfill a book that has already been checked out. If successful, this will serve the patron a downloadable copy of the book, or a DRM license file which can be used to get the book). Alternatively, for a streaming delivery mechanism it may serve an OPDS entry with a link to a third-party web page that streams the content. # Turn source + identifier into a LicensePool. # Find the LicensePoolDeliveryMechanism they asked for. # See if the loan already has a mechanism set. We can use that. # If this is a streaming delivery mechanism, create an OPDS entry # with a fulfillment link to the streaming reader url. # If we have a link to the content on a remote server, web clients may not # be able to access it if the remote server does not support CORS requests. # We need to fetch the content and return it instead of redirecting to it. #BasicContainer>; rel="type"', #constrainedBy"'] #Resource>; rel="type"' #Resource>; rel="type"' Serve a feed of books written by a particular author Serve an entry for a single book. This does not include any loan or hold-specific information for the authenticated patron. 
This is different from the /works lookup protocol, in that it returns a single entry while the /works lookup protocol returns a feed containing any number of entries. Serve a groups feed of books related to a given book. # No related books were found. Serve a feed of recommendations related to a given book. # NoveList isn't configured. Report a problem with a book. # TODO: We don't have a reliable way of knowing whether the # complaing is being lodged against the work or against a # specific LicensePool. # Turn source + identifier into a set of LicensePools # Something went wrong. # Return a list of valid URIs to use as the type of a problem detail # document. Serve a feed of books in the same series as a given book. # In addition to the orderings enabled for this library, a # series collection may be ordered by series position, and is # ordered that way by default. Implement the User Profile Management Protocol. Instantiate a CoreProfileController that actually does the work. Handle a UPMP request. # TODO: It usually doesn't matter, but there should be # a way to distinguish between different LicensePools for the # same book. <!DOCTYPE HTML> <html lang="en" class=""> <head> <meta charset="utf8"> </head> <body> <ul> %(statuses)s </ul> </body> </html> | 1.524163 | 2 |
tests/test_for_suite/test_for_cli/test_for_runner.py | siddC/memote | 0 | 6621158 | <reponame>siddC/memote<filename>tests/test_for_suite/test_for_cli/test_for_runner.py
# -*- coding: utf-8 -*-
# Copyright 2017 Novo Nordisk Foundation Center for Biosustainability,
# Technical University of Denmark.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Ensure the expected functioning of ``memote.suite.cli.runner``."""
from __future__ import absolute_import
from builtins import str
from os.path import exists
import pytest
from memote.suite.cli.runner import cli
def test_cli(runner):
    """Invoking memote with no arguments should print usage and exit cleanly."""
    outcome = runner.invoke(cli)
    assert outcome.exit_code == 0
    expected_prefix = "Usage: cli [OPTIONS] COMMAND [ARGS]..."
    assert outcome.output.startswith(expected_prefix)
def test_run_simple(runner, model_file):
    """A plain ``memote run`` on a model file should succeed."""
    args = ["run", "--no-collect", "--ignore-git", model_file]
    outcome = runner.invoke(cli, args)
    assert outcome.exit_code == 0
def test_run_output(runner, model_file):
    """``memote run --filename`` should succeed and write the report file."""
    report_path = model_file.split(".", 1)[0] + ".json"
    outcome = runner.invoke(
        cli, ["run", "--filename", report_path, "--ignore-git", model_file])
    assert outcome.exit_code == 0
    assert exists(report_path)
@pytest.mark.skip(reason="TODO: Need to provide input somehow.")
def test_new(runner, tmpdir):
    """Expect ``memote new`` to scaffold a project in the given directory.

    Renamed from ``test_run_output``: the duplicate name redefined (and so
    shadowed) the earlier ``test_run_output`` above, which meant pytest
    collected only this skipped test and never ran the first one.
    """
    output = str(tmpdir)
    result = runner.invoke(cli, [
        "new", "--directory", output])
    assert result.exit_code == 0
    assert exists(output)
    # TODO: Check complete template structure.
| # -*- coding: utf-8 -*-
# Copyright 2017 Novo Nordisk Foundation Center for Biosustainability,
# Technical University of Denmark.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Ensure the expected functioning of ``memote.suite.cli.runner``."""
from __future__ import absolute_import
from builtins import str
from os.path import exists
import pytest
from memote.suite.cli.runner import cli
def test_cli(runner):
"""Expect a simple memote invocation to be successful."""
result = runner.invoke(cli)
assert result.exit_code == 0
assert result.output.startswith(
"Usage: cli [OPTIONS] COMMAND [ARGS]...")
def test_run_simple(runner, model_file):
"""Expect a simple run to function."""
result = runner.invoke(cli, [
"run", "--no-collect", "--ignore-git", model_file])
assert result.exit_code == 0
def test_run_output(runner, model_file):
"""Expect a simple run to function."""
output = model_file.split(".", 1)[0] + ".json"
result = runner.invoke(cli, [
"run", "--filename", output, "--ignore-git", model_file])
assert result.exit_code == 0
assert exists(output)
@pytest.mark.skip(reason="TODO: Need to provide input somehow.")
def test_run_output(runner, tmpdir):
"""Expect a simple run to function."""
output = str(tmpdir)
result = runner.invoke(cli, [
"new", "--directory", output])
assert result.exit_code == 0
assert exists(output)
# TODO: Check complete template structure. | en | 0.797921 | # -*- coding: utf-8 -*- # Copyright 2017 Novo Nordisk Foundation Center for Biosustainability, # Technical University of Denmark. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. Ensure the expected functioning of ``memote.suite.cli.runner``. Expect a simple memote invocation to be successful. Expect a simple run to function. Expect a simple run to function. Expect a simple run to function. # TODO: Check complete template structure. | 2.109654 | 2 |
tests/litmus-framework-fastapi.py | Project-Dream-Weaver/pyre-http | 54 | 6621159 | import asyncio
import litmus
import uvloop
from fastapi import FastAPI
uvloop.install()
litmus.init_logger("info", None, True)
# asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
app = FastAPI()
server = None
@app.get("/stats")
async def show_stats():
print(server._server.len_clients())
@app.get("/hello")
async def hello_world():
return "hello, world"
async def main():
    """Build the litmus server around the FastAPI app and serve forever."""
    global server
    # Bridge the ASGI application into litmus's LSGI interface.
    runner = litmus.LSGIToASGIAdapter(app)
    server = litmus.Server(runner, listen_on="0.0.0.0:8000")
    server.ignite()
    await server.run_forever()


if __name__ == '__main__':
    asyncio.run(main())
| import asyncio
import litmus
import uvloop
from fastapi import FastAPI
uvloop.install()
litmus.init_logger("info", None, True)
# asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
app = FastAPI()
server = None
@app.get("/stats")
async def show_stats():
print(server._server.len_clients())
@app.get("/hello")
async def hello_world():
return "hello, world"
async def main():
global server
runner = litmus.LSGIToASGIAdapter(app)
server = litmus.Server(runner, listen_on="0.0.0.0:8000")
server.ignite()
await server.run_forever()
if __name__ == '__main__':
asyncio.run(main())
| en | 0.298551 | # asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy()) | 2.362912 | 2 |
zemberek/normalization/turkish_sentence_normalizer.py | Loodos/zemberek-python | 52 | 6621160 | import math
from pkg_resources import resource_filename
from typing import List, Tuple, Dict, FrozenSet, Set, Union
from zemberek.core.turkish import TurkishAlphabet, SecondaryPos
from zemberek.lm import SmoothLM
from zemberek.morphology import TurkishMorphology
from zemberek.morphology.analysis.word_analysis import WordAnalysis
from zemberek.morphology.analysis.informal_analysis_converter import InformalAnalysisConverter
from zemberek.morphology.generator import WordGenerator
from zemberek.tokenization.turkish_tokenizer import TurkishTokenizer
from zemberek.tokenization.token import Token
from zemberek.normalization.stem_ending_graph import StemEndingGraph
from zemberek.normalization.character_graph_decoder import CharacterGraphDecoder
from zemberek.normalization.turkish_spell_checker import TurkishSpellChecker
from zemberek.normalization.deasciifier.deasciifier import Deasciifier
def load_replacements() -> Dict[str, str]:
    """Read the packaged multi-word replacement table into a dict."""
    path = resource_filename(
        "zemberek", "resources/normalization/multi-word-replacements.txt")
    table: Dict[str, str] = {}
    with open(path, "r", encoding="utf-8") as f:
        for raw in f:
            # Lines look like "left hand side = replacement".
            parts = raw.replace('\n', "").split("=")
            table[parts[0].strip()] = parts[1].strip()
    return table
def load_no_split() -> FrozenSet[str]:
    """Read the packaged list of words that must never be split."""
    path = resource_filename("zemberek", "resources/normalization/no-split.txt")
    with open(path, "r", encoding="utf-8") as f:
        cleaned = (line.replace('\n', "").strip() for line in f)
        # Blank lines are dropped.
        return frozenset(word for word in cleaned if word)
def load_common_split() -> Dict[str, str]:
    """Read the packaged table of common splits (joined form -> split form)."""
    path = resource_filename("zemberek", "resources/normalization/split.txt")
    splits: Dict[str, str] = {}
    with open(path, "r", encoding="utf-8") as f:
        for raw in f:
            # Lines look like "joinedform - split form".
            fields = raw.replace('\n', "").split('-')
            splits[fields[0].strip()] = fields[1].strip()
    return splits
def load_multimap(resource: str) -> Dict[str, Tuple[str, ...]]:
    """Load a ``key = value`` (or ``key = v1,v2,...``) file into a multimap.

    Every value seen for a key is accumulated, in file order, into the
    tuple the key maps to; a comma-separated right-hand side contributes
    one entry per element.

    Fixes two defects in the previous implementation: comma-separated
    values were silently dropped when the key had not been seen before
    (the comma branch had no ``else``), and they *replaced* -- instead of
    extended -- the values of a key that had already been seen.

    :param resource: Path of the UTF-8 text file to read.
    :return: Mapping from key to the tuple of its values.
    :raises ValueError: If a non-empty line has no ``=`` separator.
    """
    with open(resource, "r", encoding="utf-8") as f:
        lines: List[str] = f.read().split('\n')
    multimap: Dict[str, Tuple[str, ...]] = {}
    for i, line in enumerate(lines):
        if len(line.strip()) == 0:
            continue
        index = line.find("=")
        if index < 0:
            # ValueError instead of the former BaseException: still caught
            # by any existing broad handler, but properly specific.
            raise ValueError(f"Line needs to have `=` symbol. But it is: {i} -" + line)
        key, value = line[0:index].strip(), line[index + 1:].strip()
        if ',' in value:
            additions = tuple(value.split(','))
        else:
            additions = (value,)
        # Extend (never overwrite) the values already collected for key.
        multimap[key] = multimap.get(key, ()) + additions
    return multimap
class TurkishSentenceNormalizer:
    """Normalizes noisy / informal Turkish sentences.

    Candidate corrections for every token are gathered from manual lookup
    tables, informal-morphology analysis and a spell checker; the best
    candidate sequence is then chosen with a bigram language model by
    enumerating hypotheses over all candidate combinations (see `decode`).
    """
    # Sentinel candidates delimiting a sentence for the LM decoder; the
    # concrete values are assigned right after the class definition.
    START: 'TurkishSentenceNormalizer.Candidate'
    END: 'TurkishSentenceNormalizer.Candidate'
    END_CANDIDATES: 'TurkishSentenceNormalizer.Candidates'

    def __init__(self, morphology: TurkishMorphology):
        """Load lookup tables, the language model and helper analyzers.

        :param morphology: Fully configured Turkish morphology instance.
        """
        self.morphology = morphology
        self.analysis_converter: InformalAnalysisConverter = InformalAnalysisConverter(morphology.word_generator)
        # Bigram language model, scored in natural-log space.
        self.lm: SmoothLM = SmoothLM.builder(resource_filename("zemberek", "resources/lm.2gram.slm")). \
            log_base(math.e).build()
        graph = StemEndingGraph(morphology)
        decoder = CharacterGraphDecoder(graph.stem_graph)
        # Spell checker that tolerates missing Turkish diacritics.
        self.spell_checker = TurkishSpellChecker(morphology, decoder=decoder,
                                                 matcher=CharacterGraphDecoder.DIACRITICS_IGNORING_MATCHER)
        self.replacements: Dict[str, str] = load_replacements()
        self.no_split_words: FrozenSet[str] = load_no_split()
        self.common_splits = load_common_split()
        with open(resource_filename("zemberek", "resources/normalization/question-suffixes.txt"), "r",
                  encoding="utf-8") as f:
            lines = f.read().split('\n')
        del f
        self.common_connected_suffixes: FrozenSet[str] = frozenset(lines)
        self.always_apply_deasciifier = False
        self.lookup_manual: Dict[str, Tuple[str]] = load_multimap(
            resource_filename("zemberek", "resources/normalization/candidates-manual.txt"))
        self.lookup_from_graph: Dict[str, Tuple[str]] = load_multimap(resource_filename("zemberek",
                                                                                        "resources/normalization/"
                                                                                        "lookup-from-graph.txt"))
        self.lookup_from_ascii: Dict[str, Tuple[str]] = load_multimap(
            resource_filename("zemberek", "resources/normalization/ascii-map.txt"))
        # Manual candidates take precedence over the graph-derived ones.
        for s in self.lookup_manual.keys():
            try:
                self.lookup_from_graph.pop(s)
            except KeyError:
                pass
        # Secondary analyzer that accepts informal morphemes and ignores
        # diacritics, used only to generate candidates.
        self.informal_ascii_tolerant_morphology = TurkishMorphology.builder(morphology.lexicon) \
            .use_informal_analysis().ignore_diacritics_in_analysis_().build()

    def normalize(self, sentence: str) -> str:
        """Return the normalized form of `sentence`.

        For each token, candidate replacements are collected from the
        lookup tables, informal-morphology conversion and (for unanalyzable
        words) the spell checker; the LM decoder then picks the best path.
        """
        processed = self.pre_process(sentence)
        tokens: Tuple[Token] = tuple(TurkishTokenizer.DEFAULT.tokenize(processed))
        candidates_list: List['TurkishSentenceNormalizer.Candidates'] = []
        for i, current_token in enumerate(tokens):
            current = current_token.content
            # Neighboring tokens give the spell checker bigram context.
            next_ = None if i == len(tokens) - 1 else tokens[i + 1].content
            previous = None if i == 0 else tokens[i - 1].content
            candidates: Set[str] = set()
            # 1) Static lookup tables.
            candidates.update(self.lookup_manual.get(current, ()))
            candidates.update(self.lookup_from_graph.get(current, ()))
            candidates.update(self.lookup_from_ascii.get(current, ()))
            # 2) Formal surfaces regenerated from informal/ascii analyses.
            analyses: WordAnalysis = self.informal_ascii_tolerant_morphology.analyze(current)
            for analysis in analyses:
                if analysis.contains_informal_morpheme():
                    result: Union[WordGenerator.Result, TurkishSentenceNormalizer.Candidates]
                    result = self.analysis_converter.convert(current, analysis)
                    if result:
                        candidates.add(result.surface)
                else:
                    results: Tuple[WordGenerator.Result] = self.morphology.word_generator.generate(
                        item=analysis.item, morphemes=analysis.get_morphemes()
                    )
                    for result in results:
                        candidates.add(result.surface)
            # 3) Spelling suggestions for unanalyzable words longer than 3
            #    characters (capped at 3 suggestions).
            if len(analyses.analysis_results) == 0 and len(current) > 3:
                spell_candidates = self.spell_checker.suggest_for_word_for_normalization(
                    current, previous, next_, self.lm
                )
                if len(spell_candidates) > 3:
                    spell_candidates = spell_candidates[:3]
                candidates.update(spell_candidates)
            # Keep the original token if nothing was found or it is already
            # a correct word.
            if len(candidates) == 0 or self.morphology.analyze(current).is_correct():
                candidates.add(current)
            result = TurkishSentenceNormalizer.Candidates(current_token.content,
                                                          tuple(TurkishSentenceNormalizer.Candidate(s) for
                                                                s in candidates))
            candidates_list.append(result)
        return ' '.join(self.decode(candidates_list))

    def decode(self, candidates_list: List['TurkishSentenceNormalizer.Candidates']) -> Tuple[str]:
        """Pick the highest-LM-score candidate sequence.

        Expands every hypothesis by every candidate of the next token and
        scores the n-gram with the language model, then backtracks through
        the best final hypothesis. Note: hypotheses are not pruned, so the
        beam grows multiplicatively with the candidate counts.
        """
        current: List['TurkishSentenceNormalizer.Hypothesis'] = []
        next_: List['TurkishSentenceNormalizer.Hypothesis'] = []
        # Terminate the lattice with the </s> sentinel.
        candidates_list.append(TurkishSentenceNormalizer.END_CANDIDATES)
        initial = TurkishSentenceNormalizer.Hypothesis()
        lm_order = self.lm.order
        initial.history = [TurkishSentenceNormalizer.START] * (lm_order - 1)
        initial.current = TurkishSentenceNormalizer.START
        initial.score = 0.
        current.append(initial)
        for candidates in candidates_list:
            for h in current:
                for c in candidates.candidates:
                    new_hyp = TurkishSentenceNormalizer.Hypothesis()
                    hist = [None] * (lm_order - 1)
                    if lm_order > 2:
                        # Shift the history window left by one.
                        hist = h.history[1: lm_order]
                    hist[-1] = h.current
                    new_hyp.current = c
                    new_hyp.history = hist
                    new_hyp.previous = h
                    # Score the n-gram (history + candidate) with the LM.
                    indexes = [0] * lm_order
                    for j in range(lm_order - 1):
                        indexes[j] = self.lm.vocabulary.index_of(hist[j].content)
                    indexes[-1] = self.lm.vocabulary.index_of(c.content)
                    score = self.lm.get_probability(tuple(indexes))
                    new_hyp.score = h.score + score
                    next_.append(new_hyp)
            current = next_
            next_ = []
        best: 'TurkishSentenceNormalizer.Hypothesis' = self.get_best(current)
        seq: List[str] = []
        h = best
        # Skip the </s> sentinel at the top of the chain, then walk back
        # to <s>, collecting the chosen surfaces.
        h = h.previous
        while h and h.current != TurkishSentenceNormalizer.START:
            seq.append(h.current.content)
            h = h.previous
        return tuple(reversed(seq))

    @staticmethod
    def get_best(li: List['TurkishSentenceNormalizer.Hypothesis']) -> 'TurkishSentenceNormalizer.Hypothesis':
        """Return the hypothesis with the highest score (None-safe)."""
        best = None
        for t in li:
            if t:
                if not best or t.score > best.score:
                    best = t
        return best

    def pre_process(self, sentence: str) -> str:
        """Lower-case, apply replacements, combine/split words and
        (optionally) deasciify before candidate generation."""
        sentence = sentence.translate(TurkishAlphabet.lower_map).lower()
        tokens: Tuple[Token] = TurkishTokenizer.DEFAULT.tokenize(sentence)
        s: str = self.replace_common(tokens)
        tokens: Tuple[Token] = TurkishTokenizer.DEFAULT.tokenize(s)
        s = self.combine_necessary_words(tokens)
        tokens: Tuple[Token] = TurkishTokenizer.DEFAULT.tokenize(s)
        s = self.split_necessary_words(tokens, use_look_up=False)
        if self.always_apply_deasciifier or self.probably_requires_deasciifier(s):
            # Restore Turkish diacritics from the ascii-fied text.
            deasciifier = Deasciifier(s)
            s = deasciifier.convert_to_turkish()
        tokens: Tuple[Token] = TurkishTokenizer.DEFAULT.tokenize(s)
        s = self.combine_necessary_words(tokens)
        tokens: Tuple[Token] = TurkishTokenizer.DEFAULT.tokenize(s)
        return self.split_necessary_words(tokens, use_look_up=True)

    def split_necessary_words(self, tokens: Tuple[Token], use_look_up: bool) -> str:
        """Split suffix-glued word tokens; non-word tokens pass through."""
        result: List[str] = []
        for token in tokens:
            text = token.content
            if self.is_word(token):
                result.append(self.separate_common(text, use_look_up))
            else:
                result.append(text)
        return ' '.join(result)

    def separate_common(self, inp: str, use_look_up: bool) -> str:
        """Split `inp` into "head tail" when the tail is a known connected
        suffix and the head is a regular word; otherwise return it as-is."""
        if inp in self.no_split_words:
            return inp
        if use_look_up and inp in self.common_splits:
            return self.common_splits[inp]
        # Only attempt a split when the word itself is not analyzable.
        if not self.has_regular_analysis(inp):
            for i in range(len(inp)):
                tail = inp[i:]
                if tail in self.common_connected_suffixes:
                    head = inp[0:i]
                    if len(tail) < 3:
                        # Very short tails are accepted only if the LM has
                        # seen the (head, tail) bigram.
                        if not self.lm.ngram_exists(self.lm.vocabulary.to_indexes((head, tail))):
                            return inp
                    if self.has_regular_analysis(head):
                        return f"{head} {tail}"
                    else:
                        return inp
            return inp
        return inp

    @staticmethod
    def probably_requires_deasciifier(sentence: str) -> bool:
        """Heuristic: deasciify when under 10% of characters (excluding
        'ı'/'I') are Turkish-specific."""
        turkish_spec_count = 0
        for c in sentence:
            if c != 'ı' and c != 'I' and TurkishAlphabet.INSTANCE.is_turkish_specific(c):
                turkish_spec_count += 1
        ratio = turkish_spec_count * 1. / len(sentence)
        return ratio < 0.1

    def combine_necessary_words(self, tokens: Tuple[Token]) -> str:
        """Join adjacent word pairs that only analyze as a single word
        (e.g. a word and its detached suffix)."""
        result: List[str] = []
        combined = False
        for i in range(len(tokens) - 1):
            first: Token = tokens[i]
            second: Token = tokens[i + 1]
            first_s = first.content
            second_s = second.content
            if self.is_word(first) and self.is_word(second):
                if combined:
                    # `first` was already consumed by the previous join.
                    combined = False
                else:
                    c = self.combine_common(first_s, second_s)
                    if len(c) > 0:
                        result.append(c)
                        combined = True
                    else:
                        result.append(first.content)
                        combined = False
            else:
                combined = False
                result.append(first_s)
        # The last token is kept unless it was consumed by a join.
        if not combined:
            result.append(tokens[-1].content)
        return ' '.join(result)

    def combine_common(self, i1: str, i2: str) -> str:
        """Return i1+i2 when the joined form is analyzable and joining is
        warranted (apostrophe/"bil" continuation, or i2 alone is not a
        regular word); otherwise return the empty string."""
        combined = i1 + i2
        if i2.startswith("'") or i2.startswith("bil"):
            w: WordAnalysis = self.morphology.analyze(combined)
            if self.has_analysis(w):
                return combined
        if not self.has_regular_analysis(i2):
            w: WordAnalysis = self.morphology.analyze(combined)
            if self.has_analysis(w):
                return combined
        return ""

    def has_regular_analysis(self, s: str) -> bool:
        """True if `s` has an analysis that is not unknown, runtime,
        a proper noun or an abbreviation."""
        a: WordAnalysis = self.morphology.analyze(s)
        for s in a:
            if (not s.is_unknown()) and (not s.is_runtime()) and s.item.secondary_pos != SecondaryPos.ProperNoun \
                    and s.item.secondary_pos != SecondaryPos.Abbreviation:
                return True
        return False

    @staticmethod
    def has_analysis(w: WordAnalysis) -> bool:
        """True if `w` contains any non-runtime, non-unknown analysis."""
        for s in w:
            if (not s.is_runtime()) and (not s.is_unknown()):
                return True
        return False

    @staticmethod
    def is_word(token: Token) -> bool:
        """True for token types that represent normalizable words."""
        typ: Token.Type = token.type_
        return typ == Token.Type.Word or typ == Token.Type.WordWithSymbol or typ == Token.Type.WordAlphanumerical \
            or typ == Token.Type.UnknownWord

    def replace_common(self, tokens: Tuple[Token]) -> str:
        """Apply the multi-word replacement table token-by-token."""
        result: List[str] = []
        for token in tokens:
            text = token.content
            result.append(self.replacements.get(text, text))
        return ' '.join(result)

    class Hypothesis:
        """One partial decoding path: LM history, current candidate,
        back-pointer and accumulated log score."""
        def __init__(self):
            self.history: Union[List['TurkishSentenceNormalizer.Candidate'], None] = None
            self.current: Union['TurkishSentenceNormalizer.Candidate', None] = None
            self.previous: Union['TurkishSentenceNormalizer.Hypothesis', None] = None
            self.score: Union[float, None] = None

        def __eq__(self, other):
            if self is other:
                return True
            if isinstance(other, TurkishSentenceNormalizer.Hypothesis):
                return False if self.history != other.history else self.current == other.current
            return False

        def __hash__(self):
            result = 0
            for c in self.history:
                result = 31 * result + (hash(c) if c else 0)
            result = 31 * result + hash(self.current)
            return result

        def __str__(self):
            return "Hypothesis{history=" + f"{' '.join([str(s) for s in self.history])}" + f", current={self.current}" \
                                                                                           f", score={self.score}" + '}'

    class Candidate:
        """A single normalization candidate surface with a fixed score."""
        def __init__(self, content: str):
            self.content = content
            self.score = 1.0

        def __eq__(self, other):
            if self is other:
                return True
            if isinstance(other, TurkishSentenceNormalizer.Candidate):
                return self.content == other.content
            return False

        def __hash__(self):
            return hash(self.content)

        def __str__(self):
            return "Candidate{content='" + self.content + f"', score={self.score}" + '}'

    class Candidates:
        """All candidates collected for one input token."""
        def __init__(self, word: str, candidates: Tuple['TurkishSentenceNormalizer.Candidate']):
            self.word = word
            self.candidates = candidates

        def __str__(self):
            # NOTE(review): ' '.join over str(self.candidates) space-separates
            # the *characters* of the tuple repr -- looks unintended; verify.
            return "Candidates{word='" + self.word + "', candidates=" + ' '.join(str(self.candidates)) + '}'
# Sentence-boundary sentinel candidates used by the LM decoder; assigned
# here because they reference the nested classes defined above.
TurkishSentenceNormalizer.START = TurkishSentenceNormalizer.Candidate(content="<s>")
TurkishSentenceNormalizer.END = TurkishSentenceNormalizer.Candidate(content="</s>")
TurkishSentenceNormalizer.END_CANDIDATES = TurkishSentenceNormalizer.Candidates(word="</s>",
                                                                                candidates=(
                                                                                    TurkishSentenceNormalizer.END,
                                                                                ))
from pkg_resources import resource_filename
from typing import List, Tuple, Dict, FrozenSet, Set, Union
from zemberek.core.turkish import TurkishAlphabet, SecondaryPos
from zemberek.lm import SmoothLM
from zemberek.morphology import TurkishMorphology
from zemberek.morphology.analysis.word_analysis import WordAnalysis
from zemberek.morphology.analysis.informal_analysis_converter import InformalAnalysisConverter
from zemberek.morphology.generator import WordGenerator
from zemberek.tokenization.turkish_tokenizer import TurkishTokenizer
from zemberek.tokenization.token import Token
from zemberek.normalization.stem_ending_graph import StemEndingGraph
from zemberek.normalization.character_graph_decoder import CharacterGraphDecoder
from zemberek.normalization.turkish_spell_checker import TurkishSpellChecker
from zemberek.normalization.deasciifier.deasciifier import Deasciifier
def load_replacements() -> Dict[str, str]:
with open(resource_filename("zemberek", "resources/normalization/multi-word-replacements.txt"), "r",
encoding="utf-8") as f:
replacements: Dict[str, str] = {}
for line in f:
tokens = line.replace('\n', "").split("=")
replacements[tokens[0].strip()] = tokens[1].strip()
return replacements
def load_no_split() -> FrozenSet[str]:
with open(resource_filename("zemberek", "resources/normalization/no-split.txt"), "r", encoding="utf-8") as f:
s = set()
for line in f:
if len(line.replace('\n', "").strip()) > 0:
s.add(line.replace('\n', "").strip())
return frozenset(s)
def load_common_split() -> Dict[str, str]:
common_splits: Dict[str, str] = {}
with open(resource_filename("zemberek", "resources/normalization/split.txt"), "r", encoding="utf-8") as f:
for line in f:
tokens = line.replace('\n', "").split('-')
common_splits[tokens[0].strip()] = tokens[1].strip()
return common_splits
def load_multimap(resource: str) -> Dict[str, Tuple[str]]:
with open(resource, "r", encoding="utf-8") as f:
lines: List[str] = f.read().split('\n')
multimap: Dict[str, Tuple[str, ...]] = {}
for i, line in enumerate(lines):
if len(line.strip()) == 0:
continue
index = line.find("=")
if index < 0:
raise BaseException(f"Line needs to have `=` symbol. But it is: {i} -" + line)
key, value = line[0:index].strip(), line[index + 1:].strip()
if value.find(',') >= 0:
if key in multimap.keys():
multimap[key] = tuple(value.split(','))
else:
if key in multimap.keys():
multimap[key] = multimap[key] + (value,)
else:
multimap[key] = (value,)
return multimap
class TurkishSentenceNormalizer:
    """Normalizer for noisy Turkish text.

    Candidate replacements for every token are collected from manual lookup
    tables, informal-morphology analysis and (for unknown words) the spell
    checker; :meth:`decode` then performs an exhaustive lattice search scored
    by a 2-gram language model to pick the best candidate sequence.
    """

    # Decoder sentinels; assigned at module load, right after this class.
    START: 'TurkishSentenceNormalizer.Candidate'
    END: 'TurkishSentenceNormalizer.Candidate'
    END_CANDIDATES: 'TurkishSentenceNormalizer.Candidates'

    def __init__(self, morphology: TurkishMorphology):
        self.morphology = morphology
        self.analysis_converter: InformalAnalysisConverter = InformalAnalysisConverter(morphology.word_generator)
        # Bigram language model used by both the decoder and the spell checker.
        self.lm: SmoothLM = SmoothLM.builder(resource_filename("zemberek", "resources/lm.2gram.slm")). \
            log_base(math.e).build()

        graph = StemEndingGraph(morphology)
        decoder = CharacterGraphDecoder(graph.stem_graph)
        self.spell_checker = TurkishSpellChecker(morphology, decoder=decoder,
                                                 matcher=CharacterGraphDecoder.DIACRITICS_IGNORING_MATCHER)
        self.replacements: Dict[str, str] = load_replacements()
        self.no_split_words: FrozenSet[str] = load_no_split()
        self.common_splits = load_common_split()

        with open(resource_filename("zemberek", "resources/normalization/question-suffixes.txt"), "r",
                  encoding="utf-8") as f:
            lines = f.read().split('\n')

        self.common_connected_suffixes: FrozenSet[str] = frozenset(lines)
        self.always_apply_deasciifier = False
        self.lookup_manual: Dict[str, Tuple[str, ...]] = load_multimap(
            resource_filename("zemberek", "resources/normalization/candidates-manual.txt"))
        self.lookup_from_graph: Dict[str, Tuple[str, ...]] = load_multimap(
            resource_filename("zemberek", "resources/normalization/lookup-from-graph.txt"))
        self.lookup_from_ascii: Dict[str, Tuple[str, ...]] = load_multimap(
            resource_filename("zemberek", "resources/normalization/ascii-map.txt"))
        # Manual candidates take precedence over graph-derived ones.
        for s in self.lookup_manual.keys():
            try:
                self.lookup_from_graph.pop(s)
            except KeyError:
                pass

        self.informal_ascii_tolerant_morphology = TurkishMorphology.builder(morphology.lexicon) \
            .use_informal_analysis().ignore_diacritics_in_analysis_().build()

    def normalize(self, sentence: str) -> str:
        """Return the normalized form of *sentence*.

        Each token is expanded into a candidate set, then :meth:`decode`
        chooses the LM-best path through the candidate lattice.
        """
        processed = self.pre_process(sentence)
        tokens = tuple(TurkishTokenizer.DEFAULT.tokenize(processed))
        candidates_list: List['TurkishSentenceNormalizer.Candidates'] = []
        for i, current_token in enumerate(tokens):
            current = current_token.content
            next_ = None if i == len(tokens) - 1 else tokens[i + 1].content
            previous = None if i == 0 else tokens[i - 1].content

            # 1) Static lookup tables.
            candidates: Set[str] = set()
            candidates.update(self.lookup_manual.get(current, ()))
            candidates.update(self.lookup_from_graph.get(current, ()))
            candidates.update(self.lookup_from_ascii.get(current, ()))

            # 2) Formal equivalents of informal analyses.
            analyses: WordAnalysis = self.informal_ascii_tolerant_morphology.analyze(current)
            for analysis in analyses:
                if analysis.contains_informal_morpheme():
                    result = self.analysis_converter.convert(current, analysis)
                    if result:
                        candidates.add(result.surface)
                else:
                    results = self.morphology.word_generator.generate(
                        item=analysis.item, morphemes=analysis.get_morphemes()
                    )
                    for result in results:
                        candidates.add(result.surface)

            # 3) Spell-checker suggestions for unanalyzable words (cap at 3).
            if len(analyses.analysis_results) == 0 and len(current) > 3:
                spell_candidates = self.spell_checker.suggest_for_word_for_normalization(
                    current, previous, next_, self.lm
                )
                if len(spell_candidates) > 3:
                    spell_candidates = spell_candidates[:3]
                candidates.update(spell_candidates)

            # Keep the original token when nothing was found or it is already correct.
            if len(candidates) == 0 or self.morphology.analyze(current).is_correct():
                candidates.add(current)

            result = TurkishSentenceNormalizer.Candidates(current_token.content,
                                                          tuple(TurkishSentenceNormalizer.Candidate(s) for
                                                                s in candidates))
            candidates_list.append(result)

        return ' '.join(self.decode(candidates_list))

    def decode(self, candidates_list: List['TurkishSentenceNormalizer.Candidates']) -> Tuple[str, ...]:
        """Exhaustive search over the candidate lattice, scored by the LM.

        Returns the highest scoring candidate sequence without the
        sentence-boundary sentinels.
        """
        current: List['TurkishSentenceNormalizer.Hypothesis'] = []
        next_: List['TurkishSentenceNormalizer.Hypothesis'] = []
        candidates_list.append(TurkishSentenceNormalizer.END_CANDIDATES)

        initial = TurkishSentenceNormalizer.Hypothesis()
        lm_order = self.lm.order

        initial.history = [TurkishSentenceNormalizer.START] * (lm_order - 1)
        initial.current = TurkishSentenceNormalizer.START
        initial.score = 0.

        current.append(initial)

        for candidates in candidates_list:
            for h in current:
                for c in candidates.candidates:
                    new_hyp = TurkishSentenceNormalizer.Hypothesis()
                    # Slide the (lm_order - 1)-wide history window one step:
                    # drop the oldest entry and append the hypothesis' current
                    # word. (Fixes the previous version which overwrote the
                    # newest history entry and shrank the window for
                    # lm_order > 2, breaking the index loop below.)
                    hist = h.history[1:] + [h.current]
                    new_hyp.current = c
                    new_hyp.history = hist
                    new_hyp.previous = h

                    # Score the n-gram (history..., candidate).
                    indexes = [0] * lm_order
                    for j in range(lm_order - 1):
                        indexes[j] = self.lm.vocabulary.index_of(hist[j].content)
                    indexes[-1] = self.lm.vocabulary.index_of(c.content)
                    score = self.lm.get_probability(tuple(indexes))
                    new_hyp.score = h.score + score
                    next_.append(new_hyp)

            current = next_
            next_ = []

        best: 'TurkishSentenceNormalizer.Hypothesis' = self.get_best(current)
        seq: List[str] = []
        h = best
        # best.current is the artificial </s> candidate; skip it.
        h = h.previous
        while h and h.current != TurkishSentenceNormalizer.START:
            seq.append(h.current.content)
            h = h.previous

        return tuple(reversed(seq))

    @staticmethod
    def get_best(li: List['TurkishSentenceNormalizer.Hypothesis']) -> 'TurkishSentenceNormalizer.Hypothesis':
        """Return the hypothesis with the highest score (None if *li* is empty)."""
        best = None
        for t in li:
            if t:
                if not best or t.score > best.score:
                    best = t
        return best

    def pre_process(self, sentence: str) -> str:
        """Lower-case, apply replacements, merge/split words and optionally
        deasciify the sentence before candidate generation."""
        sentence = sentence.translate(TurkishAlphabet.lower_map).lower()
        tokens = TurkishTokenizer.DEFAULT.tokenize(sentence)
        s: str = self.replace_common(tokens)
        tokens = TurkishTokenizer.DEFAULT.tokenize(s)
        s = self.combine_necessary_words(tokens)
        tokens = TurkishTokenizer.DEFAULT.tokenize(s)
        s = self.split_necessary_words(tokens, use_look_up=False)
        if self.always_apply_deasciifier or self.probably_requires_deasciifier(s):
            deasciifier = Deasciifier(s)
            s = deasciifier.convert_to_turkish()

        # Combine/split again: deasciification may have changed the words.
        tokens = TurkishTokenizer.DEFAULT.tokenize(s)
        s = self.combine_necessary_words(tokens)
        tokens = TurkishTokenizer.DEFAULT.tokenize(s)
        return self.split_necessary_words(tokens, use_look_up=True)

    def split_necessary_words(self, tokens: Tuple[Token], use_look_up: bool) -> str:
        """Rebuild the sentence, splitting word tokens via :meth:`separate_common`."""
        result: List[str] = []
        for token in tokens:
            text = token.content
            if self.is_word(token):
                result.append(self.separate_common(text, use_look_up))
            else:
                result.append(text)
        return ' '.join(result)

    def separate_common(self, inp: str, use_look_up: bool) -> str:
        """Split *inp* into "head tail" when the tail is a known connected
        suffix and the head still has a regular analysis; otherwise return
        *inp* unchanged."""
        if inp in self.no_split_words:
            return inp
        if use_look_up and inp in self.common_splits:
            return self.common_splits[inp]
        if not self.has_regular_analysis(inp):
            for i in range(len(inp)):
                tail = inp[i:]
                if tail in self.common_connected_suffixes:
                    head = inp[0:i]
                    # Very short tails are accepted only if the LM has seen
                    # the (head, tail) bigram.
                    if len(tail) < 3:
                        if not self.lm.ngram_exists(self.lm.vocabulary.to_indexes((head, tail))):
                            return inp
                    if self.has_regular_analysis(head):
                        return f"{head} {tail}"
                    else:
                        return inp
        return inp

    @staticmethod
    def probably_requires_deasciifier(sentence: str) -> bool:
        """Heuristic: if fewer than 10% of the characters are Turkish-specific
        (the dotless 'ı'/'I' is ignored), the text was probably typed ASCII-only."""
        if not sentence:
            # Guard: the previous version raised ZeroDivisionError on "".
            return False
        turkish_spec_count = 0
        for c in sentence:
            if c != 'ı' and c != 'I' and TurkishAlphabet.INSTANCE.is_turkish_specific(c):
                turkish_spec_count += 1
        ratio = turkish_spec_count * 1. / len(sentence)
        return ratio < 0.1

    def combine_necessary_words(self, tokens: Tuple[Token]) -> str:
        """Rebuild the sentence, merging adjacent word pairs whose
        concatenation yields a valid analysis (see :meth:`combine_common`)."""
        result: List[str] = []
        combined = False
        for i in range(len(tokens) - 1):
            first: Token = tokens[i]
            second: Token = tokens[i + 1]
            first_s = first.content
            second_s = second.content
            if self.is_word(first) and self.is_word(second):
                if combined:
                    # The previous iteration already consumed this token.
                    combined = False
                else:
                    c = self.combine_common(first_s, second_s)
                    if len(c) > 0:
                        result.append(c)
                        combined = True
                    else:
                        result.append(first.content)
                        combined = False
            else:
                combined = False
                result.append(first_s)
        if not combined:
            result.append(tokens[-1].content)
        return ' '.join(result)

    def combine_common(self, i1: str, i2: str) -> str:
        """Return ``i1 + i2`` if the pair should be written as one word,
        otherwise an empty string."""
        combined = i1 + i2
        # Apostrophe suffixes and "bil..." (e.g. "-ebil") attach directly.
        if i2.startswith("'") or i2.startswith("bil"):
            w: WordAnalysis = self.morphology.analyze(combined)
            if self.has_analysis(w):
                return combined
        if not self.has_regular_analysis(i2):
            w: WordAnalysis = self.morphology.analyze(combined)
            if self.has_analysis(w):
                return combined
        return ""

    def has_regular_analysis(self, s: str) -> bool:
        """True if *s* has at least one known, non-runtime analysis that is
        neither a proper noun nor an abbreviation."""
        a: WordAnalysis = self.morphology.analyze(s)
        for analysis in a:
            if (not analysis.is_unknown()) and (not analysis.is_runtime()) \
                    and analysis.item.secondary_pos != SecondaryPos.ProperNoun \
                    and analysis.item.secondary_pos != SecondaryPos.Abbreviation:
                return True
        return False

    @staticmethod
    def has_analysis(w: WordAnalysis) -> bool:
        """True if *w* holds at least one known, non-runtime analysis."""
        for s in w:
            if (not s.is_runtime()) and (not s.is_unknown()):
                return True
        return False

    @staticmethod
    def is_word(token: Token) -> bool:
        """True for plain/alphanumerical/symbol-carrying/unknown word tokens."""
        typ: Token.Type = token.type_
        return typ == Token.Type.Word or typ == Token.Type.WordWithSymbol or typ == Token.Type.WordAlphanumerical \
            or typ == Token.Type.UnknownWord

    def replace_common(self, tokens: Tuple[Token]) -> str:
        """Rebuild the sentence applying the multi-word replacement table."""
        result: List[str] = []
        for token in tokens:
            text = token.content
            result.append(self.replacements.get(text, text))
        return ' '.join(result)

    class Hypothesis:
        """A partial decoding path: LM history window, current candidate,
        back-pointer and accumulated log-probability."""

        def __init__(self):
            self.history: Union[List['TurkishSentenceNormalizer.Candidate'], None] = None
            self.current: Union['TurkishSentenceNormalizer.Candidate', None] = None
            self.previous: Union['TurkishSentenceNormalizer.Hypothesis', None] = None
            self.score: Union[float, None] = None

        def __eq__(self, other):
            if self is other:
                return True
            if isinstance(other, TurkishSentenceNormalizer.Hypothesis):
                return False if self.history != other.history else self.current == other.current
            return False

        def __hash__(self):
            # Java-style 31-based combination of history and current.
            result = 0
            for c in self.history:
                result = 31 * result + (hash(c) if c else 0)
            result = 31 * result + hash(self.current)
            return result

        def __str__(self):
            return "Hypothesis{history=" + f"{' '.join([str(s) for s in self.history])}" + f", current={self.current}" \
                                                                                           f", score={self.score}" + '}'

    class Candidate:
        """A single normalization candidate for one token."""

        def __init__(self, content: str):
            self.content = content
            self.score = 1.0

        def __eq__(self, other):
            if self is other:
                return True
            if isinstance(other, TurkishSentenceNormalizer.Candidate):
                return self.content == other.content
            return False

        def __hash__(self):
            return hash(self.content)

        def __str__(self):
            return "Candidate{content='" + self.content + f"', score={self.score}" + '}'

    class Candidates:
        """The original token together with all its candidates."""

        def __init__(self, word: str, candidates: Tuple['TurkishSentenceNormalizer.Candidate', ...]):
            self.word = word
            self.candidates = candidates

        def __str__(self):
            # Bug fix: previously joined the characters of str(tuple) instead
            # of joining the string form of each candidate.
            return "Candidates{word='" + self.word + "', candidates=" + \
                   ' '.join(str(c) for c in self.candidates) + '}'
# Module-load initialisation of the decoder sentinels: "<s>" seeds every
# hypothesis history and "</s>" terminates the candidate lattice.
TurkishSentenceNormalizer.START = TurkishSentenceNormalizer.Candidate(content="<s>")
TurkishSentenceNormalizer.END = TurkishSentenceNormalizer.Candidate(content="</s>")
TurkishSentenceNormalizer.END_CANDIDATES = TurkishSentenceNormalizer.Candidates(word="</s>",
                                                                                candidates=(
                                                                                    TurkishSentenceNormalizer.END,
                                                                                ))
| none | 1 | 2.439645 | 2 | |
transaction_service/transactions/migrations/0012_remove_transactions_wallets_to_pay_and_more.py | deorz/TransactionService | 0 | 6621161 | <gh_stars>0
# Generated by Django 4.0.3 on 2022-03-13 15:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Drop the legacy ``wallets_to_pay`` field and repoint the replacement
    column as a plain foreign key to ``transactions.wallets``.

    Auto-generated by Django 4.0.3; do not edit operations by hand.
    """

    dependencies = [
        ('transactions', '0011_transactions_wallets_to_pay_new'),
    ]

    operations = [
        # Remove the superseded relation ...
        migrations.RemoveField(
            model_name='transactions',
            name='wallets_to_pay',
        ),
        # ... and make the new column a FK (no cascading deletes).
        migrations.AlterField(
            model_name='transactions',
            name='wallets_to_pay_new',
            field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='wallets', to='transactions.wallets'),
        ),
    ]
| # Generated by Django 4.0.3 on 2022-03-13 15:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Drop the legacy ``wallets_to_pay`` field and repoint the replacement
    column as a plain foreign key to ``transactions.wallets``.

    Auto-generated by Django 4.0.3; do not edit operations by hand.
    """

    dependencies = [
        ('transactions', '0011_transactions_wallets_to_pay_new'),
    ]

    operations = [
        # Remove the superseded relation ...
        migrations.RemoveField(
            model_name='transactions',
            name='wallets_to_pay',
        ),
        # ... and make the new column a FK (no cascading deletes).
        migrations.AlterField(
            model_name='transactions',
            name='wallets_to_pay_new',
            field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, related_name='wallets', to='transactions.wallets'),
        ),
    ]
0313.Super Ugly Number/solution.py | zhlinh/leetcode | 0 | 6621162 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
*****************************************
Author: zhlinh
Email: <EMAIL>
Version: 0.0.1
Created Time: 2016-05-16
Last_modify: 2016-05-16
******************************************
'''
'''
Write a program to find the n-th super ugly number.
Super ugly numbers are positive numbers whose all prime factors are
in the given prime list primes of size k.
For example, [1, 2, 4, 7, 8, 13, 14, 16, 19, 26, 28, 32]
is the sequence of the first 12 super ugly numbers given
primes = [2, 7, 13, 19] of size 4.
Note:
(1) 1 is a super ugly number for any given primes.
(2) The given numbers in primes are in ascending order.
(3) 0 < k ≤ 100, 0 < n ≤ 106, 0 < primes[i] < 1000.
Credits:
Special thanks to @dietpepsi for adding this problem and creating all test cases.
'''
class Solution(object):
    def nthSuperUglyNumber(self, n, primes):
        """Return the n-th super ugly number for the given prime factors.

        Classic k-pointer dynamic programming: ugly[0] is 1, and each prime
        keeps a pointer to the smallest ugly number it has not yet been
        multiplied with; the next ugly number is the minimum candidate.

        :type n: int
        :type primes: List[int]
        :rtype: int
        """
        ugly = [1] * n
        k = len(primes)
        pointers = [0] * k          # pointers[j]: next ugly index for primes[j]
        nxt = primes[:]             # candidate value contributed by each prime
        for idx in range(1, n):
            smallest = min(nxt)
            ugly[idx] = smallest
            # Advance every prime whose candidate equals the chosen value so
            # duplicates are emitted only once.
            for j in range(k):
                if nxt[j] == smallest:
                    pointers[j] += 1
                    nxt[j] = primes[j] * ugly[pointers[j]]
        return ugly[n - 1]
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
*****************************************
Author: zhlinh
Email: <EMAIL>
Version: 0.0.1
Created Time: 2016-05-16
Last_modify: 2016-05-16
******************************************
'''
'''
Write a program to find the n-th super ugly number.
Super ugly numbers are positive numbers whose all prime factors are
in the given prime list primes of size k.
For example, [1, 2, 4, 7, 8, 13, 14, 16, 19, 26, 28, 32]
is the sequence of the first 12 super ugly numbers given
primes = [2, 7, 13, 19] of size 4.
Note:
(1) 1 is a super ugly number for any given primes.
(2) The given numbers in primes are in ascending order.
(3) 0 < k ≤ 100, 0 < n ≤ 106, 0 < primes[i] < 1000.
Credits:
Special thanks to @dietpepsi for adding this problem and creating all test cases.
'''
class Solution(object):
    def nthSuperUglyNumber(self, n, primes):
        """
        :type n: int
        :type primes: List[int]
        :rtype: int
        """
        import heapq
        # Min-heap of (candidate value, index of the ugly number it was
        # derived from, the prime used) -- one lane per prime.
        q = []
        uglyNums = [1] * n
        k = len(primes)
        for i in range(k):
            heapq.heappush(q, (primes[i], 0, primes[i]))
        for ui in range(1, n):
            val, index, prime = q[0]
            uglyNums[ui] = val
            # Pop every lane producing this same value (deduplicates), then
            # push each lane's next multiple.
            while q and q[0][0] == val:
                val, index, prime = heapq.heappop(q)
                heapq.heappush(q, (prime * uglyNums[index+1], index+1, prime))
        return uglyNums[n-1]
| en | 0.692399 | #!/usr/bin/env python # -*- coding: utf-8 -*- ***************************************** Author: zhlinh Email: <EMAIL> Version: 0.0.1 Created Time: 2016-05-16 Last_modify: 2016-05-16 ****************************************** Write a program to find the n-th super ugly number. Super ugly numbers are positive numbers whose all prime factors are in the given prime list primes of size k. For example, [1, 2, 4, 7, 8, 13, 14, 16, 19, 26, 28, 32] is the sequence of the first 12 super ugly numbers given primes = [2, 7, 13, 19] of size 4. Note: (1) 1 is a super ugly number for any given primes. (2) The given numbers in primes are in ascending order. (3) 0 < k ≤ 100, 0 < n ≤ 106, 0 < primes[i] < 1000. Credits: Special thanks to @dietpepsi for adding this problem and creating all test cases. :type n: int :type primes: List[int] :rtype: int | 3.948285 | 4 |
aoc_2015/day11.py | geoffbeier/aoc_2021 | 0 | 6621163 | import itertools
import re
import string
from collections import defaultdict, namedtuple, Counter
from dataclasses import dataclass
from itertools import product
from math import prod
from typing import List, Dict, Any, Tuple
import aocd
from . import aoc_year
from loguru import logger
aoc_day = 11
@dataclass
class AOCContext:
    """Holds the puzzle input and the mutable current password."""
    # Raw puzzle input lines as fetched from aocd.
    raw: List[str]
    # The password the puzzle asks us to advance past.
    current_password: str
def preprocess():
    """Fetch the day's input via aocd and wrap it in an AOCContext."""
    raw = aocd.get_data(day=aoc_day, year=aoc_year).splitlines()
    current_password = raw[0].strip()
    context = AOCContext(raw, current_password)
    return context
def contains_straight(password):
    """Return True when *password* contains three consecutive ascending
    lowercase letters (e.g. ``abc``), checked against the alphabet string."""
    return any(
        password[start:start + 3] in string.ascii_lowercase
        for start in range(len(password) - 2)
    )
def legal_characters_only(password):
    """Return True when the password avoids the forbidden letters i, o, l."""
    return not any(banned in password for banned in ("i", "o", "l"))
def repeating_pairs(password):
    """Return True when the password contains at least two *different*
    doubled letters (e.g. ``aa`` and ``bb``)."""
    distinct = {first for first, second in zip(password, password[1:]) if first == second}
    return len(distinct) >= 2
def is_valid(password: str):
    """AoC 2015 day 11 validity: one straight of three letters, no forbidden
    characters, and at least two different repeated pairs."""
    checks = (contains_straight, legal_characters_only, repeating_pairs)
    return all(check(password) for check in checks)
def skip_illegal_letters(password):
    """Bump the password past any forbidden letter (i, o, l).

    The forbidden occurrence nearest the end is incremented and every
    character after it is reset to 'a'; recursion then clears forbidden
    letters that remain further left.
    """
    chars = list(reversed(password))
    positions = [chars.index(letter) for letter in ("i", "o", "l") if letter in chars]
    if not positions:
        return password
    bump_at = min(positions)
    chars[bump_at] = chr(ord(chars[bump_at]) + 1)
    for pos in range(bump_at):
        chars[pos] = "a"
    return skip_illegal_letters("".join(reversed(chars)))
def increment_password(password: str):
    """Advance the password one step (base-26 with carry from the right),
    then normalize any forbidden letters away."""
    digits = [ord(ch) for ch in password]
    for pos in range(len(digits) - 1, -1, -1):
        if digits[pos] < ord("z"):
            digits[pos] += 1
            break
        digits[pos] = ord("a")  # 'z' wraps to 'a' and the carry continues
    return skip_illegal_letters("".join(chr(code) for code in digits))
def part1(context: AOCContext):
    """Advance past the context's password until a valid one is found.

    Updates ``context.current_password`` in place and returns it.

    NOTE(review): the original line read ``password = <PASSWORD>(password)``
    (a redaction artifact); ``increment_password`` is the only stepping
    function defined in this module, so the call is restored to it.
    """
    password = context.current_password
    while True:
        password = increment_password(password)
        logger.debug(f"trying {password}")
        if is_valid(password):
            context.current_password = password
            return password
def part2(context: AOCContext):
    """Return the *second* next valid password: part1 mutates the context's
    current password, so calling it twice advances two steps."""
    part1(context)
    return part1(context)
# Self-test cases consumed by test(): (raw puzzle input, expected answer, solver).
tests = [
    (
        """abcdefgh
""",
        "abcdffaa",
        part1,
    ),
]
def test(start: int = 0, finish: int = len(tests)):
    """Run the self-tests in ``tests[start:finish]``.

    Each case monkey-patches ``aocd.get_data`` to return canned input, runs
    the case's solver and compares the result against the expected string.
    """
    for i, t in enumerate(tests[start:finish]):

        def gd(*args, **kwargs):
            # Stand-in for aocd.get_data returning the canned puzzle input.
            return t[0]

        aocd.get_data = gd
        result = t[2](preprocess())
        if f"{result}" != f"{t[1]}":
            logger.error(f"Test {start + i + 1} failed: got {result}, expected {t[1]}")
            break
        else:
            logger.success(f"Test {start + i + 1}: {t[1]}")
if __name__ == "__main__":
    # Run the self-tests when executed as a script.
    test()
| import itertools
import re
import string
from collections import defaultdict, namedtuple, Counter
from dataclasses import dataclass
from itertools import product
from math import prod
from typing import List, Dict, Any, Tuple
import aocd
from . import aoc_year
from loguru import logger
aoc_day = 11
@dataclass
class AOCContext:
    """Holds the puzzle input and the mutable current password."""
    # Raw puzzle input lines as fetched from aocd.
    raw: List[str]
    # The password the puzzle asks us to advance past.
    current_password: str
def preprocess():
    """Fetch the day's input via aocd and wrap it in an AOCContext."""
    raw = aocd.get_data(day=aoc_day, year=aoc_year).splitlines()
    current_password = raw[0].strip()
    context = AOCContext(raw, current_password)
    return context
def contains_straight(password):
    """Return True when *password* contains three consecutive ascending
    lowercase letters (checked against the alphabet string)."""
    for i in range(len(password) - 2):
        if password[i : i + 3] in string.ascii_lowercase:
            return True
    return False
def legal_characters_only(password):
    """Return True when the password avoids the forbidden letters i, o, l."""
    illegal_characters = ["i", "o", "l"]
    return all(c not in password for c in illegal_characters)
def repeating_pairs(password):
    """Return True when the password holds at least two different doubled letters."""
    return len(Counter(a for a, b in zip(password, password[1:]) if a == b).keys()) >= 2
def is_valid(password: str):
    """AoC 2015 day 11 validity: straight + legal characters + two pairs."""
    return (
        contains_straight(password)
        and legal_characters_only(password)
        and repeating_pairs(password)
    )
def skip_illegal_letters(password):
    """Bump the password past any forbidden letter (i, o, l).

    Works on the reversed string: the forbidden occurrence nearest the end
    is incremented, everything after it is reset to 'a', and recursion
    clears any forbidden letters remaining further left.
    """
    r = list(reversed(password))
    illegal_letters = ["i", "o", "l"]
    found_illegals = []
    for c in illegal_letters:
        if c in r:
            found_illegals.append(r.index(c))
    if not found_illegals:
        return password
    first_illegal = min(found_illegals)
    r[first_illegal] = chr(ord(r[first_illegal]) + 1)
    for i in reversed(range(0, first_illegal)):
        r[i] = "a"
    return skip_illegal_letters("".join(reversed(r)))
def increment_password(password: str):
    """Advance the password one step (base-26 with carry from the right),
    then normalize any forbidden letters away."""
    codes = [ord(c) for c in password]
    codes.reverse()
    for i, c in enumerate(codes):
        if c < ord("z"):
            codes[i] += 1
            break
        else:
            # 'z' wraps to 'a' and the carry moves one position left.
            codes[i] = ord("a")
    return skip_illegal_letters("".join([chr(i) for i in reversed(codes)]))
def part1(context: AOCContext):
    """Advance past the context's password until a valid one is found.

    Updates ``context.current_password`` in place and returns it.

    NOTE(review): the original line read ``password = <PASSWORD>(password)``
    (a redaction artifact); ``increment_password`` is the only stepping
    function defined in this module, so the call is restored to it.
    """
    password = context.current_password
    while True:
        password = increment_password(password)
        logger.debug(f"trying {password}")
        if is_valid(password):
            context.current_password = password
            return password
def part2(context: AOCContext):
    """Return the *second* next valid password: part1 mutates the context's
    current password, so calling it twice advances two steps."""
    part1(context)
    return part1(context)
# Self-test cases consumed by test(): (raw puzzle input, expected answer, solver).
tests = [
    (
        """abcdefgh
""",
        "abcdffaa",
        part1,
    ),
]
def test(start: int = 0, finish: int = len(tests)):
    """Run the self-tests in ``tests[start:finish]``.

    Each case monkey-patches ``aocd.get_data`` to return canned input, runs
    the case's solver and compares the result against the expected string.
    """
    for i, t in enumerate(tests[start:finish]):

        def gd(*args, **kwargs):
            # Stand-in for aocd.get_data returning the canned puzzle input.
            return t[0]

        aocd.get_data = gd
        result = t[2](preprocess())
        if f"{result}" != f"{t[1]}":
            logger.error(f"Test {start + i + 1} failed: got {result}, expected {t[1]}")
            break
        else:
            logger.success(f"Test {start + i + 1}: {t[1]}")
if __name__ == "__main__":
    # Run the self-tests when executed as a script.
    test()
| none | 1 | 3.100452 | 3 | |
tests/optimizer/test_insert.py | hongfuli/sharding-py | 1 | 6621164 | <filename>tests/optimizer/test_insert.py
import unittest
from shardingpy.api.config.base import load_sharding_rule_config_from_dict
from shardingpy.constant import ShardingOperator
from shardingpy.optimizer.insert_optimizer import InsertOptimizeEngine
from shardingpy.parsing.parser.context.condition import AndCondition, Condition, Column
from shardingpy.parsing.parser.context.insertvalue import InsertValue
from shardingpy.parsing.parser.context.table import Table
from shardingpy.parsing.parser.expressionparser import SQLPlaceholderExpression
from shardingpy.parsing.parser.sql.dml.insert import InsertStatement
from shardingpy.parsing.parser.token import TableToken, InsertValuesToken
from shardingpy.routing.router.sharding.base import GeneratedKey
from shardingpy.rule.base import ShardingRule
from . import optimizer_rule
class InsertOptimizeEngineTest(unittest.TestCase):
    """Tests for InsertOptimizeEngine's sharding-condition generation."""

    def setUp(self):
        # Build a ShardingRule from the shared fixture configuration.
        sharding_rule_config = load_sharding_rule_config_from_dict(optimizer_rule.sharding_rule_config['sharding_rule'])
        self.sharding_rule = ShardingRule(sharding_rule_config,
                                          optimizer_rule.sharding_rule_config['data_sources'].keys())
        # Model: INSERT INTO t_order ... VALUES (?, ?), (?, ?) with user_id
        # equal-conditions bound to placeholder indexes 0 and 2.
        self.insert_statement = insert_statement = InsertStatement()
        insert_statement.tables.add(Table('t_order', None))
        insert_statement.parameters_index = 4
        insert_statement.insert_values_list_last_position = 45
        insert_statement.sql_tokens.append(TableToken(12, 0, 't_order'))
        insert_statement.sql_tokens.append(InsertValuesToken(39, 't_order'))
        and_condition1 = AndCondition()
        and_condition1.conditions.append(
            Condition(Column('user_id', 't_order'), ShardingOperator.EQUAL, SQLPlaceholderExpression(0)))
        insert_statement.conditions.or_condition.and_conditions.append(and_condition1)
        and_condition2 = AndCondition()
        and_condition2.conditions.append(
            Condition(Column('user_id', 't_order'), ShardingOperator.EQUAL, SQLPlaceholderExpression(2)))
        insert_statement.conditions.or_condition.and_conditions.append(and_condition2)
        insert_statement.insert_values.insert_values.append(InsertValue('(?, ?)', 2))
        insert_statement.insert_values.insert_values.append(InsertValue('(?, ?)', 2))
        self.parameters = [10, 'init', 11, 'init']

    def test_optimize_with_generated_key(self):
        """With generated keys, each value tuple gains a key column and parameter."""
        generated_key = GeneratedKey(Column('order_id', 't_order'))
        generated_key.generated_keys = [1, 2]
        actual = InsertOptimizeEngine(self.sharding_rule, self.insert_statement, self.parameters,
                                      generated_key).optimize()
        self.assertFalse(actual.is_always_false())
        self.assertEqual(len(actual.sharding_conditions), 2)
        self.assertEqual(len(actual.sharding_conditions[0].parameters), 3)
        self.assertEqual(len(actual.sharding_conditions[1].parameters), 3)
        self.assertEqual(actual.sharding_conditions[0].parameters, [10, 'init', 1])
        self.assertEqual(actual.sharding_conditions[1].parameters, [11, 'init', 2])
        self.assertEqual(actual.sharding_conditions[0].insert_value_expression, '(?, ?, ?)')
        self.assertEqual(actual.sharding_conditions[1].insert_value_expression, '(?, ?, ?)')
        self.assertEqual(len(actual.sharding_conditions[0].sharding_values), 2)
        self.assertEqual(len(actual.sharding_conditions[1].sharding_values), 2)
        self._assert_sharding_value(actual.sharding_conditions[0].sharding_values[0], 1)
        self._assert_sharding_value(actual.sharding_conditions[0].sharding_values[1], 10)
        self._assert_sharding_value(actual.sharding_conditions[1].sharding_values[0], 2)
        self._assert_sharding_value(actual.sharding_conditions[1].sharding_values[1], 11)

    def test_optimize_without_generated_key(self):
        """Without generated keys, value tuples and parameters stay untouched."""
        self.insert_statement.generate_key_column_index = 1
        actual = InsertOptimizeEngine(self.sharding_rule, self.insert_statement, self.parameters, None).optimize()
        self.assertFalse(actual.is_always_false())
        self.assertEqual(len(actual.sharding_conditions), 2)
        self.assertEqual(len(actual.sharding_conditions[0].parameters), 2)
        self.assertEqual(len(actual.sharding_conditions[1].parameters), 2)
        self.assertEqual(actual.sharding_conditions[0].parameters, [10, 'init'])
        self.assertEqual(actual.sharding_conditions[1].parameters, [11, 'init'])
        self.assertEqual(actual.sharding_conditions[0].insert_value_expression, '(?, ?)')
        self.assertEqual(actual.sharding_conditions[1].insert_value_expression, '(?, ?)')
        self.assertEqual(len(actual.sharding_conditions[0].sharding_values), 1)
        self.assertEqual(len(actual.sharding_conditions[1].sharding_values), 1)
        self._assert_sharding_value(actual.sharding_conditions[0].sharding_values[0], 10)
        self._assert_sharding_value(actual.sharding_conditions[1].sharding_values[0], 11)

    def _assert_sharding_value(self, sharding_value, value):
        # Helper: a sharding value must carry exactly the expected values list.
        self.assertEqual(sharding_value.values, [value])
| <filename>tests/optimizer/test_insert.py
import unittest
from shardingpy.api.config.base import load_sharding_rule_config_from_dict
from shardingpy.constant import ShardingOperator
from shardingpy.optimizer.insert_optimizer import InsertOptimizeEngine
from shardingpy.parsing.parser.context.condition import AndCondition, Condition, Column
from shardingpy.parsing.parser.context.insertvalue import InsertValue
from shardingpy.parsing.parser.context.table import Table
from shardingpy.parsing.parser.expressionparser import SQLPlaceholderExpression
from shardingpy.parsing.parser.sql.dml.insert import InsertStatement
from shardingpy.parsing.parser.token import TableToken, InsertValuesToken
from shardingpy.routing.router.sharding.base import GeneratedKey
from shardingpy.rule.base import ShardingRule
from . import optimizer_rule
class InsertOptimizeEngineTest(unittest.TestCase):
    """Tests for InsertOptimizeEngine's sharding-condition generation."""

    def setUp(self):
        # Build a ShardingRule from the shared fixture configuration.
        sharding_rule_config = load_sharding_rule_config_from_dict(optimizer_rule.sharding_rule_config['sharding_rule'])
        self.sharding_rule = ShardingRule(sharding_rule_config,
                                          optimizer_rule.sharding_rule_config['data_sources'].keys())
        # Model: INSERT INTO t_order ... VALUES (?, ?), (?, ?) with user_id
        # equal-conditions bound to placeholder indexes 0 and 2.
        self.insert_statement = insert_statement = InsertStatement()
        insert_statement.tables.add(Table('t_order', None))
        insert_statement.parameters_index = 4
        insert_statement.insert_values_list_last_position = 45
        insert_statement.sql_tokens.append(TableToken(12, 0, 't_order'))
        insert_statement.sql_tokens.append(InsertValuesToken(39, 't_order'))
        and_condition1 = AndCondition()
        and_condition1.conditions.append(
            Condition(Column('user_id', 't_order'), ShardingOperator.EQUAL, SQLPlaceholderExpression(0)))
        insert_statement.conditions.or_condition.and_conditions.append(and_condition1)
        and_condition2 = AndCondition()
        and_condition2.conditions.append(
            Condition(Column('user_id', 't_order'), ShardingOperator.EQUAL, SQLPlaceholderExpression(2)))
        insert_statement.conditions.or_condition.and_conditions.append(and_condition2)
        insert_statement.insert_values.insert_values.append(InsertValue('(?, ?)', 2))
        insert_statement.insert_values.insert_values.append(InsertValue('(?, ?)', 2))
        self.parameters = [10, 'init', 11, 'init']

    def test_optimize_with_generated_key(self):
        """With generated keys, each value tuple gains a key column and parameter."""
        generated_key = GeneratedKey(Column('order_id', 't_order'))
        generated_key.generated_keys = [1, 2]
        actual = InsertOptimizeEngine(self.sharding_rule, self.insert_statement, self.parameters,
                                      generated_key).optimize()
        self.assertFalse(actual.is_always_false())
        self.assertEqual(len(actual.sharding_conditions), 2)
        self.assertEqual(len(actual.sharding_conditions[0].parameters), 3)
        self.assertEqual(len(actual.sharding_conditions[1].parameters), 3)
        self.assertEqual(actual.sharding_conditions[0].parameters, [10, 'init', 1])
        self.assertEqual(actual.sharding_conditions[1].parameters, [11, 'init', 2])
        self.assertEqual(actual.sharding_conditions[0].insert_value_expression, '(?, ?, ?)')
        self.assertEqual(actual.sharding_conditions[1].insert_value_expression, '(?, ?, ?)')
        self.assertEqual(len(actual.sharding_conditions[0].sharding_values), 2)
        self.assertEqual(len(actual.sharding_conditions[1].sharding_values), 2)
        self._assert_sharding_value(actual.sharding_conditions[0].sharding_values[0], 1)
        self._assert_sharding_value(actual.sharding_conditions[0].sharding_values[1], 10)
        self._assert_sharding_value(actual.sharding_conditions[1].sharding_values[0], 2)
        self._assert_sharding_value(actual.sharding_conditions[1].sharding_values[1], 11)

    def test_optimize_without_generated_key(self):
        """Without generated keys, value tuples and parameters stay untouched."""
        self.insert_statement.generate_key_column_index = 1
        actual = InsertOptimizeEngine(self.sharding_rule, self.insert_statement, self.parameters, None).optimize()
        self.assertFalse(actual.is_always_false())
        self.assertEqual(len(actual.sharding_conditions), 2)
        self.assertEqual(len(actual.sharding_conditions[0].parameters), 2)
        self.assertEqual(len(actual.sharding_conditions[1].parameters), 2)
        self.assertEqual(actual.sharding_conditions[0].parameters, [10, 'init'])
        self.assertEqual(actual.sharding_conditions[1].parameters, [11, 'init'])
        self.assertEqual(actual.sharding_conditions[0].insert_value_expression, '(?, ?)')
        self.assertEqual(actual.sharding_conditions[1].insert_value_expression, '(?, ?)')
        self.assertEqual(len(actual.sharding_conditions[0].sharding_values), 1)
        self.assertEqual(len(actual.sharding_conditions[1].sharding_values), 1)
        self._assert_sharding_value(actual.sharding_conditions[0].sharding_values[0], 10)
        self._assert_sharding_value(actual.sharding_conditions[1].sharding_values[0], 11)

    def _assert_sharding_value(self, sharding_value, value):
        # Helper: a sharding value must carry exactly the expected values list.
        self.assertEqual(sharding_value.values, [value])
| none | 1 | 2.168556 | 2 | |
gluetool_modules_framework/pipelines/pipeline_install_ancestors.py | testing-farm/gluetool-modules | 0 | 6621165 | <gh_stars>0
# Copyright Contributors to the Testing Farm project.
# SPDX-License-Identifier: Apache-2.0
import gluetool
from gluetool.result import Ok, Error
from gluetool.utils import normalize_shell_option, normalize_multistring_option
from gluetool.log import log_dict
from gluetool_modules_framework.libs.guest_setup import guest_setup_log_dirpath, GuestSetupStage
from typing import Any, Dict, List, TYPE_CHECKING, Union, Optional # noqa
if TYPE_CHECKING:
from gluetool_modules_framework.libs.guest import NetworkedGuest
from gluetool_modules_framework.libs.guest_setup import SetupGuestReturnType
class PipelineInstallAncestors(gluetool.Module):
"""
Installs package ancestors in a separate pipeline.
The ancestors names are resolved from ``primary_task`` component name using ``ancestors``
shared function. When ``ancestors`` shared function is not available or if it returns empty list,
we suppose ancestor name is the same as the component name.
If option ``ancestors`` is set, its value is used.
Then these ancestors names are used to resolve specific brew builds on the given tag
specified by the option ``tag``.
Guest is setup by `guest-setup` module.
"""
name = 'pipeline-install-ancestors'
options = {
'tag': {
'help': 'Tag to use when looking up ancestors.'
},
'install-rpms-blacklist': {
'help': """
Value is passed to inner called `brew-build-task-params` module (default: %(default)s).
""",
'type': str,
'default': ''
},
'ancestors': {
'help': """
Comma separated list of packages to be install on the guest (default: none).
""",
'default': [],
'action': 'append'
},
}
required_options = ('tag',)
shared_functions = ['setup_guest']
    def __init__(self, *args, **kwargs):
        # type: (*Any, **Any) -> None
        super(PipelineInstallAncestors, self).__init__(*args, **kwargs)
        # Per-pipeline evaluation context, filled lazily.
        self.context = {}  # type: Dict[str, Any]
    def _build_exists(self, name, tag):
        # type: (str, str) -> bool
        """Return True if package ``name`` has at least one latest build
        tagged ``tag`` (tag inheritance included), per the shared koji session."""
        self.require_shared('koji_session')
        koji_session = self.shared('koji_session')
        builds = koji_session.listTagged(tag, package=name, inherit=True, latest=True)
        return len(builds) > 0
    @gluetool.utils.cached_property
    def _brew_options(self):
        # type: () -> Optional[str]
        """Resolve ancestor package names and render options for the inner
        ``brew`` module.

        Ancestor names come from the ``ancestors`` option, else the
        ``ancestors`` shared function, else the primary task's component name.
        Ancestors without a build tagged with the ``tag`` option are dropped.

        :returns: ``--tag <tag> --name <a,b,...>`` or None when nothing is left.
        """
        ancestors = []  # type: List[str]

        self.require_shared('primary_task')
        component = self.shared('primary_task').component

        if self.option('ancestors'):
            self.info('Ancestors set by option')
            ancestors = normalize_multistring_option(self.option('ancestors'))
        elif self.has_shared('ancestors'):
            self.info('Ancestors set by shared function')
            ancestors = self.shared('ancestors', component)

        if ancestors:
            log_dict(self.info, "Ancestors of '{}'".format(component), ancestors)
        else:
            self.info("No ancestors of '{}' found, assume ancestor's name is the same.".format(component))
            ancestors = [component]

        tag = self.option('tag')

        self.info("Filter out ancestors without builds tagged '{}'".format(tag))
        ancestors = [ancestor for ancestor in ancestors if self._build_exists(ancestor, tag)]

        if ancestors:
            log_dict(self.info, "Ancestors of '{}' with builds tagged '{}'".format(component, tag), ancestors)
            return '--tag {} --name {}'.format(tag, ','.join(ancestors))

        self.info('No ancestors left, nothing will be installed on SUT.')

        return None
def setup_guest(self, guest, stage=GuestSetupStage.PRE_ARTIFACT_INSTALLATION, log_dirpath=None, **kwargs):
# type: (NetworkedGuest, GuestSetupStage, Optional[str], **Any) -> SetupGuestReturnType
log_dirpath = guest_setup_log_dirpath(guest, log_dirpath)
# Make sure previous setup_guest methods are called. This is out of decency only - we don't expect there
# to be any other `setup_guest` in the pipeline. If there were, it would be operate within the context
# of the initial primary artifact while we're trying to do our job within context of the ancestor.
r_overloaded_guest_setup_output = self.overloaded_shared(
'setup_guest',
guest,
stage=stage,
log_dirpath=log_dirpath,
**kwargs
) # type: SetupGuestReturnType
if r_overloaded_guest_setup_output is None:
r_overloaded_guest_setup_output = Ok([])
if r_overloaded_guest_setup_output.is_error:
return r_overloaded_guest_setup_output
# Containers for guest setup outputs and result from the child pipeline.
guest_setup_output = r_overloaded_guest_setup_output.unwrap() or []
guest_setup_output_result = [Ok(guest_setup_output)] # type: List[SetupGuestReturnType]
# Callback to initiate setup guest in child pipeline - will add its outputs to our container,
# and it should propagate any failure - or at least the first one - by updating the result.
def do_setup_guest(self):
# type: (PipelineInstallAncestors) -> None
r_guest_setup = self.shared(
'setup_guest',
guest,
stage=stage,
log_dirpath=log_dirpath,
**kwargs
)
if r_guest_setup is None:
r_guest_setup = Ok([])
if r_guest_setup.is_error:
# Just like the successful result, the failed one also carries list of outputs
# we need to propagate to our parent pipeline.
outputs, exc = r_guest_setup.value
guest_setup_output.extend(outputs)
# If the current global outcome of guest-setup is still set to "success", change that to failed.
# If it's already an error, we don't care, just propagate the outputs.
if guest_setup_output_result[0].is_ok:
guest_setup_output_result[0] = Error((
guest_setup_output,
exc
))
else:
guest_setup_output.extend(r_guest_setup.unwrap() or [])
#
# Run the installation of the ancestors in a separate pipeline. We are using a separate pipeline
# so we do not spoil the parent pipeline with the build initialization.
#
# Please note that we are already in 'setup_guest' function here, and will be requiring to kick
# additional ``setup_guest`` for modules in the separate pipeline. For that kick we use a helper
# function ``do_guest_setup``.
#
modules = [] # type: List[Union[gluetool.glue.PipelineStepModule, gluetool.glue.PipelineStepCallback]]
# If we have an ancestor build, by adding `brew` module at the beginning of our pipeline we're running
# all the modules in the context of the ancestor build.
if self._brew_options:
modules += [
gluetool.glue.PipelineStepModule('brew', argv=normalize_shell_option(self._brew_options))
]
else:
# When there's no artifact we'd inject into our child pipeline, we try at least to "fake" its presence
# by providing dummy eval context content, to fool modules that need it, like guest-setup and its
# method of picking playbooks via map based on artifact's build target.
self.context = {
'BUILD_TARGET': self.option('tag'),
}
# We always want to run guest-setup (or any other module hooked on setup_guest function), for all
# stages.
modules += [
gluetool.glue.PipelineStepModule('guest-setup'),
gluetool.glue.PipelineStepCallback('do_setup_guest', do_setup_guest)
]
# In the artifact-installation stage, throw in modules to install the ancestor.
if stage == GuestSetupStage.ARTIFACT_INSTALLATION and self._brew_options:
self.info('installing the ancestor {}'.format(self.shared('primary_task').nvr))
blacklist = self.option('install-rpms-blacklist')
brew_build_task_params_argv = ['--install-rpms-blacklist', blacklist] if blacklist else []
modules += [
gluetool.glue.PipelineStepModule('brew-build-task-params', argv=brew_build_task_params_argv),
gluetool.glue.PipelineStepModule('install-koji-build', argv=['--skip-overloaded-shared']),
gluetool.glue.PipelineStepCallback('do_setup_guest', do_setup_guest)
]
failure_execute, failure_destroy = self.glue.run_modules(modules)
# Finalize the response. We must return Result, either Ok or Error, with a list of guest setup
# outputs and possible the exception.
#
# Note that we can return just a single exception, so the first one wins. If there were more
# exceptions raised somewhere later, then we at least log them.
result = guest_setup_output_result[0]
if failure_execute:
assert isinstance(failure_execute.exception, Exception)
if result.is_ok:
result = Error((
guest_setup_output,
failure_execute.exception
))
else:
guest.error(
'Exception raised: {}'.format(failure_execute.exception),
exc_info=failure_execute.exc_info
)
if failure_destroy:
assert isinstance(failure_destroy.exception, Exception)
if result.is_ok:
result = Error((
guest_setup_output,
failure_destroy.exception
))
else:
guest.error(
'Exception raised: {}'.format(failure_destroy.exception),
exc_info=failure_destroy.exc_info
)
return result
@property
def eval_context(self):
# type: () -> Dict[str, Any]
__content__ = { # noqa
'BUILD_TARGET': """
Build target of build we were looking for in case nothing found.
If build was found, this value is provided by artifact provider (etc. koji, brew or copr).
"""
}
return self.context
| # Copyright Contributors to the Testing Farm project.
# SPDX-License-Identifier: Apache-2.0
import gluetool
from gluetool.result import Ok, Error
from gluetool.utils import normalize_shell_option, normalize_multistring_option
from gluetool.log import log_dict
from gluetool_modules_framework.libs.guest_setup import guest_setup_log_dirpath, GuestSetupStage
from typing import Any, Dict, List, TYPE_CHECKING, Union, Optional # noqa
if TYPE_CHECKING:
from gluetool_modules_framework.libs.guest import NetworkedGuest
from gluetool_modules_framework.libs.guest_setup import SetupGuestReturnType
class PipelineInstallAncestors(gluetool.Module):
"""
Installs package ancestors in a separate pipeline.
The ancestors names are resolved from ``primary_task`` component name using ``ancestors``
shared function. When ``ancestors`` shared function is not available or if it returns empty list,
we suppose ancestor name is the same as the component name.
If option ``ancestors`` is set, its value is used.
Then these ancestors names are used to resolve specific brew builds on the given tag
specified by the option ``tag``.
Guest is setup by `guest-setup` module.
"""
name = 'pipeline-install-ancestors'
options = {
'tag': {
'help': 'Tag to use when looking up ancestors.'
},
'install-rpms-blacklist': {
'help': """
Value is passed to inner called `brew-build-task-params` module (default: %(default)s).
""",
'type': str,
'default': ''
},
'ancestors': {
'help': """
Comma separated list of packages to be install on the guest (default: none).
""",
'default': [],
'action': 'append'
},
}
required_options = ('tag',)
shared_functions = ['setup_guest']
def __init__(self, *args, **kwargs):
# type: (*Any, **Any) -> None
super(PipelineInstallAncestors, self).__init__(*args, **kwargs)
self.context = {} # type: Dict[str, Any]
def _build_exists(self, name, tag):
# type: (str, str) -> bool
self.require_shared('koji_session')
koji_session = self.shared('koji_session')
builds = koji_session.listTagged(tag, package=name, inherit=True, latest=True)
return len(builds) > 0
@gluetool.utils.cached_property
def _brew_options(self):
# type: () -> Optional[str]
ancestors = [] # type: List[str]
self.require_shared('primary_task')
component = self.shared('primary_task').component
if self.option('ancestors'):
self.info('Ancestors set by option')
ancestors = normalize_multistring_option(self.option('ancestors'))
elif self.has_shared('ancestors'):
self.info('Ancestors set by shared function')
ancestors = self.shared('ancestors', component)
if ancestors:
log_dict(self.info, "Ancestors of '{}'".format(component), ancestors)
else:
self.info("No ancestors of '{}' found, assume ancestor's name is the same.".format(component))
ancestors = [component]
tag = self.option('tag')
self.info("Filter out ancestors without builds tagged '{}'".format(tag))
ancestors = [ancestor for ancestor in ancestors if self._build_exists(ancestor, tag)]
if ancestors:
log_dict(self.info, "Ancestors of '{}' with builds tagged '{}'".format(component, tag), ancestors)
return '--tag {} --name {}'.format(tag, ','.join(ancestors))
self.info('No ancestors left, nothing will be installed on SUT.')
return None
def setup_guest(self, guest, stage=GuestSetupStage.PRE_ARTIFACT_INSTALLATION, log_dirpath=None, **kwargs):
# type: (NetworkedGuest, GuestSetupStage, Optional[str], **Any) -> SetupGuestReturnType
log_dirpath = guest_setup_log_dirpath(guest, log_dirpath)
# Make sure previous setup_guest methods are called. This is out of decency only - we don't expect there
# to be any other `setup_guest` in the pipeline. If there were, it would be operate within the context
# of the initial primary artifact while we're trying to do our job within context of the ancestor.
r_overloaded_guest_setup_output = self.overloaded_shared(
'setup_guest',
guest,
stage=stage,
log_dirpath=log_dirpath,
**kwargs
) # type: SetupGuestReturnType
if r_overloaded_guest_setup_output is None:
r_overloaded_guest_setup_output = Ok([])
if r_overloaded_guest_setup_output.is_error:
return r_overloaded_guest_setup_output
# Containers for guest setup outputs and result from the child pipeline.
guest_setup_output = r_overloaded_guest_setup_output.unwrap() or []
guest_setup_output_result = [Ok(guest_setup_output)] # type: List[SetupGuestReturnType]
# Callback to initiate setup guest in child pipeline - will add its outputs to our container,
# and it should propagate any failure - or at least the first one - by updating the result.
def do_setup_guest(self):
# type: (PipelineInstallAncestors) -> None
r_guest_setup = self.shared(
'setup_guest',
guest,
stage=stage,
log_dirpath=log_dirpath,
**kwargs
)
if r_guest_setup is None:
r_guest_setup = Ok([])
if r_guest_setup.is_error:
# Just like the successful result, the failed one also carries list of outputs
# we need to propagate to our parent pipeline.
outputs, exc = r_guest_setup.value
guest_setup_output.extend(outputs)
# If the current global outcome of guest-setup is still set to "success", change that to failed.
# If it's already an error, we don't care, just propagate the outputs.
if guest_setup_output_result[0].is_ok:
guest_setup_output_result[0] = Error((
guest_setup_output,
exc
))
else:
guest_setup_output.extend(r_guest_setup.unwrap() or [])
#
# Run the installation of the ancestors in a separate pipeline. We are using a separate pipeline
# so we do not spoil the parent pipeline with the build initialization.
#
# Please note that we are already in 'setup_guest' function here, and will be requiring to kick
# additional ``setup_guest`` for modules in the separate pipeline. For that kick we use a helper
# function ``do_guest_setup``.
#
modules = [] # type: List[Union[gluetool.glue.PipelineStepModule, gluetool.glue.PipelineStepCallback]]
# If we have an ancestor build, by adding `brew` module at the beginning of our pipeline we're running
# all the modules in the context of the ancestor build.
if self._brew_options:
modules += [
gluetool.glue.PipelineStepModule('brew', argv=normalize_shell_option(self._brew_options))
]
else:
# When there's no artifact we'd inject into our child pipeline, we try at least to "fake" its presence
# by providing dummy eval context content, to fool modules that need it, like guest-setup and its
# method of picking playbooks via map based on artifact's build target.
self.context = {
'BUILD_TARGET': self.option('tag'),
}
# We always want to run guest-setup (or any other module hooked on setup_guest function), for all
# stages.
modules += [
gluetool.glue.PipelineStepModule('guest-setup'),
gluetool.glue.PipelineStepCallback('do_setup_guest', do_setup_guest)
]
# In the artifact-installation stage, throw in modules to install the ancestor.
if stage == GuestSetupStage.ARTIFACT_INSTALLATION and self._brew_options:
self.info('installing the ancestor {}'.format(self.shared('primary_task').nvr))
blacklist = self.option('install-rpms-blacklist')
brew_build_task_params_argv = ['--install-rpms-blacklist', blacklist] if blacklist else []
modules += [
gluetool.glue.PipelineStepModule('brew-build-task-params', argv=brew_build_task_params_argv),
gluetool.glue.PipelineStepModule('install-koji-build', argv=['--skip-overloaded-shared']),
gluetool.glue.PipelineStepCallback('do_setup_guest', do_setup_guest)
]
failure_execute, failure_destroy = self.glue.run_modules(modules)
# Finalize the response. We must return Result, either Ok or Error, with a list of guest setup
# outputs and possible the exception.
#
# Note that we can return just a single exception, so the first one wins. If there were more
# exceptions raised somewhere later, then we at least log them.
result = guest_setup_output_result[0]
if failure_execute:
assert isinstance(failure_execute.exception, Exception)
if result.is_ok:
result = Error((
guest_setup_output,
failure_execute.exception
))
else:
guest.error(
'Exception raised: {}'.format(failure_execute.exception),
exc_info=failure_execute.exc_info
)
if failure_destroy:
assert isinstance(failure_destroy.exception, Exception)
if result.is_ok:
result = Error((
guest_setup_output,
failure_destroy.exception
))
else:
guest.error(
'Exception raised: {}'.format(failure_destroy.exception),
exc_info=failure_destroy.exc_info
)
return result
@property
def eval_context(self):
# type: () -> Dict[str, Any]
__content__ = { # noqa
'BUILD_TARGET': """
Build target of build we were looking for in case nothing found.
If build was found, this value is provided by artifact provider (etc. koji, brew or copr).
"""
}
return self.context | en | 0.859038 | # Copyright Contributors to the Testing Farm project. # SPDX-License-Identifier: Apache-2.0 # noqa Installs package ancestors in a separate pipeline. The ancestors names are resolved from ``primary_task`` component name using ``ancestors`` shared function. When ``ancestors`` shared function is not available or if it returns empty list, we suppose ancestor name is the same as the component name. If option ``ancestors`` is set, its value is used. Then these ancestors names are used to resolve specific brew builds on the given tag specified by the option ``tag``. Guest is setup by `guest-setup` module. Value is passed to inner called `brew-build-task-params` module (default: %(default)s). Comma separated list of packages to be install on the guest (default: none). # type: (*Any, **Any) -> None # type: Dict[str, Any] # type: (str, str) -> bool # type: () -> Optional[str] # type: List[str] # type: (NetworkedGuest, GuestSetupStage, Optional[str], **Any) -> SetupGuestReturnType # Make sure previous setup_guest methods are called. This is out of decency only - we don't expect there # to be any other `setup_guest` in the pipeline. If there were, it would be operate within the context # of the initial primary artifact while we're trying to do our job within context of the ancestor. # type: SetupGuestReturnType # Containers for guest setup outputs and result from the child pipeline. # type: List[SetupGuestReturnType] # Callback to initiate setup guest in child pipeline - will add its outputs to our container, # and it should propagate any failure - or at least the first one - by updating the result. # type: (PipelineInstallAncestors) -> None # Just like the successful result, the failed one also carries list of outputs # we need to propagate to our parent pipeline. # If the current global outcome of guest-setup is still set to "success", change that to failed. # If it's already an error, we don't care, just propagate the outputs. 
# # Run the installation of the ancestors in a separate pipeline. We are using a separate pipeline # so we do not spoil the parent pipeline with the build initialization. # # Please note that we are already in 'setup_guest' function here, and will be requiring to kick # additional ``setup_guest`` for modules in the separate pipeline. For that kick we use a helper # function ``do_guest_setup``. # # type: List[Union[gluetool.glue.PipelineStepModule, gluetool.glue.PipelineStepCallback]] # If we have an ancestor build, by adding `brew` module at the beginning of our pipeline we're running # all the modules in the context of the ancestor build. # When there's no artifact we'd inject into our child pipeline, we try at least to "fake" its presence # by providing dummy eval context content, to fool modules that need it, like guest-setup and its # method of picking playbooks via map based on artifact's build target. # We always want to run guest-setup (or any other module hooked on setup_guest function), for all # stages. # In the artifact-installation stage, throw in modules to install the ancestor. # Finalize the response. We must return Result, either Ok or Error, with a list of guest setup # outputs and possible the exception. # # Note that we can return just a single exception, so the first one wins. If there were more # exceptions raised somewhere later, then we at least log them. # type: () -> Dict[str, Any] # noqa Build target of build we were looking for in case nothing found. If build was found, this value is provided by artifact provider (etc. koji, brew or copr). | 2.018311 | 2 |
gateway/qrTest3.py | dustinengle/smart-mailbox | 0 | 6621166 | import pyqrcode
from Tkinter import *
import tkMessageBox
import os
import sys
import requests
import json
import time
import signal
import multiprocessing as mp
import requests
import ed25519
from kit.controller import Controller
from kit.file import read_file
from kit.codec import decode, encode
from kit.crypto import decrypt, encrypt
import kit.env as env
from kit.logger import error, info
from kit.pubsub import get_message, subscribe, unsubscribe, publish
import subprocess
class Gateway(Frame):
    """Tkinter UI that drives the gateway lifecycle: key generation, activation
    against the REST API at ``self.url``, password management for the encrypted
    kit secret, pairing of mailboxes, and running the kit controller plus the
    sensor script in separate processes.

    Persistent state lives in the ``.env`` file; after every change it is
    re-read into ``os.environ`` via ``get_env()``.  ``GATE_PUB_KEYS`` is a
    comma-separated list whose first entry is the gateway's own public key and
    whose remaining entries are paired mailbox keys.
    """
    def __init__(self, root=None):
        # Build the frame, install a confirm-on-close handler, set up the menu
        # bar, then immediately enter the activation flow via startScreen().
        Frame.__init__(self, root)
        def on_closing():
            # Confirm quit; stop() tears down the kit subprocess first.
            if tkMessageBox.askokcancel("Quit", "Do you want to quit?\n(This will shutdown communication with any Mailboxes)"):
                self.stop()
                root.destroy()
        root.protocol("WM_DELETE_WINDOW", on_closing)
        # Base URL of the activation/connect API.
        self.url = "http://172.16.31.10:10000/v1"
        self.root = root
        self.controller = Controller()
        self.pw = ''  # kit password, filled in by startScreen()
        self.get_env()
        self.currentWin = 'START'  # crude navigation state used by the menus
        menu = Menu(self.root)
        #menu.add_command(label="Kit", command = self.adminMenu)
        menu.add_command(label="Mailbox", command = self.mailboxMenu)
        #menu.add_command(label="Help")
        self.root.config(menu=menu)
        self.root.geometry("500x500")
        # One status string per known public key; index 0 is the gateway itself.
        self.activeList = []
        keys = os.environ['GATE_PUB_KEYS'].split(',')
        for x in keys:
            self.activeList.append('REGISTERED')
        #print(self.activeList)
        self.pack()
        self.startScreen()
    def stop(self):
        """Stop the controller and terminate the kit subprocess if running."""
        try:
            print(self.kit.is_alive())
            print(self.controller.running)
            if(self.kit.is_alive() == True):
                self.controller.stop()
                self.kit.terminate()
                self.kit.join()
        except:
            # self.kit only exists once startScreen() has created it.
            print('kit undefined')
    def get_env(self):
        """Reload the .env file into the process environment."""
        print 'info:main: loading .env file'
        env.load('.env')
    def startScreen(self):
        """Main startup flow: activate the gateway if needed, obtain the kit
        password, start the kit subprocess, register with the API once, and
        loop until at least one mailbox is paired."""
        var = IntVar()
        newact = False
        # Block here (modal-ish) until activation succeeds.
        while(not self.active_check()):
            startwin = Frame(self.root)
            startwin.pack()
            message = Message(startwin, text="This gateway has not been activated. Please use Its Here App to activate.", width=500)
            message.pack()
            button = Button(startwin, text="Show Activation Code", command=lambda: var.set(1))
            button.pack()
            button.wait_variable(var)
            self.activate()
            startwin.destroy()
            newact = True
        pw = ''
        if newact:
            # Fresh activation: choose a new password.
            pw = self.create_pw()
        else:
            # Existing install: prompt until the password decrypts the secret.
            while(self.check_pw(pw) == False):
                try:
                    pw = self.enter_pw()
                except:
                    print('password invalid')
        """
        self.controller.setup(pw)
        self.controller.start()
        self.pw = pw
        """
        # Run the kit controller in its own process so the Tk loop stays live.
        self.kit = mp.Process(target=lambda: self.kit_start(pw,self.controller))
        self.kit_setup(pw)
        self.pw = pw
        #self.sensor = mp.Process(target=lambda: self.sensor_start())
        #self.sensor_setup()
        if(os.environ['KIT_REGISTERED'] != '1'):
            # One-time registration; persist the flag into .env afterwards.
            self.send_register()
            s = open('.env').read()
            s = s.replace("KIT_REGISTERED=", "KIT_REGISTERED=1")
            f = open('.env', 'w')
            f.write(s)
            f.close()
            self.get_env()
        #time.sleep(30)
        # Keep prompting until at least one mailbox has been paired.
        while(self.mailbox_check() <= 0):
            print('?')
            self.mailbox_setup(1)
            #self.scan_mailbox(1)
            time.sleep(1)
        #restart kit
        self.mailboxMenu()
        #self.adminMenu()
    def active_check(self):
        """Return True once channel, device id and device key are all set."""
        return os.environ['KIT_CHANNEL'] != '' and os.environ['KIT_DEVICE_ID'] != '' and os.environ['KIT_DEVICE_KEY'] != ''
    def activate(self):
        """Show the QR code and run the activation handshake (no-op if active)."""
        gID = self.get_gatewayID()
        if(not self.active_check()):
            self.display_qr(gID, 'Gateway')
            self.handle_activate(gID)
    def get_gatewayID(self):
        """Return the gateway's own public key, generating a keypair if absent."""
        if(os.environ['GATE_PUB_KEYS'] == ''):
            self.gen_key()
        return os.environ['GATE_PUB_KEYS'].split(',')[0]
    def gen_key(self):
        """Create an ed25519 keypair; store the public key (hex) in .env and
        the private seed bytes at GATE_SEED_PATH."""
        priv, verifying_key = ed25519.create_keypair()
        print(verifying_key)
        vkey_hex = verifying_key.to_ascii(encoding="hex")
        s = open('.env').read()
        s = s.replace("GATE_PUB_KEYS=", "GATE_PUB_KEYS="+vkey_hex)
        f = open('.env', 'w')
        f.write(s)
        f.close()
        self.get_env()
        open(os.environ['GATE_SEED_PATH'],"wb").write(priv.to_bytes())
        vkey = os.environ['GATE_PUB_KEYS'].split(',')[0]
        print "the public key is", vkey
    def display_qr(self, id, box):
        """Pop up a modal window showing a QR code for `id`; `box` labels it
        (e.g. 'Gateway').  Blocks until the user clicks ok."""
        var = IntVar()
        actwin = Toplevel(self.root)
        actwin.title('Activate' + box)
        message = Message(actwin, text="Scan "+box+" code with mobile app", width=200)
        code_bmp = BitmapImage(data=self.gen_qr(id))
        code_bmp.config(background='white')
        qr = Label(actwin, image=code_bmp)
        button = Button(actwin, text="ok", command=lambda: var.set(1))
        message.pack(padx=5, pady=5)
        qr.pack()
        button.pack()
        button.wait_variable(var)
        actwin.destroy()
    def gen_qr(self, qr):
        """Return the QR code for string `qr` as XBM bitmap data (Tk-friendly)."""
        code = pyqrcode.create(qr)
        code_xbm = code.xbm(scale=5)
        return code_xbm
    def handle_activate(self, gID):
        """Run the activation request and show a success/failure dialog."""
        if(self.display_wait(gID) and self.active_check()):
            self.display_activated()
        else:
            self.display_failure()
        return 0
    def display_wait(self, gID):
        """Show a 'Please wait' window while send_activate() runs; return its
        result (True on success)."""
        waitwin = Toplevel(self.root)
        waitwin.title('Activate')
        message = Message(waitwin, text="Please wait...", width=200)
        message.pack(padx=5, pady=5)
        """
        i=0
        while(i < 20):
            self.root.update_idletasks()
            self.root.update()
            time.sleep(.25)
            i+=1
        """
        response = self.send_activate(gID)
        waitwin.destroy()
        return response
    def send_activate(self,gID):
        """POST a signed activation request for this gateway.

        The timestamp is signed with the gateway's private key and sent in the
        Authorization header.  On success the returned gateway credentials are
        written to .env via append_env(); returns False on any network/JSON
        failure (errors are deliberately swallowed, best-effort).
        """
        ts = int(time.time())
        sig = str(self.sign(str(ts)))
        #headers = json.loads(json.dumps({'Content-Type':'application/json','Authorization':sig}))
        headers = json.loads('{"Content-Type":"application/json","Authorization":"'+sig+'"}')
        payload = json.loads('{"publicKey":"'+self.get_gatewayID()+'","timestamp":"'+str(ts)+'"}')
        try:
            response = requests.post(self.url+"/activate",json=payload,headers=headers)
            json_response = response.json()
        except:
            return False
        #print(json_response['result']['deviceId'])
        try:
            # NOTE(review): assumes response schema {'result': {'gateway':
            # {'deviceId', 'deviceKey', 'channelId'}}} -- confirm with the API.
            data = json_response['result']['gateway']
            print(data)
            print('ok')
            return self.append_env(data['deviceId'],data['deviceKey'],data['channelId'])
        except:
            return False
    def sign(self, msg):
        """Sign `msg` with the ed25519 seed at GATE_SEED_PATH; return base64."""
        keydata = open(os.environ['GATE_SEED_PATH'],"rb").read()
        signing_key = ed25519.SigningKey(keydata)
        sig = signing_key.sign(msg, encoding="base64")
        print "sig is:", sig
        return sig
    def append_env(self, devID, devKey, chanID):
        """Write the activation credentials into .env, reload the environment
        and return active_check() as the success indicator."""
        s = open('.env').read()
        s = s.replace("KIT_CHANNEL=", "KIT_CHANNEL="+chanID)
        s = s.replace("KIT_DEVICE_ID=", "KIT_DEVICE_ID="+devID)
        s = s.replace("KIT_DEVICE_KEY=", "KIT_DEVICE_KEY="+devKey)
        f = open('.env', 'w')
        f.write(s)
        f.close()
        """
        for line in fileinput.input('.env', inplace = 1):
            if line == "KIT_CHANNEL":
                print line.replace("KIT_CHANNEL=", "KIT_CHANNEL="+chanID)
            if line == "KIT_DEVICE_ID":
                print line.replace("KIT_DEVICE_ID=", "KIT_DEVICE_ID="+devID)
            if line == "KIT_DEVICE_KEY":
                print line.replace("KIT_DEVICE_KEY=", "KIT_DEVICE_KEY="+devKey)
        """
        self.get_env()
        return self.active_check()
    def display_activated(self):
        """Modal dialog: activation succeeded."""
        var = IntVar()
        actwin = Toplevel(self.root)
        actwin.title('Activate')
        message = Message(actwin, text="Gateway is now activated", width=200)
        button = Button(actwin, text="ok", command=lambda: var.set(1))
        message.pack(padx=5, pady=5)
        button.pack()
        button.wait_variable(var)
        actwin.destroy()
    def display_failure(self):
        """Modal dialog: activation failed."""
        var = IntVar()
        actwin = Toplevel(self.root)
        actwin.title('Activate')
        message = Message(actwin, text="There was an issue during activation", width=200)
        button = Button(actwin, text="ok", command=lambda: var.set(1))
        message.pack(padx=5, pady=5)
        button.pack()
        button.wait_variable(var)
        actwin.destroy()
    def check_pw(self, _password):
        """Return True if `_password` decrypts the kit secret file.

        The decrypted seed is expected to start with 'S'; any exception is
        treated as a wrong password.  The trailing `raise` is unreachable --
        every path above returns first.
        """
        try:
            data = read_file(path=os.environ['KIT_SECRET_PATH'])
            seed = decrypt(data, _password)
            if seed[0] == 'S':
                return True
            else:
                return False
        except:
            return False
        raise Exception('Decryption of secret data failed, password incorrect!')
    def create_pw(self):
        """Prompt for a new password twice until both entries match; return it."""
        var = IntVar()
        pw = ''
        conpw = '.'
        # conpw starts unequal to pw so the loop always runs at least once.
        while pw != conpw:
            pwwin = Frame(self.root)
            pwwin.pack()
            if(pw != ''):
                match = Message(pwwin, text="Entries did not match", width=500)
                match.pack()
            message = Message(pwwin, text="Please enter password:", width=500)
            message.pack()
            e = Entry(pwwin)
            e.pack()
            button = Button(pwwin, text="submit", command=lambda: var.set(1))
            button.pack()
            button.wait_variable(var)
            pw = e.get()
            pwwin.destroy()
            pwwin = Frame(self.root)
            pwwin.pack()
            message = Message(pwwin, text="Please confirm password:", width=500)
            message.pack()
            e = Entry(pwwin)
            e.pack()
            button = Button(pwwin, text="submit", command=lambda: var.set(1))
            button.pack()
            button.wait_variable(var)
            conpw = e.get()
            pwwin.destroy()
        return pw
    def enter_pw(self):
        """Prompt once for the existing password and return what was typed."""
        var = IntVar()
        pwwin = Frame(self.root)
        pwwin.pack()
        message = Message(pwwin, text="Please enter password:", width=500)
        message.pack()
        e = Entry(pwwin)
        e.pack()
        button = Button(pwwin, text="submit", command=lambda: var.set(1))
        button.pack()
        button.wait_variable(var)
        pw = e.get()
        pwwin.destroy()
        return pw
    #starts kit as a multiprocess and sends register request to api
    def kit_setup(self, pw=None):
        """Start the kit subprocess (self.kit must already be constructed) and
        show a brief 'starting' message while it spins up."""
        kitwin = Frame(self.root)
        kitwin.pack()
        Message(kitwin, text="Kit Starting. Please wait.").pack()
        self.kit.start()
        self.activeList[0] = 'ACTIVE'
        time.sleep(5)  # give the child process time to come up
        kitwin.destroy()
        print 'kit end'
        #self.send_register()
    def kit_start(self, pw=None, controller=Controller()):
        """Child-process entry point: run the kit controller until SIGINT.

        NOTE(review): the ``controller=Controller()`` default is evaluated once
        at class-definition time, so all calls relying on the default share a
        single Controller instance -- confirm this is intended.
        """
        print('kit_setup')
        #controller = self.controller
        #controller = Controller()
        # Handle SIGINT and close the controller.
        def signal_handler(sig, frame):
            info('main', 'sigint')
            controller.stop()
            time.sleep(1)
            print 'Goodbye.'
            sys.exit(0)
        signal.signal(signal.SIGINT, signal_handler)
        # Load the configuration dictionary.
        print 'info:main: loading .env file'
        env.load('.env')
        # Setup our controller object and start it.
        controller.setup(pw)
        controller.start()
        # Wait for SIGINT.
        signal.pause()
        return 0
    def send_register(self):
        """POST a signed /connect request announcing this gateway.

        Returns 0 on (attempted) success and False on a network failure --
        note the inconsistent success/failure return types; callers here
        ignore the result.
        """
        ts = int(time.time())
        sig = str(self.sign(str(ts)))
        headers = json.loads('{"Content-Type":"application/json","Authorization":"'+sig+'"}')
        payload = json.loads('{"publicKey":"'+self.get_gatewayID()+'","timestamp":"'+str(ts)+'"}')
        try:
            response = requests.post(self.url+"/connect",json=payload,headers=headers)
        except:
            return False
        print(response)
        return 0
    def mailbox_check(self):
        """Return the number of paired mailboxes (keys beyond the gateway's own)."""
        channel = os.environ['GATE_PUB_KEYS']
        topics = channel.split(',')
        return len(topics) - 1
    def mailbox_setup(self, boxNum):
        """Pair mailbox number `boxNum`: run scan_mailbox() in a child process
        while the user scans the code, then fetch the mailbox channel from the
        API, append it to KIT_CHANNEL in .env and restart the kit process."""
        self.currentWin = 'MAILBOX_SETUP'
        var = IntVar()
        boxwin = Frame(self.root)
        boxwin.pack()
        message = Message(boxwin, text="Please use Its Here App to scan the Activation Code on the Mailbox. Only click continue once the code is scanned.", width=500)
        message.pack()
        button = Button(boxwin, text="Continue", command=lambda: var.set(1))
        button.pack()
        #wait for message on inbound
        scan = mp.Process(target=lambda: self.scan_mailbox(boxNum))
        scan.start()
        print(var.get())
        button.wait_variable(var)
        scan.terminate()
        scan.join()
        unsubscribe()
        boxwin.destroy()
        # NOTE(review): mailbox_check() reads this (parent) process's
        # environment, which scan_mailbox() only updated in the child and on
        # disk -- confirm the intended branch here.
        if(self.mailbox_check() >= boxNum):
            return
        else:
            boxChan = self.get_box_channel(boxNum)
            if(boxChan != ''):
                # Append the mailbox channel to KIT_CHANNEL in .env, then
                # restart the kit process so it picks up the new channel list.
                s = open('.env').read()
                s = s.replace("KIT_CHANNEL="+os.environ['KIT_CHANNEL'], "KIT_CHANNEL="+os.environ['KIT_CHANNEL']+','+boxChan)
                f = open('.env', 'w')
                f.write(s)
                f.close()
                self.get_env()
                self.kit.terminate()
                self.kit.join()
                self.kit = mp.Process(target=lambda: self.kit_start(self.pw))
                self.kit.start()
                #self.sensor.terminate()
                #self.sensor.join()
                self.sensor = mp.Process(target=lambda: self.sensor_start())
                self.sensor.start()
                self.activeList.append('REGISTERED')
                #self.sensor_setup(boxNum)
            else:
                return
    def scan_mailbox(self,boxNum):
        """Child-process worker: subscribe to the 'inbound' pubsub channel and
        pump messages until mailbox `boxNum` appears in GATE_PUB_KEYS.

        An 'ADD' message carries the mailbox public key, which is appended to
        GATE_PUB_KEYS in .env (the update is visible to the parent only via
        the file, not via this child's os.environ).
        """
        print('scan')
        def handle_mailbox(msg):
            # NOTE(review): assumes msg exposes get_name()/get_str() -- see
            # kit.pubsub for the message interface.
            print('in here............')
            try:
                info('handle', str(msg))
                name = msg.get_name()
                print(name)
                if name == 'ADD':
                    boxinfo = msg.get_str()
                    print(boxinfo)
                    if self.mailbox_exists(boxinfo):
                        print('mailbox_exists')
                        return
                    #boxinfo[0] = Pub_Key
                    s = open('.env').read()
                    s = s.replace("GATE_PUB_KEYS="+os.environ['GATE_PUB_KEYS'], "GATE_PUB_KEYS="+os.environ['GATE_PUB_KEYS']+','+boxinfo)
                    #s = s.replace("KIT_CHANNEL="+os.environ['KIT_CHANNEL'], "KIT_CHANNEL="+os.environ['KIT_CHANNEL']+','+self.get_box_channel(boxNum))
                    f = open('.env', 'w')
                    f.write(s)
                    f.close()
                    #look up activate to get mailbox channel
                    self.get_env()
                    print('added', name, str(msg))
            except Exception as ex:
                error('handle', str(ex))
        subscribe(fn=handle_mailbox, channel='inbound')
        while(self.mailbox_check()<boxNum):
            get_message()
            #time.sleep(.5)
        unsubscribe()
    def mailbox_exists(self, boxKey):
        """Return True if `boxKey` is already listed in GATE_PUB_KEYS."""
        keys = os.environ['GATE_PUB_KEYS'].split(',')
        for x in keys:
            if boxKey == x:
                return True
        return False
    def get_box_channel(self, boxNum):
        """Ask the API for the channel id of mailbox `boxNum` (1-based).

        Re-uses the /activate endpoint with a signed timestamp; returns the
        channel id string, or '' on any network/JSON/schema failure.
        """
        ts = int(time.time())
        sig = str(self.sign(str(ts)))
        #headers = json.loads(json.dumps({'Content-Type':'application/json','Authorization':sig}))
        headers = json.loads('{"Content-Type":"application/json","Authorization":"'+sig+'"}')
        payload = json.loads('{"publicKey":"'+self.get_gatewayID()+'","timestamp":"'+str(ts)+'"}')
        try:
            response = requests.post(self.url+"/activate",json=payload,headers=headers)
            json_response = response.json()
        except:
            return ''
        #print(json_response['result']['deviceId'])
        try:
            # NOTE(review): assumes {'result': {'mailboxes': [{'channelId'},
            # ...]}} in pairing order -- confirm with the API.
            data = json_response['result']['mailboxes'][boxNum-1]
            print(data)
            print('ok')
            return data['channelId']
        except:
            return ''
    def sensor_setup(self):
        """Start the (already constructed) sensor subprocess."""
        print('sensor_setup')
        #subprocess.call("safebox/sensor.py", shell=True)
        self.sensor.start()
    def sensor_start(self):
        """Child-process entry point: run the sensor script; blocks until it exits."""
        print('sensor_start')
        os.system('python safebox/sensor.py')
        return 0
    def adminMenu(self):
        """Admin window stub; only shows a placeholder button when activated."""
        kitwin = Toplevel(self.root)
        if self.active_check():
            button = Button(kitwin, text="Start admin")
            button.pack()
        else:
            kitwin.title('Kit')
            message = Message(kitwin, text="Please Activate Gateway", width=200)
            button = Button(kitwin, text="ok", command=kitwin.destroy)
            message.pack(padx=5, pady=5)
            button.pack()
    def mailboxMenu(self):
        """Show the mailbox list with an 'Add Mailbox' option; falls back to an
        'activate first' dialog when the gateway has no paired mailboxes or the
        menu is already open."""
        if self.active_check() and self.mailbox_check()>0 and self.currentWin != 'MAILBOX':
            self.currentWin = 'MAILBOX'
            var = IntVar()
            print('Mailbox Menu')
            kitwin = Frame(self.root)
            kitwin.pack()
            i = 1
            listbox = Listbox(kitwin)
            listbox.pack()
            # activeList[0] is the gateway itself; mailboxes start at index 1.
            while(i<=self.mailbox_check()):
                boxText = "Mailbox "+str(i)+": "+self.activeList[i]
                listbox.insert(END, boxText)
                i+=1
            newMailbox = Button(kitwin, text="Add Mailbox", command=lambda: [kitwin.destroy(), self.mailbox_setup(self.mailbox_check()+1)])
            newMailbox.pack()
            button = Button(kitwin, text="Done", command=lambda: var.set(1))
            button.pack()
            button.wait_variable(var)
            self.currentWin = 'START'
            kitwin.destroy()
        else:
            kitwin = Toplevel(self.root)
            kitwin.title('Kit')
            message = Message(kitwin, text="Please Activate Mailbox", width=200)
            button = Button(kitwin, text="ok", command=kitwin.destroy)
            message.pack(padx=5, pady=5)
            button.pack()
if __name__ == '__main__':
    root = Tk()
    #gate.get_env()
    gate = Gateway(root=root)
    try:
        gate.mainloop()
    except:
        # Tear down the window and the pub/sub subscription on any error
        # (including KeyboardInterrupt) so the process exits cleanly.
        root.destroy()
        unsubscribe()
| import pyqrcode
from Tkinter import *
import tkMessageBox
import os
import sys
import requests
import json
import time
import signal
import multiprocessing as mp
import requests
import ed25519
from kit.controller import Controller
from kit.file import read_file
from kit.codec import decode, encode
from kit.crypto import decrypt, encrypt
import kit.env as env
from kit.logger import error, info
from kit.pubsub import get_message, subscribe, unsubscribe, publish
import subprocess
class Gateway(Frame):
    """Tkinter front-end for activating a gateway and pairing mailboxes.

    State (public keys, device ids, channels) is persisted by rewriting
    a .env file in the working directory; the background "kit" message
    handler runs in a separate process (multiprocessing).
    """

    def __init__(self, root=None):
        Frame.__init__(self, root)

        def on_closing():
            # Closing the window also stops the kit process, which cuts
            # communication with paired mailboxes - hence the prompt.
            if tkMessageBox.askokcancel("Quit", "Do you want to quit?\n(This will shutdown communication with any Mailboxes)"):
                self.stop()
                root.destroy()
        root.protocol("WM_DELETE_WINDOW", on_closing)
        self.url = "http://172.16.31.10:10000/v1"  # base URL of the activation API
        self.root = root
        self.controller = Controller()
        self.pw = ''
        self.get_env()
        self.currentWin = 'START'  # crude screen-state flag (see mailboxMenu)
        menu = Menu(self.root)
        #menu.add_command(label="Kit", command = self.adminMenu)
        menu.add_command(label="Mailbox", command = self.mailboxMenu)
        #menu.add_command(label="Help")
        self.root.config(menu=menu)
        self.root.geometry("500x500")
        # One status string per known public key (index 0 = the gateway).
        self.activeList = []
        keys = os.environ['GATE_PUB_KEYS'].split(',')
        for x in keys:
            self.activeList.append('REGISTERED')
        #print(self.activeList)
        self.pack()
        self.startScreen()

    def stop(self):
        """Stop the controller and terminate the kit process, if running."""
        try:
            print(self.kit.is_alive())
            print(self.controller.running)
            if(self.kit.is_alive() == True):
                self.controller.stop()
                self.kit.terminate()
                self.kit.join()
        except:
            # self.kit only exists once startScreen() has run.
            print('kit undefined')

    def get_env(self):
        """(Re)load configuration from the .env file into os.environ."""
        print 'info:main: loading .env file'
        env.load('.env')

    def startScreen(self):
        """Drive first-run flow: activation, password, kit start, pairing."""
        var = IntVar()
        newact = False
        while(not self.active_check()):
            startwin = Frame(self.root)
            startwin.pack()
            message = Message(startwin, text="This gateway has not been activated. Please use Its Here App to activate.", width=500)
            message.pack()
            button = Button(startwin, text="Show Activation Code", command=lambda: var.set(1))
            button.pack()
            button.wait_variable(var)
            self.activate()
            startwin.destroy()
            newact = True
        pw = ''
        if newact:
            # Fresh activation: let the user choose a password.
            pw = self.create_pw()
        else:
            # Existing install: prompt until the password decrypts the secret.
            while(self.check_pw(pw) == False):
                try:
                    pw = self.enter_pw()
                except:
                    print('password invalid')
        """
        self.controller.setup(pw)
        self.controller.start()
        self.pw = pw
        """
        self.kit = mp.Process(target=lambda: self.kit_start(pw,self.controller))
        self.kit_setup(pw)
        self.pw = pw
        #self.sensor = mp.Process(target=lambda: self.sensor_start())
        #self.sensor_setup()
        if(os.environ['KIT_REGISTERED'] != '1'):
            self.send_register()
            s = open('.env').read()
            s = s.replace("KIT_REGISTERED=", "KIT_REGISTERED=1")
            f = open('.env', 'w')
            f.write(s)
            f.close()
            self.get_env()
        #time.sleep(30)
        while(self.mailbox_check() <= 0):
            print('?')
            self.mailbox_setup(1)
            #self.scan_mailbox(1)
            time.sleep(1)
        #restart kit
        self.mailboxMenu()
        #self.adminMenu()

    def active_check(self):
        """Return True once channel, device id and device key are all set."""
        return os.environ['KIT_CHANNEL'] != '' and os.environ['KIT_DEVICE_ID'] != '' and os.environ['KIT_DEVICE_KEY'] != ''

    def activate(self):
        """Show the activation QR code and run the server handshake."""
        gID = self.get_gatewayID()
        if(not self.active_check()):
            self.display_qr(gID, 'Gateway')
            self.handle_activate(gID)

    def get_gatewayID(self):
        """Return the gateway's public key, generating a keypair on first use."""
        if(os.environ['GATE_PUB_KEYS'] == ''):
            self.gen_key()
        return os.environ['GATE_PUB_KEYS'].split(',')[0]

    def gen_key(self):
        """Create an ed25519 keypair; store pubkey in .env, seed on disk."""
        priv, verifying_key = ed25519.create_keypair()
        print(verifying_key)
        vkey_hex = verifying_key.to_ascii(encoding="hex")
        s = open('.env').read()
        s = s.replace("GATE_PUB_KEYS=", "GATE_PUB_KEYS="+vkey_hex)
        f = open('.env', 'w')
        f.write(s)
        f.close()
        self.get_env()
        # The private seed never enters .env - only the GATE_SEED_PATH file.
        open(os.environ['GATE_SEED_PATH'],"wb").write(priv.to_bytes())
        vkey = os.environ['GATE_PUB_KEYS'].split(',')[0]
        print "the public key is", vkey

    def display_qr(self, id, box):
        """Pop up a modal window showing *id* as a QR code for *box*."""
        var = IntVar()
        actwin = Toplevel(self.root)
        actwin.title('Activate' + box)
        message = Message(actwin, text="Scan "+box+" code with mobile app", width=200)
        code_bmp = BitmapImage(data=self.gen_qr(id))
        code_bmp.config(background='white')
        qr = Label(actwin, image=code_bmp)
        button = Button(actwin, text="ok", command=lambda: var.set(1))
        message.pack(padx=5, pady=5)
        qr.pack()
        button.pack()
        button.wait_variable(var)
        actwin.destroy()

    def gen_qr(self, qr):
        """Return *qr* rendered as an XBM bitmap string (for BitmapImage)."""
        code = pyqrcode.create(qr)
        code_xbm = code.xbm(scale=5)
        return code_xbm

    def handle_activate(self, gID):
        """Run server-side activation and report success or failure."""
        if(self.display_wait(gID) and self.active_check()):
            self.display_activated()
        else:
            self.display_failure()
        return 0

    def display_wait(self, gID):
        """Show a wait dialog while the activation request is in flight."""
        waitwin = Toplevel(self.root)
        waitwin.title('Activate')
        message = Message(waitwin, text="Please wait...", width=200)
        message.pack(padx=5, pady=5)
        """
        i=0
        while(i < 20):
            self.root.update_idletasks()
            self.root.update()
            time.sleep(.25)
            i+=1
        """
        response = self.send_activate(gID)
        waitwin.destroy()
        return response

    def send_activate(self,gID):
        """POST a signed timestamp to /activate; store the returned identity.

        Returns True when the response contained gateway credentials and
        they were written to .env, False otherwise.
        """
        ts = int(time.time())
        sig = str(self.sign(str(ts)))
        #headers = json.loads(json.dumps({'Content-Type':'application/json','Authorization':sig}))
        headers = json.loads('{"Content-Type":"application/json","Authorization":"'+sig+'"}')
        payload = json.loads('{"publicKey":"'+self.get_gatewayID()+'","timestamp":"'+str(ts)+'"}')
        try:
            response = requests.post(self.url+"/activate",json=payload,headers=headers)
            json_response = response.json()
        except:
            return False
        #print(json_response['result']['deviceId'])
        try:
            data = json_response['result']['gateway']
            print(data)
            print('ok')
            return self.append_env(data['deviceId'],data['deviceKey'],data['channelId'])
        except:
            return False

    def sign(self, msg):
        """Sign *msg* with the gateway's ed25519 seed; return base64 signature."""
        keydata = open(os.environ['GATE_SEED_PATH'],"rb").read()
        signing_key = ed25519.SigningKey(keydata)
        sig = signing_key.sign(msg, encoding="base64")
        print "sig is:", sig
        return sig

    def append_env(self, devID, devKey, chanID):
        """Fill the empty KIT_* entries in .env with the server-issued identity."""
        s = open('.env').read()
        s = s.replace("KIT_CHANNEL=", "KIT_CHANNEL="+chanID)
        s = s.replace("KIT_DEVICE_ID=", "KIT_DEVICE_ID="+devID)
        s = s.replace("KIT_DEVICE_KEY=", "KIT_DEVICE_KEY="+devKey)
        f = open('.env', 'w')
        f.write(s)
        f.close()
        """
        for line in fileinput.input('.env', inplace = 1):
            if line == "KIT_CHANNEL":
                print line.replace("KIT_CHANNEL=", "KIT_CHANNEL="+chanID)
            if line == "KIT_DEVICE_ID":
                print line.replace("KIT_DEVICE_ID=", "KIT_DEVICE_ID="+devID)
            if line == "KIT_DEVICE_KEY":
                print line.replace("KIT_DEVICE_KEY=", "KIT_DEVICE_KEY="+devKey)
        """
        self.get_env()
        return self.active_check()

    def display_activated(self):
        """Modal confirmation that activation succeeded."""
        var = IntVar()
        actwin = Toplevel(self.root)
        actwin.title('Activate')
        message = Message(actwin, text="Gateway is now activated", width=200)
        button = Button(actwin, text="ok", command=lambda: var.set(1))
        message.pack(padx=5, pady=5)
        button.pack()
        button.wait_variable(var)
        actwin.destroy()

    def display_failure(self):
        """Modal notification that activation failed."""
        var = IntVar()
        actwin = Toplevel(self.root)
        actwin.title('Activate')
        message = Message(actwin, text="There was an issue during activation", width=200)
        button = Button(actwin, text="ok", command=lambda: var.set(1))
        message.pack(padx=5, pady=5)
        button.pack()
        button.wait_variable(var)
        actwin.destroy()

    def check_pw(self, _password):
        """Return True if *_password* decrypts the stored secret seed."""
        try:
            data = read_file(path=os.environ['KIT_SECRET_PATH'])
            seed = decrypt(data, _password)
            # Valid plaintext is expected to start with 'S' - presumably a
            # format marker of kit.crypto; TODO confirm.
            if seed[0] == 'S':
                return True
            else:
                return False
        except:
            return False
        raise Exception('Decryption of secret data failed, password incorrect!')  # NOTE(review): unreachable

    def create_pw(self):
        """Prompt for a new password twice until both entries match."""
        var = IntVar()
        pw = ''
        conpw = '.'
        while pw != conpw:
            pwwin = Frame(self.root)
            pwwin.pack()
            if(pw != ''):
                # Non-empty pw means a previous round failed to match.
                match = Message(pwwin, text="Entries did not match", width=500)
                match.pack()
            message = Message(pwwin, text="Please enter password:", width=500)
            message.pack()
            e = Entry(pwwin)
            e.pack()
            button = Button(pwwin, text="submit", command=lambda: var.set(1))
            button.pack()
            button.wait_variable(var)
            pw = e.get()
            pwwin.destroy()
            pwwin = Frame(self.root)
            pwwin.pack()
            message = Message(pwwin, text="Please confirm password:", width=500)
            message.pack()
            e = Entry(pwwin)
            e.pack()
            button = Button(pwwin, text="submit", command=lambda: var.set(1))
            button.pack()
            button.wait_variable(var)
            conpw = e.get()
            pwwin.destroy()
        return pw

    def enter_pw(self):
        """Prompt once for the existing password and return it."""
        var = IntVar()
        pwwin = Frame(self.root)
        pwwin.pack()
        message = Message(pwwin, text="Please enter password:", width=500)
        message.pack()
        e = Entry(pwwin)
        e.pack()
        button = Button(pwwin, text="submit", command=lambda: var.set(1))
        button.pack()
        button.wait_variable(var)
        pw = e.get()
        pwwin.destroy()
        return pw

    #starts kit as a multiprocess and sends register request to api
    def kit_setup(self, pw=None):
        """Launch the kit process created in startScreen and mark it active."""
        kitwin = Frame(self.root)
        kitwin.pack()
        Message(kitwin, text="Kit Starting. Please wait.").pack()
        self.kit.start()
        self.activeList[0] = 'ACTIVE'
        time.sleep(5)
        kitwin.destroy()
        print 'kit end'
        #self.send_register()

    def kit_start(self, pw=None, controller=Controller()):
        """Child-process entry point: run the controller until SIGINT.

        NOTE(review): the default Controller() is constructed once when
        the class is defined and shared between calls - confirm intended.
        """
        print('kit_setup')
        #controller = self.controller
        #controller = Controller()
        # Handle SIGINT and close the controller.
        def signal_handler(sig, frame):
            info('main', 'sigint')
            controller.stop()
            time.sleep(1)
            print 'Goodbye.'
            sys.exit(0)
        signal.signal(signal.SIGINT, signal_handler)
        # Load the configuration dictionary.
        print 'info:main: loading .env file'
        env.load('.env')
        # Setup our controller object and start it.
        controller.setup(pw)
        controller.start()
        # Wait for SIGINT.
        signal.pause()
        return 0

    def send_register(self):
        """POST a signed timestamp to /connect to register this gateway."""
        ts = int(time.time())
        sig = str(self.sign(str(ts)))
        headers = json.loads('{"Content-Type":"application/json","Authorization":"'+sig+'"}')
        payload = json.loads('{"publicKey":"'+self.get_gatewayID()+'","timestamp":"'+str(ts)+'"}')
        try:
            response = requests.post(self.url+"/connect",json=payload,headers=headers)
        except:
            return False
        print(response)
        return 0

    def mailbox_check(self):
        """Return the number of paired mailboxes (keys beyond the gateway's)."""
        channel = os.environ['GATE_PUB_KEYS']
        topics = channel.split(',')
        return len(topics) - 1

    def mailbox_setup(self, boxNum):
        """Pair mailbox number *boxNum*: scan for its key, then fetch its channel.

        Spawns scan_mailbox in a child process while the GUI waits for the
        user to confirm the mobile-app scan; on success appends the new
        channel to .env and restarts the kit process.
        """
        self.currentWin = 'MAILBOX_SETUP'
        var = IntVar()
        boxwin = Frame(self.root)
        boxwin.pack()
        message = Message(boxwin, text="Please use Its Here App to scan the Activation Code on the Mailbox. Only click continue once the code is scanned.", width=500)
        message.pack()
        button = Button(boxwin, text="Continue", command=lambda: var.set(1))
        button.pack()
        #wait for message on inbound
        scan = mp.Process(target=lambda: self.scan_mailbox(boxNum))
        scan.start()
        print(var.get())
        button.wait_variable(var)
        scan.terminate()
        scan.join()
        unsubscribe()
        boxwin.destroy()
        if(self.mailbox_check() >= boxNum):
            # Key already registered by the scan process; nothing else to do.
            return
        else:
            boxChan = self.get_box_channel(boxNum)
            if(boxChan != ''):
                s = open('.env').read()
                s = s.replace("KIT_CHANNEL="+os.environ['KIT_CHANNEL'], "KIT_CHANNEL="+os.environ['KIT_CHANNEL']+','+boxChan)
                f = open('.env', 'w')
                f.write(s)
                f.close()
                self.get_env()
                # Restart the kit so it subscribes to the new channel.
                self.kit.terminate()
                self.kit.join()
                self.kit = mp.Process(target=lambda: self.kit_start(self.pw))
                self.kit.start()
                #self.sensor.terminate()
                #self.sensor.join()
                self.sensor = mp.Process(target=lambda: self.sensor_start())
                self.sensor.start()
                self.activeList.append('REGISTERED')
                #self.sensor_setup(boxNum)
            else:
                return

    def scan_mailbox(self,boxNum):
        """Listen on the 'inbound' channel for an ADD message carrying a new
        mailbox public key, and append it to GATE_PUB_KEYS in .env.

        Runs in a child process; loops until mailbox_check() reaches boxNum.
        """
        print('scan')
        def handle_mailbox(msg):
            print('in here............')
            try:
                info('handle', str(msg))
                name = msg.get_name()
                print(name)
                if name == 'ADD':
                    boxinfo = msg.get_str()
                    print(boxinfo)
                    if self.mailbox_exists(boxinfo):
                        # Duplicate pairing attempt - ignore.
                        print('mailbox_exists')
                        return
                    #boxinfo[0] = Pub_Key
                    s = open('.env').read()
                    s = s.replace("GATE_PUB_KEYS="+os.environ['GATE_PUB_KEYS'], "GATE_PUB_KEYS="+os.environ['GATE_PUB_KEYS']+','+boxinfo)
                    #s = s.replace("KIT_CHANNEL="+os.environ['KIT_CHANNEL'], "KIT_CHANNEL="+os.environ['KIT_CHANNEL']+','+self.get_box_channel(boxNum))
                    f = open('.env', 'w')
                    f.write(s)
                    f.close()
                    #look up activate to get mailbox channel
                    self.get_env()
                    print('added', name, str(msg))
            except Exception as ex:
                error('handle', str(ex))
        subscribe(fn=handle_mailbox, channel='inbound')
        while(self.mailbox_check()<boxNum):
            get_message()
            #time.sleep(.5)
        unsubscribe()

    def mailbox_exists(self, boxKey):
        """Return True if *boxKey* is already a registered public key."""
        keys = os.environ['GATE_PUB_KEYS'].split(',')
        for x in keys:
            if boxKey == x:
                return True
        return False

    def get_box_channel(self, boxNum):
        """Fetch the pub/sub channel id of mailbox *boxNum* (1-based) from
        the /activate endpoint; return '' on any failure.
        """
        ts = int(time.time())
        sig = str(self.sign(str(ts)))
        #headers = json.loads(json.dumps({'Content-Type':'application/json','Authorization':sig}))
        headers = json.loads('{"Content-Type":"application/json","Authorization":"'+sig+'"}')
        payload = json.loads('{"publicKey":"'+self.get_gatewayID()+'","timestamp":"'+str(ts)+'"}')
        try:
            response = requests.post(self.url+"/activate",json=payload,headers=headers)
            json_response = response.json()
        except:
            return ''
        #print(json_response['result']['deviceId'])
        try:
            # boxNum is 1-based; mailboxes[0] is the first paired mailbox.
            data = json_response['result']['mailboxes'][boxNum-1]
            print(data)
            print('ok')
            return data['channelId']
        except:
            return ''

    def sensor_setup(self):
        """Start the previously created sensor process (self.sensor)."""
        print('sensor_setup')
        #subprocess.call("safebox/sensor.py", shell=True)
        self.sensor.start()

    def sensor_start(self):
        """Run the sensor helper script; blocks until the script exits."""
        print('sensor_start')
        os.system('python safebox/sensor.py')
        return 0

    def adminMenu(self):
        """Show the admin window; prompts for activation if not yet active."""
        kitwin = Toplevel(self.root)
        if self.active_check():
            # NOTE(review): button has no command bound - admin start is a stub.
            button = Button(kitwin, text="Start admin")
            button.pack()
        else:
            kitwin.title('Kit')
            message = Message(kitwin, text="Please Activate Gateway", width=200)
            button = Button(kitwin, text="ok", command=kitwin.destroy)
            message.pack(padx=5, pady=5)
            button.pack()

    def mailboxMenu(self):
        """List registered mailboxes and offer to pair a new one.

        Falls back to an "activate first" dialog when the gateway is not
        active, no mailbox is paired yet, or the menu is already open.
        """
        if self.active_check() and self.mailbox_check()>0 and self.currentWin != 'MAILBOX':
            self.currentWin = 'MAILBOX'
            var = IntVar()
            print('Mailbox Menu')
            kitwin = Frame(self.root)
            kitwin.pack()
            i = 1
            listbox = Listbox(kitwin)
            listbox.pack()
            # activeList[0] is the gateway itself; mailboxes start at index 1.
            while(i<=self.mailbox_check()):
                boxText = "Mailbox "+str(i)+": "+self.activeList[i]
                listbox.insert(END, boxText)
                i+=1
            newMailbox = Button(kitwin, text="Add Mailbox", command=lambda: [kitwin.destroy(), self.mailbox_setup(self.mailbox_check()+1)])
            newMailbox.pack()
            button = Button(kitwin, text="Done", command=lambda: var.set(1))
            button.pack()
            button.wait_variable(var)
            self.currentWin = 'START'
            kitwin.destroy()
        else:
            kitwin = Toplevel(self.root)
            kitwin.title('Kit')
            message = Message(kitwin, text="Please Activate Mailbox", width=200)
            button = Button(kitwin, text="ok", command=kitwin.destroy)
            message.pack(padx=5, pady=5)
            button.pack()
if __name__ == '__main__':
    root = Tk()
    #gate.get_env()
    gate = Gateway(root=root)
    try:
        gate.mainloop()
    except:
        # Tear down the window and the pub/sub subscription on any error
        # (including KeyboardInterrupt) so the process exits cleanly.
        root.destroy()
        unsubscribe()
| en | 0.351808 | #menu.add_command(label="Kit", command = self.adminMenu) #menu.add_command(label="Help") #print(self.activeList) self.controller.setup(pw) self.controller.start() self.pw = pw #self.sensor = mp.Process(target=lambda: self.sensor_start()) #self.sensor_setup() #time.sleep(30) #self.scan_mailbox(1) #restart kit #self.adminMenu() i=0 while(i < 20): self.root.update_idletasks() self.root.update() time.sleep(.25) i+=1 add signiture and ts #headers = json.loads(json.dumps({'Content-Type':'application/json','Authorization':sig})) #print(json_response['result']['deviceId']) for line in fileinput.input('.env', inplace = 1): if line == "KIT_CHANNEL": print line.replace("KIT_CHANNEL=", "KIT_CHANNEL="+chanID) if line == "KIT_DEVICE_ID": print line.replace("KIT_DEVICE_ID=", "KIT_DEVICE_ID="+devID) if line == "KIT_DEVICE_KEY": print line.replace("KIT_DEVICE_KEY=", "KIT_DEVICE_KEY="+devKey) #starts kit as a multiprocess and sends register request to api #self.send_register() #controller = self.controller #controller = Controller() # Handle SIGNIT and close the controller. # Load the configuration dictionary. # Setup our controller object and start it. # Wait for SIGINT. add signiture and ts #wait for message on inbound #self.sensor.terminate() #self.sensor.join() #self.sensor_setup(boxNum) #boxinfo[0] = Pub_Key #s = s.replace("KIT_CHANNEL="+os.environ['KIT_CHANNEL'], "KIT_CHANNEL="+os.environ['KIT_CHANNEL']+','+self.get_box_channel(boxNum)) #look up activate to get mailbox channel #time.sleep(.5) #headers = json.loads(json.dumps({'Content-Type':'application/json','Authorization':sig})) #print(json_response['result']['deviceId']) #subprocess.call("safebox/sensor.py", shell=True) #gate.get_env() | 2.179888 | 2 |
mlprogram/entrypoint/modules/torchnlp.py | HiroakiMikami/mlprogram | 9 | 6621167 | <reponame>HiroakiMikami/mlprogram<gh_stars>1-10
from torchnlp.encoders import LabelEncoder
types = {
"torchnlp.encoders.LabelEncoder": LabelEncoder
}
| from torchnlp.encoders import LabelEncoder
types = {
"torchnlp.encoders.LabelEncoder": LabelEncoder
} | none | 1 | 1.4252 | 1 | |
generators/__init__.py | mathpresso/qmwp | 14 | 6621168 | from generators.arithmetic import generate_arithmetic
from generators.combination import generate_combination
from generators.comparison import generate_comparison
from generators.figure import generate_figure
from generators.number import generate_numbers
from generators.ordering import generate_ordering
__all__ = [
'generate_arithmetic',
'generate_combination',
'generate_comparison',
'generate_figure',
'generate_numbers',
'generate_ordering',
]
| from generators.arithmetic import generate_arithmetic
from generators.combination import generate_combination
from generators.comparison import generate_comparison
from generators.figure import generate_figure
from generators.number import generate_numbers
from generators.ordering import generate_ordering
__all__ = [
'generate_arithmetic',
'generate_combination',
'generate_comparison',
'generate_figure',
'generate_numbers',
'generate_ordering',
]
| none | 1 | 2.028611 | 2 | |
Model/DataObject/SendingData/SendingTarget.py | MaximeGLegault/UI-Debug | 2 | 6621169 | # Under MIT License, see LICENSE.txt
from Model.DataObject.BaseDataObject import catch_format_error
from Model.DataObject.SendingData.BaseDataSending import BaseDataSending
__author__ = 'RoboCupULaval'
class SendingTarget(BaseDataSending):
def __init__(self, data_in=None):
super().__init__(data_in)
self._format_data()
@catch_format_error
def _check_obligatory_data(self):
assert isinstance(self.data, dict), \
"data: {} n'est pas un dictionnaire.".format(type(self.data))
keys = self.data.keys()
assert 'target' in keys, \
"data['target'] n'existe pas."
assert self._point_is_valid(self.data['target']), \
"data['target']: {} n'est pas un point valide (int, int)".format(type(self.data['target']))
assert 'id' in keys, \
"data['id'] n'existe pas."
assert isinstance(self.data['id'], int), \
"data['id']: {} n'a pas le format attendu (int)".format(type(self.data['id']))
@catch_format_error
def _check_optional_data(self):
pass
@staticmethod
def get_default_data_dict():
""" Retourne une dictionnaire de données par défaut """
return dict(zip(['target', 'id'],
[(0, 0), 0]))
@staticmethod
def get_type():
return 5003
| # Under MIT License, see LICENSE.txt
from Model.DataObject.BaseDataObject import catch_format_error
from Model.DataObject.SendingData.BaseDataSending import BaseDataSending
__author__ = 'RoboCupULaval'
class SendingTarget(BaseDataSending):
def __init__(self, data_in=None):
super().__init__(data_in)
self._format_data()
@catch_format_error
def _check_obligatory_data(self):
assert isinstance(self.data, dict), \
"data: {} n'est pas un dictionnaire.".format(type(self.data))
keys = self.data.keys()
assert 'target' in keys, \
"data['target'] n'existe pas."
assert self._point_is_valid(self.data['target']), \
"data['target']: {} n'est pas un point valide (int, int)".format(type(self.data['target']))
assert 'id' in keys, \
"data['id'] n'existe pas."
assert isinstance(self.data['id'], int), \
"data['id']: {} n'a pas le format attendu (int)".format(type(self.data['id']))
@catch_format_error
def _check_optional_data(self):
pass
@staticmethod
def get_default_data_dict():
""" Retourne une dictionnaire de données par défaut """
return dict(zip(['target', 'id'],
[(0, 0), 0]))
@staticmethod
def get_type():
return 5003
| fr | 0.989451 | # Under MIT License, see LICENSE.txt Retourne une dictionnaire de données par défaut | 2.421896 | 2 |
asanakoy/unet.py | chritter/kaggle_carvana_segmentation | 447 | 6621170 | import torch
import torch.nn as nn
class Unet4(nn.Module):
    """U-Net with a 4-stage encoder/decoder and a 1x1 output conv.

    Args:
        feature_scale: divisor applied to the per-stage channel counts.
        n_classes: number of output channels of the final 1x1 conv.
        is_deconv: use transposed convs for upsampling when True,
            bilinear upsampling otherwise.
        in_channels: number of channels of the input image.
        is_batchnorm: insert BatchNorm after every convolution.
        filters: optional per-stage channel counts (5 values).
    """

    def __init__(self, feature_scale=1, n_classes=1, is_deconv=True, in_channels=3,
                 is_batchnorm=True, filters=None):
        super(Unet4, self).__init__()
        self.is_deconv = is_deconv
        self.in_channels = in_channels
        self.is_batchnorm = is_batchnorm
        self.feature_scale = feature_scale
        if filters is None:
            filters = [64, 128, 256, 512, 1024]
        print('Unet4 filter sizes: %s' % filters)
        # Floor division keeps channel counts integral: plain `/` matches
        # Python 2 behavior here but yields floats on Python 3 and breaks
        # the Conv2d constructors.
        filters = [x // self.feature_scale for x in filters]
        self.down1 = UnetDown(self.in_channels, filters[0], self.is_batchnorm)
        self.down2 = UnetDown(filters[0], filters[1], self.is_batchnorm)
        self.down3 = UnetDown(filters[1], filters[2], self.is_batchnorm)
        self.down4 = UnetDown(filters[2], filters[3], self.is_batchnorm)
        self.center = UnetConvBlock(filters[3], filters[4], self.is_batchnorm)
        self.up4 = UnetUp(filters[4], filters[3], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up3 = UnetUp(filters[3], filters[2], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up2 = UnetUp(filters[2], filters[1], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up1 = UnetUp(filters[1], filters[0], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.final = nn.Conv2d(filters[0], n_classes, kernel_size=1)

    def forward(self, inputs):
        """Encode, pass through the bottleneck, then decode with skips."""
        res1, out = self.down1(inputs)
        res2, out = self.down2(out)
        res3, out = self.down3(out)
        res4, out = self.down4(out)
        out = self.center(out)
        out = self.up4(res4, out)
        out = self.up3(res3, out)
        out = self.up2(res2, out)
        out = self.up1(res1, out)
        return self.final(out)
class Unet5(nn.Module):
    """U-Net with a 5-stage encoder/decoder and a 1x1 output conv.

    Same layout as Unet4 with one extra down/up stage; see Unet4 for
    the meaning of the constructor arguments (filters has 6 values here).
    """

    def __init__(self, feature_scale=1, n_classes=1, is_deconv=True, in_channels=3,
                 is_batchnorm=True, filters=None):
        super(Unet5, self).__init__()
        self.is_deconv = is_deconv
        self.in_channels = in_channels
        self.is_batchnorm = is_batchnorm
        self.feature_scale = feature_scale
        if filters is None:
            filters = [64, 128, 256, 512, 1024, 1024]
        print('Unet5 filter sizes: %s' % filters)
        # Floor division keeps channel counts integral (Py3-safe; identical
        # to Py2 `/` for ints).
        filters = [x // self.feature_scale for x in filters]
        self.down1 = UnetDown(self.in_channels, filters[0], self.is_batchnorm)
        self.down2 = UnetDown(filters[0], filters[1], self.is_batchnorm)
        self.down3 = UnetDown(filters[1], filters[2], self.is_batchnorm)
        self.down4 = UnetDown(filters[2], filters[3], self.is_batchnorm)
        self.down5 = UnetDown(filters[3], filters[4], self.is_batchnorm)
        self.center = UnetConvBlock(filters[4], filters[5], self.is_batchnorm)
        self.up5 = UnetUp(filters[5], filters[4], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up4 = UnetUp(filters[4], filters[3], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up3 = UnetUp(filters[3], filters[2], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up2 = UnetUp(filters[2], filters[1], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up1 = UnetUp(filters[1], filters[0], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.final = nn.Conv2d(filters[0], n_classes, kernel_size=1)

    def forward(self, inputs):
        """Encode, pass through the bottleneck, then decode with skips."""
        res1, out = self.down1(inputs)
        res2, out = self.down2(out)
        res3, out = self.down3(out)
        res4, out = self.down4(out)
        res5, out = self.down5(out)
        out = self.center(out)
        out = self.up5(res5, out)
        out = self.up4(res4, out)
        out = self.up3(res3, out)
        out = self.up2(res2, out)
        out = self.up1(res1, out)
        return self.final(out)
class Unet(nn.Module):
    """U-Net with a 6-stage encoder/decoder and a 1x1 output conv.

    Same layout as Unet4 with two extra down/up stages; see Unet4 for
    the meaning of the constructor arguments (filters has 7 values here).
    """

    def __init__(self, feature_scale=1, n_classes=1, is_deconv=True, in_channels=3,
                 is_batchnorm=True, filters=None):
        super(Unet, self).__init__()
        self.is_deconv = is_deconv
        self.in_channels = in_channels
        self.is_batchnorm = is_batchnorm
        self.feature_scale = feature_scale
        if filters is None:
            filters = [32, 64, 128, 256, 512, 1024, 1024]
        print('Unet filter sizes: %s' % filters)
        # Floor division keeps channel counts integral (Py3-safe; identical
        # to Py2 `/` for ints).
        filters = [x // self.feature_scale for x in filters]
        self.down1 = UnetDown(self.in_channels, filters[0], self.is_batchnorm)
        self.down2 = UnetDown(filters[0], filters[1], self.is_batchnorm)
        self.down3 = UnetDown(filters[1], filters[2], self.is_batchnorm)
        self.down4 = UnetDown(filters[2], filters[3], self.is_batchnorm)
        self.down5 = UnetDown(filters[3], filters[4], self.is_batchnorm)
        self.down6 = UnetDown(filters[4], filters[5], self.is_batchnorm)
        self.center = UnetConvBlock(filters[5], filters[6], self.is_batchnorm)
        self.up6 = UnetUp(filters[6], filters[5], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up5 = UnetUp(filters[5], filters[4], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up4 = UnetUp(filters[4], filters[3], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up3 = UnetUp(filters[3], filters[2], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up2 = UnetUp(filters[2], filters[1], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up1 = UnetUp(filters[1], filters[0], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.final = nn.Conv2d(filters[0], n_classes, kernel_size=1)

    def forward(self, inputs):
        """Encode, pass through the bottleneck, then decode with skips."""
        res1, out = self.down1(inputs)
        res2, out = self.down2(out)
        res3, out = self.down3(out)
        res4, out = self.down4(out)
        res5, out = self.down5(out)
        res6, out = self.down6(out)
        out = self.center(out)
        out = self.up6(res6, out)
        out = self.up5(res5, out)
        out = self.up4(res4, out)
        out = self.up3(res3, out)
        out = self.up2(res2, out)
        out = self.up1(res1, out)
        return self.final(out)
# Alias kept for callers/configs that use the old name - presumably for
# backwards compatibility; TODO confirm before removing.
UNarrow = Unet
class Unet7(nn.Module):
    """U-Net with a 7-stage encoder/decoder and a 1x1 output conv.

    Same layout as Unet4 with three extra down/up stages; see Unet4 for
    the meaning of the constructor arguments (filters has 8 values here).
    """

    def __init__(self, feature_scale=1, n_classes=1, is_deconv=True, in_channels=3,
                 is_batchnorm=True, filters=None):
        super(Unet7, self).__init__()
        self.is_deconv = is_deconv
        self.in_channels = in_channels
        self.is_batchnorm = is_batchnorm
        self.feature_scale = feature_scale
        if filters is None:
            filters = [32, 64, 128, 256, 512, 1024, 1024, 2048]
        print('Unet7 filter sizes: %s' % filters)
        # Floor division keeps channel counts integral (Py3-safe; identical
        # to Py2 `/` for ints).
        filters = [x // self.feature_scale for x in filters]
        self.down1 = UnetDown(self.in_channels, filters[0], self.is_batchnorm)
        self.down2 = UnetDown(filters[0], filters[1], self.is_batchnorm)
        self.down3 = UnetDown(filters[1], filters[2], self.is_batchnorm)
        self.down4 = UnetDown(filters[2], filters[3], self.is_batchnorm)
        self.down5 = UnetDown(filters[3], filters[4], self.is_batchnorm)
        self.down6 = UnetDown(filters[4], filters[5], self.is_batchnorm)
        self.down7 = UnetDown(filters[5], filters[6], self.is_batchnorm)
        self.center = UnetConvBlock(filters[6], filters[7], self.is_batchnorm)
        self.up7 = UnetUp(filters[7], filters[6], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up6 = UnetUp(filters[6], filters[5], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up5 = UnetUp(filters[5], filters[4], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up4 = UnetUp(filters[4], filters[3], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up3 = UnetUp(filters[3], filters[2], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up2 = UnetUp(filters[2], filters[1], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up1 = UnetUp(filters[1], filters[0], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.final = nn.Conv2d(filters[0], n_classes, kernel_size=1)

    def forward(self, inputs):
        """Encode, pass through the bottleneck, then decode with skips."""
        res1, out = self.down1(inputs)
        res2, out = self.down2(out)
        res3, out = self.down3(out)
        res4, out = self.down4(out)
        res5, out = self.down5(out)
        res6, out = self.down6(out)
        res7, out = self.down7(out)
        out = self.center(out)
        out = self.up7(res7, out)
        out = self.up6(res6, out)
        out = self.up5(res5, out)
        out = self.up4(res4, out)
        out = self.up3(res3, out)
        out = self.up2(res2, out)
        out = self.up1(res1, out)
        return self.final(out)
class UnetConvBlock(nn.Module):
    """A stack of num_layers 3x3 conv (+ optional BatchNorm) + ReLU layers.

    The first conv maps in_size -> out_size channels; the remaining ones
    keep out_size. Spatial size is preserved (stride 1, padding 1).
    """

    def __init__(self, in_size, out_size, is_batchnorm, num_layers=2):
        super(UnetConvBlock, self).__init__()
        self.convs = nn.ModuleList()
        # Single loop replaces the two near-duplicate branches of the
        # original; `range` instead of `xrange` keeps this Python 3-safe.
        channels = in_size
        for _ in range(num_layers):
            layers = [nn.Conv2d(channels, out_size, 3, 1, padding=1)]
            if is_batchnorm:
                layers.append(nn.BatchNorm2d(out_size))
            layers.append(nn.ReLU())
            self.convs.append(nn.Sequential(*layers))
            channels = out_size

    def forward(self, inputs):
        """Apply the conv layers sequentially and return the result."""
        outputs = inputs
        for conv in self.convs:
            outputs = conv(outputs)
        return outputs
class UnetDown(nn.Module):
    """Encoder stage: a double conv block followed by 2x2 max-pooling.

    forward() returns both the pre-pool feature map (the skip connection
    for the decoder) and the pooled output fed to the next stage.
    """

    def __init__(self, in_size, out_size, is_batchnorm):
        super(UnetDown, self).__init__()
        self.conv = UnetConvBlock(in_size, out_size, is_batchnorm, num_layers=2)
        self.pool = nn.MaxPool2d(2, 2)

    def forward(self, inputs):
        skip = self.conv(inputs)
        return skip, self.pool(skip)
class UnetUp(nn.Module):
    """Decoder stage: upsample, concatenate with the skip tensor, conv.

    With is_deconv the upsampling is a learned 2x transposed conv and the
    merge block has 2 conv layers; otherwise bilinear upsampling with a
    3-layer merge block is used.
    """

    def __init__(self, in_size, out_size, is_deconv=False, residual_size=None, is_batch_norm=False):
        super(UnetUp, self).__init__()
        if residual_size is None:
            # By default the skip tensor is assumed to have out_size channels.
            residual_size = out_size
        if is_deconv:
            # TODO: fixme. Some dimensions could be wrong
            self.up = nn.ConvTranspose2d(in_size, in_size, kernel_size=2, stride=2)
            self.conv = UnetConvBlock(in_size + residual_size, out_size, is_batchnorm=is_batch_norm, num_layers=2)
        else:
            self.up = nn.Upsample(scale_factor=2, mode='bilinear')
            self.conv = UnetConvBlock(in_size + residual_size, out_size, is_batchnorm=is_batch_norm, num_layers=3)
            # print 'UnetUp convBlock::{}->{}'.format(in_size + residual_size, out_size)

    def forward(self, residual, previous):
        """Upsample *previous*, concat with *residual* on dim 1, and conv."""
        upsampled = self.up(previous)
        # print 'previous ({}) -> upsampled ({})'.format(previous.size()[1], upsampled.size()[1])
        # print 'residual.size(), upsampled.size()', residual.size(), upsampled.size()
        result = self.conv(torch.cat([residual, upsampled], 1))
        # print 'Result size:', result.size()
        return result
| import torch
import torch.nn as nn
class Unet4(nn.Module):
    """U-Net with a 4-stage encoder/decoder and a 1x1 output conv.

    Args:
        feature_scale: divisor applied to the per-stage channel counts.
        n_classes: number of output channels of the final 1x1 conv.
        is_deconv: use transposed convs for upsampling when True,
            bilinear upsampling otherwise.
        in_channels: number of channels of the input image.
        is_batchnorm: insert BatchNorm after every convolution.
        filters: optional per-stage channel counts (5 values).
    """

    def __init__(self, feature_scale=1, n_classes=1, is_deconv=True, in_channels=3,
                 is_batchnorm=True, filters=None):
        super(Unet4, self).__init__()
        self.is_deconv = is_deconv
        self.in_channels = in_channels
        self.is_batchnorm = is_batchnorm
        self.feature_scale = feature_scale
        if filters is None:
            filters = [64, 128, 256, 512, 1024]
        print('Unet4 filter sizes: %s' % filters)
        # Floor division keeps channel counts integral: plain `/` matches
        # Python 2 behavior here but yields floats on Python 3 and breaks
        # the Conv2d constructors.
        filters = [x // self.feature_scale for x in filters]
        self.down1 = UnetDown(self.in_channels, filters[0], self.is_batchnorm)
        self.down2 = UnetDown(filters[0], filters[1], self.is_batchnorm)
        self.down3 = UnetDown(filters[1], filters[2], self.is_batchnorm)
        self.down4 = UnetDown(filters[2], filters[3], self.is_batchnorm)
        self.center = UnetConvBlock(filters[3], filters[4], self.is_batchnorm)
        self.up4 = UnetUp(filters[4], filters[3], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up3 = UnetUp(filters[3], filters[2], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up2 = UnetUp(filters[2], filters[1], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up1 = UnetUp(filters[1], filters[0], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.final = nn.Conv2d(filters[0], n_classes, kernel_size=1)

    def forward(self, inputs):
        """Encode, pass through the bottleneck, then decode with skips."""
        res1, out = self.down1(inputs)
        res2, out = self.down2(out)
        res3, out = self.down3(out)
        res4, out = self.down4(out)
        out = self.center(out)
        out = self.up4(res4, out)
        out = self.up3(res3, out)
        out = self.up2(res2, out)
        out = self.up1(res1, out)
        return self.final(out)
class Unet5(nn.Module):
    """U-Net with a 5-stage encoder/decoder and a 1x1 output conv.

    Same layout as Unet4 with one extra down/up stage; see Unet4 for
    the meaning of the constructor arguments (filters has 6 values here).
    """

    def __init__(self, feature_scale=1, n_classes=1, is_deconv=True, in_channels=3,
                 is_batchnorm=True, filters=None):
        super(Unet5, self).__init__()
        self.is_deconv = is_deconv
        self.in_channels = in_channels
        self.is_batchnorm = is_batchnorm
        self.feature_scale = feature_scale
        if filters is None:
            filters = [64, 128, 256, 512, 1024, 1024]
        print('Unet5 filter sizes: %s' % filters)
        # Floor division keeps channel counts integral (Py3-safe; identical
        # to Py2 `/` for ints).
        filters = [x // self.feature_scale for x in filters]
        self.down1 = UnetDown(self.in_channels, filters[0], self.is_batchnorm)
        self.down2 = UnetDown(filters[0], filters[1], self.is_batchnorm)
        self.down3 = UnetDown(filters[1], filters[2], self.is_batchnorm)
        self.down4 = UnetDown(filters[2], filters[3], self.is_batchnorm)
        self.down5 = UnetDown(filters[3], filters[4], self.is_batchnorm)
        self.center = UnetConvBlock(filters[4], filters[5], self.is_batchnorm)
        self.up5 = UnetUp(filters[5], filters[4], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up4 = UnetUp(filters[4], filters[3], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up3 = UnetUp(filters[3], filters[2], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up2 = UnetUp(filters[2], filters[1], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up1 = UnetUp(filters[1], filters[0], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.final = nn.Conv2d(filters[0], n_classes, kernel_size=1)

    def forward(self, inputs):
        """Encode, pass through the bottleneck, then decode with skips."""
        res1, out = self.down1(inputs)
        res2, out = self.down2(out)
        res3, out = self.down3(out)
        res4, out = self.down4(out)
        res5, out = self.down5(out)
        out = self.center(out)
        out = self.up5(res5, out)
        out = self.up4(res4, out)
        out = self.up3(res3, out)
        out = self.up2(res2, out)
        out = self.up1(res1, out)
        return self.final(out)
class Unet(nn.Module):
    """U-Net with 6 encoder/decoder levels plus a bottleneck (Python 2 code).

    Default widths start at 32 (narrower than Unet4/Unet5), hence the
    `UNarrow` alias below. `feature_scale` divides each width in `filters`.
    """
    def __init__(self, feature_scale=1, n_classes=1, is_deconv=True, in_channels=3,
                 is_batchnorm=True, filters=None):
        super(Unet, self).__init__()
        self.is_deconv = is_deconv
        self.in_channels = in_channels
        self.is_batchnorm = is_batchnorm
        self.feature_scale = feature_scale
        if filters is None:
            filters = [32, 64, 128, 256, 512, 1024, 1024]
        print 'Unet filter sizes:', filters
        filters = [x / self.feature_scale for x in filters]
        # Encoder stages (each also returns the pre-pool skip tensor).
        self.down1 = UnetDown(self.in_channels, filters[0], self.is_batchnorm)
        self.down2 = UnetDown(filters[0], filters[1], self.is_batchnorm)
        self.down3 = UnetDown(filters[1], filters[2], self.is_batchnorm)
        self.down4 = UnetDown(filters[2], filters[3], self.is_batchnorm)
        self.down5 = UnetDown(filters[3], filters[4], self.is_batchnorm)
        self.down6 = UnetDown(filters[4], filters[5], self.is_batchnorm)
        self.center = UnetConvBlock(filters[5], filters[6], self.is_batchnorm)
        # Decoder stages fusing the matching skip tensors.
        self.up6 = UnetUp(filters[6], filters[5], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up5 = UnetUp(filters[5], filters[4], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up4 = UnetUp(filters[4], filters[3], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up3 = UnetUp(filters[3], filters[2], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up2 = UnetUp(filters[2], filters[1], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.up1 = UnetUp(filters[1], filters[0], self.is_deconv, is_batch_norm=self.is_batchnorm)
        self.final = nn.Conv2d(filters[0], n_classes, kernel_size=1)
    def forward(self, inputs):
        """Encode, run the bottleneck, decode with skip connections, classify."""
        res1, out = self.down1(inputs)
        res2, out = self.down2(out)
        res3, out = self.down3(out)
        res4, out = self.down4(out)
        res5, out = self.down5(out)
        res6, out = self.down6(out)
        out = self.center(out)
        out = self.up6(res6, out)
        out = self.up5(res5, out)
        out = self.up4(res4, out)
        out = self.up3(res3, out)
        out = self.up2(res2, out)
        out = self.up1(res1, out)
        return self.final(out)
# Backwards-compatible alias: the narrow (32-channel base) U-Net.
UNarrow = Unet
class Unet7(nn.Module):
    """U-Net with 7 encoder/decoder levels plus a bottleneck (Python 2 code).

    Deepest of the Unet* variants; default widths go up to 2048 in the
    bottleneck. `feature_scale` divides each width in `filters`.
    """
    def __init__(self, feature_scale=1, n_classes=1, is_deconv=True, in_channels=3,
                 is_batchnorm=True, filters=None):
        super(Unet7, self).__init__()
        self.is_deconv = is_deconv
        self.in_channels = in_channels
        self.is_batchnorm = is_batchnorm
        self.feature_scale = feature_scale
        if filters is None:
            filters = [32, 64, 128, 256, 512, 1024, 1024, 2048]
        print 'Unet7 filter sizes:', filters
        filters = [x / self.feature_scale for x in filters]
        # Encoder stages (each also returns the pre-pool skip tensor).
        self.down1 = UnetDown(self.in_channels, filters[0], self.is_batchnorm)
        self.down2 = UnetDown(filters[0], filters[1], self.is_batchnorm)
        self.down3 = UnetDown(filters[1], filters[2], self.is_batchnorm)
        self.down4 = UnetDown(filters[2], filters[3], self.is_batchnorm)
        self.down5 = UnetDown(filters[3], filters[4], self.is_batchnorm)
        self.down6 = UnetDown(filters[4], filters[5], self.is_batchnorm)
        self.down7 = UnetDown(filters[5], filters[6], self.is_batchnorm)
        self.center = UnetConvBlock(filters[6], filters[7], self.is_batchnorm)
        # Decoder stages fusing the matching skip tensors.
        self.up7 = UnetUp(filters[7], filters[6], self.is_deconv,
                          is_batch_norm=self.is_batchnorm)
        self.up6 = UnetUp(filters[6], filters[5], self.is_deconv,
                          is_batch_norm=self.is_batchnorm)
        self.up5 = UnetUp(filters[5], filters[4], self.is_deconv,
                          is_batch_norm=self.is_batchnorm)
        self.up4 = UnetUp(filters[4], filters[3], self.is_deconv,
                          is_batch_norm=self.is_batchnorm)
        self.up3 = UnetUp(filters[3], filters[2], self.is_deconv,
                          is_batch_norm=self.is_batchnorm)
        self.up2 = UnetUp(filters[2], filters[1], self.is_deconv,
                          is_batch_norm=self.is_batchnorm)
        self.up1 = UnetUp(filters[1], filters[0], self.is_deconv,
                          is_batch_norm=self.is_batchnorm)
        self.final = nn.Conv2d(filters[0], n_classes, kernel_size=1)
    def forward(self, inputs):
        """Encode, run the bottleneck, decode with skip connections, classify."""
        res1, out = self.down1(inputs)
        res2, out = self.down2(out)
        res3, out = self.down3(out)
        res4, out = self.down4(out)
        res5, out = self.down5(out)
        res6, out = self.down6(out)
        res7, out = self.down7(out)
        out = self.center(out)
        out = self.up7(res7, out)
        out = self.up6(res6, out)
        out = self.up5(res5, out)
        out = self.up4(res4, out)
        out = self.up3(res3, out)
        out = self.up2(res2, out)
        out = self.up1(res1, out)
        return self.final(out)
class UnetConvBlock(nn.Module):
    """Stack of `num_layers` 3x3 conv (+ optional BatchNorm) + ReLU layers.

    The first layer maps in_size -> out_size; the remaining layers keep
    out_size. Python 2 code (xrange).
    """
    def __init__(self, in_size, out_size, is_batchnorm, num_layers=2):
        super(UnetConvBlock, self).__init__()
        self.convs = nn.ModuleList()
        if is_batchnorm:
            # First layer changes the channel count.
            conv = nn.Sequential(nn.Conv2d(in_size, out_size, 3, 1, padding=1),
                                 nn.BatchNorm2d(out_size),
                                 nn.ReLU())
            self.convs.append(conv)
            # Remaining layers preserve the channel count.
            for i in xrange(1, num_layers):
                conv = nn.Sequential(nn.Conv2d(out_size, out_size, 3, 1, padding=1),
                                     nn.BatchNorm2d(out_size),
                                     nn.ReLU())
                self.convs.append(conv)
        else:
            conv = nn.Sequential(nn.Conv2d(in_size, out_size, 3, 1, padding=1),
                                 nn.ReLU())
            self.convs.append(conv)
            for i in xrange(1, num_layers):
                conv = nn.Sequential(nn.Conv2d(out_size, out_size, 3, 1, padding=1),
                                     nn.ReLU())
                self.convs.append(conv)
    def forward(self, inputs):
        """Apply the conv layers in sequence."""
        outputs = inputs
        for conv in self.convs:
            outputs = conv(outputs)
        return outputs
class UnetDown(nn.Module):
    """Encoder stage: a 2-layer conv block followed by 2x2 max-pooling.

    forward returns (pre-pool features for the skip connection, pooled output).
    """
    def __init__(self, in_size, out_size, is_batchnorm):
        super(UnetDown, self).__init__()
        self.conv = UnetConvBlock(in_size, out_size, is_batchnorm, num_layers=2)
        self.pool = nn.MaxPool2d(2, 2)
    def forward(self, inputs):
        """Convolve, then halve the spatial resolution; keep both tensors."""
        residual = self.conv(inputs)
        outputs = self.pool(residual)
        return residual, outputs
class UnetUp(nn.Module):
    """Decoder stage: upsample `previous`, concat the skip tensor, convolve.

    in_size channels are upsampled, concatenated with `residual_size`
    channels (defaults to out_size), and reduced to out_size by the conv
    block.
    """
    def __init__(self, in_size, out_size, is_deconv=False, residual_size=None, is_batch_norm=False):
        super(UnetUp, self).__init__()
        if residual_size is None:
            residual_size = out_size
        if is_deconv:
            # TODO: fixme. Some dimensions could be wrong
            self.up = nn.ConvTranspose2d(in_size, in_size, kernel_size=2, stride=2)
            self.conv = UnetConvBlock(in_size + residual_size, out_size, is_batchnorm=is_batch_norm, num_layers=2)
        else:
            # Bilinear upsampling keeps the channel count, so the conv block
            # still consumes in_size + residual_size channels (3 layers here).
            self.up = nn.Upsample(scale_factor=2, mode='bilinear')
            self.conv = UnetConvBlock(in_size + residual_size, out_size, is_batchnorm=is_batch_norm, num_layers=3)
        # print 'UnetUp convBlock::{}->{}'.format(in_size + residual_size, out_size)
    def forward(self, residual, previous):
        """Upsample `previous` and fuse it with the skip tensor `residual`."""
        upsampled = self.up(previous)
        # print 'previous ({}) -> upsampled ({})'.format(previous.size()[1], upsampled.size()[1])
        # print 'residual.size(), upsampled.size()', residual.size(), upsampled.size()
        result = self.conv(torch.cat([residual, upsampled], 1))
        # print 'Result size:', result.size()
        return result
| en | 0.359729 | # TODO: fixme. Some dimensions could be wrong # print 'UnetUp convBlock::{}->{}'.format(in_size + residual_size, out_size) # print 'previous ({}) -> upsampled ({})'.format(previous.size()[1], upsampled.size()[1]) # print 'residual.size(), upsampled.size()', residual.size(), upsampled.size() # print 'Result size:', result.size() | 2.225243 | 2 |
awslambda_lookup/exceptions.py | ITProKyle/runway-hook-awslambda | 1 | 6621171 | <gh_stars>1-10
"""High-level exceptions."""
from __future__ import annotations
from runway.cfngin.exceptions import CfnginError
class CfnginOnlyLookupError(CfnginError):
    """Attempted to use a CFNgin lookup outside of CFNgin."""

    lookup_name: str  # name of the lookup that was invoked

    def __init__(self, lookup_name: str) -> None:
        """Record the offending lookup name and build the error message."""
        self.lookup_name = lookup_name
        template = "attempted to use CFNgin only lookup {} outside of CFNgin"
        self.message = template.format(lookup_name)
        super().__init__()
| """High-level exceptions."""
from __future__ import annotations
from runway.cfngin.exceptions import CfnginError
class CfnginOnlyLookupError(CfnginError):
"""Attempted to use a CFNgin lookup outside of CFNgin."""
lookup_name: str
def __init__(self, lookup_name: str) -> None:
"""Instantiate class."""
self.lookup_name = lookup_name
self.message = (
f"attempted to use CFNgin only lookup {lookup_name} outside of CFNgin"
)
super().__init__() | en | 0.873792 | High-level exceptions. Attempted to use a CFNgin lookup outside of CFNgin. Instantiate class. | 2.347157 | 2 |
viz/models/cycles_parallel/render.py | mepearson/Dash | 3 | 6621172 | ##### RENDER.PY #####
## FOR LIVE
from viz.utils import *
##
#styling
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css',
'https://codepen.io/chriddyp/pen/brPBPO.css']
# Options lists for cycles. Move to callback when metadata available
options_list=['end_planting_day','fertilizer_rate','start_planting_day', 'weed_fraction', 'total_biomass', 'root_biomass',
'grain_yield', 'forage_yield', 'ag_residue', 'harvest_index', 'potential_tr', 'actual_tr', 'soil_evap', 'total_n', 'root_n',
'grain_n', 'forage_n', '"cum._n_stress"', 'n_in_harvest', 'n_in_residue', 'n_concn_forage']
selected_options=['fertilizer_rate','start_planting_day', 'weed_fraction','grain_yield']
# Layout
def generate_layout(thread_id):
    """Build the Dash page layout for the parallel-coordinates view.

    thread_id pre-fills the thread input box. The component ids created
    here (dd_crop, dd_planting, dd_pcoptions, rs_year, dd_pcscale, btn-pc,
    cycles-map, cycles-pc, and the dcc.Store client-side data stores) are
    the targets of the callbacks defined below.
    """
    layout = html.Div([
        # Local Data Stores
        dcc.Store(id='cyclespc-s-filtdata'),
        dcc.Store(id='cyclespc-map-ids'),
        dcc.Store(id='cyclespc-map-selected'),
        # dcc.Store(id='cyclespc-s-settings'),
        # dcc.Store(id='cyclespc-cyclespc-s-sqldata'),
        #Page elements
        # Header row: title plus the thread-id input.
        html.Div([
            html.H3(['Parallel Coordinates Graph']),
            html.Label(['for MINT modeling thread: '],style={'float':'left'}),
            dcc.Input(id='thread_id', value=thread_id,style={'float':'left'}),
        ],className='row'),
        # Filter row: crop, planting start date, and axis selection.
        html.Div([
            html.Div([
                html.P('CROP'),
                dcc.Dropdown(id='dd_crop'),
            ],className='three columns'),
            html.Div([
                html.P('PLANTING START DATE'),
                dcc.Dropdown(id='dd_planting',multi=True),
            ],className='three columns'),
            # ],className='row'),
            # html.Div([
            html.Div([
                html.P('AXES:'),
                dcc.Dropdown(id='dd_pcoptions',
                    options=[dict(label=x, value=x) for x in sorted(options_list)],
                    value=selected_options,
                    multi=True),
            ],className='six columns'),
        ],className='row'),
        # Controls row: year range, color-scale column, build button.
        html.Div([
            html.Div([
                html.P('YEAR'),
                html.Div(id='div_rs_year',children=[dcc.RangeSlider(id='rs_year')]),
            ],className="six columns"),
            html.Div([
                html.P('SCALE:'),
                dcc.Dropdown(id='dd_pcscale',
                    options=[dict(label=x, value=x) for x in sorted(options_list)],
                    value=selected_options[0]
                ),
            ],className="three columns"),
            html.Div([
                html.Button('Build Parallel Coordinates', id='btn-pc',style={'margin':'30px'})
            ],className="three columns"),
        ],className="row"),
        # Output row: locations map (left) and parallel-coordinates graph (right).
        html.Div([
            html.Div([
                dcc.Loading(id='l-cycles-map',children=[
                    html.Div(id='cycles-map'),
                ],type="circle"),
            ],className="four columns"),
            html.Div([
                dcc.Loading(id='l-pc-graph',children=[
                    html.Div(id='cycles-pc')
                ],type="circle"),
            ],className="eight columns"),
        ],className="row"),
        html.Div([
            html.Div(id="cycles-datatable")
        ],className='row')
    ])
    return layout
# FUNCTIONS
def load_spatial_data(thread_id):
    """Return a DataFrame of distinct point locations for a thread, or None.

    Columns: threadid, lon, lat, id. Returns None for a missing, blank, or
    non-alphanumeric thread id, and when the thread has no 'Point' inputs.
    """
    # Guard clause replaces the original three-level nesting. Rejecting
    # anything that is not a plain alphanumeric token both validates user
    # input and keeps the interpolated SQL below injection-safe (isalnum()
    # is False for empty strings and strings containing spaces).
    if not thread_id or not thread_id.isalnum():
        return None
    # NOTE(review): prefer bound query parameters if the DB driver changes.
    spatial_query = """SELECT DISTINCT threadid, x as lon, y as lat,
    id from threads_inputs where threadid='{}' and spatial_type = 'Point';""".format(thread_id)
    spatial_df = pd.DataFrame(pd.read_sql(spatial_query, con))
    return None if spatial_df.empty else spatial_df
# Callbacks
# Build Map
@app.callback([Output('cycles-map', 'children'),Output('cyclespc-map-ids','data')],
              [Input('thread_id', 'value')],
              [State('cyclespc-map-ids','data')])
def update_output(thread_id, mapdata):
    """Render the locations map for a thread and store its point records.

    This callback has TWO Outputs, so every branch must return a 2-tuple
    (map children, locations data). Error branches return an explanatory
    message and an empty data store.
    """
    if thread_id == '':
        # BUG FIX: the original fell through after setting `kids` and hit
        # `return kids, locationsdata` with `locationsdata` undefined,
        # raising UnboundLocalError.
        return ['Please enter a thread ID'], []
    if ' ' in thread_id or not thread_id.isalnum():
        return ['Please enter a properly formatted threadid.'], []
    df = load_spatial_data(thread_id)
    if df is None:
        # BUG FIX: the original returned a single list here, but Dash
        # expects one value per declared Output.
        return ['This thread has no Spatial data'], []
    fig = px.scatter_mapbox(df, lat="lat", lon="lon",
                            color_discrete_sequence=["fuchsia"], zoom=6, height=300)
    fig.update_layout(mapbox_style="open-street-map")
    fig.update_layout(margin={"r":0,"t":0,"l":0,"b":0})
    kids = [html.P('Please select points from the map below using the plotly selection tools (box or lasso) located in the top right of the map.'),
            dcc.Graph(id='locations_map', figure=fig)]
    return kids, df.to_dict('records')
# Show result of selecting data with either box select or lasso
@app.callback(Output('cyclespc-map-selected','data'),
              [Input('locations_map','selectedData')],
              [State('cyclespc-map-selected','data')]
              )
def selectData(selectData, sData):
    """Persist the points chosen on the map (box/lasso select) to the store.

    `sData` (the previous store contents) is required by the State
    declaration but unused. Returns {} when nothing is selected.
    Removed the unused local `kids` from the original.
    """
    if selectData is None:
        return {}
    # One record per selected point, as produced by plotly's selectedData.
    return pd.DataFrame(selectData['points']).to_dict('records')
#Set Dropdown Values
@app.callback(
    [Output('dd_crop','options'),Output('dd_crop','value'),
     Output('dd_planting','options'), Output('dd_planting','value')
     ,Output('div_rs_year','children')],
    [
        # Input('cyclespc-s-settings','data'),
        Input(component_id='thread_id', component_property='value')
    ]
)
def set_dropdowns(thread_id):
    """Populate the crop/planting dropdowns and rebuild the year RangeSlider
    from the run metadata of the given thread.

    Removed the unused `year_options`/`testdiv` locals, returned the slider
    as a list (the original's trailing comma made it an accidental 1-tuple),
    and rejected non-alphanumeric ids before interpolating into SQL.
    """
    # Guard: empty input means no update; non-alphanumeric ids are rejected
    # to keep the str.format-built SQL below injection-safe.
    if not thread_id or not thread_id.isalnum():
        raise PreventUpdate
    tablename = 'cycles_0_9_4_alpha_runs'
    query = """SELECT crop_name, fertilizer_rate, start_planting_day, weed_fraction, start_year,end_year
    FROM {} where threadid = '{}';""".format(tablename, thread_id)
    df = pd.DataFrame(pd.read_sql(query, con))
    # Dropdown options, sorted for stable display.
    crops = df.crop_name.unique()
    crop_options = [dict(label=x, value=x) for x in sorted(crops)]
    planting_starts = df.start_planting_day.unique()
    planting_options = [dict(label=x, value=x) for x in sorted(planting_starts)]
    # Year range slider spanning the earliest start to the latest end year.
    start_year = df.start_year.min()
    end_year = df.end_year.max()
    yearslider = [dcc.RangeSlider(
        id='rs_year',
        min=start_year,
        max=end_year,
        marks={i: '{}'.format(i) for i in range(start_year, end_year + 1)},
        step=None,
        value=[end_year, (end_year + 1)],
        allowCross=False
    )]
    return [crop_options, crops[0],
            planting_options, planting_starts,
            yearslider]
@app.callback(
    Output('cycles-pc','children'),
    [Input('btn-pc', 'n_clicks'),Input('cyclespc-map-selected','data')]
    ,[State('dd_crop','value'),State('dd_planting','value'), State('rs_year','value')
    ,State('rs_year','min'),State('rs_year','max')
    ,State('dd_pcoptions','value'),State('dd_pcscale','value'),State('thread_id', 'value')
    ,State('cyclespc-map-ids','data')
    ]
)
def update_figure(n_clicks,selectedPoints,crop,planting,year,yearmin,yearmax,selectlist,scale,thread_id,mapData):
    """Build the parallel-coordinates graph when the build button is clicked.

    Joins the thread's run metadata (inputs) with its seasonal outputs,
    filtered by crop, planting day, year range and (optionally) the map
    points selected by the user, then renders px.parallel_coordinates
    colored by `scale`.

    NOTE(review): the SQL is assembled with str.format from callback state;
    dropdown values originate from the database, but thread_id is free text
    typed by the user — consider parameterized queries. `yearmin`/`yearmax`
    and the local `thread` are currently unused.
    """
    if n_clicks is None:
        raise PreventUpdate
    # Get Data filtered by top selections
    for item in (crop,planting,year):
        if item is None or item == '':
            # raise PreventUpdate
            return "Please ensure all variables are selected"
    ins = 'cycles_0_9_4_alpha_runs'
    outs = 'cycles_0_9_4_alpha_cycles_season'
    thread = "'" + thread_id + "'"
    # build select lists, correcting for database characterization of columns as text
    select_cols = 'crop'
    # Ensure the color column is always part of the selection; `selectlist`
    # is a per-invocation copy from Dash, so mutating it is safe.
    selectlist.append(scale)
    selectlist = list(sorted(set(selectlist)))
    if isinstance(selectlist, list):
        scols = "::numeric,".join(list(selectlist))
        if len(selectlist) > 0:
            select_cols = select_cols + ', ' + scols + '::numeric'
    # build lists for ints
    planting_list = ",".join(str(x) for x in list(planting))
    locations_filter=''
    if selectedPoints is not None:
        # Restrict to weather stations whose lat/lon match the map selection.
        md = pd.DataFrame(mapData)
        dfPoints = pd.DataFrame(selectedPoints)
        dfMap = pd.merge(md,dfPoints, left_on=['lat','lon'], right_on=['lat','lon'])
        cycles_weather_list = "','".join(dfMap.id.unique())
        cycles_weather_list = "'" + cycles_weather_list + "'"
        locations_filter = 'AND cycles_weather IN ({})'.format(cycles_weather_list)
    query="""SELECT {}
    FROM
    (
    select * from {}
    where threadid = '{}'
    AND crop_name LIKE '{}'
    AND start_planting_day IN ({})
    {}
    ) ins
    INNER JOIN
    (Select * from
    (SELECT *, EXTRACT(year FROM TO_DATE(date, 'YYYY-MM-DD')) as year
    FROM {}) o
    WHERE year >= {} and year <= {}
    ) outs
    ON ins.mint_runid = outs.mint_runid""".format(select_cols,ins,thread_id,crop,planting_list,locations_filter,outs,year[0],year[1])
    # get data filtered to settings
    figdata = pd.DataFrame(pd.read_sql(query,con))
    fig = px.parallel_coordinates(figdata, color=scale,
        # color_continuous_midpoint = figdata.loc[:,scale].median(),
        # color_continuous_scale=px.colors.diverging.Tealrose
        )
    pc = dcc.Graph(id='graphid',figure=fig)
    return pc
##### END RENDER.PY #####
| ##### RENDER.PY #####
## FOR LIVE
from viz.utils import *
##
#styling
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css',
'https://codepen.io/chriddyp/pen/brPBPO.css']
# Options lists for cycles. Move to callback when metadata available
options_list=['end_planting_day','fertilizer_rate','start_planting_day', 'weed_fraction', 'total_biomass', 'root_biomass',
'grain_yield', 'forage_yield', 'ag_residue', 'harvest_index', 'potential_tr', 'actual_tr', 'soil_evap', 'total_n', 'root_n',
'grain_n', 'forage_n', '"cum._n_stress"', 'n_in_harvest', 'n_in_residue', 'n_concn_forage']
selected_options=['fertilizer_rate','start_planting_day', 'weed_fraction','grain_yield']
# Layout
def generate_layout(thread_id):
layout = html.Div([
# Local Data Stores
dcc.Store(id='cyclespc-s-filtdata'),
dcc.Store(id='cyclespc-map-ids'),
dcc.Store(id='cyclespc-map-selected'),
# dcc.Store(id='cyclespc-s-settings'),
# dcc.Store(id='cyclespc-cyclespc-s-sqldata'),
#Page elements
html.Div([
html.H3(['Parallel Coordinates Graph']),
html.Label(['for MINT modeling thread: '],style={'float':'left'}),
dcc.Input(id='thread_id', value=thread_id,style={'float':'left'}),
],className='row'),
html.Div([
html.Div([
html.P('CROP'),
dcc.Dropdown(id='dd_crop'),
],className='three columns'),
html.Div([
html.P('PLANTING START DATE'),
dcc.Dropdown(id='dd_planting',multi=True),
],className='three columns'),
# ],className='row'),
# html.Div([
html.Div([
html.P('AXES:'),
dcc.Dropdown(id='dd_pcoptions',
options=[dict(label=x, value=x) for x in sorted(options_list)],
value=selected_options,
multi=True),
],className='six columns'),
],className='row'),
html.Div([
html.Div([
html.P('YEAR'),
html.Div(id='div_rs_year',children=[dcc.RangeSlider(id='rs_year')]),
],className="six columns"),
html.Div([
html.P('SCALE:'),
dcc.Dropdown(id='dd_pcscale',
options=[dict(label=x, value=x) for x in sorted(options_list)],
value=selected_options[0]
),
],className="three columns"),
html.Div([
html.Button('Build Parallel Coordinates', id='btn-pc',style={'margin':'30px'})
],className="three columns"),
],className="row"),
html.Div([
html.Div([
dcc.Loading(id='l-cycles-map',children=[
html.Div(id='cycles-map'),
],type="circle"),
],className="four columns"),
html.Div([
dcc.Loading(id='l-pc-graph',children=[
html.Div(id='cycles-pc')
],type="circle"),
],className="eight columns"),
],className="row"),
html.Div([
html.Div(id="cycles-datatable")
],className='row')
])
return layout
# FUNCTIONS
def load_spatial_data(thread_id):
if thread_id is not None:
if ' ' in thread_id:
return None
if thread_id.isalnum():
spatial_query = """SELECT DISTINCT threadid, x as lon, y as lat,
id from threads_inputs where threadid='{}' and spatial_type = 'Point';""".format(thread_id)
spatial_df = pd.DataFrame(pd.read_sql(spatial_query, con))
if spatial_df.empty:
return None
return spatial_df
return None
return None
# Callbacks
# Build Map
@app.callback([Output('cycles-map', 'children'),Output('cyclespc-map-ids','data')],
[Input('thread_id', 'value')],
[State('cyclespc-map-ids','data')])
def update_output(thread_id,mapdata):
if thread_id == '':
kids = ['Please enter a thread ID']
if ' ' in thread_id or thread_id.isalnum()==False:
kids = ['Please enter a properly formatted threadid.']
df=pd.DataFrame()
if thread_id.isalnum():
df = load_spatial_data(thread_id)
if df is None:
return ['This thread has no Spatial data']
fig = px.scatter_mapbox(df, lat="lat", lon="lon",
color_discrete_sequence=["fuchsia"], zoom=6, height=300)
fig.update_layout(mapbox_style="open-street-map")
fig.update_layout(margin={"r":0,"t":0,"l":0,"b":0})
kids = [html.P('Please select points from the map below using the plotly selection tools (box or lasso) located in the top right of the map.'),
dcc.Graph(id='locations_map', figure=fig)]
locationsdata = df.to_dict('records')
return kids, locationsdata
# Show result of selecting data with either box select or lasso
@app.callback(Output('cyclespc-map-selected','data'),
[Input('locations_map','selectedData')],
[State('cyclespc-map-selected','data')]
)
def selectData(selectData,sData):
kids = ''
if selectData is None:
return {}
dfPoints = pd.DataFrame(selectData['points'])
selectedData = dfPoints.to_dict('records')
return selectedData
#Set Dropdown Values
@app.callback(
[Output('dd_crop','options'),Output('dd_crop','value'),
Output('dd_planting','options'), Output('dd_planting','value')
,Output('div_rs_year','children')],
[
# Input('cyclespc-s-settings','data'),
Input(component_id='thread_id', component_property='value')
]
)
def set_dropdowns(thread_id):
if thread_id is None or thread_id == '':
raise PreventUpdate
tablename = 'cycles_0_9_4_alpha_runs'
query = """SELECT crop_name, fertilizer_rate, start_planting_day, weed_fraction, start_year,end_year
FROM {} where threadid = '{}';""".format(tablename,thread_id)
df = pd.DataFrame(pd.read_sql(query,con))
#dropdown options
crops = df.crop_name.unique()
crop_options = [dict(label=x, value=x) for x in sorted(crops)]
planting_starts = df.start_planting_day.unique()
planting_options =[dict(label=x, value=x) for x in sorted(planting_starts)]
#year range slider options
start_year = df.start_year.min()
end_year = df.end_year.max()
year_options = [dict(label=x, value=x) for x in range(start_year, end_year)]
testdiv = 'years: {} - {}'.format(start_year, end_year)
yearslider =dcc.RangeSlider(
id='rs_year',
min=start_year,
max=end_year,
marks={i: '{}'.format(i) for i in range(start_year,end_year+1)},
step=None,
value=[end_year,(end_year+1)],
allowCross=False
),
return [crop_options,crops[0],
planting_options,planting_starts,
yearslider]
@app.callback(
Output('cycles-pc','children'),
[Input('btn-pc', 'n_clicks'),Input('cyclespc-map-selected','data')]
,[State('dd_crop','value'),State('dd_planting','value'), State('rs_year','value')
,State('rs_year','min'),State('rs_year','max')
,State('dd_pcoptions','value'),State('dd_pcscale','value'),State('thread_id', 'value')
,State('cyclespc-map-ids','data')
]
)
def update_figure(n_clicks,selectedPoints,crop,planting,year,yearmin,yearmax,selectlist,scale,thread_id,mapData):
if n_clicks is None:
raise PreventUpdate
# Get Data filtered by top selections
for item in (crop,planting,year):
if item is None or item == '':
# raise PreventUpdate
return "Please ensure all variables are selected"
ins = 'cycles_0_9_4_alpha_runs'
outs = 'cycles_0_9_4_alpha_cycles_season'
thread = "'" + thread_id + "'"
# build select lists, correcting for database characterization of columns as text
select_cols = 'crop'
selectlist.append(scale)
selectlist = list(sorted(set(selectlist)))
if isinstance(selectlist, list):
scols = "::numeric,".join(list(selectlist))
if len(selectlist) > 0:
select_cols = select_cols + ', ' + scols + '::numeric'
# build lists for ints
planting_list = ",".join(str(x) for x in list(planting))
locations_filter=''
if selectedPoints is not None:
md = pd.DataFrame(mapData)
dfPoints = pd.DataFrame(selectedPoints)
dfMap = pd.merge(md,dfPoints, left_on=['lat','lon'], right_on=['lat','lon'])
cycles_weather_list = "','".join(dfMap.id.unique())
cycles_weather_list = "'" + cycles_weather_list + "'"
locations_filter = 'AND cycles_weather IN ({})'.format(cycles_weather_list)
query="""SELECT {}
FROM
(
select * from {}
where threadid = '{}'
AND crop_name LIKE '{}'
AND start_planting_day IN ({})
{}
) ins
INNER JOIN
(Select * from
(SELECT *, EXTRACT(year FROM TO_DATE(date, 'YYYY-MM-DD')) as year
FROM {}) o
WHERE year >= {} and year <= {}
) outs
ON ins.mint_runid = outs.mint_runid""".format(select_cols,ins,thread_id,crop,planting_list,locations_filter,outs,year[0],year[1])
# get data filtered to settings
figdata = pd.DataFrame(pd.read_sql(query,con))
fig = px.parallel_coordinates(figdata, color=scale,
# color_continuous_midpoint = figdata.loc[:,scale].median(),
# color_continuous_scale=px.colors.diverging.Tealrose
)
pc = dcc.Graph(id='graphid',figure=fig)
return pc
##### END RENDER.PY #####
| en | 0.5754 | ##### RENDER.PY ##### ## FOR LIVE ## #styling # Options lists for cycles. Move to callback when metadata available # Layout # Local Data Stores # dcc.Store(id='cyclespc-s-settings'), # dcc.Store(id='cyclespc-cyclespc-s-sqldata'), #Page elements # ],className='row'), # html.Div([ # FUNCTIONS SELECT DISTINCT threadid, x as lon, y as lat,
id from threads_inputs where threadid='{}' and spatial_type = 'Point'; # Callbacks # Build Map # Show result of selecting data with either box select or lasso #Set Dropdown Values # Input('cyclespc-s-settings','data'), SELECT crop_name, fertilizer_rate, start_planting_day, weed_fraction, start_year,end_year
FROM {} where threadid = '{}'; #dropdown options #year range slider options # Get Data filtered by top selections # raise PreventUpdate # build select lists, correcting for database characterization of columns as text # build lists for ints SELECT {}
FROM
(
select * from {}
where threadid = '{}'
AND crop_name LIKE '{}'
AND start_planting_day IN ({})
{}
) ins
INNER JOIN
(Select * from
(SELECT *, EXTRACT(year FROM TO_DATE(date, 'YYYY-MM-DD')) as year
FROM {}) o
WHERE year >= {} and year <= {}
) outs
ON ins.mint_runid = outs.mint_runid # get data filtered to settings # color_continuous_midpoint = figdata.loc[:,scale].median(), # color_continuous_scale=px.colors.diverging.Tealrose ##### END RENDER.PY ##### | 1.846877 | 2 |
tests/views.py | jockerz/Starlette-Login | 0 | 6621173 | from urllib.parse import parse_qsl
from starlette.requests import Request
from starlette.responses import (
HTMLResponse, RedirectResponse, PlainTextResponse, JSONResponse
)
from starlette_login.decorator import login_required, fresh_login_required
from starlette_login.utils import login_user, logout_user
from .decorators import admin_only
from .extension import login_manager
from .model import user_list
HOME_PAGE = "You are logged in as {{ user.username }}"
LOGIN_PAGE = """
<h4>{error}<h4>
<form method="POST">
<label>username <input name="username"></label>
<label>Password <input name="password" type="password"></label>
<button type="submit">Login</button>
</form>
"""
async def login_page(request: Request):
    """Render the login form; on POST, validate credentials and sign in."""
    error = ''
    if request.method == 'POST':
        raw = (await request.body()).decode()
        form = dict(parse_qsl(raw))
        account = user_list.get_by_username(form['username'])
        if not account:
            error = 'Invalid username'
        elif account.check_password(form['password']) is False:
            error = 'Invalid password'
        else:
            # Valid credentials: establish the session and go home.
            remember = bool(form.get('remember'))
            await login_user(request, account, remember)
            return RedirectResponse('/', 302)
    return HTMLResponse(LOGIN_PAGE.format(error=error))
async def logout_page(request: Request):
    """Log the current user out, or report that nobody is logged in."""
    if not request.user.is_authenticated:
        return PlainTextResponse('You not logged in')
    await logout_user(request)
    return PlainTextResponse('Logged out')
async def home_page(request: Request):
    """Greet the user by name when authenticated."""
    message = (
        f'You are logged in as {request.user.username}'
        if request.user.is_authenticated
        else 'You are not logged in'
    )
    return PlainTextResponse(content=message)
@login_required
async def protected_page(request: Request):
    """Login-protected page; echoes the authenticated username."""
    # getattr without a default intentionally raises if `user` is absent,
    # matching the original behavior.
    user = getattr(request, 'user')
    username = user.username if user is not None else None
    return PlainTextResponse(f'You are logged in as {username}')
@login_required
def sync_protected_page(request: Request):
    """Synchronous variant of the login-protected page."""
    message = f'You are logged in as {request.user.username}'
    return PlainTextResponse(message)
@login_required
def get_request_data(request: Request):
    """Dump the user's attributes, session and cookies for inspection."""
    payload = {
        'user': vars(request.user),
        'session': request.session,
        'cookie': request.cookies,
    }
    return JSONResponse(payload)
@fresh_login_required
def sync_fresh_login(request: Request):
    """Requires a fresh login; echoes cookies and session (sync variant)."""
    return JSONResponse({'cookie': request.cookies, 'session': request.session})
@fresh_login_required
async def async_fresh_login(request: Request):
    """Requires a fresh login; echoes cookies and session (async variant)."""
    return JSONResponse({'cookie': request.cookies, 'session': request.session})
@login_required
@admin_only
async def admin_only_page(request: Request):
    """Accessible only to authenticated admin users."""
    response_text = 'You are an admin'
    return PlainTextResponse(response_text)
def un_fresh_login(request: Request):
    """Mark the current session as not fresh, then echo cookies/session."""
    fresh_key = login_manager.config.SESSION_NAME_FRESH
    request.session[fresh_key] = False
    return JSONResponse({'cookie': request.cookies, 'session': request.session})
def clear_session(request: Request):
    """Remove all login-related keys from the session, then echo state.

    The key matching REMEMBER_COOKIE_NAME is deliberately preserved.
    Returns the remaining cookies and session contents as JSON.
    """
    for key in login_manager.config.session_keys:
        if key == login_manager.config.REMEMBER_COOKIE_NAME:
            continue
        # pop with a default replaces the original try/except KeyError: pass.
        request.session.pop(key, None)
    return JSONResponse({'cookie': request.cookies, 'session': request.session})
async def excluded(request: Request):
    """Endpoint excluded from the auth middleware; user may be unavailable."""
    try:
        current = request.user
    except AssertionError:
        # Ignore starlette(`AuthenticationMiddleware`) exception
        current = None
    return JSONResponse({'user': getattr(current, 'username', None)})
| from urllib.parse import parse_qsl
from starlette.requests import Request
from starlette.responses import (
HTMLResponse, RedirectResponse, PlainTextResponse, JSONResponse
)
from starlette_login.decorator import login_required, fresh_login_required
from starlette_login.utils import login_user, logout_user
from .decorators import admin_only
from .extension import login_manager
from .model import user_list
HOME_PAGE = "You are logged in as {{ user.username }}"
LOGIN_PAGE = """
<h4>{error}<h4>
<form method="POST">
<label>username <input name="username"></label>
<label>Password <input name="password" type="password"></label>
<button type="submit">Login</button>
</form>
"""
async def login_page(request: Request):
error = ''
if request.method == 'POST':
body = (await request.body()).decode()
data = dict(parse_qsl(body))
user = user_list.get_by_username(data['username'])
if not user:
error = 'Invalid username'
elif user.check_password(data['password']) is False:
error = 'Invalid password'
else:
await login_user(request, user, bool(data.get('remember')))
return RedirectResponse('/', 302)
return HTMLResponse(LOGIN_PAGE.format(error=error))
async def logout_page(request: Request):
if request.user.is_authenticated:
content = 'Logged out'
await logout_user(request)
else:
content = 'You not logged in'
return PlainTextResponse(content)
async def home_page(request: Request):
if request.user.is_authenticated:
content = f'You are logged in as {request.user.username}'
else:
content = 'You are not logged in'
return PlainTextResponse(content=content)
@login_required
async def protected_page(request: Request):
if getattr(request, 'user') is not None:
username = request.user.username
else:
username = None
return PlainTextResponse(f'You are logged in as {username}')
@login_required
def sync_protected_page(request: Request):
return PlainTextResponse(
f'You are logged in as {request.user.username}'
)
@login_required
def get_request_data(request: Request):
    """Dump the authenticated user's state, session and cookies as JSON."""
    payload = {
        'user': request.user.__dict__,
        'session': request.session,
        'cookie': request.cookies,
    }
    return JSONResponse(payload)
@fresh_login_required
def sync_fresh_login(request: Request):
    """Requires a *fresh* login; echoes cookies and session as JSON."""
    return JSONResponse({'cookie': request.cookies, 'session': request.session})
@fresh_login_required
async def async_fresh_login(request: Request):
    """Async variant of sync_fresh_login; echoes cookies and session."""
    return JSONResponse({'cookie': request.cookies, 'session': request.session})
@login_required
@admin_only
async def admin_only_page(request: Request):
    """Page restricted to authenticated admins (see admin_only decorator)."""
    message = 'You are an admin'
    return PlainTextResponse(message)
def un_fresh_login(request: Request):
    """Mark the current session as not fresh, then echo session state."""
    request.session[login_manager.config.SESSION_NAME_FRESH] = False
    return JSONResponse({'cookie': request.cookies, 'session': request.session})
def clear_session(request: Request):
    """Drop every login-related session key except the remember-cookie one."""
    remember_key = login_manager.config.REMEMBER_COOKIE_NAME
    for key in login_manager.config.session_keys:
        if key != remember_key:
            # pop with a default is the exception-free equivalent of the
            # try/except KeyError idiom.
            request.session.pop(key, None)
    return JSONResponse({'cookie': request.cookies, 'session': request.session})
async def excluded(request: Request):
    """Endpoint excluded from the auth middleware; tolerates a missing user."""
    try:
        user = request.user
    except AssertionError:
        # starlette's AuthenticationMiddleware asserts when it is not
        # installed; treat that as "no user".
        user = None
    return JSONResponse({'user': getattr(user, 'username', None)})
| en | 0.239969 | <h4>{error}<h4> <form method="POST"> <label>username <input name="username"></label> <label>Password <input name="password" type="password"></label> <button type="submit">Login</button> </form> # Ignore starlette(`AuthenticationMiddleware`) exception | 2.546739 | 3 |
BBNNet.py | yudasong/Reinforcement-Learning-Branch-and-Bound | 14 | 6621174 | import os
import shutil
import time
import random
import numpy as np
import math
import sys
sys.path.append('../../')
from utils import *
from pytorch_classification.utils import Bar, AverageMeter
from NeuralNet import NeuralNet
import tensorflow as tf
# Module-level hyper-parameters; dotdict allows attribute access (args.lr).
args = dotdict({
    'architecture': 'CNN',    # 'CNN', or (per NNetWrapper.__init__) apparently a tuple ('Sdepth_ResNet', <block type>)
    'lr': 0.001,              # Adam learning rate
    'dropout': 0.25,          # dropout rate for the fully connected heads (training only)
    'survival_decay': 0.0,    # stochastic-depth survival decay (Sdepth_ResNet only)
    'epochs': 15,
    'batch_size': 64,
    'num_channels': 64,       # base number of convolution filters
})
class NNetWrapper(NeuralNet):
    """Session-level wrapper around the TF graphs built below.

    Owns a tf.Session bound to the selected network's private graph and
    exposes train / predict / save_checkpoint / load_checkpoint.
    """

    def __init__(self, game):
        # Pick the architecture from the module-level args; 'Sdepth_ResNet'
        # is matched on args.architecture[0], i.e. a tuple like
        # ('Sdepth_ResNet', 'bottleneck') -- TODO confirm against callers.
        if args.architecture == 'CNN':
            self.nnet = CNN(game, args)
        elif args.architecture[0] == 'Sdepth_ResNet':
            self.nnet = Sdepth_ResNet(game, args)
        self.board_x, self.board_y = game.getBoardSize()
        self.action_size = game.getActionSize()
        self.sess = tf.Session(graph=self.nnet.graph)
        self.saver = None
        # NOTE(review): this throwaway session initializes the *default*
        # graph; the variables that matter are initialized on the next line.
        with tf.Session() as temp_sess:
            temp_sess.run(tf.global_variables_initializer())
        self.sess.run(tf.variables_initializer(self.nnet.graph.get_collection('variables')))

    def train(self, examples):
        """Run args.epochs of SGD over the training data.

        examples: list of examples, each example is of form (board, pi, v)
        """
        for epoch in range(args.epochs):
            print('EPOCH ::: ' + str(epoch + 1))
            data_time = AverageMeter()
            batch_time = AverageMeter()
            pi_losses = AverageMeter()
            v_losses = AverageMeter()
            end = time.time()

            bar = Bar('Training Net', max=int(len(examples) / args.batch_size))
            batch_idx = 0

            while batch_idx < int(len(examples) / args.batch_size):
                # Sample a batch uniformly *with replacement*.
                sample_ids = np.random.randint(len(examples), size=args.batch_size)
                boards, pis, vs = list(zip(*[examples[i] for i in sample_ids]))

                input_dict = {self.nnet.input_boards: boards, self.nnet.target_pis: pis, self.nnet.target_vs: vs, self.nnet.dropout: args.dropout, self.nnet.isTraining: True}

                # measure data loading time
                data_time.update(time.time() - end)

                # One optimizer step, then re-evaluate the losses for logging.
                self.sess.run(self.nnet.train_step, feed_dict=input_dict)
                pi_loss, v_loss = self.sess.run([self.nnet.loss_pi, self.nnet.loss_v], feed_dict=input_dict)
                pi_losses.update(pi_loss, len(boards))
                v_losses.update(v_loss, len(boards))

                # measure elapsed time
                batch_time.update(time.time() - end)
                end = time.time()
                batch_idx += 1

                # plot progress
                bar.suffix = '({batch}/{size}) Data: {data:.3f}s | Batch: {bt:.3f}s | Total: {total:} | ETA: {eta:} | Loss_pi: {lpi:.4f} | Loss_v: {lv:.3f}'.format(
                    batch=batch_idx,
                    size=int(len(examples) / args.batch_size),
                    data=data_time.avg,
                    bt=batch_time.avg,
                    total=bar.elapsed_td,
                    eta=bar.eta_td,
                    lpi=pi_losses.avg,
                    lv=v_losses.avg,
                )
                bar.next()
            bar.finish()

    def predict(self, board):
        """Return (policy probabilities, value) for a single board (np array)."""
        # Dropout off and isTraining False: pure inference pass.
        prob, v = self.sess.run([self.nnet.prob, self.nnet.v], feed_dict={self.nnet.input_boards: board, self.nnet.dropout: 0, self.nnet.isTraining: False})
        return prob[0], v[0]

    def save_checkpoint(self, folder='./checkpoints/', filename='ckpt_0'):
        """Save the session's variables to folder/<filename>pth.tar."""
        if isinstance(filename, tuple):
            # Allow ('prefix', n) checkpoint names.
            filename = filename[0] + str(filename[1])
        filename = filename + 'pth.tar'
        filepath = os.path.join(folder, filename)
        if not os.path.exists(folder):
            print("Make new directory {}".format(folder))
            os.mkdir(folder)
        else:
            print("Checkpoint Directory exists! ")
        if self.saver is None:  # identity test; `== None` was unidiomatic
            self.saver = tf.train.Saver(self.nnet.graph.get_collection('variables'))
        with self.nnet.graph.as_default():
            self.saver.save(self.sess, filepath)

    def load_checkpoint(self, folder='./checkpoints/', filename='ckpt_0'):
        """Restore the session's variables from folder/<filename>pth.tar.

        Raises FileNotFoundError when no checkpoint metadata exists there.
        """
        if isinstance(filename, tuple):
            filename = filename[0] + str(filename[1])
        filename = filename + 'pth.tar'
        filepath = os.path.join(folder, filename)
        if not os.path.exists(filepath + '.meta'):
            # FIX: `raise("...")` raised TypeError (a str is not an
            # exception); raise a real, catchable exception instead.
            raise FileNotFoundError("No saved model found in {}".format(filepath))
        print("load model " + filename)
        with self.nnet.graph.as_default():
            self.saver = tf.train.Saver()
            self.saver.restore(self.sess, filepath)
class CNN():
    """Feed-forward convolutional policy/value network.

    Builds a private tf.Graph: five conv+BN+ReLU layers followed by separate
    policy (pi / prob) and value (v) heads, plus the loss and Adam train ops
    created by calculate_loss().
    """

    def __init__(self, game, args):
        # game params
        self.board_x, self.board_y = game.getBoardSize()
        self.action_size = game.getActionSize()
        self.args = args

        # Renaming functions
        Relu = tf.nn.relu
        Tanh = tf.nn.tanh
        BatchNormalization = tf.layers.batch_normalization
        Dropout = tf.layers.dropout
        Dense = tf.layers.dense
        Softmax = tf.nn.softmax

        # Neural Net
        self.graph = tf.Graph()
        with self.graph.as_default():
            # FIX: the placeholder previously had shape [board_x, board_y]
            # (a single unbatched board) although NNetWrapper.train feeds a
            # *batch* of boards and Sdepth_ResNet already declares
            # [None, x, y]; add the leading batch dimension for consistency.
            self.input_boards = tf.placeholder(tf.float32, shape=[None, self.board_x, self.board_y])    # s: batch_size x board_x x board_y
            self.dropout = tf.placeholder(tf.float32)
            self.isTraining = tf.placeholder(tf.bool, name="is_training")

            x_image = tf.reshape(self.input_boards, [-1, self.board_x, self.board_y, 1])    # batch_size x board_x x board_y x 1
            conv1 = Relu(BatchNormalization(self.conv_3(x_image, args.num_channels), axis=3, training=self.isTraining))
            # Each of the four VALID 3x3 convs below shrinks both spatial
            # dims by 2, hence the (board - 8) factors in the reshapes below.
            conv2 = Relu(BatchNormalization(self.conv_3(conv1, args.num_channels*2, padding='VALID'), axis=3, training=self.isTraining))
            conv3 = Relu(BatchNormalization(self.conv_3(conv2, args.num_channels*4, padding='VALID'), axis=3, training=self.isTraining))
            conv4 = Relu(BatchNormalization(self.conv_3(conv3, args.num_channels*4, padding='VALID'), axis=3, training=self.isTraining))
            features = Relu(BatchNormalization(self.conv_3(conv4, args.num_channels*8, padding='VALID'), axis=3, training=self.isTraining))

            # Separate heads for the policy and the value estimate.
            pi_conv = Relu(BatchNormalization(self.conv_3(features, args.num_channels), axis = 3, training = self.isTraining))
            v_conv = Relu(BatchNormalization(self.conv_3(features, int(args.num_channels / 2)), axis = 3, training = self.isTraining))

            pi_flat = tf.reshape(pi_conv, [-1, args.num_channels * (self.board_x - 8) * (self.board_y - 8)])
            v_flat = tf.reshape(v_conv, [-1, int(args.num_channels / 2) * (self.board_x - 8) * (self.board_y - 8)])

            pi_fc = Dropout(Relu(BatchNormalization(Dense(pi_flat, 256), axis = 1, training = self.isTraining)), rate = self.dropout)
            v_fc = Dropout(Relu(BatchNormalization(Dense(v_flat, 128), axis = 1, training = self.isTraining)), rate = self.dropout)

            self.pi = Dense(pi_fc, self.action_size)        # raw policy logits, batch_size x action_size
            self.prob = Softmax(self.pi)
            self.v = Tanh(Dense(v_fc, 1))                   # value in [-1, 1]

            self.calculate_loss()

    def conv_3(self, x, out_channels, strides = (1, 1), padding = 'SAME'):
        """3x3 convolution helper (tuple default avoids a mutable default arg)."""
        return tf.layers.conv2d(x, out_channels, kernel_size = [3, 3], strides = strides, padding = padding)

    def calculate_loss(self):
        """Create target placeholders, the combined loss and the Adam step."""
        self.target_pis = tf.placeholder(tf.float32, shape=[None, self.action_size])
        self.target_vs = tf.placeholder(tf.float32, shape=[None])
        self.loss_pi = tf.losses.softmax_cross_entropy(self.target_pis, self.pi)
        self.loss_v = tf.losses.mean_squared_error(self.target_vs, tf.reshape(self.v, shape=[-1,]))
        self.total_loss = self.loss_pi + self.loss_v
        # Batch-norm moving statistics must be updated before each step.
        update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
        with tf.control_dependencies(update_ops):
            self.train_step = tf.train.AdamOptimizer(self.args.lr).minimize(self.total_loss)
class Sdepth_ResNet():
    """Residual policy/value network with stochastic depth.

    During training each res_block's branch is dropped with probability
    1 - survival_rate; at inference the branch is kept but rescaled by
    survival_rate (standard stochastic-depth behavior).
    """

    def __init__(self, game, args):
        # game params
        self.board_x, self.board_y = game.getBoardSize()
        self.action_size = game.getActionSize()
        self.args = args

        # Renaming functions
        Relu = tf.nn.relu
        Tanh = tf.nn.tanh
        BatchNormalization = tf.layers.batch_normalization
        Dropout = tf.layers.dropout
        Dense = tf.layers.dense
        Softmax = tf.nn.softmax

        # Neural Net
        def res_block(x, bottleneck_channels, out_channels, survival_rate):
            in_channels = x.get_shape()[3].value
            if in_channels == out_channels:
                strides = [1, 1]
                res = x
            else:
                # Channel change: downsample spatially and project the
                # shortcut with a 1x1 conv so shapes match for the add.
                strides = [2, 2]
                res = self.conv_1(x, out_channels, strides)
            survival_rate = tf.constant(survival_rate)

            def originblock():
                # FIX: the second conv previously read from `x` again, which
                # made the first conv dead code; chain it through `block`.
                block = Relu(BatchNormalization(self.conv_3(x, bottleneck_channels, strides), axis = 3, training = self.isTraining))
                block_out = Relu(BatchNormalization(self.conv_3(block, out_channels), axis = 3, training = self.isTraining))
                return block_out

            def bottleneck():
                # 1x1 reduce -> 3x3 (possibly strided) -> 1x1 expand.
                bottleneck_1 = Relu(BatchNormalization(self.conv_1(x, bottleneck_channels), axis = 3, training = self.isTraining))
                bottleneck_3 = Relu(BatchNormalization(self.conv_3(bottleneck_1, bottleneck_channels, strides), axis = 3, training = self.isTraining))
                bottleneck_out = BatchNormalization(self.conv_1(bottleneck_3, out_channels), axis = 3, training = self.isTraining)
                return bottleneck_out

            def training():
                def thru_block():
                    output = bottleneck() if args.architecture[1] == 'bottleneck' else originblock()
                    output = Relu(tf.add(output, res))
                    return output

                def skip_block():
                    # Stochastic depth: drop the residual branch entirely.
                    output = Relu(res)
                    return output

                survive = tf.random_uniform(shape = [], minval = 0., maxval = 1., dtype = tf.float32)
                survive = tf.less(survive, survival_rate)
                return tf.cond(survive, thru_block, skip_block)

            def testing():
                # Inference keeps the branch but rescales it by its survival
                # probability.
                output = tf.multiply(bottleneck() if args.architecture[1] == 'bottleneck' else originblock(), survival_rate)
                output = tf.add(output, res)
                return output

            return tf.cond(self.isTraining, training, testing)

        self.graph = tf.Graph()
        with self.graph.as_default():
            self.input_boards = tf.placeholder(tf.float32, shape=[None, self.board_x, self.board_y])    # s: batch_size x board_x x board_y
            self.dropout = tf.placeholder(tf.float32)
            self.isTraining = tf.placeholder(tf.bool, name="is_training")

            x_image = tf.reshape(self.input_boards, [-1, self.board_x, self.board_y, 1])    # batch_size x board_x x board_y x 1
            conv1 = Relu(BatchNormalization(self.conv_3(x_image, args.num_channels*2), axis=3, training=self.isTraining))
            resblock1 = res_block(conv1, args.num_channels, args.num_channels*4, 1.0)
            features = res_block(resblock1, args.num_channels*2, args.num_channels*8, 1.0 - args.survival_decay)

            pi_conv = Relu(BatchNormalization(self.conv_3(features, args.num_channels*2), axis = 3, training = self.isTraining))
            v_conv = Relu(BatchNormalization(self.conv_3(features, args.num_channels), axis = 3, training = self.isTraining))

            # NOTE(review): these flatten sizes assume (board - 8) spatial
            # dims, but the strided res blocks above halve the resolution
            # instead -- confirm against the board sizes actually used.
            pi_flat = tf.reshape(pi_conv, [-1, args.num_channels * 2 * (self.board_x - 8) * (self.board_y - 8)])
            v_flat = tf.reshape(v_conv, [-1, args.num_channels * (self.board_x - 8) * (self.board_y - 8)])

            pi_fc = Dropout(Relu(BatchNormalization(Dense(pi_flat, 256), axis = 1, training = self.isTraining)), rate = self.dropout)
            v_fc = Dropout(Relu(BatchNormalization(Dense(v_flat, 128), axis = 1, training = self.isTraining)), rate = self.dropout)

            self.pi = Dense(pi_fc, self.action_size)        # raw policy logits
            self.prob = Softmax(self.pi)
            self.v = Tanh(Dense(v_fc, 1))                   # value in [-1, 1]

            self.calculate_loss()

    def conv_3(self, x, out_channels, strides = (1, 1), padding = 'SAME'):
        """3x3 convolution helper (tuple default avoids a mutable default arg)."""
        return tf.layers.conv2d(x, out_channels, kernel_size = [3, 3], strides = strides, padding = padding)

    def conv_1(self, x, out_channels, strides = (1, 1), padding = 'SAME'):
        """1x1 convolution helper."""
        return tf.layers.conv2d(x, out_channels, kernel_size = [1, 1], strides = strides, padding = padding)

    def calculate_loss(self):
        """Create target placeholders, the combined loss and the Adam step."""
        self.target_pis = tf.placeholder(tf.float32, shape=[None, self.action_size])
        self.target_vs = tf.placeholder(tf.float32, shape=[None])
        self.loss_pi = tf.losses.softmax_cross_entropy(self.target_pis, self.pi)
        self.loss_v = tf.losses.mean_squared_error(self.target_vs, tf.reshape(self.v, shape=[-1,]))
        self.total_loss = self.loss_pi + self.loss_v
        # Batch-norm moving statistics must be updated before each step.
        update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
        with tf.control_dependencies(update_ops):
            self.train_step = tf.train.AdamOptimizer(self.args.lr).minimize(self.total_loss)
| import os
import shutil
import time
import random
import numpy as np
import math
import sys
sys.path.append('../../')
from utils import *
from pytorch_classification.utils import Bar, AverageMeter
from NeuralNet import NeuralNet
import tensorflow as tf
args = dotdict({
'architecture': 'CNN',
'lr': 0.001,
'dropout': 0.25,
'survival_decay': 0.0,
'epochs': 15,
'batch_size': 64,
'num_channels': 64,
})
class NNetWrapper(NeuralNet):
def __init__(self,game):
if args.architecture == 'CNN':
self.nnet = CNN(game, args)
elif args.architecture[0] == 'Sdepth_ResNet':
self.nnet = Sdepth_ResNet(game, args)
self.board_x, self.board_y = game.getBoardSize()
self.action_size = game.getActionSize()
self.sess = tf.Session(graph=self.nnet.graph)
self.saver = None
with tf.Session() as temp_sess:
temp_sess.run(tf.global_variables_initializer())
self.sess.run(tf.variables_initializer(self.nnet.graph.get_collection('variables')))
def train(self, examples):
"""
examples: list of examples, each example is of form (board, pi, v)
"""
for epoch in range(args.epochs):
print('EPOCH ::: ' + str(epoch+1))
data_time = AverageMeter()
batch_time = AverageMeter()
pi_losses = AverageMeter()
v_losses = AverageMeter()
end = time.time()
bar = Bar('Training Net', max=int(len(examples)/args.batch_size))
batch_idx = 0
# self.sess.run(tf.local_variables_initializer())
while batch_idx < int(len(examples)/args.batch_size):
sample_ids = np.random.randint(len(examples), size=args.batch_size)
boards, pis, vs = list(zip(*[examples[i] for i in sample_ids]))
# predict and compute gradient and do SGD step
input_dict = {self.nnet.input_boards: boards, self.nnet.target_pis: pis, self.nnet.target_vs: vs, self.nnet.dropout: args.dropout, self.nnet.isTraining: True}
# measure data loading time
data_time.update(time.time() - end)
# record loss
self.sess.run(self.nnet.train_step, feed_dict=input_dict)
pi_loss, v_loss = self.sess.run([self.nnet.loss_pi, self.nnet.loss_v], feed_dict=input_dict)
pi_losses.update(pi_loss, len(boards))
v_losses.update(v_loss, len(boards))
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
batch_idx += 1
# plot progress
bar.suffix = '({batch}/{size}) Data: {data:.3f}s | Batch: {bt:.3f}s | Total: {total:} | ETA: {eta:} | Loss_pi: {lpi:.4f} | Loss_v: {lv:.3f}'.format(
batch=batch_idx,
size=int(len(examples)/args.batch_size),
data=data_time.avg,
bt=batch_time.avg,
total=bar.elapsed_td,
eta=bar.eta_td,
lpi=pi_losses.avg,
lv=v_losses.avg,
)
bar.next()
bar.finish()
def predict(self, board):
"""
board: np array with board
"""
# timing
# start = time.time()
"""# preparing input
board = np.stack([current_state,constrain_matrix],axis=0)"""
# run
prob, v = self.sess.run([self.nnet.prob, self.nnet.v], feed_dict={self.nnet.input_boards: board, self.nnet.dropout: 0, self.nnet.isTraining: False})
#print('PREDICTION TIME TAKEN : {0:03f}'.format(time.time()-start))
return prob[0], v[0]
def save_checkpoint(self, folder='./checkpoints/', filename='ckpt_0'):
if isinstance(filename, tuple):
filename = filename[0] + str(filename[1])
filename = filename + 'pth.tar'
filepath = os.path.join(folder, filename)
if not os.path.exists(folder):
print("Make new directory {}".format(folder))
os.mkdir(folder)
else:
print("Checkpoint Directory exists! ")
if self.saver == None:
self.saver = tf.train.Saver(self.nnet.graph.get_collection('variables'))
with self.nnet.graph.as_default():
self.saver.save(self.sess, filepath)
def load_checkpoint(self, folder='./checkpoints/', filename='ckpt_0'):
if isinstance(filename, tuple):
filename = filename[0] + str(filename[1])
filename = filename+'pth.tar'
filepath = os.path.join(folder, filename)
if not os.path.exists(filepath+'.meta'):
raise("No saved model found in {}".format(filepath))
print("load model " + filename)
with self.nnet.graph.as_default():
self.saver = tf.train.Saver()
self.saver.restore(self.sess, filepath)
class CNN():
def __init__(self, game, args):
# game params
self.board_x, self.board_y = game.getBoardSize()
print(self.board_x, self.board_y)
self.action_size = game.getActionSize()
self.args = args
# Renaming functions
Relu = tf.nn.relu
Tanh = tf.nn.tanh
BatchNormalization = tf.layers.batch_normalization
Dropout = tf.layers.dropout
Dense = tf.layers.dense
Softmax = tf.nn.softmax
# Neural Net
self.graph = tf.Graph()
with self.graph.as_default():
self.input_boards = tf.placeholder(tf.float32, shape=[self.board_x, self.board_y]) # s: batch_size x board_x x board_y
self.dropout = tf.placeholder(tf.float32)
self.isTraining = tf.placeholder(tf.bool, name="is_training")
x_image = tf.reshape(self.input_boards, [-1, self.board_x, self.board_y, 1]) # batch_size x board_x x board_y x 1
conv1 = Relu(BatchNormalization(self.conv_3(x_image, args.num_channels), axis=3, training=self.isTraining))
conv2 = Relu(BatchNormalization(self.conv_3(conv1, args.num_channels*2, padding='VALID'), axis=3, training=self.isTraining))
conv3 = Relu(BatchNormalization(self.conv_3(conv2, args.num_channels*4, padding='VALID'), axis=3, training=self.isTraining))
conv4 = Relu(BatchNormalization(self.conv_3(conv3, args.num_channels*4, padding='VALID'), axis=3, training=self.isTraining))
features = Relu(BatchNormalization(self.conv_3(conv4, args.num_channels*8, padding='VALID'), axis=3, training=self.isTraining))
pi_conv = Relu(BatchNormalization(self.conv_3(features, args.num_channels), axis = 3, training = self.isTraining))
v_conv = Relu(BatchNormalization(self.conv_3(features, int(args.num_channels / 2)), axis = 3, training = self.isTraining))
pi_flat = tf.reshape(pi_conv, [-1, args.num_channels * (self.board_x - 8) * (self.board_y - 8)])
v_flat = tf.reshape(v_conv, [-1, int(args.num_channels / 2) * (self.board_x - 8) * (self.board_y - 8)])
pi_fc = Dropout(Relu(BatchNormalization(Dense(pi_flat, 256), axis = 1, training = self.isTraining)), rate = self.dropout)
v_fc = Dropout(Relu(BatchNormalization(Dense(v_flat, 128), axis = 1, training = self.isTraining)), rate = self.dropout)
self.pi = Dense(pi_fc, self.action_size)
self.prob = Softmax(self.pi)
self.v = Tanh(Dense(v_fc, 1)) # batch_size x self.action_size
self.calculate_loss()
def conv_3(self, x, out_channels, strides = [1, 1], padding = 'SAME'):
return tf.layers.conv2d(x, out_channels, kernel_size = [3, 3], strides = strides, padding = padding)
def calculate_loss(self):
self.target_pis = tf.placeholder(tf.float32, shape=[None, self.action_size])
self.target_vs = tf.placeholder(tf.float32, shape=[None])
self.loss_pi = tf.losses.softmax_cross_entropy(self.target_pis, self.pi)
self.loss_v = tf.losses.mean_squared_error(self.target_vs, tf.reshape(self.v, shape=[-1,]))
self.total_loss = self.loss_pi + self.loss_v
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
self.train_step = tf.train.AdamOptimizer(self.args.lr).minimize(self.total_loss)
class Sdepth_ResNet():
def __init__(self, game, args):
# game params
self.board_x, self.board_y = game.getBoardSize()
self.action_size = game.getActionSize()
self.args = args
# Renaming functions
Relu = tf.nn.relu
Tanh = tf.nn.tanh
BatchNormalization = tf.layers.batch_normalization
Dropout = tf.layers.dropout
Dense = tf.layers.dense
Softmax = tf.nn.softmax
# Neural Net
def res_block(x, bottleneck_channels, out_channels, survival_rate):
in_channels = x.get_shape()[3].value
if in_channels == out_channels:
strides = [1, 1]
res = x
else:
strides = [2, 2]
res = self.conv_1(x, out_channels, strides)
survival_rate = tf.constant(survival_rate)
def originblock():
block = Relu(BatchNormalization(self.conv_3(x, bottleneck_channels, strides), axis = 3, training = self.isTraining))
block_out = Relu(BatchNormalization(self.conv_3(x, out_channels), axis = 3,training = self.isTraining))
return block_out
def bottleneck():
bottleneck_1 = Relu(BatchNormalization(self.conv_1(x, bottleneck_channels), axis = 3, training = self.isTraining))
bottleneck_3 = Relu(BatchNormalization(self.conv_3(bottleneck_1, bottleneck_channels, strides), axis = 3, training = self.isTraining))
bottleneck_out = BatchNormalization(self.conv_1(bottleneck_3, out_channels), axis = 3, training = self.isTraining)
return bottleneck_out
def training():
def thru_block():
output = bottleneck() if args.architecture[1] == 'bottleneck' else originblock()
output = Relu(tf.add(output, res))
return output
def skip_block():
output = Relu(res)
return output
survive = tf.random_uniform(shape = [], minval = 0., maxval = 1., dtype = tf.float32)
survive = tf.less(survive, survival_rate)
return tf.cond(survive, thru_block, skip_block)
def testing():
output = tf.multiply(bottleneck() if args.architecture[1] == 'bottleneck' else originblock(), survival_rate)
output = tf.add(output, res)
return output
return tf.cond(self.isTraining, training, testing)
self.graph = tf.Graph()
with self.graph.as_default():
self.input_boards = tf.placeholder(tf.float32, shape=[None, self.board_x, self.board_y]) # s: batch_size x board_x x board_y
self.dropout = tf.placeholder(tf.float32)
self.isTraining = tf.placeholder(tf.bool, name="is_training")
x_image = tf.reshape(self.input_boards, [-1, self.board_x, self.board_y, 1]) # batch_size x board_x x board_y x 1
conv1 = Relu(BatchNormalization(self.conv_3(x_image, args.num_channels*2), axis=3, training=self.isTraining))
resblock1 = res_block(conv1, args.num_channels, args.num_channels*4, 1.0)
# resblock2 = res_block(resblock1, args.num_channels, args.num_channels*4, 1.0)
features = res_block(resblock1, args.num_channels*2, args.num_channels*8, 1.0 - args.survival_decay)
pi_conv = Relu(BatchNormalization(self.conv_3(features, args.num_channels*2), axis = 3, training = self.isTraining))
v_conv = Relu(BatchNormalization(self.conv_3(features, args.num_channels), axis = 3, training = self.isTraining))
pi_flat = tf.reshape(pi_conv, [-1, args.num_channels * 2 * (self.board_x - 8) * (self.board_y - 8)])
v_flat = tf.reshape(v_conv, [-1, args.num_channels * (self.board_x - 8) * (self.board_y - 8)])
pi_fc = Dropout(Relu(BatchNormalization(Dense(pi_flat, 256), axis = 1, training = self.isTraining)), rate = self.dropout)
v_fc = Dropout(Relu(BatchNormalization(Dense(v_flat, 128), axis = 1, training = self.isTraining)), rate = self.dropout)
self.pi = Dense(pi_fc, self.action_size)
self.prob = Softmax(self.pi)
self.v = Tanh(Dense(v_fc, 1)) # batch_size x self.action_size
self.calculate_loss()
def conv_3(self, x, out_channels, strides = [1, 1], padding = 'SAME'):
return tf.layers.conv2d(x, out_channels, kernel_size = [3, 3], strides = strides, padding = padding)
def conv_1(self, x, out_channels, strides = [1, 1], padding = 'SAME'):
return tf.layers.conv2d(x, out_channels, kernel_size = [1, 1], strides = strides, padding = padding)
def calculate_loss(self):
self.target_pis = tf.placeholder(tf.float32, shape=[None, self.action_size])
self.target_vs = tf.placeholder(tf.float32, shape=[None])
self.loss_pi = tf.losses.softmax_cross_entropy(self.target_pis, self.pi)
self.loss_v = tf.losses.mean_squared_error(self.target_vs, tf.reshape(self.v, shape=[-1,]))
self.total_loss = self.loss_pi + self.loss_v
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
self.train_step = tf.train.AdamOptimizer(self.args.lr).minimize(self.total_loss)
| en | 0.656031 | examples: list of examples, each example is of form (board, pi, v) # self.sess.run(tf.local_variables_initializer()) # predict and compute gradient and do SGD step # measure data loading time # record loss # measure elapsed time # plot progress board: np array with board # timing # start = time.time() # preparing input board = np.stack([current_state,constrain_matrix],axis=0) # run #print('PREDICTION TIME TAKEN : {0:03f}'.format(time.time()-start)) # game params # Renaming functions # Neural Net # s: batch_size x board_x x board_y # batch_size x board_x x board_y x 1 # batch_size x self.action_size # game params # Renaming functions # Neural Net # s: batch_size x board_x x board_y # batch_size x board_x x board_y x 1 # resblock2 = res_block(resblock1, args.num_channels, args.num_channels*4, 1.0) # batch_size x self.action_size | 2.567693 | 3 |
python-data-analysis/matplotlib/subplot/matplotlib_subplot.py | nkhn37/python-tech-sample-source | 0 | 6621175 | """matplotlib
複数グラフを一つのウィンドウに表示する(subplot)
[説明ページ]
https://tech.nkhn37.net/matplotlib-subplots/#_subplot
"""
import matplotlib.pyplot as plt
import numpy as np
x = np.linspace(0, 10, 100)
# subplot arguments are (number of rows, number of columns, position index)
plt.subplot(2, 1, 1)
plt.plot(x, np.sin(x))
plt.subplot(2, 1, 2)
plt.plot(x, np.cos(x))
plt.show()
| """matplotlib
複数グラフを一つのウィンドウに表示する(subplot)
[説明ページ]
https://tech.nkhn37.net/matplotlib-subplots/#_subplot
"""
import matplotlib.pyplot as plt
import numpy as np
x = np.linspace(0, 10, 100)
# 引数は、(行数, 列数, 指定する位置)
plt.subplot(2, 1, 1)
plt.plot(x, np.sin(x))
plt.subplot(2, 1, 2)
plt.plot(x, np.cos(x))
plt.show()
| ja | 0.946963 | matplotlib 複数グラフを一つのウィンドウに表示する(subplot) [説明ページ] https://tech.nkhn37.net/matplotlib-subplots/#_subplot # 引数は、(行数, 列数, 指定する位置) | 3.90057 | 4 |
compiladores/algoritmos/Compiler/models/Symbol.py | WesleyAdriann/iesb | 0 | 6621176 |
class Symbol():
    """A lexical symbol plus the table of numeric token codes for the compiler."""

    # Lexeme/keyword -> numeric token code.
    # `dict({...})` was redundant; a plain dict literal is equivalent.
    code = {
        'LF': 10,     # line feed (end of statement)
        'ETX': 3,     # end of text (end of program)
        '=': 11,
        '+': 21,
        '-': 22,
        '*': 23,
        '/': 24,
        '%': 25,
        '==': 31,
        '!=': 32,
        '>': 33,
        '<': 34,
        '>=': 35,
        '<=': 36,
        'var': 41,
        'int': 51,
        'rem': 61,
        'input': 62,
        'let': 63,
        'print': 64,
        'goto': 65,
        'if': 66,
        'end': 67,
        'error': 99
    }

    def __init__(self, uid):
        # Unique identifier of this symbol instance.
        self.uid = uid
|
class Symbol():
    """A lexical symbol plus the table of numeric token codes for the compiler."""

    # Lexeme/keyword -> numeric token code.
    # `dict({...})` was redundant; a plain dict literal is equivalent.
    code = {
        'LF': 10,     # line feed (end of statement)
        'ETX': 3,     # end of text (end of program)
        '=': 11,
        '+': 21,
        '-': 22,
        '*': 23,
        '/': 24,
        '%': 25,
        '==': 31,
        '!=': 32,
        '>': 33,
        '<': 34,
        '>=': 35,
        '<=': 36,
        'var': 41,
        'int': 51,
        'rem': 61,
        'input': 62,
        'let': 63,
        'print': 64,
        'goto': 65,
        'if': 66,
        'end': 67,
        'error': 99
    }

    def __init__(self, uid):
        # Unique identifier of this symbol instance.
        self.uid = uid
| none | 1 | 2.760429 | 3 | |
tests/unit/controllers/test_user_employment_controller.py | Maxcutex/pm_api | 0 | 6621177 | """
Unit tests for the User Employment Controller.
"""
from datetime import datetime, date
from unittest.mock import patch
from app.controllers.user_employment_controller import UserEmploymentController
from app.models import User, UserEmployment, UserEmploymentSkill
from app.repositories.user_employment_repo import UserEmploymentRepo
from factories.skill_category_factory import (
CategoryWithSkillsFactory,
SkillFactory,
SkillFactoryFake,
)
from tests.base_test_case import BaseTestCase
class TestUserEmploymentController(BaseTestCase):
    def setUp(self):
        """Build shared fixtures: a 4-skill category plus in-memory mock rows."""
        self.BaseSetUp()
        self.skill_category = CategoryWithSkillsFactory.create(skills=4)
        self.skill_category.save()
        self.skill_one = self.skill_category.skills[0]
        self.skill_two = self.skill_category.skills[1]
        self.skill_three = self.skill_category.skills[2]
        self.skill_four = self.skill_category.skills[3]
        # Unsaved model instances; used as return values for mocked repos.
        self.mock_user = User(
            id=1,
            first_name="test",
            last_name="test",
            gender="male",
            password="<PASSWORD>",
            is_active=True,
            is_deleted=False,
            created_at=datetime.now(),
            updated_at=datetime.now(),
        )
        self.mock_user_employment = UserEmployment(
            id=1,
            institution_name="InstitutionName",
            job_title="InstitutionName",
            employment_type="full",
            institution_url="http://www.fake.com",
            institution_city="fake city",
            institution_country="fake.country()",
            institution_size="11-50 employees",
            work_summary="fake.paragraph(nb_sentences=5)",
            accomplishments="fake.paragraph(nb_sentences=5)",
            start_date=date(year=2018, month=1, day=31),
            end_date=date(year=2020, month=1, day=31),
            user_id=self.mock_user.id,
            is_current=False,
            created_at=datetime.now(),
            updated_at=datetime.now(),
        )
        # Links the mock employment to the first generated skill.
        self.mock_user_employment_skill = UserEmploymentSkill(
            user_employment_id=self.mock_user_employment.id, skill_id=self.skill_one.id
        )
    def tearDown(self):
        """Undo everything BaseSetUp created."""
        self.BaseTearDown()
    @patch.object(UserEmploymentRepo, "get_unpaginated")
    def test_list_user_employments_ok_response(
        self,
        mock_user_employment_repo_get_unpaginated,
    ):
        """Test list_user_employments OK response."""
        # Arrange
        with self.app.app_context():
            # The controller reads `.items` off the repo result, so stub that
            # attribute on the mock's return value.
            mock_user_employment_repo_get_unpaginated.return_value.items = [
                self.mock_user_employment,
            ]
            user_employment_controller = UserEmploymentController(self.request_context)

            # Act
            result = user_employment_controller.list_user_employment_history(1)

            # Assert
            assert result.status_code == 200
            assert result.get_json()["msg"] == "OK"
    @patch.object(UserEmploymentRepo, "get")
    def test_get_user_employment_when_invalid_or_missing(
        self, mock_user_employment_repo_get
    ):
        """Test get_user_employment returns 400 when the repo finds nothing."""
        # Arrange
        with self.app.app_context():
            mock_user_employment_repo_get.return_value = None
            user_employment_controller = UserEmploymentController(self.request_context)

            # Act
            result = user_employment_controller.get_user_employment(1)

            # Assert
            assert result.status_code == 400
            assert (
                result.get_json()["msg"]
                == "Invalid User Employment or Missing user_employment_id"
            )
    @patch.object(UserEmploymentRepo, "get")
    def test_get_user_employment_ok_response(self, mock_user_employment_repo_get):
        """Test get_user_employment OK response."""
        # Arrange
        with self.app.app_context():
            mock_user_employment_repo_get.return_value = self.mock_user_employment
            user_employment_controller = UserEmploymentController(self.request_context)

            # Act
            result = user_employment_controller.get_user_employment(1)

            # Assert
            assert result.status_code == 200
            assert result.get_json()["msg"] == "OK"
    @patch.object(UserEmploymentController, "request_params")
    def test_create_user_employment_start_date_less_than_end_date_response(
        self,
        mock_user_employment_controller_request_params,
    ):
        """
        Test create user employment is invalid when start date is greater than end date
        :param mock_user_employment_controller_request_params: mocked request params tuple
        :return:
        """
        with self.app.app_context():
            # Start date (2028) deliberately after end date (2020).
            mock_user_employment_controller_request_params.return_value = (
                1,
                "Institution name",
                "Job title",
                "full",
                "http://www.fake.com",
                "fake city",
                "fake.country()",
                "11-50 employees",
                "fake.paragraph(nb_sentences=5)",
                "fake.paragraph(nb_sentences=5)",
                date(year=2028, month=1, day=31),
                date(year=2020, month=1, day=31),
                False,
                [self.skill_one.id, self.skill_two.id],
            )
            user_employment_controller = UserEmploymentController(self.request_context)

            # Act
            result = user_employment_controller.create_user_employment()

            # Assert
            assert result.status_code == 400
            assert (
                result.get_json()["msg"]
                == "Start Date cannot be greater than End date "
            )
    @patch.object(UserEmploymentController, "request_params")
    @patch.object(UserEmploymentRepo, "find_first")
    def test_create_user_employment_ok_response(
        self,
        mock_user_employment_repo_find_first,
        mock_user_employment_controller_request_params,
    ):
        """Test create_user_employment OK response."""
        # Arrange
        with self.app.app_context():
            mock_user_employment_controller_request_params.return_value = (
                1,
                "Institution name",
                "Job title",
                "full",
                "http://www.fake.com",
                "fake city",
                "fake.country()",
                "11-50 employees",
                "fake.paragraph(nb_sentences=5)",
                "fake.paragraph(nb_sentences=5)",
                date(year=2018, month=1, day=31),
                date(year=2020, month=1, day=31),
                False,
                [self.skill_one.id, self.skill_two.id],
            )
            # No duplicate employment exists yet.
            mock_user_employment_repo_find_first.return_value = None
            user_employment_controller = UserEmploymentController(self.request_context)

            # Act
            result = user_employment_controller.create_user_employment()

            # Assert
            assert result.status_code == 201
            assert result.get_json()["msg"] == "OK"
            # The selected skills must be serialized onto the new record.
            assert (
                result.get_json()["payload"]["user_employment"]["skills"][0]["name"]
                == self.skill_one.name
            )
    @patch.object(UserEmploymentController, "request_params")
    @patch.object(UserEmploymentRepo, "get")
    def test_update_user_employment_when_user_employment_doesnot_exist(
        self,
        mock_user_employment_repo_get,
        mock_user_employment_controller_request_params,
    ):
        """Test update_user_employment when the user employment doesn't exist."""
        # Arrange
        with self.app.app_context():
            # Repo lookup misses, so the controller should reject the update.
            mock_user_employment_repo_get.return_value = None
            mock_user_employment_controller_request_params.return_value = (
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
                None,
            )
            user_employment_controller = UserEmploymentController(self.request_context)
            # Act
            result = user_employment_controller.update_user_employment(1)
            # Assert
            assert result.status_code == 400
            assert (
                result.get_json()["msg"] == "Invalid or incorrect "
                "user_employment_id provided"
            )
    @patch.object(UserEmploymentRepo, "find_first")
    @patch.object(UserEmploymentController, "request_params")
    @patch.object(UserEmploymentRepo, "get")
    def test_update_user_employment_ok_response(
        self,
        mock_user_employment_repo_get,
        mock_user_employment_controller_request_params,
        mock_user_employment_repo_find_first,
    ):
        """Test update_user_employment OK response for an existing record."""
        # Arrange
        with self.app.app_context():
            # Lookup succeeds; no conflicting record is found.
            mock_user_employment_repo_get.return_value = self.mock_user_employment
            mock_user_employment_repo_find_first.return_value = None
            mock_user_employment_controller_request_params.return_value = (
                1,
                1,
                "Institution name",
                "Job title",
                "full",
                "http://www.fake.com",
                "fake city",
                "fake.country()",
                "11-50 employees",
                "fake.paragraph(nb_sentences=5)",
                "fake.paragraph(nb_sentences=5)",
                date(year=2018, month=1, day=31),
                date(year=2020, month=1, day=31),
                True,
                [self.skill_one.id, self.skill_two.id],
            )
            user_employment_controller = UserEmploymentController(self.request_context)
            # Act
            result = user_employment_controller.update_user_employment(1)
            # Assert
            assert result.status_code == 200
            assert result.get_json()["msg"] == "OK"
            assert (
                result.get_json()["payload"]["user_employment"]["skills"][0]["name"]
                == self.skill_one.name
            )
@patch.object(UserEmploymentRepo, "get")
def test_delete_user_employment_when_user_employment_is_invalid(
self, mock_user_employment_repo_get
):
"""Test delete_user_employment when the role is invalid."""
# Arrange
with self.app.app_context():
mock_user_employment_repo_get.return_value = None
user_employment_controler = UserEmploymentController(self.request_context)
# Act
result = user_employment_controler.delete_user_employment(1)
# Assert
assert result.status_code == 404
assert (
result.get_json()["msg"] == "Invalid or incorrect "
"user_employment_id provided"
)
@patch.object(UserEmploymentRepo, "get")
@patch.object(UserEmploymentRepo, "update")
def test_delete_user_employment_ok_response(
self, mock_user_employment_repo_update, mock_user_employment_repo_get
):
"""Test delete_user_employment when the role is invalid."""
# Arrange
with self.app.app_context():
mock_user_employment_repo_get.return_value = self.mock_user_employment
mock_user_employment_repo_update.return_value = self.mock_user_employment
user_employment_controler = UserEmploymentController(self.request_context)
# Act
result = user_employment_controler.delete_user_employment(1)
# Assert
assert result.status_code == 200
assert result.get_json()["msg"] == "user employment deleted"
    @patch.object(UserEmploymentController, "request_params")
    @patch.object(UserEmploymentRepo, "find_first")
    def test_user_employment_create_with_skills_valid(
        self,
        mock_user_employment_repo_find_first,
        mock_user_employment_controller_request_params,
    ):
        """
        Test create_user_employment with skills OK response.

        NOTE(review): this arrangement duplicates
        test_create_user_employment_ok_response almost verbatim; only the
        final skill assertions differ — consider consolidating.
        """
        # Arrange
        with self.app.app_context():
            mock_user_employment_controller_request_params.return_value = (
                1,
                "Institution name",
                "Job title",
                "full",
                "http://www.fake.com",
                "fake city",
                "fake.country()",
                "11-50 employees",
                "fake.paragraph(nb_sentences=5)",
                "fake.paragraph(nb_sentences=5)",
                date(year=2018, month=1, day=31),
                date(year=2020, month=1, day=31),
                False,
                [self.skill_one.id, self.skill_two.id],
            )
            mock_user_employment_repo_find_first.return_value = None
            user_employment_controller = UserEmploymentController(self.request_context)
            # Act
            result = user_employment_controller.create_user_employment()
            # Assert
            assert result.status_code == 201
            assert result.get_json()["msg"] == "OK"
    def test_user_employment_create_with_skills_invalid_skills(self):
        """Placeholder for the invalid-skill-ids negative test.

        TODO: implement — create a user employment with skill ids that do
        not exist and assert the controller rejects the request.
        """
        pass
| """
Unit tests for the User Employment Controller.
"""
from datetime import datetime, date
from unittest.mock import patch
from app.controllers.user_employment_controller import UserEmploymentController
from app.models import User, UserEmployment, UserEmploymentSkill
from app.repositories.user_employment_repo import UserEmploymentRepo
from factories.skill_category_factory import (
CategoryWithSkillsFactory,
SkillFactory,
SkillFactoryFake,
)
from tests.base_test_case import BaseTestCase
class TestUserEmploymentController(BaseTestCase):
    def setUp(self):
        """Create the shared fixtures: a skill category, a user and an employment."""
        self.BaseSetUp()
        # Four persisted skills from one category; tests reference them by id.
        self.skill_category = CategoryWithSkillsFactory.create(skills=4)
        self.skill_category.save()
        self.skill_one = self.skill_category.skills[0]
        self.skill_two = self.skill_category.skills[1]
        self.skill_three = self.skill_category.skills[2]
        self.skill_four = self.skill_category.skills[3]
        # In-memory (unsaved) user owning the mocked employment below.
        self.mock_user = User(
            id=1,
            first_name="test",
            last_name="test",
            gender="male",
            password="<PASSWORD>",
            is_active=True,
            is_deleted=False,
            created_at=datetime.now(),
            updated_at=datetime.now(),
        )
        # NOTE(review): job_title reuses "InstitutionName" — looks like a
        # copy-paste; no test asserts on it, but worth tidying.
        self.mock_user_employment = UserEmployment(
            id=1,
            institution_name="InstitutionName",
            job_title="InstitutionName",
            employment_type="full",
            institution_url="http://www.fake.com",
            institution_city="fake city",
            institution_country="fake.country()",
            institution_size="11-50 employees",
            work_summary="fake.paragraph(nb_sentences=5)",
            accomplishments="fake.paragraph(nb_sentences=5)",
            start_date=date(year=2018, month=1, day=31),
            end_date=date(year=2020, month=1, day=31),
            user_id=self.mock_user.id,
            is_current=False,
            created_at=datetime.now(),
            updated_at=datetime.now(),
        )
        # Link the first skill to the mocked employment.
        self.mock_user_employment_skill = UserEmploymentSkill(
            user_employment_id=self.mock_user_employment.id, skill_id=self.skill_one.id
        )
    def tearDown(self):
        """Undo the base fixture setup after each test."""
        self.BaseTearDown()
    @patch.object(UserEmploymentRepo, "get_unpaginated")
    def test_list_user_employments_ok_response(
        self,
        mock_user_employment_repo_get_unpaginated,
    ):
        """Test list_user_employments OK response."""
        # Arrange
        with self.app.app_context():
            # The controller presumably reads `.items` off the repo result
            # (a pagination-shaped object) — confirm against the controller.
            mock_user_employment_repo_get_unpaginated.return_value.items = [
                self.mock_user_employment,
            ]
            user_employment_controller = UserEmploymentController(self.request_context)
            # Act
            result = user_employment_controller.list_user_employment_history(1)
            # Assert
            assert result.status_code == 200
            assert result.get_json()["msg"] == "OK"
    @patch.object(UserEmploymentRepo, "get")
    def test_get_user_employment_when_invalid_or_missing(
        self, mock_user_employment_repo_get
    ):
        """Test get_user_employment invalid repo response."""
        # Arrange
        with self.app.app_context():
            # Repo lookup misses: the controller must answer 400.
            mock_user_employment_repo_get.return_value = None
            user_employment_controller = UserEmploymentController(self.request_context)
            # Act
            result = user_employment_controller.get_user_employment(1)
            # Assert
            assert result.status_code == 400
            assert (
                result.get_json()["msg"]
                == "Invalid User Employment or Missing user_employment_id"
            )
@patch.object(UserEmploymentRepo, "get")
def test_get_user_employment_ok_response(self, mock_user_employment_repo_get):
"""Test get_user_employment OK response."""
# Arrange
with self.app.app_context():
mock_user_employment_repo_get.return_value = self.mock_user_employment
user_employment_controller = UserEmploymentController(self.request_context)
# Act
result = user_employment_controller.get_user_employment(1)
# Assert
assert result.status_code == 200
assert result.get_json()["msg"] == "OK"
# import pdb
# pdb.set_trace()
# assert result.get_json()["payload"]["user_employment"]["skills"][0] == "OK"
# assert result.get_json()["msg"] == "OK"
@patch.object(UserEmploymentController, "request_params")
def test_create_user_employment_start_date_less_than_end_date_response(
self,
mock_user_employment_controller_request_params,
):
"""
Test create user employment is invalid when start date is greater than end date
:param mock_user_employment_controller_request_params:
:return:
"""
with self.app.app_context():
mock_user_employment_controller_request_params.return_value = (
1,
"Institution name",
"Job title",
"full",
"http://www.fake.com",
"fake city",
"fake.country()",
"11-50 employees",
"fake.paragraph(nb_sentences=5)",
"fake.paragraph(nb_sentences=5)",
date(year=2028, month=1, day=31),
date(year=2020, month=1, day=31),
False,
[self.skill_one.id, self.skill_two.id],
)
user_employment_controller = UserEmploymentController(self.request_context)
# Act
result = user_employment_controller.create_user_employment()
# Assert
assert result.status_code == 400
assert (
result.get_json()["msg"]
== "Start Date cannot be greater than End date "
)
@patch.object(UserEmploymentController, "request_params")
@patch.object(UserEmploymentRepo, "find_first")
def test_create_user_employment_ok_response(
self,
mock_user_employment_repo_find_first,
mock_user_employment_controller_request_params,
):
"""Test create_user_employment OK response."""
# Arrange
with self.app.app_context():
mock_user_employment_controller_request_params.return_value = (
1,
"Institution name",
"Job title",
"full",
"http://www.fake.com",
"fake city",
"fake.country()",
"11-50 employees",
"fake.paragraph(nb_sentences=5)",
"fake.paragraph(nb_sentences=5)",
date(year=2018, month=1, day=31),
date(year=2020, month=1, day=31),
False,
[self.skill_one.id, self.skill_two.id],
)
mock_user_employment_repo_find_first.return_value = None
user_employment_controller = UserEmploymentController(self.request_context)
# Act
result = user_employment_controller.create_user_employment()
# Assert
assert result.status_code == 201
assert result.get_json()["msg"] == "OK"
assert (
result.get_json()["payload"]["user_employment"]["skills"][0]["name"]
== self.skill_one.name
)
@patch.object(UserEmploymentController, "request_params")
@patch.object(UserEmploymentRepo, "get")
def test_update_user_employment_when_user_employment_doesnot_exist(
self,
mock_user_employment_repo_get,
mock_user_employment_controller_request_params,
):
"""Test update_user_employment when role doesn't exist."""
# Arrange
with self.app.app_context():
mock_user_employment_repo_get.return_value = None
mock_user_employment_controller_request_params.return_value = (
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
None,
)
user_employment_controller = UserEmploymentController(self.request_context)
# Act
result = user_employment_controller.update_user_employment(1)
# Assert
assert result.status_code == 400
assert (
result.get_json()["msg"] == "Invalid or incorrect "
"user_employment_id provided"
)
@patch.object(UserEmploymentRepo, "find_first")
@patch.object(UserEmploymentController, "request_params")
@patch.object(UserEmploymentRepo, "get")
def test_update_user_employment_ok_response(
self,
mock_user_employment_repo_get,
mock_user_employment_controller_request_params,
mock_user_employment_repo_find_first,
):
"""Test update_user_employment when role doesn't exist."""
# Arrange
with self.app.app_context():
mock_user_employment_repo_get.return_value = self.mock_user_employment
mock_user_employment_repo_find_first.return_value = None
mock_user_employment_controller_request_params.return_value = (
1,
1,
"Institution name",
"Job title",
"full",
"http://www.fake.com",
"fake city",
"fake.country()",
"11-50 employees",
"fake.paragraph(nb_sentences=5)",
"fake.paragraph(nb_sentences=5)",
date(year=2018, month=1, day=31),
date(year=2020, month=1, day=31),
True,
[self.skill_one.id, self.skill_two.id],
)
user_employment_controller = UserEmploymentController(self.request_context)
# Act
result = user_employment_controller.update_user_employment(1)
# Assert
assert result.status_code == 200
assert result.get_json()["msg"] == "OK"
assert (
result.get_json()["payload"]["user_employment"]["skills"][0]["name"]
== self.skill_one.name
)
@patch.object(UserEmploymentRepo, "get")
def test_delete_user_employment_when_user_employment_is_invalid(
self, mock_user_employment_repo_get
):
"""Test delete_user_employment when the role is invalid."""
# Arrange
with self.app.app_context():
mock_user_employment_repo_get.return_value = None
user_employment_controler = UserEmploymentController(self.request_context)
# Act
result = user_employment_controler.delete_user_employment(1)
# Assert
assert result.status_code == 404
assert (
result.get_json()["msg"] == "Invalid or incorrect "
"user_employment_id provided"
)
@patch.object(UserEmploymentRepo, "get")
@patch.object(UserEmploymentRepo, "update")
def test_delete_user_employment_ok_response(
self, mock_user_employment_repo_update, mock_user_employment_repo_get
):
"""Test delete_user_employment when the role is invalid."""
# Arrange
with self.app.app_context():
mock_user_employment_repo_get.return_value = self.mock_user_employment
mock_user_employment_repo_update.return_value = self.mock_user_employment
user_employment_controler = UserEmploymentController(self.request_context)
# Act
result = user_employment_controler.delete_user_employment(1)
# Assert
assert result.status_code == 200
assert result.get_json()["msg"] == "user employment deleted"
@patch.object(UserEmploymentController, "request_params")
@patch.object(UserEmploymentRepo, "find_first")
def test_user_employment_create_with_skills_valid(
self,
mock_user_employment_repo_find_first,
mock_user_employment_controller_request_params,
):
"""
Test create_user_employment with skills OK response.
"""
# Arrange
with self.app.app_context():
mock_user_employment_controller_request_params.return_value = (
1,
"Institution name",
"Job title",
"full",
"http://www.fake.com",
"fake city",
"fake.country()",
"11-50 employees",
"fake.paragraph(nb_sentences=5)",
"fake.paragraph(nb_sentences=5)",
date(year=2018, month=1, day=31),
date(year=2020, month=1, day=31),
False,
[self.skill_one.id, self.skill_two.id],
)
mock_user_employment_repo_find_first.return_value = None
user_employment_controller = UserEmploymentController(self.request_context)
# Act
result = user_employment_controller.create_user_employment()
# Assert
assert result.status_code == 201
assert result.get_json()["msg"] == "OK"
def test_user_employment_create_with_skills_invalid_skills(self):
pass
| en | 0.736651 | Unit tests for the User Employment Controller. Test list_user_employments OK response. # Arrange # Act # Assert Test get_user_employment invalid repo response. # Arrange # Act # Assert Test get_user_employment OK response. # Arrange # Act # Assert # import pdb # pdb.set_trace() # assert result.get_json()["payload"]["user_employment"]["skills"][0] == "OK" # assert result.get_json()["msg"] == "OK" Test create user employment is invalid when start date is greater than end date :param mock_user_employment_controller_request_params: :return: # Act # Assert Test create_user_employment OK response. # Arrange # Act # Assert Test update_user_employment when role doesn't exist. # Arrange # Act # Assert Test update_user_employment when role doesn't exist. # Arrange # Act # Assert Test delete_user_employment when the role is invalid. # Arrange # Act # Assert Test delete_user_employment when the role is invalid. # Arrange # Act # Assert Test create_user_employment with skills OK response. # Arrange # Act # Assert | 2.94887 | 3 |
devskill/4-great-the-work-is-done.py | neizod/problems | 1 | 6621178 | <gh_stars>1-10
#!/usr/bin/env python3
def main():
    """Read project specs from stdin and report how many days each takes.

    Every dataset is a line ``hours n`` followed by ``n`` lines giving one
    worker's daily output each.  The number of days is the ceiling of
    hours divided by the team's combined daily output.  Processing stops
    cleanly at end of input.
    """
    try:
        while True:
            hours, workers = map(int, input().split())
            daily_output = sum(int(input()) for _ in range(workers))
            days, leftover = divmod(hours, daily_output)
            if leftover > 0:
                days += 1
            # Choose the grammatically correct message for the day count.
            template = ('Project will finish within {} day.'
                        if days == 1
                        else 'Project will finish within {} days.')
            print(template.format(days))
    except EOFError:
        pass


if __name__ == '__main__':
    main()
| #!/usr/bin/env python3
def main():
try:
while True:
hours, n = [int(n) for n in input().split()]
perf = sum(int(input()) for _ in range(n))
days, rem = divmod(hours, perf)
days += (rem > 0)
if days == 1:
print('Project will finish within {} day.'.format(days))
else:
print('Project will finish within {} days.'.format(days))
except EOFError:
pass
if __name__ == '__main__':
main() | fr | 0.221828 | #!/usr/bin/env python3 | 3.453788 | 3 |
omoide/presentation/infra/paginator.py | IgorZyktin/omoide | 0 | 6621179 | # -*- coding: utf-8 -*-
"""Paginator that works with page numbers.
"""
import math
from typing import Iterator, Optional
from pydantic import BaseModel
class PageNum(BaseModel):
    """Single page representation (one cell of a pagination control)."""
    # 1-based page number; -1 for dummy "..." placeholder cells.
    number: int
    # True for the "..." gap cells that are not real pages.
    is_dummy: bool
    # True when this cell corresponds to the page currently viewed.
    is_current: bool
class Paginator:
    """Paginator that works with page numbers.

    Iterating the paginator yields PageNum cells for a pagination widget:
    real page numbers plus dummy cells (number == -1) rendered as "...".
    """
    def __init__(
        self,
        page: int,
        total_items: int,
        items_per_page: int,
        pages_in_block: int,
    ) -> None:
        """Initialize instance.

        :param page: current 1-based page number
        :param total_items: total items across all pages (>= 0)
        :param items_per_page: page size (>= 1)
        :param pages_in_block: how many cells the widget displays (>= 1)
        """
        assert page >= 1
        assert items_per_page >= 1
        assert pages_in_block >= 1
        assert total_items >= 0
        self.page = page
        self.total_items = total_items
        self.items_per_page = items_per_page
        self.pages_in_block = pages_in_block
        # Ceiling division: a partially filled last page still counts.
        self.total_pages = int(
            math.ceil(self.total_items / self.items_per_page)
        )
        # Half of the visible block; drives the long-layout thresholds below.
        self.window = pages_in_block // 2
    def __repr__(self) -> str:
        """Return string representation."""
        _class = type(self).__name__
        return (
            f'{_class}('
            f'page={self.page}, '
            f'total_items={self.total_items}, '
            f'items_per_page={self.items_per_page}, '
            f'pages_in_block={self.pages_in_block}'
            ')'
        )
    def __iter__(self) -> Iterator[PageNum]:
        """Yield the PageNum cells for the current view."""
        if self.is_fitting:
            # [1][2][3][4][5]
            yield from self._iterate_short()
        else:
            # [1][...][55][56][57][...][70]
            yield from self._iterate_long()
    def __len__(self) -> int:
        """Return total amount of items in the sequence."""
        return self.total_items
    @property
    def is_fitting(self) -> bool:
        """Return True if all pages can be displayed at once."""
        return self.total_pages <= self.pages_in_block
    @classmethod
    def empty(cls) -> 'Paginator':
        """Create empty paginator (page 1, zero items; last_page clamps to 1)."""
        return cls(
            page=1,
            total_items=0,
            items_per_page=1,
            pages_in_block=1,
        )
    @property
    def has_previous(self) -> bool:
        """Return True if we can go back."""
        return self.page > self.first_page
    @property
    def has_next(self) -> bool:
        """Return True if we can go further."""
        return self.page < self.total_pages
    @property
    def previous_page(self) -> Optional[int]:
        """Return previous page number, or None when already on the first."""
        if self.page > 1:
            return self.page - 1
        return None
    @property
    def next_page(self) -> Optional[int]:
        """Return next page number, or None when already on the last."""
        if self.page < self.last_page:
            return self.page + 1
        return None
    @property
    def first_page(self) -> int:
        """Return first page number."""
        return 1
    @property
    def last_page(self) -> int:
        """Return last page number (at least 1 even with zero items)."""
        return max(1, self.total_pages)
    def _iterate_short(self) -> Iterator[PageNum]:
        """Iterate over all pages, no exclusions."""
        for number in range(1, self.total_pages + 1):
            yield PageNum(
                number=number,
                is_dummy=False,
                is_current=number == self.page,
            )
    def _iterate_long(self) -> Iterator[PageNum]:
        """Iterate over all pages, but show only some of them.

        Pages near the left edge render as [1][2][3][...][N], near the
        right edge as [1][...][N-2][N-1][N]; everything in between gets
        the centered layout.
        """
        left_threshold = 1 + self.window - 1  # == self.window
        right_threshold = self.total_pages - self.window - 1
        if self.page < left_threshold:
            yield from self._left_leaning_design()
        elif self.page > right_threshold:
            yield from self._right_leaning_design()
        else:
            yield from self._centered_design()
    def _left_leaning_design(self) -> Iterator[PageNum]:
        """Render like [1][2][3][4][...][9]."""
        # One slot is reserved for the trailing last-page cell; together
        # with the dummy this yields exactly pages_in_block cells.
        taken = 1
        for i in range(1, self.pages_in_block - taken):
            yield PageNum(
                number=i,
                is_dummy=False,
                is_current=i == self.page,
            )
        yield PageNum(
            number=-1,
            is_dummy=True,
            is_current=False,
        )
        yield PageNum(
            number=self.total_pages,
            is_dummy=False,
            is_current=self.total_pages == self.page,
        )
    def _centered_design(self) -> Iterator[PageNum]:
        """Render like [1][...][10][11][12][...][45]."""
        yield PageNum(
            number=self.first_page,
            is_dummy=False,
            is_current=self.first_page == self.page,
        )
        yield PageNum(
            number=-1,
            is_dummy=True,
            is_current=False,
        )
        # NOTE(review): this emits window//2 * 2 + 2 middle pages, so the
        # centered layout can exceed pages_in_block cells overall, while
        # the docstring example shows 3 middle pages — confirm the
        # intended width before changing anything here.
        left = self.page - self.window // 2
        right = self.page + self.window // 2 + 1
        for i in range(left, right + 1):
            yield PageNum(
                number=i,
                is_dummy=False,
                is_current=i == self.page,
            )
        yield PageNum(
            number=-1,
            is_dummy=True,
            is_current=False,
        )
        yield PageNum(
            number=self.total_pages,
            is_dummy=False,
            is_current=self.total_pages == self.page,
        )
    def _right_leaning_design(self) -> Iterator[PageNum]:
        """Render like [1][...][7][8][9]."""
        yield PageNum(
            number=self.first_page,
            is_dummy=False,
            is_current=self.first_page == self.page,
        )
        yield PageNum(
            number=-1,
            is_dummy=True,
            is_current=False,
        )
        # Three cells are already used (first page, dummy, plus one slot),
        # leaving pages_in_block - 2 trailing real pages.
        taken = 3
        start = self.total_pages - self.pages_in_block + taken
        for i in range(start, self.total_pages + 1):
            yield PageNum(
                number=i,
                is_dummy=False,
                is_current=i == self.page,
            )
| # -*- coding: utf-8 -*-
"""Paginator that works with page numbers.
"""
import math
from typing import Iterator, Optional
from pydantic import BaseModel
class PageNum(BaseModel):
"""Single page representation."""
number: int
is_dummy: bool
is_current: bool
class Paginator:
"""Paginator that works with page numbers."""
def __init__(
self,
page: int,
total_items: int,
items_per_page: int,
pages_in_block: int,
) -> None:
"""Initialize instance."""
assert page >= 1
assert items_per_page >= 1
assert pages_in_block >= 1
assert total_items >= 0
self.page = page
self.total_items = total_items
self.items_per_page = items_per_page
self.pages_in_block = pages_in_block
self.total_pages = int(
math.ceil(self.total_items / self.items_per_page)
)
self.window = pages_in_block // 2
def __repr__(self) -> str:
"""Return string representation."""
_class = type(self).__name__
return (
f'{_class}('
f'page={self.page}, '
f'total_items={self.total_items}, '
f'items_per_page={self.items_per_page}, '
f'pages_in_block={self.pages_in_block}'
')'
)
def __iter__(self) -> Iterator[PageNum]:
"""Iterate over current page."""
if self.is_fitting:
# [1][2][3][4][5]
yield from self._iterate_short()
else:
# [1][...][55][56][57][...][70]
yield from self._iterate_long()
def __len__(self) -> int:
"""Return total amount of items in the sequence."""
return self.total_items
@property
def is_fitting(self) -> bool:
"""Return True if all pages can be displayed at once."""
return self.total_pages <= self.pages_in_block
@classmethod
def empty(cls) -> 'Paginator':
"""Create empty paginator."""
return cls(
page=1,
total_items=0,
items_per_page=1,
pages_in_block=1,
)
@property
def has_previous(self) -> bool:
"""Return True if we can go back."""
return self.page > self.first_page
@property
def has_next(self) -> bool:
"""Return True if we can go further."""
return self.page < self.total_pages
@property
def previous_page(self) -> Optional[int]:
"""Return previous page number."""
if self.page > 1:
return self.page - 1
return None
@property
def next_page(self) -> Optional[int]:
"""Return next page number."""
if self.page < self.last_page:
return self.page + 1
return None
@property
def first_page(self) -> int:
"""Return first page number."""
return 1
@property
def last_page(self) -> int:
"""Return last page number."""
return max(1, self.total_pages)
def _iterate_short(self) -> Iterator[PageNum]:
"""Iterate over all pages, no exclusions."""
for number in range(1, self.total_pages + 1):
yield PageNum(
number=number,
is_dummy=False,
is_current=number == self.page,
)
def _iterate_long(self) -> Iterator[PageNum]:
"""Iterate over all pages, but show only some of them."""
left_threshold = 1 + self.window - 1
right_threshold = self.total_pages - self.window - 1
if self.page < left_threshold:
yield from self._left_leaning_design()
elif self.page > right_threshold:
yield from self._right_leaning_design()
else:
yield from self._centered_design()
def _left_leaning_design(self) -> Iterator[PageNum]:
"""Render like [1][2][3][4][...][9]."""
taken = 1
for i in range(1, self.pages_in_block - taken):
yield PageNum(
number=i,
is_dummy=False,
is_current=i == self.page,
)
yield PageNum(
number=-1,
is_dummy=True,
is_current=False,
)
yield PageNum(
number=self.total_pages,
is_dummy=False,
is_current=self.total_pages == self.page,
)
def _centered_design(self) -> Iterator[PageNum]:
"""Render like [1][...][10][11][12][...][45]."""
yield PageNum(
number=self.first_page,
is_dummy=False,
is_current=self.first_page == self.page,
)
yield PageNum(
number=-1,
is_dummy=True,
is_current=False,
)
left = self.page - self.window // 2
right = self.page + self.window // 2 + 1
for i in range(left, right + 1):
yield PageNum(
number=i,
is_dummy=False,
is_current=i == self.page,
)
yield PageNum(
number=-1,
is_dummy=True,
is_current=False,
)
yield PageNum(
number=self.total_pages,
is_dummy=False,
is_current=self.total_pages == self.page,
)
def _right_leaning_design(self) -> Iterator[PageNum]:
"""Render like [1][...][7][8][9]."""
yield PageNum(
number=self.first_page,
is_dummy=False,
is_current=self.first_page == self.page,
)
yield PageNum(
number=-1,
is_dummy=True,
is_current=False,
)
taken = 3
start = self.total_pages - self.pages_in_block + taken
for i in range(start, self.total_pages + 1):
yield PageNum(
number=i,
is_dummy=False,
is_current=i == self.page,
)
| en | 0.642197 | # -*- coding: utf-8 -*- Paginator that works with page numbers. Single page representation. Paginator that works with page numbers. Initialize instance. Return string representation. Iterate over current page. # [1][2][3][4][5] # [1][...][55][56][57][...][70] Return total amount of items in the sequence. Return True if all pages can be displayed at once. Create empty paginator. Return True if we can go back. Return True if we can go further. Return previous page number. Return next page number. Return first page number. Return last page number. Iterate over all pages, no exclusions. Iterate over all pages, but show only some of them. Render like [1][2][3][4][...][9]. Render like [1][...][10][11][12][...][45]. Render like [1][...][7][8][9]. | 3.488404 | 3 |
fable/fable_sources/libtbx/command_line/prime_factors_of.py | hickerson/bbn | 4 | 6621180 | from __future__ import division
from libtbx.math_utils import prime_factors_of
import sys
def run(args):
  """Print the prime factorisation of each command-line argument.

  NOTE: Python 2 syntax (print statement) — do not run under Python 3.
  """
  for arg in args:
    n = int(arg)
    # NOTE(review): assert is stripped under -O; real input validation
    # would need an explicit check and error message.
    assert n > 0
    print "prime factors of %d:" % n, prime_factors_of(n)
if (__name__ == "__main__"):
  run(args=sys.argv[1:])
| from __future__ import division
from libtbx.math_utils import prime_factors_of
import sys
def run(args):
for arg in args:
n = int(arg)
assert n > 0
print "prime factors of %d:" % n, prime_factors_of(n)
if (__name__ == "__main__"):
run(args=sys.argv[1:])
| none | 1 | 2.383993 | 2 | |
tests/swarm/test_swarm.py | widdowquinn/lpbio | 0 | 6621181 | #!/usr/bin/env python
"""Tests of wrapper code in pycits.swarm"""
import os
import shutil
import unittest
import pytest
from lpbio import swarm, LPBioNotExecutableError
class TestSwarm(unittest.TestCase):
    """Class collecting tests for Swarm wrapper."""
    def setUp(self):
        """Set up test fixtures"""
        # Input/output paths
        self.testdir = os.path.join("tests", "swarm")
        self.indir = os.path.join(self.testdir, "input")
        self.infile = os.path.join(self.indir, "swarm_coded_with_abundance.fasta")
        self.outdir = os.path.join(self.testdir, "output")
        self.outfile = os.path.join(self.outdir, "swarm.out")
        # Target paths
        self.targetdir = os.path.join(self.testdir, "targets")
        self.targetfile = os.path.join(self.targetdir, "swarm.out")
        # remove and recreate the output directory
        try:
            shutil.rmtree(self.outdir)
        except FileNotFoundError:
            pass
        os.makedirs(self.outdir, exist_ok=True)
    @staticmethod
    def test_swarm_wrapper_creation():
        """swarm executable is in $PATH"""
        swarm.Swarm("swarm")
    def test_swarm_parameters(self):
        """SwarmParameters stores the t and d options it was given."""
        parameters = swarm.SwarmParameters(t=1, d=1)
        self.assertEqual(parameters.t, 1)
        self.assertEqual(parameters.d, 1)
    def test_swarm_cmd(self):
        """swarm module returns correct form of cmd-line"""
        parameters = swarm.SwarmParameters(t=1, d=1)
        cmd = swarm.build_cmd(self.infile, self.outfile, parameters)
        # NOTE(review): "-t 1"/"-d 1" each include a space, i.e. one argv
        # token per flag+value — confirm this matches swarm.build_cmd.
        self.assertEqual(
            cmd, ["swarm", "-t 1", "-d 1", "-o", self.outfile, self.infile]
        )
    def test_swarm_wrapper_cmd(self):
        """swarm wrapper returns correct form of cmd-line"""
        cluster = swarm.Swarm("swarm")
        target = ["swarm", "-t 1", "-d 1", "-o", self.outfile, self.infile]
        parameters = swarm.SwarmParameters(t=1, d=1)
        # dry_run=True returns the command instead of executing it.
        self.assertEqual(
            cluster.run(self.infile, self.outdir, parameters, dry_run=True), target
        )
    @staticmethod
    def test_swarm_wrapper_exec_notexist():
        """error thrown when swarm executable does not exist"""
        with pytest.raises(LPBioNotExecutableError):
            swarm.Swarm(os.path.join(".", "swarm"))
    def test_swarm_wrapper_run(self):
        """swarm clusters test data"""
        cluster = swarm.Swarm("swarm")
        parameters = swarm.SwarmParameters(t=1, d=1)
        cluster.run(self.infile, self.outdir, parameters)
    def test_swarm_output_parse(self):
        """Swarm runs and output parses correctly"""
        cluster = swarm.Swarm("swarm")
        parameters = swarm.SwarmParameters(t=1, d=1)
        result = cluster.run(self.infile, self.outdir, parameters)
        parser = swarm.SwarmParser()
        # Parsed real output must match the parsed pre-computed target file.
        target = parser.read(self.targetfile)
        swarms = parser.read(result.outfilename)
        self.assertEqual(target, swarms)
| #!/usr/bin/env python
"""Tests of wrapper code in pycits.swarm"""
import os
import shutil
import unittest
import pytest
from lpbio import swarm, LPBioNotExecutableError
class TestSwarm(unittest.TestCase):
"""Class collecting tests for Swarm wrapper."""
def setUp(self):
"""Set up test fixtures"""
# Input/output paths
self.testdir = os.path.join("tests", "swarm")
self.indir = os.path.join(self.testdir, "input")
self.infile = os.path.join(self.indir, "swarm_coded_with_abundance.fasta")
self.outdir = os.path.join(self.testdir, "output")
self.outfile = os.path.join(self.outdir, "swarm.out")
# Target paths
self.targetdir = os.path.join(self.testdir, "targets")
self.targetfile = os.path.join(self.targetdir, "swarm.out")
# remove and recreate the output directory
try:
shutil.rmtree(self.outdir)
except FileNotFoundError:
pass
os.makedirs(self.outdir, exist_ok=True)
@staticmethod
def test_swarm_wrapper_creation():
"""swarm executable is in $PATH"""
swarm.Swarm("swarm")
def test_swarm_parameters(self):
parameters = swarm.SwarmParameters(t=1, d=1)
self.assertEqual(parameters.t, 1)
self.assertEqual(parameters.d, 1)
def test_swarm_cmd(self):
"""swarm module returns correct form of cmd-line"""
parameters = swarm.SwarmParameters(t=1, d=1)
cmd = swarm.build_cmd(self.infile, self.outfile, parameters)
self.assertEqual(
cmd, ["swarm", "-t 1", "-d 1", "-o", self.outfile, self.infile]
)
def test_swarm_wrapper_cmd(self):
"""swarm wrapper returns correct form of cmd-line"""
cluster = swarm.Swarm("swarm")
target = ["swarm", "-t 1", "-d 1", "-o", self.outfile, self.infile]
parameters = swarm.SwarmParameters(t=1, d=1)
self.assertEqual(
cluster.run(self.infile, self.outdir, parameters, dry_run=True), target
)
@staticmethod
def test_swarm_wrapper_exec_notexist():
"""error thrown when swarm executable does not exist"""
with pytest.raises(LPBioNotExecutableError):
swarm.Swarm(os.path.join(".", "swarm"))
def test_swarm_wrapper_run(self):
"""swarm clusters test data"""
cluster = swarm.Swarm("swarm")
parameters = swarm.SwarmParameters(t=1, d=1)
cluster.run(self.infile, self.outdir, parameters)
def test_swarm_output_parse(self):
"""Swarm runs and output parses correctly"""
cluster = swarm.Swarm("swarm")
parameters = swarm.SwarmParameters(t=1, d=1)
result = cluster.run(self.infile, self.outdir, parameters)
parser = swarm.SwarmParser()
target = parser.read(self.targetfile)
swarms = parser.read(result.outfilename)
self.assertEqual(target, swarms)
| en | 0.641631 | #!/usr/bin/env python Tests of wrapper code in pycits.swarm Class collecting tests for Swarm wrapper. Set up test fixtures # Input/output paths # Target paths # remove and recreate the output directory swarm executable is in $PATH swarm module returns correct form of cmd-line swarm wrapper returns correct form of cmd-line error thrown when swarm executable does not exist swarm clusters test data Swarm runs and output parses correctly | 2.427671 | 2 |
modules/make_report.py | tbersez/Allmine | 5 | 6621182 | <reponame>tbersez/Allmine
# Report generator
#
# Starts the .py script for report generation
#
# Inputs:
# - sample_non_synm_variants.vcf
#
# Output:
# - Non_synonymous_variants_summary.tab
#
# Parameters:
# None
rule make_report :
input:
non_syno = expand(config["VAR"] + "{samples}/{samples}_varscan.avinput.exonic_variant_function", samples = config["samples"])
output:
report = 'Non_synonymous_variants_summary.tab'
shell:
"""
./modules/report_generator.py
"""
| # Report generator
#
# Starts the .py script for report generation
#
# Inputs:
# - sample_non_synm_variants.vcf
#
# Output:
# - Non_synonymous_variants_summary.tab
#
# Parameters:
# None
rule make_report :
input:
non_syno = expand(config["VAR"] + "{samples}/{samples}_varscan.avinput.exonic_variant_function", samples = config["samples"])
output:
report = 'Non_synonymous_variants_summary.tab'
shell:
"""
./modules/report_generator.py
""" | en | 0.452613 | # Report generator # # Starts the .py script for report generation # # Inputs: # - sample_non_synm_variants.vcf # # Output: # - Non_synonymous_variants_summary.tab # # Parameters: # None ./modules/report_generator.py | 1.912473 | 2 |
pynamodb/util.py | dataframehq/PynamoDB | 1 | 6621183 | <filename>pynamodb/util.py
"""
Utils
"""
import re
def snake_to_camel_case(var_name: str) -> str:
"""
Converts camel case variable names to snake case variable_names
"""
first_pass = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', var_name)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', first_pass).lower()
| <filename>pynamodb/util.py
"""
Utils
"""
import re
def snake_to_camel_case(var_name: str) -> str:
"""
Converts camel case variable names to snake case variable_names
"""
first_pass = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', var_name)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', first_pass).lower()
| en | 0.592292 | Utils Converts camel case variable names to snake case variable_names | 3.161222 | 3 |
modelmaker/resources/templates/text_classification/scripts/test.py | shirecoding/ModelMaker | 0 | 6621184 | import os
import sys
import numpy as np
from tensorflow import keras
file_path = os.path.abspath(__file__)
current_directory = os.path.dirname(file_path)
project_directory = os.path.dirname(current_directory)
sys.path.insert(0, project_directory)
from {{package_name}}.models import {{ project_name }}
# load model in development mode
model_path = os.path.join(project_directory, 'saved_models', '{{ package_name }}')
model = {{ project_name }}().load_model(model_path)
sample_text = [
'The movie was cool. The animation and the graphics were out of this world. I would recommend this movie.',
"I saw this film as it was the second feature on a disc containing the previously banned Video Nasty 'Blood Rites'. \
As Blood Rites was entirely awful, I really wasn't expecting much from this film; but actually, it would seem that trash \
director <NAME> has outdone himself this time as Seeds of Sin tops Blood Rites in style and stands tall as a more than \
adequate slice of sick sixties sexploitation. The plot is actually quite similar to Blood Rites, as we focus on a \
dysfunctional family unit, and of course; there is an inheritance at stake. The film is shot in black and white, and \
the look and feel of it reminded me a lot of the trash classic 'The Curious Dr Humpp'. There's barely any gore on display, \
and the director seems keener to focus on sex, with themes of incest and hatred seeping through. The acting is typically \
trashy, but most of the women get to appear nude at some point and despite a poor reputation, director <NAME> actually \
seems to have an eye for this sort of thing, as many of the sequences in this film are actually quite beautiful. The plot is \
paper thin, and most of the film is filler; but the music is catchy, and the director also does a surprisingly good job with \
the sex scenes themselves, as most are somewhat erotic. Overall, this is not a great film; but it's likely to appeal to \
the cult fan, and gets a much higher recommendation than the better known and lower quality 'Blood Rites'."
]
for x, y in zip(sample_text, model(sample_text)):
print(f"\nreview: {x}\nscore: {y}\n")
| import os
import sys
import numpy as np
from tensorflow import keras
file_path = os.path.abspath(__file__)
current_directory = os.path.dirname(file_path)
project_directory = os.path.dirname(current_directory)
sys.path.insert(0, project_directory)
from {{package_name}}.models import {{ project_name }}
# load model in development mode
model_path = os.path.join(project_directory, 'saved_models', '{{ package_name }}')
model = {{ project_name }}().load_model(model_path)
sample_text = [
'The movie was cool. The animation and the graphics were out of this world. I would recommend this movie.',
"I saw this film as it was the second feature on a disc containing the previously banned Video Nasty 'Blood Rites'. \
As Blood Rites was entirely awful, I really wasn't expecting much from this film; but actually, it would seem that trash \
director <NAME> has outdone himself this time as Seeds of Sin tops Blood Rites in style and stands tall as a more than \
adequate slice of sick sixties sexploitation. The plot is actually quite similar to Blood Rites, as we focus on a \
dysfunctional family unit, and of course; there is an inheritance at stake. The film is shot in black and white, and \
the look and feel of it reminded me a lot of the trash classic 'The Curious Dr Humpp'. There's barely any gore on display, \
and the director seems keener to focus on sex, with themes of incest and hatred seeping through. The acting is typically \
trashy, but most of the women get to appear nude at some point and despite a poor reputation, director <NAME> actually \
seems to have an eye for this sort of thing, as many of the sequences in this film are actually quite beautiful. The plot is \
paper thin, and most of the film is filler; but the music is catchy, and the director also does a surprisingly good job with \
the sex scenes themselves, as most are somewhat erotic. Overall, this is not a great film; but it's likely to appeal to \
the cult fan, and gets a much higher recommendation than the better known and lower quality 'Blood Rites'."
]
for x, y in zip(sample_text, model(sample_text)):
print(f"\nreview: {x}\nscore: {y}\n")
| en | 0.838268 | # load model in development mode | 2.32274 | 2 |
auth_gitlab/constants.py | fufik/sentry-auth-gitlab | 0 | 6621185 | from django.conf import settings
CLIENT_ID = getattr(settings, 'GITLAB_APP_ID', None)
CLIENT_SECRET = getattr(settings, 'GITLAB_APP_SECRET', None)
BASE_DOMAIN = getattr(settings, 'GITLAB_BASE_DOMAIN', None)
SCHEME = getattr(settings, 'GITLAB_HTTP_SCHEME', 'https')
API_VERSION = getattr(settings, 'GITLAB_API_VERSION', 4)
SCOPE = getattr(settings, 'GITLAB_AUTH_SCOPE', 'api')
ACCESS_TOKEN_URL = '{0}://{1}/oauth/token'.format(SCHEME, BASE_DOMAIN)
AUTHORIZE_URL = '{0}://{1}/oauth/authorize'.format(SCHEME, BASE_DOMAIN)
API_BASE_URL = '{0}://{1}/api/v{2}'.format(SCHEME, BASE_DOMAIN, API_VERSION)
# Just dummies from copied GitHub API so far
ERR_NO_ORG_ACCESS = "You do not have access to the required GitLab organization."
ERR_NO_PRIMARY_EMAIL = "We were unable to find a primary email address associated with your GitLab account."
ERR_NO_SINGLE_PRIMARY_EMAIL = "We were unable to find a single primary email address associated with your GitLab account."
ERR_NO_VERIFIED_PRIMARY_EMAIL = "We were unable to find a verified, primary email address associated with your GitLab account."
ERR_NO_SINGLE_VERIFIED_PRIMARY_EMAIL = "We were unable to find a single verified, primary email address associated with your GitLab account"
| from django.conf import settings
CLIENT_ID = getattr(settings, 'GITLAB_APP_ID', None)
CLIENT_SECRET = getattr(settings, 'GITLAB_APP_SECRET', None)
BASE_DOMAIN = getattr(settings, 'GITLAB_BASE_DOMAIN', None)
SCHEME = getattr(settings, 'GITLAB_HTTP_SCHEME', 'https')
API_VERSION = getattr(settings, 'GITLAB_API_VERSION', 4)
SCOPE = getattr(settings, 'GITLAB_AUTH_SCOPE', 'api')
ACCESS_TOKEN_URL = '{0}://{1}/oauth/token'.format(SCHEME, BASE_DOMAIN)
AUTHORIZE_URL = '{0}://{1}/oauth/authorize'.format(SCHEME, BASE_DOMAIN)
API_BASE_URL = '{0}://{1}/api/v{2}'.format(SCHEME, BASE_DOMAIN, API_VERSION)
# Just dummies from copied GitHub API so far
ERR_NO_ORG_ACCESS = "You do not have access to the required GitLab organization."
ERR_NO_PRIMARY_EMAIL = "We were unable to find a primary email address associated with your GitLab account."
ERR_NO_SINGLE_PRIMARY_EMAIL = "We were unable to find a single primary email address associated with your GitLab account."
ERR_NO_VERIFIED_PRIMARY_EMAIL = "We were unable to find a verified, primary email address associated with your GitLab account."
ERR_NO_SINGLE_VERIFIED_PRIMARY_EMAIL = "We were unable to find a single verified, primary email address associated with your GitLab account"
| en | 0.27415 | # Just dummies from copied GitHub API so far | 1.941684 | 2 |
weiboCrawler/migrations/0002_auto_20210413_0745.py | SongYuQiu/Social-Network-Portrait-Analysis-System-BackCode | 0 | 6621186 | # Generated by Django 2.2.19 on 2021-04-13 07:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('weiboCrawler', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='weibotext',
name='source_text',
field=models.TextField(blank=True, null=True),
),
migrations.AlterField(
model_name='weibotext',
name='source_topic',
field=models.CharField(blank=True, max_length=128, null=True),
),
]
| # Generated by Django 2.2.19 on 2021-04-13 07:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('weiboCrawler', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='weibotext',
name='source_text',
field=models.TextField(blank=True, null=True),
),
migrations.AlterField(
model_name='weibotext',
name='source_topic',
field=models.CharField(blank=True, max_length=128, null=True),
),
]
| en | 0.752692 | # Generated by Django 2.2.19 on 2021-04-13 07:45 | 1.573404 | 2 |
ViscoIndent.py | yu-efremov/ViscoIndent | 3 | 6621187 | <reponame>yu-efremov/ViscoIndent
# -*- coding: utf-8 -*-
"""
gui for viscoindent, v. May-2021
with images for viscomodels
gui_Viscoindent
"""
import sys
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QMainWindow, QApplication, QWidget, QSizePolicy,\
QVBoxLayout, QHBoxLayout, QLabel,\
QPushButton, QComboBox
from PyQt5.QtCore import Qt, QPersistentModelIndex, QModelIndex
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
# import matplotlib.pyplot as plt
import numpy as np
# import csv
from tingFC_constructor import tingFC_constructor
from relaxation_functions import relaxation_function, modellist
def dicttolist(Dict):
temp = []
dictlist = []
for key, value in Dict.items():
temp = [key, value]
dictlist.append(temp)
return dictlist
class App(QMainWindow):
def read_data(self):
model = self.table.model() # general parameters (indenter, etc.)
model2 = self.table2.model() # indentation history
model3 = self.table3.model() # viscoelastic parameters
Pars2 = []
for row in range(model.rowCount(0)):
Pars2.append([])
for column in range(model.columnCount(0)):
index = model.index(row, column)
try:
Pars2[row].append(float(model.data(index, 0)))
except:
Pars2[row].append((model.data(index, 0)))
indpars = []
for row in range(model2.rowCount(0)):
indpars.append([])
column = 1
index = model2.index(row, column)
try:
indpars[row].append(float(model2.data(index, 0)))
except:
indpars[row].append((model2.data(index, 0)))
# print(indpars)
indpars = np.squeeze(indpars)
vpars = []
for row in range(model3.rowCount(0)):
vpars.append([])
column = 1
index = model3.index(row, column)
try:
vpars[row].append(float(model3.data(index, 0)))
except:
vpars[row].append((model3.data(index, 0)))
# print(vpars)
vpars = np.squeeze(vpars)
Pars = dict(Pars2)
Pars['indpars'] = indpars
Pars['Vpars'] = vpars
Pars['graph'] = str(self.graphT.currentText())
# print(Pars)
self.ParsCur = Pars
indentationfull = np.array([0, 1])
time, ind, force, cradius, indentationfullL, forceL = tingFC_constructor(Pars, indentationfull)
self.curve_data = [time, ind, force, cradius, indentationfullL, forceL]
def button1(self):
self.read_data()
curve_data = self.curve_data
Pars = self.ParsCur
print(Pars)
viscpars = relaxation_function(Pars['Vpars'], Pars['viscomodel'], np.ones(1))[2]
self.labelViscoHelp.setText(Pars['viscomodel'] + ' pars: ' + str(viscpars))
self.curvedata = PlotCanvas.plot(self.m, Pars, curve_data)
self.show()
def button2(self):
Pars = self.ParsCur
# print(self.curvedata)
arr = np.vstack([self.curve_data[0], self.curve_data[1], self.curve_data[2]])
np.savetxt('force_curve_data.csv', arr.transpose(), delimiter=',',
fmt=['%f', '%f', '%f'], header=str(Pars) + '\n' +
'time; indentation; force')
# with open('force_curve_data.csv', mode='w', newline='') as csv_file:
# wr = csv.writer(csv_file)
# wr.writerow(self.curvedata)
def changeviscomodel(self):
viscomodel = str(self.cbDel2.currentText())
viscpars = relaxation_function([0.4, 0.3, 0.2, 0.1, 0], viscomodel, np.ones(1))[2]
self.labelViscoHelp.setText(viscomodel + ' pars: ' + str(viscpars))
for ij in range(len(viscpars)):
indx = self.table3.model().index(ij, 0)
self.table3.model().setData(indx, viscpars[ij], 0)
self.table3.selectRow(ij)
self.table3.setRowHidden(ij, False)
self.table3.clearSelection()
if ij < 4:
for ik in range(4, ij, -1):
self.table3.setRowHidden(ik, True)
try:
filevname = "images/" + viscomodel + ".png"
pixmap = QtGui.QPixmap(filevname)
pixmap2 = pixmap.scaled(300, 300, QtCore.Qt.KeepAspectRatio, transformMode=Qt.SmoothTransformation)
self.VmodelImage.setPixmap(pixmap2)
except:
print('image for the viscomodel does not exist')
def changefigaxis(self):
self.curvedata = PlotCanvas.plot(self.m, self.ParsCur, self.curve_data)
self.show()
def __init__(self):
super().__init__()
self.left = 50
self.top = 50
self.title = 'PyQt5 gui for ViscIndent'
self.width = 1200
self.height = 750
self.initUI()
def initUI(self):
self.setWindowTitle(self.title)
self.setGeometry(self.left, self.top, self.width, self.height)
Pars = {}
Pars['probe_shape'] = 'sphere'
Pars['probe_dimension'] = 5000
Pars['Poisson'] = 0.5 # Poisson's ratio of the sample
Pars['dT'] = 1e-3 # Sampling time
Pars['height'] = 0
Pars['viscomodel'] = 'sPLR'
# Pars['indpars'] = np.array([1, 50, 50, 1000, 1]
# Pars['Vpars'] = np.array([1000, 0.8, 0, 20])
Pars['noise'] = 0 # % noise level from median force
Pars['hydrodrag'] = 0
IndPars = {} # [yes/no; depth; speed; numpoimts; ramp/sin];
IndPars['define_indentation'] = 1 # Pars['indpars'][0]
IndPars['depth'] = 50 # Pars['indpars'][1]
IndPars['speed'] = 50 # Pars['indpars'][2]
IndPars['number of pts'] = 1000 # Pars['indpars'][3]
IndPars['tri(0) or sin(1)'] = 0 # Pars['indpars'][4]
ViscoPars = {}
ViscoPars['visco-par1'] = float(1000) # Pars['Vpars'][0]
ViscoPars['visco-par2'] = float(0.2) # Pars['Vpars'][1]
ViscoPars['visco-par3'] = float(0) # Pars['Vpars'][2]
ViscoPars['visco-par4'] = float(0) # Pars['Vpars'][3]
ViscoPars['visco-par5'] = float(0) # Pars['Vpars'][3]
# Pars.pop('Vpars', None)
# Pars.pop('indpars', None)
listPars = dicttolist(Pars)
listInd = dicttolist(IndPars)
listVisco = dicttolist(ViscoPars)
Pars['indpars'] = np.squeeze(list(IndPars.values()))
Pars['Vpars'] = np.squeeze(list(ViscoPars.values()))
self.table = QtWidgets.QTableView()
self.model = TableModel(listPars)
self.table.setModel(self.model)
indx = self.table.model().index(0, 1)
pix = QPersistentModelIndex(indx)
cbDel = QComboBox()
cbDel.currentIndexChanged[str].connect(lambda txt, pix=pix: self.table.model().setData(QModelIndex(pix), txt, 0))
cbDel.addItems(['sphere', 'cone', 'cylinder'])
self.table.setIndexWidget(indx, cbDel)
indx2 = self.table.model().index(5, 1)
pix2 = QPersistentModelIndex(indx2)
self.cbDel2 = QComboBox()
self.cbDel2.currentIndexChanged[str].connect(lambda txt, pix2=pix2: self.table.model().setData(QModelIndex(pix2), txt, 0))
self.cbDel2.addItems(modellist())
self.cbDel2.setCurrentIndex(9)
self.cbDel2.currentIndexChanged.connect(self.changeviscomodel)
self.table.setIndexWidget(indx2, self.cbDel2)
self.table.setRowHidden(3, True)
self.table2 = QtWidgets.QTableView()
self.model2 = TableModel(listInd)
self.table2.setModel(self.model2)
self.table2.setRowHidden(0, True)
self.table3 = QtWidgets.QTableView()
self.model3 = TableModel(listVisco)
self.table3.setModel(self.model3)
self.graphT = QComboBox()
self.graphT.addItems(['Force versus Indentation', 'Force versus Time'])
self.graphT.currentIndexChanged.connect(self.button1)
self.labelViscoHelp = QLabel(str(self.cbDel2.currentText()) + ' pars: E1, alpha', self)
# self.changeviscomodel()
# self.model3 = TableModel(listVisco)
Pars['graph'] = 'Force versus Indentation'
self.read_data()
curve_data = self.curve_data
self.m = PlotCanvas(self, Pars, curve_data, width=5, height=4)
self.curvedata = PlotCanvas.plot(self.m, Pars, curve_data)
button = QPushButton('Update', self)
button.setToolTip('Update the grapg')
button.clicked.connect(self.button1)
button2 = QPushButton('Export csv', self)
button2.clicked.connect(self.button2)
self.VmodelImage = QLabel(self)
self.VmodelImage.setScaledContents(True)
pixmap = QtGui.QPixmap("images/sPLR.png")
pixmap2 = pixmap.scaled(100, 100, QtCore.Qt.KeepAspectRatio)
self.VmodelImage.setPixmap(pixmap2)
self.changeviscomodel()
layout1 = QHBoxLayout()
layout2 = QVBoxLayout()
layoutV = QVBoxLayout()
layout3 = QVBoxLayout()
layout2.addWidget(button)
layout2.addWidget(self.table)
layout2.addWidget(self.table2)
layoutV.addWidget(self.labelViscoHelp)
layoutV.addWidget(self.table3)
layoutV.addWidget(self.VmodelImage)
layout3.addWidget(self.graphT)
layout3.addWidget(self.m)
layout3.addWidget(button2)
layout1.addLayout(layout2)
layout1.addLayout(layoutV)
layout1.addLayout(layout3)
# three commands to apply layout
widget = QWidget()
widget.setLayout(layout1)
self.setCentralWidget(widget)
self.show()
self.activateWindow()
def closeEvent(self, event):
QApplication.quit()
class PlotCanvas(FigureCanvas):
def __init__(self, QWidget, Pars, curve_data, parent=None, width=5, height=4, dpi=100):
fig = Figure(figsize=(width, height), dpi=dpi)
self.axes = fig.add_subplot(111)
FigureCanvas.__init__(self, fig)
self.setParent(parent)
FigureCanvas.setSizePolicy(self,
QSizePolicy.Expanding,
QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
# self.plot(Pars)
def plot(self, Pars, curve_data):
time, ind, force, cradius, indentationfullL, forceL = curve_data
ax = self.axes
ax.clear()
if Pars['graph'] == 'Force versus Indentation':
ax.plot(ind, force, 'r-')
ax.set_title('Force vs indentation')
ax.set_xlabel('Indentation, nm')
ax.set_ylabel('Force, nN')
elif Pars['graph'] == 'Force versus Time':
ax.plot(time, force, 'r-')
ax.set_title('Force vs Time')
ax.set_xlabel('Time, s')
ax.set_ylabel('Force, nN')
self.draw()
class TableModel(QtCore.QAbstractTableModel):
def __init__(self, data):
super(TableModel, self).__init__()
self._data = data
def data(self, index, role):
if role == Qt.DisplayRole:
# Get the raw value
value = self._data[index.row()][index.column()]
if isinstance(value, str):
# Render strings with quotes
return '%s' % value
return value
if role == Qt.EditRole or role == Qt.DisplayRole: # edit without clear
return QtCore.QVariant(self._data[index.row()][index.column()])
# return QtCore.QVariant()
def setData(self, index, value, role):
self._data[index.row()][index.column()] = value # or float(value)
return True
def flags(self, index):
if index.column() == 1:
return QtCore.Qt.ItemIsEditable | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
if index.column() == 0: # make first column read only
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
def rowCount(self, index):
return len(self._data)
def columnCount(self, index):
return len(self._data[0])
if __name__ == '__main__':
try:
del app
except:
print('noapp')
app = QApplication(sys.argv)
ex = App()
sys.exit(app.exec_())
| # -*- coding: utf-8 -*-
"""
gui for viscoindent, v. May-2021
with images for viscomodels
gui_Viscoindent
"""
import sys
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QMainWindow, QApplication, QWidget, QSizePolicy,\
QVBoxLayout, QHBoxLayout, QLabel,\
QPushButton, QComboBox
from PyQt5.QtCore import Qt, QPersistentModelIndex, QModelIndex
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.figure import Figure
# import matplotlib.pyplot as plt
import numpy as np
# import csv
from tingFC_constructor import tingFC_constructor
from relaxation_functions import relaxation_function, modellist
def dicttolist(Dict):
temp = []
dictlist = []
for key, value in Dict.items():
temp = [key, value]
dictlist.append(temp)
return dictlist
class App(QMainWindow):
def read_data(self):
model = self.table.model() # general parameters (indenter, etc.)
model2 = self.table2.model() # indentation history
model3 = self.table3.model() # viscoelastic parameters
Pars2 = []
for row in range(model.rowCount(0)):
Pars2.append([])
for column in range(model.columnCount(0)):
index = model.index(row, column)
try:
Pars2[row].append(float(model.data(index, 0)))
except:
Pars2[row].append((model.data(index, 0)))
indpars = []
for row in range(model2.rowCount(0)):
indpars.append([])
column = 1
index = model2.index(row, column)
try:
indpars[row].append(float(model2.data(index, 0)))
except:
indpars[row].append((model2.data(index, 0)))
# print(indpars)
indpars = np.squeeze(indpars)
vpars = []
for row in range(model3.rowCount(0)):
vpars.append([])
column = 1
index = model3.index(row, column)
try:
vpars[row].append(float(model3.data(index, 0)))
except:
vpars[row].append((model3.data(index, 0)))
# print(vpars)
vpars = np.squeeze(vpars)
Pars = dict(Pars2)
Pars['indpars'] = indpars
Pars['Vpars'] = vpars
Pars['graph'] = str(self.graphT.currentText())
# print(Pars)
self.ParsCur = Pars
indentationfull = np.array([0, 1])
time, ind, force, cradius, indentationfullL, forceL = tingFC_constructor(Pars, indentationfull)
self.curve_data = [time, ind, force, cradius, indentationfullL, forceL]
def button1(self):
self.read_data()
curve_data = self.curve_data
Pars = self.ParsCur
print(Pars)
viscpars = relaxation_function(Pars['Vpars'], Pars['viscomodel'], np.ones(1))[2]
self.labelViscoHelp.setText(Pars['viscomodel'] + ' pars: ' + str(viscpars))
self.curvedata = PlotCanvas.plot(self.m, Pars, curve_data)
self.show()
def button2(self):
Pars = self.ParsCur
# print(self.curvedata)
arr = np.vstack([self.curve_data[0], self.curve_data[1], self.curve_data[2]])
np.savetxt('force_curve_data.csv', arr.transpose(), delimiter=',',
fmt=['%f', '%f', '%f'], header=str(Pars) + '\n' +
'time; indentation; force')
# with open('force_curve_data.csv', mode='w', newline='') as csv_file:
# wr = csv.writer(csv_file)
# wr.writerow(self.curvedata)
def changeviscomodel(self):
viscomodel = str(self.cbDel2.currentText())
viscpars = relaxation_function([0.4, 0.3, 0.2, 0.1, 0], viscomodel, np.ones(1))[2]
self.labelViscoHelp.setText(viscomodel + ' pars: ' + str(viscpars))
for ij in range(len(viscpars)):
indx = self.table3.model().index(ij, 0)
self.table3.model().setData(indx, viscpars[ij], 0)
self.table3.selectRow(ij)
self.table3.setRowHidden(ij, False)
self.table3.clearSelection()
if ij < 4:
for ik in range(4, ij, -1):
self.table3.setRowHidden(ik, True)
try:
filevname = "images/" + viscomodel + ".png"
pixmap = QtGui.QPixmap(filevname)
pixmap2 = pixmap.scaled(300, 300, QtCore.Qt.KeepAspectRatio, transformMode=Qt.SmoothTransformation)
self.VmodelImage.setPixmap(pixmap2)
except:
print('image for the viscomodel does not exist')
def changefigaxis(self):
self.curvedata = PlotCanvas.plot(self.m, self.ParsCur, self.curve_data)
self.show()
def __init__(self):
super().__init__()
self.left = 50
self.top = 50
self.title = 'PyQt5 gui for ViscIndent'
self.width = 1200
self.height = 750
self.initUI()
def initUI(self):
self.setWindowTitle(self.title)
self.setGeometry(self.left, self.top, self.width, self.height)
Pars = {}
Pars['probe_shape'] = 'sphere'
Pars['probe_dimension'] = 5000
Pars['Poisson'] = 0.5 # Poisson's ratio of the sample
Pars['dT'] = 1e-3 # Sampling time
Pars['height'] = 0
Pars['viscomodel'] = 'sPLR'
# Pars['indpars'] = np.array([1, 50, 50, 1000, 1]
# Pars['Vpars'] = np.array([1000, 0.8, 0, 20])
Pars['noise'] = 0 # % noise level from median force
Pars['hydrodrag'] = 0
IndPars = {} # [yes/no; depth; speed; numpoimts; ramp/sin];
IndPars['define_indentation'] = 1 # Pars['indpars'][0]
IndPars['depth'] = 50 # Pars['indpars'][1]
IndPars['speed'] = 50 # Pars['indpars'][2]
IndPars['number of pts'] = 1000 # Pars['indpars'][3]
IndPars['tri(0) or sin(1)'] = 0 # Pars['indpars'][4]
ViscoPars = {}
ViscoPars['visco-par1'] = float(1000) # Pars['Vpars'][0]
ViscoPars['visco-par2'] = float(0.2) # Pars['Vpars'][1]
ViscoPars['visco-par3'] = float(0) # Pars['Vpars'][2]
ViscoPars['visco-par4'] = float(0) # Pars['Vpars'][3]
ViscoPars['visco-par5'] = float(0) # Pars['Vpars'][3]
# Pars.pop('Vpars', None)
# Pars.pop('indpars', None)
listPars = dicttolist(Pars)
listInd = dicttolist(IndPars)
listVisco = dicttolist(ViscoPars)
Pars['indpars'] = np.squeeze(list(IndPars.values()))
Pars['Vpars'] = np.squeeze(list(ViscoPars.values()))
self.table = QtWidgets.QTableView()
self.model = TableModel(listPars)
self.table.setModel(self.model)
indx = self.table.model().index(0, 1)
pix = QPersistentModelIndex(indx)
cbDel = QComboBox()
cbDel.currentIndexChanged[str].connect(lambda txt, pix=pix: self.table.model().setData(QModelIndex(pix), txt, 0))
cbDel.addItems(['sphere', 'cone', 'cylinder'])
self.table.setIndexWidget(indx, cbDel)
indx2 = self.table.model().index(5, 1)
pix2 = QPersistentModelIndex(indx2)
self.cbDel2 = QComboBox()
self.cbDel2.currentIndexChanged[str].connect(lambda txt, pix2=pix2: self.table.model().setData(QModelIndex(pix2), txt, 0))
self.cbDel2.addItems(modellist())
self.cbDel2.setCurrentIndex(9)
self.cbDel2.currentIndexChanged.connect(self.changeviscomodel)
self.table.setIndexWidget(indx2, self.cbDel2)
self.table.setRowHidden(3, True)
self.table2 = QtWidgets.QTableView()
self.model2 = TableModel(listInd)
self.table2.setModel(self.model2)
self.table2.setRowHidden(0, True)
self.table3 = QtWidgets.QTableView()
self.model3 = TableModel(listVisco)
self.table3.setModel(self.model3)
self.graphT = QComboBox()
self.graphT.addItems(['Force versus Indentation', 'Force versus Time'])
self.graphT.currentIndexChanged.connect(self.button1)
self.labelViscoHelp = QLabel(str(self.cbDel2.currentText()) + ' pars: E1, alpha', self)
# self.changeviscomodel()
# self.model3 = TableModel(listVisco)
Pars['graph'] = 'Force versus Indentation'
self.read_data()
curve_data = self.curve_data
self.m = PlotCanvas(self, Pars, curve_data, width=5, height=4)
self.curvedata = PlotCanvas.plot(self.m, Pars, curve_data)
button = QPushButton('Update', self)
button.setToolTip('Update the grapg')
button.clicked.connect(self.button1)
button2 = QPushButton('Export csv', self)
button2.clicked.connect(self.button2)
self.VmodelImage = QLabel(self)
self.VmodelImage.setScaledContents(True)
pixmap = QtGui.QPixmap("images/sPLR.png")
pixmap2 = pixmap.scaled(100, 100, QtCore.Qt.KeepAspectRatio)
self.VmodelImage.setPixmap(pixmap2)
self.changeviscomodel()
layout1 = QHBoxLayout()
layout2 = QVBoxLayout()
layoutV = QVBoxLayout()
layout3 = QVBoxLayout()
layout2.addWidget(button)
layout2.addWidget(self.table)
layout2.addWidget(self.table2)
layoutV.addWidget(self.labelViscoHelp)
layoutV.addWidget(self.table3)
layoutV.addWidget(self.VmodelImage)
layout3.addWidget(self.graphT)
layout3.addWidget(self.m)
layout3.addWidget(button2)
layout1.addLayout(layout2)
layout1.addLayout(layoutV)
layout1.addLayout(layout3)
# three commands to apply layout
widget = QWidget()
widget.setLayout(layout1)
self.setCentralWidget(widget)
self.show()
self.activateWindow()
def closeEvent(self, event):
QApplication.quit()
class PlotCanvas(FigureCanvas):
def __init__(self, QWidget, Pars, curve_data, parent=None, width=5, height=4, dpi=100):
fig = Figure(figsize=(width, height), dpi=dpi)
self.axes = fig.add_subplot(111)
FigureCanvas.__init__(self, fig)
self.setParent(parent)
FigureCanvas.setSizePolicy(self,
QSizePolicy.Expanding,
QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
# self.plot(Pars)
def plot(self, Pars, curve_data):
time, ind, force, cradius, indentationfullL, forceL = curve_data
ax = self.axes
ax.clear()
if Pars['graph'] == 'Force versus Indentation':
ax.plot(ind, force, 'r-')
ax.set_title('Force vs indentation')
ax.set_xlabel('Indentation, nm')
ax.set_ylabel('Force, nN')
elif Pars['graph'] == 'Force versus Time':
ax.plot(time, force, 'r-')
ax.set_title('Force vs Time')
ax.set_xlabel('Time, s')
ax.set_ylabel('Force, nN')
self.draw()
class TableModel(QtCore.QAbstractTableModel):
def __init__(self, data):
super(TableModel, self).__init__()
self._data = data
def data(self, index, role):
if role == Qt.DisplayRole:
# Get the raw value
value = self._data[index.row()][index.column()]
if isinstance(value, str):
# Render strings with quotes
return '%s' % value
return value
if role == Qt.EditRole or role == Qt.DisplayRole: # edit without clear
return QtCore.QVariant(self._data[index.row()][index.column()])
# return QtCore.QVariant()
def setData(self, index, value, role):
self._data[index.row()][index.column()] = value # or float(value)
return True
def flags(self, index):
if index.column() == 1:
return QtCore.Qt.ItemIsEditable | QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
if index.column() == 0: # make first column read only
return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable
def rowCount(self, index):
return len(self._data)
def columnCount(self, index):
return len(self._data[0])
if __name__ == '__main__':
try:
del app
except:
print('noapp')
app = QApplication(sys.argv)
ex = App()
sys.exit(app.exec_()) | en | 0.38015 | # -*- coding: utf-8 -*- gui for viscoindent, v. May-2021
with images for viscomodels
gui_Viscoindent # import matplotlib.pyplot as plt # import csv # general parameters (indenter, etc.) # indentation history # viscoelastic parameters # print(indpars) # print(vpars) # print(Pars) # print(self.curvedata) # with open('force_curve_data.csv', mode='w', newline='') as csv_file: # wr = csv.writer(csv_file) # wr.writerow(self.curvedata) # Poisson's ratio of the sample # Sampling time # Pars['indpars'] = np.array([1, 50, 50, 1000, 1] # Pars['Vpars'] = np.array([1000, 0.8, 0, 20]) # % noise level from median force # [yes/no; depth; speed; numpoimts; ramp/sin]; # Pars['indpars'][0] # Pars['indpars'][1] # Pars['indpars'][2] # Pars['indpars'][3] # Pars['indpars'][4] # Pars['Vpars'][0] # Pars['Vpars'][1] # Pars['Vpars'][2] # Pars['Vpars'][3] # Pars['Vpars'][3] # Pars.pop('Vpars', None) # Pars.pop('indpars', None) # self.changeviscomodel() # self.model3 = TableModel(listVisco) # three commands to apply layout # self.plot(Pars) # Get the raw value # Render strings with quotes # edit without clear # return QtCore.QVariant() # or float(value) # make first column read only | 2.259176 | 2 |
problems/problem_16.py | minuq/project-euler | 0 | 6621188 | <filename>problems/problem_16.py
"""What is the sum of the digits of the number 2^1000? """
def main():
bignum = 1
result = 0
for i in range(0, 1000):
bignum *= 2
for i in range(0, len(str(bignum))):
result += int(str(bignum)[i])
print("Problem 16: {0}".format(result))
| <filename>problems/problem_16.py
"""What is the sum of the digits of the number 2^1000? """
def main():
bignum = 1
result = 0
for i in range(0, 1000):
bignum *= 2
for i in range(0, len(str(bignum))):
result += int(str(bignum)[i])
print("Problem 16: {0}".format(result))
| en | 0.886841 | What is the sum of the digits of the number 2^1000? | 3.438479 | 3 |
simulaqron/start/start_qnodeos.py | WrathfulSpatula/SimulaQron | 25 | 6621189 | #!/usr/bin/env python
import sys
import time
import signal
from timeit import default_timer as timer
from twisted.internet import reactor
from twisted.internet.error import ConnectionRefusedError, CannotListenError
from twisted.spread import pb
from netqasm.logging.glob import get_netqasm_logger, set_log_level
from simulaqron.netqasm_backend.factory import NetQASMFactory
from simulaqron.netqasm_backend.qnodeos import SubroutineHandler
from simulaqron.general.host_config import SocketsConfig
from simulaqron.settings import simulaqron_settings
logger = get_netqasm_logger("start_qnodeos")
_RETRY_TIME = 0.1
_TIMEOUT = 10
def init_register(virtRoot, myName, node):
"""Retrieves the relevant root objects to talk to such remote connections"""
logger.debug("LOCAL %s: All connections set up.", myName)
# Set the virtual node
node.set_virtual_node(virtRoot)
# Start listening to NetQASM messages
setup_netqasm_server(myName, node)
def connect_to_virtNode(myName, netqasm_factory, virtual_network):
"""Trys to connect to local virtual node.
If connection is refused, we try again after a set amount of time
(specified in handle_connection_error)
"""
logger.debug("LOCAL %s: Trying to connect to local virtual node.", myName)
virtual_node = virtual_network.hostDict[myName]
factory = pb.PBClientFactory()
# Connect
reactor.connectTCP(virtual_node.hostname, virtual_node.port, factory)
deferVirtual = factory.getRootObject()
# If connection succeeds do:
deferVirtual.addCallback(init_register, myName, netqasm_factory)
# If connection fails do:
deferVirtual.addErrback(handle_connection_error, myName, netqasm_factory, virtual_network)
def handle_connection_error(reason, myName, netqasm_factory, virtual_network):
""" Handles errors from trying to connect to local virtual node.
If a ConnectionRefusedError is raised another try will be made after
Settings.CONF_WAIT_TIME seconds. Any other error is raised again.
"""
try:
reason.raiseException()
except ConnectionRefusedError:
logger.debug("LOCAL %s: Could not connect, trying again...", myName)
reactor.callLater(
simulaqron_settings.conn_retry_time,
connect_to_virtNode,
myName,
netqasm_factory,
virtual_network,
)
except Exception as e:
logger.error(
"LOCAL %s: Critical error when connection to local virtual node: %s",
myName,
e,
)
reactor.stop()
def setup_netqasm_server(myName, netqasm_factory):
"""Setup NetQASM server to handle remote on the classical communication network."""
t_start = timer()
while timer() - t_start < _TIMEOUT:
try:
logger.debug(
"LOCAL %s: Starting local classical communication server.", myName
)
myHost = netqasm_factory.host
myHost.root = netqasm_factory
myHost.factory = netqasm_factory
reactor.listenTCP(myHost.port, myHost.factory)
break
except CannotListenError:
logger.error(
"LOCAL {}: NetQASM server address ({}) is already in use, trying again.".format(
myName, myHost.port
)
)
time.sleep(_RETRY_TIME)
except Exception as e:
logger.error(
"LOCAL {}: Critical error when starting NetQASM server: {}".format(myName, e)
)
reactor.stop()
else:
reactor.stop()
def sigterm_handler(_signo, _stack_frame):
reactor.stop()
def main(myName, network_name="default", log_level="WARNING"):
"""Start the indicated backend NetQASM Server"""
set_log_level(log_level)
logger.debug(f"Starting QNodeOS at {myName}")
signal.signal(signal.SIGTERM, sigterm_handler)
signal.signal(signal.SIGINT, sigterm_handler)
# Since version 3.0.0 a single config file is used
network_config_file = simulaqron_settings.network_config_file
# Read configuration files for the virtual quantum, as well as the classical network
virtual_network = SocketsConfig(network_config_file, network_name=network_name, config_type="vnode")
qnodeos_network = SocketsConfig(network_config_file, network_name=network_name, config_type="qnodeos")
# Check if we are in the host-dictionary
if myName in qnodeos_network.hostDict:
myHost = qnodeos_network.hostDict[myName]
logger.debug(f"Setting up QNodeOS protocol factory for {myName}")
netqasm_factory = NetQASMFactory(
myHost,
myName,
qnodeos_network,
SubroutineHandler,
network_name=network_name,
)
else:
logger.error("LOCAL %s: Cannot start classical communication servers.", myName)
return
# Connect to the local virtual node simulating the "local" qubits
logger.debug(f"Connect to virtual node {myName}")
connect_to_virtNode(myName, netqasm_factory, virtual_network)
# Run reactor
reactor.run()
logger.debug(f"Ending QNodeOS at {myName}")
if __name__ == '__main__':
main(sys.argv[1])
| #!/usr/bin/env python
import sys
import time
import signal
from timeit import default_timer as timer
from twisted.internet import reactor
from twisted.internet.error import ConnectionRefusedError, CannotListenError
from twisted.spread import pb
from netqasm.logging.glob import get_netqasm_logger, set_log_level
from simulaqron.netqasm_backend.factory import NetQASMFactory
from simulaqron.netqasm_backend.qnodeos import SubroutineHandler
from simulaqron.general.host_config import SocketsConfig
from simulaqron.settings import simulaqron_settings
logger = get_netqasm_logger("start_qnodeos")
_RETRY_TIME = 0.1
_TIMEOUT = 10
def init_register(virtRoot, myName, node):
"""Retrieves the relevant root objects to talk to such remote connections"""
logger.debug("LOCAL %s: All connections set up.", myName)
# Set the virtual node
node.set_virtual_node(virtRoot)
# Start listening to NetQASM messages
setup_netqasm_server(myName, node)
def connect_to_virtNode(myName, netqasm_factory, virtual_network):
"""Trys to connect to local virtual node.
If connection is refused, we try again after a set amount of time
(specified in handle_connection_error)
"""
logger.debug("LOCAL %s: Trying to connect to local virtual node.", myName)
virtual_node = virtual_network.hostDict[myName]
factory = pb.PBClientFactory()
# Connect
reactor.connectTCP(virtual_node.hostname, virtual_node.port, factory)
deferVirtual = factory.getRootObject()
# If connection succeeds do:
deferVirtual.addCallback(init_register, myName, netqasm_factory)
# If connection fails do:
deferVirtual.addErrback(handle_connection_error, myName, netqasm_factory, virtual_network)
def handle_connection_error(reason, myName, netqasm_factory, virtual_network):
""" Handles errors from trying to connect to local virtual node.
If a ConnectionRefusedError is raised another try will be made after
Settings.CONF_WAIT_TIME seconds. Any other error is raised again.
"""
try:
reason.raiseException()
except ConnectionRefusedError:
logger.debug("LOCAL %s: Could not connect, trying again...", myName)
reactor.callLater(
simulaqron_settings.conn_retry_time,
connect_to_virtNode,
myName,
netqasm_factory,
virtual_network,
)
except Exception as e:
logger.error(
"LOCAL %s: Critical error when connection to local virtual node: %s",
myName,
e,
)
reactor.stop()
def setup_netqasm_server(myName, netqasm_factory):
"""Setup NetQASM server to handle remote on the classical communication network."""
t_start = timer()
while timer() - t_start < _TIMEOUT:
try:
logger.debug(
"LOCAL %s: Starting local classical communication server.", myName
)
myHost = netqasm_factory.host
myHost.root = netqasm_factory
myHost.factory = netqasm_factory
reactor.listenTCP(myHost.port, myHost.factory)
break
except CannotListenError:
logger.error(
"LOCAL {}: NetQASM server address ({}) is already in use, trying again.".format(
myName, myHost.port
)
)
time.sleep(_RETRY_TIME)
except Exception as e:
logger.error(
"LOCAL {}: Critical error when starting NetQASM server: {}".format(myName, e)
)
reactor.stop()
else:
reactor.stop()
def sigterm_handler(_signo, _stack_frame):
reactor.stop()
def main(myName, network_name="default", log_level="WARNING"):
"""Start the indicated backend NetQASM Server"""
set_log_level(log_level)
logger.debug(f"Starting QNodeOS at {myName}")
signal.signal(signal.SIGTERM, sigterm_handler)
signal.signal(signal.SIGINT, sigterm_handler)
# Since version 3.0.0 a single config file is used
network_config_file = simulaqron_settings.network_config_file
# Read configuration files for the virtual quantum, as well as the classical network
virtual_network = SocketsConfig(network_config_file, network_name=network_name, config_type="vnode")
qnodeos_network = SocketsConfig(network_config_file, network_name=network_name, config_type="qnodeos")
# Check if we are in the host-dictionary
if myName in qnodeos_network.hostDict:
myHost = qnodeos_network.hostDict[myName]
logger.debug(f"Setting up QNodeOS protocol factory for {myName}")
netqasm_factory = NetQASMFactory(
myHost,
myName,
qnodeos_network,
SubroutineHandler,
network_name=network_name,
)
else:
logger.error("LOCAL %s: Cannot start classical communication servers.", myName)
return
# Connect to the local virtual node simulating the "local" qubits
logger.debug(f"Connect to virtual node {myName}")
connect_to_virtNode(myName, netqasm_factory, virtual_network)
# Run reactor
reactor.run()
logger.debug(f"Ending QNodeOS at {myName}")
if __name__ == '__main__':
main(sys.argv[1])
| en | 0.823128 | #!/usr/bin/env python Retrieves the relevant root objects to talk to such remote connections # Set the virtual node # Start listening to NetQASM messages Trys to connect to local virtual node. If connection is refused, we try again after a set amount of time (specified in handle_connection_error) # Connect # If connection succeeds do: # If connection fails do: Handles errors from trying to connect to local virtual node. If a ConnectionRefusedError is raised another try will be made after Settings.CONF_WAIT_TIME seconds. Any other error is raised again. Setup NetQASM server to handle remote on the classical communication network. Start the indicated backend NetQASM Server # Since version 3.0.0 a single config file is used # Read configuration files for the virtual quantum, as well as the classical network # Check if we are in the host-dictionary # Connect to the local virtual node simulating the "local" qubits # Run reactor | 2.004815 | 2 |
AdventOfCode2020/02.simple.py | wandyezj/scripts | 0 | 6621190 | <filename>AdventOfCode2020/02.simple.py
def readFileLines(file):
f = open(file);
data = f.read().strip()
f.close()
return data.split("\n")
lines = readFileLines("02.data.txt")
partOneCount = 0
partTwoCount = 0
#go through each line
for line in lines:
# a b letter password
# a-b letter: password
pieces = line.split(": ")
password = pieces[1].strip()
pieces = pieces[0].split(" ")
letter = pieces[1].strip()
pieces = pieces[0].split("-")
a = int(pieces[0])
b = int(pieces[1])
#print("{} {} {} {}".format(a, b, letter, password))
# part 1
# count how many of the letter is in the password
letterCount = password.count(letter)
# test against a and b
if letterCount >= a and letterCount <= b:
partOneCount += 1
# part 2
# test for letters presence at a and b using one based index
password = " " + password
one = password[a] == letter
two = password[b] == letter
if one ^ two:
partTwoCount += 1
# count the number that pass
print("Part 1")
print(partOneCount)
print("Part 2")
print(partTwoCount)
| <filename>AdventOfCode2020/02.simple.py
def readFileLines(file):
f = open(file);
data = f.read().strip()
f.close()
return data.split("\n")
lines = readFileLines("02.data.txt")
partOneCount = 0
partTwoCount = 0
#go through each line
for line in lines:
# a b letter password
# a-b letter: password
pieces = line.split(": ")
password = pieces[1].strip()
pieces = pieces[0].split(" ")
letter = pieces[1].strip()
pieces = pieces[0].split("-")
a = int(pieces[0])
b = int(pieces[1])
#print("{} {} {} {}".format(a, b, letter, password))
# part 1
# count how many of the letter is in the password
letterCount = password.count(letter)
# test against a and b
if letterCount >= a and letterCount <= b:
partOneCount += 1
# part 2
# test for letters presence at a and b using one based index
password = " " + password
one = password[a] == letter
two = password[b] == letter
if one ^ two:
partTwoCount += 1
# count the number that pass
print("Part 1")
print(partOneCount)
print("Part 2")
print(partTwoCount)
| en | 0.8528 | #go through each line # a b letter password # a-b letter: password #print("{} {} {} {}".format(a, b, letter, password)) # part 1 # count how many of the letter is in the password # test against a and b # part 2 # test for letters presence at a and b using one based index # count the number that pass | 3.664104 | 4 |
menvod/html_extract.py | anokata/pythonPetProjects | 3 | 6621191 | from lxml import html
import glob
import os
import re
def file_to_html(filename):
if os.path.exists(filename):
with open(filename, 'rt') as fin:
content = fin.read()
html_doc = html.fromstring(content)
return html_doc
def get_all_tags(ht, tag):
return ht.xpath('//%s'%tag)
def extract_text(elements):
text = list()
for e in elements:
text.append(e.text_content())
text.append('\n'*2)
return ''.join(text)
def save_text(filename, text):
with open(filename, 'wt') as fout:
fout.write(text)
def get_star_files(path):
return glob.glob(path + '*', recursive=True)
def extract_new_name(path, extension=None):
if extension == None:
extension = re.search('\.[a-zA-Z]*', path).group()
print(path)
numbers = re.search('\d+', path)
if numbers == None:
numbers = ''
else:
numbers = numbers.group()
part1 = ''.join(re.findall('/[a-zA-Z]', path)).replace('/', '')
real_name = os.path.split(path)[1]
name = re.sub('[:|\s\.\,]', '', real_name)[::4]
pth = os.path.split(path)[0] + os.path.sep
end_name = pth + name + part1 + numbers + extension
#if os.path.exists(end_name):
# return None
return end_name
def html_p_texts(pattern):
files = get_star_files(path)
for fn in files:
htm = file_to_html(fn)
tags = get_all_tags(htm, 'p')
text = extract_text(tags)
name = extract_new_name(fn, '.txt')
if name != None:
save_text(name, text)
else:
print('sorry')
pass
#if __name__=='__main__':
path = '/home/ksi/Downloads/html/Mother of Learning'
f = file_to_html('/home/ksi/Downloads/html/Mother of Learning Chapter 58: Questions and Answers, a fantasy fiction | FictionPress.html')
t = get_all_tags(f, 'p')
x = extract_text(t)
#save_text('/home/ksi/Downloads/mol58.txt', x)
html_p_texts(path)
| from lxml import html
import glob
import os
import re
def file_to_html(filename):
if os.path.exists(filename):
with open(filename, 'rt') as fin:
content = fin.read()
html_doc = html.fromstring(content)
return html_doc
def get_all_tags(ht, tag):
return ht.xpath('//%s'%tag)
def extract_text(elements):
text = list()
for e in elements:
text.append(e.text_content())
text.append('\n'*2)
return ''.join(text)
def save_text(filename, text):
with open(filename, 'wt') as fout:
fout.write(text)
def get_star_files(path):
return glob.glob(path + '*', recursive=True)
def extract_new_name(path, extension=None):
if extension == None:
extension = re.search('\.[a-zA-Z]*', path).group()
print(path)
numbers = re.search('\d+', path)
if numbers == None:
numbers = ''
else:
numbers = numbers.group()
part1 = ''.join(re.findall('/[a-zA-Z]', path)).replace('/', '')
real_name = os.path.split(path)[1]
name = re.sub('[:|\s\.\,]', '', real_name)[::4]
pth = os.path.split(path)[0] + os.path.sep
end_name = pth + name + part1 + numbers + extension
#if os.path.exists(end_name):
# return None
return end_name
def html_p_texts(pattern):
files = get_star_files(path)
for fn in files:
htm = file_to_html(fn)
tags = get_all_tags(htm, 'p')
text = extract_text(tags)
name = extract_new_name(fn, '.txt')
if name != None:
save_text(name, text)
else:
print('sorry')
pass
#if __name__=='__main__':
path = '/home/ksi/Downloads/html/Mother of Learning'
f = file_to_html('/home/ksi/Downloads/html/Mother of Learning Chapter 58: Questions and Answers, a fantasy fiction | FictionPress.html')
t = get_all_tags(f, 'p')
x = extract_text(t)
#save_text('/home/ksi/Downloads/mol58.txt', x)
html_p_texts(path)
| en | 0.369688 | #if os.path.exists(end_name): # return None #if __name__=='__main__': #save_text('/home/ksi/Downloads/mol58.txt', x) | 3.041179 | 3 |
django/solution/untitled/ksiazkaadresowa/models/address.py | giserh/book-python | 1 | 6621192 | <reponame>giserh/book-python
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Address(models.Model):
person = models.ForeignKey(verbose_name=_('Person'), to='ksiazkaadresowa.Person', on_delete=models.CASCADE)
street = models.CharField(verbose_name=_('Street'), max_length=30, db_index=True)
house_number = models.CharField(verbose_name=_('House Number'), max_length=5)
apartment_number = models.CharField(verbose_name=_('Apartment Number'), max_length=5, null=True, blank=True)
city = models.CharField(verbose_name=_('City'), max_length=30, null=True, blank=True, db_index=True)
post_code = models.IntegerField(verbose_name=_('Post Code'), null=True, blank=True)
region = models.CharField(verbose_name=_('Region'), max_length=30, null=True, blank=True)
country = models.CharField(verbose_name=_('Country'), max_length=30, null=True, blank=True)
def __str__(self):
return f'{self.street}, {self.city}'
class Meta:
verbose_name = _('Address')
verbose_name_plural = _('Addresses')
| from django.db import models
from django.utils.translation import ugettext_lazy as _
class Address(models.Model):
person = models.ForeignKey(verbose_name=_('Person'), to='ksiazkaadresowa.Person', on_delete=models.CASCADE)
street = models.CharField(verbose_name=_('Street'), max_length=30, db_index=True)
house_number = models.CharField(verbose_name=_('House Number'), max_length=5)
apartment_number = models.CharField(verbose_name=_('Apartment Number'), max_length=5, null=True, blank=True)
city = models.CharField(verbose_name=_('City'), max_length=30, null=True, blank=True, db_index=True)
post_code = models.IntegerField(verbose_name=_('Post Code'), null=True, blank=True)
region = models.CharField(verbose_name=_('Region'), max_length=30, null=True, blank=True)
country = models.CharField(verbose_name=_('Country'), max_length=30, null=True, blank=True)
def __str__(self):
return f'{self.street}, {self.city}'
class Meta:
verbose_name = _('Address')
verbose_name_plural = _('Addresses') | none | 1 | 2.271821 | 2 | |
python/2020_04_1.py | wensby/advent-of-code | 0 | 6621193 | import sys
required_fields = ['byr', 'iyr', 'eyr', 'hgt', 'hcl', 'ecl', 'pid', 'cid']
def run(input):
passports = parse_passports(input)
return len([p for p in passports if is_valid(p)])
def parse_passports(input):
passports = []
passport = {}
passports.append(passport)
for line in input.splitlines():
if not line:
passport = {}
passports.append(passport)
else:
for field in line.split(' '):
key, value = field.split(':')
passport[key] = value
return passports
def is_valid(passport):
missing_fields = [f for f in required_fields if f not in passport.keys()]
return not missing_fields or missing_fields == ['cid']
if __name__ == '__main__':
print(run(sys.stdin.read()))
| import sys
required_fields = ['byr', 'iyr', 'eyr', 'hgt', 'hcl', 'ecl', 'pid', 'cid']
def run(input):
passports = parse_passports(input)
return len([p for p in passports if is_valid(p)])
def parse_passports(input):
passports = []
passport = {}
passports.append(passport)
for line in input.splitlines():
if not line:
passport = {}
passports.append(passport)
else:
for field in line.split(' '):
key, value = field.split(':')
passport[key] = value
return passports
def is_valid(passport):
missing_fields = [f for f in required_fields if f not in passport.keys()]
return not missing_fields or missing_fields == ['cid']
if __name__ == '__main__':
print(run(sys.stdin.read()))
| none | 1 | 3.609857 | 4 | |
agent.py | akhilcjacob/flappybird-parallel-rl | 0 | 6621194 | <reponame>akhilcjacob/flappybird-parallel-rl<filename>agent.py<gh_stars>0
import json
import os
import random
class Agent(object):
def __init__(self, agent_name, b_model=None):
self.q_table_loc = "./models/"
self._verify_dir()
self.last_state = '0_0_0'
self.base_model = {self.last_state: [0, 0]}
self.agent_name = agent_name
self.learning_rate = 0.7
self.reward = {0: 1, 1: -1000}
self.output_file_loc = self.q_table_loc + agent_name + ".json"
self.move_list = []
self.last_action = 0
self.count = 0
self.epsilon = 0.001
if b_model != None:
print("Recieved data from constructor")
self.base_model = b_model
elif len(self.base_model) == 1:
self._import_q_table()
else:
print("Starting with an empty data set")
def _verify_dir(self):
if not (os.path.isdir(self.q_table_loc)):
print("Models Directory Doesn't exist...\n Creating Models/ Directory...")
os.makedirs(self.q_table_loc)
else: print("Models Directory Exists")
def get_table(self):
return self.base_model
def _import_q_table(self):
if os.path.exists(self.output_file_loc):
with open(self.output_file_loc) as json_file:
self.base_model = json.load(json_file)
print("Succesfully to imported from file")
def _export_q_table(self):
# print(self.base_model)
# if self.base_model != None:
f = open(self.output_file_loc, "w")
f.write(json.dumps(self.base_model))
f.close()
def set_table(self, table):
self.base_model = table
def update_model(self, new_model):
self.base_model = new_model
def action(self, x_distance, y_distance, velocity):
if x_distance < 140:
x_distance = int(x_distance) - (int(x_distance) % 10)
else:
x_distance = int(x_distance) - (int(x_distance) % 70)
if y_distance < 180:
y_distance = int(y_distance) - (int(y_distance) % 10)
else:
y_distance = int(y_distance) - (int(y_distance) % 60)
ident = [str(int(x_distance)), str(int(y_distance)), str(velocity)]
state = "_".join(ident)
self.move_list.append((self.last_state, self.last_action, state))
self.last_state = state
# Check to see if this state exists
if state not in self.base_model:
# self.base_model = self.base_model.append(columns)
self.base_model[state] = [0, 0]
# print('appending')
# columns = [{"id": state, "x": x_distance, "y": y_distance, "v": velocity, "a0": 0, "a1": 0}]
# columns = [{"id":'0_1_2', "x":0, "y":0, "v":0, "a0":0, "a1":0}]
# print(columns)
# self.last_action = int(random.randint(0, 1)>0.75)
# self.last_action = 0
# print(self.base_model[self.base_model.id == state])
# return self.last_action
# return 0
if random.uniform(0,1)<self.epsilon:
self.last_action = int(random.uniform(0, 1)<0.5)
# print("Picking a random action")
elif self.base_model[state][0] >= self.base_model[state][1]:
# if self.base_model[self.base_model.id == state].a0.values.tolist()[0] >= self.base_model[self.base_model.id == state].a1.values.tolist()[0]:
# print("This is the better option")
self.last_action = 0
else:
self.last_action = 1
return self.last_action
def update_scores(self, dump_base_model=False):
history = list(reversed(self.move_list))
hit_upper_pipe = float(history[0][2].split('_')[1]) > 120
reward = 1
for exp in history:
# print("working with hist")
state = exp[0]
act = exp[1]
res_state = exp[2]
# Select reward
if reward == 1 or reward == 2:
cur_reward = self.reward[1]
elif hit_upper_pipe and act:
cur_reward = self.reward[1]
hit_upper_pipe = False
else:
cur_reward = self.reward[0]
if state not in self.base_model:
self.base_model[state] = [0, 0]
# print('appending')
prev_rew = (1-self.learning_rate) * (self.base_model[state][act])
new_rew = self.learning_rate * (cur_reward + 0.7*max(self.base_model[res_state]))
self.base_model[state][act] = new_rew+prev_rew
reward += 1
self.count += 1 # increase game count
# if dump_base_model:
# self._export_q_table() # Dump q values (if game count % DUMPING_N == 0)
self.move_list = [] # clear history after updating strategies
def _generate_model(self):
# self.base_model = pd.DataFrame()
print("Intitializing an emtpy model")
# output = []
# columns = [{"id": '0_0_0', "x": 0, "y": 0, "v": 0, "a0": 0, "a1": 0}]
# ID = (x_y_v), x distance to next pipe,
# y dist to next pipe, v = current vel, reward total for action =0, reward total for action =1
# self.base_model = pd.DataFrame(
# columns=["id", "x", "y", "v", "a0", "a1"], data=columns
# )
| import json
import os
import random
class Agent(object):
def __init__(self, agent_name, b_model=None):
self.q_table_loc = "./models/"
self._verify_dir()
self.last_state = '0_0_0'
self.base_model = {self.last_state: [0, 0]}
self.agent_name = agent_name
self.learning_rate = 0.7
self.reward = {0: 1, 1: -1000}
self.output_file_loc = self.q_table_loc + agent_name + ".json"
self.move_list = []
self.last_action = 0
self.count = 0
self.epsilon = 0.001
if b_model != None:
print("Recieved data from constructor")
self.base_model = b_model
elif len(self.base_model) == 1:
self._import_q_table()
else:
print("Starting with an empty data set")
def _verify_dir(self):
if not (os.path.isdir(self.q_table_loc)):
print("Models Directory Doesn't exist...\n Creating Models/ Directory...")
os.makedirs(self.q_table_loc)
else: print("Models Directory Exists")
def get_table(self):
return self.base_model
def _import_q_table(self):
if os.path.exists(self.output_file_loc):
with open(self.output_file_loc) as json_file:
self.base_model = json.load(json_file)
print("Succesfully to imported from file")
def _export_q_table(self):
# print(self.base_model)
# if self.base_model != None:
f = open(self.output_file_loc, "w")
f.write(json.dumps(self.base_model))
f.close()
def set_table(self, table):
self.base_model = table
def update_model(self, new_model):
self.base_model = new_model
def action(self, x_distance, y_distance, velocity):
if x_distance < 140:
x_distance = int(x_distance) - (int(x_distance) % 10)
else:
x_distance = int(x_distance) - (int(x_distance) % 70)
if y_distance < 180:
y_distance = int(y_distance) - (int(y_distance) % 10)
else:
y_distance = int(y_distance) - (int(y_distance) % 60)
ident = [str(int(x_distance)), str(int(y_distance)), str(velocity)]
state = "_".join(ident)
self.move_list.append((self.last_state, self.last_action, state))
self.last_state = state
# Check to see if this state exists
if state not in self.base_model:
# self.base_model = self.base_model.append(columns)
self.base_model[state] = [0, 0]
# print('appending')
# columns = [{"id": state, "x": x_distance, "y": y_distance, "v": velocity, "a0": 0, "a1": 0}]
# columns = [{"id":'0_1_2', "x":0, "y":0, "v":0, "a0":0, "a1":0}]
# print(columns)
# self.last_action = int(random.randint(0, 1)>0.75)
# self.last_action = 0
# print(self.base_model[self.base_model.id == state])
# return self.last_action
# return 0
if random.uniform(0,1)<self.epsilon:
self.last_action = int(random.uniform(0, 1)<0.5)
# print("Picking a random action")
elif self.base_model[state][0] >= self.base_model[state][1]:
# if self.base_model[self.base_model.id == state].a0.values.tolist()[0] >= self.base_model[self.base_model.id == state].a1.values.tolist()[0]:
# print("This is the better option")
self.last_action = 0
else:
self.last_action = 1
return self.last_action
def update_scores(self, dump_base_model=False):
history = list(reversed(self.move_list))
hit_upper_pipe = float(history[0][2].split('_')[1]) > 120
reward = 1
for exp in history:
# print("working with hist")
state = exp[0]
act = exp[1]
res_state = exp[2]
# Select reward
if reward == 1 or reward == 2:
cur_reward = self.reward[1]
elif hit_upper_pipe and act:
cur_reward = self.reward[1]
hit_upper_pipe = False
else:
cur_reward = self.reward[0]
if state not in self.base_model:
self.base_model[state] = [0, 0]
# print('appending')
prev_rew = (1-self.learning_rate) * (self.base_model[state][act])
new_rew = self.learning_rate * (cur_reward + 0.7*max(self.base_model[res_state]))
self.base_model[state][act] = new_rew+prev_rew
reward += 1
self.count += 1 # increase game count
# if dump_base_model:
# self._export_q_table() # Dump q values (if game count % DUMPING_N == 0)
self.move_list = [] # clear history after updating strategies
def _generate_model(self):
# self.base_model = pd.DataFrame()
print("Intitializing an emtpy model")
# output = []
# columns = [{"id": '0_0_0', "x": 0, "y": 0, "v": 0, "a0": 0, "a1": 0}]
# ID = (x_y_v), x distance to next pipe,
# y dist to next pipe, v = current vel, reward total for action =0, reward total for action =1
# self.base_model = pd.DataFrame(
# columns=["id", "x", "y", "v", "a0", "a1"], data=columns
# ) | en | 0.357908 | # print(self.base_model) # if self.base_model != None: # Check to see if this state exists # self.base_model = self.base_model.append(columns) # print('appending') # columns = [{"id": state, "x": x_distance, "y": y_distance, "v": velocity, "a0": 0, "a1": 0}] # columns = [{"id":'0_1_2', "x":0, "y":0, "v":0, "a0":0, "a1":0}] # print(columns) # self.last_action = int(random.randint(0, 1)>0.75) # self.last_action = 0 # print(self.base_model[self.base_model.id == state]) # return self.last_action # return 0 # print("Picking a random action") # if self.base_model[self.base_model.id == state].a0.values.tolist()[0] >= self.base_model[self.base_model.id == state].a1.values.tolist()[0]: # print("This is the better option") # print("working with hist") # Select reward # print('appending') # increase game count # if dump_base_model: # self._export_q_table() # Dump q values (if game count % DUMPING_N == 0) # clear history after updating strategies # self.base_model = pd.DataFrame() # output = [] # columns = [{"id": '0_0_0', "x": 0, "y": 0, "v": 0, "a0": 0, "a1": 0}] # ID = (x_y_v), x distance to next pipe, # y dist to next pipe, v = current vel, reward total for action =0, reward total for action =1 # self.base_model = pd.DataFrame( # columns=["id", "x", "y", "v", "a0", "a1"], data=columns # ) | 2.892795 | 3 |
src/main.py | skbobade/UniversalRemote | 1 | 6621195 | ''' Main file to execute all micropython codes '''
import _thread
import machine
import utime
import mqtt
# Simple implementation for logging
logfile = 'mainlog.txt'
try:
_thread.start_new_thread(mqtt.start, ())
except Exception as exc:
with open(logfile, 'a+') as f:
print(str(exc))
f.write(str(exc))
f.write('\n')
utime.sleep(5)
machine.reset()
| ''' Main file to execute all micropython codes '''
import _thread
import machine
import utime
import mqtt
# Simple implementation for logging
logfile = 'mainlog.txt'
try:
_thread.start_new_thread(mqtt.start, ())
except Exception as exc:
with open(logfile, 'a+') as f:
print(str(exc))
f.write(str(exc))
f.write('\n')
utime.sleep(5)
machine.reset()
| en | 0.780405 | Main file to execute all micropython codes # Simple implementation for logging | 2.481964 | 2 |
language/apps.py | DiegoBrian/Flashcards | 1 | 6621196 | <filename>language/apps.py
from django.apps import AppConfig
class Language2Config(AppConfig):
name = 'language2'
| <filename>language/apps.py
from django.apps import AppConfig
class Language2Config(AppConfig):
name = 'language2'
| none | 1 | 1.375414 | 1 | |
azure/functions/decorators/eventhub.py | ShaneMicro/azure-functions-python-library | 0 | 6621197 | <gh_stars>0
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import Optional
from azure.functions.decorators.constants import EVENT_HUB_TRIGGER, EVENT_HUB
from azure.functions.decorators.core import Trigger, DataType, OutputBinding, \
Cardinality
class EventHubTrigger(Trigger):
@staticmethod
def get_binding_name() -> str:
return EVENT_HUB_TRIGGER
def __init__(self,
name: str,
connection: str,
event_hub_name: str,
data_type: Optional[DataType] = None,
cardinality: Optional[Cardinality] = None,
consumer_group: Optional[str] = None,
**kwargs):
self.connection = connection
self.event_hub_name = event_hub_name
self.cardinality = cardinality
self.consumer_group = consumer_group
super().__init__(name=name, data_type=data_type)
class EventHubOutput(OutputBinding):
@staticmethod
def get_binding_name() -> str:
return EVENT_HUB
def __init__(self,
name: str,
connection: str,
event_hub_name: str,
data_type: Optional[DataType] = None,
**kwargs):
self.connection = connection
self.event_hub_name = event_hub_name
super().__init__(name=name, data_type=data_type)
| # Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from typing import Optional
from azure.functions.decorators.constants import EVENT_HUB_TRIGGER, EVENT_HUB
from azure.functions.decorators.core import Trigger, DataType, OutputBinding, \
Cardinality
class EventHubTrigger(Trigger):
@staticmethod
def get_binding_name() -> str:
return EVENT_HUB_TRIGGER
def __init__(self,
name: str,
connection: str,
event_hub_name: str,
data_type: Optional[DataType] = None,
cardinality: Optional[Cardinality] = None,
consumer_group: Optional[str] = None,
**kwargs):
self.connection = connection
self.event_hub_name = event_hub_name
self.cardinality = cardinality
self.consumer_group = consumer_group
super().__init__(name=name, data_type=data_type)
class EventHubOutput(OutputBinding):
@staticmethod
def get_binding_name() -> str:
return EVENT_HUB
def __init__(self,
name: str,
connection: str,
event_hub_name: str,
data_type: Optional[DataType] = None,
**kwargs):
self.connection = connection
self.event_hub_name = event_hub_name
super().__init__(name=name, data_type=data_type) | en | 0.853886 | # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. | 2.057477 | 2 |
test.py | FinnishArmy/Fibinachi-Sequence | 0 | 6621198 | def fib(n):
"""
Inputs a number,
Returns None if number is negative
Otherwise return final fibinachi number in sequence
Must use recursion
"""
#If n is less than or equal to 0, return an error.
if n < 0:
return None
#If n is equal to 0, return 0.
if n == 0:
return 0
#If n is equal to 1, return 1.
elif n == 1:
return 1
#If n is equal to 2, return 1.
elif n == 2:
return 1
#If n is something else, return the fibinachi number.
else:
return(fib(n-1) + fib(n-2))
#Tests if 1 equals 1.
def test1():
if fib(1) == 1:
print('1 Does give 1')
else:
print('That is not correct')
test1()
#Tests if 0 equals 0.
def test0():
if fib(0) == 0:
print('0 Does give 0')
else:
print('That is not correct')
test0()
#Tests if a negative number gives None.
def testNegative():
if fib(-15) == None:
print('-15 Is Nothing')
else:
print('-15 Should be nothing')
testNegative()
#Test if 8 is equal to 21.
def test8():
if fib(8) == 21:
print('8 Does give 21')
else:
print('That is not correct')
test8()
| def fib(n):
"""
Inputs a number,
Returns None if number is negative
Otherwise return final fibinachi number in sequence
Must use recursion
"""
#If n is less than or equal to 0, return an error.
if n < 0:
return None
#If n is equal to 0, return 0.
if n == 0:
return 0
#If n is equal to 1, return 1.
elif n == 1:
return 1
#If n is equal to 2, return 1.
elif n == 2:
return 1
#If n is something else, return the fibinachi number.
else:
return(fib(n-1) + fib(n-2))
#Tests if 1 equals 1.
def test1():
if fib(1) == 1:
print('1 Does give 1')
else:
print('That is not correct')
test1()
#Tests if 0 equals 0.
def test0():
if fib(0) == 0:
print('0 Does give 0')
else:
print('That is not correct')
test0()
#Tests if a negative number gives None.
def testNegative():
if fib(-15) == None:
print('-15 Is Nothing')
else:
print('-15 Should be nothing')
testNegative()
#Test if 8 is equal to 21.
def test8():
if fib(8) == 21:
print('8 Does give 21')
else:
print('That is not correct')
test8()
| en | 0.678538 | Inputs a number, Returns None if number is negative Otherwise return final fibinachi number in sequence Must use recursion #If n is less than or equal to 0, return an error. #If n is equal to 0, return 0. #If n is equal to 1, return 1. #If n is equal to 2, return 1. #If n is something else, return the fibinachi number. #Tests if 1 equals 1. #Tests if 0 equals 0. #Tests if a negative number gives None. #Test if 8 is equal to 21. | 4.116222 | 4 |
katas/kyu_4/roman_numerals_decoder.py | the-zebulan/CodeWars | 40 | 6621199 | from itertools import groupby, izip_longest
ROMAN = {'M': 1000, 'D': 500, 'C': 100, 'L': 50, 'X': 10, 'V': 5, 'I': 1}
def solution(roman):
pairs = [sum(g) for _, g in groupby(ROMAN[a] for a in roman)]
return sum(a + b if a > b else b - a
for a, b in izip_longest(pairs[::2], pairs[1::2], fillvalue=0))
| from itertools import groupby, izip_longest
ROMAN = {'M': 1000, 'D': 500, 'C': 100, 'L': 50, 'X': 10, 'V': 5, 'I': 1}
def solution(roman):
pairs = [sum(g) for _, g in groupby(ROMAN[a] for a in roman)]
return sum(a + b if a > b else b - a
for a, b in izip_longest(pairs[::2], pairs[1::2], fillvalue=0))
| none | 1 | 3.03968 | 3 | |
src/user_utils.py | ECS-OH-Bot/OH-Bot | 6 | 6621200 | """
Utility functions for working with user and member instances
"""
from typing import Optional
from discord.ext import commands
from discord import User, Member
from constants import GetConstants
from errors import CommandPermissionError
async def userToMember(user: User, bot: commands.Bot) -> Optional[Member]:
"""
Resolves a user into a member of the guild
When the bot receives a direct message the author of the message is a User
To get information about this user as a member of a guild, a member instance is needed
:param user: The user instance
:param bot: An instance of the bot
:return: The member instance, or None if the user is not a member of the guild
"""
guild = bot.get_guild(GetConstants().GUILD_ID)
if guild is None:
guild = await bot.fetch_guild(GetConstants().GUILD_ID)
member = guild.get_member(user.id)
if member is None:
member = await guild.fetch_member(user.id)
return member
async def membership_check(context: commands.Context, role_id: str, role_name: str, throw_exception: bool = True) -> bool:
"""
Checks if the author of the message in question belongs to the parameterized role
:param throw_exception: If true, will throw exception if user is not a member of the specified role
:param context: Object containing metadata about the most recent message sent
:param role_id: The UUID of the role for which we are checking for membership in
:param role_name: The human-readable name of the role for which we are checking for membership in
:return: True if user is belongs to role, False otherwise
"""
roles = None
if isinstance(context.author, User):
# If the message is a DM, we need to look up the authors roles in the server
member = await userToMember(context.author, context.bot)
if member is None:
return False
roles = member.roles
else:
# Otherwise, the message came from within the server. The roles can be directly extracted from the context
roles = context.author.roles
if not any(role.id == role_id for role in roles):
if throw_exception:
raise CommandPermissionError(f"User is not an {role_name}")
else:
return False
return True
async def isAdmin(context: commands.Context) -> bool:
"""
Returns true if context.author has the Admin role, else raises CommandPermissionError
This is used with the @command.check decorator to facilitate authentication for elevated commands
"""
return await membership_check(context, GetConstants().ADMIN_ROLE_ID, GetConstants().ADMIN)
async def isInstructor(context: commands.Context) -> bool:
"""
Returns true if context.author has the Instructor role, else raises CommandPermissionError
This is used with the @command.check decorator to facilitate authentication for elevated commands
"""
return await membership_check(context, GetConstants().INSTRUCTOR_ROLE_ID, GetConstants().INSTRUCTOR)
async def isAtLeastInstructor(context: commands.Context) -> bool:
"""
Returns true if context.author is either an admin or an instructor and False otherwise
:param context:
:return:
"""
return await isInstructor(context) or await isAdmin(context)
async def isStudent(context: commands.Context) -> bool:
"""
Returns true if context.author has the Student role, false otherwise
"""
return await membership_check(context, GetConstants().STUDENT_ROLE_ID, GetConstants().STUDENT, throw_exception=False)
| """
Utility functions for working with user and member instances
"""
from typing import Optional
from discord.ext import commands
from discord import User, Member
from constants import GetConstants
from errors import CommandPermissionError
async def userToMember(user: User, bot: commands.Bot) -> Optional[Member]:
"""
Resolves a user into a member of the guild
When the bot receives a direct message the author of the message is a User
To get information about this user as a member of a guild, a member instance is needed
:param user: The user instance
:param bot: An instance of the bot
:return: The member instance, or None if the user is not a member of the guild
"""
guild = bot.get_guild(GetConstants().GUILD_ID)
if guild is None:
guild = await bot.fetch_guild(GetConstants().GUILD_ID)
member = guild.get_member(user.id)
if member is None:
member = await guild.fetch_member(user.id)
return member
async def membership_check(context: commands.Context, role_id: str, role_name: str, throw_exception: bool = True) -> bool:
"""
Checks if the author of the message in question belongs to the parameterized role
:param throw_exception: If true, will throw exception if user is not a member of the specified role
:param context: Object containing metadata about the most recent message sent
:param role_id: The UUID of the role for which we are checking for membership in
:param role_name: The human-readable name of the role for which we are checking for membership in
:return: True if user is belongs to role, False otherwise
"""
roles = None
if isinstance(context.author, User):
# If the message is a DM, we need to look up the authors roles in the server
member = await userToMember(context.author, context.bot)
if member is None:
return False
roles = member.roles
else:
# Otherwise, the message came from within the server. The roles can be directly extracted from the context
roles = context.author.roles
if not any(role.id == role_id for role in roles):
if throw_exception:
raise CommandPermissionError(f"User is not an {role_name}")
else:
return False
return True
async def isAdmin(context: commands.Context) -> bool:
"""
Returns true if context.author has the Admin role, else raises CommandPermissionError
This is used with the @command.check decorator to facilitate authentication for elevated commands
"""
return await membership_check(context, GetConstants().ADMIN_ROLE_ID, GetConstants().ADMIN)
async def isInstructor(context: commands.Context) -> bool:
"""
Returns true if context.author has the Instructor role, else raises CommandPermissionError
This is used with the @command.check decorator to facilitate authentication for elevated commands
"""
return await membership_check(context, GetConstants().INSTRUCTOR_ROLE_ID, GetConstants().INSTRUCTOR)
async def isAtLeastInstructor(context: commands.Context) -> bool:
"""
Returns true if context.author is either an admin or an instructor and False otherwise
:param context:
:return:
"""
return await isInstructor(context) or await isAdmin(context)
async def isStudent(context: commands.Context) -> bool:
"""
Returns true if context.author has the Student role, false otherwise
"""
return await membership_check(context, GetConstants().STUDENT_ROLE_ID, GetConstants().STUDENT, throw_exception=False)
| en | 0.865091 | Utility functions for working with user and member instances Resolves a user into a member of the guild When the bot receives a direct message the author of the message is a User To get information about this user as a member of a guild, a member instance is needed :param user: The user instance :param bot: An instance of the bot :return: The member instance, or None if the user is not a member of the guild Checks if the author of the message in question belongs to the parameterized role :param throw_exception: If true, will throw exception if user is not a member of the specified role :param context: Object containing metadata about the most recent message sent :param role_id: The UUID of the role for which we are checking for membership in :param role_name: The human-readable name of the role for which we are checking for membership in :return: True if user is belongs to role, False otherwise # If the message is a DM, we need to look up the authors roles in the server # Otherwise, the message came from within the server. The roles can be directly extracted from the context Returns true if context.author has the Admin role, else raises CommandPermissionError This is used with the @command.check decorator to facilitate authentication for elevated commands Returns true if context.author has the Instructor role, else raises CommandPermissionError This is used with the @command.check decorator to facilitate authentication for elevated commands Returns true if context.author is either an admin or an instructor and False otherwise :param context: :return: Returns true if context.author has the Student role, false otherwise | 3.225272 | 3 |
tests/test_repozewho.py | passy/glashammer-rdrei | 1 | 6621201 | <reponame>passy/glashammer-rdrei
from unittest import TestCase
from glashammer import make_app
from glashammer.utils import Response
from werkzeug.test import Client
from repoze.who.middleware import PluggableAuthenticationMiddleware
from repoze.who.interfaces import IIdentifier
from repoze.who.interfaces import IChallenger
from repoze.who.plugins.basicauth import BasicAuthPlugin
from repoze.who.plugins.auth_tkt import AuthTktCookiePlugin
from repoze.who.plugins.cookie import InsecureCookiePlugin
from repoze.who.plugins.form import FormPlugin
from repoze.who.plugins.htpasswd import HTPasswdPlugin
from StringIO import StringIO
io = StringIO()
salt = 'aa'
for name, password in [ ('admin', '<PASSWORD>'), ('<PASSWORD>', '<PASSWORD>') ]:
io.write('%s:%s\n' % (name, password))
io.seek(0)
def cleartext_check(password, hashed):
return password == hashed
htpasswd = HTPasswdPlugin(io, cleartext_check)
basicauth = BasicAuthPlugin('repoze.who')
auth_tkt = AuthTktCookiePlugin('secret', 'auth_tkt')
form = FormPlugin('__do_login', rememberer_name='auth_tkt')
form.classifications = { IIdentifier:['browser'],
IChallenger:['browser'] } # only for browser
identifiers = [('form', form),('auth_tkt',auth_tkt),('basicauth',basicauth)]
authenticators = [('htpasswd', htpasswd)]
challengers = [('form',form), ('basicauth',basicauth)]
mdproviders = []
from repoze.who.classifiers import default_request_classifier
from repoze.who.classifiers import default_challenge_decider
log_stream = None
import logging
import os, sys
log_stream = sys.stdout
kw=dict(
identifiers=identifiers,
authenticators=authenticators,
challengers=challengers,
mdproviders=mdproviders,
classifier=default_request_classifier,
challenge_decider=default_challenge_decider,
)
def _authd_view(req):
if not req.environ.get('repoze.who.identity'):
return Response(status=401)
else:
return Response('ok')
def _setup(app):
from glashammer.bundles.contrib.auth.repozewho import setup_repozewho
app.add_setup(setup_repozewho, **kw)
app.add_url('/', 'home', _authd_view)
class TestRepozeWho(TestCase):
def setUp(self):
self.app = make_app(_setup)
self.c = Client(self.app)
def get(self, url='/'):
appiter, status, headers = self.c.open(url)
return ''.join(appiter)
def post(self, login, password, do_login=True, url='/'):
if do_login:
url = url + '?__do_login=true'
appiter, status, headers = self.c.post(url,
data=dict(login=login, password=password))
return appiter, status, headers
def test_starts(self):
assert '<form' in self.get()
def test_good_login(self):
appiter, status, headers = self.post('admin', 'admin')
assert status.startswith('302')
assert self.get() == 'ok'
def test_bad_login(self):
appiter, status, headers = self.post('a', 'a')
assert status.startswith('302')
assert self.get() != 'ok'
def test_nocookie_client(self):
self.c = Client(self.app, use_cookies=False)
appiter, status, headers = self.post('admin', 'admin')
assert status.startswith('302')
assert self.get() != 'ok'
| from unittest import TestCase
from glashammer import make_app
from glashammer.utils import Response
from werkzeug.test import Client
from repoze.who.middleware import PluggableAuthenticationMiddleware
from repoze.who.interfaces import IIdentifier
from repoze.who.interfaces import IChallenger
from repoze.who.plugins.basicauth import BasicAuthPlugin
from repoze.who.plugins.auth_tkt import AuthTktCookiePlugin
from repoze.who.plugins.cookie import InsecureCookiePlugin
from repoze.who.plugins.form import FormPlugin
from repoze.who.plugins.htpasswd import HTPasswdPlugin
from StringIO import StringIO
io = StringIO()
salt = 'aa'
for name, password in [ ('admin', '<PASSWORD>'), ('<PASSWORD>', '<PASSWORD>') ]:
io.write('%s:%s\n' % (name, password))
io.seek(0)
def cleartext_check(password, hashed):
return password == hashed
htpasswd = HTPasswdPlugin(io, cleartext_check)
basicauth = BasicAuthPlugin('repoze.who')
auth_tkt = AuthTktCookiePlugin('secret', 'auth_tkt')
form = FormPlugin('__do_login', rememberer_name='auth_tkt')
form.classifications = { IIdentifier:['browser'],
IChallenger:['browser'] } # only for browser
identifiers = [('form', form),('auth_tkt',auth_tkt),('basicauth',basicauth)]
authenticators = [('htpasswd', htpasswd)]
challengers = [('form',form), ('basicauth',basicauth)]
mdproviders = []
from repoze.who.classifiers import default_request_classifier
from repoze.who.classifiers import default_challenge_decider
log_stream = None
import logging
import os, sys
log_stream = sys.stdout
kw=dict(
identifiers=identifiers,
authenticators=authenticators,
challengers=challengers,
mdproviders=mdproviders,
classifier=default_request_classifier,
challenge_decider=default_challenge_decider,
)
def _authd_view(req):
if not req.environ.get('repoze.who.identity'):
return Response(status=401)
else:
return Response('ok')
def _setup(app):
from glashammer.bundles.contrib.auth.repozewho import setup_repozewho
app.add_setup(setup_repozewho, **kw)
app.add_url('/', 'home', _authd_view)
class TestRepozeWho(TestCase):
def setUp(self):
self.app = make_app(_setup)
self.c = Client(self.app)
def get(self, url='/'):
appiter, status, headers = self.c.open(url)
return ''.join(appiter)
def post(self, login, password, do_login=True, url='/'):
if do_login:
url = url + '?__do_login=true'
appiter, status, headers = self.c.post(url,
data=dict(login=login, password=password))
return appiter, status, headers
def test_starts(self):
assert '<form' in self.get()
def test_good_login(self):
appiter, status, headers = self.post('admin', 'admin')
assert status.startswith('302')
assert self.get() == 'ok'
def test_bad_login(self):
appiter, status, headers = self.post('a', 'a')
assert status.startswith('302')
assert self.get() != 'ok'
def test_nocookie_client(self):
self.c = Client(self.app, use_cookies=False)
appiter, status, headers = self.post('admin', 'admin')
assert status.startswith('302')
assert self.get() != 'ok' | en | 0.498159 | # only for browser | 1.99368 | 2 |
seeq/addons/clustering/app/historicalBenchmarking/__init__.py | eparsonnet93/seeq-clustering | 3 | 6621202 | from .core import *
from .cluster import *
from .contour import * | from .core import *
from .cluster import *
from .contour import * | none | 1 | 0.930786 | 1 | |
.conan/conanfile.py | Viatorus/compile-time-printer | 35 | 6621203 | from conans import ConanFile, tools
from conans.errors import CalledProcessErrorWithStderr
class ConanPackage(ConanFile):
name = 'compile-time-printer'
license = 'BSL-1.0'
url = 'https://github.com/Viatorus/compile-time-printer'
description = 'The C++ files for the compile-time printer.'
exports_sources = '../include/**'
no_copy_source = True
def set_version(self):
git = tools.Git(folder=self.recipe_folder)
try:
self.version = git.run('describe --tags --abbrev=0')
except CalledProcessErrorWithStderr:
self.version = '0.0.0'
def package(self):
self.copy('*.hpp', dst='include')
def package_id(self):
self.info.header_only()
def package_info(self):
self.cpp_info.cxxflags = ['-fpermissive']
| from conans import ConanFile, tools
from conans.errors import CalledProcessErrorWithStderr
class ConanPackage(ConanFile):
name = 'compile-time-printer'
license = 'BSL-1.0'
url = 'https://github.com/Viatorus/compile-time-printer'
description = 'The C++ files for the compile-time printer.'
exports_sources = '../include/**'
no_copy_source = True
def set_version(self):
git = tools.Git(folder=self.recipe_folder)
try:
self.version = git.run('describe --tags --abbrev=0')
except CalledProcessErrorWithStderr:
self.version = '0.0.0'
def package(self):
self.copy('*.hpp', dst='include')
def package_id(self):
self.info.header_only()
def package_info(self):
self.cpp_info.cxxflags = ['-fpermissive']
| none | 1 | 2.01837 | 2 | |
trace_for_guess/calculate_fsdscl.py | wtraylor/trace21ka_for_lpjguess | 0 | 6621204 | <gh_stars>0
# SPDX-FileCopyrightText: 2021 <NAME> <<EMAIL>>
#
# SPDX-License-Identifier: MIT
import glob
import os
import shutil
import subprocess
import xarray as xr
from termcolor import cprint
from trace_for_guess.skip import skip
def calculate_fsdscl(cldtot_file, fsds_file, fsdsc_file, out_file):
"""Re-construct the CCSM3 FSDSCL variable from CLDTOT, FSDS, and FSDSC.
- FSDS: Downwelling solar flux at surface in W/m².
- CLDTOT: Vertically-integrated total cloud fraction. This is equivalent to
the cld variable in the CRU dataset.
- FSDSC: Incoming radiation with a completely clear sky (zero cloud cover).
- FSDSCL: Incoming radiation with a completely overcast sky (100% cloud
cover).
Args:
cldtot_file: Path to the CLDTOT input file.
fsds_file: Path to the FSDS input file.
fsdsc_file: Path to the FSDSC input file.
out_file: Path to the FSDSCL output file (to be created).
Returns:
The path to the output file (=`out_file`).
Raises:
FileNotFoundError: One of the 3 input files is missing.
"""
if not os.path.isfile(cldtot_file):
raise FileNotFoundError("Could not find CLDTOT file: '%s'" %
cldtot_file)
if not os.path.isfile(fsds_file):
raise FileNotFoundError("Could not find FSDS file: '%s'" % fsds_file)
if not os.path.isfile(fsdsc_file):
raise FileNotFoundError("Could not find FSDSC file: '%s'" % fsdsc_file)
# TODO: check for commands
if skip([cldtot_file, fsds_file, fsdsc_file], out_file):
return out_file
cprint(f"Generating FSDSCL file: '{out_file}'", 'yellow')
try:
# Merge all variables (FSDS, FSDSC, CLDTOT) into one file, and then
# perform the operation in it.
subprocess.run(['ncks', '--append', fsds_file, out_file], check=True)
subprocess.run(['ncks', '--append', fsdsc_file, out_file], check=True)
subprocess.run(['ncks', '--append', cldtot_file, out_file], check=True)
script = 'FSDSCL = (FSDS - FSDSC * (1 - CLDTOT)) / CLDTOT'
subprocess.run(['ncap2', '--append', '--script', script, out_file],
check=True)
except Exception:
if os.path.isfile(out_file):
cprint(f"Removing file '{out_file}'.", 'red')
os.remove(out_file)
# Remove temporary file created by ncks.
for g in glob(f'{out_file}.pid*.ncks.tmp'):
cprint(f"Removing file '{g}'.", 'red')
os.remove(g)
raise
assert (os.path.isfile(out_file))
cprint(f"Successfully created '{out_file}'.", 'green')
return out_file
| # SPDX-FileCopyrightText: 2021 <NAME> <<EMAIL>>
#
# SPDX-License-Identifier: MIT
import glob
import os
import shutil
import subprocess
import xarray as xr
from termcolor import cprint
from trace_for_guess.skip import skip
def calculate_fsdscl(cldtot_file, fsds_file, fsdsc_file, out_file):
"""Re-construct the CCSM3 FSDSCL variable from CLDTOT, FSDS, and FSDSC.
- FSDS: Downwelling solar flux at surface in W/m².
- CLDTOT: Vertically-integrated total cloud fraction. This is equivalent to
the cld variable in the CRU dataset.
- FSDSC: Incoming radiation with a completely clear sky (zero cloud cover).
- FSDSCL: Incoming radiation with a completely overcast sky (100% cloud
cover).
Args:
cldtot_file: Path to the CLDTOT input file.
fsds_file: Path to the FSDS input file.
fsdsc_file: Path to the FSDSC input file.
out_file: Path to the FSDSCL output file (to be created).
Returns:
The path to the output file (=`out_file`).
Raises:
FileNotFoundError: One of the 3 input files is missing.
"""
if not os.path.isfile(cldtot_file):
raise FileNotFoundError("Could not find CLDTOT file: '%s'" %
cldtot_file)
if not os.path.isfile(fsds_file):
raise FileNotFoundError("Could not find FSDS file: '%s'" % fsds_file)
if not os.path.isfile(fsdsc_file):
raise FileNotFoundError("Could not find FSDSC file: '%s'" % fsdsc_file)
# TODO: check for commands
if skip([cldtot_file, fsds_file, fsdsc_file], out_file):
return out_file
cprint(f"Generating FSDSCL file: '{out_file}'", 'yellow')
try:
# Merge all variables (FSDS, FSDSC, CLDTOT) into one file, and then
# perform the operation in it.
subprocess.run(['ncks', '--append', fsds_file, out_file], check=True)
subprocess.run(['ncks', '--append', fsdsc_file, out_file], check=True)
subprocess.run(['ncks', '--append', cldtot_file, out_file], check=True)
script = 'FSDSCL = (FSDS - FSDSC * (1 - CLDTOT)) / CLDTOT'
subprocess.run(['ncap2', '--append', '--script', script, out_file],
check=True)
except Exception:
if os.path.isfile(out_file):
cprint(f"Removing file '{out_file}'.", 'red')
os.remove(out_file)
# Remove temporary file created by ncks.
for g in glob(f'{out_file}.pid*.ncks.tmp'):
cprint(f"Removing file '{g}'.", 'red')
os.remove(g)
raise
assert (os.path.isfile(out_file))
cprint(f"Successfully created '{out_file}'.", 'green')
return out_file | en | 0.79733 | # SPDX-FileCopyrightText: 2021 <NAME> <<EMAIL>> # # SPDX-License-Identifier: MIT Re-construct the CCSM3 FSDSCL variable from CLDTOT, FSDS, and FSDSC. - FSDS: Downwelling solar flux at surface in W/m². - CLDTOT: Vertically-integrated total cloud fraction. This is equivalent to the cld variable in the CRU dataset. - FSDSC: Incoming radiation with a completely clear sky (zero cloud cover). - FSDSCL: Incoming radiation with a completely overcast sky (100% cloud cover). Args: cldtot_file: Path to the CLDTOT input file. fsds_file: Path to the FSDS input file. fsdsc_file: Path to the FSDSC input file. out_file: Path to the FSDSCL output file (to be created). Returns: The path to the output file (=`out_file`). Raises: FileNotFoundError: One of the 3 input files is missing. # TODO: check for commands # Merge all variables (FSDS, FSDSC, CLDTOT) into one file, and then # perform the operation in it. # Remove temporary file created by ncks. | 2.358957 | 2 |
parse.py | tbicr/OfflineMap | 195 | 6621205 | <filename>parse.py
import os
import urllib2
import math
import base64
import json
from operator import attrgetter
from sys import maxint as MAX_INT
from multiprocessing.pool import ThreadPool as Pool
def get_length(point1, point2):
    """Return the Euclidean distance between two points via their x/y aliases."""
    return math.hypot(point1.x - point2.x, point1.y - point2.y)
def get_angle(angle_point, end_point1, end_point2):
    """Return the angle (radians) at angle_point of the triangle it forms
    with end_point1 and end_point2, via the law of cosines.

    Returns 0 when angle_point coincides with either end point (degenerate
    triangle).
    """
    side_a = get_length(angle_point, end_point1)
    side_b = get_length(angle_point, end_point2)
    side_c = get_length(end_point1, end_point2)
    if side_a == 0 or side_b == 0:
        return 0
    cos_value = (side_a ** 2 + side_b ** 2 - side_c ** 2) / (2 * side_a * side_b)
    # Clamp into acos's domain: rounding error can push the ratio past +/-1.
    if cos_value > 1:
        cos_value = 1
    elif cos_value < -1:
        cos_value = -1
    return math.acos(cos_value)
def get_next_polygon_point(angle_point, first_end_point, points):
    """Pick the candidate from *points* forming the widest angle at
    *angle_point* relative to *first_end_point*.

    Ties on the angle are broken in favour of the candidate farther from
    *angle_point*.  Falls back to *angle_point* itself if nothing beats the
    zero-angle baseline.
    """
    best_point = angle_point
    best_angle = 0
    for candidate in points:
        angle = get_angle(angle_point, first_end_point, candidate)
        if angle < best_angle:
            continue
        # Wider angle wins outright; on an exact tie keep the farther point.
        if angle > best_angle or \
                get_length(angle_point, candidate) > get_length(angle_point, best_point):
            best_angle = angle
            best_point = candidate
    return best_point
def get_polar_polygon_from_points(points):
    """Build an enclosing polygon around *points* by a gift-wrapping style
    walk: repeatedly pick the point forming the widest angle with the
    previous edge until the walk returns to the start.

    Returns the polygon vertices in walk order.
    """
    # Anchor the walk at the point with the largest longitude.
    top_point = max(points, key=attrgetter('lng'))
    polygon_points = [top_point]
    while True:
        angle_point = polygon_points[-1]
        # On the first step there is no previous edge yet; use a synthetic
        # reference point one degree of latitude away from the start.
        first_end_point = polygon_points[-2] if len(polygon_points) > 1 else angle_point.clone(delta_lat=1)
        next_point = get_next_polygon_point(angle_point, first_end_point, points)
        if top_point != next_point:
            polygon_points.append(next_point)
        else:
            # Wrapped back around to the starting vertex: polygon is closed.
            break
    return polygon_points
def polar_to_int(lat, lng, zoom):
    """Convert a geographic coordinate to integer slippy-map tile indices
    at the given zoom level (Web Mercator: x grows east, y grows from the
    top of the projected map).
    """
    # Longitude maps linearly onto 2**zoom tile columns starting at -180.
    x = int(2 ** zoom * (180 + lng) / 360)
    # Clamp sin(lat) away from +/-1 so the logarithm below stays finite
    # near the poles.
    d = min(max(math.sin(lat * math.pi / 180), -0.9999), 0.9999)
    # Mercator row: log((1+d)/(1-d)) is the identity 2*artanh(sin(lat)),
    # rescaled so the full projected extent spans 2**zoom rows.
    y = int(2 ** zoom * (2 * math.pi - math.log((1 + d) / (1 - d))) / (4 * math.pi))
    return Tile(x, y, zoom)
def polar_to_int_polygon(polar_polygon, zoom):
    """Project every polar vertex of the polygon into tile coordinates."""
    tiles = []
    for vertex in polar_polygon:
        tiles.append(polar_to_int(vertex.lat, vertex.lng, zoom))
    return tiles
def get_int_polygon_rectangle(int_polygon):
    """Return the (top-left, bottom-right) Tile pair bounding the polygon."""
    zoom = int_polygon[0].zoom
    min_x = min(tile.x for tile in int_polygon)
    min_y = min(tile.y for tile in int_polygon)
    max_x = max(tile.x for tile in int_polygon)
    max_y = max(tile.y for tile in int_polygon)
    return Tile(min_x, min_y, zoom), Tile(max_x, max_y, zoom)
def check_point_in_int_polygon(tile, int_polygon, imprecision=0.1):
    """True if *tile* lies inside the tile polygon (or on one of its vertices).

    Uses the angle-summation test: for an interior point the angles
    subtended by the polygon edges add up to 2*pi, within *imprecision*.
    """
    if any(vertex == tile for vertex in int_polygon):
        return True
    angle_sum = 0
    # Pair each vertex with its successor, wrapping the last back to the first.
    for edge_start, edge_end in zip(int_polygon, int_polygon[1:] + int_polygon[:1]):
        angle_sum += get_angle(tile, edge_start, edge_end)
    return angle_sum >= 2 * math.pi - imprecision
def create_dirs(save_file_path):
    """Ensure the directory that will hold *save_file_path* exists.

    Safe when the path has no directory component, and free of the
    check-then-create race: several downloader threads may call this for the
    same zoom directory at once, so we attempt the creation and only re-raise
    if the directory still does not exist afterwards.
    """
    dir_name = os.path.dirname(save_file_path)
    if not dir_name:
        return
    try:
        os.makedirs(dir_name)
    except OSError:
        # Another thread may have created it between our call and the error;
        # only propagate if the directory is genuinely absent.
        if not os.path.isdir(dir_name):
            raise
def download_tile(tile, url_template, save_file_path_template):
    """Fetch one map tile over HTTP and write it to disk.

    *url_template* and *save_file_path_template* are %-style templates
    filled in with the tile's x/y/zoom via Tile.render.
    """
    url = tile.render(url_template)
    save_file_path = tile.render(save_file_path_template)
    create_dirs(save_file_path)
    response = urllib2.urlopen(url)
    try:
        data = response.read()
    finally:
        # urlopen handles are not context managers in Python 2; close
        # explicitly so the downloader threads do not leak sockets.
        response.close()
    with open(save_file_path, 'wb') as tile_file:
        tile_file.write(data)
def check_and_download_tile(tile, url_template, save_file_path_template, int_polygon):
    """Download *tile* only when it falls inside the tile-space polygon."""
    if check_point_in_int_polygon(tile, int_polygon):
        download_tile(tile, url_template, save_file_path_template)
def download_tiles_in_polar_polygon(polar_polygon, zooms, url_template, save_file_path_template, threads_count=10):
    """Download every tile inside *polar_polygon* for each zoom level.

    Per zoom: project the polygon into tile coordinates, scan its bounding
    rectangle, and hand each candidate tile to a thread pool, which filters
    out tiles outside the polygon and downloads the rest concurrently.
    """
    for zoom in zooms:
        int_polygon = polar_to_int_polygon(polar_polygon, zoom)
        point_top_left, point_bottom_right = get_int_polygon_rectangle(int_polygon)
        # Fresh pool per zoom level; join() below blocks until this level
        # finishes before the next zoom starts.
        threads_pull = Pool(threads_count)
        for x in xrange(point_top_left.x, point_bottom_right.x + 1):
            for y in xrange(point_top_left.y, point_bottom_right.y + 1):
                # NOTE(review): the AsyncResult is discarded, so exceptions
                # raised inside check_and_download_tile are silently lost and
                # failed downloads go unreported.
                threads_pull.apply_async(check_and_download_tile,
                                         [Tile(x, y, zoom), url_template, save_file_path_template, int_polygon])
        threads_pull.close()
        threads_pull.join()
def get_images_path_list(root_path, from_root_path='', filter=''):
    """Yield file paths under *root_path*, relative to *from_root_path*,
    whose path ends with *filter*.

    The default empty *filter* matches every file.  (The parameter shadows
    the builtin ``filter`` but is kept for caller compatibility.)
    """
    for path, dirs, files in os.walk(root_path):
        for file_name in files:
            file_path = os.path.relpath(os.path.join(path, file_name), from_root_path)
            # endswith('') is always True, so an empty filter yields every
            # file; the previous slice comparison wrongly matched nothing
            # when filter was ''.
            if file_path.endswith(filter):
                yield file_path
def get_zoom_and_coord_from_path(path):
    """Build a '<zoom>_<x>_<y>' key from a tile path like 'cache/15/1_2.png'.

    The zoom is the parent directory name; the coordinate is the file name
    up to its first dot.
    """
    directory, filename = os.path.split(path)
    zoom = os.path.basename(directory)
    coord = filename.split('.')[0]
    return '%s_%s' % (zoom, coord)
def image_to_base64(path):
    """Read the file at *path* and return its base64-encoded contents."""
    with open(path, 'rb') as image_file:
        raw = image_file.read()
    return base64.b64encode(raw)
def save_images_path_list(file_list, save_file):
    """Write a JSON map of '<zoom>_<x>_<y>' keys to tile file paths."""
    metadata_file_map = {}
    for image_path in file_list:
        metadata_file_map[get_zoom_and_coord_from_path(image_path)] = image_path
    with open(save_file, 'wb') as out:
        json.dump(metadata_file_map, out)
def save_images_base64_list(file_list, save_file):
    """Write a JSON map of '<zoom>_<x>_<y>' keys to base64-encoded images."""
    metadata_file_map = {}
    for image_path in file_list:
        metadata_file_map[get_zoom_and_coord_from_path(image_path)] = image_to_base64(image_path)
    with open(save_file, 'wb') as out:
        json.dump(metadata_file_map, out)
class Point(object):
    """A geographic point (latitude/longitude) with value equality and
    hashing, plus Cartesian x/y aliases used by the geometry helpers.

    Inherits from object explicitly: ``class Point():`` is an old-style
    class under Python 2, where descriptor (property) and hashing semantics
    differ from new-style classes.
    """
    def __init__(self, lat, lng):
        self.lat = lat
        self.lng = lng

    def __eq__(self, other):
        return self.lat == other.lat and self.lng == other.lng

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        # Hash the same fields __eq__ compares so equal points collide.
        return hash((self.lat, self.lng))

    def __repr__(self):
        return '{lat: %(lat)s, lng: %(lng)s}' % {'lat': self.lat, 'lng': self.lng}

    def clone(self, delta_lat=0, delta_lng=0):
        """Return a new Point offset by the given lat/lng deltas."""
        return Point(self.lat + delta_lat, self.lng + delta_lng)

    @property
    def x(self):
        # Cartesian alias: x is the longitude axis.
        return self.lng

    @property
    def y(self):
        # Cartesian alias: y is the latitude axis.
        return self.lat
class Tile(object):
    """A tile address: integer x/y column/row at a given zoom level, with
    value equality and hashing.

    Inherits from object explicitly: ``class Tile():`` is an old-style class
    under Python 2, where hashing/descriptor semantics differ from
    new-style classes.
    """
    def __init__(self, x, y, zoom):
        self.x = x
        self.y = y
        self.zoom = zoom

    def __eq__(self, other):
        return self.x == other.x and self.y == other.y and self.zoom == other.zoom

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        # Hash the same fields __eq__ compares so equal tiles collide.
        return hash((self.x, self.y, self.zoom))

    def __repr__(self):
        return self.render('{x: %(x)s, y: %(y)s, zoom: %(zoom)s}')

    def render(self, template):
        """Interpolate x/y/zoom into a %-style *template* (URL or file path)."""
        return template % {'x': self.x, 'y': self.y, 'zoom': self.zoom}
if __name__ == '__main__':
    from fixtures import all_points
    # Tile-server URL pattern; %(x)s / %(y)s / %(zoom)s are filled by Tile.render.
    url_template = 'http://mt0.googleapis.com/vt?src=apiv3&x=%(x)s&y=%(y)s&z=%(zoom)s'
    project_path = 'site'
    save_files_path = project_path + '/cache'
    save_file_path_template = save_files_path + '/%(zoom)s/%(x)s_%(y)s.png'
    # Zoom levels 0..15 inclusive (xrange => this script targets Python 2).
    zooms = xrange(15 + 1)
    points = [Point(lat, lng) for lat, lng in all_points]
    # Hull of the fixture points; only tiles inside it are downloaded.
    polar_polygon = get_polar_polygon_from_points(points)
    download_tiles_in_polar_polygon(polar_polygon, zooms, url_template, save_file_path_template)
    # JSON index of tile paths, relative to the project root.
    images_path_list = save_files_path + '/images.json'
    file_list = list(get_images_path_list(save_files_path, project_path, '.png'))
    save_images_path_list(file_list, images_path_list)
    # Same index, but with each image embedded as base64.
    images_base64_list = save_files_path + '/imagesBase64.json'
    file_list = list(get_images_path_list(save_files_path, '', '.png'))
    save_images_base64_list(file_list, images_base64_list)
| <filename>parse.py
import os
import urllib2
import math
import base64
import json
from operator import attrgetter
from sys import maxint as MAX_INT
from multiprocessing.pool import ThreadPool as Pool
def get_length(point1, point2):
    """Euclidean distance between two objects exposing .x and .y attributes."""
    delta_x = point1.x - point2.x
    delta_y = point1.y - point2.y
    return math.sqrt(delta_x ** 2 + delta_y ** 2)
def get_angle(angle_point, end_point1, end_point2):
    """Angle in radians at *angle_point* of the triangle formed with the two
    end points; 0 when *angle_point* coincides with either end point.
    """
    def _dist(p, q):
        # Same Euclidean metric the original delegated to get_length().
        return math.sqrt((p.x - q.x) ** 2 + (p.y - q.y) ** 2)

    side_a = _dist(angle_point, end_point1)
    side_b = _dist(angle_point, end_point2)
    opposite = _dist(end_point1, end_point2)
    if side_a == 0 or side_b == 0:
        return 0
    # Law of cosines; clamp into [-1, 1] to guard acos against float drift.
    cos_value = (side_a ** 2 + side_b ** 2 - opposite ** 2) / \
                (2 * side_a * side_b)
    return math.acos(min(max(cos_value, -1), 1))
def get_next_polygon_point(angle_point, first_end_point, points):
    """One gift-wrapping step: pick the point that maximises the angle at
    *angle_point* measured against *first_end_point* (ties broken by taking
    the farther point).  Returns *angle_point* itself if no point opens a
    positive angle.
    """
    next_point = angle_point
    next_angle = 0
    for point in points:
        current_angle = get_angle(angle_point, first_end_point, point)
        # Prefer a larger angle; on equal angles prefer the more distant point.
        if (current_angle > next_angle or
                current_angle == next_angle and
                get_length(angle_point, point) > get_length(angle_point, next_point)):
            next_angle = current_angle
            next_point = point
    return next_point
def get_polar_polygon_from_points(points):
    """Build a hull polygon around *points* by repeated gift-wrapping steps.

    NOTE(review): the start vertex is named top_point but is selected by
    maximum *longitude* (attrgetter('lng')) — confirm whether 'lat' was
    intended before relying on the orientation of the result.
    """
    top_point = max(points, key=attrgetter('lng'))
    polygon_points = [top_point]
    while True:
        angle_point = polygon_points[-1]
        # First step has no previous edge; use a point straight "above" as reference.
        first_end_point = polygon_points[-2] if len(polygon_points) > 1 else angle_point.clone(delta_lat=1)
        next_point = get_next_polygon_point(angle_point, first_end_point, points)
        if top_point != next_point:
            polygon_points.append(next_point)
        else:
            # Wrapped back to the start vertex: the polygon is closed.
            break
    return polygon_points
## Convert a lat/lng pair into integer tile coordinates at the given zoom.
def polar_to_int(lat, lng, zoom):
    # x: linear mapping of longitude [-180, 180) onto 2**zoom columns.
    x = int(2 ** zoom * (180 + lng) / 360)
    # Clamp sin(lat) away from +/-1 so the logarithm below stays finite near the poles.
    d = min(max(math.sin(lat * math.pi / 180), -0.9999), 0.9999)
    # y: Mercator-style row index.  NOTE(review): resembles the Web Mercator
    # tile formula — confirm it matches the tile server's scheme before editing.
    y = int(2 ** zoom * (2 * math.pi - math.log((1 + d) / (1 - d))) / (4 * math.pi))
    return Tile(x, y, zoom)
def polar_to_int_polygon(polar_polygon, zoom):
    """Project every lat/lng vertex of *polar_polygon* to Tile coordinates at *zoom*."""
    return [polar_to_int(point.lat, point.lng, zoom) for point in polar_polygon]
def get_int_polygon_rectangle(int_polygon):
    """Return the (top_left, bottom_right) Tiles bounding *int_polygon*.

    Both corners carry the zoom of the polygon's first vertex.

    Fixes: the original seeded the maxima with 0 (and the minima with the
    Python-2-only ``sys.maxint``), so any polygon containing negative
    coordinates produced a wrong bottom-right corner.  Taking min/max over
    the actual vertex coordinates is correct for every coordinate range and
    drops the ``sys.maxint`` dependency.
    """
    zoom = int_polygon[0].zoom
    xs = [point.x for point in int_polygon]
    ys = [point.y for point in int_polygon]
    return Tile(min(xs), min(ys), zoom), Tile(max(xs), max(ys), zoom)
def check_point_in_int_polygon(tile, int_polygon, imprecision=0.1):
    """Angle-summation point-in-polygon test on tile coordinates.

    An interior point sees the polygon edges under angles summing to ~2*pi;
    *imprecision* (radians) absorbs the error introduced by integer rounding.
    """
    # Polygon vertices themselves count as inside.
    for polygon_point in int_polygon:
        if polygon_point == tile:
            return True
    angle_sum = 0
    for index, tile1 in enumerate(int_polygon):
        # Next vertex, wrapping around to the first to close the polygon.
        tile2 = int_polygon[index + 1] if index + 1 != len(int_polygon) else int_polygon[0]
        angle_sum += get_angle(tile, tile1, tile2)
    return angle_sum >= 2 * math.pi - imprecision
def create_dirs(save_file_path):
    """Ensure the parent directory of *save_file_path* exists.

    Fixes a TOCTOU race in the original ``exists()``-then-``makedirs()``
    pair: several downloader threads saving into the same new directory
    could all pass the ``exists()`` check, after which all but one
    ``makedirs()`` would raise.  Attempting the creation and tolerating an
    already-existing directory is safe under concurrency and works on both
    Python 2 and 3.
    """
    dir_name = os.path.dirname(save_file_path)
    try:
        os.makedirs(dir_name)
    except OSError:
        # Re-raise unless the directory now exists (created concurrently).
        if not os.path.isdir(dir_name):
            raise
def download_tile(tile, url_template, save_file_path_template):
    """Fetch one tile from *url_template* and write it to *save_file_path_template*.

    Both templates are rendered with the tile's x/y/zoom.  Uses urllib2, so
    this function targets Python 2; there is no timeout or retry — a slow or
    failing server call blocks/raises here.
    """
    url = tile.render(url_template)
    save_file_path = tile.render(save_file_path_template)
    # Make sure the zoom-level directory exists before writing.
    create_dirs(save_file_path)
    with open(save_file_path, 'wb') as file:
        file.write(urllib2.urlopen(url).read())
def check_and_download_tile(tile, url_template, save_file_path_template, int_polygon):
    """Download *tile* only if it lies inside *int_polygon*; otherwise do nothing.

    This is the unit of work submitted to the thread pool, which scans the
    whole bounding rectangle and relies on this filter to skip outside tiles.
    """
    if not check_point_in_int_polygon(tile, int_polygon):
        return
    download_tile(tile, url_template, save_file_path_template)
def download_tiles_in_polar_polygon(polar_polygon, zooms, url_template, save_file_path_template, threads_count=10):
    """Download, for every zoom level, all tiles inside the lat/lng polygon.

    A fresh thread pool is created and joined per zoom level, so zoom levels
    are processed sequentially while tiles within one level download
    concurrently (threads_count workers).
    """
    for zoom in zooms:
        int_polygon = polar_to_int_polygon(polar_polygon, zoom)
        # Bounding rectangle restricts the x/y scan to the polygon's extent.
        point_top_left, point_bottom_right = get_int_polygon_rectangle(int_polygon)
        threads_pull = Pool(threads_count)
        for x in xrange(point_top_left.x, point_bottom_right.x + 1):
            for y in xrange(point_top_left.y, point_bottom_right.y + 1):
                # NOTE(review): the async results are never collected, so any
                # download error passes silently.
                threads_pull.apply_async(check_and_download_tile,
                    [Tile(x, y, zoom), url_template, save_file_path_template, int_polygon])
        threads_pull.close()
        threads_pull.join()
def get_images_path_list(root_path, from_root_path='', filter=''):
for path, dirs, files in os.walk(root_path):
for file in files:
file_path = os.path.relpath(os.path.join(path, file), from_root_path)
if file_path[-len(filter):] == filter:
yield file_path
def get_zoom_and_coord_from_path(path):
zoom = os.path.basename(os.path.dirname(path))
coord = os.path.basename(path).split('.')[0]
return '%s_%s' % (zoom, coord)
def image_to_base64(path):
with open(path, 'rb') as file:
return base64.b64encode(file.read())
def save_images_path_list(file_list, save_file):
metadata_file_map = dict([(get_zoom_and_coord_from_path(file), file) for file in file_list])
with open(save_file, 'wb') as file:
json.dump(metadata_file_map, file)
def save_images_base64_list(file_list, save_file):
metadata_file_map = dict([(get_zoom_and_coord_from_path(file), image_to_base64(file)) for file in file_list])
with open(save_file, 'wb') as file:
json.dump(metadata_file_map, file)
class Point():
def __init__(self, lat, lng):
self.lat = lat
self.lng = lng
def __eq__(self, other):
return self.lat == other.lat and self.lng == other.lng
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash((self.lat, self.lng))
def __repr__(self):
return '{lat: %(lat)s, lng: %(lng)s}' % {'lat': self.lat, 'lng': self.lng}
def clone(self, delta_lat=0, delta_lng=0):
return Point(self.lat + delta_lat, self.lng + delta_lng)
@property
def x(self):
return self.lng
@property
def y(self):
return self.lat
class Tile():
def __init__(self, x, y, zoom):
self.x = x
self.y = y
self.zoom = zoom
def __eq__(self, other):
return self.x == other.x and self.y == other.y and self.zoom == other.zoom
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return hash((self.x, self.y, self.zoom))
def __repr__(self):
return self.render('{x: %(x)s, y: %(y)s, zoom: %(zoom)s}')
def render(self, template):
return template % {'x': self.x, 'y': self.y, 'zoom': self.zoom}
if __name__ == '__main__':
from fixtures import all_points
url_template = 'http://mt0.googleapis.com/vt?src=apiv3&x=%(x)s&y=%(y)s&z=%(zoom)s'
project_path = 'site'
save_files_path = project_path + '/cache'
save_file_path_template = save_files_path + '/%(zoom)s/%(x)s_%(y)s.png'
zooms = xrange(15 + 1)
points = [Point(lat, lng) for lat, lng in all_points]
polar_polygon = get_polar_polygon_from_points(points)
download_tiles_in_polar_polygon(polar_polygon, zooms, url_template, save_file_path_template)
images_path_list = save_files_path + '/images.json'
file_list = list(get_images_path_list(save_files_path, project_path, '.png'))
save_images_path_list(file_list, images_path_list)
images_base64_list = save_files_path + '/imagesBase64.json'
file_list = list(get_images_path_list(save_files_path, '', '.png'))
save_images_base64_list(file_list, images_base64_list)
| none | 1 | 2.785791 | 3 | |
test_geo.py | Frangoulides/Flood_Warning_System_179 | 0 | 6621206 | from floodsystem import geo
from floodsystem import stationdata
from haversine import haversine
from floodsystem.station import MonitoringStation
def test_stations_by_distance():
    """Stations sorted by distance from the Cambridge city-centre coordinate.

    NOTE(review): depends on the live dataset from build_station_list();
    '<NAME>' looks like an anonymisation placeholder left by a scrubbing
    tool, and the exact float equality on the distance is brittle — consider
    pytest.approx.  TODO confirm the intended expected values.
    """
    stations_list = geo.stations_by_distance(stationdata.build_station_list(), (52.2053, 0.1218))
    # Closest and furthest stations from (52.2053, 0.1218).
    assert stations_list[0][0].name == '<NAME>'
    assert stations_list[-1][0].name == 'Penberth'
    # Exact float comparison on a haversine distance.
    assert stations_list[0][1] == 0.840237595667494
def test_stations_within_radius():
    """Every station returned for a 10 km radius must actually lie inside it.

    NOTE(review): the expected count of 11 is tied to the live station
    dataset — TODO confirm it is stable enough to assert on.
    """
    stations_list = geo.stations_within_radius(stationdata.build_station_list(), (52.2053, 0.1218), 10)
    assert len(stations_list) == 11
    for station in stations_list:
        # Independently re-check each station's great-circle distance.
        radius = haversine(station.coord, (52.2053, 0.1218))
        assert radius < 10
def test_rivers_by_station_number():
    """A river seeded with 100 synthetic stations must rank among the top rivers."""
    # Create 100 new stations on an imaginary river called 'HopefullyNotARealRiverName'.
    stations = stationdata.build_station_list()
    for i in range(100):
        s_id = "test-s-id"
        m_id = "test-m-id"
        label = "some station"
        coord = (-2.0, 4.0)
        trange = (-2.3, 3.4445)
        catchment = 'catchment'
        river = "HopefullyNotARealRiverName"
        town = "My Town"
        s = MonitoringStation(s_id, m_id, label, coord, trange, catchment, river, town)
        stations.append(s)
    # The synthetic river must appear with its 100 stations in the top-5 query.
    assert ("HopefullyNotARealRiverName", 100) in geo.rivers_by_station_number(stations, 5)
    # '>=' because the function may return extra entries when counts tie.
    assert len(geo.rivers_by_station_number(stations, 5)) >= 5
def test_catchment_with_stations():
    """A catchment shared by synthetic stations must be reported by catchments_with_station."""
    stations = []
    # Build 10 synthetic stations that all belong to catchment 'catchment'.
    for i in range(10):
        s_id = "test-s-id"
        m_id = "test-m-id"
        label = "some station"
        coord = (-2.0, 4.0)
        trange = (-2.3, 3.4445)
        catchment = 'catchment'
        river = "HopefullyNotARealRiverName"
        town = "My Town"
        s = MonitoringStation(s_id, m_id, label, coord, trange, catchment, river, town)
        stations.append(s)
    assert 'catchment' in geo.catchments_with_station(stations)
def test_stations_by_catchment():
    """stations_by_catchment must group the 10 synthetic stations under their catchment key."""
    stations = stationdata.build_station_list()
    # Append 10 synthetic stations sharing the catchment 'catchment'.
    for i in range(10):
        s_id = "test-s-id"
        m_id = "test-m-id"
        label = "some station"
        coord = (-2.0, 4.0)
        trange = (-2.3, 3.4445)
        catchment = 'catchment'
        river = "HopefullyNotARealRiverName"
        town = "My Town"
        s = MonitoringStation(s_id, m_id, label, coord, trange, catchment, river, town)
        stations.append(s)
    # Assumes the live dataset has no catchment literally named 'catchment'.
    assert len(geo.stations_by_catchment(stations)['catchment']) == 10
| from floodsystem import geo
from floodsystem import stationdata
from haversine import haversine
from floodsystem.station import MonitoringStation
def test_stations_by_distance():
stations_list = geo.stations_by_distance(stationdata.build_station_list(), (52.2053, 0.1218))
assert stations_list[0][0].name == '<NAME>'
assert stations_list[-1][0].name == 'Penberth'
assert stations_list[0][1] == 0.840237595667494
def test_stations_within_radius():
stations_list = geo.stations_within_radius(stationdata.build_station_list(), (52.2053, 0.1218), 10)
assert len(stations_list) == 11
for station in stations_list:
radius = haversine(station.coord, (52.2053, 0.1218))
assert radius < 10
def test_rivers_by_station_number():
# Create 100 new stations on an imaginary river called 'HopefullyNotARealRiverName'.
stations = stationdata.build_station_list()
for i in range(100):
s_id = "test-s-id"
m_id = "test-m-id"
label = "some station"
coord = (-2.0, 4.0)
trange = (-2.3, 3.4445)
catchment = 'catchment'
river = "HopefullyNotARealRiverName"
town = "My Town"
s = MonitoringStation(s_id, m_id, label, coord, trange, catchment, river, town)
stations.append(s)
assert ("HopefullyNotARealRiverName", 100) in geo.rivers_by_station_number(stations, 5)
assert len(geo.rivers_by_station_number(stations, 5)) >= 5
def test_catchment_with_stations():
stations = []
for i in range(10):
s_id = "test-s-id"
m_id = "test-m-id"
label = "some station"
coord = (-2.0, 4.0)
trange = (-2.3, 3.4445)
catchment = 'catchment'
river = "HopefullyNotARealRiverName"
town = "My Town"
s = MonitoringStation(s_id, m_id, label, coord, trange, catchment, river, town)
stations.append(s)
assert 'catchment' in geo.catchments_with_station(stations)
def test_stations_by_catchment():
stations = stationdata.build_station_list()
for i in range(10):
s_id = "test-s-id"
m_id = "test-m-id"
label = "some station"
coord = (-2.0, 4.0)
trange = (-2.3, 3.4445)
catchment = 'catchment'
river = "HopefullyNotARealRiverName"
town = "My Town"
s = MonitoringStation(s_id, m_id, label, coord, trange, catchment, river, town)
stations.append(s)
assert len(geo.stations_by_catchment(stations)['catchment']) == 10
| en | 0.877972 | # Create 100 new stations on an imaginary river called 'HopefullyNotARealRiverName'. | 3.026811 | 3 |
examples/hello_world.py | mohamedelkansouli/https-github.com-jshaffstall-PyPhysicsSandbox | 38 | 6621207 | <gh_stars>10-100
"""
A traditional Hello World example for PyPhysicsSandbox. A screencast showing the development
of this example can be found at: https://www.youtube.com/watch?v=xux3z2unaME
"""
from pyphysicssandbox import *
# Open a 300x300 px simulation window titled 'Hello World'.
window('Hello World', 300, 300)
# Static (immovable) floor strip along the bottom edge of the window.
floor = static_box((0, 290), 300, 10)
floor.color = Color('blue')
# Text body dropped near the top, rotated 90 degrees.
caption = text((125, 15), 'Hello World!')
caption.angle = 90
# wrap=True — presumably makes the body wrap around the window edges; TODO confirm
# against the pyphysicssandbox docs.
caption.wrap = True
# Start the simulation loop (blocks until the window is closed).
run()
| """
A traditional Hello World example for PyPhysicsSandbox. A screencast showing the development
of this example can be found at: https://www.youtube.com/watch?v=xux3z2unaME
"""
from pyphysicssandbox import *
window('Hello World', 300, 300)
floor = static_box((0, 290), 300, 10)
floor.color = Color('blue')
caption = text((125, 15), 'Hello World!')
caption.angle = 90
caption.wrap = True
run() | en | 0.624103 | A traditional Hello World example for PyPhysicsSandbox. A screencast showing the development of this example can be found at: https://www.youtube.com/watch?v=xux3z2unaME | 3.119733 | 3 |
其他面试题/DNA改造.py | lih627/python-algorithm-templates | 24 | 6621208 | """
360公司 2020笔试题
有一种特殊的DNA, 仅仅由 A 和 T 组成, 顺次链接
科学家通过一种手段, 可以改变这种DNA, 每一次, 科学家可以交换改DNA上的
两个核酸的位置, 也可以把特定位置的某个核酸修改为另外一种.
有一个DNA 希望改造成另外一个DNA, 计算最小操作次数
输入
ATTTAA
TTAATT
返回 3
"""
def solve(s1, s2):
    """Minimum number of operations to turn A/T string *s1* into *s2*.

    Allowed operations (360 2020 written test): swap two bases, or rewrite
    one base.  Let c_at be the number of positions where s1 has 'A' but s2
    needs 'T', and c_ta the reverse.  One swap repairs one mismatch of each
    kind, so min(c_at, c_ta) swaps plus abs(c_at - c_ta) single-base
    rewrites are needed — i.e. max(c_at, c_ta) operations in total.

    This replaces the original multi-pass algorithm (Counter bookkeeping
    plus mutation of list copies of the inputs) with a single O(n) pass
    that produces identical results and never mutates anything.
    """
    c_at = 0
    c_ta = 0
    for a, b in zip(s1, s2):
        if a == 'A' and b == 'T':
            c_at += 1
        elif a == 'T' and b == 'A':
            c_ta += 1
    return max(c_at, c_ta)
if __name__ == '__main__':
    # Read the two DNA strings from stdin and print the minimum operation count.
    s1 = input()
    s2 = input()
    print(solve(s1, s2))
| """
360公司 2020笔试题
有一种特殊的DNA, 仅仅由 A 和 T 组成, 顺次链接
科学家通过一种手段, 可以改变这种DNA, 每一次, 科学家可以交换改DNA上的
两个核酸的位置, 也可以把特定位置的某个核酸修改为另外一种.
有一个DNA 希望改造成另外一个DNA, 计算最小操作次数
输入
ATTTAA
TTAATT
返回 3
"""
def solve(s1, s2):
"""
找规律题, 先通过更改核酸, 让两个核酸的A和T数量一致,
注意修改后的核酸放到正确位置上
然后对不满足要求的位置记录其更换次数
更换次数 等于 总不满足要求的位置个数 // 2
"""
from collections import Counter
counter1 = Counter(s1)
counter2 = Counter(s2)
s1 = list(s1)
s2 = list(s2)
res = 0
a_num = counter1['A'] - counter2['A']
res += abs(a_num)
A2T = True if a_num < 0 else False
if a_num != 0:
cnt = abs(a_num)
if A2T:
for i in range(len(s1)):
if s1[i] == 'T' and s2[i] == 'A':
s2[i] = 'T'
cnt -= 1
if cnt == 0:
break
else:
for i in range(len(s1)):
if s1[i] == 'A' and s2[i] == 'T':
s2[i] = 'A'
cnt -= 1
if cnt == 0:
break
for i in range(len(s1)):
if s1[i] == 'A' and s2[i] == 'T':
res += 1
return res
if __name__ == '__main__':
s1 = input()
s2 = input()
print(solve(s1, s2))
| zh | 0.9967 | 360公司 2020笔试题 有一种特殊的DNA, 仅仅由 A 和 T 组成, 顺次链接 科学家通过一种手段, 可以改变这种DNA, 每一次, 科学家可以交换改DNA上的 两个核酸的位置, 也可以把特定位置的某个核酸修改为另外一种. 有一个DNA 希望改造成另外一个DNA, 计算最小操作次数 输入 ATTTAA TTAATT 返回 3 找规律题, 先通过更改核酸, 让两个核酸的A和T数量一致, 注意修改后的核酸放到正确位置上 然后对不满足要求的位置记录其更换次数 更换次数 等于 总不满足要求的位置个数 // 2 | 3.404774 | 3 |
hackerrank/Python/Set Mutations/solution.py | ATrain951/01.python-com_Qproject | 4 | 6621209 | <gh_stars>1-10
# HackerRank "Set Mutations": read set A (its size on the first line is unused),
# then apply N mutation commands and print the sum of the resulting set.
_, A = int(input().rstrip()), set(map(int, input().rstrip().split()))
N = int(input())
for _ in range(N):
    # Each command is a set-method name (update/intersection_update/...) plus
    # an (unused) size, followed by the operand set on the next line.
    method, new_set = input().rstrip().split()[0], set(map(int, input().rstrip().split()))
    # Dispatch the named in-place set operation on A.
    getattr(A, method)(new_set)
print(sum(A))
| _, A = int(input().rstrip()), set(map(int, input().rstrip().split()))
N = int(input())
for _ in range(N):
method, new_set = input().rstrip().split()[0], set(map(int, input().rstrip().split()))
getattr(A, method)(new_set)
print(sum(A)) | none | 1 | 2.96508 | 3 | |
2020/09/Teil 2 - V02 - optimal window sliding.py | HeWeMel/adventofcode | 1 | 6621210 | import sys, itertools
import re
with open('input.txt') as f:
    lines = f.readlines()  # read the whole puzzle input as a list of lines
# Target sum from part 1 of the puzzle.
g = 258585477
# Parse the lines into integers.  NOTE(review): the loop variable shadows the
# builtin `str` for the remainder of the script.
ints = []
for str in lines:
    str = str.rstrip('\n')
    i = int(str)
    ints.append(i)
# Slide a contiguous window over the numbers: grow the right edge while the
# sum is too small, shrink the left edge while it is too large (O(n) overall).
# NOTE(review): assumes a solution exists — ints.pop(0) would raise IndexError
# if the list were exhausted first.
window = []
while True:
    s = sum(window)
    if s < g:
        window.append(ints.pop(0))
    elif s > g:
        window.pop(0)
    else:
        # Puzzle answer: smallest + largest value in the matching window.
        print(min(window) + max(window))
        break
# Expected answer for this input: 36981213
import re
with open('input.txt') as f:
lines = f.readlines() # read complete file, create list of lines with CRs
g = 258585477
# create list of ints from the lines
ints = []
for str in lines:
str = str.rstrip('\n')
i = int(str)
ints.append(i)
# solve problem: slide window over the numbers (n), move start or end if sum does not fit (-> n^2)
window = []
while True:
s = sum(window)
if s < g:
window.append(ints.pop(0))
elif s > g:
window.pop(0)
else:
print(min(window) + max(window))
break
# 36981213 | en | 0.669368 | # read complete file, create list of lines with CRs # create list of ints from the lines # solve problem: slide window over the numbers (n), move start or end if sum does not fit (-> n^2) # 36981213 | 3.222947 | 3 |
flexible_filter_conditions/migrations/0003_auto_20200218_1236.py | auto-mat/django-flexible-filter-conditions | 0 | 6621211 | <gh_stars>0
# Generated by Django 2.2.10 on 2020-02-18 11:36
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated Django migration (2020-02-18).

    Makes Condition.named_condition nullable/optional and narrows
    Condition.operation to the and/or/xor choice set.  Generated code —
    do not edit by hand; create a new migration for further changes.
    """

    dependencies = [
        ('flexible_filter_conditions', '0002_auto_20200218_1214'),
    ]

    operations = [
        # Allow conditions without a parent NamedCondition (blank/null FK).
        migrations.AlterField(
            model_name='condition',
            name='named_condition',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='conditions', to='flexible_filter_conditions.NamedCondition'),
        ),
        # Restrict the boolean operation to a fixed choice list.
        migrations.AlterField(
            model_name='condition',
            name='operation',
            field=models.CharField(choices=[('and', 'and'), ('or', 'or'), ('xor', 'xor (one or the other)')], max_length=30, verbose_name='Operation'),
        ),
    ]
| # Generated by Django 2.2.10 on 2020-02-18 11:36
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('flexible_filter_conditions', '0002_auto_20200218_1214'),
]
operations = [
migrations.AlterField(
model_name='condition',
name='named_condition',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='conditions', to='flexible_filter_conditions.NamedCondition'),
),
migrations.AlterField(
model_name='condition',
name='operation',
field=models.CharField(choices=[('and', 'and'), ('or', 'or'), ('xor', 'xor (one or the other)')], max_length=30, verbose_name='Operation'),
),
] | en | 0.764828 | # Generated by Django 2.2.10 on 2020-02-18 11:36 | 1.661222 | 2 |
enelvo/candidate_scoring/__init__.py | tfcbertaglia/enelvo | 15 | 6621212 | <filename>enelvo/candidate_scoring/__init__.py
from .baselines import *
from .embeddings import *
| <filename>enelvo/candidate_scoring/__init__.py
from .baselines import *
from .embeddings import *
| none | 1 | 1.084911 | 1 | |
Server/app/views/restaurant/menu.py | TblMaker/TableMaker-Backend | 0 | 6621213 | <filename>Server/app/views/restaurant/menu.py
from flask import Blueprint, Response, abort, g, request
from flask_restful import Api
from flasgger import swag_from
from app.views import BaseResource, auth_required, json_required
api = Api(Blueprint('menu-api', __name__))
# NOTE(review): '/menu/<restaurant_id>' and '/menu/<menu_id>' (below) describe
# the same URL shape '/menu/<param>', so the two resources register
# conflicting routes — confirm which one should own this path.
@api.resource('/menu/<restaurant_id>')
class MenuList(BaseResource):
    """Collection endpoint for a restaurant's menus (stub — not implemented yet)."""

    def get(self, restaurant_id):
        """
        List the menus of a specific restaurant.
        """
# NOTE(review): this route shape collides with '/menu/<restaurant_id>' above.
@api.resource('/menu/<menu_id>')
class Menu(BaseResource):
    """Single-menu endpoint (stub — not implemented yet)."""

    def get(self, menu_id):
        """
        Get the details of a specific menu.
        """
from flask import Blueprint, Response, abort, g, request
from flask_restful import Api
from flasgger import swag_from
from app.views import BaseResource, auth_required, json_required
api = Api(Blueprint('menu-api', __name__))
@api.resource('/menu/<restaurant_id>')
class MenuList(BaseResource):
def get(self, restaurant_id):
"""
특정 식당의 메뉴 목록 조회
"""
@api.resource('/menu/<menu_id>')
class Menu(BaseResource):
def get(self, menu_id):
"""
특정 메뉴의 정보 조회
""" | ko | 1.00007 | 특정 식당의 메뉴 목록 조회 특정 메뉴의 정보 조회 | 2.326831 | 2 |
car_number_detection.py | function-test/Car-Number-Detect | 0 | 6621214 | <gh_stars>0
import sys
import os
import cv2
import numpy as np
original_image = None
valid_rects = None
def car_number_detection():
    """Run the full licence-plate masking pipeline on 'original.png'.

    Stages (each intermediate image is saved next to the original):
    grayscale -> adaptive threshold -> contours -> bounding rectangles ->
    size/ratio filtering -> grouping of aligned rectangles -> black-box
    masking of each detected plate region.  Uses the module-level globals
    original_image and valid_rects shared with the other functions.
    """
    image_file_name = 'original.png'
    global original_image
    original_image = cv2.imread(image_file_name)
    gray_image = gray_scale(original_image)
    save_image(gray_image, image_file_name, 'gray')
    threshold_image = adaptive_threshold(gray_image)
    save_image(threshold_image, image_file_name, 'threshold')
    (contours, contour_image) = get_contours(threshold_image)
    save_image(contour_image, image_file_name, 'contour')
    (rects, rect_contour_image) = rect_contours(contours)
    save_image(rect_contour_image, image_file_name, 'rect')
    global valid_rects
    (valid_rects, valid_rect_image) = validate_rect(rects)
    save_image(valid_rect_image, image_file_name, 'valid-rect')
    result_idxs = validate_rect_group(valid_rects)
    detection_image = detection(result_idxs)
    save_image(detection_image, image_file_name, 'detection')
## Save an image next to the original, inserting a stage label into the name.
# image : image to write (ndarray)
# image_file_name : original file name, e.g. 'original.png'
# middle_name : stage label, e.g. 'gray' -> 'original-gray.png'
def save_image(image, image_file_name, middle_name):
    image_name, image_extension = os.path.splitext(image_file_name)
    cv2.imwrite(image_name + '-' + middle_name + image_extension, image)
## Create a blank scratch image.
# @return: all-zero (black) image with the same shape as the global original_image
def create_temp_image():
    # Take the dimensions of the loaded source image.
    global original_image
    height, width, channel = original_image.shape
    # Allocate a zeroed uint8 array of the same size for drawing onto.
    temp = np.zeros((height, width, channel), dtype=np.uint8)
    return temp
## Convert an image to grayscale.
# image : source image (BGR, as produced by cv2.imread)
# @return single-channel grayscale image
def gray_scale(image):
    # BGR -> grayscale colour-space conversion.
    return cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
## Binarise an image with an adaptive (local) threshold.
# image : grayscale source image
# @return inverted binary image (foreground strokes become white)
def adaptive_threshold(image):
    # Blur first to suppress noise before thresholding.
    blur = cv2.GaussianBlur(image, ksize=(5,5), sigmaX=0)
    # Gaussian-weighted local threshold over 19x19 neighbourhoods, offset by 9;
    # THRESH_BINARY_INV so dark characters come out white for contour finding.
    return cv2.adaptiveThreshold(
        blur,
        maxValue=255.0,
        adaptiveMethod=cv2.ADAPTIVE_THRESH_GAUSSIAN_C,
        thresholdType=cv2.THRESH_BINARY_INV,
        blockSize=19,
        C=9
    )
## Find the contours of a binary image.
# image : binary (thresholded) source image
# @return (contour list, debug image with the contours drawn in white)
# NOTE(review): the two-value unpacking matches OpenCV 4.x; OpenCV 3.x
# findContours returns three values — confirm the targeted cv2 version.
def get_contours(image):
    contours, _ = cv2.findContours(
        image,
        mode=cv2.RETR_LIST,
        method=cv2.CHAIN_APPROX_SIMPLE
    )
    # Blank canvas for visual debugging.
    contour_image = create_temp_image()
    # Draw all contours (contourIdx=-1) in white.
    cv2.drawContours(contour_image, contours=contours, contourIdx=-1, color=(255, 255, 255))
    return contours, contour_image
## Convert contours into axis-aligned bounding rectangles.
# contours: contour list from get_contours
# @return: (list of rectangle dicts, debug image with the rectangles drawn)
def rect_contours(contours):
    # Collected rectangle geometry, one dict per contour.
    rects = []
    # Blank canvas for visual debugging.
    rect_contour_image = create_temp_image()
    for contour in contours:
        # Bounding box: top-left corner (x, y), width w, height h.
        x, y, w, h = cv2.boundingRect(contour)
        cv2.rectangle(rect_contour_image, pt1=(x,y), pt2=(x+w,y+h), color=(255,255,255), thickness=2)
        # Store the geometry; cx/cy are the rectangle centre coordinates.
        rects.append({
            'contour': contour,
            'x': x,
            'y': y,
            'w': w,
            'h': h,
            'cx': x + (w / 2),
            'cy': y + (h / 2)
        })
    return rects, rect_contour_image
## Keep only rectangles plausibly shaped like plate characters.
# rects : rectangle dicts from rect_contours
# @return (filtered rectangles, each tagged with a sequential 'idx'; debug image)
# NOTE(review): the local valid_rects shadows the module-level global of the
# same name — the caller assigns the return value to the global itself.
def validate_rect(rects):
    # Minimum rectangle area in pixels.
    MIN_AREA = 80
    # Minimum width and height in pixels.
    MIN_WIDTH, MIN_HEIGHT = 2, 8
    # Accepted width/height aspect-ratio range (taller than wide).
    MIN_RATIO, MAX_RATIO = 0.25, 1.0
    # Surviving rectangles.
    valid_rects = []
    # Sequential index assigned to each accepted rectangle.
    idx = 0
    # Blank canvas for visual debugging.
    valid_rect_image = create_temp_image()
    for rect in rects:
        area = rect['w'] * rect['h']
        ratio = rect['w'] / rect['h']
        if area > MIN_AREA \
        and rect['w'] > MIN_WIDTH \
        and rect['h'] > MIN_HEIGHT \
        and MIN_RATIO < ratio < MAX_RATIO:
            # Tag with its index and keep it.
            rect['idx'] = idx
            idx += 1
            valid_rects.append(rect)
            cv2.rectangle(valid_rect_image, pt1=(rect['x'], rect['y']), pt2=(rect['x']+rect['w'], rect['y']+rect['h']), color=(255,255,255), thickness=2)
    return valid_rects, valid_rect_image
## Recursively collect groups of rectangles aligned like plate characters.
# rects : candidate rectangle dicts (each tagged with 'idx')
# @return list of groups, each a list of rectangle indices into the global
#         valid_rects.
# NOTE(review): the recursion indexes the *global* valid_rects with np.take,
# so the 'idx' tags must refer to positions in that global list — this only
# works when the top-level call passes valid_rects itself.  TODO confirm.
def validate_rect_group(rects):
    # A pair may be at most 5 rectangle-diagonals apart...
    MAX_DIAG_MULTIPLYER = 5
    # ...with at most 12 degrees between their centres (roughly level),
    MAX_ANGLE_DIFF = 12.0
    # at most 50% area difference,
    MAX_AREA_DIFF = 0.5
    # at most 80% width difference,
    MAX_WIDTH_DIFF = 0.8
    # and at most 20% height difference.
    MAX_HEIGHT_DIFF = 0.2
    # A group needs at least 3 members to count as a plate candidate.
    MIN_N_MATCHED = 3
    matched_result_idxs = []
    for rect1 in rects:
        matched_rect_idxs = []
        for rect2 in rects:
            if rect1['idx'] == rect2['idx']:
                continue
            # Centre-to-centre offsets, used for the angle test below.
            dx = abs(rect1['cx'] - rect2['cx'])
            dy = abs(rect1['cy'] - rect2['cy'])
            # Angle of the centre line (degrees); vertical pairs get 90.
            if dx == 0:
                angle_diff = 90
            else:
                angle_diff = np.degrees(np.arctan(dy/dx))
            # Diagonal length of rect1 — the distance unit for the gap test.
            diagonal1 = np.sqrt(rect1['w'] ** 2 + rect1['h'] ** 2)
            # Euclidean distance between the two centres.
            distance = np.linalg.norm(np.array([rect1['cx'], rect1['cy']]) - np.array([rect2['cx'], rect2['cy']]))
            # Relative area / width / height differences.
            rect1_area = rect1['w'] * rect1['h']
            rect2_area = rect2['w'] * rect2['h']
            area_diff = abs(rect1_area - rect2_area) / rect1_area
            width_diff = abs(rect1['w'] - rect2['w']) / rect1['w']
            height_diff = abs(rect1['h'] - rect2['h']) / rect1['h']
            # Accept rect2 as a neighbour when every criterion holds.
            if distance < diagonal1 * MAX_DIAG_MULTIPLYER \
            and angle_diff < MAX_ANGLE_DIFF \
            and area_diff < MAX_AREA_DIFF \
            and width_diff < MAX_WIDTH_DIFF \
            and height_diff < MAX_HEIGHT_DIFF:
                matched_rect_idxs.append(rect2['idx'])
        # rect1 itself belongs to its own group.
        matched_rect_idxs.append(rect1['idx'])
        # Discard groups that are too small to be a plate.
        if len(matched_rect_idxs) < MIN_N_MATCHED:
            continue
        else:
            # Keep this group...
            matched_result_idxs.append(matched_rect_idxs)
            # ...then recurse on the rectangles left over from it.
            unmatched_rect_idxs = []
            for rect in rects:
                if rect['idx'] not in matched_rect_idxs:
                    unmatched_rect_idxs.append(rect['idx'])
            global valid_rects
            unmatched_rect = np.take(valid_rects, unmatched_rect_idxs)
            # recursive call
            recursive_rect_list = validate_rect_group(unmatched_rect)
            # Merge the recursion's groups into the result.
            for idx in recursive_rect_list:
                matched_result_idxs.append(idx)
            # Only the first successful group per call level; the rest come
            # from the recursion above.
            break
    return matched_result_idxs
## Final step: mask each detected plate region with a filled black rectangle.
# result_idxs : list of index groups selected by validate_rect_group
# @return the (mutated) original image with the regions blacked out
def detection(result_idxs):
    global valid_rects
    global original_image
    # Resolve each index group back into its rectangle dicts.
    result_group = []
    for idx in result_idxs:
        result_group.append(np.take(valid_rects, idx))
    for group in result_group:
        # Compute the bounding box of the whole group.
        min_x, min_y = sys.maxsize, sys.maxsize
        max_x, max_y = sys.maxsize * -1, sys.maxsize * -1
        for rect in group:
            min_x = min_x if min_x < rect['x'] else rect['x']
            min_y = min_y if min_y < rect['y'] else rect['y']
            max_x = max_x if max_x > rect['x']+rect['w'] else rect['x']+rect['w']
            max_y = max_y if max_y > rect['y']+rect['h'] else rect['y']+rect['h']
        # Paint a filled black rectangle over the detected plate area.
        cv2.rectangle(original_image, pt1=(min_x, min_y), pt2=(max_x, max_y), color=(0, 0, 0), thickness=cv2.FILLED)
    return original_image
if __name__ == '__main__':
    # Run the whole pipeline on 'original.png' in the working directory.
    car_number_detection()
import os
import cv2
import numpy as np
original_image = None
valid_rects = None
def car_number_detection():
image_file_name = 'original.png'
global original_image
original_image = cv2.imread(image_file_name)
gray_image = gray_scale(original_image)
save_image(gray_image, image_file_name, 'gray')
threshold_image = adaptive_threshold(gray_image)
save_image(threshold_image, image_file_name, 'threshold')
(contours, contour_image) = get_contours(threshold_image)
save_image(contour_image, image_file_name, 'contour')
(rects, rect_contour_image) = rect_contours(contours)
save_image(rect_contour_image, image_file_name, 'rect')
global valid_rects
(valid_rects, valid_rect_image) = validate_rect(rects)
save_image(valid_rect_image, image_file_name, 'valid-rect')
result_idxs = validate_rect_group(valid_rects)
detection_image = detection(result_idxs)
save_image(detection_image, image_file_name, 'detection')
## 이미지 저장
# image : source image
# image_file_name : image name
# middle_name : image's middle name
def save_image(image, image_file_name, middle_name):
image_name, image_extension = os.path.splitext(image_file_name)
cv2.imwrite(image_name + '-' + middle_name + image_extension, image)
## temp image를 생성해 주는 함수
# @return: original_image와 똑같은 사이즈의 temp image
def create_temp_image():
# original_image 의 크기를 가져옴
global original_image
height, width, channel = original_image.shape
# 이미지 생성을 위해서 이미지 크기의 빈 array 선언
temp = np.zeros((height, width, channel), dtype=np.uint8)
return temp
## 이미지 흑백으로 변경
# image : source image
# @return gray scale image
def gray_scale(image):
# 색 변경. gray scale로 변경
return cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
## 이미지를 임계치 값으로 변경
# image : source image
# @return thresholded image
def adaptive_threshold(image):
# 노이즈 제거
blur = cv2.GaussianBlur(image, ksize=(5,5), sigmaX=0)
# 이미지의 threshold 설정
return cv2.adaptiveThreshold(
blur,
maxValue=255.0,
adaptiveMethod=cv2.ADAPTIVE_THRESH_GAUSSIAN_C,
thresholdType=cv2.THRESH_BINARY_INV,
blockSize=19,
C=9
)
## 이미지의 윤곽선을 찾아줌
# image : source image
# @return (contour list, contour image)
def get_contours(image):
# 윤곽선 찾기
contours, _ = cv2.findContours(
image,
mode=cv2.RETR_LIST,
method=cv2.CHAIN_APPROX_SIMPLE
)
# 빈 이미지 생성
contour_image = create_temp_image()
# 윤곽선을 그려줌
cv2.drawContours(contour_image, contours=contours, contourIdx=-1, color=(255, 255, 255))
return contours, contour_image
## 윤곽선을 사각형 모양으로 그리기 위한 함수
# contours: 윤곽선 목록
# @return: 사각형 목록, 사각형 이미지
def rect_contours(contours):
# 사각형의 위치 정보를 저장하기 위해 선언
rects = []
# 이미지 저장을 위한 이미지 생성
rect_contour_image = create_temp_image()
for contour in contours:
# 윤곽선의 x, y 좌표, 폭, 높이를 가져옴
x, y, w, h = cv2.boundingRect(contour)
# 이미지에 사각형을 그려줌
cv2.rectangle(rect_contour_image, pt1=(x,y), pt2=(x+w,y+h), color=(255,255,255), thickness=2)
# 사각형 정보를 넣어줌
# cx: x좌표의 중심, cy: y 좌표의 중심
rects.append({
'contour': contour,
'x': x,
'y': y,
'w': w,
'h': h,
'cx': x + (w / 2),
'cy': y + (h / 2)
})
return rects, rect_contour_image
## 사각형 중 유효한 사각형를 추출
# rects : 사각형 목록
# @return 유효한 사각형 목록, 유효한 사각형 이미지
def validate_rect(rects):
# 사각형의 최소 넓이
MIN_AREA = 80
# 사각형의 최소 폭, 높이
MIN_WIDTH, MIN_HEIGHT = 2, 8
# 사각형의 최소, 최대 가로 세로 비율
MIN_RATIO, MAX_RATIO = 0.25, 1.0
# 유효한 사각형 목록
valid_rects = []
# 유효한 사각형에 부여되는 index
idx = 0
# 이미지 저장을 위한 이미지 생성
valid_rect_image = create_temp_image()
for rect in rects:
# 넓이
area = rect['w'] * rect['h']
# 비율
ratio = rect['w'] / rect['h']
if area > MIN_AREA \
and rect['w'] > MIN_WIDTH \
and rect['h'] > MIN_HEIGHT \
and MIN_RATIO < ratio < MAX_RATIO:
# 인덱스를 부여하고 valid_rects에 추가
rect['idx'] = idx
idx += 1
valid_rects.append(rect)
# 사각형 추가
cv2.rectangle(valid_rect_image, pt1=(rect['x'], rect['y']), pt2=(rect['x']+rect['w'], rect['y']+rect['h']), color=(255,255,255), thickness=2)
return valid_rects, valid_rect_image
## 유효한 사각형 그룹을 가져오는 함수, recursive function
# rects : 사각형 목록
# @return 유효한 사각형 그룹의 목록
def validate_rect_group(rects):
# 사각형의 대각선 길이의 5배가 최대 간격
MAX_DIAG_MULTIPLYER = 5
# 사각형의 중심 최대 각도
MAX_ANGLE_DIFF = 12.0
# 사각형의 면적 차이
MAX_AREA_DIFF = 0.5
# 사각형의 넓이 차이
MAX_WIDTH_DIFF = 0.8
# 사각형의 높이 차이
MAX_HEIGHT_DIFF = 0.2
# 사각형의 그룹의 최소 갯수
MIN_N_MATCHED = 3
matched_result_idxs = []
for rect1 in rects:
matched_rect_idxs = []
for rect2 in rects:
if rect1['idx'] == rect2['idx']:
continue
# 각을 구하기 위한 중심 거리 계산
dx = abs(rect1['cx'] - rect2['cx'])
dy = abs(rect1['cy'] - rect2['cy'])
# 각 계산
if dx == 0:
angle_diff = 90
else:
angle_diff = np.degrees(np.arctan(dy/dx))
# rect1의 대각선 길이
diagonal1 = np.sqrt(rect1['w'] ** 2 + rect1['h'] ** 2)
# 중심 간격
distance = np.linalg.norm(np.array([rect1['cx'], rect1['cy']]) - np.array([rect2['cx'], rect2['cy']]))
# 면적 비율
rect1_area = rect1['w'] * rect1['h']
rect2_area = rect2['w'] * rect2['h']
area_diff = abs(rect1_area - rect2_area) / rect1_area
# 폭의 비율
width_diff = abs(rect1['w'] - rect2['w']) / rect1['w']
# 높이의 비율
height_diff = abs(rect1['h'] - rect2['h']) / rect1['h']
# 조건 확인
if distance < diagonal1 * MAX_DIAG_MULTIPLYER \
and angle_diff < MAX_ANGLE_DIFF \
and area_diff < MAX_AREA_DIFF \
and width_diff < MAX_WIDTH_DIFF \
and height_diff < MAX_HEIGHT_DIFF:
matched_rect_idxs.append(rect2['idx'])
# rect1도 넣어준다.
matched_rect_idxs.append(rect1['idx'])
# rect group이 기준 이하면 결과에 포함하지 않음
if len(matched_rect_idxs) < MIN_N_MATCHED:
continue
else:
# 결과에 포함
matched_result_idxs.append(matched_rect_idxs)
# 매칭이 안된 것끼리 다시 진행
unmatched_rect_idxs = []
for rect in rects:
if rect['idx'] not in matched_rect_idxs:
unmatched_rect_idxs.append(rect['idx'])
global valid_rects
unmatched_rect = np.take(valid_rects, unmatched_rect_idxs)
# recursive call
recursive_rect_list = validate_rect_group(unmatched_rect)
# recursive 결과 취합
for idx in recursive_rect_list:
matched_result_idxs.append(idx)
break
return matched_result_idxs
## 최종적으로 detection하여 비식별화하기 위한 함수
# result_idxs : 최종적으로 선택된 group list
# @return 비식별 처리 된 image
def detection(result_idxs):
global valid_rects
global original_image
# 최종 사각형 저장하기 위한 배열
result_group = []
for idx in result_idxs:
result_group.append(np.take(valid_rects, idx))
for group in result_group:
min_x, min_y = sys.maxsize, sys.maxsize
max_x, max_y = sys.maxsize * -1, sys.maxsize * -1
for rect in group:
min_x = min_x if min_x < rect['x'] else rect['x']
min_y = min_y if min_y < rect['y'] else rect['y']
max_x = max_x if max_x > rect['x']+rect['w'] else rect['x']+rect['w']
max_y = max_y if max_y > rect['y']+rect['h'] else rect['y']+rect['h']
cv2.rectangle(original_image, pt1=(min_x, min_y), pt2=(max_x, max_y), color=(0, 0, 0), thickness=cv2.FILLED)
return original_image
if __name__ == '__main__':
car_number_detection() | ko | 0.999941 | ## 이미지 저장 # image : source image # image_file_name : image name # middle_name : image's middle name ## temp image를 생성해 주는 함수 # @return: original_image와 똑같은 사이즈의 temp image # original_image 의 크기를 가져옴 # 이미지 생성을 위해서 이미지 크기의 빈 array 선언 ## 이미지 흑백으로 변경 # image : source image # @return gray scale image # 색 변경. gray scale로 변경 ## 이미지를 임계치 값으로 변경 # image : source image # @return thresholded image # 노이즈 제거 # 이미지의 threshold 설정 ## 이미지의 윤곽선을 찾아줌 # image : source image # @return (contour list, contour image) # 윤곽선 찾기 # 빈 이미지 생성 # 윤곽선을 그려줌 ## 윤곽선을 사각형 모양으로 그리기 위한 함수 # contours: 윤곽선 목록 # @return: 사각형 목록, 사각형 이미지 # 사각형의 위치 정보를 저장하기 위해 선언 # 이미지 저장을 위한 이미지 생성 # 윤곽선의 x, y 좌표, 폭, 높이를 가져옴 # 이미지에 사각형을 그려줌 # 사각형 정보를 넣어줌 # cx: x좌표의 중심, cy: y 좌표의 중심 ## 사각형 중 유효한 사각형를 추출 # rects : 사각형 목록 # @return 유효한 사각형 목록, 유효한 사각형 이미지 # 사각형의 최소 넓이 # 사각형의 최소 폭, 높이 # 사각형의 최소, 최대 가로 세로 비율 # 유효한 사각형 목록 # 유효한 사각형에 부여되는 index # 이미지 저장을 위한 이미지 생성 # 넓이 # 비율 # 인덱스를 부여하고 valid_rects에 추가 # 사각형 추가 ## 유효한 사각형 그룹을 가져오는 함수, recursive function # rects : 사각형 목록 # @return 유효한 사각형 그룹의 목록 # 사각형의 대각선 길이의 5배가 최대 간격 # 사각형의 중심 최대 각도 # 사각형의 면적 차이 # 사각형의 넓이 차이 # 사각형의 높이 차이 # 사각형의 그룹의 최소 갯수 # 각을 구하기 위한 중심 거리 계산 # 각 계산 # rect1의 대각선 길이 # 중심 간격 # 면적 비율 # 폭의 비율 # 높이의 비율 # 조건 확인 # rect1도 넣어준다. # rect group이 기준 이하면 결과에 포함하지 않음 # 결과에 포함 # 매칭이 안된 것끼리 다시 진행 # recursive call # recursive 결과 취합 ## 최종적으로 detection하여 비식별화하기 위한 함수 # result_idxs : 최종적으로 선택된 group list # @return 비식별 처리 된 image # 최종 사각형 저장하기 위한 배열 | 3.117811 | 3 |
year/2020/11/seat_planner.py | nbalas/advent_of_code | 0 | 6621215 | from collections import namedtuple
from copy import deepcopy
from logs.setup_logs import init_logs
from readers.file_reader import FileReader
logger = init_logs(__name__)
EMPTY_SEAT = 'L'
OCCUPIED_SEAT = '#'
FLOOR = '.'
PART_1_OCCUPIED_LIMIT = 4
PART_2_OCCUPIED_LIMIT = 5
Coordinates = namedtuple("Coordinates", ('x', 'y'))
DIRECTIONS = {
"N": lambda cords: Coordinates(cords.x, cords.y - 1),
"NE": lambda cords: Coordinates(cords.x + 1, cords.y - 1),
"E": lambda cords: Coordinates(cords.x + 1, cords.y),
"SE": lambda cords: Coordinates(cords.x + 1, cords.y + 1),
"S": lambda cords: Coordinates(cords.x, cords.y + 1),
"SW": lambda cords: Coordinates(cords.x - 1, cords.y + 1),
"W": lambda cords: Coordinates(cords.x - 1, cords.y),
"NW": lambda cords: Coordinates(cords.x - 1, cords.y - 1)
}
def main():
current_seating_map = list(map(list, FileReader.read_input_as_list()))
while True:
# print_seat_map(current_seating_map)
new_seating_map = run_rules(current_seating_map)
if new_seating_map == current_seating_map:
logger.info("Our seating map changes no longer, we have stabilized")
break
current_seating_map = new_seating_map
logger.info(f"There are {count_occupied(new_seating_map)} occupied seats")
def run_rules(seating_map):
next_seating_map = deepcopy(seating_map)
for y_value, row in enumerate(seating_map):
for x_value, seat in enumerate(row):
current_cords = Coordinates(x_value, y_value)
if seat is EMPTY_SEAT and not has_occupied_seats_in_any_direction(seating_map, 0, current_cords):
next_seating_map[current_cords.y][current_cords.x] = OCCUPIED_SEAT
elif seat is OCCUPIED_SEAT and has_occupied_seats_in_any_direction(seating_map, PART_2_OCCUPIED_LIMIT, current_cords):
next_seating_map[current_cords.y][current_cords.x] = EMPTY_SEAT
return next_seating_map
# Part 1
def has_occupied_seats_surrounding(seating_map, occupied_seats_limit, current_cords):
occupied_seats = 0
for x_range in range(-1, 2):
for y_range in range(-1, 2):
# logger.debug(f"Processing {x_range}, {y_range}")
if x_range == 0 and y_range == 0:
continue
current_x = current_cords.x + x_range
current_y = current_cords.y + y_range
if invalid_coordinates(seating_map, Coordinates(current_x, current_y)):
continue
if seating_map[current_y][current_x] is OCCUPIED_SEAT:
occupied_seats += 1
if occupied_seats_limit <= occupied_seats:
return True
# logger.debug(f"There are {occupied_seats} occupied_seats for coordinates {current_cords}")
return False
# Part 2
def has_occupied_seats_in_any_direction(seating_map, occupied_seats_limit, current_cords):
occupied_seats = 0
for direction in DIRECTIONS:
directional_cords = current_cords
while True:
directional_cords = DIRECTIONS[direction](directional_cords)
# logger.debug(f"Searching for occupied seat {direction} of {current_cords} in {directional_cords}")
if invalid_coordinates(seating_map, directional_cords) or seating_map[directional_cords.y][directional_cords.x] is EMPTY_SEAT:
break
if seating_map[directional_cords.y][directional_cords.x] is OCCUPIED_SEAT:
occupied_seats += 1
if occupied_seats_limit <= occupied_seats:
return True
break
return False
def invalid_coordinates(seating_map, cords):
return cords.y < 0 or len(seating_map) <= cords.y or cords.x < 0 or len(seating_map[cords.y]) <= cords.x
def count_occupied(seating_map):
return sum(map(len, [[seat for seat in row if seat is OCCUPIED_SEAT] for row in seating_map]))
def print_seat_map(seating_map):
for row in seating_map:
logger.info(row)
logger.info('\n')
if __name__ == '__main__':
main() | from collections import namedtuple
from copy import deepcopy
from logs.setup_logs import init_logs
from readers.file_reader import FileReader
logger = init_logs(__name__)
EMPTY_SEAT = 'L'
OCCUPIED_SEAT = '#'
FLOOR = '.'
PART_1_OCCUPIED_LIMIT = 4
PART_2_OCCUPIED_LIMIT = 5
Coordinates = namedtuple("Coordinates", ('x', 'y'))
DIRECTIONS = {
"N": lambda cords: Coordinates(cords.x, cords.y - 1),
"NE": lambda cords: Coordinates(cords.x + 1, cords.y - 1),
"E": lambda cords: Coordinates(cords.x + 1, cords.y),
"SE": lambda cords: Coordinates(cords.x + 1, cords.y + 1),
"S": lambda cords: Coordinates(cords.x, cords.y + 1),
"SW": lambda cords: Coordinates(cords.x - 1, cords.y + 1),
"W": lambda cords: Coordinates(cords.x - 1, cords.y),
"NW": lambda cords: Coordinates(cords.x - 1, cords.y - 1)
}
def main():
current_seating_map = list(map(list, FileReader.read_input_as_list()))
while True:
# print_seat_map(current_seating_map)
new_seating_map = run_rules(current_seating_map)
if new_seating_map == current_seating_map:
logger.info("Our seating map changes no longer, we have stabilized")
break
current_seating_map = new_seating_map
logger.info(f"There are {count_occupied(new_seating_map)} occupied seats")
def run_rules(seating_map):
next_seating_map = deepcopy(seating_map)
for y_value, row in enumerate(seating_map):
for x_value, seat in enumerate(row):
current_cords = Coordinates(x_value, y_value)
if seat is EMPTY_SEAT and not has_occupied_seats_in_any_direction(seating_map, 0, current_cords):
next_seating_map[current_cords.y][current_cords.x] = OCCUPIED_SEAT
elif seat is OCCUPIED_SEAT and has_occupied_seats_in_any_direction(seating_map, PART_2_OCCUPIED_LIMIT, current_cords):
next_seating_map[current_cords.y][current_cords.x] = EMPTY_SEAT
return next_seating_map
# Part 1
def has_occupied_seats_surrounding(seating_map, occupied_seats_limit, current_cords):
occupied_seats = 0
for x_range in range(-1, 2):
for y_range in range(-1, 2):
# logger.debug(f"Processing {x_range}, {y_range}")
if x_range == 0 and y_range == 0:
continue
current_x = current_cords.x + x_range
current_y = current_cords.y + y_range
if invalid_coordinates(seating_map, Coordinates(current_x, current_y)):
continue
if seating_map[current_y][current_x] is OCCUPIED_SEAT:
occupied_seats += 1
if occupied_seats_limit <= occupied_seats:
return True
# logger.debug(f"There are {occupied_seats} occupied_seats for coordinates {current_cords}")
return False
# Part 2
def has_occupied_seats_in_any_direction(seating_map, occupied_seats_limit, current_cords):
occupied_seats = 0
for direction in DIRECTIONS:
directional_cords = current_cords
while True:
directional_cords = DIRECTIONS[direction](directional_cords)
# logger.debug(f"Searching for occupied seat {direction} of {current_cords} in {directional_cords}")
if invalid_coordinates(seating_map, directional_cords) or seating_map[directional_cords.y][directional_cords.x] is EMPTY_SEAT:
break
if seating_map[directional_cords.y][directional_cords.x] is OCCUPIED_SEAT:
occupied_seats += 1
if occupied_seats_limit <= occupied_seats:
return True
break
return False
def invalid_coordinates(seating_map, cords):
return cords.y < 0 or len(seating_map) <= cords.y or cords.x < 0 or len(seating_map[cords.y]) <= cords.x
def count_occupied(seating_map):
return sum(map(len, [[seat for seat in row if seat is OCCUPIED_SEAT] for row in seating_map]))
def print_seat_map(seating_map):
for row in seating_map:
logger.info(row)
logger.info('\n')
if __name__ == '__main__':
main() | en | 0.339538 | # print_seat_map(current_seating_map) # Part 1 # logger.debug(f"Processing {x_range}, {y_range}") # logger.debug(f"There are {occupied_seats} occupied_seats for coordinates {current_cords}") # Part 2 # logger.debug(f"Searching for occupied seat {direction} of {current_cords} in {directional_cords}") | 3.129552 | 3 |
CSV File import.py | pgmccann/nbbdd | 2 | 6621216 | <reponame>pgmccann/nbbdd<filename>CSV File import.py
from behave import given, when, then
import os.path
@given('the csv file to import exists')
def step_the_csv_file_to_import_exists(context):
assert os.path.exists("results.csv"), "results.csv does not exist"
@when('I call pandas read_csv')
def step_I_call_pandas_read_csv(context):
context.loader = CSVLoader("results.csv")
context.loader.load()
@given('a dataframe with 3 columns and 5 rows should be returned')
def step_a_dataframe_with_3_columns_and_5_rows_should_be_returned(context):
shape = context.loader.df_Results.shape
assert (shape[0] == 5), "5 rows not returned"
assert (shape[1] == 3), "3 columns not returned" | File import.py
from behave import given, when, then
import os.path
@given('the csv file to import exists')
def step_the_csv_file_to_import_exists(context):
assert os.path.exists("results.csv"), "results.csv does not exist"
@when('I call pandas read_csv')
def step_I_call_pandas_read_csv(context):
context.loader = CSVLoader("results.csv")
context.loader.load()
@given('a dataframe with 3 columns and 5 rows should be returned')
def step_a_dataframe_with_3_columns_and_5_rows_should_be_returned(context):
shape = context.loader.df_Results.shape
assert (shape[0] == 5), "5 rows not returned"
assert (shape[1] == 3), "3 columns not returned" | none | 1 | 3.017116 | 3 | |
Kuriakose Eldho/pookalam.py | aldrin163/Code-a-pookalam | 12 | 6621217 | import cv2
import numpy as np
import math
size = 800
center = size//2
radius = 3*size//8
dark_red = (0,0,170)
red = (0,0,240)
dark_orange = (0,80,255)
orange = (0,120,255)
yellow = (0,200,255)
light_yellow = (214,250,255)
white = (255,255,255)
violet = (100,20,140)
dark_violet = (80,0,100)
dark_green = (0,120,0)
green = (65,175,0)
black = (0,0,0)
def rotate(x,y,xo,yo,theta):
xr=math.cos(theta)*(x-xo)-math.sin(theta)*(y-yo) + xo
yr=math.sin(theta)*(x-xo)+math.cos(theta)*(y-yo) + yo
return (int(xr), int(yr))
def gen_points(r, n, xo = center, yo = center, omega = 0):
result = []
theta = math.radians(360/n)
omega = math.radians(omega)
for i in range(n):
result.append( rotate(xo+r, yo, xo, yo, i*theta + omega) )
return result
im = 255 * np.ones(shape=[size, size, 3], dtype=np.uint8)
im2 = 255 * np.ones(shape=[size, size, 3], dtype=np.uint8)
mask = np.zeros(shape=[size, size, 3], dtype=np.uint8)
mask2 = np.zeros(shape=[size, size, 3], dtype=np.uint8)
cv2.circle(im, (size//2, size//2) , radius +24, red, 1, 0)
cv2.circle(mask, (size//2, size//2) , radius +24, white, 1, 0)
cv2.floodFill(mask,None, (0,0), white)
cv2.circle(mask2, (size//2-1, size//2) , 195, white, 1, 0)
cv2.floodFill(mask2,None, (0,0), white)
for point in gen_points(radius//2, 24):
cv2.circle(im, (point[0], point[1]) , radius, (0,0,0), 1, 0)
# Adding Colors
for point in gen_points(radius - 2, 24, omega=2):
cv2.floodFill(im,None, (point[0], point[1]), dark_green)
for point in gen_points(radius - 16, 24, omega=5):
cv2.floodFill(im,None, (point[0], point[1]), green)
for point in gen_points(radius - 2, 6, omega=5):
cv2.floodFill(im,None, (point[0], point[1]), light_yellow)
for point in gen_points(radius - 16, 6, omega=5):
cv2.floodFill(im,None, (point[0], point[1]), yellow)
for point in gen_points(radius - 16, 6, omega=10):
cv2.floodFill(im,None, (point[0], point[1]), yellow)
for point in gen_points(radius -30, 24, omega=5):
cv2.floodFill(im,None, (point[0], point[1]), orange)
for point in gen_points(radius -30, 6, omega=35):
cv2.floodFill(im,None, (point[0], point[1]), light_yellow)
for point in gen_points(radius -60, 24, omega=0 ):
cv2.floodFill(im,None, (point[0], point[1]), red)
for point in gen_points(radius -70, 24, omega=35 ):
cv2.floodFill(im,None, (point[0], point[1]), dark_red)
for point in gen_points(radius -90, 24, omega=35 ):
cv2.floodFill(im,None, (point[0], point[1]), violet)
for point in gen_points(radius -100, 24, omega=35 ):
cv2.floodFill(im,None, (point[0], point[1]), dark_violet)
# Erase construction lines of layer 1
im = cv2.bitwise_or(im,mask)
cv2.circle(im, (size//2-1,size//2), 196,black,-1,8,0)
cv2.circle(im, (size//2-1,size//2), 194,white,-1,8,0)
#second layer
circ_points = gen_points(196, 24, omega=0, xo = center-1 )
for i in range(12):
cv2.line(im2, circ_points[i], circ_points[i+12], black, 1)
for i in range(5):
cv2.circle(im2, (size//2-1,size//2), 195-30*i,black,1,0)
colors = [dark_violet,red,yellow,light_yellow]
for i in range(5):
for j in range(4):
for points in gen_points(188-30*i, 6, omega=10+ 15*j + 15*i, xo = center-1 ):
cv2.floodFill(im2, None, points, colors[j])
cv2.circle(im2, (size//2-1,size//2), 75,dark_green,-1,8,0)
cv2.circle(im2, (size//2-1,size//2), 65,green,-1,8,0)
cv2.circle(im2, (size//2-1,size//2), 75,black,1,8,0)
cv2.circle(im2, (size//2-1,size//2), 30,black,1,8,0)
for point in gen_points(30, 6, omega=5, xo = center-1 ):
cv2.circle(im2, point, 30,black,1,8,0)
for point in gen_points(32, 6, omega=5, xo = center-1 ):
cv2.floodFill(im2,None, point, light_yellow)
for point in gen_points(32, 6, omega=35, xo = center-1 ):
cv2.floodFill(im2,None, point, yellow)
for point in gen_points(16, 6, omega=35, xo = center-1 ):
cv2.floodFill(im2,None, point, orange)
for point in gen_points(16, 6, omega=5, xo = center-1 ):
cv2.floodFill(im2,None, point, red)
# Erase construction lines of layer 2
im2 = cv2.bitwise_or(im2,mask2)
#Joining layer 1 & 2
im = cv2.bitwise_and(im,im2)
cv2.imwrite("pookalam.png",im) | import cv2
import numpy as np
import math
size = 800
center = size//2
radius = 3*size//8
dark_red = (0,0,170)
red = (0,0,240)
dark_orange = (0,80,255)
orange = (0,120,255)
yellow = (0,200,255)
light_yellow = (214,250,255)
white = (255,255,255)
violet = (100,20,140)
dark_violet = (80,0,100)
dark_green = (0,120,0)
green = (65,175,0)
black = (0,0,0)
def rotate(x,y,xo,yo,theta):
xr=math.cos(theta)*(x-xo)-math.sin(theta)*(y-yo) + xo
yr=math.sin(theta)*(x-xo)+math.cos(theta)*(y-yo) + yo
return (int(xr), int(yr))
def gen_points(r, n, xo = center, yo = center, omega = 0):
result = []
theta = math.radians(360/n)
omega = math.radians(omega)
for i in range(n):
result.append( rotate(xo+r, yo, xo, yo, i*theta + omega) )
return result
im = 255 * np.ones(shape=[size, size, 3], dtype=np.uint8)
im2 = 255 * np.ones(shape=[size, size, 3], dtype=np.uint8)
mask = np.zeros(shape=[size, size, 3], dtype=np.uint8)
mask2 = np.zeros(shape=[size, size, 3], dtype=np.uint8)
cv2.circle(im, (size//2, size//2) , radius +24, red, 1, 0)
cv2.circle(mask, (size//2, size//2) , radius +24, white, 1, 0)
cv2.floodFill(mask,None, (0,0), white)
cv2.circle(mask2, (size//2-1, size//2) , 195, white, 1, 0)
cv2.floodFill(mask2,None, (0,0), white)
for point in gen_points(radius//2, 24):
cv2.circle(im, (point[0], point[1]) , radius, (0,0,0), 1, 0)
# Adding Colors
for point in gen_points(radius - 2, 24, omega=2):
cv2.floodFill(im,None, (point[0], point[1]), dark_green)
for point in gen_points(radius - 16, 24, omega=5):
cv2.floodFill(im,None, (point[0], point[1]), green)
for point in gen_points(radius - 2, 6, omega=5):
cv2.floodFill(im,None, (point[0], point[1]), light_yellow)
for point in gen_points(radius - 16, 6, omega=5):
cv2.floodFill(im,None, (point[0], point[1]), yellow)
for point in gen_points(radius - 16, 6, omega=10):
cv2.floodFill(im,None, (point[0], point[1]), yellow)
for point in gen_points(radius -30, 24, omega=5):
cv2.floodFill(im,None, (point[0], point[1]), orange)
for point in gen_points(radius -30, 6, omega=35):
cv2.floodFill(im,None, (point[0], point[1]), light_yellow)
for point in gen_points(radius -60, 24, omega=0 ):
cv2.floodFill(im,None, (point[0], point[1]), red)
for point in gen_points(radius -70, 24, omega=35 ):
cv2.floodFill(im,None, (point[0], point[1]), dark_red)
for point in gen_points(radius -90, 24, omega=35 ):
cv2.floodFill(im,None, (point[0], point[1]), violet)
for point in gen_points(radius -100, 24, omega=35 ):
cv2.floodFill(im,None, (point[0], point[1]), dark_violet)
# Erase construction lines of layer 1
im = cv2.bitwise_or(im,mask)
cv2.circle(im, (size//2-1,size//2), 196,black,-1,8,0)
cv2.circle(im, (size//2-1,size//2), 194,white,-1,8,0)
#second layer
circ_points = gen_points(196, 24, omega=0, xo = center-1 )
for i in range(12):
cv2.line(im2, circ_points[i], circ_points[i+12], black, 1)
for i in range(5):
cv2.circle(im2, (size//2-1,size//2), 195-30*i,black,1,0)
colors = [dark_violet,red,yellow,light_yellow]
for i in range(5):
for j in range(4):
for points in gen_points(188-30*i, 6, omega=10+ 15*j + 15*i, xo = center-1 ):
cv2.floodFill(im2, None, points, colors[j])
cv2.circle(im2, (size//2-1,size//2), 75,dark_green,-1,8,0)
cv2.circle(im2, (size//2-1,size//2), 65,green,-1,8,0)
cv2.circle(im2, (size//2-1,size//2), 75,black,1,8,0)
cv2.circle(im2, (size//2-1,size//2), 30,black,1,8,0)
for point in gen_points(30, 6, omega=5, xo = center-1 ):
cv2.circle(im2, point, 30,black,1,8,0)
for point in gen_points(32, 6, omega=5, xo = center-1 ):
cv2.floodFill(im2,None, point, light_yellow)
for point in gen_points(32, 6, omega=35, xo = center-1 ):
cv2.floodFill(im2,None, point, yellow)
for point in gen_points(16, 6, omega=35, xo = center-1 ):
cv2.floodFill(im2,None, point, orange)
for point in gen_points(16, 6, omega=5, xo = center-1 ):
cv2.floodFill(im2,None, point, red)
# Erase construction lines of layer 2
im2 = cv2.bitwise_or(im2,mask2)
#Joining layer 1 & 2
im = cv2.bitwise_and(im,im2)
cv2.imwrite("pookalam.png",im) | en | 0.469715 | # Adding Colors # Erase construction lines of layer 1 #second layer # Erase construction lines of layer 2 #Joining layer 1 & 2 | 2.886025 | 3 |
tests/test_dataset.py | andylolz/pyandi | 4 | 6621218 | from os.path import abspath, dirname, join
from unittest import TestCase
from mock import patch
from iatikit.data.dataset import DatasetSet, Dataset
from iatikit.utils.config import CONFIG
class TestDatasets(TestCase):
def __init__(self, *args, **kwargs):
super(TestDatasets, self).__init__(*args, **kwargs)
self.registry_path = join(dirname(abspath(__file__)),
'fixtures', 'registry')
def setUp(self):
self.org_datasets = DatasetSet(
join(self.registry_path, 'data', 'fixture-org', '*'),
join(self.registry_path, 'metadata', 'fixture-org', '*'),
)
def test_datasets_iter(self):
dataset_list = list(self.org_datasets)
assert len(dataset_list) == 3
def test_datasets_filter_by_filetype(self):
act_datasets = self.org_datasets.where(filetype='activity').all()
assert len(act_datasets) == 2
assert act_datasets[0].name == 'fixture-org-activities'
def test_datasets_filter_by_name(self):
org_datasets = self.org_datasets.where(name='fixture-org-org').all()
assert len(org_datasets) == 1
assert org_datasets[0].name == 'fixture-org-org'
class TestDataset(TestCase):
def __init__(self, *args, **kwargs):
super(TestDataset, self).__init__(*args, **kwargs)
registry_path = join(dirname(abspath(__file__)),
'fixtures', 'registry')
self.old_org_acts = Dataset(
join(registry_path, 'data',
'old-org', 'old-org-acts.xml'),
join(registry_path, 'metadata',
'old-org', 'old-org-acts.json'),
)
self.fixture_org_acts = Dataset(
join(registry_path, 'data',
'fixture-org', 'fixture-org-activities.xml'),
join(registry_path, 'metadata',
'fixture-org', 'fixture-org-activities.json'),
)
standard_path = join(dirname(abspath(__file__)),
'fixtures', 'standard')
config_dict = {'paths': {'standard': standard_path}}
CONFIG.read_dict(config_dict)
def test_dataset_name(self):
assert self.old_org_acts.name == 'old-org-acts'
def test_dataset_version(self):
assert self.old_org_acts.version == '1.03'
def test_dataset_repr(self):
dataset_repr = '<Dataset (old-org-acts)>'
assert str(self.old_org_acts) == dataset_repr
def test_dataset_raw_xml(self):
assert self.old_org_acts.raw_xml.startswith(
b'<?xml version="1.0" encoding="UTF-8"?>\n')
def test_dataset_validate_xml(self):
assert bool(self.old_org_acts.validate_xml()) is True
def test_dataset_validate_iati(self):
assert bool(self.old_org_acts.validate_iati()) is True
@patch('logging.Logger.warning')
def test_dataset_validate_codelists_old(self, fake_logger_warning):
assert bool(self.old_org_acts.validate_codelists()) is True
msg = ('Can\'t perform codelist validation for ' +
'IATI version %s datasets.', '1.03')
fake_logger_warning.assert_called_once_with(*msg)
def test_dataset_validate_codelists(self):
result = self.fixture_org_acts.validate_codelists()
assert result.is_valid is False
assert len(result.errors) == 2
err_msgs = [
'The value "999" is not in the Sector Vocabulary codelist.',
'The value "6" is not in the Activity Status codelist.',
]
for error in result.errors:
assert str(error) in err_msgs
def test_dataset_root(self):
assert self.old_org_acts.root == 'iati-activities'
@patch('webbrowser.open_new_tab')
def test_dataset_show(self, fake_open_new_tab):
url = 'https://iatiregistry.org/dataset/old-org-acts'
self.old_org_acts.show()
fake_open_new_tab.assert_called_once_with(url)
def test_activities(self):
assert self.old_org_acts.activities.count() == 2
activity = self.old_org_acts.activities[1]
assert activity.id == 'NL-CHC-98765-NL-CHC-98765-XGG00NS00'
def test_metadata(self):
dataset_metadata = self.old_org_acts.metadata
assert dataset_metadata.get('extras') \
.get('publisher_organization_type') == '21'
| from os.path import abspath, dirname, join
from unittest import TestCase
from mock import patch
from iatikit.data.dataset import DatasetSet, Dataset
from iatikit.utils.config import CONFIG
class TestDatasets(TestCase):
def __init__(self, *args, **kwargs):
super(TestDatasets, self).__init__(*args, **kwargs)
self.registry_path = join(dirname(abspath(__file__)),
'fixtures', 'registry')
def setUp(self):
self.org_datasets = DatasetSet(
join(self.registry_path, 'data', 'fixture-org', '*'),
join(self.registry_path, 'metadata', 'fixture-org', '*'),
)
def test_datasets_iter(self):
dataset_list = list(self.org_datasets)
assert len(dataset_list) == 3
def test_datasets_filter_by_filetype(self):
act_datasets = self.org_datasets.where(filetype='activity').all()
assert len(act_datasets) == 2
assert act_datasets[0].name == 'fixture-org-activities'
def test_datasets_filter_by_name(self):
org_datasets = self.org_datasets.where(name='fixture-org-org').all()
assert len(org_datasets) == 1
assert org_datasets[0].name == 'fixture-org-org'
class TestDataset(TestCase):
def __init__(self, *args, **kwargs):
super(TestDataset, self).__init__(*args, **kwargs)
registry_path = join(dirname(abspath(__file__)),
'fixtures', 'registry')
self.old_org_acts = Dataset(
join(registry_path, 'data',
'old-org', 'old-org-acts.xml'),
join(registry_path, 'metadata',
'old-org', 'old-org-acts.json'),
)
self.fixture_org_acts = Dataset(
join(registry_path, 'data',
'fixture-org', 'fixture-org-activities.xml'),
join(registry_path, 'metadata',
'fixture-org', 'fixture-org-activities.json'),
)
standard_path = join(dirname(abspath(__file__)),
'fixtures', 'standard')
config_dict = {'paths': {'standard': standard_path}}
CONFIG.read_dict(config_dict)
def test_dataset_name(self):
assert self.old_org_acts.name == 'old-org-acts'
def test_dataset_version(self):
assert self.old_org_acts.version == '1.03'
def test_dataset_repr(self):
dataset_repr = '<Dataset (old-org-acts)>'
assert str(self.old_org_acts) == dataset_repr
def test_dataset_raw_xml(self):
assert self.old_org_acts.raw_xml.startswith(
b'<?xml version="1.0" encoding="UTF-8"?>\n')
def test_dataset_validate_xml(self):
assert bool(self.old_org_acts.validate_xml()) is True
def test_dataset_validate_iati(self):
assert bool(self.old_org_acts.validate_iati()) is True
@patch('logging.Logger.warning')
def test_dataset_validate_codelists_old(self, fake_logger_warning):
assert bool(self.old_org_acts.validate_codelists()) is True
msg = ('Can\'t perform codelist validation for ' +
'IATI version %s datasets.', '1.03')
fake_logger_warning.assert_called_once_with(*msg)
def test_dataset_validate_codelists(self):
result = self.fixture_org_acts.validate_codelists()
assert result.is_valid is False
assert len(result.errors) == 2
err_msgs = [
'The value "999" is not in the Sector Vocabulary codelist.',
'The value "6" is not in the Activity Status codelist.',
]
for error in result.errors:
assert str(error) in err_msgs
def test_dataset_root(self):
assert self.old_org_acts.root == 'iati-activities'
@patch('webbrowser.open_new_tab')
def test_dataset_show(self, fake_open_new_tab):
url = 'https://iatiregistry.org/dataset/old-org-acts'
self.old_org_acts.show()
fake_open_new_tab.assert_called_once_with(url)
def test_activities(self):
assert self.old_org_acts.activities.count() == 2
activity = self.old_org_acts.activities[1]
assert activity.id == 'NL-CHC-98765-NL-CHC-98765-XGG00NS00'
def test_metadata(self):
dataset_metadata = self.old_org_acts.metadata
assert dataset_metadata.get('extras') \
.get('publisher_organization_type') == '21'
| none | 1 | 2.043299 | 2 | |
mysql-utilities-1.6.0/mysql/fabric/utils.py | bopopescu/mysql-dbcompare | 2 | 6621219 | <reponame>bopopescu/mysql-dbcompare
#
# Copyright (c) 2013,2014, Oracle and/or its affiliates. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
"""Define features that can be used throughout the code.
"""
import os
import sys
import inspect
import ctypes
import re
import datetime
import uuid
TTL = 0
VERSION_TOKEN = 0
FABRIC_UUID = uuid.UUID('5ca1ab1e-a007-feed-f00d-cab3fe13249e')
class SingletonMeta(type):
"""Define a Singleton.
This Singleton class can be used as follows::
class MyClass(object):
__metaclass__ = SingletonMeta
...
"""
_instances = {}
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super(SingletonMeta, cls).__call__(*args,
**kwargs)
return cls._instances[cls]
class Singleton(object):
"""Define a Singleton.
This Singleton class can be used as follows::
class MyClass(Singleton):
...
"""
__metaclass__ = SingletonMeta
def _do_fork():
"""Create a process.
"""
try:
if os.fork() > 0:
sys.exit(0)
except OSError, error:
sys.stderr.write("fork failed with errno %d: %s\n" %
(error.errno, error.strerror))
sys.exit(1)
def daemonize(stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
"""Standard procedure for daemonizing a process.
This process daemonizes the current process and put it in the
background. When daemonized, logs are written to syslog.
[1] Python Cookbook by Martelli, Ravenscropt, and Ascher.
"""
_do_fork()
os.chdir("/") # The current directory might be removed.
os.umask(0)
os.setsid()
_do_fork()
sys.stdout.flush()
sys.stderr.flush()
sin = file(stdin, 'r')
sout = file(stdout, 'a+')
serr = file(stderr, 'a+', 0)
os.dup2(sin.fileno(), sys.stdin.fileno())
os.dup2(sout.fileno(), sys.stdout.fileno())
os.dup2(serr.fileno(), sys.stdin.fileno())
def async_raise(tid, exctype):
"""Raise an exception within the context of a thread.
:param tid: Thread Id.
:param exctype: Exception class.
:raises: exctype.
"""
if not inspect.isclass(exctype):
raise TypeError("Only types can be raised (not instances).")
res = ctypes.pythonapi.PyThreadState_SetAsyncExc(
ctypes.c_long(tid), ctypes.py_object(exctype)
)
if res == 0:
raise ValueError("Invalid thread id.")
elif res != 1:
ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(tid), None)
raise SystemError("Failed to throw an exception.")
def split_dump_pattern(pattern):
"""Split a comma separated string of patterns, into a list of patterns.
:param pattern: A comma separated string of patterns.
"""
regex = re.compile('\s*,\s*')
return regex.split(pattern)
def split_database_table(fully_qualified_table_name):
"""Split a fully qualified table name, which is the database name
followed by the table name (database_name.table_name).
:param fully_qualified_table_name: The fully qualified table name.
"""
return fully_qualified_table_name.split('.')
def wrap_output(output):
"""Used to wrap the the output in a standard format:
(FABRIC_UUID, VERSION_TOKEN, TTL).
:param output: The output that needs to be wrapped.
:return: the "output" parameter is returned in the following four
tuple format.
"""
return (FABRIC_UUID, VERSION_TOKEN, TTL, output)
def get_time():
"""Get current time using datetime.utcnow().
"""
return datetime.datetime.utcnow().replace(microsecond=0)
def get_time_delta(delta):
"""Transform a value provided through the parameter delta into a
timedelta object.
:param delta: Delta value in seconds.
"""
return datetime.timedelta(seconds=delta)
def get_time_from_timestamp(timestamp):
"""Return a utc time from a timestemp().
"""
return datetime.datetime.utcfromtimestamp(timestamp).replace(microsecond=0)
def get_group_lower_bound_list(input_string):
"""Get the list of GROUP IDs and the LBs from the input string.
:param input_string: String input by the user containing delimited
group ids and LBs.
"""
group_id_list = []
lower_bound_list = []
group_id_lower_bound_list = input_string.replace(' ', '').split(",")
for item in group_id_lower_bound_list:
group_id = None
lower_bound = None
if item.find("/") != -1:
group_id, lower_bound = item.split("/")
else:
group_id = item
if group_id is not None:
group_id_list.append(group_id)
if lower_bound is not None:
lower_bound_list.append(lower_bound)
return group_id_list, lower_bound_list
def dequote(value):
    """Remove single, double or backtick quotes around the value.

    If the value is "spam", spam without quotes will be returned;
    similar with single and backtick quotes. If quotes do not match,
    or the first character is not single, double or backtick, the
    value is returned unchanged. If value is not a string, it is
    simply returned.

    :param value: A string (or any other object).
    :return: A string with quotes removed, or the original object.
    """
    if not isinstance(value, basestring):
        return value
    # BUG FIX: guard the length so that the empty string does not raise
    # IndexError, and a lone quote character is not treated as a
    # matching pair.
    if len(value) >= 2 and value[0] in '\'"`' and value[-1] == value[0]:
        return value[1:-1]
    return value
def check_number_threads(increasing=0):
"""Check the number of threads that are running and whether the maximum
number of connections in the state store is configured accordingly.
:param increasing: Whether you want to increase the number of threads and
how many threads. Default is zero.
It raises a ConfigurationError exception if the number of connections is
too small.
"""
from mysql.fabric import (
errors as _errors,
executor as _executor,
persistence as _persistence,
services as _services,
server as _server,
)
n_sessions = _services.ServiceManager().get_number_sessions()
n_executors = _executor.Executor().get_number_executors()
n_failure_detectors = len(_server.Group.groups_by_status(_server.Group.ACTIVE))
n_controls = 1
persister = _persistence.current_persister()
max_allowed_connections = persister.max_allowed_connections()
if (n_sessions + n_executors + n_controls + n_failure_detectors +\
increasing) > (max_allowed_connections - 1):
raise _errors.ConfigurationError(
"Too many threads requested. Session threads (%s), Executor "
"threads (%s), Control threads (%s) and Failure Detector threads "
"(%s). The maximum number of threads allowed is (%s). Increase "
"the maximum number of connections in the state store in order "
"to increase this limit." % (n_sessions, n_executors, n_controls,
n_failure_detectors, max_allowed_connections - 1)
)
| #
# Copyright (c) 2013,2014, Oracle and/or its affiliates. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
"""Define features that can be used throughout the code.
"""
import os
import sys
import inspect
import ctypes
import re
import datetime
import uuid
TTL = 0
VERSION_TOKEN = 0
FABRIC_UUID = uuid.UUID('5ca1ab1e-a007-feed-f00d-cab3fe13249e')
class SingletonMeta(type):
    """Define a Singleton.

    This Singleton class can be used as follows::

        class MyClass(object):
            __metaclass__ = SingletonMeta
            ...
    """
    # Maps each class using this metaclass to its sole instance.
    _instances = {}

    def __call__(cls, *args, **kwargs):
        # EAFP: return the cached instance, creating it on first use.
        try:
            return cls._instances[cls]
        except KeyError:
            instance = super(SingletonMeta, cls).__call__(*args, **kwargs)
            cls._instances[cls] = instance
            return instance
class Singleton(object):
    """Define a Singleton.

    This Singleton class can be used as follows::

        class MyClass(Singleton):
            ...

    Subclasses share a single instance per class, enforced by the
    SingletonMeta metaclass.
    """
    # NOTE(review): '__metaclass__' is the Python 2 hook; on Python 3
    # it is an ordinary (ignored) class attribute.
    __metaclass__ = SingletonMeta
def _do_fork():
    """Fork the current process and exit the parent.

    The child process continues execution; the parent exits with status
    0 so the caller becomes detached. On failure an error message is
    written to stderr and the process exits with status 1.
    """
    try:
        # In the parent, os.fork() returns the child's pid (> 0).
        if os.fork() > 0:
            sys.exit(0)
    except OSError as error:  # 'as' works on Python 2.6+ and 3.x.
        sys.stderr.write("fork failed with errno %d: %s\n" %
                         (error.errno, error.strerror))
        sys.exit(1)
def daemonize(stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
    """Standard procedure for daemonizing a process.

    Detaches the current process and puts it in the background using
    the classic double-fork recipe. When daemonized, logs are written
    to syslog.

    :param stdin: File used as the daemon's standard input.
    :param stdout: File used as the daemon's standard output.
    :param stderr: File used as the daemon's standard error.

    [1] Python Cookbook by Martelli, Ravenscroft, and Ascher.
    """
    _do_fork()
    os.chdir("/")  # The current directory might be removed.
    os.umask(0)
    os.setsid()    # Become leader of a new session, losing the terminal.
    _do_fork()     # Second fork: the daemon can never re-acquire a tty.
    sys.stdout.flush()
    sys.stderr.flush()
    # 'open' instead of the Python 2-only 'file' builtin.
    sin = open(stdin, 'r')
    sout = open(stdout, 'a+')
    serr = open(stderr, 'a+', 0)  # Unbuffered so errors appear at once.
    os.dup2(sin.fileno(), sys.stdin.fileno())
    os.dup2(sout.fileno(), sys.stdout.fileno())
    # BUG FIX: the original duplicated serr onto *stdin*, leaving the
    # daemon's stderr attached to the old terminal.
    os.dup2(serr.fileno(), sys.stderr.fileno())
def async_raise(tid, exctype):
    """Raise an exception within the context of a thread.

    Uses the CPython C-API ``PyThreadState_SetAsyncExc`` to schedule
    ``exctype`` to be raised asynchronously inside the thread
    identified by ``tid``.

    :param tid: Thread Id.
    :param exctype: Exception class (not an instance).
    :raises: exctype (inside the target thread); TypeError if exctype
        is not a class, ValueError for an invalid thread id,
        SystemError if the C-API call affected more than one thread.
    """
    if not inspect.isclass(exctype):
        raise TypeError("Only types can be raised (not instances).")
    res = ctypes.pythonapi.PyThreadState_SetAsyncExc(
        ctypes.c_long(tid), ctypes.py_object(exctype)
    )
    if res == 0:
        # No thread state matched the given id.
        raise ValueError("Invalid thread id.")
    elif res != 1:
        # More than one thread state was modified: revoke the pending
        # exception by passing NULL (None), then report the failure.
        ctypes.pythonapi.PyThreadState_SetAsyncExc(ctypes.c_long(tid), None)
        raise SystemError("Failed to throw an exception.")
def split_dump_pattern(pattern):
    """Split a comma separated string of patterns into a list of patterns.

    Whitespace around each comma is consumed by the separator, so
    "a, b ,c" yields ['a', 'b', 'c'].

    :param pattern: A comma separated string of patterns.
    :return: List of pattern strings.
    """
    # Raw string: '\s' in a plain string is an invalid escape in
    # modern Python versions.
    regex = re.compile(r'\s*,\s*')
    return regex.split(pattern)
def split_database_table(fully_qualified_table_name):
    """Split a fully qualified table name into its components.

    A fully qualified name is the database name followed by the table
    name, separated by a dot (database_name.table_name).

    :param fully_qualified_table_name: The fully qualified table name.
    :return: List of name components, split on every '.'.
    """
    separator = '.'
    return fully_qualified_table_name.split(separator)
def wrap_output(output):
    """Wrap the output in the standard four-tuple format
    (FABRIC_UUID, VERSION_TOKEN, TTL, output).

    :param output: The output that needs to be wrapped.
    :return: Tuple (FABRIC_UUID, VERSION_TOKEN, TTL, output), where
        the first three elements are the module-level constants.
    """
    return (FABRIC_UUID, VERSION_TOKEN, TTL, output)
def get_time():
    """Return the current UTC time, truncated to whole seconds.

    :return: ``datetime.datetime`` from ``datetime.utcnow()`` with the
        microsecond component zeroed.
    """
    now = datetime.datetime.utcnow()
    return now.replace(microsecond=0)
def get_time_delta(delta):
    """Transform a value given in seconds into a timedelta object.

    :param delta: Delta value in seconds.
    :return: ``datetime.timedelta`` spanning ``delta`` seconds.
    """
    # timedelta(days, seconds): zero days plus the requested seconds.
    return datetime.timedelta(0, delta)
def get_time_from_timestamp(timestamp):
    """Return a UTC time built from a POSIX timestamp, truncated to
    whole seconds (microseconds are zeroed).

    :param timestamp: Seconds since the epoch, as accepted by
        ``datetime.datetime.utcfromtimestamp``.
    """
    return datetime.datetime.utcfromtimestamp(timestamp).replace(microsecond=0)
def get_group_lower_bound_list(input_string):
    """Get the list of GROUP IDs and the LBs from the input string.

    Entries are comma separated; an entry is either "group_id" or
    "group_id/lower_bound". Spaces anywhere in the input are ignored.

    :param input_string: String input by the user containing delimited
        group ids and LBs.
    :return: Tuple (group_id_list, lower_bound_list).
    """
    group_ids = []
    lower_bounds = []
    for entry in input_string.replace(' ', '').split(","):
        if "/" in entry:
            group_id, lower_bound = entry.split("/")
            group_ids.append(group_id)
            lower_bounds.append(lower_bound)
        else:
            group_ids.append(entry)
    return group_ids, lower_bounds
def dequote(value):
    """Remove single, double or backtick quotes around the value.

    If the value is "spam", spam without quotes will be returned;
    similar with single and backtick quotes. If quotes do not match,
    or the first character is not single, double or backtick, the
    value is returned unchanged. If value is not a string, it is
    simply returned.

    :param value: A string (or any other object).
    :return: A string with quotes removed, or the original object.
    """
    if not isinstance(value, basestring):
        return value
    # BUG FIX: guard the length so that the empty string does not raise
    # IndexError, and a lone quote character is not treated as a
    # matching pair.
    if len(value) >= 2 and value[0] in '\'"`' and value[-1] == value[0]:
        return value[1:-1]
    return value
def check_number_threads(increasing=0):
    """Check the number of threads that are running and whether the
    maximum number of connections in the state store is configured
    accordingly.

    Thread consumers counted: service session threads, executor
    threads, one control thread, and one thread per ACTIVE group.

    :param increasing: How many additional threads are about to be
        created. Default is zero.
    :raises ConfigurationError: If the state store's connection limit
        is too small for the requested number of threads.
    """
    # Imported locally, presumably to avoid circular imports at module
    # load time -- TODO confirm.
    from mysql.fabric import (
        errors as _errors,
        executor as _executor,
        persistence as _persistence,
        services as _services,
        server as _server,
    )
    n_sessions = _services.ServiceManager().get_number_sessions()
    n_executors = _executor.Executor().get_number_executors()
    # One failure-detector slot is counted per ACTIVE group.
    n_failure_detectors = len(_server.Group.groups_by_status(_server.Group.ACTIVE))
    n_controls = 1
    persister = _persistence.current_persister()
    max_allowed_connections = persister.max_allowed_connections()
    # One connection is kept in reserve, hence the '- 1'.
    if (n_sessions + n_executors + n_controls + n_failure_detectors +\
            increasing) > (max_allowed_connections - 1):
        raise _errors.ConfigurationError(
            "Too many threads requested. Session threads (%s), Executor "
            "threads (%s), Control threads (%s) and Failure Detector threads "
            "(%s). The maximum number of threads allowed is (%s). Increase "
            "the maximum number of connections in the state store in order "
            "to increase this limit." % (n_sessions, n_executors, n_controls,
                                         n_failure_detectors, max_allowed_connections - 1)
        )
Get current time using datetime.utcnow(). Transform a value provided through the parameter delta into a timedelta object. :param delta: Delta value in seconds. Return a utc time from a timestemp(). Get the list of GROUP IDs and the LBs from the input string. :param input_string: String input by the user containing delimited group ids and LBs. Removes single, double or backtick quotes around the value. If the value is "spam", spam without quotes will be returned. Similar with single and backtick quotes. If quotes do not match, or the first character is not single, double or backtick, the value is returned unchanged. If value is not a string, the value is simply returned. :param value: A string. :return: A string with quotes removed. Check the number of threads that are running and whether the maximum number of connections in the state store is configured accordingly. :param increasing: Whether you want to increase the number of threads and how many threads. Default is zero. It raises a ConfigurationError exception if the number of connections is too small. | 2.586774 | 3 |
authors/apps/article/filters.py | arthurarty/ah-backend-poseidon | 1 | 6621220 | from django_filters import FilterSet
from django_filters import rest_framework as filters
from .models import Article
class ArticleFilter(FilterSet):
"""custom filter class for Articles"""
title = filters.CharFilter('title')
keyword = filters.CharFilter('title', 'icontains')
author = filters.CharFilter('author__username')
tags = filters.CharFilter('tags', method='tags_filter')
class Meta:
model = Article
fields = ['title', 'author', 'keyword', 'tags']
def tags_filter(self, queryset, name, value):
return queryset.filter(tags__name__icontains=value)
| from django_filters import FilterSet
from django_filters import rest_framework as filters
from .models import Article
class ArticleFilter(FilterSet):
"""custom filter class for Articles"""
title = filters.CharFilter('title')
keyword = filters.CharFilter('title', 'icontains')
author = filters.CharFilter('author__username')
tags = filters.CharFilter('tags', method='tags_filter')
class Meta:
model = Article
fields = ['title', 'author', 'keyword', 'tags']
def tags_filter(self, queryset, name, value):
return queryset.filter(tags__name__icontains=value)
| en | 0.749494 | custom filter class for Articles | 2.346374 | 2 |
config/test_config.py | jgoney/api-test | 0 | 6621221 | # Config values for testing
TESTING = True
MONGO_DB = 'api_test' | # Config values for testing
TESTING = True
MONGO_DB = 'api_test' | en | 0.153787 | # Config values for testing | 1.00535 | 1 |
server/udp_module.py | peakBreaker/embedded_utils | 1 | 6621222 | <reponame>peakBreaker/embedded_utils<gh_stars>1-10
#!/usr/bin/python3
import socket
from datetime import datetime
socket.setdefaulttimeout(1)
class udp_handler():
"Class for the udp handler"
def __init__(self, **kw):
# Create a TCP/IP socket
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Bind the socket to the port
self.server_address = ('0.0.0.0', kw['port'])
self.sock.bind(self.server_address)
self.start_time = "Started at :: " + datetime.now().isoformat(' ')
self.last_incoming = None
self.echo = input("Should I echo UDP requests? [Y/N] > ")
self.echo = True if self.echo == 'Y' else False
print("Echo is set to %s" % self.echo)
print("Running UDP Handler with port %s" % str(kw['port']))
def __repr__(self):
return ("udp_handler module with server address: " +
str(self.server_address) + " || echo is " + str(self.echo))
def status_cb(self):
"Gets the status of the module"
if self.last_incoming is None:
self.last_incoming = "No messages yet"
return ("Up and running - " + self.start_time +
" || Last msg :: " + self.last_incoming)
def get_incoming(self):
"Returns the first incoming udp msg"
while True:
# return "0000001"
try:
data, address = self.sock.recvfrom(4096)
print(data)
if data:
self.last_incoming = datetime.now().isoformat(' ')
if self.echo:
sent = self.sock.sendto(data, address)
sent = self.sock.sendto(data + "2", address)
sent = self.sock.sendto(data + "33", address)
print('echo %s bytes to %s thrice!' % (sent, address))
return str(data)
except socket.timeout:
return None
| #!/usr/bin/python3
import socket
from datetime import datetime
socket.setdefaulttimeout(1)
class udp_handler():
    """UDP handler: binds a datagram socket, optionally echoes each
    incoming message back to its sender, and tracks basic status.
    """

    def __init__(self, **kw):
        """Create and bind the UDP socket on kw['port'] and ask the
        operator whether incoming datagrams should be echoed back.
        """
        # Create a UDP (datagram) socket.
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        # Bind the socket to the port on all interfaces.
        self.server_address = ('0.0.0.0', kw['port'])
        self.sock.bind(self.server_address)
        self.start_time = "Started at :: " + datetime.now().isoformat(' ')
        self.last_incoming = None  # Timestamp of the last datagram seen.
        self.echo = input("Should I echo UDP requests? [Y/N] > ")
        self.echo = True if self.echo == 'Y' else False
        print("Echo is set to %s" % self.echo)
        print("Running UDP Handler with port %s" % str(kw['port']))

    def __repr__(self):
        return ("udp_handler module with server address: " +
                str(self.server_address) + " || echo is " + str(self.echo))

    def status_cb(self):
        "Gets the status of the module"
        if self.last_incoming is None:
            self.last_incoming = "No messages yet"
        return ("Up and running - " + self.start_time +
                " || Last msg :: " + self.last_incoming)

    def get_incoming(self):
        "Returns the first incoming udp msg"
        while True:
            try:
                data, address = self.sock.recvfrom(4096)
                print(data)
                if data:
                    self.last_incoming = datetime.now().isoformat(' ')
                    if self.echo:
                        sent = self.sock.sendto(data, address)
                        # BUG FIX: recvfrom() returns bytes on Python 3,
                        # so the echoed suffixes must be bytes too; the
                        # original 'data + "2"' raised TypeError.
                        sent = self.sock.sendto(data + b"2", address)
                        sent = self.sock.sendto(data + b"33", address)
                        print('echo %s bytes to %s thrice!' % (sent, address))
                    return str(data)
            except socket.timeout:
                # Module sets a 1-second default timeout; report
                # "nothing received" to the caller.
                return None
today_mood/api/users/urls.py | 5boon/backend | 4 | 6621223 | from django.conf.urls import url
from rest_framework import routers
from api.users.views import UserInformationViewSet, UserRegisterViewSet, UserPasswordViewSet, UserIDViewSet, \
UserCheckViewSet, SNSLoginViewSet
app_name = 'users'
information = UserInformationViewSet.as_view({
'get': 'list',
'post': 'create'
})
resister = UserRegisterViewSet.as_view({
'post': 'create'
})
password = UserPasswordViewSet.as_view({
'post': 'create',
'patch': 'update'
})
identification = UserIDViewSet.as_view({
'post': 'create'
})
check = UserCheckViewSet.as_view({
'get': 'list'
})
sns = SNSLoginViewSet.as_view({
'post': 'create'
})
urlpatterns = [
url(r'^register/$', resister, name='user_register'),
url(r'^sns/$', sns, name='user_sns'),
url(r'^check/$', check, name='user_check'),
url(r'^password/$', password, name='user_password'),
url(r'^id/$', identification, name='user_id'),
]
router = routers.SimpleRouter()
router.register(r'', UserInformationViewSet, basename='information')
urlpatterns += router.urls
| from django.conf.urls import url
from rest_framework import routers
from api.users.views import UserInformationViewSet, UserRegisterViewSet, UserPasswordViewSet, UserIDViewSet, \
UserCheckViewSet, SNSLoginViewSet
app_name = 'users'  # URL namespace for reverse() lookups.

# Bind viewset actions to HTTP methods for each endpoint.
information = UserInformationViewSet.as_view({
    'get': 'list',
    'post': 'create'
})
# NOTE(review): 'resister' looks like a typo for 'register'; it is a
# local name only, so URLs are unaffected.
resister = UserRegisterViewSet.as_view({
    'post': 'create'
})
password = UserPasswordViewSet.as_view({
    'post': 'create',
    'patch': 'update'
})
identification = UserIDViewSet.as_view({
    'post': 'create'
})
check = UserCheckViewSet.as_view({
    'get': 'list'
})
sns = SNSLoginViewSet.as_view({
    'post': 'create'
})

urlpatterns = [
    url(r'^register/$', resister, name='user_register'),
    url(r'^sns/$', sns, name='user_sns'),
    url(r'^check/$', check, name='user_check'),
    url(r'^password/$', password, name='user_password'),
    url(r'^id/$', identification, name='user_id'),
]

# Router-generated routes for the information viewset are appended
# after the explicit patterns.
router = routers.SimpleRouter()
router.register(r'', UserInformationViewSet, basename='information')
urlpatterns += router.urls
| none | 1 | 2.051568 | 2 | |
django/apps/forecast/views.py | koreander2001/weather-forecast-api | 0 | 6621224 | import json
import os
import pandas as pd
import requests
from datetime import datetime
from dateutil.tz import gettz
from rest_framework.response import Response
from rest_framework.views import APIView
from typing import Any, Dict
from .models import Forecast
from region.models import City
class ForecastView(APIView):
def _get_forecast_json(self, city_id: str):
city = City.objects.get(id=city_id)
api_key = os.environ['DARKSKY_API_KEY']
exclude = (
'currently',
'minutely',
'hourly',
'alerts',
'flags',
)
exclude_str = ','.join(exclude)
url = f'https://api.darksky.net/forecast/{api_key}/{city.lat},{city.lon}?units=auto&exclude={exclude_str}'
res = requests.get(url)
response_data = dict() # type: Dict[str, Any]
forecast_data = res.json()['daily']['data']
fields = [
'date',
'precipProbability',
'temperatureHigh',
'temperatureLow',
]
forecast_df = pd.json_normalize(forecast_data)
forecast_df['date'] = (
forecast_df['time']
.map(lambda x: datetime.fromtimestamp(x).date().isoformat())
)
response_data['forecast'] = forecast_df[fields].to_dict('records')
response_data['provider'] = {
'message': 'Powered by Dark Sky',
'link': 'https://darksky.net/poweredby/',
}
return json.dumps(response_data)
def get(self, request, city_id):
try:
forecast = Forecast.objects.get(city=city_id)
today = datetime.now(tz=gettz(os.environ['TZ'])).date()
if forecast.date != today:
forecast.date = today
forecast.forecast_json = self._get_forecast_json(city_id)
forecast.save()
except Forecast.DoesNotExist:
forecast = Forecast(
city=City.objects.get(id=city_id),
forecast_json=self._get_forecast_json(city_id),
)
forecast.save()
return Response(json.loads(forecast.forecast_json))
| import json
import os
import pandas as pd
import requests
from datetime import datetime
from dateutil.tz import gettz
from rest_framework.response import Response
from rest_framework.views import APIView
from typing import Any, Dict
from .models import Forecast
from region.models import City
class ForecastView(APIView):
    """Serve the daily weather forecast for a city, caching one
    forecast per city per day in the Forecast model.
    """

    def _get_forecast_json(self, city_id: str):
        """Fetch the daily forecast for the city from the Dark Sky API
        and return it serialized as a JSON string.

        :param city_id: Primary key of the City row to look up.
        """
        city = City.objects.get(id=city_id)
        api_key = os.environ['DARKSKY_API_KEY']
        # Only the 'daily' block is used; exclude everything else to
        # keep the response small.
        exclude = (
            'currently',
            'minutely',
            'hourly',
            'alerts',
            'flags',
        )
        exclude_str = ','.join(exclude)
        url = f'https://api.darksky.net/forecast/{api_key}/{city.lat},{city.lon}?units=auto&exclude={exclude_str}'
        res = requests.get(url)
        response_data = dict()  # type: Dict[str, Any]
        forecast_data = res.json()['daily']['data']
        fields = [
            'date',
            'precipProbability',
            'temperatureHigh',
            'temperatureLow',
        ]
        forecast_df = pd.json_normalize(forecast_data)
        # The provider reports 'time' as a unix timestamp; convert it
        # to an ISO date string for the API response.
        forecast_df['date'] = (
            forecast_df['time']
            .map(lambda x: datetime.fromtimestamp(x).date().isoformat())
        )
        response_data['forecast'] = forecast_df[fields].to_dict('records')
        # Attribution required by the provider.
        response_data['provider'] = {
            'message': 'Powered by Dark Sky',
            'link': 'https://darksky.net/poweredby/',
        }
        return json.dumps(response_data)

    def get(self, request, city_id):
        """GET handler: serve the cached forecast when it is from
        today, otherwise refresh it from the provider first.
        """
        try:
            forecast = Forecast.objects.get(city=city_id)
            today = datetime.now(tz=gettz(os.environ['TZ'])).date()
            if forecast.date != today:
                # Cached forecast is stale -- refresh it in place.
                forecast.date = today
                forecast.forecast_json = self._get_forecast_json(city_id)
                forecast.save()
        except Forecast.DoesNotExist:
            # First request for this city: create the cache row.
            forecast = Forecast(
                city=City.objects.get(id=city_id),
                forecast_json=self._get_forecast_json(city_id),
            )
            forecast.save()
        return Response(json.loads(forecast.forecast_json))
| en | 0.060968 | # type: Dict[str, Any] | 2.530878 | 3 |
pycalphad/core/constraints.py | igorjrd/pycalphad | 0 | 6621225 | from symengine import sympify, lambdify, Symbol
from pycalphad.core.cache import cacheit
from pycalphad import variables as v
from pycalphad.core.constants import INTERNAL_CONSTRAINT_SCALING, MULTIPHASE_CONSTRAINT_SCALING
from pycalphad.core.utils import wrap_symbol_symengine
from collections import namedtuple
ConstraintFunctions = namedtuple('ConstraintFunctions', ['cons_func', 'cons_jac', 'cons_hess'])
@cacheit
def _build_constraint_functions(variables, constraints, parameters=None, cse=True):
if parameters is None:
parameters = []
else:
parameters = [wrap_symbol_symengine(p) for p in parameters]
variables = tuple(variables)
wrt = variables
parameters = tuple(parameters)
constraint__func, jacobian_func, hessian_func = None, None, None
inp = sympify(variables + parameters)
graph = sympify(constraints)
constraint_func = lambdify(inp, [graph], backend='lambda', cse=cse)
grad_graphs = list(list(c.diff(w) for w in wrt) for c in graph)
jacobian_func = lambdify(inp, grad_graphs, backend='lambda', cse=cse)
hess_graphs = list(list(list(g.diff(w) for w in wrt) for g in c) for c in grad_graphs)
hessian_func = lambdify(inp, hess_graphs, backend='lambda', cse=cse)
return ConstraintFunctions(cons_func=constraint_func, cons_jac=jacobian_func, cons_hess=hessian_func)
ConstraintTuple = namedtuple('ConstraintTuple', ['internal_cons_func', 'internal_cons_jac', 'internal_cons_hess',
'multiphase_cons_func', 'multiphase_cons_jac', 'multiphase_cons_hess',
'num_internal_cons', 'num_multiphase_cons'])
def is_multiphase_constraint(cond):
cond = str(cond)
if cond == 'N' or cond.startswith('X_'):
return True
else:
return False
def build_constraints(mod, variables, conds, parameters=None):
internal_constraints = mod.get_internal_constraints()
internal_constraints = [INTERNAL_CONSTRAINT_SCALING*x for x in internal_constraints]
multiphase_constraints = mod.get_multiphase_constraints(conds)
multiphase_constraints = [MULTIPHASE_CONSTRAINT_SCALING*x for x in multiphase_constraints]
cf_output = _build_constraint_functions(variables, internal_constraints,
parameters=parameters)
internal_cons_func = cf_output.cons_func
internal_cons_jac = cf_output.cons_jac
internal_cons_hess = cf_output.cons_hess
result_build = _build_constraint_functions(variables + [Symbol('NP')],
multiphase_constraints,
parameters=parameters)
multiphase_cons_func = result_build.cons_func
multiphase_cons_jac = result_build.cons_jac
multiphase_cons_hess = result_build.cons_hess
return ConstraintTuple(internal_cons_func=internal_cons_func, internal_cons_jac=internal_cons_jac,
internal_cons_hess=internal_cons_hess,
multiphase_cons_func=multiphase_cons_func, multiphase_cons_jac=multiphase_cons_jac,
multiphase_cons_hess=multiphase_cons_hess,
num_internal_cons=len(internal_constraints), num_multiphase_cons=len(multiphase_constraints))
def get_multiphase_constraint_rhs(conds):
return [MULTIPHASE_CONSTRAINT_SCALING*float(value) for cond, value in conds.items() if is_multiphase_constraint(cond)]
| from symengine import sympify, lambdify, Symbol
from pycalphad.core.cache import cacheit
from pycalphad import variables as v
from pycalphad.core.constants import INTERNAL_CONSTRAINT_SCALING, MULTIPHASE_CONSTRAINT_SCALING
from pycalphad.core.utils import wrap_symbol_symengine
from collections import namedtuple
ConstraintFunctions = namedtuple('ConstraintFunctions', ['cons_func', 'cons_jac', 'cons_hess'])
@cacheit
def _build_constraint_functions(variables, constraints, parameters=None, cse=True):
    """Build compiled callables for the constraints and their first
    and second derivatives with respect to ``variables``.

    :param variables: Ordered degrees of freedom the constraints
        depend on.
    :param constraints: Iterable of symbolic constraint expressions.
    :param parameters: Optional symbols treated as extra
        (non-differentiated) inputs appended after the variables.
    :param cse: Whether common-subexpression elimination is applied by
        symengine's lambdify.
    :return: ConstraintFunctions(cons_func, cons_jac, cons_hess).
    """
    if parameters is None:
        parameters = []
    else:
        parameters = [wrap_symbol_symengine(p) for p in parameters]
    variables = tuple(variables)
    wrt = variables
    parameters = tuple(parameters)
    # (Removed a dead assignment of 'constraint__func, jacobian_func,
    # hessian_func = None, None, None' -- all were immediately
    # overwritten and the first name was a typo that was never read.)
    inp = sympify(variables + parameters)
    graph = sympify(constraints)
    constraint_func = lambdify(inp, [graph], backend='lambda', cse=cse)
    # Jacobian: d(constraint_i)/d(variable_j).
    grad_graphs = list(list(c.diff(w) for w in wrt) for c in graph)
    jacobian_func = lambdify(inp, grad_graphs, backend='lambda', cse=cse)
    # Hessian: all second derivatives of every constraint.
    hess_graphs = list(list(list(g.diff(w) for w in wrt) for g in c) for c in grad_graphs)
    hessian_func = lambdify(inp, hess_graphs, backend='lambda', cse=cse)
    return ConstraintFunctions(cons_func=constraint_func, cons_jac=jacobian_func, cons_hess=hessian_func)
ConstraintTuple = namedtuple('ConstraintTuple', ['internal_cons_func', 'internal_cons_jac', 'internal_cons_hess',
'multiphase_cons_func', 'multiphase_cons_jac', 'multiphase_cons_hess',
'num_internal_cons', 'num_multiphase_cons'])
def is_multiphase_constraint(cond):
    """Return True when the condition contributes a multiphase
    constraint: the total moles condition ``N`` or an overall
    composition condition ``X_<component>``.

    :param cond: Condition object; compared via its string form.
    """
    name = str(cond)
    return name == 'N' or name.startswith('X_')
def build_constraints(mod, variables, conds, parameters=None):
    """Build the compiled internal and multiphase constraint functions
    (values, Jacobians and Hessians) for a phase model.

    :param mod: Model instance providing get_internal_constraints()
        and get_multiphase_constraints().
    :param variables: Ordered degrees of freedom of the model.
    :param conds: Equilibrium conditions used to build the multiphase
        constraints.
    :param parameters: Optional symbols passed through to the compiled
        functions as extra inputs.
    :return: ConstraintTuple with the compiled callables and the
        number of constraints of each kind.
    """
    internal_constraints = mod.get_internal_constraints()
    # Scale the residuals by the module-level scaling constants.
    internal_constraints = [INTERNAL_CONSTRAINT_SCALING*x for x in internal_constraints]
    multiphase_constraints = mod.get_multiphase_constraints(conds)
    multiphase_constraints = [MULTIPHASE_CONSTRAINT_SCALING*x for x in multiphase_constraints]
    cf_output = _build_constraint_functions(variables, internal_constraints,
                                            parameters=parameters)
    internal_cons_func = cf_output.cons_func
    internal_cons_jac = cf_output.cons_jac
    internal_cons_hess = cf_output.cons_hess
    # Multiphase constraints additionally depend on the phase amount NP.
    result_build = _build_constraint_functions(variables + [Symbol('NP')],
                                               multiphase_constraints,
                                               parameters=parameters)
    multiphase_cons_func = result_build.cons_func
    multiphase_cons_jac = result_build.cons_jac
    multiphase_cons_hess = result_build.cons_hess
    return ConstraintTuple(internal_cons_func=internal_cons_func, internal_cons_jac=internal_cons_jac,
                           internal_cons_hess=internal_cons_hess,
                           multiphase_cons_func=multiphase_cons_func, multiphase_cons_jac=multiphase_cons_jac,
                           multiphase_cons_hess=multiphase_cons_hess,
                           num_internal_cons=len(internal_constraints), num_multiphase_cons=len(multiphase_constraints))
return [MULTIPHASE_CONSTRAINT_SCALING*float(value) for cond, value in conds.items() if is_multiphase_constraint(cond)]
| none | 1 | 2.157244 | 2 | |
ThreadFixProApi/Applications/_utils/__init__.py | denimgroup/threadfix-python-api | 1 | 6621226 | from ._team import TeamsAPI
from ._application import ApplicationsAPI
from ._defect_trackers import DefectTrackersAPI
from ._policies import PoliciesAPI
from ._scans import ScansAPI
from ._tags import TagsAPI
from ._tasks import TasksAPI
from ._vulnerabilities import VulnerabilitiesAPI
from ._waf import WafsAPI
from ._cicd import CICDAPI
from ._remote_providers import RemoteProvidersAPI
from ._users_roles_and_groups import UsersRolesAndGroupsAPI
from ._email_reporting import EmailReportingAPI
from ._miscellaneous import MiscellaneousAPI
from ._system_customization import SystemCustomizationAPI | from ._team import TeamsAPI
from ._application import ApplicationsAPI
from ._defect_trackers import DefectTrackersAPI
from ._policies import PoliciesAPI
from ._scans import ScansAPI
from ._tags import TagsAPI
from ._tasks import TasksAPI
from ._vulnerabilities import VulnerabilitiesAPI
from ._waf import WafsAPI
from ._cicd import CICDAPI
from ._remote_providers import RemoteProvidersAPI
from ._users_roles_and_groups import UsersRolesAndGroupsAPI
from ._email_reporting import EmailReportingAPI
from ._miscellaneous import MiscellaneousAPI
from ._system_customization import SystemCustomizationAPI | none | 1 | 1.044711 | 1 | |
work/run.py | jackthgu/K-AR_HYU_Deform_Simulation | 0 | 6621227 | import os,sys
os.chdir('../../work')
os.system(sys.argv[1])
| import os,sys
os.chdir('../../work')
os.system(sys.argv[1])
| none | 1 | 1.529572 | 2 | |
PicoWizard/modules/locale/locale.py | PureTryOut/pico-wizard | 11 | 6621228 | # SPDX-FileCopyrightText: 2021 <NAME> <<EMAIL>>
#
# SPDX-License-Identifier: MIT
import os
from PySide2.QtCore import QUrl, Slot, Property, QObject, Signal, QSortFilterProxyModel, Qt, QProcess
from PySide2.QtQml import qmlRegisterType
from PicoWizard.module import Module
from PicoWizard.modules.locale.localemodel import LocaleModel
from PicoWizard.modules.locale.localeslist import locales
from PicoWizard.utils.logger import Logger
class Locale(Module):
log = Logger.getLogger(__name__)
__filterText__ = ''
def __init__(self, parent=None):
super().__init__(__file__, parent)
self.__localeModel__ = LocaleModel(parent)
self.__localeProxyModel__ = QSortFilterProxyModel(parent)
self.__localeProxyModel__.setSourceModel(self.__localeModel__)
self.__localeProxyModel__.setFilterRole(LocaleModel.Roles.NameRole)
self.__localeProxyModel__.setFilterCaseSensitivity(Qt.CaseInsensitive)
self.__localeProxyModel__.setSortRole(LocaleModel.Roles.NameRole)
self.__localeProxyModel__.setSortCaseSensitivity(Qt.CaseInsensitive)
self.__localeProxyModel__.sort(0, Qt.AscendingOrder)
for locale in locales:
self.__localeModel__.addLocaleItem(locale)
@staticmethod
def registerTypes() -> None:
qmlRegisterType(Locale, 'PicoWizard', 1, 0, 'LocaleModule')
qmlRegisterType(LocaleModel, 'PicoWizard', 1, 0, 'LocaleModel')
@staticmethod
def qmlPath() -> QUrl:
return QUrl(os.path.join(os.path.dirname(os.path.realpath(__file__)), "Locale.qml"))
@Slot(None, result=str)
def moduleName(self) -> str:
return self.tr("Locale")
@Slot(None)
def writeLocaleGenConfig(self):
self.log.debug(f'Selected locales : {self.__localeModel__.getSelectedLocales()}')
process = QProcess(self)
args = [os.path.join(os.path.dirname(os.path.realpath(__file__)), "writelocalegenconfig.sh")]
for locale in self.__localeModel__.getSelectedLocales():
args.append(f"{locale[0]} {locale[1]}")
self.log.debug(f"writelocalegenconfig.sh arguments : {args}")
process.start('/usr/bin/pkexec', args)
process.finished.connect(lambda exitCode, exitStatus: self.writeLocaleScriptSuccess(exitCode, exitStatus, process))
process.error.connect(lambda err: self.writeLocaleScriptError(err))
def writeLocaleScriptSuccess(self, exitCode, exitStatus, process):
if exitCode != 0:
self.log.error('Failed to write `localegen` config')
self.localeSetupFailed.emit()
self.errorOccurred.emit("Failed to write `localegen` config")
else:
self.log.info('`localegen` config written successfully')
self.log.info("Running `locale-gen` command to generate locales")
process = QProcess(self)
args = ['locale-gen']
process.start('/usr/bin/pkexec', args)
process.finished.connect(lambda exitCode, exitStatus: self.localeGenCmdSuccess(exitCode, exitStatus, process))
process.error.connect(lambda err: self.localeGenCmdError(err))
def writeLocaleScriptError(self, err):
self.log.error('Failed to write `localegen` config')
self.log.error(err)
self.localeSetupFailed.emit()
self.errorOccurred.emit("Failed to write `localegen` config")
def localeGenCmdSuccess(self, exitCode, exitStatus, process):
if exitCode != 0:
self.log.error('`locale-gen` command failed')
self.localeSetupFailed.emit()
self.errorOccurred.emit("`locale-gen` command failed")
else:
self.log.info('`locale-gen` command complete')
self.log.debug(process.readAll())
self.localeSetupSuccess.emit()
def localeGenCmdError(self, err):
self.log.error('`locale-gen` command failed')
self.log.error(err)
self.localeSetupFailed.emit()
self.errorOccurred.emit("`locale-gen` command failed")
def __getModel__(self):
return self.__localeProxyModel__
def __getFilterText__(self):
return self.__filterText__
def __setFilterText__(self, text):
self.__filterText__ = text
self.__localeProxyModel__.setFilterRegExp(text)
filterTextChanged = Signal()
modelChanged = Signal()
localeSetupSuccess = Signal()
localeSetupFailed = Signal()
model = Property(QObject, __getModel__, notify=modelChanged)
filterText = Property(str, __getFilterText__, __setFilterText__, notify=filterTextChanged)
| # SPDX-FileCopyrightText: 2021 <NAME> <<EMAIL>>
#
# SPDX-License-Identifier: MIT
import os
from PySide2.QtCore import QUrl, Slot, Property, QObject, Signal, QSortFilterProxyModel, Qt, QProcess
from PySide2.QtQml import qmlRegisterType
from PicoWizard.module import Module
from PicoWizard.modules.locale.localemodel import LocaleModel
from PicoWizard.modules.locale.localeslist import locales
from PicoWizard.utils.logger import Logger
class Locale(Module):
log = Logger.getLogger(__name__)
__filterText__ = ''
def __init__(self, parent=None):
super().__init__(__file__, parent)
self.__localeModel__ = LocaleModel(parent)
self.__localeProxyModel__ = QSortFilterProxyModel(parent)
self.__localeProxyModel__.setSourceModel(self.__localeModel__)
self.__localeProxyModel__.setFilterRole(LocaleModel.Roles.NameRole)
self.__localeProxyModel__.setFilterCaseSensitivity(Qt.CaseInsensitive)
self.__localeProxyModel__.setSortRole(LocaleModel.Roles.NameRole)
self.__localeProxyModel__.setSortCaseSensitivity(Qt.CaseInsensitive)
self.__localeProxyModel__.sort(0, Qt.AscendingOrder)
for locale in locales:
self.__localeModel__.addLocaleItem(locale)
@staticmethod
def registerTypes() -> None:
qmlRegisterType(Locale, 'PicoWizard', 1, 0, 'LocaleModule')
qmlRegisterType(LocaleModel, 'PicoWizard', 1, 0, 'LocaleModel')
@staticmethod
def qmlPath() -> QUrl:
return QUrl(os.path.join(os.path.dirname(os.path.realpath(__file__)), "Locale.qml"))
@Slot(None, result=str)
def moduleName(self) -> str:
return self.tr("Locale")
@Slot(None)
def writeLocaleGenConfig(self):
self.log.debug(f'Selected locales : {self.__localeModel__.getSelectedLocales()}')
process = QProcess(self)
args = [os.path.join(os.path.dirname(os.path.realpath(__file__)), "writelocalegenconfig.sh")]
for locale in self.__localeModel__.getSelectedLocales():
args.append(f"{locale[0]} {locale[1]}")
self.log.debug(f"writelocalegenconfig.sh arguments : {args}")
process.start('/usr/bin/pkexec', args)
process.finished.connect(lambda exitCode, exitStatus: self.writeLocaleScriptSuccess(exitCode, exitStatus, process))
process.error.connect(lambda err: self.writeLocaleScriptError(err))
def writeLocaleScriptSuccess(self, exitCode, exitStatus, process):
if exitCode != 0:
self.log.error('Failed to write `localegen` config')
self.localeSetupFailed.emit()
self.errorOccurred.emit("Failed to write `localegen` config")
else:
self.log.info('`localegen` config written successfully')
self.log.info("Running `locale-gen` command to generate locales")
process = QProcess(self)
args = ['locale-gen']
process.start('/usr/bin/pkexec', args)
process.finished.connect(lambda exitCode, exitStatus: self.localeGenCmdSuccess(exitCode, exitStatus, process))
process.error.connect(lambda err: self.localeGenCmdError(err))
def writeLocaleScriptError(self, err):
self.log.error('Failed to write `localegen` config')
self.log.error(err)
self.localeSetupFailed.emit()
self.errorOccurred.emit("Failed to write `localegen` config")
def localeGenCmdSuccess(self, exitCode, exitStatus, process):
if exitCode != 0:
self.log.error('`locale-gen` command failed')
self.localeSetupFailed.emit()
self.errorOccurred.emit("`locale-gen` command failed")
else:
self.log.info('`locale-gen` command complete')
self.log.debug(process.readAll())
self.localeSetupSuccess.emit()
def localeGenCmdError(self, err):
self.log.error('`locale-gen` command failed')
self.log.error(err)
self.localeSetupFailed.emit()
self.errorOccurred.emit("`locale-gen` command failed")
def __getModel__(self):
return self.__localeProxyModel__
def __getFilterText__(self):
return self.__filterText__
def __setFilterText__(self, text):
self.__filterText__ = text
self.__localeProxyModel__.setFilterRegExp(text)
filterTextChanged = Signal()
modelChanged = Signal()
localeSetupSuccess = Signal()
localeSetupFailed = Signal()
model = Property(QObject, __getModel__, notify=modelChanged)
filterText = Property(str, __getFilterText__, __setFilterText__, notify=filterTextChanged)
| de | 0.260507 | # SPDX-FileCopyrightText: 2021 <NAME> <<EMAIL>> # # SPDX-License-Identifier: MIT | 1.864278 | 2 |
custom_components/tekmar_482/helpers.py | WillCodeForCats/tekmar-482 | 0 | 6621229 |
def regBytes(integer):
return divmod(integer, 0x100)
def degCtoF(degC):
return ((degC * 9/5) + 32)
def degCtoE(degC):
return (2 * degC)
def degEtoC(degE):
#degE = 2*(degC)
return (degE / 2)
def degHtoF(degH):
#degH = 10*(degF) + 850
return ((degH - 850) / 10)
def degFtoC(degF):
return ((degF - 32) / 1.8)
def degHtoC(degH):
return degFtoC(degHtoF(degH))
|
def regBytes(integer):
return divmod(integer, 0x100)
def degCtoF(degC):
return ((degC * 9/5) + 32)
def degCtoE(degC):
return (2 * degC)
def degEtoC(degE):
#degE = 2*(degC)
return (degE / 2)
def degHtoF(degH):
#degH = 10*(degF) + 850
return ((degH - 850) / 10)
def degFtoC(degF):
return ((degF - 32) / 1.8)
def degHtoC(degH):
return degFtoC(degHtoF(degH))
| es | 0.202226 | #degE = 2*(degC) #degH = 10*(degF) + 850 | 2.694842 | 3 |
mwach/settings_hiv.py | uw-ictd/mwbase | 1 | 6621230 | from .settings_base import *
### app specific overides
INSTALLED_APPS += ('mwhiv',)
ROOT_URLCONF = 'mwach.urls.hiv'
STATICFILES_DIRS = [f'{PROJECT_ROOT}/mwhiv/static', f'{PROJECT_ROOT}/mwbase/static']
### Swappable classes and inherited classes
SMSBANK_CLASS = 'utils.sms_utils.FinalRowHIV'
MWBASE_AUTOMATEDMESSAGE_MODEL = "mwhiv.AutomatedMessageHIV"
MWBASE_PARTICIPANT_MODEL = "mwhiv.Participant"
MWBASE_STATUSCHANGE_MODEL = "mwhiv.StatusChange" | from .settings_base import *
### app specific overides
INSTALLED_APPS += ('mwhiv',)
ROOT_URLCONF = 'mwach.urls.hiv'
STATICFILES_DIRS = [f'{PROJECT_ROOT}/mwhiv/static', f'{PROJECT_ROOT}/mwbase/static']
### Swappable classes and inherited classes
SMSBANK_CLASS = 'utils.sms_utils.FinalRowHIV'
MWBASE_AUTOMATEDMESSAGE_MODEL = "mwhiv.AutomatedMessageHIV"
MWBASE_PARTICIPANT_MODEL = "mwhiv.Participant"
MWBASE_STATUSCHANGE_MODEL = "mwhiv.StatusChange" | en | 0.737788 | ### app specific overides ### Swappable classes and inherited classes | 1.519014 | 2 |
django-rest-greetings/greetings/testtask_api/views.py | orionoiro/test-tasks | 0 | 6621231 | import datetime
from rest_framework import mixins, generics
from drf_renderer_xlsx.mixins import XLSXFileMixin
from .models import Record
from .serializers import RecordSerializer
class RecordsView(mixins.ListModelMixin,
mixins.CreateModelMixin,
XLSXFileMixin,
generics.GenericAPIView):
queryset = Record.objects.all()
serializer_class = RecordSerializer
filename = 'greetings.xlsx'
def get_queryset(self):
date = self.request.query_params.get('date', None)
if date:
self.queryset = Record.objects.filter(date=datetime.date.fromisoformat(date))
return self.queryset
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
def put(self, request, *args, **kwargs):
# HTML-form browsable-api date filtering
if request.data['date']:
date = request.data['date']
self.queryset = Record.objects.filter(date=datetime.date.fromisoformat(date))
return self.list(request, *args, **kwargs)
| import datetime
from rest_framework import mixins, generics
from drf_renderer_xlsx.mixins import XLSXFileMixin
from .models import Record
from .serializers import RecordSerializer
class RecordsView(mixins.ListModelMixin,
mixins.CreateModelMixin,
XLSXFileMixin,
generics.GenericAPIView):
queryset = Record.objects.all()
serializer_class = RecordSerializer
filename = 'greetings.xlsx'
def get_queryset(self):
date = self.request.query_params.get('date', None)
if date:
self.queryset = Record.objects.filter(date=datetime.date.fromisoformat(date))
return self.queryset
def get(self, request, *args, **kwargs):
return self.list(request, *args, **kwargs)
def put(self, request, *args, **kwargs):
# HTML-form browsable-api date filtering
if request.data['date']:
date = request.data['date']
self.queryset = Record.objects.filter(date=datetime.date.fromisoformat(date))
return self.list(request, *args, **kwargs)
| en | 0.299495 | # HTML-form browsable-api date filtering | 2.188594 | 2 |
hardware/testing/arfan.py | haziquehaikal/smartdb | 0 | 6621232 | <gh_stars>0
import schedule
import time
import urllib3
import json
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(4, GPIO.OUT)
GPIO.setup(17, GPIO.OUT)
p = GPIO.PWM(4, 50)
p.start(2.5)
t = GPIO.PWM(17, 50)
t.start(2.5)
op = {"FUSE OPEN"}
cl = {"FUSE CLOSE"}
o = json.dumps(op)
c = json.dumps(cl)
print("ACTIVATION SCRIPT SERVER")
def stopcheck(self,stat):
self.test = stat
def checkactivation():
print("CHECK ACTIVATION")
http = urllib3.PoolManager()
r = http.request('GET', 'http://192.168.43.135/smartdbbox/api/public/api/device/check')
stat = json.loads(r.data)
fuse1 = json.dumps({
"Up":"1"
})
fuse2 = json.dumps({
"Up":"1"
})
s = http.request('POST', 'http://192.168.43.135/smartdbbox/api/public/api/device/check', headers={'Content-Type': 'application/json'}, body=fuse1)
job = schedule.every(2).seconds.do(checkactivation)
while True:
motor = input('Select motor')
if (motor == "A"):
state = input('Enter Fuse A State')
if (state == '1'):
p.ChangeDutyCycle(7.5)
time.sleep(1)
fuse1up = 1
p.ChangeDutyCycle(2.5)
print(o)
elif (state == '0'):
p.ChangeDutyCycle(12.5)
time.sleep(1)
fuse1down = 0
p.ChangeDutyCycle(2.5)
print(c)
elif(motor == 'B'):
state = input('Enter Fuse B State ')
if (state == '1'):
t.ChangeDutyCycle(7.5)
time.sleep(1)
fuse2up = 1
t.ChangeDutyCycle(2.5)
print(o)
elif (state == '0'):
t.ChangeDutyCycle(12.5)
time.sleep(1)
fuse2down = 0
t.ChangeDutyCycle(2.5)
print(c)
http = urllib3.PoolManager()
r = http.request('POST', 'http://192.168.43.135/smartdbbox/api/public/api/device/check',
headers={'Content-Type': 'application/json'},
body=fuse1)
stat = json.loads(r.data)
| import schedule
import time
import urllib3
import json
import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(4, GPIO.OUT)
GPIO.setup(17, GPIO.OUT)
p = GPIO.PWM(4, 50)
p.start(2.5)
t = GPIO.PWM(17, 50)
t.start(2.5)
op = {"FUSE OPEN"}
cl = {"FUSE CLOSE"}
o = json.dumps(op)
c = json.dumps(cl)
print("ACTIVATION SCRIPT SERVER")
def stopcheck(self,stat):
self.test = stat
def checkactivation():
print("CHECK ACTIVATION")
http = urllib3.PoolManager()
r = http.request('GET', 'http://192.168.43.135/smartdbbox/api/public/api/device/check')
stat = json.loads(r.data)
fuse1 = json.dumps({
"Up":"1"
})
fuse2 = json.dumps({
"Up":"1"
})
s = http.request('POST', 'http://192.168.43.135/smartdbbox/api/public/api/device/check', headers={'Content-Type': 'application/json'}, body=fuse1)
job = schedule.every(2).seconds.do(checkactivation)
while True:
motor = input('Select motor')
if (motor == "A"):
state = input('Enter Fuse A State')
if (state == '1'):
p.ChangeDutyCycle(7.5)
time.sleep(1)
fuse1up = 1
p.ChangeDutyCycle(2.5)
print(o)
elif (state == '0'):
p.ChangeDutyCycle(12.5)
time.sleep(1)
fuse1down = 0
p.ChangeDutyCycle(2.5)
print(c)
elif(motor == 'B'):
state = input('Enter Fuse B State ')
if (state == '1'):
t.ChangeDutyCycle(7.5)
time.sleep(1)
fuse2up = 1
t.ChangeDutyCycle(2.5)
print(o)
elif (state == '0'):
t.ChangeDutyCycle(12.5)
time.sleep(1)
fuse2down = 0
t.ChangeDutyCycle(2.5)
print(c)
http = urllib3.PoolManager()
r = http.request('POST', 'http://192.168.43.135/smartdbbox/api/public/api/device/check',
headers={'Content-Type': 'application/json'},
body=fuse1)
stat = json.loads(r.data) | none | 1 | 2.640187 | 3 | |
chartingperformance/__init__.py | netplusdesign/home-performance-flask | 1 | 6621233 | <filename>chartingperformance/__init__.py
from flask import Flask
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy import exc
from sqlalchemy import event
from sqlalchemy.pool import Pool
from flask_cors import CORS
app = Flask(__name__)
app.config.from_object('chartingperformance.default_settings')
app.config.from_envvar('HOMEPERFORMANCE_SETTINGS')
engine = create_engine(app.config['DATABASE_URI'])
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
@event.listens_for(Pool, "checkout")
def ping_connection(dbapi_connection, connection_record, connection_proxy):
cursor = dbapi_connection.cursor()
try:
cursor.execute("SELECT 1")
except:
# optional - dispose the whole pool
# instead of invalidating one at a time
# connection_proxy._pool.dispose()
# raise DisconnectionError - pool will try
# connecting again up to three times before raising.
raise exc.DisconnectionError()
cursor.close()
CORS(app, resources=r'/api/*', allow_headers='Content-Type')
import chartingperformance.routes
@app.teardown_appcontext
def shutdown_session(exception=None):
db_session.remove()
if __name__ == '__main__':
app.run(host=app.config['HOST'], port=app.config['PORT'], debug=app.config['DEBUG'])
| <filename>chartingperformance/__init__.py
from flask import Flask
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy import exc
from sqlalchemy import event
from sqlalchemy.pool import Pool
from flask_cors import CORS
app = Flask(__name__)
app.config.from_object('chartingperformance.default_settings')
app.config.from_envvar('HOMEPERFORMANCE_SETTINGS')
engine = create_engine(app.config['DATABASE_URI'])
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
@event.listens_for(Pool, "checkout")
def ping_connection(dbapi_connection, connection_record, connection_proxy):
cursor = dbapi_connection.cursor()
try:
cursor.execute("SELECT 1")
except:
# optional - dispose the whole pool
# instead of invalidating one at a time
# connection_proxy._pool.dispose()
# raise DisconnectionError - pool will try
# connecting again up to three times before raising.
raise exc.DisconnectionError()
cursor.close()
CORS(app, resources=r'/api/*', allow_headers='Content-Type')
import chartingperformance.routes
@app.teardown_appcontext
def shutdown_session(exception=None):
db_session.remove()
if __name__ == '__main__':
app.run(host=app.config['HOST'], port=app.config['PORT'], debug=app.config['DEBUG'])
| en | 0.731611 | # optional - dispose the whole pool # instead of invalidating one at a time # connection_proxy._pool.dispose() # raise DisconnectionError - pool will try # connecting again up to three times before raising. | 2.204992 | 2 |
prestodb/prestodb.py | sasank1/plugins | 36 | 6621234 | #!/usr/bin/python
import json
import jpype
from jpype import java
from jpype import javax
# if any impacting changes to this plugin kindly increment the plugin version here
plugin_version = 1
# Setting this to true will alert you when there is a communication problem while posting plugin data to server
heartbeat_required = "true"
host=None
port=None
prestodb_metrics={
"AbandonedQueries.TotalCount" : "execution_abandoned_queries_total_count",
"CanceledQueries.TotalCount" : "execution_canceled_queries_total_count",
"CompletedQueries.TotalCount" : "execution_completed_queries_total_count",
"ConsumedCpuTimeSecs.TotalCount" : "execution_consumed_cpu_time_secs_total_count",
"StartedQueries.TotalCount" : "execution_started_queries_total_count",
"Executor.ActiveCount" : "executor_active_count",
"Executor.CompletedTaskCount" : "executor_completed_task_count",
"Executor.CorePoolSize" : "executor_core_pool_size",
"Executor.LargestPoolSize" : "executor_largest_pool_size",
"Executor.MaximumPoolSize" : "executor_maximum_pool_size",
"Executor.QueuedTaskCount" : "executor_queued_task_count",
"Executor.TaskCount" : "executor_task_count",
"ActiveCount" : "failure_detector_active_count",
"ClusterMemoryBytes" : "cluster_memory_bytes",
"AssignedQueries" : "memory_assigned_queries",
"BlockedNodes" : "memory_blocked_nodes",
"FreeDistributedBytes" : "memory_free_distributed_bytes",
"Nodes" : "memory_nodes",
"ReservedDistributedBytes" : "memory_reserved_distributed_bytes",
"ReservedRevocableDistributedBytes" : "memory_reserved_revocable_distributed_bytes",
"TotalDistributedBytes" : "memory_total_distributed_bytes",
"FreeBytes" : "memory_free_bytes",
"MaxBytes" : "memory_max_bytes",
"ReservedBytes" : "memory_reserved_bytes",
"ReservedRevocableBytes" : "memory_reserved_revocable_bytes"
}
metric_units={
"execution_consumed_cpu_time_secs_total_count" : "second",
"cluster_memory_bytes" : "byte",
"memory_assigned_queries" : "byte",
"memory_blocked_nodes" : "byte",
"memory_free_distributed_bytes" : "byte",
"memory_nodes" : "byte",
"memory_reserved_distributed_bytes" : "byte",
"memory_reserved_revocable_distributed_bytes" : "byte",
"memory_total_distributed_bytes" : "byte",
"memory_free_bytes" : "byte",
"memory_max_bytes" : "byte",
"memory_reserved_bytes" : "byte",
"memory_reserved_revocable_bytes" : "byte"
}
user=""
passw=""
result={}
def metric_collector(url):
try:
jpype.startJVM()
jhash=java.util.HashMap()
jarray=jpype.JArray(java.lang.String)([user,passw])
jhash.put(javax.management.remote.JMXConnector.CREDENTIALS,jarray);
jmxurl=javax.management.remote.JMXServiceURL(url)
jmxsoc=javax.management.remote.JMXConnectorFactory.connect(jmxurl,jhash)
connection=jmxsoc.getMBeanServerConnection();
queries={
"com.facebook.presto.execution:name=QueryManager" : ["AbandonedQueries.TotalCount","CanceledQueries.TotalCount","CompletedQueries.TotalCount","ConsumedCpuTimeSecs.TotalCount","StartedQueries.TotalCount"],
"com.facebook.presto.execution:name=RemoteTaskFactory" : ["Executor.ActiveCount","Executor.CompletedTaskCount","Executor.CorePoolSize","Executor.LargestPoolSize","Executor.MaximumPoolSize","Executor.QueuedTaskCount","Executor.TaskCount"],
"com.facebook.presto.failureDetector:name=HeartbeatFailureDetector" : ["ActiveCount"],
"com.facebook.presto.memory:name=ClusterMemoryManager" : ["ClusterMemoryBytes"],
"com.facebook.presto.memory:type=ClusterMemoryPool,name=general" : ["AssignedQueries","BlockedNodes","FreeDistributedBytes","Nodes","ReservedDistributedBytes","ReservedRevocableDistributedBytes","TotalDistributedBytes"],
"com.facebook.presto.memory:type=MemoryPool,name=general" : ["FreeBytes","MaxBytes","ReservedBytes","ReservedRevocableBytes"]
}
for query in queries:
for metric in queries[query]:
metric_value=round(connection.getAttribute(javax.management.ObjectName(query),metric))
result[prestodb_metrics[metric]]=metric_value
result["plugin_version"]=plugin_version
result["units"]=metric_units
except Exception as e:
result["msg"]=str(e)
result["status"]=0
result["heartbeat_required"]=heartbeat_required
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--host', help="provide host adress",type=str)
parser.add_argument('--port', help="provide port number",type=str)
args = parser.parse_args()
if args.host:
host=args.host
if args.port:
port=args.port
url="service:jmx:rmi:///jndi/rmi://%s:%s/jmxrmi" %(host,port)
metric_collector(url)
print(json.dumps(result, indent=4, sort_keys=True))
| #!/usr/bin/python
import json
import jpype
from jpype import java
from jpype import javax
# if any impacting changes to this plugin kindly increment the plugin version here
plugin_version = 1
# Setting this to true will alert you when there is a communication problem while posting plugin data to server
heartbeat_required = "true"
host=None
port=None
prestodb_metrics={
"AbandonedQueries.TotalCount" : "execution_abandoned_queries_total_count",
"CanceledQueries.TotalCount" : "execution_canceled_queries_total_count",
"CompletedQueries.TotalCount" : "execution_completed_queries_total_count",
"ConsumedCpuTimeSecs.TotalCount" : "execution_consumed_cpu_time_secs_total_count",
"StartedQueries.TotalCount" : "execution_started_queries_total_count",
"Executor.ActiveCount" : "executor_active_count",
"Executor.CompletedTaskCount" : "executor_completed_task_count",
"Executor.CorePoolSize" : "executor_core_pool_size",
"Executor.LargestPoolSize" : "executor_largest_pool_size",
"Executor.MaximumPoolSize" : "executor_maximum_pool_size",
"Executor.QueuedTaskCount" : "executor_queued_task_count",
"Executor.TaskCount" : "executor_task_count",
"ActiveCount" : "failure_detector_active_count",
"ClusterMemoryBytes" : "cluster_memory_bytes",
"AssignedQueries" : "memory_assigned_queries",
"BlockedNodes" : "memory_blocked_nodes",
"FreeDistributedBytes" : "memory_free_distributed_bytes",
"Nodes" : "memory_nodes",
"ReservedDistributedBytes" : "memory_reserved_distributed_bytes",
"ReservedRevocableDistributedBytes" : "memory_reserved_revocable_distributed_bytes",
"TotalDistributedBytes" : "memory_total_distributed_bytes",
"FreeBytes" : "memory_free_bytes",
"MaxBytes" : "memory_max_bytes",
"ReservedBytes" : "memory_reserved_bytes",
"ReservedRevocableBytes" : "memory_reserved_revocable_bytes"
}
metric_units={
"execution_consumed_cpu_time_secs_total_count" : "second",
"cluster_memory_bytes" : "byte",
"memory_assigned_queries" : "byte",
"memory_blocked_nodes" : "byte",
"memory_free_distributed_bytes" : "byte",
"memory_nodes" : "byte",
"memory_reserved_distributed_bytes" : "byte",
"memory_reserved_revocable_distributed_bytes" : "byte",
"memory_total_distributed_bytes" : "byte",
"memory_free_bytes" : "byte",
"memory_max_bytes" : "byte",
"memory_reserved_bytes" : "byte",
"memory_reserved_revocable_bytes" : "byte"
}
user=""
passw=""
result={}
def metric_collector(url):
try:
jpype.startJVM()
jhash=java.util.HashMap()
jarray=jpype.JArray(java.lang.String)([user,passw])
jhash.put(javax.management.remote.JMXConnector.CREDENTIALS,jarray);
jmxurl=javax.management.remote.JMXServiceURL(url)
jmxsoc=javax.management.remote.JMXConnectorFactory.connect(jmxurl,jhash)
connection=jmxsoc.getMBeanServerConnection();
queries={
"com.facebook.presto.execution:name=QueryManager" : ["AbandonedQueries.TotalCount","CanceledQueries.TotalCount","CompletedQueries.TotalCount","ConsumedCpuTimeSecs.TotalCount","StartedQueries.TotalCount"],
"com.facebook.presto.execution:name=RemoteTaskFactory" : ["Executor.ActiveCount","Executor.CompletedTaskCount","Executor.CorePoolSize","Executor.LargestPoolSize","Executor.MaximumPoolSize","Executor.QueuedTaskCount","Executor.TaskCount"],
"com.facebook.presto.failureDetector:name=HeartbeatFailureDetector" : ["ActiveCount"],
"com.facebook.presto.memory:name=ClusterMemoryManager" : ["ClusterMemoryBytes"],
"com.facebook.presto.memory:type=ClusterMemoryPool,name=general" : ["AssignedQueries","BlockedNodes","FreeDistributedBytes","Nodes","ReservedDistributedBytes","ReservedRevocableDistributedBytes","TotalDistributedBytes"],
"com.facebook.presto.memory:type=MemoryPool,name=general" : ["FreeBytes","MaxBytes","ReservedBytes","ReservedRevocableBytes"]
}
for query in queries:
for metric in queries[query]:
metric_value=round(connection.getAttribute(javax.management.ObjectName(query),metric))
result[prestodb_metrics[metric]]=metric_value
result["plugin_version"]=plugin_version
result["units"]=metric_units
except Exception as e:
result["msg"]=str(e)
result["status"]=0
result["heartbeat_required"]=heartbeat_required
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--host', help="provide host adress",type=str)
parser.add_argument('--port', help="provide port number",type=str)
args = parser.parse_args()
if args.host:
host=args.host
if args.port:
port=args.port
url="service:jmx:rmi:///jndi/rmi://%s:%s/jmxrmi" %(host,port)
metric_collector(url)
print(json.dumps(result, indent=4, sort_keys=True))
| en | 0.71822 | #!/usr/bin/python # if any impacting changes to this plugin kindly increment the plugin version here # Setting this to true will alert you when there is a communication problem while posting plugin data to server | 1.655494 | 2 |
main.py | alejogs4/statistics-python | 0 | 6621235 | # Arhivo para hacer pruebas de cada uno de los modulos
import descriptive
import dispersion
import probability
from distributions import binomial_distribution, acumulated_binomial, hypergeometric, acumulated_hypergeometric, poisson, exponential, norm
from distributions import uniform
from intervals import interval_poblational_average, interval_poblational_average_t, get_interval_p, get_sample_length_p_m, get_sample_length_p_p, get_z
from hipotesis import get_z_two_queues, get_p, proof_pi, proof_diff_means, proof_diff_means_t, proof_diff_proportions, get_size_to_estimate_population_diff_means, get_size_to_estimate_population_diff_proportions
from ANOVA import anova
# Taller
# print(binomial_distribution(10, 0.05, 2))
# Si me piden menor a 2
# print(acumulated_binomial(range(0, 3), 10, 0.05))
'''Aqui es si me piden mayor a algo en poisson'''
# sum = 0
# for i in range(0, 11):
# sum += poisson(i, 10.0)
# print(1 - sum)
# Taller
'''
1)
Supongamos que la probabilidad de tener una unidad defectuosa en una línea de
ensamblaje es de 0.05. Si el conjunto de unidades terminadas constituye un conjunto de
ensayos independientes
'''
print
| # Arhivo para hacer pruebas de cada uno de los modulos
import descriptive
import dispersion
import probability
from distributions import binomial_distribution, acumulated_binomial, hypergeometric, acumulated_hypergeometric, poisson, exponential, norm
from distributions import uniform
from intervals import interval_poblational_average, interval_poblational_average_t, get_interval_p, get_sample_length_p_m, get_sample_length_p_p, get_z
from hipotesis import get_z_two_queues, get_p, proof_pi, proof_diff_means, proof_diff_means_t, proof_diff_proportions, get_size_to_estimate_population_diff_means, get_size_to_estimate_population_diff_proportions
from ANOVA import anova
# Taller
# print(binomial_distribution(10, 0.05, 2))
# Si me piden menor a 2
# print(acumulated_binomial(range(0, 3), 10, 0.05))
'''Aqui es si me piden mayor a algo en poisson'''
# sum = 0
# for i in range(0, 11):
# sum += poisson(i, 10.0)
# print(1 - sum)
# Taller
'''
1)
Supongamos que la probabilidad de tener una unidad defectuosa en una línea de
ensamblaje es de 0.05. Si el conjunto de unidades terminadas constituye un conjunto de
ensayos independientes
'''
print
| es | 0.827017 | # Arhivo para hacer pruebas de cada uno de los modulos # Taller # print(binomial_distribution(10, 0.05, 2)) # Si me piden menor a 2 # print(acumulated_binomial(range(0, 3), 10, 0.05)) Aqui es si me piden mayor a algo en poisson # sum = 0 # for i in range(0, 11): # sum += poisson(i, 10.0) # print(1 - sum) # Taller 1) Supongamos que la probabilidad de tener una unidad defectuosa en una línea de ensamblaje es de 0.05. Si el conjunto de unidades terminadas constituye un conjunto de ensayos independientes | 3.197472 | 3 |
receipts/receipts.py | dannysauer/misc_python | 0 | 6621236 | <filename>receipts/receipts.py
#!/usr/bin/env python3
import mimetypes
import os.path
import pickle

from google.auth.transport.requests import Request
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload
# If modifying these scopes, delete the file token.pickle.
class receipts:
""" main receipt upload class
"""
def __init__(self):
self.SCOPES = [
'https://www.googleapis.com/auth/drive.file',
# 'https://www.googleapis.com/auth/spreadsheets',
]
self.folder_id = None
self.folder_name = 'Receipt images'
self.folder_type = 'application/vnd.google-apps.folder'
self.sheet_id = None
self.sheet_name = 'Receipts sheet'
self.sheet_type = 'application/vnd.google-apps.spreadsheet'
self.service = None
self._auth('/home/sauer/google_receipt-upload.json')
self._init_folder()
self._init_sheet()
def _auth(self, cred_file):
cache_file = 'token.pickle'
creds = None
# The cache_file stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the
# first time.
if os.path.exists(cache_file):
with open(cache_file, 'rb') as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
cred_file, self.SCOPES
)
creds = flow.run_local_server(port=0)
# Save the credentials for the next run
with open(cache_file, 'wb') as token:
pickle.dump(creds, token)
self.service = build('drive', 'v3', credentials=creds)
def get_or_create(self, name, mime_type):
""" Get (after first creating as needed) the named item
"""
file = None
result = self.service.files().list(
q=(f"mimeType = '{mime_type}' "
f"and name = '{name}' "
f"and trashed != true "
),
spaces='drive',
fields='files(id, name)',
).execute().get('files', [])
if result:
print("Found target")
if len(result) > 1:
print("some kind of warning with logger")
# and then look in to adding metadata which distinguishes ours
file = result[0]
else:
print(f"Creating new object {name}\n")
file_metadata = {
'name': name,
'mimeType': mime_type
}
file = self.service.files(
).create(body=file_metadata, fields='id').execute()
return file.get('id')
def _init_folder(self):
self.folder_id = self.get_or_create(
self.folder_name,
self.folder_type
)
print(f"Folder (name: {self.folder_name}) ID: {self.folder_id}")
def _init_sheet(self):
self.sheet_id = self.get_or_create(
self.sheet_name,
self.sheet_type
)
print(f"Sheet (name: {self.sheet_name}) ID: {self.sheet_id}")
def process_image(self, img_path):
if not os.path.exists(img_path):
return False
id = self.upload_image(img_path)
return(id)
def upload_image(self, img_path):
    """Upload the JPEG at ``img_path`` into the receipts Drive folder.

    The file is stored under its lower-cased basename; the created Drive
    file resource (containing the new file's 'id') is returned.
    """
    body = {
        'name': os.path.basename(img_path).lower(),
        'parents': [self.folder_id]
    }
    upload = MediaFileUpload(img_path, mimetype='image/jpeg')
    created = self.service.files().create(
        body=body,
        media_body=upload,
        fields='id',
    ).execute()
    return created
def main():
    """Entry point: upload one sample receipt image to Google Drive."""
    uploader = receipts()
    sample = '/home/sauer/dev/github/receipt-parser/data/img/IMG_0349.jpeg'
    uploader.process_image(sample)


if __name__ == '__main__':
    main()
| <filename>receipts/receipts.py
#!/usr/bin/env python3
import pickle
import os.path
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
# If modifying these scopes, delete the file token.pickle.
class receipts:
    """Upload receipt images to a Google Drive folder and track them in a sheet.

    Construction authenticates against the Drive API (caching OAuth tokens
    in ``token.pickle``), then locates — creating if necessary — the target
    folder and spreadsheet on Drive.
    """

    def __init__(self):
        # If these scopes change, the cached token.pickle must be deleted.
        self.SCOPES = [
            'https://www.googleapis.com/auth/drive.file',
            # 'https://www.googleapis.com/auth/spreadsheets',
        ]
        self.folder_id = None
        self.folder_name = 'Receipt images'
        self.folder_type = 'application/vnd.google-apps.folder'
        self.sheet_id = None
        self.sheet_name = 'Receipts sheet'
        self.sheet_type = 'application/vnd.google-apps.spreadsheet'
        self.service = None
        self._auth('/home/sauer/google_receipt-upload.json')
        self._init_folder()
        self._init_sheet()

    def _auth(self, cred_file):
        """Authenticate with Google and build the Drive v3 service client.

        Tokens are cached in ``token.pickle``; the interactive browser flow
        only runs when no (refreshable) cached credentials exist.
        """
        cache_file = 'token.pickle'
        creds = None
        # The cache_file stores the user's access and refresh tokens, and is
        # created automatically when the authorization flow completes for the
        # first time.
        if os.path.exists(cache_file):
            with open(cache_file, 'rb') as token:
                creds = pickle.load(token)
        # If there are no (valid) credentials available, let the user log in.
        if not creds or not creds.valid:
            if creds and creds.expired and creds.refresh_token:
                creds.refresh(Request())
            else:
                flow = InstalledAppFlow.from_client_secrets_file(
                    cred_file, self.SCOPES
                )
                creds = flow.run_local_server(port=0)
            # Save the credentials for the next run.
            with open(cache_file, 'wb') as token:
                pickle.dump(creds, token)
        self.service = build('drive', 'v3', credentials=creds)

    def get_or_create(self, name, mime_type):
        """Return the Drive file ID of the named item, creating it if absent."""
        # Escape backslashes and single quotes so ``name`` cannot break out
        # of the quoted Drive query string.
        safe_name = name.replace("\\", "\\\\").replace("'", "\\'")
        result = self.service.files().list(
            q=(f"mimeType = '{mime_type}' "
               f"and name = '{safe_name}' "
               f"and trashed != true "
               ),
            spaces='drive',
            fields='files(id, name)',
        ).execute().get('files', [])
        if result:
            print("Found target")
            if len(result) > 1:
                # More than one match: we arbitrarily take the first one.
                print(f"WARNING: {len(result)} items named {name!r} found; using the first")
                # and then look in to adding metadata which distinguishes ours
            file = result[0]
        else:
            print(f"Creating new object {name}\n")
            file_metadata = {
                'name': name,
                'mimeType': mime_type
            }
            file = self.service.files(
            ).create(body=file_metadata, fields='id').execute()
        return file.get('id')

    def _init_folder(self):
        """Ensure the receipts folder exists on Drive and cache its ID."""
        self.folder_id = self.get_or_create(
            self.folder_name,
            self.folder_type
        )
        print(f"Folder (name: {self.folder_name}) ID: {self.folder_id}")

    def _init_sheet(self):
        """Ensure the receipts spreadsheet exists on Drive and cache its ID."""
        self.sheet_id = self.get_or_create(
            self.sheet_name,
            self.sheet_type
        )
        print(f"Sheet (name: {self.sheet_name}) ID: {self.sheet_id}")

    def process_image(self, img_path):
        """Upload the image at ``img_path``.

        Returns False when the path does not exist on disk; otherwise the
        result of :meth:`upload_image` (the created Drive file resource).
        """
        if not os.path.exists(img_path):
            return False
        # ``uploaded`` rather than ``id``: avoid shadowing the builtin.
        uploaded = self.upload_image(img_path)
        return uploaded

    def upload_image(self, img_path):
        """Upload the JPEG at ``img_path`` into the receipts Drive folder.

        The file is stored under its lower-cased basename; the created
        Drive file resource (containing the new file's 'id') is returned.
        """
        metadata = {
            'name': os.path.basename(img_path).lower(),
            'parents': [self.folder_id]
        }
        media = MediaFileUpload(img_path, mimetype='image/jpeg')
        file = self.service.files().create(
            body=metadata,
            media_body=media,
            fields='id',
        ).execute()
        return file
def main():
    """Entry point: upload a receipt image to Google Drive.

    The image path may be supplied as the first command-line argument;
    with no argument the historical sample path is used, preserving the
    original behavior.
    """
    import sys  # local import keeps the module's import block untouched
    default = '/home/sauer/dev/github/receipt-parser/data/img/IMG_0349.jpeg'
    path = sys.argv[1] if len(sys.argv) > 1 else default
    r = receipts()
    r.process_image(path)


if __name__ == '__main__':
    main()
| en | 0.841787 | #!/usr/bin/env python3 # If modifying these scopes, delete the file token.pickle. main receipt upload class # 'https://www.googleapis.com/auth/spreadsheets', # The cache_file stores the user's access and refresh tokens, and is # created automatically when the authorization flow completes for the # first time. # If there are no (valid) credentials available, let the user log in. # Save the credentials for the next run Get (after first creating as needed) the named item # and then look in to adding metadata which distinguishes ours #print(r) | 2.634859 | 3 |
code2/day09/demo11.py | picktsh/python | 1 | 6621237 | <reponame>picktsh/python<filename>code2/day09/demo11.py
# 教学系统的浏览器设置方法
from selenium.webdriver.chrome.webdriver import RemoteWebDriver # 从selenium库中调用RemoteWebDriver模块
from selenium.webdriver.chrome.options import Options # 从options模块中调用Options类
from bs4 import BeautifulSoup
import time
chrome_options = Options() # 实例化Option对象
chrome_options.add_argument('--headless') # 对浏览器的设置
driver = RemoteWebDriver("http://chromedriver.python-class-fos.svc:4444/wd/hub",
chrome_options.to_capabilities()) # 声明浏览器对象
driver.get('https://y.qq.com/n/yqq/song/000xdZuV2LcQ19.html') # 访问页面
time.sleep(2)
button = driver.find_element_by_class_name('js_get_more_hot') # 根据类名找到【点击加载更多】
button.click() # 点击
time.sleep(2) # 等待两秒
pageSource = driver.page_source # 获取Elements中渲染完成的网页源代码
soup = BeautifulSoup(pageSource, 'html.parser') # 使用bs解析网页
comments = soup.find('ul', class_='js_hot_list').find_all('li', class_='js_cmt_li') # 使用bs提取元素
print(len(comments)) # 打印comments的数量
for comment in comments: # 循环
sweet = comment.find('p') # 提取评论
print('评论:%s\n ---\n' % sweet.text) # 打印评论
driver.close() # 关闭浏览器
| # 教学系统的浏览器设置方法
from selenium.webdriver.chrome.webdriver import RemoteWebDriver # 从selenium库中调用RemoteWebDriver模块
from selenium.webdriver.chrome.options import Options # 从options模块中调用Options类
from bs4 import BeautifulSoup
import time
chrome_options = Options() # 实例化Option对象
chrome_options.add_argument('--headless') # 对浏览器的设置
driver = RemoteWebDriver("http://chromedriver.python-class-fos.svc:4444/wd/hub",
chrome_options.to_capabilities()) # 声明浏览器对象
driver.get('https://y.qq.com/n/yqq/song/000xdZuV2LcQ19.html') # 访问页面
time.sleep(2)
button = driver.find_element_by_class_name('js_get_more_hot') # 根据类名找到【点击加载更多】
button.click() # 点击
time.sleep(2) # 等待两秒
pageSource = driver.page_source # 获取Elements中渲染完成的网页源代码
soup = BeautifulSoup(pageSource, 'html.parser') # 使用bs解析网页
comments = soup.find('ul', class_='js_hot_list').find_all('li', class_='js_cmt_li') # 使用bs提取元素
print(len(comments)) # 打印comments的数量
for comment in comments: # 循环
sweet = comment.find('p') # 提取评论
print('评论:%s\n ---\n' % sweet.text) # 打印评论
driver.close() # 关闭浏览器 | zh | 0.954877 | # 教学系统的浏览器设置方法 # 从selenium库中调用RemoteWebDriver模块 # 从options模块中调用Options类 # 实例化Option对象 # 对浏览器的设置 # 声明浏览器对象 # 访问页面 # 根据类名找到【点击加载更多】 # 点击 # 等待两秒 # 获取Elements中渲染完成的网页源代码 # 使用bs解析网页 # 使用bs提取元素 # 打印comments的数量 # 循环 # 提取评论 # 打印评论 # 关闭浏览器 | 3.090243 | 3 |
lambdata_Vincent_Emma/df_utils.py | Vincent-Emma/lambdata_Vincent_Emma | 0 | 6621238 | <gh_stars>0
import numpy as np
import pandas as pd
ONES = pd.Series(np.ones(20))
ZEROS = pd.Series(np.zeros(20))
def isna(df):
print(df.isna().sum())
def correlation_matrix(df):
print(df.corr())
| import numpy as np
import pandas as pd
ONES = pd.Series(np.ones(20))
ZEROS = pd.Series(np.zeros(20))
def isna(df):
print(df.isna().sum())
def correlation_matrix(df):
print(df.corr()) | none | 1 | 2.715399 | 3 | |
pycounter/test/test_pr1.py | yannsta/pycounter | 60 | 6621239 | """Test COUNTER DB1 database report"""
import os
import pycounter.report
def test_pr1_reportname(pr1_report):
assert pr1_report.report_type == "PR1"
def test_pr1_stats(pr1_report):
publication = pr1_report.pubs[0]
assert [x[2] for x in publication] == [91, 41, 13, 21, 44, 8, 0, 0, 36, 36, 7, 2]
def test_pr1_row_metric(pr1_report):
# test metric of the first row
jan_data = next(iter(pr1_report.pubs[0]))
assert jan_data[1] == "Regular Searches"
# test metric of the second row
jan_data = next(iter(pr1_report.pubs[1]))
assert jan_data[1] == "Searches-federated and automated"
def test_output(tmp_path):
report = pycounter.report.parse(
os.path.join(os.path.dirname(__file__), "data/PR1.tsv")
)
report.write_tsv(str(tmp_path / "outputfile.tsv"))
with open(str(tmp_path / "outputfile.tsv"), "rb") as new_file:
new_content = new_file.read()
assert b"Searches-federated" in new_content
| """Test COUNTER DB1 database report"""
import os
import pycounter.report
def test_pr1_reportname(pr1_report):
assert pr1_report.report_type == "PR1"
def test_pr1_stats(pr1_report):
publication = pr1_report.pubs[0]
assert [x[2] for x in publication] == [91, 41, 13, 21, 44, 8, 0, 0, 36, 36, 7, 2]
def test_pr1_row_metric(pr1_report):
# test metric of the first row
jan_data = next(iter(pr1_report.pubs[0]))
assert jan_data[1] == "Regular Searches"
# test metric of the second row
jan_data = next(iter(pr1_report.pubs[1]))
assert jan_data[1] == "Searches-federated and automated"
def test_output(tmp_path):
report = pycounter.report.parse(
os.path.join(os.path.dirname(__file__), "data/PR1.tsv")
)
report.write_tsv(str(tmp_path / "outputfile.tsv"))
with open(str(tmp_path / "outputfile.tsv"), "rb") as new_file:
new_content = new_file.read()
assert b"Searches-federated" in new_content
| en | 0.71579 | Test COUNTER DB1 database report # test metric of the first row # test metric of the second row | 2.704726 | 3 |
Code/sets.py | Andre-Williams22/CS-1.3-Core-Data-Structures | 0 | 6621240 | <filename>Code/sets.py<gh_stars>0
#!python
#from hashtable import HashTable
from binarytree import BinarySearchTree, BinaryTreeNode
class Treeset:
def __init__(self, elements=None):
''' initialize a new empty set structure, and add each element if a sequence is given '''
#self.hash = HashTable()
self.tree = BinarySearchTree()
# self.element = BinaryTreeNode()
self.size = 0
if elements is not None:
for item in elements:
self.add(item)
def size(self):
'''property that tracks the number of elements in constant time
Average Case Runtime: O(1) because we're updating the size variable after each deletion and addition to an element'''
return self.tree.size
def contains(self,element):
''' return a boolean indicating whether element is in this set
Average Case Runtime: O(log(n)) because we're searching the binary search tree'''
return self.tree.contains(element)
def add(self,element):
''' add element to this set, if not present already
Average case Runtime: O(log(n)) Because we must apply binary search to add an element '''
if self.tree.contains(element):
raise ValueError(f'Cannot add element to set again: {element}')
else:
self.tree.insert(element)
self.size += 1
def remove(self,element):
'''remove element from this set, if present, or else raise KeyError
Average case Runtime: O(log (n)) Because we perform binary search to find the item to delete'''
if self.tree.contains(element):
self.tree.delete(element)
self.size -= 1
else:
raise ValueError(f'Element is not in the set: {element}')
def union(self,other_set):
'''return a new set that is the union of this set and other_set
Runtime: (m + n) * log(m) because we add the length of each set to the time of the add function calls '''
new_set = self.tree.items_pre_order()
for element in other_set.tree.items_pre_order(): #O(log(m)) # pre-order will generate the exact same tree every time
if element not in new_set:
new_set.append(element)
return Treeset(new_set)
# for element in self.tree.items_pre_order(): # pre-order will generate the exact same tree every time
# if other_set.contains(element): #O(log(m))
# new_set.add(element)
# return new_set
def intersection(self,other_set):
'''return a new set that is the intersection of this set and other_set
Average Case Runtime: O(n) '''
new_set = Treeset()
for element in self.tree.items_pre_order():
if other_set.contains(element):
new_set.add(element)
return new_set
def difference(self,other_set):
'''return a new set that is the difference of this set and other_set
Average Case Runtime: O(n) because we have to check all the nodes to see if in the other set.'''
# make a new empty set
new_set = Treeset()
# iterate over each item in the self set
for element in self.tree.items_pre_order():
# if the item is not in other set then add to the new set
if not other_set.contains(element):
new_set.add(element)
# return the new set
return new_set
def is_subset(self, other_set):
'''return a boolean indicating whether other_set is a subset of this set
Best Case: O(1) if the size is of other_set is bigger then we return False
Average Case Runtime: O(log(n)) if we traverse through all the nodes. In the smaller set. '''
if self.size > other_set.size:
return False
for item in self.tree.items_pre_order():
if not other_set.contains(item):
# found an item not in the two sets
return False
return True | <filename>Code/sets.py<gh_stars>0
#!python
#from hashtable import HashTable
from binarytree import BinarySearchTree, BinaryTreeNode
class Treeset:
def __init__(self, elements=None):
''' initialize a new empty set structure, and add each element if a sequence is given '''
#self.hash = HashTable()
self.tree = BinarySearchTree()
# self.element = BinaryTreeNode()
self.size = 0
if elements is not None:
for item in elements:
self.add(item)
def size(self):
'''property that tracks the number of elements in constant time
Average Case Runtime: O(1) because we're updating the size variable after each deletion and addition to an element'''
return self.tree.size
def contains(self,element):
''' return a boolean indicating whether element is in this set
Average Case Runtime: O(log(n)) because we're searching the binary search tree'''
return self.tree.contains(element)
def add(self,element):
''' add element to this set, if not present already
Average case Runtime: O(log(n)) Because we must apply binary search to add an element '''
if self.tree.contains(element):
raise ValueError(f'Cannot add element to set again: {element}')
else:
self.tree.insert(element)
self.size += 1
def remove(self,element):
'''remove element from this set, if present, or else raise KeyError
Average case Runtime: O(log (n)) Because we perform binary search to find the item to delete'''
if self.tree.contains(element):
self.tree.delete(element)
self.size -= 1
else:
raise ValueError(f'Element is not in the set: {element}')
def union(self,other_set):
'''return a new set that is the union of this set and other_set
Runtime: (m + n) * log(m) because we add the length of each set to the time of the add function calls '''
new_set = self.tree.items_pre_order()
for element in other_set.tree.items_pre_order(): #O(log(m)) # pre-order will generate the exact same tree every time
if element not in new_set:
new_set.append(element)
return Treeset(new_set)
# for element in self.tree.items_pre_order(): # pre-order will generate the exact same tree every time
# if other_set.contains(element): #O(log(m))
# new_set.add(element)
# return new_set
def intersection(self,other_set):
'''return a new set that is the intersection of this set and other_set
Average Case Runtime: O(n) '''
new_set = Treeset()
for element in self.tree.items_pre_order():
if other_set.contains(element):
new_set.add(element)
return new_set
def difference(self,other_set):
'''return a new set that is the difference of this set and other_set
Average Case Runtime: O(n) because we have to check all the nodes to see if in the other set.'''
# make a new empty set
new_set = Treeset()
# iterate over each item in the self set
for element in self.tree.items_pre_order():
# if the item is not in other set then add to the new set
if not other_set.contains(element):
new_set.add(element)
# return the new set
return new_set
def is_subset(self, other_set):
'''return a boolean indicating whether other_set is a subset of this set
Best Case: O(1) if the size is of other_set is bigger then we return False
Average Case Runtime: O(log(n)) if we traverse through all the nodes. In the smaller set. '''
if self.size > other_set.size:
return False
for item in self.tree.items_pre_order():
if not other_set.contains(item):
# found an item not in the two sets
return False
return True | en | 0.765919 | #!python #from hashtable import HashTable initialize a new empty set structure, and add each element if a sequence is given #self.hash = HashTable() # self.element = BinaryTreeNode() property that tracks the number of elements in constant time Average Case Runtime: O(1) because we're updating the size variable after each deletion and addition to an element return a boolean indicating whether element is in this set Average Case Runtime: O(log(n)) because we're searching the binary search tree add element to this set, if not present already Average case Runtime: O(log(n)) Because we must apply binary search to add an element remove element from this set, if present, or else raise KeyError Average case Runtime: O(log (n)) Because we perform binary search to find the item to delete return a new set that is the union of this set and other_set Runtime: (m + n) * log(m) because we add the length of each set to the time of the add function calls #O(log(m)) # pre-order will generate the exact same tree every time # for element in self.tree.items_pre_order(): # pre-order will generate the exact same tree every time # if other_set.contains(element): #O(log(m)) # new_set.add(element) # return new_set return a new set that is the intersection of this set and other_set Average Case Runtime: O(n) return a new set that is the difference of this set and other_set Average Case Runtime: O(n) because we have to check all the nodes to see if in the other set. # make a new empty set # iterate over each item in the self set # if the item is not in other set then add to the new set # return the new set return a boolean indicating whether other_set is a subset of this set Best Case: O(1) if the size is of other_set is bigger then we return False Average Case Runtime: O(log(n)) if we traverse through all the nodes. In the smaller set. # found an item not in the two sets | 4.10752 | 4 |
sdk.py | carlgonz/qt-pyqt-sdk-builder | 1 | 6621241 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# The MIT License (MIT)
#
# Copyright (c) 2014 <NAME>.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
"""Support methods used in both build.py and configure.py"""
from __future__ import print_function
import contextlib
import os
import os.path
import subprocess
import sys
@contextlib.contextmanager
def chdir(path):
"""Changes current working directory for the life of the context manager."""
cwd = os.path.abspath(os.getcwd())
try:
os.chdir(path)
yield
finally:
os.chdir(cwd)
def get_layout(install_root):
"""Returns a dictionary representing the layout of an SDK installation.
For example, the 'bin' key points to the directory which contains the executables under the
given installation root.
Additionally, the installation root is checked to make sure all required files and directories
are there. Since this is an expensive operation, the result of calling get_layout() should be
cached and passed around instead of invoking this function each time.
"""
rootdir = os.path.abspath(install_root)
pydir = 'python%s.%s' % sys.version_info[:2]
layout = {
'root': rootdir, # Installation root
'bin': os.path.join(rootdir, 'bin'), # Executables
'include': os.path.join(rootdir, 'include'), # Includes
'lib': os.path.join(rootdir, 'lib'), # Libraries
'plugins': os.path.join(rootdir, 'plugins'), # Qt Plugins
'python': os.path.join(rootdir, pydir), # Python libraries
'sip': os.path.join(rootdir, 'share', 'sip'), # SIP files
}
# Sanity check
for path in layout.values():
if not os.path.isdir(path):
print('WARNING: Missing required directory %s' % path)
sipconfig = os.path.join(layout['python'], 'sipconfig.py')
if not os.path.isfile(sipconfig):
print('WARNING: Missing required file %s' % sipconfig)
return layout
def start_subshell():
print_box('Starting a subshell with the environment properly set-up for you.')
if sys.platform == 'win32':
sh(os.environ['COMSPEC'], '/K')
else:
sh(os.environ['SHELL'])
print_box('Goodbye.')
def print_box(*args):
print('')
print('=' * 78)
for message in args:
print('{:^78}'.format(message))
print('=' * 78)
print('')
def sh(*args):
print('+', ' '.join(args))
return subprocess.check_call(args, stderr=sys.stderr, stdout=sys.stdout)
def die(*args):
print('')
for message in args:
print(message)
print('Aborting.')
sys.exit(1)
| #!/usr/bin/env python2
# -*- coding: utf-8 -*-
#
# The MIT License (MIT)
#
# Copyright (c) 2014 <NAME>.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
"""Support methods used in both build.py and configure.py"""
from __future__ import print_function
import contextlib
import os
import os.path
import subprocess
import sys
@contextlib.contextmanager
def chdir(path):
"""Changes current working directory for the life of the context manager."""
cwd = os.path.abspath(os.getcwd())
try:
os.chdir(path)
yield
finally:
os.chdir(cwd)
def get_layout(install_root):
"""Returns a dictionary representing the layout of an SDK installation.
For example, the 'bin' key points to the directory which contains the executables under the
given installation root.
Additionally, the installation root is checked to make sure all required files and directories
are there. Since this is an expensive operation, the result of calling get_layout() should be
cached and passed around instead of invoking this function each time.
"""
rootdir = os.path.abspath(install_root)
pydir = 'python%s.%s' % sys.version_info[:2]
layout = {
'root': rootdir, # Installation root
'bin': os.path.join(rootdir, 'bin'), # Executables
'include': os.path.join(rootdir, 'include'), # Includes
'lib': os.path.join(rootdir, 'lib'), # Libraries
'plugins': os.path.join(rootdir, 'plugins'), # Qt Plugins
'python': os.path.join(rootdir, pydir), # Python libraries
'sip': os.path.join(rootdir, 'share', 'sip'), # SIP files
}
# Sanity check
for path in layout.values():
if not os.path.isdir(path):
print('WARNING: Missing required directory %s' % path)
sipconfig = os.path.join(layout['python'], 'sipconfig.py')
if not os.path.isfile(sipconfig):
print('WARNING: Missing required file %s' % sipconfig)
return layout
def start_subshell():
print_box('Starting a subshell with the environment properly set-up for you.')
if sys.platform == 'win32':
sh(os.environ['COMSPEC'], '/K')
else:
sh(os.environ['SHELL'])
print_box('Goodbye.')
def print_box(*args):
print('')
print('=' * 78)
for message in args:
print('{:^78}'.format(message))
print('=' * 78)
print('')
def sh(*args):
print('+', ' '.join(args))
return subprocess.check_call(args, stderr=sys.stderr, stdout=sys.stdout)
def die(*args):
print('')
for message in args:
print(message)
print('Aborting.')
sys.exit(1)
| en | 0.79824 | #!/usr/bin/env python2 # -*- coding: utf-8 -*- # # The MIT License (MIT) # # Copyright (c) 2014 <NAME>. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # Support methods used in both build.py and configure.py Changes current working directory for the life of the context manager. Returns a dictionary representing the layout of an SDK installation. For example, the 'bin' key points to the directory which contains the executables under the given installation root. Additionally, the installation root is checked to make sure all required files and directories are there. Since this is an expensive operation, the result of calling get_layout() should be cached and passed around instead of invoking this function each time. # Installation root # Executables # Includes # Libraries # Qt Plugins # Python libraries # SIP files # Sanity check | 1.59421 | 2 |
test/test_violations_per_group.py | rschuitema/misra | 0 | 6621242 | <reponame>rschuitema/misra<gh_stars>0
from src.misra.misra_guideline import MisraGuideline
from src.queries.violations_per_group import get_violations_per_group
def test_violations_per_group_success():
guidelines = {"1.1": MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")),
"1.5": MisraGuideline(("1.5", "rule", "Mandatory", "Parameters", "Functions rule 2")),
"1.7": MisraGuideline(("1.7", "rule", "Mandatory", "Functions", "Functions rule 3")),
"2.1": MisraGuideline(("2.1", "rule", "Mandatory", "Layout", "Functions rule 4")),
"2.2": MisraGuideline(("2.2", "rule", "Mandatory", "Layout", "Functions rule 5"))}
guideline_violations = [
(MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("2.1", "rule", "Mandatory", "Layout", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("2.1", "rule", "Mandatory", "Layout", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("1.7", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("1.5", "rule", "Mandatory", "Parameters", "Functions rule 1")), "test.c", "12", "34", "var"),
]
violations_per_group = get_violations_per_group(guideline_violations, guidelines)
assert 3 == len(violations_per_group)
assert 4 == violations_per_group["Functions"]
assert 1 == violations_per_group["Parameters"]
assert 2 == violations_per_group["Layout"]
def test_no_violations_all_counts_are_zero():
guidelines = {"1.1": MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")),
"1.5": MisraGuideline(("1.5", "rule", "Mandatory", "Parameters", "Functions rule 2")),
"1.7": MisraGuideline(("1.7", "rule", "Mandatory", "Functions", "Functions rule 3")),
"2.1": MisraGuideline(("2.1", "rule", "Mandatory", "Layout", "Functions rule 4")),
"2.2": MisraGuideline(("2.2", "rule", "Mandatory", "Layout", "Functions rule 5"))}
guideline_violations = []
violations_per_group = get_violations_per_group(guideline_violations, guidelines)
assert 3 == len(violations_per_group)
assert 0 == violations_per_group["Functions"]
assert 0 == violations_per_group["Parameters"]
assert 0 == violations_per_group["Layout"]
def test_no_guidelines_empty_violations_per_rule():
guidelines = {}
guideline_violations = [
(MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("2.1", "rule", "Mandatory", "Layout", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("2.1", "rule", "Mandatory", "Layout", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("1.7", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("1.5", "rule", "Mandatory", "Parameters", "Functions rule 1")), "test.c", "12", "34", "var"),
]
violations_per_group = get_violations_per_group(guideline_violations, guidelines)
assert 0 == len(violations_per_group)
| from src.misra.misra_guideline import MisraGuideline
from src.queries.violations_per_group import get_violations_per_group
def test_violations_per_group_success():
guidelines = {"1.1": MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")),
"1.5": MisraGuideline(("1.5", "rule", "Mandatory", "Parameters", "Functions rule 2")),
"1.7": MisraGuideline(("1.7", "rule", "Mandatory", "Functions", "Functions rule 3")),
"2.1": MisraGuideline(("2.1", "rule", "Mandatory", "Layout", "Functions rule 4")),
"2.2": MisraGuideline(("2.2", "rule", "Mandatory", "Layout", "Functions rule 5"))}
guideline_violations = [
(MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("2.1", "rule", "Mandatory", "Layout", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("2.1", "rule", "Mandatory", "Layout", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("1.7", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
(MisraGuideline(("1.5", "rule", "Mandatory", "Parameters", "Functions rule 1")), "test.c", "12", "34", "var"),
]
violations_per_group = get_violations_per_group(guideline_violations, guidelines)
assert 3 == len(violations_per_group)
assert 4 == violations_per_group["Functions"]
assert 1 == violations_per_group["Parameters"]
assert 2 == violations_per_group["Layout"]
def test_no_violations_all_counts_are_zero():
guidelines = {"1.1": MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")),
"1.5": MisraGuideline(("1.5", "rule", "Mandatory", "Parameters", "Functions rule 2")),
"1.7": MisraGuideline(("1.7", "rule", "Mandatory", "Functions", "Functions rule 3")),
"2.1": MisraGuideline(("2.1", "rule", "Mandatory", "Layout", "Functions rule 4")),
"2.2": MisraGuideline(("2.2", "rule", "Mandatory", "Layout", "Functions rule 5"))}
guideline_violations = []
violations_per_group = get_violations_per_group(guideline_violations, guidelines)
assert 3 == len(violations_per_group)
assert 0 == violations_per_group["Functions"]
assert 0 == violations_per_group["Parameters"]
assert 0 == violations_per_group["Layout"]
def test_no_guidelines_empty_violations_per_rule():
    # With an empty guideline mapping there are no groups to report on, so
    # the result must be empty even though violations were recorded.
    guidelines = {}
    # Each entry: (guideline, file, line, column, offending name).
    guideline_violations = [
        (MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
        (MisraGuideline(("2.1", "rule", "Mandatory", "Layout", "Functions rule 1")), "test.c", "12", "34", "var"),
        (MisraGuideline(("2.1", "rule", "Mandatory", "Layout", "Functions rule 1")), "test.c", "12", "34", "var"),
        (MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
        (MisraGuideline(("1.7", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
        (MisraGuideline(("1.1", "rule", "Mandatory", "Functions", "Functions rule 1")), "test.c", "12", "34", "var"),
        (MisraGuideline(("1.5", "rule", "Mandatory", "Parameters", "Functions rule 1")), "test.c", "12", "34", "var"),
    ]
    violations_per_group = get_violations_per_group(guideline_violations, guidelines)
    assert 0 == len(violations_per_group) | none | 1 | 1.929762 | 2 |
experiments/whoosh-stemming.py | antipatico/pytoyir | 0 | 6621243 | #!/usr/bin/env python3
import whoosh.lang.porter as porter
import whoosh.lang.porter2 as porter2
import whoosh.lang.paicehusk as paicehusk
if __name__ == "__main__":
print(porter.stem("dancer"))
print(porter2.stem("dancer"))
print(paicehusk.stem("dancer"))
| #!/usr/bin/env python3
import whoosh.lang.porter as porter
import whoosh.lang.porter2 as porter2
import whoosh.lang.paicehusk as paicehusk
if __name__ == "__main__":
print(porter.stem("dancer"))
print(porter2.stem("dancer"))
print(paicehusk.stem("dancer"))
| fr | 0.221828 | #!/usr/bin/env python3 | 1.849309 | 2 |
Algorithms/inversions/inversions.py | BackEndTea/Learning | 1 | 6621244 |
def main():
    # Entry point: read the integers from ./integerarray.txt and print
    # how many inversions the sequence contains.
    print(sort_and_count_inversions(readfile())[0])
def readfile():
    """Read whitespace-stripped integers, one per line, from ./integerarray.txt.

    Returns a list.  The original returned a lazy ``map`` object, which has
    no ``len()`` and therefore crashed ``sort_and_count_inversions``.
    """
    with open('./integerarray.txt') as f:
        return [int(line.strip()) for line in f]
def sort_and_count_inversions(input):
    """Merge-sort *input* while counting inversions.

    Returns a tuple ``(inversions, sorted_list)``.  Accepts any sequence;
    an empty sequence has zero inversions (the original recursed forever
    on an empty list because it only stopped at length 1).
    """
    n = len(input)
    if n <= 1:
        return (0, list(input))
    left_inv, left_sorted = sort_and_count_inversions(input[:n // 2])
    right_inv, right_sorted = sort_and_count_inversions(input[n // 2:])
    split_inv, merged = merge_and_sort_and_count_split_inversions(left_sorted, right_sorted)
    return (left_inv + right_inv + split_inv, merged)


def merge_and_sort_and_count_split_inversions(input, second=()):
    """Merge two sorted sequences and count split inversions.

    A split inversion is a pair ``(a, b)`` with ``a`` taken from *input*,
    ``b`` from *second* and ``a > b``.  Returns ``(inversions, merged_list)``.
    The default for *second* is an immutable empty tuple (the original used
    a mutable ``[]`` default, a classic Python pitfall).
    """
    inversions = 0
    i = j = 0
    merged = []
    # Two-pointer merge instead of the original IndexError-driven loop:
    # whenever an element of *second* is emitted first, every element still
    # pending in *input* forms one inversion with it.
    while i < len(input) and j < len(second):
        if input[i] < second[j]:
            merged.append(input[i])
            i += 1
        else:
            merged.append(second[j])
            j += 1
            inversions += len(input) - i
    merged.extend(input[i:])
    merged.extend(second[j:])
    return (inversions, merged)
if __name__ == "__main__":
main()
|
def main():
print(sort_and_count_inversions(readfile())[0])
def readfile():
with open('./integerarray.txt') as f:
content = f.readlines()
return map(lambda x : int(x.strip()),content)
def sort_and_count_inversions(input):
n = len(input)
if n == 1:
return (0, input)
x = sort_and_count_inversions(input[:n//2])
y = sort_and_count_inversions(input[n//2:])
z = merge_and_sort_and_count_split_inversions(x[1], y[1])
return (x[0] + y[0] + z[0], z[1])
def merge_and_sort_and_count_split_inversions(input, second = []):
inversions = 0
i = 0
j = 0
n = len(input) + len(second)
out = []
for _ in range(0, n):
try:
inp = input[i]
except IndexError:
out.append(second[j])
j += 1
continue
try:
sec = second[j]
except IndexError:
out.append(input[i])
i += 1
continue
if inp < sec:
out.append(input[i])
i += 1
continue
out.append(second[j])
j += 1
inversions += len(input) - i
return (inversions, out)
if __name__ == "__main__":
main()
| none | 1 | 3.40384 | 3 | |
myCompany/rules/urls.py | Rom4eg/myCompany | 0 | 6621245 | <filename>myCompany/rules/urls.py
import rules.views as views
from django.conf.urls import url, include
from rest_framework.routers import SimpleRouter
from rest_framework_nested import routers
router = SimpleRouter()
router.register(r'rules', views.RulesViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
]
| <filename>myCompany/rules/urls.py
import rules.views as views
from django.conf.urls import url, include
from rest_framework.routers import SimpleRouter
from rest_framework_nested import routers
router = SimpleRouter()
router.register(r'rules', views.RulesViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
]
| none | 1 | 1.861157 | 2 | |
ADS - Algoritimos e Estrutura de Dados/exercicio_arr1.py | mlobf/python_advanced | 0 | 6621246 | <gh_stars>0
"""
Recebe um array nao vazio que possui inteiros distintos e um
inteiro respersentando uma soma alvo.
Caso dois numeros no array de entrada gerar a soma alvo,
ele devem retornar estes dois numeros no array de saida.
Se nao gera retorna vazio.
"""
"""
Parametro 1 => Array de inteiros
Parametro 2 => Valor alvo que é a soma de dois numeros.
"""
import re
myarray = [3, 5, -4, 8, 11, 1, -1, 6]
target = 10
def my_func(myarray, target):
    """Find the first pair of distinct elements of *myarray* summing to *target*.

    Returns a dict with the pair's indices (``p1``, ``p2``), values
    (``v1``, ``v2``) and sum (``soma``).  When no pair matches, the initial
    placeholder dict is returned (indices 0/1, values and sum 0), mirroring
    the original output shape.
    """
    # Placeholder result, returned unchanged when no pair adds up to target.
    resultado = {"p1": 0, "v1": 0, "p2": 1, "v2": 0, "soma": 0}
    # Brute force: check every unordered pair exactly once.  The original
    # compared ints with ``is not`` (identity, not equality) and restarted
    # the inner index at 1, which could pair an element with itself and
    # let later matches overwrite earlier ones; all fixed here.
    for p1 in range(len(myarray) - 1):
        for p2 in range(p1 + 1, len(myarray)):
            v1, v2 = myarray[p1], myarray[p2]
            if v1 + v2 == target:
                resultado.update(p1=p1, v1=v1, p2=p2, v2=v2, soma=v1 + v2)
                return resultado
    return resultado
print(my_func(myarray, target))
| """
Recebe um array nao vazio que possui inteiros distintos e um
inteiro respersentando uma soma alvo.
Caso dois numeros no array de entrada gerar a soma alvo,
ele devem retornar estes dois numeros no array de saida.
Se nao gera retorna vazio.
"""
"""
Parametro 1 => Array de inteiros
Parametro 2 => Valor alvo que é a soma de dois numeros.
"""
import re
myarray = [3, 5, -4, 8, 11, 1, -1, 6]
target = 10
def my_func(myarray, target):
    """Find the first pair of distinct elements of *myarray* summing to *target*.

    Returns a dict with the pair's indices (``p1``, ``p2``), values
    (``v1``, ``v2``) and sum (``soma``).  When no pair matches, the initial
    placeholder dict is returned (indices 0/1, values and sum 0), mirroring
    the original output shape.
    """
    # Placeholder result, returned unchanged when no pair adds up to target.
    resultado = {"p1": 0, "v1": 0, "p2": 1, "v2": 0, "soma": 0}
    # Brute force: check every unordered pair exactly once.  The original
    # compared ints with ``is not`` (identity, not equality) and restarted
    # the inner index at 1, which could pair an element with itself and
    # let later matches overwrite earlier ones; all fixed here.
    for p1 in range(len(myarray) - 1):
        for p2 in range(p1 + 1, len(myarray)):
            v1, v2 = myarray[p1], myarray[p2]
            if v1 + v2 == target:
                resultado.update(p1=p1, v1=v1, p2=p2, v2=v2, soma=v1 + v2)
                return resultado
    return resultado
print(my_func(myarray, target)) | pt | 0.761747 | Recebe um array nao vazio que possui inteiros distintos e um inteiro respersentando uma soma alvo. Caso dois numeros no array de entrada gerar a soma alvo, ele devem retornar estes dois numeros no array de saida. Se nao gera retorna vazio. Parametro 1 => Array de inteiros Parametro 2 => Valor alvo que é a soma de dois numeros. # Metodo Força Bruta print('------------') print('P1', p1) print('v1', v1) print('') print('P2', p2) print('v2', v2) print('------------') print('SOMA =>', soma) print('------------') | 4.059896 | 4 |
tutorials/01-basics/chainer_basics/main.py | nattochaduke/ChainerImitatingPyTorchTutorial | 0 | 6621247 | import chainer
from chainer.backends import cuda
from chainer import Function, gradient_check, report, training, utils, Variable
from chainer import datasets, iterators, optimizers, serializers
from chainer import Link, Chain, ChainList
from chainer.dataset import convert
import chainer.functions as F
import chainer.links as L
from chainer.training import extensions
import numpy as np
# ================================================================== #
# Table of Contents #
# ================================================================== #
# 1. Basic autograd example 1 (Line 31 to 45)
# 2. Basic autograd example 2 (Line 53 to 91)
# 3. Loading data from numpy (Line 97 to 106)
# 4. Input pipline (Line 112 to 141)
# 5. Input pipline for custom dataset (Line 148 to 169)
# 6. Pretrained model (Line 176 to 206)
# 7. Save and load model (Line 213 to 215)
# ================================================================== #
# 1. Basic autograd example 1 #
# ================================================================== #
# Create variables. In chainer, Variables wraps numpy or cupy arrays.
x = Variable(np.array([1], dtype=np.float32))
w = Variable(np.array([2], dtype=np.float32))
b = Variable(np.array([3], dtype=np.float32))
# Build a computational graph.
y = w * x + b # y = 2 * x + 3
# Compute gradients.
y.backward()
# Print out the gradients.
print(x.grad) # x.grad = 2
print(w.grad) # w.grad = 1
print(b.grad) # b.grad = 1
# ================================================================== #
# 2. Basic autograd example 2 #
# ================================================================== #
# Create tensors of shape (10, 3) and (10, 2).
x = Variable(np.random.randn(10, 3).astype('f'))
y = Variable(np.random.randn(10, 2).astype('f'))
# Build a fully connected layer.
linear = L.Linear(3, 2)
linear.cleargrads()
print ('w: ', linear.W)
print ('b: ', linear.b)
# Build loss function and optimizer.
criterion = F.mean_squared_error
optimizer = optimizers.SGD(lr=0.01)
optimizer.setup(linear)
# Forward pass.
pred = linear(x)
# Compute loss.
loss = criterion(pred, y)
print('loss: ', loss.data)
# Backward pass.
loss.backward()
# Print out the gradients.
print ('dL/dw: ', linear.W.grad)
print ('dL/db: ', linear.b.grad)
# 1-step gradient descent.
optimizer.update()
# You can also perform gradient descent at the low level.
# linear.weight.data.sub_(0.01 * linear.weight.grad.data)
# linear.bias.data.sub_(0.01 * linear.bias.grad.data)
# Print out the loss after 1-step gradient descent.
pred = linear(x)
loss = criterion(pred, y)
print('loss after 1 step optimization: ', loss.data)
# ================================================================== #
# 3. Loading data from numpy #
# ================================================================== #
# Chainer.Variable wraps numpy array or cupy array.
# Create a numpy array
x = np.array([[1, 2], [3, 4]])
# Convert the numpy array to a chainer.Variable
y = Variable(x)
# Convert the Variable to a numpy array.
z = y.data
# ================================================================== #
# 4. Input pipline #
# ================================================================== #
# Download and construct CIFAR-10 dataset.
train, test = chainer.datasets.get_cifar10()
# Fetch one data pair (read data from disk).
image, label = train[0]
print (image.shape)
# Variable.shape returns a tuple of dimensions of each axis of the variable.
# Variable.size returns a integer that is product of Variable.shape
print (label)
# Data iterator (this provides queues and threads in a very simple way).
# iterators have Serial/Multiprocess/mMultithread variants.
train_iter = iterators.SerialIterator(dataset=train, batch_size=64)
# When iteration starts, queue and thread start to load data from files.
data_iter = iter(train_iter)
# Mini-batch images and labels.
# data_iter.next() yields a list that has the shape
# [(image0, label0), (image1, label1), ..., (image63, label63)]
# convert.concat_examples transforms this list into
# (array([image0, image1, ..., image63]), array([label0, ..., label63]))
images, labels = convert.concat_examples(data_iter.next())
# Actual usage of the data loader is as below.
for batch in train_iter:
images, labels = convert.concat_examples(batch)
# Training code should be written here.
break
# ================================================================== #
# 5. Input pipline for custom dataset #
# ================================================================== #
# You should your build your custom dataset as below.
class CustomDataset(chainer.dataset.DatasetMixin):
    """Skeleton for a user-defined dataset.

    Subclasses of ``DatasetMixin`` only need to provide ``get_example``
    and ``__len__``; indexing (``__getitem__``) is supplied by the mixin
    on top of ``get_example``.
    """
    def __init__(self):
        # TODO
        # 1. Initialize file paths or a list of file names.
        pass
    def get_example(self, index):
        # DatasetMixin.get_example method is called by DatasetMixin.__getitem__
        # TODO
        # 1. Read one data from file (e.g. using numpy.fromfile, PIL.Image.open).
        # 2. Preprocess the data (e.g. torchvision.Transform).
        # 3. Return a data pair (e.g. image and label).
        pass
    def __len__(self):
        # You should change 0 to the total size of your dataset.
        # NOTE(review): iterators rely on this to know the epoch size.
        return 0
# You can then use the prebuilt data loader.
custom_dataset = CustomDataset()
train_iter = iterators.SerialIterator(dataset=custom_dataset,
batch_size=64,
shuffle=True)
# ================================================================== #
# 6. Pretrained model #
# ================================================================== #
# Load the pretrained ResNet-18.
# Beforehand, have to download pretrained model manually.
resnet = chainer.links.ResNet50Layers(pretrained_model='auto')
# I have no idea how to concisely write fine-tuning model.
# In spite the verbosity, I would define a network class that contains
# freezed pretrained model and learnable fully-connected layer.
class ResNetFineTune(chainer.Chain):
    """Pretrained ResNet-50 feature extractor with a fresh classifier head.

    Only ``fc`` is registered inside ``init_scope`` and is therefore the
    part the optimizer updates; the pretrained base stays frozen.
    """
    def __init__(self, out_size):
        # out_size: number of output classes for the new fully-connected head.
        super(ResNetFineTune, self).__init__()
        self.base = chainer.links.ResNet50Layers(pretrained_model='auto')
        # base will not be updated because it is defined out of init_scope context.
        with self.init_scope():
            # Layers defined in init_scope context are updated.
            self.fc = L.Linear(None, out_size)
            # By setting the dimension of input None, the number is determined
            # when the instance of this class has first input.
    def __call__(self, x):
        # Features from the "pool5" layer of the pretrained network.
        h = self.base.extract(x)["pool5"]
        # PretrainedModel.extract method gives the output of each layer in the pretrained model
        y = self.fc(h)
        return y
model = ResNetFineTune(100)
# Forward pass.
images = np.random.randn(64, 3, 224, 224)
outputs = model(images) # Inputting numpy array, chainer automatically
# wraps it in Variable.
print(outputs.shape) # (64, 100)
# ================================================================== #
# 7. Save and load the model #
# ================================================================== #
# Save and load only the model parameters.
serializers.save_npz("mymodel.npz", model)
serializers.load_npz('mymodel.npz', model)
| import chainer
from chainer.backends import cuda
from chainer import Function, gradient_check, report, training, utils, Variable
from chainer import datasets, iterators, optimizers, serializers
from chainer import Link, Chain, ChainList
from chainer.dataset import convert
import chainer.functions as F
import chainer.links as L
from chainer.training import extensions
import numpy as np
# ================================================================== #
# Table of Contents #
# ================================================================== #
# 1. Basic autograd example 1 (Line 31 to 45)
# 2. Basic autograd example 2 (Line 53 to 91)
# 3. Loading data from numpy (Line 97 to 106)
# 4. Input pipline (Line 112 to 141)
# 5. Input pipline for custom dataset (Line 148 to 169)
# 6. Pretrained model (Line 176 to 206)
# 7. Save and load model (Line 213 to 215)
# ================================================================== #
# 1. Basic autograd example 1 #
# ================================================================== #
# Create variables. In chainer, Variables wraps numpy or cupy arrays.
x = Variable(np.array([1], dtype=np.float32))
w = Variable(np.array([2], dtype=np.float32))
b = Variable(np.array([3], dtype=np.float32))
# Build a computational graph.
y = w * x + b # y = 2 * x + 3
# Compute gradients.
y.backward()
# Print out the gradients.
print(x.grad) # x.grad = 2
print(w.grad) # w.grad = 1
print(b.grad) # b.grad = 1
# ================================================================== #
# 2. Basic autograd example 2 #
# ================================================================== #
# Create tensors of shape (10, 3) and (10, 2).
x = Variable(np.random.randn(10, 3).astype('f'))
y = Variable(np.random.randn(10, 2).astype('f'))
# Build a fully connected layer.
linear = L.Linear(3, 2)
linear.cleargrads()
print ('w: ', linear.W)
print ('b: ', linear.b)
# Build loss function and optimizer.
criterion = F.mean_squared_error
optimizer = optimizers.SGD(lr=0.01)
optimizer.setup(linear)
# Forward pass.
pred = linear(x)
# Compute loss.
loss = criterion(pred, y)
print('loss: ', loss.data)
# Backward pass.
loss.backward()
# Print out the gradients.
print ('dL/dw: ', linear.W.grad)
print ('dL/db: ', linear.b.grad)
# 1-step gradient descent.
optimizer.update()
# You can also perform gradient descent at the low level.
# linear.weight.data.sub_(0.01 * linear.weight.grad.data)
# linear.bias.data.sub_(0.01 * linear.bias.grad.data)
# Print out the loss after 1-step gradient descent.
pred = linear(x)
loss = criterion(pred, y)
print('loss after 1 step optimization: ', loss.data)
# ================================================================== #
# 3. Loading data from numpy #
# ================================================================== #
# Chainer.Variable wraps numpy array or cupy array.
# Create a numpy array
x = np.array([[1, 2], [3, 4]])
# Convert the numpy array to a chainer.Variable
y = Variable(x)
# Convert the Variable to a numpy array.
z = y.data
# ================================================================== #
# 4. Input pipline #
# ================================================================== #
# Download and construct CIFAR-10 dataset.
train, test = chainer.datasets.get_cifar10()
# Fetch one data pair (read data from disk).
image, label = train[0]
print (image.shape)
# Variable.shape returns a tuple of dimensions of each axis of the variable.
# Variable.size returns a integer that is product of Variable.shape
print (label)
# Data iterator (this provides queues and threads in a very simple way).
# iterators have Serial/Multiprocess/mMultithread variants.
train_iter = iterators.SerialIterator(dataset=train, batch_size=64)
# When iteration starts, queue and thread start to load data from files.
data_iter = iter(train_iter)
# Mini-batch images and labels.
# data_iter.next() yields a list that has the shape
# [(image0, label0), (image1, label1), ..., (image63, label63)]
# convert.concat_examples transforms this list into
# (array([image0, image1, ..., image63]), array([label0, ..., label63]))
images, labels = convert.concat_examples(data_iter.next())
# Actual usage of the data loader is as below.
for batch in train_iter:
images, labels = convert.concat_examples(batch)
# Training code should be written here.
break
# ================================================================== #
# 5. Input pipline for custom dataset #
# ================================================================== #
# You should your build your custom dataset as below.
class CustomDataset(chainer.dataset.DatasetMixin):
def __init__(self):
# TODO
# 1. Initialize file paths or a list of file names.
pass
def get_example(self, index):
# DatasetMixin.get_example method is called by DatasetMixin.__getitem__
# TODO
# 1. Read one data from file (e.g. using numpy.fromfile, PIL.Image.open).
# 2. Preprocess the data (e.g. torchvision.Transform).
# 3. Return a data pair (e.g. image and label).
pass
def __len__(self):
# You should change 0 to the total size of your dataset.
return 0
# You can then use the prebuilt data loader.
custom_dataset = CustomDataset()
train_iter = iterators.SerialIterator(dataset=custom_dataset,
batch_size=64,
shuffle=True)
# ================================================================== #
# 6. Pretrained model #
# ================================================================== #
# Load the pretrained ResNet-18.
# Beforehand, have to download pretrained model manually.
resnet = chainer.links.ResNet50Layers(pretrained_model='auto')
# I have no idea how to concisely write fine-tuning model.
# In spite the verbosity, I would define a network class that contains
# freezed pretrained model and learnable fully-connected layer.
class ResNetFineTune(chainer.Chain):
def __init__(self, out_size):
super(ResNetFineTune, self).__init__()
self.base = chainer.links.ResNet50Layers(pretrained_model='auto')
# base will not be updated because it is defined out of init_scope context.
with self.init_scope():
# Layers defined in init_scope context are updated.
self.fc = L.Linear(None, out_size)
# By setting the dimension of input None, the number is determined
# when the instance of this class has first input.
def __call__(self, x):
h = self.base.extract(x)["pool5"]
# PretrainedModel.extract method gives the output of each layer in the pretrained model
y = self.fc(h)
return y
model = ResNetFineTune(100)
# Forward pass.
images = np.random.randn(64, 3, 224, 224)
outputs = model(images) # Inputting numpy array, chainer automatically
# wraps it in Variable.
print(outputs.shape) # (64, 100)
# ================================================================== #
# 7. Save and load the model #
# ================================================================== #
# Save and load only the model parameters.
serializers.save_npz("mymodel.npz", model)
serializers.load_npz('mymodel.npz', model)
| en | 0.630366 | # ================================================================== # # Table of Contents # # ================================================================== # # 1. Basic autograd example 1 (Line 31 to 45) # 2. Basic autograd example 2 (Line 53 to 91) # 3. Loading data from numpy (Line 97 to 106) # 4. Input pipline (Line 112 to 141) # 5. Input pipline for custom dataset (Line 148 to 169) # 6. Pretrained model (Line 176 to 206) # 7. Save and load model (Line 213 to 215) # ================================================================== # # 1. Basic autograd example 1 # # ================================================================== # # Create variables. In chainer, Variables wraps numpy or cupy arrays. # Build a computational graph. # y = 2 * x + 3 # Compute gradients. # Print out the gradients. # x.grad = 2 # w.grad = 1 # b.grad = 1 # ================================================================== # # 2. Basic autograd example 2 # # ================================================================== # # Create tensors of shape (10, 3) and (10, 2). # Build a fully connected layer. # Build loss function and optimizer. # Forward pass. # Compute loss. # Backward pass. # Print out the gradients. # 1-step gradient descent. # You can also perform gradient descent at the low level. # linear.weight.data.sub_(0.01 * linear.weight.grad.data) # linear.bias.data.sub_(0.01 * linear.bias.grad.data) # Print out the loss after 1-step gradient descent. # ================================================================== # # 3. Loading data from numpy # # ================================================================== # # Chainer.Variable wraps numpy array or cupy array. # Create a numpy array # Convert the numpy array to a chainer.Variable # Convert the Variable to a numpy array. # ================================================================== # # 4. 
Input pipline # # ================================================================== # # Download and construct CIFAR-10 dataset. # Fetch one data pair (read data from disk). # Variable.shape returns a tuple of dimensions of each axis of the variable. # Variable.size returns a integer that is product of Variable.shape # Data iterator (this provides queues and threads in a very simple way). # iterators have Serial/Multiprocess/mMultithread variants. # When iteration starts, queue and thread start to load data from files. # Mini-batch images and labels. # data_iter.next() yields a list that has the shape # [(image0, label0), (image1, label1), ..., (image63, label63)] # convert.concat_examples transforms this list into # (array([image0, image1, ..., image63]), array([label0, ..., label63])) # Actual usage of the data loader is as below. # Training code should be written here. # ================================================================== # # 5. Input pipline for custom dataset # # ================================================================== # # You should your build your custom dataset as below. # TODO # 1. Initialize file paths or a list of file names. # DatasetMixin.get_example method is called by DatasetMixin.__getitem__ # TODO # 1. Read one data from file (e.g. using numpy.fromfile, PIL.Image.open). # 2. Preprocess the data (e.g. torchvision.Transform). # 3. Return a data pair (e.g. image and label). # You should change 0 to the total size of your dataset. # You can then use the prebuilt data loader. # ================================================================== # # 6. Pretrained model # # ================================================================== # # Load the pretrained ResNet-18. # Beforehand, have to download pretrained model manually. # I have no idea how to concisely write fine-tuning model. # In spite the verbosity, I would define a network class that contains # freezed pretrained model and learnable fully-connected layer. 
# base will not be updated because it is defined out of init_scope context. # Layers defined in init_scope context are updated. # By setting the dimension of input None, the number is determined # when the instance of this class has first input. # PretrainedModel.extract method gives the output of each layer in the pretrained model # Forward pass. # Inputting numpy array, chainer automatically # wraps it in Variable. # (64, 100) # ================================================================== # # 7. Save and load the model # # ================================================================== # # Save and load only the model parameters. | 2.498675 | 2 |
ocr_calculator.py | rudolfce/ocr_calculator | 2 | 6621248 | # OCR Calculator
# Copyright (C) 2019 <NAME>
#
# This module is part of OCR Calculator and is under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
'''Script that summarizes the ocr calculator. Requires a propperly configured
settings.py file. See settings_example.py for more information'''
import sys
from datetime import datetime
import logging
import logging.handlers as handlers
from calculator import Calculator
import settings
# Creating logger
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(levelname)s: %(message)s')
log_handler = logging.FileHandler('ocr_calculator.log')
log_handler.setFormatter(formatter)
logger.addHandler(log_handler)
# Reading settings
input_regex = settings.INPUT_REGEX
logger.debug('Using regex "{}"'.format(input_regex))
empty_message = settings.EMPTY_MESSAGE
logger.debug('Setting default empty message to "{}"'.format(empty_message))
error_message = settings.ERROR_MESSAGE
logger.debug('Creating instance of calculator')
calculator = Calculator(input_regex)
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Correct usage:\npython {} <input_folder> <output_folder>"
.format(sys.argv[0]))
sys.exit(1)
input_folder = sys.argv[1]
output_folder = sys.argv[2]
logger.debug('Running for input folder {} and output folder {}'
.format(input_folder, output_folder))
logger.info('Starting calculator')
start_time = datetime.now()
calculator.parse_folder(input_folder, output_folder, empty_message, error_message)
end_time = datetime.now()
delta = end_time - start_time
logger.info('Execution finished in {} seconds'.format(delta))
| # OCR Calculator
# Copyright (C) 2019 <NAME>
#
# This module is part of OCR Calculator and is under the MIT License:
# http://www.opensource.org/licenses/mit-license.php
'''Script that summarizes the ocr calculator. Requires a propperly configured
settings.py file. See settings_example.py for more information'''
import sys
from datetime import datetime
import logging
import logging.handlers as handlers
from calculator import Calculator
import settings
# Creating logger
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(levelname)s: %(message)s')
log_handler = logging.FileHandler('ocr_calculator.log')
log_handler.setFormatter(formatter)
logger.addHandler(log_handler)
# Reading settings
input_regex = settings.INPUT_REGEX
logger.debug('Using regex "{}"'.format(input_regex))
empty_message = settings.EMPTY_MESSAGE
logger.debug('Setting default empty message to "{}"'.format(empty_message))
error_message = settings.ERROR_MESSAGE
logger.debug('Creating instance of calculator')
calculator = Calculator(input_regex)
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Correct usage:\npython {} <input_folder> <output_folder>"
.format(sys.argv[0]))
sys.exit(1)
input_folder = sys.argv[1]
output_folder = sys.argv[2]
logger.debug('Running for input folder {} and output folder {}'
.format(input_folder, output_folder))
logger.info('Starting calculator')
start_time = datetime.now()
calculator.parse_folder(input_folder, output_folder, empty_message, error_message)
end_time = datetime.now()
delta = end_time - start_time
logger.info('Execution finished in {} seconds'.format(delta))
| en | 0.686918 | # OCR Calculator # Copyright (C) 2019 <NAME> # # This module is part of OCR Calculator and is under the MIT License: # http://www.opensource.org/licenses/mit-license.php Script that summarizes the ocr calculator. Requires a propperly configured settings.py file. See settings_example.py for more information # Creating logger # Reading settings | 2.709737 | 3 |
main.py | ovvladimir/Training_apparatus | 0 | 6621249 | import pygame
from settings import *
from Back_Ground import *
from Bot_cl import group, bot
# --- Command-card setup ----------------------------------------------------
# Four 200x50 cards ("Forward", "Right", "Left", "Back") are pre-rendered
# once; the main loop blits them into the side panel (part1) and lets the
# player drag copies onto the program area.
# Top-left of the playing grid; the bot is reset to this position.
bot_pos = Kletky_rect.topleft
font = pygame.font.Font(None, 30)

# "Forward" card.
card = pygame.Surface((200, 50))
card.fill(grey)
looseCard = []        # NOTE(review): never read below; kept for compatibility
LooseCardPos = []     # position of a card displaced during drag collisions
card_rect = card.get_rect(center=(part1.get_width() // 2, 50))
text = font.render("Forward", 1, black, None)
text_pos = text.get_rect(center=(card.get_width() // 2, card.get_height() // 2))
card.blit(text, text_pos)

# "Right" card.
cardR = pygame.Surface((200, 50))
cardR.fill(greyR)
cardR_rect = cardR.get_rect(center=(part1.get_width() // 2, 100))
textR = font.render("Right", 1, black, None)
textR_pos = textR.get_rect(center=(cardR.get_width() // 2, cardR.get_height() // 2))
cardR.blit(textR, textR_pos)

# "Left" card.
cardL = pygame.Surface((200, 50))
cardL.fill(greyL)
cardL_rect = cardL.get_rect(center=(part1.get_width() // 2, 150))
textL = font.render("Left", 1, black, None)
# BUG FIX: the original computed this rect from textR (copy-paste slip),
# which mis-centers the "Left" caption because "Left" and "Right" render
# at different pixel widths.
textL_pos = textL.get_rect(center=(cardL.get_width() // 2, cardL.get_height() // 2))
cardL.blit(textL, textL_pos)

# "Back" card.
cardB = pygame.Surface((200, 50))
cardB.fill(greyU)
cardB_rect = cardB.get_rect(center=(part1.get_width() // 2, 200))
textB = font.render("Back", 1, black, None)
textB_pos = textB.get_rect(center=(cardB.get_width() // 2, cardB.get_height() // 2))
cardB.blit(textB, textB_pos)
# ---------------------------------------------------------------------------
# Main game loop.
# NOTE(review): this file reached us with its indentation stripped; the
# nesting below is reconstructed from the control-flow keywords and should
# be confirmed against the upstream repository.
# Globals such as card_list, move_list, card_pos_list, card_pos, card_move,
# card_score, block, block2, de and the toggle lists (i, w, q, r, t, N,
# mode, dimming_screen) presumably come from the star-imported settings /
# Back_Ground modules — TODO confirm.
# ---------------------------------------------------------------------------
run = True
while run:
    # ---- Event handling ---------------------------------------------------
    for e in pygame.event.get():
        # Quit on window close or the ESC key.
        if e.type == pygame.QUIT or \
                e.type == pygame.KEYDOWN and e.key == pygame.K_ESCAPE:
            run = False
        elif e.type == pygame.MOUSEBUTTONDOWN:
            if e.button == 1:
                # Start button: begin executing the queued cards. Setting
                # `block` appears to force a bot reset on the first step.
                if btn1.collidepoint(e.pos) and len(card_list) != 0 and not card_move:
                    i.reverse()
                    card_move = True
                    block = True
                # Speed button: toggle the per-step delay (fast/slow).
                if btn2.collidepoint(e.pos):
                    if not card_move and not block:
                        w.reverse()
                        if w[0]:
                            de = 150
                        else:
                            de = 650
                # Reset button: put the bot back and clear the program.
                if btn3.collidepoint(e.pos):
                    if not card_move and not block:
                        q.reverse()
                        bot.rect.topleft = bot_pos
                        card_list.clear()
                        move_list.clear()
                        card_pos_list.clear()
                # Lamp button: toggle dimming overlay / hint mode.
                if btn4.collidepoint(e.pos):
                    if not card_move and not block:
                        r.reverse()
                        dimming_screen.reverse()
                        mode.reverse()
                        if N[0]:
                            N.reverse()
                # Task button toggle.
                if btn5.collidepoint(e.pos):
                    if not card_move and not block:
                        t.reverse()
                # Clicking a template card in the side panel appends a copy
                # of that command to the program (card_list).
                if crd1.collidepoint(e.pos):
                    card_list.append(card)
                    card_open = True
                elif crd2.collidepoint(e.pos):
                    card_list.append(cardR)
                    card_open = True
                elif crd3.collidepoint(e.pos):
                    card_list.append(cardL)
                    card_open = True
                elif crd4.collidepoint(e.pos):
                    card_list.append(cardB)
                    card_open = True
                else:
                    card_open = False
                if card_open:
                    # New card starts below the previously added one.
                    move_list.append(False)
                    card_pos_list.append(
                        (card_pos[0] + 50, card_pos[1] + card.get_height() * (
                            len(card_list) - 1)))
                # Mark any card under the cursor as being dragged.
                # (screen.blit returns the affected Rect.)
                for k, crd in enumerate(card_list):
                    if (screen.blit(crd, card_pos_list[k])).collidepoint(e.pos):
                        move_list[k] = True
        # ---- Dragging: move grabbed cards with the mouse ------------------
        for j, cpl in enumerate(card_pos_list):
            if e.type == pygame.MOUSEBUTTONUP:
                move_list[j] = False
            elif e.type == pygame.MOUSEMOTION and move_list[j]:
                card_pos_list[j] = (
                    e.pos[0] - card.get_width() // 2,
                    e.pos[1] - card.get_height() // 2)
                # Dragged back over the side panel: delete the card.
                # NOTE(review): pops while iterating the same list — works
                # here only because the loop hits at most one match.
                if cpl[0] < part1.get_width() - card.get_width() // 2:
                    card_list.pop(j)
                    move_list.pop(j)
                    card_pos_list.pop(j)
        # ---- Overlap resolution between placed cards ----------------------
        for index, obj_1 in enumerate(card_pos_list):
            for obj_2 in card_pos_list[index + 1:]:
                if abs(obj_1[1] - obj_2[1]) < 70 and abs(obj_1[0] - obj_2[0]) < 70:
                    if obj_1[1] < obj_2[1]:
                        # Later card sits lower: snap it 50px below obj_1.
                        card_pos_list[card_pos_list.index(obj_2)] = obj_1[0], obj_1[1] + 50
                    else:
                        # On drop, reinsert the displaced card at the front,
                        # snapped 50px above obj_1.
                        # NOTE(review): move_list is not reordered here —
                        # TODO confirm this is intentional.
                        if e.type == pygame.MOUSEBUTTONUP:
                            LooseCard = card_list[card_pos_list.index(obj_2)]
                            LooseCardPos = obj_2
                            LooseCardPos = obj_1[0], obj_1[1] - 50
                            card_list.remove(LooseCard)
                            card_pos_list.remove(obj_2)
                            card_pos_list.insert(0, LooseCardPos)
                            card_list.insert(0, LooseCard)
    # ---- Drawing ----------------------------------------------------------
    screen.fill(BG_COLOR)
    for k, v in dict_draw.items():
        screen.blit(k, v) if k != BG_map else part2.blit(k, v)
    draw()
    # Button surfaces swap with their "pressed/toggled" variants; blit
    # returns the Rect used for hit-testing next frame.
    btn1 = screen.blit(st_btn if i[0] else image_btn, image_btn_rect)
    btn3 = screen.blit(res1 if q[0] else res, res_rect)
    btn2 = screen.blit(speed1 if w[0] else speed, speed_rect)
    btn4 = screen.blit(inv_lamp if r[0] else lamp, lamp_rect)
    btn5 = screen.blit(inv_task if t[0] else task, task_rect)
    if mode[0]:
        screen.blit(podskazki[0] if N[0] else podskazki[1], pods1_1_rect)
    screen.blit(Kletky, Kletky_rect)
    '---------------------------------------------------------'
    # ---- Bot program execution (one card per frame) -----------------------
    if card_move and len(card_list) != 0:
        # Left the grid vertically: push the bot to the grid's left edge.
        if bot.rect.centery < Kletky_rect.top or bot.rect.centery > Kletky_rect.bottom:
            bot.rect.right = Kletky_rect.left
        # Off the grid horizontally: the bot "falls" off the screen, then
        # everything is reset (card_score -= 1 cancels the += 1 below).
        if bot.rect.right < Kletky_rect.left + 55 or bot.rect.left > Kletky_rect.right - 55:
            block2 = True
            bot.rect.y += 15
            if bot.rect.top > HEIGHT_WIN:
                block2 = False
                card_move = False
                card_score = 0
                if i[0]:
                    i.reverse()
                bot.rect.topleft = bot_pos
                bot.index = 0
                card_score -= 1
                block = False
        # First step after pressing Start: reset the bot to the origin.
        elif block:
            bot.rect.topleft = bot_pos
            bot.index = 0
            card_score -= 1
            block = False
        # Execute the current card: each command moves one 150px grid cell.
        elif card_list[card_score] == card:
            bot.rect.y += 150
            bot.index += 1
        elif card_list[card_score] == cardR:
            bot.rect.x += 150
            bot.index += 1
        elif card_list[card_score] == cardL:
            bot.rect.x -= 150
            bot.index += 1
        elif card_list[card_score] == cardB:
            size_y -= 25
            size_x -= 25
            bot.rect.y -= 150
            bot.index += 1
        # Advance to the next card unless the bot is currently falling.
        if not block2:
            bot.image = bot.images[int(bot.index % bot.range)]
            card_score += 1
            pygame.time.wait(de)
            if card_score >= len(card_list):
                # Program finished; keep "falling" if it ended off-grid.
                card_move = False
                card_score = 0
                i.reverse()
                if bot.rect.right < Kletky_rect.left + 55 or bot.rect.left > Kletky_rect.right - 55 \
                        or bot.rect.centery < Kletky_rect.top or bot.rect.centery > Kletky_rect.bottom:
                    card_move = True
    # group.update()
    group.draw(screen)
    '---------------------------------------------------------'
    # Side-panel template cards; the returned Rects are the click targets.
    crd1 = part1.blit(card, card_rect)
    crd2 = part1.blit(cardR, cardR_rect)
    crd3 = part1.blit(cardL, cardL_rect)
    crd4 = part1.blit(cardB, cardB_rect)
    if len(card_list) > 0:
        for z, c in enumerate(card_list):
            screen.blit(c, card_pos_list[z])
    if dimming_screen[0] is True:
        screen.blit(part4, part4_rect)
    pygame.display.update()
    clock.tick(FPS)
    # Brief visual feedback for the one-shot buttons (reset/lamp/task).
    if q[0]:
        pygame.time.wait(250)
        q.reverse()
    if r[0]:
        pygame.time.wait(250)
        r.reverse()
    if t[0]:
        pygame.time.wait(250)
        t.reverse()
| import pygame
from settings import *
from Back_Ground import *
from Bot_cl import group, bot
bot_pos = Kletky_rect.topleft
font = pygame.font.Font(None, 30)
card = pygame.Surface((200, 50))
card.fill(grey)
looseCard = []
LooseCardPos = []
card_rect = card.get_rect(center=(part1.get_width() // 2, 50))
text = font.render("Forward", 1, black, None)
text_pos = text.get_rect(center=(card.get_width() // 2, card.get_height() // 2))
card.blit(text, text_pos)
cardR = pygame.Surface((200, 50))
cardR.fill(greyR)
cardR_rect = cardR.get_rect(center=(part1.get_width() // 2, 100))
textR = font.render("Right", 1, black, None)
textR_pos = textR.get_rect(center=(cardR.get_width() // 2, cardR.get_height() // 2))
cardR.blit(textR, textR_pos)
cardL = pygame.Surface((200, 50))
cardL.fill(greyL)
cardL_rect = cardL.get_rect(center=(part1.get_width() // 2, 150))
textL = font.render("Left", 1, black, None)
textL_pos = textR.get_rect(center=(cardL.get_width() // 2, cardL.get_height() // 2))
cardL.blit(textL, textL_pos)
cardB = pygame.Surface((200, 50))
cardB.fill(greyU)
cardB_rect = cardB.get_rect(center=(part1.get_width() // 2, 200))
textB = font.render("Back", 1, black, None)
textB_pos = textB.get_rect(center=(cardB.get_width() // 2, cardB.get_height() // 2))
cardB.blit(textB, textB_pos)
run = True
while run:
for e in pygame.event.get():
if e.type == pygame.QUIT or \
e.type == pygame.KEYDOWN and e.key == pygame.K_ESCAPE:
run = False
elif e.type == pygame.MOUSEBUTTONDOWN:
if e.button == 1:
if btn1.collidepoint(e.pos) and len(card_list) != 0 and not card_move:
i.reverse()
card_move = True
block = True
if btn2.collidepoint(e.pos):
if not card_move and not block:
w.reverse()
if w[0]:
de = 150
else:
de = 650
if btn3.collidepoint(e.pos):
if not card_move and not block:
q.reverse()
bot.rect.topleft = bot_pos
card_list.clear()
move_list.clear()
card_pos_list.clear()
if btn4.collidepoint(e.pos):
if not card_move and not block:
r.reverse()
dimming_screen.reverse()
mode.reverse()
if N[0]:
N.reverse()
if btn5.collidepoint(e.pos):
if not card_move and not block:
t.reverse()
##
if crd1.collidepoint(e.pos):
card_list.append(card)
card_open = True
elif crd2.collidepoint(e.pos):
card_list.append(cardR)
card_open = True
elif crd3.collidepoint(e.pos):
card_list.append(cardL)
card_open = True
elif crd4.collidepoint(e.pos):
card_list.append(cardB)
card_open = True
else:
card_open = False
if card_open:
move_list.append(False)
card_pos_list.append(
(card_pos[0] + 50, card_pos[1] + card.get_height() * (
len(card_list) - 1)))
for k, crd in enumerate(card_list):
if (screen.blit(crd, card_pos_list[k])).collidepoint(e.pos):
move_list[k] = True
for j, cpl in enumerate(card_pos_list):
if e.type == pygame.MOUSEBUTTONUP:
move_list[j] = False
elif e.type == pygame.MOUSEMOTION and move_list[j]:
card_pos_list[j] = (
e.pos[0] - card.get_width() // 2,
e.pos[1] - card.get_height() // 2)
if cpl[0] < part1.get_width() - card.get_width() // 2:
card_list.pop(j)
move_list.pop(j)
card_pos_list.pop(j)
for index, obj_1 in enumerate(card_pos_list):
for obj_2 in card_pos_list[index + 1:]:
if abs(obj_1[1] - obj_2[1]) < 70 and abs(obj_1[0] - obj_2[0]) < 70:
if obj_1[1] < obj_2[1]:
card_pos_list[card_pos_list.index(obj_2)] = obj_1[0], obj_1[1] + 50
else:
if e.type == pygame.MOUSEBUTTONUP:
LooseCard = card_list[card_pos_list.index(obj_2)]
LooseCardPos = obj_2
LooseCardPos = obj_1[0], obj_1[1] - 50
card_list.remove(LooseCard)
card_pos_list.remove(obj_2)
card_pos_list.insert(0, LooseCardPos)
card_list.insert(0, LooseCard)
screen.fill(BG_COLOR)
for k, v in dict_draw.items():
screen.blit(k, v) if k != BG_map else part2.blit(k, v)
draw()
btn1 = screen.blit(st_btn if i[0] else image_btn, image_btn_rect)
btn3 = screen.blit(res1 if q[0] else res, res_rect)
btn2 = screen.blit(speed1 if w[0] else speed, speed_rect)
btn4 = screen.blit(inv_lamp if r[0] else lamp, lamp_rect)
btn5 = screen.blit(inv_task if t[0] else task, task_rect)
if mode[0]:
screen.blit(podskazki[0] if N[0] else podskazki[1], pods1_1_rect)
screen.blit(Kletky, Kletky_rect)
'---------------------------------------------------------'
if card_move and len(card_list) != 0:
if bot.rect.centery < Kletky_rect.top or bot.rect.centery > Kletky_rect.bottom:
bot.rect.right = Kletky_rect.left
if bot.rect.right < Kletky_rect.left + 55 or bot.rect.left > Kletky_rect.right - 55:
block2 = True
bot.rect.y += 15
if bot.rect.top > HEIGHT_WIN:
block2 = False
card_move = False
card_score = 0
if i[0]:
i.reverse()
bot.rect.topleft = bot_pos
bot.index = 0
card_score -= 1
block = False
elif block:
bot.rect.topleft = bot_pos
bot.index = 0
card_score -= 1
block = False
elif card_list[card_score] == card:
bot.rect.y += 150
bot.index += 1
elif card_list[card_score] == cardR:
bot.rect.x += 150
bot.index += 1
elif card_list[card_score] == cardL:
bot.rect.x -= 150
bot.index += 1
elif card_list[card_score] == cardB:
size_y -= 25
size_x -= 25
bot.rect.y -= 150
bot.index += 1
if not block2:
bot.image = bot.images[int(bot.index % bot.range)]
card_score += 1
pygame.time.wait(de)
if card_score >= len(card_list):
card_move = False
card_score = 0
i.reverse()
if bot.rect.right < Kletky_rect.left + 55 or bot.rect.left > Kletky_rect.right - 55 \
or bot.rect.centery < Kletky_rect.top or bot.rect.centery > Kletky_rect.bottom:
card_move = True
# group.update()
group.draw(screen)
'---------------------------------------------------------'
crd1 = part1.blit(card, card_rect)
crd2 = part1.blit(cardR, cardR_rect)
crd3 = part1.blit(cardL, cardL_rect)
crd4 = part1.blit(cardB, cardB_rect)
if len(card_list) > 0:
for z, c in enumerate(card_list):
screen.blit(c, card_pos_list[z])
if dimming_screen[0] is True:
screen.blit(part4, part4_rect)
pygame.display.update()
clock.tick(FPS)
if q[0]:
pygame.time.wait(250)
q.reverse()
if r[0]:
pygame.time.wait(250)
r.reverse()
if t[0]:
pygame.time.wait(250)
t.reverse()
| th | 0.062603 | ## # group.update() | 2.402542 | 2 |
placidity/core/cd.py | bebraw/Placidity | 2 | 6621250 | from __future__ import absolute_import
import os
class Cd:
    """Shell-style ``cd`` command: changes the process working directory.

    Supports ``cd <dir>``, ``cd ~`` (home directory) and ``cd -``
    (previous directory). The target may also be the name of a variable
    in ``variables``, in which case its value is used as the path.
    """
    aliases = 'cd'
    description = 'Change working directory'
    # Directory "cd -" returns to; seeded with the process start directory.
    prev = os.getcwd()

    def matches(self, expression):
        """Return True for expressions of the form ``cd <target>``."""
        parts = expression.split()
        # BUG FIX: check the length first so an empty or one-word
        # expression no longer raises IndexError on parts[0].
        return len(parts) == 2 and parts[0] == 'cd'

    def execute(self, expression, variables):
        """Change directory to the expression's target.

        Returns "Directory not found!" when the target does not exist or
        is not a directory; returns None on success.
        """
        target = expression.split()[1]
        # A variable name resolves to its value; otherwise use it verbatim.
        target = variables.get(target, target)
        if target == "~":
            target = os.path.expanduser('~')
        if target == "-":
            target = self.prev
        try:
            cwd = os.getcwd()
            target_dir = os.path.join(cwd, target)
            os.chdir(target_dir)
            self.prev = cwd
        # BUG FIX: os.chdir raises OSError (FileNotFoundError /
        # NotADirectoryError); the original IOError did not cover that on
        # Python 2 and relied on the IOError==OSError alias on Python 3.
        except OSError:
            return "Directory not found!"
| from __future__ import absolute_import
import os
class Cd:
    """Implements the ``cd`` command for changing the working directory.

    Understands ``cd <dir>``, ``cd ~`` (home) and ``cd -`` (jump back to
    the previously visited directory). The target may name an entry in
    ``variables``, whose value is then used as the path.
    """
    aliases = 'cd'
    description = 'Change working directory'
    prev = os.getcwd()  # where "cd -" goes back to

    def matches(self, expression):
        """A match is exactly two words, the first being ``cd``."""
        words = expression.split()
        return words[0] == 'cd' and len(words) == 2

    def execute(self, expression, variables):
        """Switch to the requested directory, resolving variables first."""
        destination = expression.split()[1]
        destination = variables.get(destination, destination)
        if destination == "~":
            destination = os.path.expanduser('~')
        if destination == "-":
            destination = self.prev
        try:
            here = os.getcwd()
            os.chdir(os.path.join(here, destination))
            self.prev = here
        except IOError:
            return "Directory not found!"
| none | 1 | 2.617234 | 3 |