id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
1620229 | <reponame>fwilhe2/system-automation
import shutil
import subprocess
import sys
# Convenience script for running playbooks in interactive containers for debugging purposes
def ansible_playbook_executable():
    """Return the path of ansible-playbook: the one found on PATH if any,
    otherwise the default pip ``--user`` install location."""
    in_path = shutil.which("ansible-playbook")
    if in_path is None:  # PEP 8: compare with None using `is`, not `==`
        return "/home/user/.local/bin/ansible-playbook"
    return in_path
def playbook_path():
    """Resolve which playbook to run: the first CLI argument if given,
    otherwise the common playbook."""
    args = sys.argv[1:]
    if not args:
        return "/home/user/common.yml"
    return f"/home/user/{args[0]}.yml"
# Run the resolved playbook verbosely; `su` is used for privilege escalation
# inside the debugging container.
subprocess.run([
    ansible_playbook_executable(),
    "--become-method=su",
    # "--skip-tags",
    # "notest",
    "-vv",
    playbook_path(),
])
| StarcoderdataPython |
11314156 | """
This module provides `Sprites` to create animation effects with Paths. For more details see
http://asciimatics.readthedocs.io/en/latest/animation.html
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
from asciimatics.effects import Sprite
from asciimatics.renderers import StaticRenderer
import random
# Images for Sam-ple sprite.
from asciimatics.screen import Screen
sam_default = [
"""
______
.` `.
/ - - \\
| __ |
| |
\\ /
'.______.'
""",
"""
______
.` `.
/ o o \\
| __ |
| |
\\ /
'.______.'
"""
]
sam_left = """
______
.` `.
/ o \\
| |
|-- |
\\ /
'.______.'
"""
sam_right = """
______
.` `.
/ o \\
| |
| --|
\\ /
'.______.'
"""
sam_down = """
______
.` `.
/ \\
| |
| ^ ^ |
\\ __ /
'.______.'
"""
sam_up = """
______
.` __ `.
/ v v \\
| |
| |
\\ /
'.______.'
"""
# Images for an arrow Sprite.
left_arrow = """
/____
/
\\ ____
\\
"""
up_arrow = """
/\\
/ \\
/| |\\
| |
"""
right_arrow = """
____\\
\\
____ /
/
"""
down_arrow = """
| |
\\| |/
\\ /
\\/
"""
default_arrow = [
"""
/\\
/ \\
/|><|\\
| |
""",
"""
/\\
/ \\
/|oo|\\
| |
""",
]
# Simple static function to swap between 2 images to make a sprite blink.
def _blink():
if random.random() > 0.9:
return 0
else:
return 1
class Sam(Sprite):
    """
    Sam Paul sprite - a simple sample animated character.
    """

    def __init__(self, screen, path, start_frame=0, stop_frame=0):
        """
        See :py:obj:`.Sprite` for details.
        """
        # One renderer per key; the non-"default" keys correspond to the
        # directional images defined at module level, and "default" blinks
        # by letting _blink choose between the two sam_default frames.
        super(Sam, self).__init__(
            screen,
            renderer_dict={
                "default": StaticRenderer(images=sam_default, animation=_blink),
                "left": StaticRenderer(images=[sam_left]),
                "right": StaticRenderer(images=[sam_right]),
                "down": StaticRenderer(images=[sam_down]),
                "up": StaticRenderer(images=[sam_up]),
            },
            path=path,
            start_frame=start_frame,
            stop_frame=stop_frame)
class Arrow(Sprite):
    """
    Sample arrow sprite - points where it is going.
    """

    def __init__(self, screen, path, colour=Screen.COLOUR_WHITE, start_frame=0,
                 stop_frame=0):
        """
        See :py:obj:`.Sprite` for details.
        """
        # Same structure as Sam, but with the arrow images and a
        # configurable colour.
        super(Arrow, self).__init__(
            screen,
            renderer_dict={
                "default": StaticRenderer(images=default_arrow,
                                          animation=_blink),
                "left": StaticRenderer(images=[left_arrow]),
                "right": StaticRenderer(images=[right_arrow]),
                "down": StaticRenderer(images=[down_arrow]),
                "up": StaticRenderer(images=[up_arrow]),
            },
            path=path,
            colour=colour,
            start_frame=start_frame,
            stop_frame=stop_frame)
class Plot(Sprite):
    """
    Sample Sprite that simply plots an "X" for each step in the path. Useful
    for plotting a path to the screen.
    """

    def __init__(self, screen, path, colour=Screen.COLOUR_WHITE, start_frame=0,
                 stop_frame=0):
        """
        See :py:obj:`.Sprite` for details.
        """
        # clear=False leaves every plotted "X" on screen, so the whole
        # traversed path stays visible.
        super(Plot, self).__init__(
            screen,
            renderer_dict={
                "default": StaticRenderer(images=["X"])
            },
            path=path,
            colour=colour,
            clear=False,
            start_frame=start_frame,
            stop_frame=stop_frame)
| StarcoderdataPython |
1689596 | from flask import render_template
from backend.common.decorators import cached_public
@cached_public
def gameday() -> str:
    """Render the Gameday page shell.

    NOTE(review): webcasts_json is passed empty and default_chat is None --
    presumably the template loads real data client-side; confirm before
    relying on these values server-side.
    """
    return render_template("gameday2.html", webcasts_json={}, default_chat=None)
| StarcoderdataPython |
8123402 | from hubcheck.pageobjects.basepagewidget import BasePageWidget
from hubcheck.pageobjects.basepageelement import Link
from hubcheck.pageobjects.basepageelement import TextReadOnly
class ToolsStatusBase(BasePageWidget):
    """Page object for a hub's tool-status page.

    Wires up the page components (links, read-only text, sub-widgets) from
    the hub-specific locator classes and exposes thin navigation/accessor
    methods that delegate to those components.
    """

    def __init__(self, owner, locatordict={}):
        # NOTE(review): mutable default argument; safe only as long as
        # neither this class nor BasePageWidget mutates it -- cannot be
        # confirmed from this file.
        super(ToolsStatusBase, self).__init__(owner, locatordict)

        # load hub's classes
        ToolsStatusBase_Locators = self.load_class('ToolsStatusBase_Locators')
        ToolsStatusToolInfo = self.load_class('ToolsStatusToolInfo')
        ToolsStatusRemainingSteps = self.load_class('ToolsStatusRemainingSteps')
        ToolsStatusDeveloperTools = self.load_class('ToolsStatusDeveloperTools')

        # update this object's locator
        self.locators.update(ToolsStatusBase_Locators.locators)

        # update the locators with those from the owner
        self.update_locators_from_owner()

        # setup page object's components
        self.alltools = Link(self, {'base': 'alltools'})
        # Bug fix: this previously reused the 'alltools' locator, so
        # goto_new_tool_form() clicked the "all tools" link instead of the
        # "new tool" link defined by the 'newtool' locator ("css=.add").
        self.newtool = Link(self, {'base': 'newtool'})
        self.state = TextReadOnly(self, {'base': 'state'})
        self.info = ToolsStatusToolInfo(self, {'base': 'info'})
        self.remaining = ToolsStatusRemainingSteps(self, {'base': 'remaining'})
        self.system_message = TextReadOnly(self, {'base': 'system_message'})
        self.devtools = ToolsStatusDeveloperTools(self, {'base': 'devtools'})

        # update the component's locators with this objects overrides
        self._updateLocators()

    # --- top-level navigation -------------------------------------------

    def goto_all_tools_page(self):
        self.alltools.click()

    def goto_new_tool_form(self):
        self.newtool.click()

    def get_tool_state(self):
        return self.state.value

    # --- "remaining steps" (todo) accessors -----------------------------

    def get_todo_register_status(self):
        return self.remaining.get_register_status()

    def get_todo_upload_status(self):
        return self.remaining.get_upload_status()

    def get_todo_toolpage_status(self):
        return self.remaining.get_toolpage_status()

    def get_todo_test_approve_status(self):
        return self.remaining.get_test_approve_status()

    def get_todo_publish_status(self):
        return self.remaining.get_publish_status()

    def goto_todo_toolpage_create(self):
        return self.remaining.goto_toolpage_create()

    def goto_todo_toolpage_preview(self):
        return self.remaining.goto_toolpage_preview()

    def goto_todo_toolpage_edit(self):
        return self.remaining.goto_toolpage_edit()

    def goto_todo_upload_done(self):
        return self.remaining.goto_upload_done()

    def goto_todo_upload_howto(self):
        return self.remaining.goto_upload_howto()

    def goto_todo_approve_tool(self):
        return self.remaining.goto_approve_tool()

    def goto_todo_installed_update_tool(self):
        return self.remaining.goto_installed_update_tool()

    def goto_todo_approved_update_tool(self):
        return self.remaining.goto_approved_update_tool()

    # --- tool info accessors --------------------------------------------

    def goto_toolinfo_edit(self):
        return self.info.goto_edit()

    def get_toolinfo_title(self):
        return self.info.get_title()

    def get_toolinfo_version(self):
        return self.info.get_version()

    def get_toolinfo_glance(self):
        return self.info.get_glance()

    def goto_toolinfo_toolpage_preview(self):
        return self.info.goto_toolpage_preview()

    def goto_toolinfo_toolpage_edit(self):
        return self.info.goto_toolpage_edit()

    def get_toolinfo_vncgeometry(self):
        return self.info.get_vncgeometry()

    def get_toolinfo_toolaccess(self):
        return self.info.get_toolaccess()

    def get_toolinfo_codeaccess(self):
        return self.info.get_codeaccess()

    def get_toolinfo_wikiaccess(self):
        return self.info.get_wikiaccess()

    def get_toolinfo_devteam(self):
        return self.info.get_devteam()

    def get_system_message(self):
        """Return the system-message text, or None if the banner is hidden."""
        if self.system_message.is_displayed():
            return self.system_message.value
        else:
            return None

    # --- developer tools ------------------------------------------------

    def goto_tooldev_history(self):
        return self.devtools.goto_history()

    def goto_tooldev_wiki(self):
        return self.devtools.goto_wiki()

    def goto_tooldev_source_code(self):
        return self.devtools.goto_source_code()

    def goto_tooldev_timeline(self):
        return self.devtools.goto_timeline()

    def open_tooldev_message(self):
        return self.devtools.open_message()

    def cancel_tool(self):
        return self.devtools.cancel_tool()
class ToolsStatusBase_Locators_Base(object):
    """Default CSS locators used by the ToolsStatusBase page object.

    Each entry maps a component name to the Selenium-style CSS selector
    that finds it on the page.
    """

    locators = {
        'base':           "css=#main",
        'alltools':       "css=.main-page",
        'newtool':        "css=.add",
        'state':          "css=.state_hed",
        'info':           "css=#toolstatus",
        'whatsnext':      "css=#whatsnext",
        'remaining':      "css=#whatsnext",   # same element as 'whatsnext'
        'system_message': "css=#system-message",
        'devtools':       "css=.adminactions",
    }
| StarcoderdataPython |
1948636 | import os, sys
def main():
    """CLI entry point: count entries under the directory named on the
    command line, or print a usage hint when no argument was given."""
    args = sys.argv[1:]
    if args:
        engine(' '.join(args))
    else:
        print(os.path.basename(sys.argv[0]), '<directory>')
def engine(path):
    """Walk *path* recursively and print the total number of directories
    and files found beneath it."""
    dir_count = 0
    file_count = 0
    for _root, dirs, files in os.walk(path):
        dir_count += len(dirs)
        file_count += len(files)
    print('Directories =', dir_count)
    print('Files =', file_count)
# Run only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
11362410 | <gh_stars>1-10
# -*- coding: utf-8 -*-
### Import required python modules
from gevent import monkey

# Patch the stdlib for cooperative (gevent) concurrency; this must run
# before other modules capture references to the unpatched stdlib.
monkey.patch_all()
import platform
import os
from os import listdir, stat, makedirs, mkdir, walk, remove, pardir
from os.path import (
isdir,
isfile,
join,
splitext,
getmtime,
basename,
normpath,
exists,
expanduser,
split,
dirname,
getsize,
abspath,
)
import pandas as pd
import time
from time import strftime, localtime
import shutil
from shutil import copy2
from configparser import ConfigParser
import numpy as np
from collections import defaultdict
import subprocess
from websocket import create_connection
import socket
import errno
import re
import gevent
from pennsieve import Pennsieve
from pennsieve.log import get_logger
from pennsieve.api.agent import agent_cmd
from pennsieve.api.agent import AgentError, check_port, socket_address
from urllib.request import urlopen
import json
import collections
from threading import Thread
import pathlib
from datetime import datetime, timezone
from pysoda import bf_get_current_user_permission
"""
Function to get current doi for a selected dataset
Args:
selected_bfaccount: name of selected Pennsieve account (string)
selected_bfdataset: name of selected Pennsieve dataset (string)
Return:
Current doi or "None"
"""
def bf_get_doi(selected_bfaccount, selected_bfdataset):
    """Return the current DOI string for the dataset, or the literal
    string "None" if no DOI has been reserved.

    Requires the caller to be owner or manager of the dataset; raises
    Exception with a user-facing message otherwise.
    """
    try:
        bf = Pennsieve(selected_bfaccount)
    except Exception as e:
        error = "Error: Please select a valid Pennsieve account"
        raise Exception(error)
    try:
        myds = bf.get_dataset(selected_bfdataset)
    except Exception as e:
        error = "Error: Please select a valid Pennsieve dataset"
        raise Exception(error)
    # NOTE(review): `except Exception as e: raise e` is a no-op wrapper.
    try:
        role = bf_get_current_user_permission(bf, myds)
        if role not in ["owner", "manager"]:
            error = "Error: You don't have permissions to view/edit DOI for this Pennsieve dataset"
            raise Exception(error)
    except Exception as e:
        raise e
    try:
        selected_dataset_id = myds.id
        doi_status = bf._api._get("/datasets/" + str(selected_dataset_id) + "/doi")
        return doi_status["doi"]
    except Exception as e:
        # A missing DOI surfaces as an API error mentioning "doi ... not
        # found"; report that case as "None" instead of failing.
        if "doi" in str(e) and "not found" in str(e):
            return "None"
        else:
            raise e
"""
Function to reserve doi for a selected dataset
Args:
selected_bfaccount: name of selected Pennsieve acccount (string)
selected_bfdataset: name of selected Pennsieve dataset (string)
Return:
Success or error message
"""
def bf_reserve_doi(selected_bfaccount, selected_bfdataset):
    """Reserve a DOI for the dataset, using its contributors as creators.

    Returns "Done!" on success; raises Exception with a user-facing
    message on invalid account/dataset, insufficient permissions, or if a
    DOI is already reserved.
    """
    try:
        bf = Pennsieve(selected_bfaccount)
    except Exception as e:
        error = "Error: Please select a valid Pennsieve account"
        raise Exception(error)
    try:
        myds = bf.get_dataset(selected_bfdataset)
    except Exception as e:
        error = "Error: Please select a valid Pennsieve dataset"
        raise Exception(error)
    # NOTE(review): the remaining `except Exception as e: raise e` blocks
    # are no-op wrappers kept as-is.
    try:
        role = bf_get_current_user_permission(bf, myds)
        if role not in ["owner", "manager"]:
            error = "Error: You don't have permissions to view/edit DOI for this Pennsieve dataset"
            raise Exception(error)
    except Exception as e:
        raise e
    try:
        # Guard against double-reservation: bf_get_doi returns "None" only
        # when no DOI exists yet.
        res = bf_get_doi(selected_bfaccount, selected_bfdataset)
        if res != "None":
            error = "Error: A DOI has already been reserved for this dataset"
            raise Exception(error)
    except Exception as e:
        raise e
    try:
        selected_dataset_id = myds.id
        contributors_list = bf._api._get(
            "/datasets/" + str(selected_dataset_id) + "/contributors"
        )
        # Creators are "First Last" strings built from the contributor list.
        creators_list = []
        for item in contributors_list:
            creators_list.append(item["firstName"] + " " + item["lastName"])
        jsonfile = {
            "title": selected_bfdataset,
            "creators": creators_list,
        }
        bf._api.datasets._post("/" + str(selected_dataset_id) + "/doi", json=jsonfile)
        return "Done!"
    except Exception as e:
        raise e
"""
Function to get the review request status and publishing status of a dataset
Args:
selected_bfaccount: name of selected Pennsieve acccount (string)
selected_bfdataset: name of selected Pennsieve dataset (string)
Return:
Current review request and publishing status
"""
def bf_get_publishing_status(selected_bfaccount, selected_bfdataset):
    """
    Return the review-request status and publishing status of a dataset.

    Args:
        selected_bfaccount: name of selected Pennsieve account (string)
        selected_bfdataset: name of selected Pennsieve dataset (string)
    Return:
        list: [review_request_status, publishing_status]
    Raises:
        Exception: if the account or dataset name is not valid.
    """
    try:
        bf = Pennsieve(selected_bfaccount)
    except Exception:
        error = "Error: Please select a valid Pennsieve account"
        raise Exception(error)
    try:
        myds = bf.get_dataset(selected_bfdataset)
    except Exception:
        error = "Error: Please select a valid Pennsieve dataset"
        raise Exception(error)
    # The original wrapped the calls below in `try: ... except Exception as
    # e: raise e`, a no-op; API errors now propagate unchanged.
    selected_dataset_id = myds.id
    review_request_status = bf._api._get("/datasets/" + str(selected_dataset_id))[
        "publication"
    ]["status"]
    publishing_status = bf._api._get(
        "/datasets/" + str(selected_dataset_id) + "/published"
    )["status"]
    return [review_request_status, publishing_status]
"""
Function to publish for a selected dataset
Args:
selected_bfaccount: name of selected Pennsieve acccount (string)
selected_bfdataset: name of selected Pennsieve dataset (string)
Return:
Success or error message
"""
def bf_submit_review_dataset(selected_bfaccount, selected_bfdataset):
    """Submit the dataset for publication review (owner only).

    Returns the API response of the publication request; raises Exception
    with a user-facing message on invalid input or missing permission.
    """
    try:
        bf = Pennsieve(selected_bfaccount)
    except Exception as e:
        error = "Error: Please select a valid Pennsieve account"
        raise Exception(error)
    try:
        myds = bf.get_dataset(selected_bfdataset)
    except Exception as e:
        error = "Error: Please select a valid Pennsieve dataset"
        raise Exception(error)
    # NOTE(review): `except Exception as e: raise e` below is a no-op wrapper.
    try:
        role = bf_get_current_user_permission(bf, myds)
        if role not in ["owner"]:
            error = "Error: You must be dataset owner to send a dataset for review"
            raise Exception(error)
    except Exception as e:
        raise e
    try:
        selected_dataset_id = myds.id
        request_publish = bf._api._post(
            "/datasets/"
            + str(selected_dataset_id)
            + "/publication/request?publicationType="
            + "publication"
        )
        return request_publish
    except Exception as e:
        raise e
def bf_withdraw_review_dataset(selected_bfaccount, selected_bfdataset):
    """Withdraw a pending publication-review request (owner only).

    Mirrors bf_submit_review_dataset but hits the cancel endpoint.
    """
    try:
        bf = Pennsieve(selected_bfaccount)
    except Exception as e:
        error = "Error: Please select a valid Pennsieve account"
        raise Exception(error)
    try:
        myds = bf.get_dataset(selected_bfdataset)
    except Exception as e:
        error = "Error: Please select a valid Pennsieve dataset"
        raise Exception(error)
    # NOTE(review): `except Exception as e: raise e` below is a no-op wrapper.
    try:
        role = bf_get_current_user_permission(bf, myds)
        if role not in ["owner"]:
            error = "Error: You must be dataset owner to withdraw a dataset from review"
            raise Exception(error)
    except Exception as e:
        raise e
    try:
        selected_dataset_id = myds.id
        withdraw_review = bf._api._post(
            "/datasets/"
            + str(selected_dataset_id)
            + "/publication/cancel?publicationType="
            + "publication"
        )
        return withdraw_review
    except Exception as e:
        raise e
"""
DEPRECATED
Function to publish for a selected dataset
Args:
selected_bfaccount: name of selected Pennsieve acccount (string)
selected_bfdataset: name of selected Pennsieve dataset (string)
Return:
Success or error message
"""
def bf_publish_dataset(selected_bfaccount, selected_bfdataset):
    """DEPRECATED: directly publish the dataset (owner only).

    Superseded by the request/review flow (bf_submit_review_dataset);
    returns the status field of the publish response.
    """
    try:
        bf = Pennsieve(selected_bfaccount)
    except Exception as e:
        error = "Error: Please select a valid Pennsieve account"
        raise Exception(error)
    try:
        myds = bf.get_dataset(selected_bfdataset)
    except Exception as e:
        error = "Error: Please select a valid Pennsieve dataset"
        raise Exception(error)
    # NOTE(review): `except Exception as e: raise e` below is a no-op wrapper.
    try:
        role = bf_get_current_user_permission(bf, myds)
        if role not in ["owner"]:
            error = "Error: You must be dataset owner to publish a dataset"
            raise Exception(error)
    except Exception as e:
        raise e
    try:
        selected_dataset_id = myds.id
        request_publish = bf._api._post(
            "/datasets/" + str(selected_dataset_id) + "/publish"
        )
        return request_publish["status"]
    except Exception as e:
        raise e
| StarcoderdataPython |
import collections

# Default menu used whenever the caller does not override a course.
defaults = {
    "appetizers": "Humans",
    "main": "Pizza",
    "desert": "Chocolate Cake",
    # NOTE(review): "dink" looks like a typo for "drink" -- key kept
    # unchanged for backward compatibility.
    "dink": "Water",
}


def prep_menu(customizations):
    """Return a menu in which *customizations* override the defaults.

    Bug fix: the original had the ChainMap on the line after a bare
    ``return``, so it always returned None and the ChainMap expression
    was unreachable.
    """
    return collections.ChainMap(customizations, defaults)


def print_menu(menu):
    """Print one line per course of *menu*.

    Bug fix: the format string was missing its ``f`` prefix, so the
    ``{key}``/``{value}`` placeholders were printed literally.
    """
    for key, value in menu.items():
        print(f"As {key} : {value}.")


if __name__ == '__main__':
    menu1 = prep_menu({})
    print_menu(menu1)
273514 | <filename>trace_normal_of_matrix.py<gh_stars>1-10
#find the trace and normal of a matrix
import math

# First input line: matrix dimensions "rows cols".
row, col = list(map(int,input().split()))
if row != col:
    print("Invalid Input")
else:
    # Read the square matrix, one space-separated row per input line.
    matrix = []
    for r in range(row):
        new_list = list(map(int, input().split()))
        matrix.append(new_list)
    total_trace, total_normal = 0, 0
    for r in range(row):
        for c in range(col):
            total_normal += matrix[r][c] * matrix[r][c]
            if c==r :
                total_trace += matrix[r][c]
                # NOTE(review): this adds the diagonal element's square a
                # SECOND time, so the printed "normal" is not the Frobenius
                # norm sqrt(sum of squares). The sample output below matches
                # this double-count, so the behavior is preserved here --
                # confirm whether it is intentional.
                total_normal += matrix[r][c] * matrix[r][c]
    print(math.sqrt(total_normal),total_trace)
#input
# 3 3
# 1 2 3
# 4 5 6
# 7 8 9
# output
# 19.79898987322333 15
9791837 | <reponame>AnnMarieW/dmc-docs<gh_stars>1-10
import dash_mantine_components as dmc
# Documentation example: an external link rendered with Mantine's Anchor.
component = dmc.Anchor(
    "Dash Mantine Components Announcement",
    href="https://community.plotly.com/t/dash-mantine-components/58414",
)
| StarcoderdataPython |
8051282 | from DataStream.ByteStream import ByteStream
from Logic.Player import Player
from Logic.Helpers import Helpers
import random
from DataBase.DataBase import DataBase
class LogicGiveDeliveryItemsCommand:
    """Encodes the "delivery items" (brawl-box opening) server response.

    NOTE(review): the exact byte layout written via writeVInt /
    writeDataReference follows the game protocol implemented elsewhere;
    the field meanings below are inferred from the inline comments and
    should be confirmed against the protocol definition.
    """

    def encode(self):
        # Brawler Randomaizer:
        # Try up to 10 random picks for a brawler the player does NOT own
        # yet; a == 1 signals a locked (droppable) brawler was found.
        a = 0
        i = 0
        droppedChr = 0
        helper = Helpers(self.player)
        while a == 0 and i < 10:
            # NOTE(review): `dropper` is never used.
            dropper = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20]
            # NOTE(review): randint(0, 20) is INCLUSIVE (21 ids) while the
            # fallback below uses randrange(0, 20) (ids 0..19) -- confirm
            # which id range is intended.
            droppedChr = random.randint(0, 20)
            if helper.getUnlockedCharacter(droppedChr) == 1:
                i += 1
            else:
                a = 1
        # Box costs 100 brawl-box tokens.
        DataBase.replaceValue(self, 'brawlBoxTokens', self.player.brawlBoxTokens - 100)
        self.writeVInt(10)
        self.writeVInt(1)
        self.writeVInt(0)
        self.writeVInt(3) # reward count
        #Gold
        # boxID 5 pays 10-99 gold, any other box 30-149.
        if self.player.boxID == 5:
            GoldValue = random.randrange(10, 100)
            DataBase.replaceValue(self, 'gold', self.player.gold + GoldValue)
        else:
            GoldValue = random.randrange(30, 150)
            DataBase.replaceValue(self, 'gold', self.player.gold + GoldValue)
        self.writeVInt(GoldValue) # reward amount
        self.writeDataReference(0, 7)
        self.writeVInt(0)
        #Brawler
        # PowerPoints
        PPValue = random.randrange(2, 19)
        PPBrawlers = random.randrange(0, 20)
        self.writeVInt(PPValue)
        self.writeDataReference(16, PPBrawlers)
        self.writeVInt(6)
        self.writeVInt(0)
        helper.addPowerPoints(PPBrawlers, PPValue)
        #Brawler
        # i < 10 means the search loop above found a locked brawler to drop;
        # otherwise fall back to a second power-point reward for a brawler
        # different from the first one.
        if i < 10:
            self.writeVInt(1)
            self.writeDataReference(16, droppedChr)
            self.writeVInt(1)
            self.writeVInt(0)
            self.writeVInt(0)
            helper.UnlockBrawler(droppedChr)
        else:
            a = 0
            PP2Value = random.randrange(2, 19)
            PP2Brawlers = 0
            while a == 0:
                # Re-roll until a brawler distinct from PPBrawlers comes up.
                PP2Brawlers = random.randrange(0, 20)
                if PP2Brawlers == PPBrawlers:
                    pass
                else:
                    a = 1
            self.writeVInt(PP2Value)
            self.writeDataReference(16, PP2Brawlers)
            self.writeVInt(6)
            self.writeVInt(0)
            helper.addPowerPoints(PP2Brawlers, PP2Value)
        # Trailing fixed fields of the message.
        for x in range(8):
            self.writeVInt(x)
        self.writeVInt(1)
        self.writeVInt(1)
        self.writeVInt(1)
        self.writeVInt(1)
        self.writeVInt(1)
        self.writeVInt(1)
        self.writeVInt(1)
        self.writeVInt(1)
5108962 | <filename>bot/core/colours.py
# Future
from __future__ import annotations
# Packages
import discord
# Shared embed colours for the bot (hex RGB).
MAIN = discord.Colour(0xF1C30F)   # gold/yellow accent
RED = discord.Colour(0xF20035)    # vivid red
GREEN = discord.Colour(0x16E037)  # bright green
| StarcoderdataPython |
379175 | <reponame>arcann/config_wrangler<gh_stars>0
import unittest
import boto3
import moto
from config_wrangler.config_templates.credentials import PasswordSource
from config_wrangler.config_templates.s3_bucket import S3_Bucket, S3_Bucket_Folder
from tests.base_tests_mixin import Base_Tests_Mixin
@moto.mock_s3
class TestS3HelperFunctions(unittest.TestCase, Base_Tests_Mixin):
    """Tests for S3_Bucket / S3_Bucket_Folder listing helpers against a
    moto-mocked S3 backend (no real AWS access)."""

    def setUp(self):
        # Three mock buckets; only bucket1 receives objects.
        self.mock_client = boto3.client('s3')
        self.bucket1_name = 'mock_bucket'
        self.mock_client.create_bucket(
            Bucket=self.bucket1_name,
            ACL='private',
        )
        self.bucket2_name = 'mock_bucket2'
        self.mock_client.create_bucket(
            Bucket=self.bucket2_name,
        )
        self.bucket3_name = 'mock_bucket3'
        self.mock_client.create_bucket(
            Bucket=self.bucket3_name,
        )
        # Two objects in bucket1: one at the root, one inside folder1/.
        self.example1_key = 'test_good.ini'
        self.mock_client.upload_file(
            Bucket=self.bucket1_name,
            Key=self.example1_key,
            Filename=str(self.get_test_files_path() / 'test_good.ini')
        )
        self.example2_key = 'folder1/file.txt'
        self.mock_client.upload_file(
            Bucket=self.bucket1_name,
            Key=self.example2_key,
            Filename=str(self.get_test_files_path() / 'test_good.ini')
        )

    def test_list_files(self):
        """Bucket-level listing returns exactly the uploaded keys."""
        # NOTE(review): '<PASSWORD>' appears to be a redacted placeholder
        # from the dataset -- confirm the intended test credential.
        bucket = S3_Bucket(
            bucket_name=self.bucket1_name,
            user_id='mock_user',
            raw_password='<PASSWORD>',
            password_source=PasswordSource.CONFIG_FILE,
        )
        contents = bucket.list_object_keys(key=None)
        self.assertIn(self.example1_key, contents)
        self.assertIn(self.example2_key, contents)
        self.assertEqual(len(contents), 2)
        # An empty bucket lists nothing.
        bucket2 = S3_Bucket(
            bucket_name=self.bucket2_name,
            user_id='mock_user',
            raw_password='<PASSWORD>',
            password_source=PasswordSource.CONFIG_FILE,
        )
        contents = bucket2.list_object_keys(key=None)
        self.assertNotIn(self.example1_key, contents)
        self.assertEqual(len(contents), 0)

    def test_list_folder_files(self):
        """Folder-scoped listing restricts to keys under the folder."""
        bucket_folder = S3_Bucket_Folder(
            bucket_name=self.bucket1_name,
            folder='folder1',
            user_id='mock_user',
            raw_password='<PASSWORD>',
            password_source=PasswordSource.CONFIG_FILE,
        )
        contents = bucket_folder.list_object_keys(key=None)
        self.assertNotIn(self.example1_key, contents)
        self.assertIn(self.example2_key, contents)
        self.assertEqual(len(contents), 1)

        # Ask for a different folder
        contents = bucket_folder.list_object_keys(key='')
        self.assertIn(self.example1_key, contents)
        self.assertIn(self.example2_key, contents)
        self.assertEqual(len(contents), 2)

        # The `/` operator derives a sub-folder object from the root folder.
        root_folder = S3_Bucket_Folder(
            bucket_name=self.bucket1_name,
            folder='',
            user_id='mock_user',
            raw_password='<PASSWORD>',
            password_source=PasswordSource.CONFIG_FILE,
        )
        folder1 = root_folder / 'folder1'
        contents = folder1.list_object_keys(key=None)
        self.assertNotIn(self.example1_key, contents)
        self.assertIn(self.example2_key, contents)
        self.assertEqual(len(contents), 1)

    def test_bucket_upload(self):
        # TODO: upload test not yet implemented.
        pass
| StarcoderdataPython |
5002502 | import colander
import deform.widget
from pyramid.httpexceptions import HTTPFound
from pyramid.view import view_config
from .models import DBSession, Page
class WikiPage(colander.MappingSchema):
    """Colander schema for a wiki page: a title plus a rich-text body."""
    title = colander.SchemaNode(colander.String())
    body = colander.SchemaNode(
        colander.String(),
        widget=deform.widget.RichTextWidget()
    )
class WikiViews(object):
    """Pyramid view class for listing, viewing, adding and editing wiki
    pages backed by the SQLAlchemy ``Page`` model."""

    def __init__(self, request):
        self.request = request

    @property
    def wiki_form(self):
        """A fresh deform Form for the WikiPage schema (one Submit button)."""
        schema = WikiPage()
        return deform.Form(schema, buttons=('submit',))

    @property
    def reqts(self):
        """Static (JS/CSS) resources the form widgets need in templates."""
        return self.wiki_form.get_widget_resources()

    @view_config(route_name='wiki_view', renderer='wiki_view.pt')
    def wiki_view(self):
        """List all pages ordered by title."""
        pages = DBSession.query(Page).order_by(Page.title)
        return dict(title='Wiki View', pages=pages)

    @view_config(route_name='wikipage_add',
                 renderer='wikipage_addedit.pt')
    def wikipage_add(self):
        """Show the add form; on submit, validate, persist, and redirect."""
        form = self.wiki_form.render()
        if 'submit' in self.request.params:
            controls = self.request.POST.items()
            try:
                appstruct = self.wiki_form.validate(controls)
            except deform.ValidationFailure as e:
                # Form is NOT valid: re-render with validation errors.
                return dict(form=e.render())
            # Add a new page to the database
            new_title = appstruct['title']
            new_body = appstruct['body']
            DBSession.add(Page(title=new_title, body=new_body))
            # Get the new ID and redirect
            page = DBSession.query(Page).filter_by(title=new_title).one()
            new_uid = page.uid
            url = self.request.route_url('wikipage_view', uid=new_uid)
            return HTTPFound(url)
        return dict(form=form)

    @view_config(route_name='wikipage_view', renderer='wikipage_view.pt')
    def wikipage_view(self):
        """Display a single page looked up by its uid route parameter."""
        uid = int(self.request.matchdict['uid'])
        page = DBSession.query(Page).filter_by(uid=uid).one()
        return dict(page=page)

    @view_config(route_name='wikipage_edit',
                 renderer='wikipage_addedit.pt')
    def wikipage_edit(self):
        """Show the edit form pre-filled; on submit, update and redirect."""
        uid = int(self.request.matchdict['uid'])
        page = DBSession.query(Page).filter_by(uid=uid).one()
        wiki_form = self.wiki_form
        if 'submit' in self.request.params:
            controls = self.request.POST.items()
            try:
                appstruct = wiki_form.validate(controls)
            except deform.ValidationFailure as e:
                return dict(page=page, form=e.render())
            # Change the content and redirect to the view
            page.title = appstruct['title']
            page.body = appstruct['body']
            url = self.request.route_url('wikipage_view', uid=uid)
            return HTTPFound(url)
        form = self.wiki_form.render(dict(
            uid=page.uid, title=page.title, body=page.body)
        )
        return dict(page=page, form=form)
1692607 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import uuid
from typing import List
from flask_appbuilder import Model
from sqlalchemy import Column, ForeignKey, Integer, Text
from sqlalchemy.orm import relationship
from sqlalchemy_utils import UUIDType
from superset.models.helpers import AuditMixinNullable
class EmbeddedDashboard(Model, AuditMixinNullable):
    """
    A configuration of embedding for a dashboard.
    Currently, the only embeddable resource is the Dashboard.
    If we add new embeddable resource types, this model should probably be renamed.

    References the dashboard, and contains a config for embedding that dashboard.

    This data model allows multiple configurations for a given dashboard,
    but at this time the API only allows setting one.
    """

    __tablename__ = "embedded_dashboards"

    # Random UUID primary key, stored in binary form.
    uuid = Column(UUIDType(binary=True), default=uuid.uuid4, primary_key=True)
    # Comma-separated string; read via the `allowed_domains` property instead.
    allow_domain_list = Column(Text)  # reference the `allowed_domains` property instead
    dashboard_id = Column(Integer, ForeignKey("dashboards.id"), nullable=False)
    dashboard = relationship(
        "Dashboard",
        back_populates="embedded",
        foreign_keys=[dashboard_id],
    )

    @property
    def allowed_domains(self) -> List[str]:
        """
        A list of domains which are allowed to embed the dashboard.
        An empty list means any domain can embed.
        """
        return self.allow_domain_list.split(",") if self.allow_domain_list else []
| StarcoderdataPython |
11315073 | <reponame>risklayer/corona-landkreis-crawler
#!/usr/bin/python3
from botbase import *
# Total recorded infections: "<n> Infizierte erfasst".
_mettmann_a = re.compile(r"([0-9.]+)\sInfizierte\serfasst")
# Deaths: optional filler words between "Kreis" and the number.
_mettmann_d = re.compile(r"Verstorbene zählt der Kreis (?:damit |demnach |bislang |insgesamt )*([0-9.]*)\.")
# Recovered: "<n> Personen gelten als genesen".
_mettmann_g = re.compile(r"([0-9.]+)\sPersonen\sgelten\sals\sgenesen")
def mettmann(sheets):
    """Scrape today's Corona figures for Kreis Mettmann and push them to
    *sheets* under district id 5158.

    Raises NotYetAvailableException when today's bulletin is not yet
    published on the news page.
    """
    soup = get_soup("https://www.kreis-mettmann-corona.de/Aktuelle-Meldungen/")
    # Find the news item that contains the daily figures ("Genesene,").
    article = soup.find(class_="mitteilungen").findAll("li")
    article = next(x for x in article if "Genesene," in x.get_text())
    if not today().strftime("%d.%m.%Y") in article.get_text(): raise NotYetAvailableException("Mettmann noch alt: "+article.find(class_="list-text").find("small").get_text())
    url = urljoin("https://www.kreis-mettmann-corona.de/Aktuelle-Meldungen/", article.find("a")["href"])
    print("Getting", url)
    assert url
    soup = get_soup(url)
    text = "\n".join(p.get_text(" ") for p in soup.find("article").findAll("p"))
    #print(text)
    # a = currently infected, d = deceased, g = recovered; c = total cases.
    a = force_int(_mettmann_a.search(text).group(1))
    d = force_int(_mettmann_d.search(text).group(1))
    g = force_int(_mettmann_g.search(text).group(1))
    c = a + d + g
    update(sheets, 5158, c=c, d=d, g=g, comment="Bot ohne QS")
    return True
# Register the crawler with botbase's scheduler for district 5158; the
# numeric arguments follow botbase's Task signature (time window etc.).
schedule.append(Task(11, 55, 13, 35, 360, mettmann, 5158))
# Manual run: write straight to the Google Sheets backend.
if __name__ == '__main__': mettmann(googlesheets())
| StarcoderdataPython |
5198237 | <reponame>alirezakazemipour/NN-Without-Frameworks<gh_stars>1-10
import numpy as np
from .utils import *
from .activations import *
from .initializers import *
def supported_layers():
    """Return the class names of all registered parametric layer types."""
    return [x.__name__ for x in ParamLayer.__subclasses__()]
class Layer:
    """Abstract base for all network layers.

    `vars` maps parameter/gradient names (e.g. "W", "dW") to their values;
    subclasses must implement forward and backward.
    """

    def __init__(self):
        self.vars = {}

    def forward(self, x):
        raise NotImplementedError

    def backward(self, x):
        raise NotImplementedError
class ParamLayer(Layer, ABC):
    """Base for layers with trainable weights and biases.

    Parameters are stored as nested Python lists in `self.vars` under keys
    "W"/"b", with matching gradient slots "dW"/"db".
    """

    def __init__(self,
                 weight_shape,
                 weight_initializer,
                 bias_initializer,
                 regularizer_type: str = None,
                 lam: float = 0.):
        super().__init__()
        i, j = weight_shape
        # W: (i, j) list-of-lists; b: (1, j) row vector.
        self.vars["W"] = weight_initializer.initialize([[0 for _ in range(j)] for _ in range(i)])
        self.vars["b"] = [bias_initializer.initialize([0 for _ in range(j)])]
        # Gradient buffers, zero-initialized to the same shapes.
        self.vars["dW"] = [[0 for _ in range(j)] for _ in range(i)]
        self.vars["db"] = [[0 for _ in range(j)]]
        # Cached pre-activation and input for the backward pass.
        self.z = None
        self.input = None
        # "l1"/"l2"/None regularization with strength `lam`.
        self.regularizer_type = regularizer_type
        self.lam = lam
class Dense(ParamLayer, ABC):
    """Fully connected layer: a = act(x @ W + b), implemented with the
    pure-Python matrix helpers from utils."""

    def __init__(self, in_features: int,
                 out_features: int,
                 activation: Activation = Linear(),
                 weight_initializer: Initializer = RandomUniform(),
                 bias_initializer: Initializer = Constant(),
                 regularizer_type: str = None,
                 lam: float = 0.
                 ):
        # NOTE(review): the default argument instances (Linear(),
        # RandomUniform(), Constant()) are shared across all Dense layers
        # created without explicit arguments -- harmless only if those
        # objects are stateless; confirm in activations/initializers.
        super().__init__(weight_shape=(in_features, out_features),
                         weight_initializer=weight_initializer,
                         bias_initializer=bias_initializer,
                         regularizer_type=regularizer_type,
                         lam=lam
                         )
        self.in_features = in_features
        self.out_features = out_features
        self.act = activation
        self.weight_initializer = weight_initializer
        self.bias_initializer = bias_initializer
        self.regularizer_type = regularizer_type
        self.lam = lam

    def forward(self, x):
        """Return act(x W + b) for a batch x of shape (batch, in_features)."""
        # Accept numpy input but compute on plain lists.
        if isinstance(x, np.ndarray):
            x = np.ndarray.tolist(x)
        assert isinstance(x, list)
        assert isinstance(x[0], list), "Feed the input to the network in batch mode: (batch_size, n_dims)"
        self.input = x
        # z = x.dot(self.vars["W"]) + self.vars["b"]
        z = mat_mul(x, self.vars["W"])
        # Broadcast the (1, out) bias row across the batch.
        b = deepcopy(self.vars["b"])
        while len(b) < len(x):
            b.append(self.vars["b"][0])
        z = mat_add(z, b)
        self.z = z
        a = self.act(z)
        return a

    def backward(self, delta):
        """Given upstream gradient `delta`, fill dW/db and return the
        gradient with respect to this layer's input."""
        # dz = delta * act'(z)
        dz = element_wise_mul(delta, self.act.derivative(self.z))
        input_t = transpose(self.input)
        # dW = x^T dz / batch_size
        dw_unscale = mat_mul(input_t, dz)
        self.vars["dW"] = rescale(dw_unscale, 1 / len(dz))
        # self.vars["db"] = np.sum(dz, axis=0) / dz.shape[0]
        ones_t = [[1 for _ in range(len(dz))] for _ in range(1)]
        db_unscale = mat_mul(ones_t, dz)
        self.vars["db"] = rescale(db_unscale, 1 / len(dz))
        # Add the regularization term to the weight gradient only.
        if self.regularizer_type == "l2":
            self.vars["dW"] = mat_add(self.vars["dW"], rescale(self.vars["W"], self.lam))
            # self.vars["db"] = mat_add(self.vars["db"], rescale(self.vars["b"], self.lam))
        elif self.regularizer_type == "l1":
            self.vars["dW"] = add_scalar(self.vars["dW"], self.lam)
            # self.vars["db"] = add_scalar(self.vars["db"], self.lam)
        w_t = transpose(self.vars["W"])
        # delta = dz.dot(self.vars["W"].T)
        delta = mat_mul(dz, w_t)
        return delta

    def __call__(self, x):
        return self.forward(x)
class BatchNorm1d(ParamLayer, ABC):
    """1-D batch normalization over (batch, in_features) list inputs.

    W plays the role of gamma (scale) and b of beta (shift); running
    mean/std estimates are kept for evaluation mode.
    """
    # https://pytorch.org/docs/stable/generated/torch.nn.BatchNorm1d.html
    def __init__(self, in_features: int):
        super().__init__(weight_shape=(1, in_features),
                         weight_initializer=Constant(1.),
                         bias_initializer=Constant(0.)
                         )
        self.in_features = in_features
        self.x_hat = None
        self.eps = 1e-5
        # `beta` here is the running-average momentum, NOT the shift
        # parameter (that lives in vars["b"]).
        self.beta = 0.1
        self.mu = 0
        self.std = 0
        # Running estimates used when eval=True.
        self.mu_hat = [[0 for _ in range(self.in_features)]]
        self.std_hat = [[1 for _ in range(self.in_features)]]
        self.gamma = None

    def forward(self, x, eval=False):
        """Normalize x; use batch statistics when training, running
        statistics when `eval` is True.

        NOTE(review): the `eval` parameter shadows the builtin; kept to
        preserve the keyword interface for existing callers.
        """
        assert isinstance(x, list)
        assert isinstance(x[0], list), "Feed the input to the network in batch mode: (batch_size, n_dims)"
        if not eval:
            self.mu = batch_mean(x)
            self.std = mat_sqrt(batch_var(x, self.mu))
            # Exponential moving averages of mean and std.
            self.mu_hat = mat_add(rescale(self.mu_hat, 1 - self.beta), rescale(self.mu, self.beta))
            self.std_hat = mat_add(rescale(self.std_hat, 1 - self.beta), rescale(self.std, self.beta))
        else:
            self.mu = self.mu_hat
            self.std = self.std_hat
        # Broadcast the (1, features) statistics across the batch.
        mu = deepcopy(self.mu)
        std = deepcopy(self.std)
        while len(mu) < len(x):
            mu.append(self.mu[0])
            std.append(self.std[0])
        # x_hat = (x - mu) / sqrt(std^2 + eps)
        num = mat_add(x, rescale(mu, -1))
        den = mat_sqrt(add_scalar(element_wise_mul(std, std), self.eps))
        x_hat = element_wise_mul(num, element_wise_rev(den))
        self.x_hat = x_hat
        # y = gamma * x_hat + beta (gamma/beta broadcast across the batch).
        self.gamma = deepcopy(self.vars["W"])
        beta = deepcopy(self.vars["b"])
        while len(self.gamma) < len(x):
            self.gamma.append(self.vars["W"][0])
            beta.append(self.vars["b"][0])
        y = mat_add(element_wise_mul(self.gamma, x_hat), beta)
        return y

    def backward(self, delta):
        """Backprop through the normalization; fills dW (gamma grad) and
        db (beta grad), returns the input gradient."""
        # https://kevinzakka.github.io/2016/09/14/batch_normalization/
        dz = delta
        dx_hat = element_wise_mul(dz, self.gamma)
        m = len(dz)
        self.vars["dW"] = rescale(batch_sum(element_wise_mul(self.x_hat, dz)), 1 / m)
        self.vars["db"] = rescale(batch_sum(dz), 1 / m)
        a1 = rescale(dx_hat, m)
        a2 = batch_sum(dx_hat)
        a3 = element_wise_mul(*equal_batch_size(self.x_hat, batch_sum(element_wise_mul(dx_hat, self.x_hat))))
        num = mat_add(a1, mat_add(*equal_batch_size(rescale(a2, -1), rescale(a3, -1))))
        den = rescale(mat_sqrt(add_scalar(element_wise_mul(self.std, self.std), self.eps)), m)
        delta = element_wise_mul(*equal_batch_size(num, element_wise_rev(den)))
        return delta

    def __call__(self, x, eval=False):
        return self.forward(x, eval)
| StarcoderdataPython |
6672451 | <filename>addons14/project_status/tests/test_status.py
from odoo.tests.common import SavepointCase
class TestProjectStatus(SavepointCase):
    """Exercises the project.status model and its kanban status grouping."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.ProjectStatus = cls.env["project.status"]
        cls.status = cls.ProjectStatus.create({"name": "New Status"})

    def _create_project(self):
        """Create and return a project linked to the shared test status."""
        values = {
            "name": "Project 1",
            "project_status": self.status.id,
        }
        return self.env["project.project"].create(values)

    def test_01_project_status(self):
        project = self._create_project()
        self.assertEqual(project.project_status.id, self.status.id)
        statuses = project._read_group_status_ids(
            project.project_status, [], "status_sequence"
        )
        self.assertTrue(len(statuses) >= 1)
| StarcoderdataPython |
5010512 | import os
from pathlib import Path
import pytest
from dismantle.package import PackageFormat, ZipPackageFormat
def test_inherits() -> None:
    """ZipPackageFormat must derive from the PackageFormat base."""
    is_subclass = issubclass(ZipPackageFormat, PackageFormat)
    assert is_subclass is True
def test_grasp_exists(datadir: Path) -> None:
    """A plain on-disk zip path is graspable."""
    archive = datadir.join('package.zip')
    assert ZipPackageFormat.grasps(archive) is True
def test_grasp_file_url(datadir: Path) -> None:
    """A file:// URL pointing at a zip is graspable too."""
    archive_url = f'file://{datadir.join("package.zip")}'
    assert ZipPackageFormat.grasps(archive_url) is True
def test_grasp_not_supported(datadir: Path) -> None:
    """A directory source is not graspable by the zip formatter."""
    directory = datadir.join('directory_src')
    assert ZipPackageFormat.grasps(directory) is False
def test_grasp_not_supported_format(datadir: Path) -> None:
    """A non-zip file extension is rejected by grasps()."""
    other_file = datadir.join('invalid.file')
    assert ZipPackageFormat.grasps(other_file) is False
def test_extract_not_supported(datadir: Path) -> None:
    """Extracting from a directory source raises a descriptive ValueError."""
    source = datadir.join('directory_src')
    target = datadir.join(f'{source}_output')
    expected = 'formatter only supports zip files'
    with pytest.raises(ValueError, match=expected):
        ZipPackageFormat.extract(source, target)
def test_extract_not_supported_format(datadir: Path) -> None:
    """Extracting from a non-zip file raises a descriptive ValueError."""
    source = datadir.join('invalid.file')
    target = datadir.join(f'{source}_output')
    expected = 'formatter only supports zip files'
    with pytest.raises(ValueError, match=expected):
        ZipPackageFormat.extract(source, target)
def test_extract_non_existant(datadir: Path) -> None:
    """A missing zip path is reported as an invalid zip file."""
    source = datadir.join('non_existant.zip')
    target = datadir.join(f'{source}_output')
    expected = 'invalid zip file'
    with pytest.raises(ValueError, match=expected):
        ZipPackageFormat.extract(source, target)
def test_extract_already_exists(datadir: Path) -> None:
    """Extraction into an already-existing directory succeeds (returns None)."""
    source = datadir.join('package.zip')
    target = datadir.join('directory_exists')
    assert ZipPackageFormat.extract(source, target) is None
def test_extract_create(datadir: Path) -> None:
    """Extraction creates the destination and unpacks package.json."""
    source = datadir.join('package.zip')
    target = datadir.join('directory_created')
    ZipPackageFormat.extract(source, target)
    assert os.path.exists(target) is True
    assert os.path.exists(target / 'package.json') is True
| StarcoderdataPython |
1601363 | <reponame>NVlabs/torchtrainers<filename>torchtrainers/trainer.py
#
# Copyright (c) 2013-2019 <NAME>. All rights reserved.
# This file is part of torchtrainers (see unknown).
# See the LICENSE file for licensing terms (BSD-style).
#
"""Training-related part of the Keras engine.
"""
import os
import os.path
import sys
import numpy as np
import matplotlib.pyplot as plt
import torch
import torch.optim
import time
import re
import copy
def get_info():
    """Return a one-line run fingerprint: current time, hostname, user."""
    import platform
    import getpass
    hostname = str(platform.node())
    username = getpass.getuser()
    timestamp = time.ctime()
    return "{} {} {}".format(timestamp, hostname, username)
def loader_test(source, nbatches=10, skip=10):
    """Benchmark a data loader and describe one sample batch.

    Warms up by consuming `skip` batches, then times `nbatches` batches and
    prints samples/s and batches/s, followed by a per-element description of
    the last batch seen (Tensor/ndarray shape, dtype and value range).

    source: iterable of batch tuples; element 0 is treated as the batch whose
        length gives the per-batch sample count.
    nbatches: number of batches to time.
    skip: number of warm-up batches to discard before timing.
    """
    # Warm-up pass: discard the first `skip` batches.
    for i, sample in enumerate(source):
        if i >= skip - 1:
            break
    start = time.time()
    count = 0
    for i, sample in enumerate(source):
        xs = sample[0]
        count += len(xs)
        if i >= nbatches - 1:
            break
    finish = time.time()
    # BUG FIX: guard against a zero elapsed time (in-memory sources can
    # iterate faster than the clock resolution), which previously raised
    # ZeroDivisionError in the rate computation below.
    delta = max(finish - start, 1e-9)
    print("{:.2f} samples/s {:.2f} batches/s".format(count / delta, nbatches / delta))
    for index, a in enumerate(sample):
        if isinstance(a, torch.Tensor):
            print(index, ":", "Tensor", a.shape, a.device, a.dtype, a.min().item(), a.max().item())
        elif isinstance(a, np.ndarray):
            print(index, ":", "ndarray", a.shape, a.dtype, np.amin(a), np.amax(a))
        else:
            print(index, ":", type(a))
def astime(s):
    """Format a duration in seconds as [Nd]HH:MM:SS, rounding up to a
    whole second."""
    total = int(s + 0.999)
    minutes, seconds = divmod(total, 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)
    prefix = "{:d}d".format(days) if days > 0 else ""
    return prefix + "{:02d}:{:02d}:{:02d}".format(hours, minutes, seconds)
def within_jupyter():
    """Best-effort check for running under a Jupyter kernel.

    Outside IPython, get_ipython() is undefined and the NameError means False.
    """
    try:
        cfg = get_ipython().config
    except NameError:
        return False
    return cfg["IPKernelApp"]["parent_appname"] is not None
def jupyter_plot(axis, ys, xs=None, sigma=0, xscale=None, yscale=None):
    """Clear *axis* and plot ys (optionally against xs), with optional
    Gaussian smoothing (sigma > 0) and axis scales ('log', 'linear', ...)."""
    axis.cla()
    if xscale is not None:
        axis.set_xscale(xscale)
    if yscale is not None:
        axis.set_yscale(yscale)
    from scipy.ndimage import filters
    if sigma > 0:
        ys = filters.gaussian_filter(np.array(ys, "f"), sigma, mode="nearest")
    plot_args = (ys,) if xs is None else (xs, ys)
    axis.plot(*plot_args)
class Progress(object):
    """Accumulates (count, loss) samples and collapses them into chunked
    averages once the count span exceeds `chunk`."""

    def __init__(self, chunk=1000):
        self.chunk = chunk
        self.losses = []       # raw (count, loss) pairs of the current chunk
        self.avg_losses = []   # (count, mean_loss) per completed chunk

    def add(self, count, loss):
        """Record one sample; flush the chunk when its count span is exceeded."""
        self.losses.append((count, loss))
        span = self.losses[-1][0] - self.losses[0][0]
        if span > self.chunk:
            mean_loss = np.mean([item[1] for item in self.losses])
            self.avg_losses.append((self.losses[-1][0], mean_loss))
            self.losses = []

    def value(self, index=1):
        """Latest averaged value (or raw value if no chunk completed; -1 if empty)."""
        if self.avg_losses:
            return self.avg_losses[-1][index]
        if self.losses:
            return self.losses[-1][index]
        return -1

    def plot(self, axis=None, **kw):
        """Plot the chunk averages against their sample counts."""
        axis = axis or plt.gca()
        if not self.avg_losses:
            return
        counts = [point[0] for point in self.avg_losses]
        means = [point[1] for point in self.avg_losses]
        axis.plot(counts, means, **kw)
def getvalue(x):
    """Coerce a scalar-like (tensor/ndarray scalar or plain number) to float."""
    return x.item() if hasattr(x, "item") else float(x)
class MovingAverage0(object):
    """Exponentially-decayed running average plus an exact global mean."""

    def __init__(self, initial=1.0, range=5000):
        self.range = range
        self.maverage = initial   # exponentially-weighted estimate
        self.count = 0
        self.total = 0

    def add(self, x, weight=1):
        """Fold one value in; returns the updated moving estimate."""
        self.total += x
        self.count += 1
        self.maverage = self.maverage * float(weight) / self.range + \
            x * float(self.range - weight) / self.range
        return self.maverage

    def recent(self):
        """The exponentially-weighted estimate."""
        return self.maverage

    def value(self):
        """The exact mean over everything added so far."""
        return float(self.total) / self.count
class MovingAverage(object):
    """Windowed running average over the last `range` values added."""

    def __init__(self, range=50):
        self.range = range
        self.values = []

    def add(self, x, weight=1):
        # `weight` is accepted for interface parity but not used here.
        self.values.append(x)

    def recent(self):
        """Mean of the trailing window, or -1 when empty."""
        if not self.values:
            return -1
        return np.mean(self.values[-self.range:])

    def value(self):
        """Mean of everything added so far, or -1 when empty."""
        if not self.values:
            return -1
        return np.mean(self.values)

    def __len__(self):
        return len(self.values)
class Misclassification(object):
    """Running misclassification-rate metric over integer class targets."""

    def __init__(self):
        self.counts = 0              # total samples seen
        self.errs = 0                # total misclassified samples
        self.moving = MovingAverage()

    def name(self):
        return "err"

    def add(self, pred, target):
        """Fold in one batch; pred may be logits (argmax taken) or labels."""
        assert not torch.is_floating_point(target), target.dtype
        assert target.ndimension() == 1
        if target.ndimension() == pred.ndimension() - 1:
            _, pred = pred.max(-1)
        assert target.ndimension() == pred.ndimension(), (target.size(), pred.size())
        assert target.size() == pred.size(), (target.size(), pred.size())
        batch = len(target)
        wrong = (target != pred).float().sum()
        self.counts += batch
        self.errs += wrong
        self.moving.add(float(wrong) / batch, batch)

    def value(self):
        """Overall error rate."""
        return float(self.errs) / self.counts

    def recent(self):
        """Windowed recent error rate."""
        return self.moving.recent()
def apply1(f, x):
    """Return f(x), treating a None function as the identity."""
    return x if f is None else f(x)
def jupyter_after_batch(self):
    """Redraw the loss/error plots inline after a batch (Jupyter only).

    Expects `self` to carry `losses` and `errors` objects exposing a
    `.plot(axis)` method (see Progress above).
    NOTE(review): `display` is never imported in this module; presumably it
    is IPython.display available in a notebook session -- confirm before
    relying on this outside Jupyter.
    """
    #fig = plt.gcf()
    plt.close("all")
    fig = plt.figure(figsize=(12, 6))
    fig.clf()
    # Two side-by-side panels: losses on the left, errors on the right.
    fig.add_subplot(1, 2, 1)
    fig.add_subplot(1, 2, 2)
    axis = fig.get_axes()[0]
    self.losses.plot(axis)
    axis = fig.get_axes()[1]
    self.errors.plot(axis)
    display.clear_output(wait=True)
    display.display(axis.figure)
def compare(x, y):
    """Three-way comparison: -1 if x < y, 1 if x > y, else 0."""
    return (x > y) - (x < y)
def getmeta(meta, key, mode="training"):
if key in meta:
return meta[key]
if key=="loss":
return meta.get(mode+"_loss")
errs = meta.get(mode+"_err")
for k, v in errs:
if k==key:
return v
return None
def extract_log(metalist, key="loss", mode="training"):
xs, ys = [], []
for meta in metalist:
xs.append(getmeta(meta, "ntrain"))
ys.append(getmeta(meta, key=key, mode=mode))
return xs, ys
def compare_metas(meta1, meta2):
    """Order two META dicts by quality: first metric of testing_err if
    present, else testing_loss, else training_loss. None counts as worst."""
    if meta1 is None:
        return 1
    if meta2 is None:
        return -1
    if "testing_err" in meta1:
        key = lambda m: m["testing_err"][0][1]
    elif "testing_loss" in meta1:
        key = lambda m: m["testing_loss"]
    else:
        key = lambda m: m["training_loss"]
    return compare(key(meta1), key(meta2))
class Trainer(object):
    """Adds training & evaluation routines to a torch model.

    Wraps a model, a loss criterion and an optimizer factory; tracks running
    loss/metric statistics; records per-pass summaries in `model.META` and
    `self.logs`.

    Note: `target_tensors` and `stop_if` are accepted for interface
    compatibility but are currently unused by this class.
    """
    def __init__(self,
                 model,
                 criterion=None,
                 optimizer=None,
                 optimizer_factory=torch.optim.SGD,
                 preprocess=None,
                 encode_input=None,
                 encode_target=None,
                 decode_pred=None,
                 target_tensors=None,
                 stop_if=None,
                 device=None,
                 metrics=None,
                 verbose=True,
                 #loss_weights=None,
                 #sample_weight_mode=None,
                 #weighted_metrics=None,
                 ):
        self.device = device
        self.set_model(model)
        self.criterion = criterion
        self.criterion.to(device)
        self.optimizer_factory = optimizer_factory
        if optimizer is not None:
            self.set_optimizer(optimizer)
        self.verbose = verbose
        self.preprocess = preprocess
        self.encode_target = encode_target
        self.decode_pred = decode_pred
        # BUG FIX: this previously read `self.encode_input = decode_pred`,
        # silently discarding the encode_input argument.
        self.encode_input = encode_input
        # BUG FIX: `metrics=[]` was a shared mutable default; None sentinel.
        self.metrics = [] if metrics is None else metrics
        self.reset_metrics()
        self.count = 0
        self.after_batch = self.default_after_batch
        self.after_epoch = self.default_after_epoch
        self.report_every = 1
        self.record_last = True
        self.last_lr = None
        self.progress_prefix = "training"
        self.logs = []

    def set_model(self, model):
        """Install `model` on the device, ensuring it carries META bookkeeping."""
        self.model = model.to(self.device)
        if not hasattr(self.model, "META"):
            self.model.META = dict(ntrain=0)
        # A new model invalidates any optimizer bound to the old parameters.
        self.optimizer = None

    def set_optimizer(self, optimizer):
        """Install an optimizer and clear any stale gradients."""
        self.optimizer = optimizer
        self.optimizer.zero_grad()

    def set_lr(self, lr, **kw):
        """Build a fresh optimizer at learning rate `lr` if it changed."""
        if lr != self.last_lr:
            self.optimizer = self.optimizer_factory(self.model.parameters(), lr, **kw)
            sopt = re.sub(r"\s+", " ", str(self.optimizer))
            if self.verbose:
                print("setting optimizer:", sopt)
            self.optimizer.zero_grad()
            self.last_lr = lr

    def reset_metrics(self, current_size=-1, current_count=0):
        """Reset loss/metric accumulators for a new training or testing pass."""
        self.current_size = current_size
        self.current_count = current_count
        self.losses = MovingAverage()
        self.mobjects = [m() for m in self.metrics]
        self.current_start = time.time()

    def report_recent_short(self):
        """One-line compact progress string (percentage, ETA, loss)."""
        if self.current_size == 0:
            return "..."
        now = time.time()
        delta = now - self.current_start
        total_time = float(self.current_size) / self.current_count * delta
        remaining_time = total_time - delta
        progress = self.progress_prefix
        progress += " {:3.0f}%".format(
            100.0 * float(self.current_count) / self.current_size)
        if self.current_count >= self.current_size:
            progress += " /{} ".format(
                astime(total_time))
        else:
            progress += " {} ".format(
                astime(remaining_time))
        progress += " loss {:6.4f}".format(
            self.losses.recent())
        # Only the first metric fits in the short form.
        for m in self.mobjects[:1]:
            progress += " {} {:6.4f}".format(m.name()[:3], m.recent())
        return progress

    def report_recent(self):
        """Full progress string: counts, times, loss, and all metrics."""
        if self.current_size == 0:
            return "..."
        now = time.time()
        delta = now - self.current_start
        total_time = float(self.current_size) / self.current_count * delta
        remaining_time = total_time - delta
        progress = self.progress_prefix
        progress += "{:8d} / {:8d}".format(self.current_count, self.current_size)
        progress += " time {} / {}".format(astime(remaining_time), astime(total_time))
        progress += " {:3.0f}%".format(
            100.0 * float(self.current_count) / self.current_size)
        progress += " loss {:9.5f} [{:6d}]".format(self.losses.recent(), len(self.losses))
        for m in self.mobjects:
            progress += " {} {:9.5f}".format(m.name()[:3], m.recent())
        return progress

    def report_metrics(self):
        """Summary of the overall (not windowed) loss and metric values."""
        report = "loss {:6.4f}".format(self.losses.value())
        for m in self.mobjects:
            report += " {} {:6.4f}".format(m.name()[:3], m.value())
        return report

    def default_after_batch(self):
        if self.current_count == 0: return
        progress = self.report_recent()
        print(progress + "\r", end="", flush=True)

    def default_after_epoch(self):
        print()

    def train_on_batch(self, x, target, sample_weight=None, class_weight=None):
        """One optimization step on a single (x, target) batch.

        Updates the loss/metric accumulators, the sample counters, and
        (when record_last) caches the batch and prediction on the CPU.
        """
        assert sample_weight is None
        assert class_weight is None
        self.model.train()
        x, target = apply1(self.preprocess, (x, target))
        if x is None: return
        batch_size = len(x)
        assert batch_size > 0
        assert len(x) == len(target)
        x = apply1(self.encode_input, x)
        target = apply1(self.encode_target, target)
        pred = self.model(x.to(self.device))
        assert len(pred) == batch_size
        target = target.to(self.device)
        loss = self.criterion(pred, target)
        if self.record_last:
            self.last_x = x.to("cpu")
            self.last_y = target.to("cpu")
            self.last_pred = pred.to("cpu")
            self.last_loss = getvalue(loss)
        self.losses.add(getvalue(loss), batch_size)
        for m in self.mobjects:
            m.add(pred, target)
        self.optimizer.zero_grad()
        loss.backward()
        self.optimizer.step()
        self.count += len(x)
        self.model.META["ntrain"] += len(x)

    def predict_on_batch(self, x):
        """Run the model on a batch in eval mode and decode the prediction."""
        self.model.eval()
        # BUG FIX: previously referenced an undefined global `device`.
        pred = self.model(x.to(self.device))
        if self.decode_pred:
            pred = self.decode_pred(pred)
        return pred

    def test_on_batch(self, x, target, sample_weight=None):
        """Evaluate one batch without optimizing; accumulate loss/metrics."""
        assert sample_weight is None
        x, target = apply1(self.preprocess, (x, target))
        batch_size = len(x)
        x = apply1(self.encode_input, x)
        target = apply1(self.encode_target, target)
        target = target.to(self.device)
        self.model.eval()
        pred = self.model(x.to(self.device))
        loss = self.criterion(pred, target)
        self.losses.add(getvalue(loss), batch_size)
        for m in self.mobjects:
            m.add(pred, target)

    def fit_for(self,
                generator,
                samples_per_epoch,
                epochs=1,
                verbose=1,
                callbacks=None,
                class_weight=None,
                stop_if=None):
        """Train until samples_per_epoch * epochs samples have been consumed.

        The generator is restarted as often as needed; after each batch the
        after_batch hook fires (every report_every batches), and stop_if(self)
        may terminate early. On exit, training stats are appended to META/logs.
        """
        assert callbacks is None
        assert class_weight is None
        self.model.eval()
        self.reset_metrics(samples_per_epoch * epochs)
        batch_count = 0
        try:
            while True:
                try:
                    for (x, target) in generator:
                        self.train_on_batch(x, target)
                        self.current_count += len(x)
                        if self.after_batch is not None and batch_count % self.report_every == 0:
                            self.after_batch()
                        if stop_if is not None:
                            if stop_if(self):
                                return
                        if self.current_count >= self.current_size:
                            return
                        batch_count += 1
                finally:
                    if self.after_epoch is not None:
                        self.after_epoch()
        finally:
            self.model.META["training_info"] = get_info()
            self.model.META["training_loss"] = self.losses.value()
            self.model.META["training_err"] = self.get_metrics()
            self.logs.append(copy.copy(self.model.META))

    def test_for(self,
                 generator,
                 epoch_size=None,
                 callbacks=None,
                 class_weight=None,
                 verbose=0):
        """Evaluate up to epoch_size samples; record testing stats in META.

        If the last log entry was written at the same ntrain count, it is
        replaced so the testing numbers land in the same record.
        """
        assert callbacks is None
        assert class_weight is None
        self.model.eval()
        self.reset_metrics()
        count = 0
        try:
            for (x, target) in generator:
                self.test_on_batch(x, target)
                count += len(target)
                if epoch_size is not None and count >= epoch_size:
                    break
        finally:
            if len(self.logs) > 0 and self.logs[-1]["ntrain"] == self.model.META["ntrain"]:
                del self.logs[-1]
            self.model.META["testing_info"] = get_info()
            self.model.META["testing_loss"] = self.losses.value()
            self.model.META["testing_err"] = self.get_metrics()
            self.logs.append(copy.copy(self.model.META))
        return self.report_metrics()

    def get_metrics(self):
        """List of (name, overall value) for every active metric."""
        return [(m.name(), m.value()) for m in self.mobjects]

    def metric_names(self):
        return [m.name() for m in self.mobjects]

    def get_metric(self, name):
        """Return the first active metric object with the given name."""
        return next(m for m in self.mobjects if m.name() == name)
def find_save(savename, epochs):
    """Return (epoch, path) of the highest-numbered existing checkpoint
    matching the savename template, or (-1, None) if none exist."""
    found = (-1, None)
    for epoch in range(epochs):
        candidate = savename.format(epoch=epoch)
        if os.path.exists(candidate):
            found = (epoch, candidate)
    return found
def find_lr(rates, n):
    """Return the learning rate from the last (threshold, rate) entry
    whose threshold is <= n. The schedule must start at threshold 0."""
    assert rates[0][0] == 0
    selected = None
    for threshold, rate in rates:
        if n >= threshold:
            selected = rate
    return selected
def save_cpu(fname, model, device=None, LOGS=None):
    """Serialize *model* to *fname* from the CPU, optionally attaching LOGS,
    then move the model back to *device*."""
    cpu_model = model.cpu()
    if LOGS is not None:
        cpu_model.LOGS = LOGS
    torch.save(cpu_model, fname)
    cpu_model.to(device)
def training(trainer,
             training,
             training_size,
             testing,
             testing_size,
             epochs=100,
             save_every=None,
             save_best=None,
             savename="model-{epoch:04d}.pyd",
             learning_rates=[(0, 0.9e-3)],
             oneline=False,
             restart=True,
             verbose=False):
    """Drive a full train/eval loop with checkpointing and LR scheduling.

    Resumes from the newest checkpoint matching `savename` when restart is
    true; each epoch trains `training_size` samples, evaluates on
    `testing_size` samples, optionally checkpoints every `save_every` epochs,
    and keeps the best model (per compare_metas) at `save_best`.
    NOTE(review): `learning_rates=[(0, 0.9e-3)]` is a mutable default; it is
    only read here, so this is benign, but worth confirming.
    """
    epoch = -1
    last_eval = "(none)"
    # Mutable cell so dline can remember the previous line length (closure).
    last_len = [0]
    def dline(progress):
        # Overwrite the previous status line in place (carriage return).
        #progress = "{"+progress+"}"
        delta = max(3, last_len[0] - len(progress))
        print(progress + " "*delta + "\r", end="", flush=True)
        last_len[0] = len(progress)
    def after_batch():
        progress = "{:4d} test {} ::: train {} ".format(
            epoch,
            last_eval,
            trainer.report_recent_short())
        dline(progress)
    def after_epoch():
        if not oneline:
            print()
    trainer.after_batch = after_batch
    trainer.after_epoch = after_epoch
    trainer.progress_prefix = ""
    trainer.verbose = not oneline
    # Resume from the newest on-disk checkpoint, if any.
    start_epoch, name = find_save(savename, epochs)
    if restart and start_epoch >= 0:
        print("\nloading", name)
        model = torch.load(name)
        trainer.set_model(model)
    else:
        start_epoch = 0
    best_meta = None
    for epoch in range(start_epoch, epochs):
        ntrain = trainer.model.META["ntrain"]
        # learning_rates may be a schedule list or a callable of ntrain.
        if callable(learning_rates):
            lr = learning_rates(ntrain)
        else:
            lr = find_lr(learning_rates, ntrain)
        trainer.set_lr(lr)
        trainer.fit_for(training, training_size)
        last_eval = trainer.test_for(testing,testing_size)
        # Periodic checkpoint; refuses to clobber an existing file.
        if save_every is not None and (epoch+1) % save_every == 0:
            fname = savename.format(epoch=epoch)
            assert not os.path.exists(fname), fname
            last_eval += " [saved]"
            save_cpu(fname, trainer.model, device=trainer.device, LOGS=trainer.logs)
        # Track and persist the best model seen so far.
        if compare_metas(best_meta, trainer.model.META) > 0:
            best_meta = copy.copy(trainer.model.META)
            if save_best is not None:
                save_cpu(save_best, trainer.model, device=trainer.device, LOGS=trainer.logs)
                if "saved" not in last_eval: last_eval += " [saved]"
| StarcoderdataPython |
# Reads a count line (consumed but unused) and a string, then reports which
# of 'e' or '2' occurs more often ("yee" on a tie).
_ = input()
word = input()
lower_e = word.count('e')
digit_two = word.count('2')
if lower_e == digit_two:
    print("yee")
elif lower_e > digit_two:
    print("e")
else:
    print("2")
256200 | <reponame>KDE/craft-blueprints-kde
import info
from CraftConfig import *
from CraftOS.osutils import OsUtils
class subinfo( info.infoclass ):
    """Craft blueprint metadata for FFmpeg Thumbnailer."""

    def setTargets( self ):
        # Use the version scheme shared by the KDE release blueprints.
        self.versionInfo.setDefaultValues()
        self.displayName = "FFmpeg Thumbnailer"
        self.description = "FFmpeg-based thumbnail creator for video files"

    def setDependencies( self ):
        # Build-time: ECM for the CMake machinery; runtime: Qt, ffmpeg,
        # taglib and the KDE frameworks the thumbnailer links against.
        self.runtimeDependencies["virtual/base"] = None
        self.buildDependencies["kde/frameworks/extra-cmake-modules"] = None
        self.runtimeDependencies["libs/qt5/qtbase"] = None
        self.runtimeDependencies["libs/ffmpeg"] = None
        self.runtimeDependencies["libs/taglib"] = None
        self.runtimeDependencies["kde/frameworks/tier1/ki18n"] = None
        self.runtimeDependencies["kde/frameworks/tier1/kconfig"] = None
        self.runtimeDependencies["kde/frameworks/tier3/kio"] = None
from Package.CMakePackageBase import *
class Package(CMakePackageBase):
    """Standard CMake-driven build; no options beyond the base defaults."""

    def __init__(self):
        super().__init__()
| StarcoderdataPython |
3235652 | <gh_stars>1-10
# Generated by Django 3.0.3 on 2020-02-19 12:44
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated Django migration: adds created/modified audit fields to
    ProposalEvaluation, tightens its board_decision choices, and introduces
    the CallEvaluation model."""

    dependencies = [
        ('project_core', '0104_delete_colour'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('evaluation', '0004_create_proposal_evaluation'),
    ]

    operations = [
        # Audit fields on ProposalEvaluation (who/when created and modified).
        migrations.AddField(
            model_name='proposalevaluation',
            name='created_by',
            field=models.ForeignKey(blank=True, help_text='User by which the entry was created', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='evaluation_proposalevaluation_created_by_related', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='proposalevaluation',
            name='created_on',
            # One-off default (timezone.now) only to backfill existing rows.
            field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now, help_text='Date and time at which the entry was created'),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='proposalevaluation',
            name='modified_by',
            field=models.ForeignKey(blank=True, help_text='User by which the entry was modified', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='evaluation_proposalevaluation_modified_by_related', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='proposalevaluation',
            name='modified_on',
            field=models.DateTimeField(auto_now=True, help_text='Date and time at which the entry was modified', null=True),
        ),
        migrations.AlterField(
            model_name='proposalevaluation',
            name='board_decision',
            field=models.CharField(choices=[('Fund', 'Fund'), ('NotFund', 'Not Fund')], max_length=7),
        ),
        # New one-to-one evaluation record per Call, with the same audit fields.
        migrations.CreateModel(
            name='CallEvaluation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_on', models.DateTimeField(auto_now_add=True, help_text='Date and time at which the entry was created')),
                ('modified_on', models.DateTimeField(auto_now=True, help_text='Date and time at which the entry was modified', null=True)),
                ('panel_date', models.DateField()),
                ('call', models.OneToOneField(on_delete=django.db.models.deletion.PROTECT, to='project_core.Call')),
                ('created_by', models.ForeignKey(blank=True, help_text='User by which the entry was created', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='evaluation_callevaluation_created_by_related', to=settings.AUTH_USER_MODEL)),
                ('modified_by', models.ForeignKey(blank=True, help_text='User by which the entry was modified', null=True, on_delete=django.db.models.deletion.PROTECT, related_name='evaluation_callevaluation_modified_by_related', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| StarcoderdataPython |
1717001 | <reponame>ijustlovemath/openaps
from unittest import TestCase
from openaps.glucose.display import Display
class DisplayTestCase(TestCase):
    """Verify that Display.display rounds glucose values to the precision
    conventional for each unit (one decimal for mmol/L, whole mg/dL)."""

    def test_display_mmol_l(self):
        cases = [
            (5.490000, 5.5),
            (5.500001, 5.5),
            (5.510000, 5.5),
            (5.590000, 5.6),
        ]
        for raw, expected in cases:
            self.assertEqual(Display.display('mmol/L', raw), expected)

    def test_display_mg_dl(self):
        cases = [
            (147.078, 147),
            (268.236, 268),
            (605.970, 606),
            (623.268, 623),
        ]
        for raw, expected in cases:
            self.assertEqual(Display.display('mg/dL', raw), expected)
| StarcoderdataPython |
4823427 | <reponame>jkblume/cosmic-ray
"""This is the command-line program for cosmic ray.
Here we manage command-line parsing and launching of the internal
machinery that does mutation testing.
"""
import itertools
import json
import logging
import os
import pprint
import sys
import docopt_subcommands as dsc
import transducer.eager
from transducer.functional import compose
import transducer.lazy
from transducer.transducers import filtering, mapping
import cosmic_ray.commands
import cosmic_ray.counting
import cosmic_ray.modules
import cosmic_ray.worker
from cosmic_ray.testing.test_runner import TestOutcome
from cosmic_ray.timing import Timer
from cosmic_ray.util import redirect_stdout
from cosmic_ray.work_db import use_db, WorkDB
LOG = logging.getLogger()

# Transducer steps that normalize config-file lines:
REMOVE_COMMENTS = mapping(lambda x: x.split('#')[0])  # drop trailing '#' comments
REMOVE_WHITESPACE = mapping(str.strip)                # trim surrounding whitespace
NON_EMPTY = filtering(bool)                           # discard lines left empty
# Combined pipeline applied to each raw line of a command config file.
CONFIG_FILE_PARSER = compose(REMOVE_COMMENTS,
                             REMOVE_WHITESPACE,
                             NON_EMPTY)
def _load_file(config_file):
    """Lazily yield the meaningful lines of *config_file*.

    Each line is stripped of '#' comments and surrounding whitespace;
    lines left empty are skipped.
    """
    with open(config_file, 'rt', encoding='utf-8') as handle:
        yield from transducer.lazy.transduce(CONFIG_FILE_PARSER, handle)
@dsc.command('load')
def handle_load(config):
    """usage: cosmic-ray load <config-file>

Load a command configuration from <config-file> and run it.

A "command configuration" is simply a command-line invocation for cosmic-ray,
where each token of the command is on a separate line.
"""
    # Re-enter main() with the tokens read from the config file as argv.
    config_path = config['<config-file>']
    return main(argv=list(_load_file(config_path)))
@dsc.command('baseline')
def handle_baseline(configuration):
    """usage: cosmic-ray baseline [options] <top-module> [-- <test-args> ...]

Run an un-mutated baseline of <top-module> using the tests in <test-dir>.
This is largely like running a "worker" process, with the difference
that a baseline run doesn't mutate the code.

options:
  --no-local-import   Allow importing module from the current directory
  --test-runner=R     Test-runner plugin to use [default: unittest]
"""
    sys.path.insert(0, '')
    runner = cosmic_ray.plugins.get_test_runner(
        configuration['--test-runner'],
        configuration['<test-args>']
    )
    record = runner()
    # Test-runner outcomes are phrased for mutant runs: SURVIVED means the
    # suite ran without error. With no mutation applied, that is exactly
    # what a healthy baseline looks like, so anything else is a failure.
    if record.test_outcome != TestOutcome.SURVIVED:
        LOG.error('baseline failed')
        print(''.join(record.data))
        sys.exit(2)
def _get_db_name(session_name):
if session_name.endswith('.json'):
return session_name
else:
return '{}.json'.format(session_name)
@dsc.command('init')
def handle_init(configuration):
    """usage: cosmic-ray init [options] [--exclude-modules=P ...] (--timeout=T | --baseline=M) <session-name> <top-module> [-- <test-args> ...]

Initialize a mutation testing run. The primarily creates a database of "work to
be done" which describes all of the mutations and test runs that need to be
executed for a full mutation testing run. The testing run will mutate
<top-module> (and submodules) using the tests in <test-dir>. This doesn't
actually run any tests. Instead, it scans the modules-under-test and simply
generates the work order which can be executed with other commands.

The session-name argument identifies the run you're creating. Its most
important role is that it's used to name the database file.

options:
  --no-local-import   Allow importing module from the current directory
  --test-runner=R     Test-runner plugin to use [default: unittest]
  --exclude-modules=P Pattern of module names to exclude from mutation
"""
    # This lets us import modules from the current directory. Should probably
    # be optional, and needs to also be applied to workers!
    sys.path.insert(0, '')
    if configuration['--timeout'] is not None:
        timeout = float(configuration['--timeout'])
    else:
        # --baseline=M: time one un-mutated run and allow M times that long
        # per mutant before declaring a timeout.
        baseline_mult = float(configuration['--baseline'])
        assert baseline_mult is not None
        with Timer() as t:
            handle_baseline(configuration)
        timeout = baseline_mult * t.elapsed.total_seconds()
    LOG.info('timeout = %f seconds', timeout)
    modules = set(
        cosmic_ray.modules.find_modules(
            cosmic_ray.modules.fixup_module_name(configuration['<top-module>']),
            configuration['--exclude-modules']))
    LOG.info('Modules discovered: %s', [m.__name__ for m in modules])
    db_name = _get_db_name(configuration['<session-name>'])
    # Record the full work order (mutations x tests) in the session database.
    with use_db(db_name) as db:
        cosmic_ray.commands.init(
            modules,
            db,
            configuration['--test-runner'],
            configuration['<test-args>'],
            timeout)
@dsc.command('exec')
def handle_exec(configuration):
    """usage: cosmic-ray exec [--dist] <session-name>

Perform the remaining work to be done in the specified session. This requires
that the rest of your mutation testing infrastructure (e.g. worker processes)
are already running.

options:
  --dist    Distribute tests to remote workers
"""
    session_db = _get_db_name(configuration['<session-name>'])
    distribute = configuration['--dist']
    with use_db(session_db, mode=WorkDB.Mode.open) as db:
        cosmic_ray.commands.execute(db, distribute)
@dsc.command('run')
def handle_run(configuration):
    """usage: cosmic-ray run [options] [--dist] [--exclude-modules=P ...] (--timeout=T | --baseline=M) <session-name> <top-module> [-- <test-args> ...]

This simply runs the "init" command followed by the "exec" command.

It's important to remember that "init" clears the session database, including
any results you may have already received. So DO NOT USE THIS COMMAND TO
CONTINUE EXECUTION OF AN INTERRUPTED SESSION! If you do this, you will lose any
existing results.

options:
  --no-local-import   Allow importing module from the current directory
  --test-runner=R     Test-runner plugin to use [default: unittest]
  --exclude-modules=P Pattern of module names to exclude from mutation
"""
    for step in (handle_init, handle_exec):
        step(configuration)
@dsc.command('report')
def handle_report(configuration):
    """usage: cosmic-ray report [--full-report] [--show-pending] <session-name>

Print a nicely formatted report of test results and some basic statistics.

options:
  --full-report   Show test output and mutation diff for killed mutants
"""
    session_db = _get_db_name(configuration['<session-name>'])
    pending = configuration['--show-pending']
    full = configuration['--full-report']
    with use_db(session_db, WorkDB.Mode.open) as db:
        for line in cosmic_ray.commands.create_report(db, pending, full):
            print(line)
@dsc.command('survival-rate')
def handle_survival_rate(configuration):
    """usage: cosmic-ray survival-rate <session-name>

Print the session's survival rate.
"""
    session_db = _get_db_name(configuration['<session-name>'])
    with use_db(session_db, WorkDB.Mode.open) as db:
        survival = cosmic_ray.commands.survival_rate(db)
        print('{:.2f}'.format(survival))
@dsc.command('counts')
def handle_counts(configuration):
    """usage: cosmic-ray counts [options] [--exclude-modules=P ...] <top-module>

Count the number of tests that would be run for a given testing configuration.
This is mostly useful for estimating run times and keeping track of testing
statistics.

options:
  --no-local-import   Allow importing module from the current directory
  --test-runner=R     Test-runner plugin to use [default: unittest]
  --exclude-modules=P Pattern of module names to exclude from mutation
"""
    sys.path.insert(0, '')
    discovered = cosmic_ray.modules.find_modules(
        cosmic_ray.modules.fixup_module_name(configuration['<top-module>']),
        configuration['--exclude-modules'])
    operator_list = cosmic_ray.plugins.operator_names()
    tally = cosmic_ray.counting.count_mutants(discovered, operator_list)
    print('[Counts]')
    pprint.pprint(tally)
    total = sum(itertools.chain(*(per_op.values() for per_op in tally.values())))
    print('\n[Total test runs]\n', total)
@dsc.command('test-runners')
def handle_test_runners(config):
    """usage: cosmic-ray test-runners

List the available test-runner plugins.
"""
    for runner_name in cosmic_ray.plugins.test_runner_names():
        print(runner_name)
    return 0
@dsc.command('operators')
def handle_operators(config):
    """usage: cosmic-ray operators

List the available operator plugins.
"""
    for operator_name in cosmic_ray.plugins.operator_names():
        print(operator_name)
    return 0
@dsc.command('worker')
def handle_worker(config):
    """usage: cosmic-ray worker [options] <module> <operator> <occurrence> <test-runner> [-- <test-args> ...]

Run a worker process which performs a single mutation and test run. Each
worker does a minimal, isolated chunk of work: it mutates the <occurence>-th
instance of <operator> in <module>, runs the test suite defined by
<test-runner> and <test-args>, prints the results, and exits.

Normally you won't run this directly. Rather, it will be launched by celery
worker tasks.

options:
  --no-local-import   Disallow importing module from the current directory
  --keep-stdout       Do not squelch stdout
"""
    if not config['--no-local-import']:
        sys.path.insert(0, '')
    mutation_operator = cosmic_ray.plugins.get_operator(config['<operator>'])
    runner = cosmic_ray.plugins.get_test_runner(
        config['<test-runner>'],
        config['<test-args>'])
    # Silence test-suite chatter unless --keep-stdout was given.
    with open(os.devnull, 'w') as devnull, \
            redirect_stdout(sys.stdout if config['--keep-stdout'] else devnull):
        record = cosmic_ray.worker.worker(
            config['<module>'],
            mutation_operator,
            int(config['<occurrence>']),
            runner)
    # The JSON result goes to the real stdout, outside the redirection.
    sys.stdout.write(json.dumps(record))
DOC_TEMPLATE = """{program}
Usage: {program} [options] <command> [<args> ...]
Options:
-h --help Show this screen.
-v --verbose Use verbose logging
Available commands:
{available_commands}
See '{program} help <command>' for help on specific commands.
"""
def common_option_handler(config):
    """Apply options shared by every subcommand before it is dispatched."""
    verbose = config['--verbose']
    if verbose:
        logging.basicConfig(level=logging.INFO)
def main(argv=None):
    """Entry point: hand control to the docopt_subcommands dispatcher."""
    dispatch_kwargs = {
        'argv': argv,
        'doc_template': DOC_TEMPLATE,
        'common_option_handler': common_option_handler,
    }
    dsc.main('cosmic-ray', 'cosmic-ray v.2', **dispatch_kwargs)
# Allow invoking the CLI directly as a script.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
4808248 | <gh_stars>0
#!python
# This script is used to rename mp4 files by adding the datetime-hour-minute
# to the filename. This is necessary because some camera manufacturers
# do not add a timestamp and simply name files in sequence (001, 002, ...)
# so you can end up with multiple identically named files when the sequence restarts
import os
from datetime import datetime as dt
import re
import logging
def rename_mp4_timestamp(dir):
    """Walk *dir* and log a timestamped new name for every ``.mp4`` file.

    Appends ``_YYYYMMDD_HHMM`` (from the file's mtime) to the base name so
    sequentially numbered camera files (001, 002, ...) become unique.
    Files already carrying such a suffix are skipped. The actual
    ``os.rename`` is intentionally commented out (dry run); only log output
    is produced.
    """
    # Matches the trailing 13 chars of an already-stamped name:
    # "YYYYMMDD_HHMM" (the leading underscore is outside file[-13:]).
    # Compiled once instead of once per file.
    stamp_re = re.compile(r"(^[0-9]{8})([_])([0-9]{4}$)")
    for root, dirs, files in os.walk(dir):
        for name in files:
            logging.info(f'processing {name}')
            full_name = os.path.abspath(os.path.join(root, name))
            file = full_name[:-4]
            file_type = full_name[-4:].lower()
            # file_type is already lowercased above — no second .lower() needed.
            if file_type == '.mp4':
                mtime = os.path.getmtime(full_name)
                suffix = dt.fromtimestamp(mtime).strftime('_%Y%m%d_%H%M')
                if not stamp_re.match(file[-13:]):
                    new_name = file + suffix + file_type
                    logging.info(new_name)
                    # os.rename(full_name, new_name)  # intentionally disabled (dry run)
                else:
                    logging.info(f'already renamed as {name}')
            else:
                logging.info(f'{name} is not an mp4')
if __name__ == '__main__':
    # Console logging; the `encoding` kwarg to basicConfig requires
    # Python 3.9+ (it only applies when `filename` is also given).
    logging.basicConfig(
        # filename='hdo_upload.log',
        encoding='utf-8',
        format='%(asctime)s %(levelname)-8s %(message)s',
        level=logging.INFO,
        datefmt='%Y-%m-%d %H:%M:%S')
    # NOTE(review): relative path — assumes the script is run from the
    # directory containing `project/`; confirm against deployment layout.
    dir = 'project/temp/downloads'
    rename_mp4_timestamp(dir)
| StarcoderdataPython |
8118101 | # Copyright Contributors to the Packit project.
# SPDX-License-Identifier: MIT
import functools
from typing import Dict, Type, Optional, Set, Iterable
from ogr.abstract import GitService, GitProject
from ogr.exceptions import OgrException
from ogr.parsing import parse_git_repo
# Registry populated by @use_for_service below: maps a service URL substring
# (e.g. "github.com") to the GitService subclass that handles it.
_SERVICE_MAPPING: Dict[str, Type[GitService]] = {}
def use_for_service(service: str, _func=None):
    """
    Class decorator that adds the class to the service mapping.
    When the project url contains the `service` as a substring,
    this implementation will be used to initialize the project.
    When using this decorator, be sure that your class is initialized.
    (Add the import to `ogr/__init__.py`)
    Usage:
    ```py
    @use_for_service("github.com")
    class GithubService(BaseGitService):
        pass
    @use_for_service("pagure.io")
    @use_for_service("src.fedoraproject.org")
    class PagureService(BaseGitService):
        pass
    ```
    Args:
        service: URL of the service.
    Returns:
        Decorator.
    """
    def decorator_cover(func):
        # NOTE(review): `_func` is always None here, so functools.wraps(None)
        # copies essentially nothing; the extra indirection only serves to
        # return `covered_func`, which does the actual registration.
        @functools.wraps(func)
        def covered_func(kls: Type[GitService]):
            # Register the decorated class under the given URL substring.
            _SERVICE_MAPPING[service] = kls
            return kls
        return covered_func
    return decorator_cover(_func)
def get_project(
    url,
    service_mapping_update: Dict[str, Type[GitService]] = None,
    custom_instances: Iterable[GitService] = None,
    force_custom_instance: bool = True,
    **kwargs,
) -> GitProject:
    """
    Return the project for the given URL.
    Args:
        url: URL of the project, e.g. `"https://github.com/packit/ogr"`.
        service_mapping_update: Custom mapping from
            service url (`str`) to service class.
            Defaults to no mapping.
        custom_instances: List of instances that will be
            used when creating a project instance.
            Defaults to `None`.
        force_custom_instance: Force picking a Git service from the
            `custom_instances` list, if there is any provided, raise an error if
            that is not possible.
            Defaults to `True`.
        **kwargs: Arguments forwarded to __init__ of the matching service.
    Returns:
        `GitProject` using the matching implementation.
    """
    # Extend the lookup mapping with the classes of any custom instances so
    # get_service_class can match them by their instance URL as well.
    mapping = service_mapping_update.copy() if service_mapping_update else {}
    custom_instances = custom_instances or []
    for instance in custom_instances:
        mapping[instance.instance_url] = instance.__class__
    kls = get_service_class(url=url, service_mapping_update=mapping)
    parsed_repo_url = parse_git_repo(url)
    service = None
    if custom_instances:
        # Prefer a provided instance whose class and hostname both match.
        for service_inst in custom_instances:
            if (
                isinstance(service_inst, kls)
                and service_inst.hostname == parsed_repo_url.hostname
            ):
                service = service_inst
                break
        else:
            # for/else: this branch runs only when no instance matched above.
            if force_custom_instance:
                raise OgrException(
                    f"Instance of type {kls.__name__} "
                    f"matching instance url '{url}' was not provided."
                )
    if not service:
        # Fall back to constructing a fresh service from the parsed URL.
        service = kls(instance_url=parsed_repo_url.get_instance_url(), **kwargs)
    return service.get_project_from_url(url=url)
def get_service_class_or_none(
    url: str, service_mapping_update: Dict[str, Type[GitService]] = None
) -> Optional[Type[GitService]]:
    """Resolve *url* to a registered service class, if any.

    The global ``_SERVICE_MAPPING`` is consulted first and then overlaid with
    ``service_mapping_update``; the first mapping key that occurs as a
    substring of *url* wins (insertion order).

    Args:
        url: URL of the project, e.g. ``"https://github.com/packit/ogr"``.
        service_mapping_update: Extra/overriding ``url-substring -> class``
            entries. Defaults to ``None``.

    Returns:
        Matching ``GitService`` subclass, or ``None`` when nothing matches.
    """
    mapping = dict(_SERVICE_MAPPING)
    mapping.update(service_mapping_update or {})
    return next(
        (kls for substring, kls in mapping.items() if substring in url),
        None,
    )
def get_service_class(
    url: str, service_mapping_update: Dict[str, Type[GitService]] = None
) -> Type[GitService]:
    """Resolve *url* to a registered service class, raising when unknown.

    Args:
        url: URL of the project, e.g. ``"https://github.com/packit/ogr"``.
        service_mapping_update: Extra/overriding ``url-substring -> class``
            entries. Defaults to ``None``.

    Returns:
        Matching ``GitService`` subclass.

    Raises:
        OgrException: When no registered service matches *url*.
    """
    matched = get_service_class_or_none(
        url=url, service_mapping_update=service_mapping_update
    )
    if matched is None:
        raise OgrException("No matching service was found.")
    return matched
def get_instances_from_dict(instances: Dict) -> Set[GitService]:
    """
    Load the service instances from the dictionary in the following form:
    - `key` : hostname, url or name that can be mapped to the service-type
    - `value` : dictionary with arguments used when creating a new instance of the
        service (passed to the `__init__` method)
    e.g.:
    ```py
    get_instances_from_dict({
        "github.com": {"token": "abcd"},
        "pagure": {
            "token": "abcd",
            "instance_url": "https://src.fedoraproject.org",
        },
    }) == {
        GithubService(token="abcd"),
        PagureService(token="abcd", instance_url="https://src.fedoraproject.org")
    }
    ```
    When the mapping `key->service-type` is not recognised, you can add a `type`
    key to the dictionary and specify the type of the instance.
    (It can be either name, hostname or url. The used mapping is same as for
    key->service-type.) The provided `key` is used as an `instance_url` and
    passed to the `__init__` method as well.
    e.g.:
    ```py
    get_instances_from_dict({
        "https://my.gtlb": {"token": "abcd", "type": "gitlab"},
    }) == {GitlabService(token="abcd", instance_url="https://my.gtlb")}
    ```
    Args:
        instances: Mapping from service name/url/hostname to attributes for the
            service creation.
    Returns:
        Set of the service instances.
    Raises:
        OgrException: When a key cannot be mapped to a service class and no
            (or an unknown) `type` attribute is provided.
    """
    services = set()
    for key, value in instances.items():
        # Work on a copy so the caller's nested dicts are never mutated
        # (previously `del value["type"]` / `setdefault` altered caller data).
        kwargs = dict(value)
        service_kls = get_service_class_or_none(url=key)
        if not service_kls:
            if "type" not in kwargs:
                raise OgrException(
                    f"No matching service was found for url '{key}'. "
                    f"Add the service name as a `type` attribute."
                )
            service_type = kwargs.pop("type")
            if service_type not in _SERVICE_MAPPING:
                raise OgrException(
                    f"No matching service was found for type '{service_type}'."
                )
            service_kls = _SERVICE_MAPPING[service_type]
            # The key doubles as the instance URL unless one was given.
            kwargs.setdefault("instance_url", key)
        services.add(service_kls(**kwargs))
    return services
| StarcoderdataPython |
3414303 | from experiments.research.par_v1 import crnn_data, crnn_script, mevm_with_crnn
from experiments.research.par_v1 import grieggs
| StarcoderdataPython |
8198244 | from dist import *
from local import *
from messages import *
| StarcoderdataPython |
11259785 | <filename>keras_models/models/pretrained/vgg16_places365.py
# -*- coding: utf-8 -*-
'''VGG16-places365 model for Keras
This code is forked from: https://github.com/GKalliatakis/Keras-VGG16-places365/blob/master/vgg16_places_365.py
To be re-wrote in Keras way in future.
# Reference:
- [Places: A 10 million Image Database for Scene Recognition](http://places2.csail.mit.edu/PAMI_places.pdf)
'''
import os
import warnings
import numpy as np
from PIL import Image
from cv2 import resize
from pathlib import Path
from keras import backend as K
from keras.layers import Input
from keras.layers.core import Activation, Dense, Flatten
from keras.layers.pooling import MaxPooling2D
from keras.models import Model
from keras.layers import Conv2D
from keras.regularizers import l2
from keras.layers.core import Dropout
from keras.layers import GlobalAveragePooling2D
from keras.layers import GlobalMaxPooling2D
from keras_applications.imagenet_utils import _obtain_input_shape
from keras.engine.topology import get_source_inputs
from keras.utils.data_utils import get_file
from keras.utils import layer_utils
from keras.preprocessing import image
from keras.applications.imagenet_utils import preprocess_input
# GitHub release asset with the pre-trained Places365 weights
# (TensorFlow dimension ordering / TF kernels).
WEIGHTS_PATH = 'https://github.com/Marcnuth/Keras-Models/releases/download/v0.0.5/vgg16-places365_weights_tf_dim_ordering_tf_kernels.h5'
CLASS_LABELS = [
'airfield', 'airplane_cabin', 'airport_terminal', 'alcove', 'alley', 'amphitheater', 'amusement_arcade', 'amusement_park', 'apartment_building/outdoor',
'aquarium', 'aqueduct', 'arcade', 'arch', 'archaelogical_excavation', 'archive', 'arena/hockey', 'arena/performance', 'arena/rodeo', 'army_base',
'art_gallery', 'art_school', 'art_studio', 'artists_loft', 'assembly_line', 'athletic_field/outdoor', 'atrium/public', 'attic', 'auditorium',
'auto_factory', 'auto_showroom', 'badlands', 'bakery/shop', 'balcony/exterior', 'balcony/interior', 'ball_pit', 'ballroom', 'bamboo_forest', 'bank_vault',
'banquet_hall', 'bar', 'barn', 'barndoor', 'baseball_field', 'basement', 'basketball_court/indoor', 'bathroom', 'bazaar/indoor', 'bazaar/outdoor', 'beach',
'beach_house', 'beauty_salon', 'bedchamber', 'bedroom', 'beer_garden', 'beer_hall', 'berth', 'biology_laboratory', 'boardwalk', 'boat_deck', 'boathouse',
'bookstore', 'booth/indoor', 'botanical_garden', 'bow_window/indoor', 'bowling_alley', 'boxing_ring', 'bridge', 'building_facade', 'bullring',
'burial_chamber', 'bus_interior', 'bus_station/indoor', 'butchers_shop', 'butte', 'cabin/outdoor', 'cafeteria', 'campsite', 'campus', 'canal/natural',
'canal/urban', 'candy_store', 'canyon', 'car_interior', 'carrousel', 'castle', 'catacomb', 'cemetery', 'chalet', 'chemistry_lab', 'childs_room',
'church/indoor', 'church/outdoor', 'classroom', 'clean_room', 'cliff', 'closet', 'clothing_store', 'coast', 'cockpit', 'coffee_shop', 'computer_room',
'conference_center', 'conference_room', 'construction_site', 'corn_field', 'corral', 'corridor', 'cottage', 'courthouse', 'courtyard', 'creek', 'crevasse',
'crosswalk', 'dam', 'delicatessen', 'department_store', 'desert/sand', 'desert/vegetation', 'desert_road', 'diner/outdoor', 'dining_hall', 'dining_room',
'discotheque', 'doorway/outdoor', 'dorm_room', 'downtown', 'dressing_room', 'driveway', 'drugstore', 'elevator/door', 'elevator_lobby', 'elevator_shaft',
'embassy', 'engine_room', 'entrance_hall', 'escalator/indoor', 'excavation', 'fabric_store', 'farm', 'fastfood_restaurant', 'field/cultivated', 'field/wild',
'field_road', 'fire_escape', 'fire_station', 'fishpond', 'flea_market/indoor', 'florist_shop/indoor', 'food_court', 'football_field', 'forest/broadleaf',
'forest_path', 'forest_road', 'formal_garden', 'fountain', 'galley', 'garage/indoor', 'garage/outdoor', 'gas_station', 'gazebo/exterior',
'general_store/indoor', 'general_store/outdoor', 'gift_shop', 'glacier', 'golf_course', 'greenhouse/indoor', 'greenhouse/outdoor', 'grotto',
'gymnasium/indoor', 'hangar/indoor', 'hangar/outdoor', 'harbor', 'hardware_store', 'hayfield', 'heliport', 'highway', 'home_office', 'home_theater',
'hospital', 'hospital_room', 'hot_spring', 'hotel/outdoor', 'hotel_room', 'house', 'hunting_lodge/outdoor', 'ice_cream_parlor', 'ice_floe', 'ice_shelf',
'ice_skating_rink/indoor', 'ice_skating_rink/outdoor', 'iceberg', 'igloo', 'industrial_area', 'inn/outdoor', 'islet', 'jacuzzi/indoor', 'jail_cell',
'japanese_garden', 'jewelry_shop', 'junkyard', 'kasbah', 'kennel/outdoor', 'kindergarden_classroom', 'kitchen', 'lagoon', 'lake/natural', 'landfill',
'landing_deck', 'laundromat', 'lawn', 'lecture_room', 'legislative_chamber', 'library/indoor', 'library/outdoor', 'lighthouse', 'living_room', 'loading_dock',
'lobby', 'lock_chamber', 'locker_room', 'mansion', 'manufactured_home', 'market/indoor', 'market/outdoor', 'marsh', 'martial_arts_gym', 'mausoleum', 'medina',
'mezzanine', 'moat/water', 'mosque/outdoor', 'motel', 'mountain', 'mountain_path', 'mountain_snowy', 'movie_theater/indoor', 'museum/indoor', 'museum/outdoor',
'music_studio', 'natural_history_museum', 'nursery', 'nursing_home', 'oast_house', 'ocean', 'office', 'office_building', 'office_cubicles', 'oilrig',
'operating_room', 'orchard', 'orchestra_pit', 'pagoda', 'palace', 'pantry', 'park', 'parking_garage/indoor', 'parking_garage/outdoor', 'parking_lot',
'pasture', 'patio', 'pavilion', 'pet_shop', 'pharmacy', 'phone_booth', 'physics_laboratory', 'picnic_area', 'pier', 'pizzeria', 'playground', 'playroom',
'plaza', 'pond', 'porch', 'promenade', 'pub/indoor', 'racecourse', 'raceway', 'raft', 'railroad_track', 'rainforest', 'reception', 'recreation_room',
'repair_shop', 'residential_neighborhood', 'restaurant', 'restaurant_kitchen', 'restaurant_patio', 'rice_paddy', 'river', 'rock_arch', 'roof_garden',
'rope_bridge', 'ruin', 'runway', 'sandbox', 'sauna', 'schoolhouse', 'science_museum', 'server_room', 'shed', 'shoe_shop', 'shopfront', 'shopping_mall/indoor',
'shower', 'ski_resort', 'ski_slope', 'sky', 'skyscraper', 'slum', 'snowfield', 'soccer_field', 'stable', 'stadium/baseball', 'stadium/football',
'stadium/soccer', 'stage/indoor', 'stage/outdoor', 'staircase', 'storage_room', 'street', 'subway_station/platform', 'supermarket', 'sushi_bar', 'swamp',
'swimming_hole', 'swimming_pool/indoor', 'swimming_pool/outdoor', 'synagogue/outdoor', 'television_room', 'television_studio', 'temple/asia', 'throne_room',
'ticket_booth', 'topiary_garden', 'tower', 'toyshop', 'train_interior', 'train_station/platform', 'tree_farm', 'tree_house', 'trench', 'tundra',
'underwater/ocean_deep', 'utility_room', 'valley', 'vegetable_garden', 'veterinarians_office', 'viaduct', 'village', 'vineyard', 'volcano',
'volleyball_court/outdoor', 'waiting_room', 'water_park', 'water_tower', 'waterfall', 'watering_hole', 'wave', 'wet_bar', 'wheat_field', 'wind_farm',
'windmill', 'yard', 'youth_hostel', 'zen_garden']
def VGG16_Places365(model_file=None):
    """Instantiates the VGG16-places365 architecture.
    Optionally loads weights pre-trained on Places. Note that when using TensorFlow,
    for best performance you should set `image_data_format="channels_last"` in your Keras config at ~/.keras/keras.json.
    The model and the weights are compatible with both TensorFlow and Theano.
    The data format convention used by the model is the one specified in your Keras config file.
    # Arguments
        model_file: optional path to a local weights file; when omitted the
            pre-trained weights are downloaded from WEIGHTS_PATH and cached.
    # Returns
    A Keras model instance.
    """
    # Determine proper input shape (defaults to 224x224 RGB)
    input_shape = _obtain_input_shape(None, default_size=224, min_size=48, data_format=K.image_data_format(), require_flatten=True)
    img_input = Input(shape=input_shape)
    # Block 1
    x = Conv2D(filters=64, kernel_size=3, strides=(1, 1), padding='same', kernel_regularizer=l2(0.0002), activation='relu', name='block1_conv1')(img_input)
    x = Conv2D(filters=64, kernel_size=3, strides=(1, 1), padding='same', kernel_regularizer=l2(0.0002), activation='relu', name='block1_conv2')(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2, 2), name="block1_pool", padding='valid')(x)
    # Block 2
    x = Conv2D(filters=128, kernel_size=3, strides=(1, 1), padding='same', kernel_regularizer=l2(0.0002), activation='relu', name='block2_conv1')(x)
    x = Conv2D(filters=128, kernel_size=3, strides=(1, 1), padding='same', kernel_regularizer=l2(0.0002), activation='relu', name='block2_conv2')(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2, 2), name="block2_pool", padding='valid')(x)
    # Block 3
    x = Conv2D(filters=256, kernel_size=3, strides=(1, 1), padding='same', kernel_regularizer=l2(0.0002), activation='relu', name='block3_conv1')(x)
    x = Conv2D(filters=256, kernel_size=3, strides=(1, 1), padding='same', kernel_regularizer=l2(0.0002), activation='relu', name='block3_conv2')(x)
    x = Conv2D(filters=256, kernel_size=3, strides=(1, 1), padding='same', kernel_regularizer=l2(0.0002), activation='relu', name='block3_conv3')(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2, 2), name="block3_pool", padding='valid')(x)
    # Block 4
    x = Conv2D(filters=512, kernel_size=3, strides=(1, 1), padding='same', kernel_regularizer=l2(0.0002), activation='relu', name='block4_conv1')(x)
    x = Conv2D(filters=512, kernel_size=3, strides=(1, 1), padding='same', kernel_regularizer=l2(0.0002), activation='relu', name='block4_conv2')(x)
    x = Conv2D(filters=512, kernel_size=3, strides=(1, 1), padding='same', kernel_regularizer=l2(0.0002), activation='relu', name='block4_conv3')(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2, 2), name="block4_pool", padding='valid')(x)
    # Block 5
    x = Conv2D(filters=512, kernel_size=3, strides=(1, 1), padding='same', kernel_regularizer=l2(0.0002), activation='relu', name='block5_conv1')(x)
    x = Conv2D(filters=512, kernel_size=3, strides=(1, 1), padding='same', kernel_regularizer=l2(0.0002), activation='relu', name='block5_conv2')(x)
    x = Conv2D(filters=512, kernel_size=3, strides=(1, 1), padding='same', kernel_regularizer=l2(0.0002), activation='relu', name='block5_conv3')(x)
    x = MaxPooling2D(pool_size=(2, 2), strides=(2, 2), name="block5_pool", padding='valid')(x)
    # Classification block: two FC layers + 365-way softmax over place classes
    x = Flatten(name='flatten')(x)
    x = Dense(4096, activation='relu', name='fc1')(x)
    x = Dropout(0.5, name='drop_fc1')(x)
    x = Dense(4096, activation='relu', name='fc2')(x)
    x = Dropout(0.5, name='drop_fc2')(x)
    x = Dense(365, activation='softmax', name="predictions")(x)
    # Create model.
    model = Model(img_input, x, name='vgg16-places365')
    # load weights: either a caller-supplied file or the cached download
    if model_file:
        model.load_weights(Path(model_file).absolute().as_posix())
    else:
        weights_path = get_file('vgg16-places365_weights_tf_dim_ordering_tf_kernels.h5', WEIGHTS_PATH, cache_subdir='models')
        model.load_weights(weights_path)
    # The downloaded weights use TF kernel ordering; convert for Theano.
    if K.backend() == 'theano':
        layer_utils.convert_all_kernels_in_model(model)
    # Re-map the first dense layer's weights when the channel axis comes first.
    if K.image_data_format() == 'channels_first':
        maxpool = model.get_layer(name='block5_pool')
        shape = maxpool.output_shape[1:]
        dense = model.get_layer(name='fc1')
        layer_utils.convert_dense_weights_data_format(dense, shape, 'channels_first')
        if K.backend() == 'tensorflow':
            warnings.warn(
                'You are using the TensorFlow backend, yet you are using the Theano image data format convention (`image_data_format="channels_first"`). '
                'For best performance, set `image_data_format="channels_last"` in your Keras config at ~/.keras/keras.json.'
            )
    return model
def predict(image_files, n_top=5, model=None):
    """Classify image files and return the top-n place labels for each.

    Each file is loaded, resized to the 224x224 network input, pushed through
    the (optionally caller-supplied) VGG16-Places365 model, and the `n_top`
    highest-scoring CLASS_LABELS entries are collected per image.
    """
    if not model:
        model = VGG16_Places365()
    assert isinstance(image_files, list), 'image_files should be a list'
    assert len(image_files) > 0, 'image_files should not be empty'

    def _load(path):
        # Read, resize to the network's input size, and add a batch axis.
        raw = np.array(Image.open(path), dtype=np.uint8)
        return np.expand_dims(resize(raw, (224, 224)), 0)

    results = []
    for f in image_files:
        batch = _load(Path(f).absolute().as_posix())
        scores = model.predict(batch)[0]
        top_indices = np.argsort(scores)[::-1][0:n_top]
        results.append([CLASS_LABELS[v] for v in top_indices])
    return results
| StarcoderdataPython |
5165913 | from flask import render_template, url_for, redirect, request, flash
from unolocum.amzn_form import UrlForm
from unolocum import app
from bs4 import BeautifulSoup
import requests
from unolocum.sql import cur, conn
from mysql.connector.errors import DataError
from datetime import datetime
# global variables
# amzn
amzn_redirects = 0  # counts hits on /amzn; prices refresh only on the first
change_in_price = 'none'  # passed to the template; never reassigned afterwards
amzn_table_data = []  # cached display rows [id, name, price_str]
amzn_table_headings = ('#', 'Product', 'Current Price')
# NOTE(review): this query's result is never fetched here; AmazonTableData()
# (defined below) re-runs the same SELECT and fills amzn_table_data.
cur.execute("SELECT id, name, price FROM URL")
print(amzn_table_data)  # prints [] at import time — debug leftover
# nsinfo
hemis = "northern"  # current hemisphere selection for /nightsky
ns_table_data = []  # rebuilt by NSTableData() on every /nightsky request
ns_table_headings = ('#', 'Name', 'Direction', 'Image')
current_month = datetime.now().strftime("%B")  # e.g. "January" (local time)
#------------------------------------------amazon functions-----------------------------------------------
def AmazonTableData():
    """Reload the cached Amazon price table from the database.

    Rebuilds the module-level ``amzn_table_data`` as mutable
    ``[id, name, price_str]`` rows so the template (and the price-change
    markers added by ``update_prices``) can modify entries in place.
    """
    global amzn_table_data
    cur.execute("USE unolocum")
    cur.execute("SELECT id, name, price FROM URL")
    amzn_table_data = [
        [row_id, product_name, str(price)]
        for row_id, product_name, price in cur.fetchall()
    ]
AmazonTableData()
def productinfo(url): # gets info of the product from the url
    """Scrape an Amazon product page.

    Returns ``[url, product_name, price_float]``. Tries the regular price
    element first and falls back to the deal-price element.

    Raises:
        ValueError: when no numeric price can be extracted from the page.
    """
    import re  # local import: used only for price parsing below
    headers = { 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36' }
    page = requests.get(url, headers = headers)
    soup = BeautifulSoup(page.content, 'html.parser')
    pname = soup.find(id="productTitle").get_text()
    pname = pname.strip() #product name
    print(url)
    try:
        p_price = soup.find(id="priceblock_ourprice").get_text() # price in string form
    except AttributeError:
        p_price = soup.find(id="priceblock_dealprice").get_text() # price in string form
    # The old code stripped a hard-coded 2-character currency prefix
    # (p_price[2:]), which breaks for other locales/symbols. Extract the
    # first numeric token instead.
    match = re.search(r'\d[\d.]*', p_price.replace(',', ''))
    if match is None:
        raise ValueError(f'could not parse price from {p_price!r}')
    c_p_price = float(match.group()) # price in float form
    return [url, pname, c_p_price]
def update_prices(): # updates prices
    """Re-scrape every tracked URL; sync DB prices and the display cache.

    Appends a ' ▲'/' ▼' marker to the cached display price whenever the
    scraped price rose/fell versus the stored one.
    """
    global change_in_price
    AmazonTableData()
    cur.execute("USE unolocum")
    cur.execute("SELECT url from URL ORDER BY id")
    urls = cur.fetchall()
    print(urls)
    # url_count indexes amzn_table_data in parallel: both this query and
    # AmazonTableData() select rows in the same (id) order, so positions
    # line up. NOTE(review): f-string SQL below — safe only while `url`
    # values come from our own table; confirm they are never user-typed.
    url_count = 0
    for url in urls:
        print(url_count)
        url = url[0]
        product_info = productinfo(url)
        c_p_price = product_info[2] # current price
        cur.execute(f"SELECT price FROM URL where url='{url}'")
        old_price = cur.fetchone()[0]
        print(old_price)
        print(type(old_price), type(c_p_price))
        if c_p_price != old_price:
            print(c_p_price)
            if c_p_price > old_price:
                print("increasing")
                amzn_table_data[url_count][2] = str(c_p_price) + ' ▲'
                cur.execute(f"UPDATE URL SET price={c_p_price} WHERE url='{url}'")
            elif c_p_price < old_price:
                print("decreasing")
                amzn_table_data[url_count][2] = str(c_p_price) + ' ▼'
                cur.execute(f"UPDATE URL SET price={c_p_price} WHERE url='{url}'")
        conn.commit()
        url_count += 1
#---------------------------------------- nsinfo functions----------------------------------------------
def NSTableData(hemis):
    """Refresh the module-level night-sky table for the given hemisphere.

    Queries the per-hemisphere table for the current month and rebuilds the
    global ``ns_table_data`` as ``zip((row_no, name, direction), image_url)``
    pairs for the template.

    Raises:
        ValueError: when *hemis* is not a known hemisphere table.
    """
    global ns_table_data
    # SECURITY: `hemis` is interpolated into the query as a table name and
    # arrives from request.form in the /nightsky route. Table names cannot
    # be bound as query parameters, so whitelist the value to block SQL
    # injection.
    if hemis not in ('northern', 'southern'):
        raise ValueError(f"invalid hemisphere: {hemis!r}")
    ns_table_images = []
    cur.execute('USE unolocum_space_objects')
    cur.execute(f"SELECT Name, Direction from {hemis} WHERE Month='{current_month}'")
    ns_table_data = cur.fetchall()
    for i in range(len(ns_table_data)):
        # Prepend a 1-based row number; ns_table_data[i][1] is then Name.
        ns_table_data[i] = (i+1,) + ns_table_data[i]
        ns_table_images.append(f"http://www.allthesky.com/constellations/big/{ns_table_data[i][1].lower()}28vm-b.jpg")
    print(ns_table_data)
    print(ns_table_images)
    # zip object: single-use iterator, consumed once by the next render.
    ns_table_data = zip(ns_table_data, ns_table_images)
NSTableData(hemis)
# -------------------------------------------routes----------------------------------------------------
@app.route('/')
@app.route('/home')
def home():
    """Render the landing page."""
    return render_template('home.htm')
@app.route('/about')
def about():
    """Render the about page."""
    return render_template('about.htm', title = 'About')
@app.route('/services')
def services():
    """Render the services overview page."""
    return render_template('services.htm', title = 'Services')
@app.route('/test')
def test():
    """Render the scratch/test page."""
    return render_template('test.htm', title = 'test')
@app.route("/callback")
def callback():
global authorization_url
authorization_url = (request.url)
returnf()
return render_template('home.htm')
def returnf():
    """Copy the global into an unused local.

    NOTE(review): effectively dead code — `tempvar` is never used or
    returned; consider removing once callers are confirmed.
    """
    tempvar = authorization_url
@app.route('/amzn', methods=['GET', 'POST'])
def amzn():
    """Amazon price-tracking page: add a URL (POST) or view tracked prices."""
    global amzn_redirects
    amzn_redirects += 1
    # Refresh prices only on the very first request after startup; later
    # requests reuse the cached table until a new URL is submitted.
    if amzn_redirects <= 1:
        update_prices()
    form = UrlForm()
    url = form.url.data
    if form.validate_on_submit():
        product_info = productinfo(url)
        url = product_info[0]
        pname = product_info[1]
        c_p_price = product_info[2]
        # NOTE(review): f-string SQL with scraped values — vulnerable to SQL
        # injection via the product name/URL; should use parameterized
        # queries (cur.execute(..., (url, pname, price))).
        try:
            cur.execute(f"INSERT INTO URL (url, name, price) VALUES('{url}', '{pname}', {c_p_price})")
        except DataError:
            # Name too long for the column: truncate and retry.
            pname = pname[ :97] + '...'
            print(pname)
            cur.execute(f"INSERT INTO URL (url, name, price) VALUES('{url}', '{pname}', {c_p_price})")
        conn.commit()
        flash(f'Added.', 'success')
        update_prices()
        # Redirect after POST to avoid duplicate submits on refresh.
        return redirect('/amzn')
    print(amzn_table_data)
    return render_template('amzn.htm', title='Amazon Tracking', form=form, amzn_table_headings=amzn_table_headings, amzn_table_data=amzn_table_data, change_in_price=change_in_price)
@app.route('/nightsky', methods = ['GET', 'POST'])
def nightsky():
    """Render the night-sky page; POST switches the hemisphere selection.

    NOTE(review): `hemis` comes straight from the form and is interpolated
    into SQL as a table name inside NSTableData — it must be validated there.
    """
    global hemis
    if request.method == 'POST':
        hemis = request.form['hemis']
        print(hemis)
    # Rebuilds the module-level ns_table_data read by the template below.
    NSTableData(hemis)
    return render_template('nightsky.htm', title='Night Sky Info', hemis=hemis, ns_table_data=ns_table_data, ns_table_headings=ns_table_headings)
| StarcoderdataPython |
3318596 | import numpy as np
import os, logging
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
logging.getLogger("tensorflow").setLevel(logging.CRITICAL)
logging.getLogger("tensorflow_hub").setLevel(logging.CRITICAL)
import keras as K
import tensorflow as tf
import copy
import warnings
class batchless_VanillaLSTM_keras(object):
    """ Vanilla LSTM implementation using keras.

    Trains on a single sequence with batch size 1 ("batchless"). With
    ``stateful=True`` the LSTM state is carried across fits and reset
    manually between passes over the lag-offset training groups.
    """
    def __init__(self, num_layers=2, cells_per_layer=50, dropout=0.5, seed=None, stateful=True, lag=5):
        """
        Initialise and build the model
        """
        self.num_layers = num_layers
        self.cells_per_layer = cells_per_layer
        self.dropout = dropout
        self.seed = seed
        self.stateful = stateful
        self.lag = lag
        if seed != None:
            #tf.compat.v1.set_random_seed(seed)
            #session_conf = tf.compat.v1.ConfigProto(intra_op_parallelism_threads=1, inter_op_parallelism_threads=1)
            #sess = tf.compat.v1.Session(graph=tf.compat.v1.get_default_graph(), config=session_conf)
            #k.set_session(sess)
            np.random.seed(seed)
    def build(self, sequence, debug=False):
        """
        Build model.

        `sequence` is a numpy array: 1-D for a scalar series, 2-D (rows are
        one-hot vectors) for symbolic series.
        """
        self.sequence = sequence
        # Sequence either list of lists or a list.
        if sequence.ndim != 1:
            self.features = len(sequence[0])
        else:
            self.features = 1
        # Reshape to (batch=1, timesteps, features) as keras expects.
        self.sequence = self.sequence.reshape(1, -1, self.features)
        self.model = build_Keras_LSTM(self.num_layers, self.cells_per_layer, self.lag, self.features, self.stateful, self.seed, self.dropout)
        # One-hot symbolic input -> cross-entropy; scalar series -> MSE.
        if self.features != 1:
            self.model.compile(loss='categorical_crossentropy', optimizer=K.optimizers.Adam())
        else:
            self.model.compile(loss='mse', optimizer=K.optimizers.Adam())
    def construct_training_index(self, debug=False):
        """
        Construct training index (compatible with model) from sequence of vectors of dimension d.

        For the stateful model the windows are split into `lag` groups of
        lag-strided start positions so state can be reset between groups.
        """
        n = self.sequence.shape[1]
        self.index = []
        if self.stateful:
            # Create groups
            self.num_augs = min(self.lag, n - self.lag)
            for el in range(self.num_augs):
                self.index.append(np.arange(el, n - self.lag, self.lag))
        else:
            self.num_augs = 1
            self.index = np.arange(0, n - self.lag, 1)
    def train(self, patience=100, max_epoch=100000, acceptable_loss=np.inf, batch_size = 1, weight_restarts=False, debug=False):
        """
        Train the model on the constructed training data.

        Early-stops after `patience` epochs without improvement once the best
        loss is below `acceptable_loss`; the best weights are restored.
        """
        ########################################################################
        # Weight restarts: try several random initialisations for one epoch
        # and keep the one with the lowest initial loss.
        ########################################################################
        if weight_restarts:
            weight_restarts = 10
            store_weights = [0]*weight_restarts
            initial_loss = [0]*weight_restarts
            for i in range(weight_restarts):
                if self.stateful:
                    h = self.model.fit(self.sequence[:, 0:self.lag, :], self.sequence[:, self.lag, :], epochs=1, batch_size=1, verbose=0, shuffle=False)
                    initial_loss[i] = (h.history['loss'])[-1]
                    self.model.reset_states()
                    store_weights[i] = self.model.get_weights()
                    # quick hack to reinitialise weights
                    json_string = self.model.to_json()
                    # FIX: was bare `model_from_json` (NameError) — use the
                    # same fully-qualified call as the stateless branch.
                    self.model = K.models.model_from_json(json_string)
                    if self.features != 1:
                        self.model.compile(loss='categorical_crossentropy', optimizer=K.optimizers.Adam())
                    else:
                        self.model.compile(loss='mse', optimizer=K.optimizers.Adam())
                else:
                    h = self.model.fit(self.sequence[:, 0:self.lag, :], self.sequence[:, self.lag, :], epochs=1, batch_size=1, verbose=0, shuffle=False) # no shuffling to remove randomness
                    initial_loss[i] = (h.history['loss'])[-1]
                    store_weights[i] = self.model.get_weights()
                    self.model.reset_states()
                    # quick hack to reinitialise weights
                    json_string = self.model.to_json()
                    self.model = K.models.model_from_json(json_string)
                    # FIX: was `isinstance(self.abba, ABBA)` with undefined
                    # `self.abba`/`ABBA`/`Adam` (copy-paste from another
                    # variant of this class) — mirror build()'s logic.
                    if self.features != 1:
                        self.model.compile(loss='categorical_crossentropy', optimizer=K.optimizers.Adam())
                    else:
                        self.model.compile(loss='mse', optimizer=K.optimizers.Adam())
            if debug:
                print('Initial loss:', initial_loss)
            m = np.argmin(initial_loss)
            self.model.set_weights(store_weights[int(m)])
            del store_weights
        ########################################################################
        # Train with manual early stopping on the mean per-epoch loss.
        ########################################################################
        vec_loss = np.zeros(max_epoch)
        min_loss = np.inf
        min_loss_ind = np.inf
        losses = [0]*self.num_augs
        if self.stateful: # no shuffle and reset state manually
            for iter in range(max_epoch):
                rint = np.random.permutation(self.num_augs)
                for r in rint:
                    loss_sum = 0
                    for i in self.index[r]:
                        h = self.model.fit(self.sequence[:, i:i+self.lag, :], self.sequence[:, i+self.lag, :], epochs=1, batch_size=1, verbose=0, shuffle=False)
                        loss_sum += ((h.history['loss'])[-1])**2
                    losses[r] = loss_sum/len(self.index[r])
                    self.model.reset_states()
                vec_loss[iter] = np.mean(losses)
                if vec_loss[iter] >= min_loss:
                    if iter - min_loss_ind >= patience and min_loss < acceptable_loss:
                        break
                else:
                    min_loss = vec_loss[iter]
                    old_weights = self.model.get_weights()
                    min_loss_ind = iter
        else: # shuffle in fit
            for iter in range(max_epoch):
                loss_sum = 0
                for i in np.random.permutation(len(self.index)):
                    h = self.model.fit(self.sequence[:, i:i+self.lag, :], self.sequence[:, i+self.lag, :], epochs=1, batch_size=1, verbose=0, shuffle=True)
                    self.model.reset_states()
                    loss_sum += ((h.history['loss'])[-1])**2
                vec_loss[iter] = loss_sum/len(self.index)
                if vec_loss[iter] >= min_loss:
                    if iter - min_loss_ind >= patience and min_loss < acceptable_loss:
                        break
                else:
                    # FIX: was `(h.history['loss'])[-1]` (last mini-fit loss),
                    # inconsistent with the epoch-mean compared against above
                    # and with the stateful branch.
                    min_loss = vec_loss[iter]
                    old_weights = self.model.get_weights()
                    min_loss_ind = iter
        self.model.reset_states()
        # Restore the best weights seen during training.
        self.model.set_weights(old_weights)
        self.epoch = iter+1
        self.loss = vec_loss[0:iter+1]
    def forecast(self, k, randomize=False, debug=False):
        """
        Make k step forecast into the future.

        Returns the full sequence (history + forecast); with `randomize=True`
        symbolic outputs are sampled from the softmax instead of argmax'd.
        """
        prediction = copy.deepcopy(self.sequence)
        # Recursively make k one-step forecasts
        for ind in range(self.sequence.shape[1], self.sequence.shape[1] + k):
            # Build data to feed into model
            if self.stateful:
                # Replay all lag-strided windows so the state is warmed up.
                index = np.arange(ind%self.lag, ind, self.lag)
            else:
                index = [ind - self.lag]
            # Feed through model
            for i in index:
                p = self.model.predict(prediction[:, i:i+self.lag, :], batch_size = 1)
            # Convert output
            if self.features != 1:
                if randomize:
                    idx = np.random.choice(range(self.features), p=(p.ravel()))
                else:
                    idx = np.argmax(p.ravel())
                # Add forecast result to appropriate vectors.
                pred = np.zeros([1, 1, self.features])
                pred[0, 0, idx] = 1
            else:
                pred = np.array(float(p)).reshape([1, -1, 1])
            prediction = np.concatenate([prediction, pred], axis=1)
        # reset states in case stateless
        self.model.reset_states()
        if self.features != 1:
            return prediction.reshape(-1, self.features)
        else:
            return prediction.reshape(-1)
################################################################################
################################################################################
################################################################################
def build_Keras_LSTM(num_layers, cells_per_layer, lag, features, stateful, seed, dropout):
    """
    Build a stacked Keras LSTM model for sequence forecasting.

    :param num_layers: number of stacked LSTM layers
    :param cells_per_layer: number of LSTM units in each layer
    :param lag: input window length (timesteps per sample)
    :param features: dimensionality of each timestep; a softmax head is
        appended when features != 1 (categorical output)
    :param stateful: whether the LSTM layers keep state between batches
    :param seed: if truthy, seed all weight initializers and dropout layers
        for reproducibility
    :param dropout: dropout rate applied after every LSTM layer
    :returns: an uncompiled ``K.models.Sequential`` model

    Fixes over the previous version:
    - The Dense initializer was the unqualified name ``glorot_uniform`` with a
      hard-coded ``seed=0``; it now uses ``K.initializers.glorot_uniform(seed=seed)``
      consistently with every LSTM layer.
    - The unseeded middle-layer branch passed ``dropout``/``recurrent_dropout``
      inside the LSTM *in addition to* the trailing Dropout layer, unlike every
      other branch; all layers now use the same trailing-Dropout scheme.
    """
    model = K.models.Sequential()
    for index in range(num_layers):
        lstm_kwargs = {
            'stateful': stateful,
            'recurrent_activation': 'tanh',
            # Only the final layer collapses the sequence to a single vector.
            'return_sequences': index != num_layers - 1,
        }
        if index == 0:
            # First layer must declare the (batch=1, lag, features) input shape.
            lstm_kwargs['batch_input_shape'] = (1, lag, features)
        if seed:
            lstm_kwargs['kernel_initializer'] = K.initializers.glorot_uniform(seed=seed)
            lstm_kwargs['recurrent_initializer'] = K.initializers.Orthogonal(seed=seed)
        model.add(K.layers.LSTM(cells_per_layer, **lstm_kwargs))
        if seed:
            model.add(K.layers.Dropout(dropout, seed=seed))
        else:
            model.add(K.layers.Dropout(dropout))
    if seed:
        model.add(K.layers.Dense(features, kernel_initializer=K.initializers.glorot_uniform(seed=seed)))
    else:
        model.add(K.layers.Dense(features))
    if features != 1:
        # Categorical output: normalize logits into a probability distribution.
        model.add(K.layers.Activation('softmax'))
    return model
| StarcoderdataPython |
6630134 | # os.system("bash command") 运行shell命令,直接显示
# os.popen("bash command).read() 运行shell命令,获取执行结果
# os.getcwd() 获取当前工作目录,即当前python脚本工作的目录路径
# os.chdir("dirname") 改变当前脚本工作目录;相当于shell下cd
import os
# 统计文件的大小
# os.path.getsize('路径') # python的命令
# dir 路径 \C # 操作系统的命令
# 帮助你显示当前路径下的所有文件和文件夹
# os.system('dir 路径') # 使用python语言直接执行操作系统的命令
# os.listdir('路径') # 使用python语言的os模块提供的方法 间接调用了操作系统命令
# 学习python的人
# web开发
# 运维开发 : 运维功底 熟悉操作系统命令
# exec('字符串数据类型的python代码')
# eval('执行字符串数据类型的python代码')
# os.system('执行字符串数据类型的操作系统命令')
# os.popen('执行字符串数据类型的操作系统命令,并返回结果')
# getcwd # 获取当前执行命令的时候所在的目录
# chdir # 修改当前执行命令的时候所在的目录
# ret = os.listdir('D:\sylar\s15')
# print(ret)
# os.chdir('D:\sylar\s15')
# print(os.popen('dir').read())
# os模块所做的事情
# 定制了很多方法 间接的帮助你去调用操作系统的命令 获得结果
# 然后帮助你分析整理成我们需要的数据类型的形态
# 你也可以os.popen/os.system直接去调用操作系统的命令 获得结果
# 但是 分析和整理的工作需要你自己做
# 用os模块的方法本身能够完成的功能我们就用定制好的方法就够了
# 如果有一天 你发现os模块定制好的功能解决不了我们的问题了
# 而刚好操作系统的命令能够很好地帮助我们解决问题。这个时候就用os.popen/os.system | StarcoderdataPython |
9766807 | <reponame>asaf-kali/generic-iterative-stemmer<filename>generic_iterative_stemmer/training/stemming/default_stem_generator.py
from typing import Optional
import editdistance
from ...training.stemming import StemDict, StemGenerator
from ...utils import get_logger
log = get_logger(__name__)
class DefaultStemGenerator(StemGenerator):
    """
    Stem generator driven by embedding similarity.

    For a given word, candidate inflections are taken from the model's
    nearest neighbours and filtered by similarity, length, relative
    frequency, and (for non-substring matches) edit distance.
    """

    def __init__(
        self,
        k: Optional[int] = 10,
        min_cosine_similarity: Optional[float] = 0.75,
        min_cosine_similarity_for_edit_distance: Optional[float] = 0.85,
        max_len_diff: Optional[int] = 3,
        max_edit_distance: Optional[int] = 1,
    ):
        super().__init__()
        # Number of nearest neighbours inspected per word.
        self.k = k
        # Minimum similarity for any candidate inflection.
        self.min_cosine_similarity = min_cosine_similarity
        # Stricter similarity bar for candidates matched via edit distance.
        self.min_cosine_similarity_for_edit_distance = min_cosine_similarity_for_edit_distance
        self.max_len_diff = max_len_diff
        self.max_edit_distance = max_edit_distance

    def find_word_inflections(self, word: str) -> StemDict:
        """
        Find which other words in the vocabulary can be stemmed down to this word.
        """
        stem_dict = {}
        neighbours = self.model.most_similar(word, topn=self.k)
        for candidate, similarity in neighbours:
            if similarity < self.min_cosine_similarity:
                continue
            # Only strictly longer words may reduce to this word.
            if len(candidate) <= len(word):
                continue
            word_frequency = self.get_frequency(word)
            candidate_frequency = self.get_frequency(candidate)
            # A stem must be at least as frequent as its inflection.
            if word_frequency < candidate_frequency:
                continue
            if word not in candidate:
                # Not a substring match — fall back to edit-distance rules.
                if self.max_edit_distance is None:
                    continue
                if editdistance.eval(word, candidate) > self.max_edit_distance:
                    continue
                if similarity < self.min_cosine_similarity_for_edit_distance:
                    continue
            if self.max_len_diff and abs(len(word) - len(candidate)) > self.max_len_diff:
                continue
            log.debug(
                f"Reducing '{candidate}' to '{word}'",
                extra={
                    "stem": word,
                    "inflection": candidate,
                    "grade": round(similarity, 3),
                    "stem_frequency": round(word_frequency, 3),
                    "inflection_frequency": round(candidate_frequency, 3),
                },
            )
            stem_dict[candidate] = word
        return stem_dict

    def get_frequency(self, word: str) -> float:
        """
        Approximate relative frequency of *word* in (0, 1].

        # NOTE(review): assumes word indices are ordered by descending
        # frequency (index 0 = most frequent) — confirm against the model.
        """
        return 1 - self.word_to_index[word] / self.vocab_size

    @property
    def params(self) -> dict:
        """Return the generator's configuration as a plain dict."""
        return dict(
            k=self.k,
            min_cosine_similarity=self.min_cosine_similarity,
            min_cosine_similarity_for_edit_distance=self.min_cosine_similarity_for_edit_distance,
            max_len_diff=self.max_len_diff,
            max_edit_distance=self.max_edit_distance,
        )
| StarcoderdataPython |
1825953 | from .msdnet import MSDNet as msdnet
| StarcoderdataPython |
class DatasetConfig:
    """
    Static accessor for the ``dataset`` section of a parsed JSON config.

    Call :meth:`setup` once with the full configuration object; the other
    accessors then read from the cached ``dataset`` sub-object.
    """

    # JSON keys used within the 'dataset' section.
    __SECTION_KEY = 'dataset'
    __PATH_KEY = 'path'
    __ANOMALY_KEY = 'is_anomaly_column'
    __SUBSPACE_KEY = 'subspace_column'

    # Cached 'dataset' sub-object; populated by setup().
    __section = None

    def __init__(self):
        pass

    @staticmethod
    def setup(config_json_obj):
        """Cache the 'dataset' sub-object from the full configuration."""
        DatasetConfig.__section = config_json_obj[DatasetConfig.__SECTION_KEY]

    @staticmethod
    def get_dataset_path():
        """Return the configured dataset file path."""
        return DatasetConfig.__section[DatasetConfig.__PATH_KEY]

    @staticmethod
    def get_anomaly_column_name():
        """Return the name of the is-anomaly label column."""
        return DatasetConfig.__section[DatasetConfig.__ANOMALY_KEY]

    @staticmethod
    def get_subspace_column_name():
        """Return the subspace column name, or None when not configured."""
        return DatasetConfig.__section.get(DatasetConfig.__SUBSPACE_KEY)
3415961 | <filename>packages/service-library/tests/aiohttp/conftest.py<gh_stars>0
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
from pathlib import Path
from typing import Dict
import pytest
from servicelib.aiohttp.openapi import create_openapi_specs
@pytest.fixture
def petstore_spec_file(here) -> Path:
    """Return the path to the bundled petstore OpenAPI v3 spec file."""
    spec_path = here / "data/oas3/petstore.yaml"
    assert spec_path.exists()
    return spec_path
@pytest.fixture
async def petstore_specs(loop, petstore_spec_file) -> Dict:
    """Return the parsed OpenAPI specs for the petstore example."""
    return await create_openapi_specs(petstore_spec_file)
| StarcoderdataPython |
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 14 11:14:11 2019
Starting point is loading in the pickled class data, then working with it
to create subsetted trees: a list of trees that have e.g. 5 children underneath
Then can take a random pick of a person with (e.g.) 5 people beneath,
create a list of all the children under that person
And feed that into a generator of msprime input
@author: <NAME>
"""
import pickle
import random # to pick from list of poss roots of subtree
import numpy as np
from IndividualClass import Individual # might say unused, but not true, needed to read pickled file

# Load the pickled {ID -> Individual} transmission tree from disk.
file_loc = 'C:\\Users\\mazmysta\OneDrive - Nexus365\\BDI_proj\\HIV_Transmission_Networks_WillProbert\\19-08-08-first_example_network\\'
with open(file_loc + 'pickled_data_all.pickle', 'rb') as f:
    total_tree = pickle.load(f)
#proof it works
print('Pickle loaded, 1st root num kids: {}'.format(total_tree['-1_-1'].total_children_num))
#further proof
# Smoke test: dump the first five individuals and their attributes.
ct = 0
for key in total_tree:
    if ct < 5:
        print(total_tree[key])
        print(total_tree[key].__dict__)
    else:
        break
    ct += 1
'''class Individual:
    def __init__(self):
        self.ID = None
        self.infected_others = []
        self.infected_by = []
        self.time_infected_others =[]
        self.time_infected_by = []
        self.age_birth = -1
        self.age_death = -1
        self.num_partners = -1
        self.direct_children_num = 0
        self.total_children_num = 0'''
# Saving list of people with desired num kids beneath
samples_poss = []
desired_overall_children = 5
for key in total_tree:
    if total_tree[key].total_children_num == desired_overall_children:
        samples_poss.append(key)
print(len(samples_poss))
# 1222, correct for kids = 5
#pick only one random sample
random.seed(4) # repeatable choice(s) - sequence same
print(random.choice(samples_poss))
print(random.choice(samples_poss))
# Extract info for given kids
random.seed(4)
start = random.choice(samples_poss)
# NOTE(review): the random choice above is immediately overwritten by a
# hard-coded ID, presumably for reproducible debugging — confirm intent.
start= '58582_0'
#start = random.choice(samples_poss)
print(total_tree[start].infected_others)
current = start
list_of_kids = []
# Walk down the transmission chain collecting descendants of `start`.
# NOTE(review): `extend` is called once per child inside the inner loop while
# `current` is being reassigned, so individuals can be added multiple times
# when a node has more than one child — verify the intended traversal.
while len(total_tree[current].infected_others) > 0:
    print('x')
    for kid in total_tree[current].infected_others:
        print('kid is', kid)
        print('current is', current)
        list_of_kids.extend(total_tree[current].infected_others)
        current = kid
    print('list of kids:', list_of_kids)
print(list_of_kids)
# nb slower than insert, but keeps lists separate
list_of_root_and_kids = [start] + list_of_kids
# all info required for mini-transmission chain
for item in list_of_root_and_kids:
    print(item, total_tree[item].infected_others,
          total_tree[item].time_infected_others,
          total_tree[item].age_birth,
          total_tree[item].age_death)
# know how to build subtree since 'root' = start
## need to somehow convert this to msprime format
## First use times of infection to convert the values to generations:
# 1 gen per day so years*365 gens
# Find total num of generations (days) to model over
times_loop = []
for item in list_of_root_and_kids:
    #float to take as number not string, and access list element with [0]
    times_loop.append(float(total_tree[item].time_infected_by[0]))
#since floats, can do maths
# NOTE(review): total_time is computed here but never used below.
total_time = (max(times_loop)-min(times_loop))*365
#Furthest back in time is the smallest date (lowest year), do everything in diffs
gens_list = []
for item in list_of_root_and_kids:
    #float to take as number not string, and access list element with [0]
    diff_calc = 365*(max(times_loop)-float(total_tree[item].time_infected_by[0]))
    gens_list.append(diff_calc + 100) # add 100 so most recent infection is '100 gens (days) in past'
    # print(float(total_tree[item].time_infected_by[0]))
# info on dates so far
for i in range(len(gens_list)):
    print((list_of_root_and_kids)[i],
          #times_loop[i],
          gens_list[i])
## msprime
import msprime
## source population
# Very large, constant-size reservoir that the root infection is drawn from.
PopSource = msprime.PopulationConfiguration(initial_size = 1e8, growth_rate = 0)
## number of populations in present time of model, not including source pop...
final_num_pops = len(list_of_root_and_kids)
## sample_sizes for each population and effective pop sizes
sample_size = 10
infection_size = 1
stable_pop_size = 100
# ## subpops based on infected people, all subpops that want to exist at end of sim (present time) need stated here
pop_list = [PopSource]
#Setting up the end, so all pops exist and at stable pop size, no death in simulation time
for pop in range(final_num_pops):
    # print(pop)
    pop_list.append(
        msprime.PopulationConfiguration(sample_size = sample_size, initial_size = stable_pop_size, growth_rate = 0)
    )
# no migration between sources accross time, only infection events,
# so migration matrix is zeros
M = np.zeros((final_num_pops+1,final_num_pops+1))
# Now get transmission events from the data. Use index as population number, but +1 since have fake source pop at index 0.
for i in list_of_root_and_kids:
    print(list_of_root_and_kids.index(i) + 1)
####--- new version with sub-pops ---####
## a simple model where independent sub-pop is infection derived from source pop
# if infected by true pop, need to state when diverged from past pop if that's the case
# Oddly source is the destination, i.e. direction of migration is dest -> source if forwards in time view.
# backwards in time means that destination is destination (but it's where the migration has come from)
# Build one backwards-in-time MassMigration per infection event.
transfers_list = []
for entry in range(len(pop_list)):
    print(entry)
    if entry == 0: # ignore 0 since this is the source pop
        pass
    elif entry == 1: # 1 is root so needs own bit
        entry_ID = list_of_root_and_kids[entry-1]
        print(entry, entry_ID)
        dest_index = 0 #infected from source
        transfer_time = gens_list[entry-1] # time infected still stored.
        print(transfer_time)
        transfers_list.append(msprime.MassMigration(time = transfer_time, source = entry, dest = dest_index, proportion = 1))
    elif entry > 1: # 1 is root so needs own bit
        # get the index of the infected_by population
        # index of current population is its index in pop_list (index in list_of_root... + 1)
        entry_ID = list_of_root_and_kids[entry-1]
        print(entry, entry_ID)
        dest_ID = total_tree[entry_ID].infected_by[0]
        dest_index = list_of_root_and_kids.index(dest_ID) + 1
        print(dest_index, dest_ID)
        transfer_time = gens_list[entry-1]
        print(transfer_time)
        transfers_list.append(msprime.MassMigration(time = transfer_time, source = entry, dest = dest_index, proportion = 1))
#check as expected
print(transfers_list)
# compare
for i in range(len(gens_list)):
    print((list_of_root_and_kids)[i],
          #times_loop[i],
          gens_list[i])
## now have set of populations, the transfers for pops (infection events)
# still need the bottlenecks (pop growth & stabilisation)
# then can order(sort) the complete demography list by time and simulate
## Bottlenecks: add population growth in so infections from source pop are only ~ 1-5 virions, which then balloons to e.g. ~1000
#### Bottleneck list initiation and creation
Pop_bottleneck_ie_growth_list = []
for entry in range(len(pop_list)):
    if entry > 0: # ignore 0 since this is the source pop. Only need infection time for this so root doesn't need own case
        transfer_time = gens_list[entry-1]
        #infection size setting
        pop_entry_bottleneck_start = msprime.PopulationParametersChange(
            time = transfer_time, initial_size=infection_size, growth_rate=0, population = entry) #i.e. for epochs inf-> 100*entry, this is growth rate
        #growth after infection setting - trial that 20 gens in future (less time back) gives appropriate growth for these params of pop_size = 100, rate = 0.25
        pop_entry_bottleneck_end = msprime.PopulationParametersChange(
            time = transfer_time-20, growth_rate = 0.23, initial_size=stable_pop_size, population = entry) #i.e. for epochs inf-> 100*entry, this is growth rate
        #save to list for manip outside loop
        Pop_bottleneck_ie_growth_list.extend((pop_entry_bottleneck_start, pop_entry_bottleneck_end))
# put all events together then sort them
# msprime requires demographic events in (backwards) time order.
events = Pop_bottleneck_ie_growth_list + transfers_list
events_sorted = sorted(events, key=lambda x: x.time, reverse=False)
#check
for event in events_sorted:
    print(event.__dict__) # just for easier digestion of output
# Sanity-check the demographic model before running the simulation.
my_history = msprime.DemographyDebugger(
    population_configurations=pop_list, migration_matrix = M,
    demographic_events = events_sorted)
my_history.print_history()
## plot how pop changes ##
time_steps= range(1,int(np.max(gens_list))+100,2)
# print('pop0:', my_history.population_size_trajectory(time_steps)[:,0])
# print('pop1:', my_history.population_size_trajectory(time_steps)[:,1])
# print('time:', np.array(time_steps))
# plot the populations, matplotlib understands array of y's as multiple y's so don't need to call individually
import matplotlib.pyplot as plt
plt.figure(1)
#plt.plot(time_steps, my_history.population_size_trajectory(time_steps)[:,0])
#plt.plot(time_steps, my_history.population_size_trajectory(time_steps)[:,1])
#plt.rc('axes', prop_cycle=(cycler(color=['r', 'g', 'b', 'y'])))
fig, ax = plt.subplots(figsize=(15, 6), dpi=80)
# Skip the first colour so sub-population colours match the tree drawings below.
ax.set_prop_cycle(color=["green", "blue", "red", "orange", "grey", "cyan", "black"][1:])
plt.plot(time_steps, my_history.population_size_trajectory(time_steps)[:,1:], '--', alpha=0.5) # this will plot each y (pop size var.) separately
plt.xlim(np.max(time_steps),0) # switch the order of time so present (0) is RHS and past is LHS (max time step)
#plt.xlim(np.max(time_steps),1) # switch the order of time so present (0) is RHS and past is LHS (max time step)
#plt.ylim(np.log(0.5), np.log(150))
#plt.axvline(x=100, color='k', linestyle='-', alpha=0.5) # add a vertical line for migration step
#plt.legend(('1','2','3','4','5','6'),loc='best')
#box = ax.get_position()
#ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
# Put a legend to the right of the current axis
#plt.yscale("log")
#plt.xscale("log")
# NOTE(review): the legend assumes exactly six sampled individuals
# (desired_overall_children = 5 plus the root) — will IndexError otherwise.
ax.legend(('1: ' + list_of_root_and_kids[0],
           '2: ' + list_of_root_and_kids[1],
           '3: ' + list_of_root_and_kids[2],
           '4: ' + list_of_root_and_kids[3],
           '5: ' + list_of_root_and_kids[4],
           '6: ' + list_of_root_and_kids[5]),
          loc='center left', bbox_to_anchor=(1.02, 0.5))
plt.show()
# time = 0 is present, larger time is past
## simulate this extended simple model
ts2=msprime.simulate(
population_configurations=pop_list, migration_matrix = M,
demographic_events = events_sorted,
length = 100,
random_seed = 17, recombination_rate = 0.7e-4, #0.7e-4
mutation_rate = 2.5e-5, end_time=10000000000) #mutation_rate = 2.5e-5
# Print some trees, coloured by pop of each node
from IPython.display import display
from IPython.display import SVG
colour_map = {0:"green", 1:"blue", 2:"red", 3:"orange", 4:"grey", 5:"cyan", 6:"black"}
node_colours = {u.id: colour_map[u.population] for u in ts2.nodes()}
print('colour key:', colour_map)
# print first X trees
X = 1
i = 0
for tree in ts2.trees():
if i < X:
display(SVG(tree.draw(node_colours=node_colours, max_tree_height=2650, height=800, width = 1000)))
print("Tree {} covers [{:.2f}, {:.2f}); TMRCA = {:.4f}".format(
tree.index, *tree.interval, tree.time(tree.roots[0])))
print(tree.branch_length(46))
else:
break
i+=1
print('colour key:', colour_map)
i=0
for tree in ts2.trees():
i+=1
print(i)
print('colour key:', colour_map)
# print first X trees
X = 1
i = 0
for tree in ts2.trees():
if i < X:
display(SVG(tree.draw(node_colours=node_colours, max_tree_height=700, height=1000, width = 1200)))
print("Tree {} covers [{:.2f}, {:.2f}); TMRCA = {:.4f}".format(
tree.index, *tree.interval, tree.time(tree.roots[0])))
# print(tree.branch_length(46))
else:
break
i+=1
print('colour key:', colour_map)
# plotting rank so don't have to cut, but lose some time transfer info
X = 1
i = 0
for tree in ts2.trees():
if i < X:
display(SVG(tree.draw(node_colours=node_colours, tree_height_scale='rank', height=400, width = 1200)))
print("Tree {} covers [{:.2f}, {:.2f}); TMRCA = {:.4f}".format(
tree.index, *tree.interval, tree.time(tree.roots[0])))
# print(tree.branch_length(46))
else:
break
i+=1
print('colour key:', colour_map)
# print out who infected whom for tree comparison - check working etc
for entry in range(len(pop_list)):
if entry == 0: # ignore 0 since this is the source pop
print(entry, 'fake source pop')
elif entry == 1:
entry_ID = list_of_root_and_kids[entry-1]
print(entry, entry_ID, 'infected by',
'fake source pop',
'0')
elif entry > 1:
entry_ID = list_of_root_and_kids[entry-1]
print(entry, entry_ID, 'infected by',
total_tree[entry_ID].infected_by[0],
list_of_root_and_kids.index(total_tree[entry_ID].infected_by[0])+1)
| StarcoderdataPython |
8049957 | <gh_stars>0
# NEON AI (TM) SOFTWARE, Software Development Kit & Application Development System
# All trademark and other rights reserved by their respective owners
# Copyright 2008-2021 Neongecko.com Inc.
# BSD-3
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import json
import os
import itertools
from os import getenv
from typing import Optional
from os.path import join, dirname, expanduser, basename, exists
from neon_utils import LOG
from ruamel.yaml import YAML
from neon_utils.configuration_utils import dict_merge
from neon_diana_utils.rabbitmq_api import RabbitMQAPI
from neon_diana_utils.utils.docker_utils import run_clean_rabbit_mq_docker, cleanup_docker_container, \
write_docker_compose
from neon_diana_utils.utils.kompose_utils import write_kubernetes_spec, generate_secret, generate_config_map
def create_diana_configurations(admin_user: str, admin_pass: str,
                                services: set, config_path: str = None,
                                allow_bind_existing: bool = False,
                                volume_driver: str = "none",
                                volumes: Optional[dict] = None,
                                namespace: str = 'default'):
    """
    Create configuration files for Neon Diana by starting a throwaway
    RabbitMQ container, configuring it, then tearing it down.
    :param admin_user: username to configure for RabbitMQ configuration
    :param admin_pass: password associated with admin_user
    :param services: list of services to configure on this backend
    :param config_path: path to write configuration files (default=NEON_CONFIG_PATH)
    :param allow_bind_existing: bool to allow overwriting configuration for a running RabbitMQ instance
    :param volume_driver: Docker volume driver (https://docs.docker.com/storage/volumes/#use-a-volume-driver)
    :param volumes: Optional dict of volume names to directories (including hostnames for nfs volumes)
    :param namespace: k8s namespace to configure services to run in
    """
    container = run_clean_rabbit_mq_docker(allow_bind_existing)
    # Block until RabbitMQ reports that it is ready to accept connections.
    for log_line in container.logs(stream=True):
        if b"Server startup complete" in log_line:
            break
    configure_diana_backend("http://0.0.0.0:15672", admin_user, admin_pass,
                            services, config_path, volume_driver, volumes,
                            namespace)
    cleanup_docker_container(container)
def _parse_services(requested_services: set) -> dict:
    """
    Map each requested service name to its configuration from
    service_mappings.yml, warning about names with no known mapping.
    :param requested_services: set of service names requested to be configured
    :returns: mapping of service name to parameters required to configure the service
    """
    # Read configuration from templates shipped with this package.
    template_file = join(dirname(dirname(__file__)), "templates",
                         "service_mappings.yml")
    with open(template_file) as f:
        known_services = YAML().load(f)

    configured = {}
    for name in requested_services:
        if name in known_services:
            configured[name] = dict(known_services[name])

    # Warn for unknown requested services
    if set(configured.keys()) != set(requested_services):
        unhandled_services = [s for s in requested_services if s not in configured.keys()]
        LOG.warning(f"Some requested services not handled: {unhandled_services}")
    return configured
def _parse_vhosts(services_to_configure: dict) -> set:
"""
Parse MQ vhosts specified in the requested configuration
:param services_to_configure: service mapping parsed from service_mappings.yml
:returns: set of vhosts to be created
"""
return set(itertools.chain.from_iterable([service.get("mq", service).get("mq_vhosts", [])
for service in services_to_configure.values()]))
def _parse_configuration(services_to_configure: dict) -> tuple:
    """
    Split the parsed service mapping into MQ user permissions, MQ service
    auth, docker-compose sections, and kubernetes spec entries.
    :param services_to_configure: service mapping parsed from service_mappings.yml
    :returns: (user_permissions, neon_mq_auth, docker_compose_config, kubernetes_config)
    """
    user_permissions = dict()
    neon_mq_auth = dict()
    compose_sections = dict()
    k8s_entries = list()
    for name, service in services_to_configure.items():
        # Get service MQ Config
        mq_section = service.get("mq")
        if mq_section:
            dict_merge(user_permissions, mq_section.get("mq_user_permissions", dict()))
            mq_username = mq_section.get("mq_username")
            if mq_username:
                # TODO: Update MQ services such that their service names match the container names DM
                neon_mq_auth[mq_section.get("mq_service_name", name)] = {"user": mq_username}
        compose_sections[name] = service["docker_compose"]
        k8s_entries.extend(service.get("kubernetes") or list())
    return user_permissions, neon_mq_auth, compose_sections, k8s_entries
def configure_diana_backend(url: str, admin_user: str, admin_pass: str,
                            services: set, config_path: str = None,
                            volume_driver: str = "none",
                            volumes: Optional[dict] = None,
                            namespace: str = 'default'):
    """
    Configure a new Diana RabbitMQ backend
    :param url: URL of admin portal (i.e. http://0.0.0.0:15672)
    :param admin_user: username to configure for RabbitMQ configuration
    :param admin_pass: password associated with admin_user
    :param services: list of services to configure on this backend
    :param config_path: local path to write configuration files (default=NEON_CONFIG_PATH)
    :param volume_driver: Docker volume driver (https://docs.docker.com/storage/volumes/#use-a-volume-driver)
    :param volumes: Optional dict of volume names to directories (including hostnames for nfs volumes)
    :param namespace: k8s namespace to configure services to run in
    :raises RuntimeError: if vhost permissions cannot be applied for a user
    """
    api = RabbitMQAPI(url)

    # Configure Administrator (fresh brokers ship with guest/guest)
    api.login("guest", "guest")
    api.configure_admin_account(admin_user, admin_pass)

    # Parse requested services
    services_to_configure = _parse_services(services)

    # Parse Configured Service Mapping
    vhosts_to_configure = _parse_vhosts(services_to_configure)

    # Parse user and orchestrator configuration
    users_to_configure, neon_mq_user_auth,\
        docker_compose_configuration, kubernetes_configuration = \
        _parse_configuration(services_to_configure)
    LOG.debug(f"vhosts={vhosts_to_configure}")
    LOG.debug(f"users={users_to_configure}")

    # Configure vhosts
    for vhost in vhosts_to_configure:
        api.add_vhost(vhost)

    # Configure users
    credentials = api.create_default_users(list(users_to_configure.keys()))
    # NOTE(review): hard-coded credential for the utility user; consider
    # moving this secret out of source control.
    api.add_user("neon_api_utils", "Klatchat2021")

    # Configure user permissions
    for user, vhost_config in users_to_configure.items():
        for vhost, permissions in vhost_config.items():
            if not api.configure_vhost_user_permissions(vhost, user, **permissions):
                LOG.error(f"Error setting Permission! {user} {vhost}")
                # Fix: previously a bare `raise` with no active exception,
                # which surfaced as "RuntimeError: No active exception to
                # re-raise"; raise a descriptive error instead.
                raise RuntimeError(f"Failed to set permissions for user={user} on vhost={vhost}")

    # Export and save rabbitMQ Config
    rabbit_mq_config_file = join(expanduser(config_path), "rabbit_mq_config.json") if config_path else None
    write_rabbit_config(api, rabbit_mq_config_file)

    # TODO: Generate config map DM
    # Write out MQ Connector config file with the generated per-user passwords
    for service in neon_mq_user_auth.values():
        service["password"] = credentials[service["user"]]
    neon_mq_config_file = join(expanduser(config_path), "mq_config.json") if config_path else None
    write_neon_mq_config(neon_mq_user_auth, neon_mq_config_file)

    # Generate docker-compose file
    docker_compose_file = join(expanduser(config_path), "docker-compose.yml") if config_path else None
    write_docker_compose(docker_compose_configuration, docker_compose_file,
                         volume_driver, volumes)

    # Generate Kubernetes spec file
    write_kubernetes_spec(kubernetes_configuration, config_path, namespace)
def generate_config(services: set, config_path: Optional[str] = None,
                    volume_driver: str = "none", volumes: Optional[dict] = None,
                    namespace: str = 'default'):
    """
    Generate orchestrator configuration for the specified services
    :param services: list of services to configure on this backend
    :param config_path: local path to write configuration files (default=NEON_CONFIG_PATH)
    :param volume_driver: Docker volume driver (https://docs.docker.com/storage/volumes/#use-a-volume-driver)
    :param volumes: Optional dict of volume names to directories (including hostnames for nfs volumes)
    :param namespace: k8s namespace to configure
    """
    # Only the orchestrator portions of the parsed configuration are needed.
    parsed = _parse_configuration(_parse_services(services))
    docker_compose_configuration = parsed[2]
    kubernetes_configuration = parsed[3]

    # Generate docker-compose file
    if config_path:
        docker_compose_file = join(expanduser(config_path), "docker-compose.yml")
    else:
        docker_compose_file = None
    write_docker_compose(docker_compose_configuration, docker_compose_file,
                         volume_driver, volumes)

    # Generate Kubernetes spec file
    write_kubernetes_spec(kubernetes_configuration, config_path, namespace)
def write_neon_mq_config(credentials: dict, config_file: Optional[str] = None):
    """
    Write the Neon MQ connector configuration file and a matching k8s secret.
    :param credentials: mapping of MQ service name to {"user": ..., "password": ...}
    :param config_file: optional output path (default=NEON_CONFIG_PATH/mq_config.json)
    """
    if not config_file:
        config_file = join(getenv("NEON_CONFIG_PATH", "~/.config/neon"), "mq_config.json")
    config_file = expanduser(config_file)
    output_dir = dirname(config_file)
    if not exists(output_dir):
        os.makedirs(output_dir)

    mq_configuration = {"server": "neon-rabbitmq",
                        "users": credentials}
    LOG.info(f"Writing Neon MQ configuration to {config_file}")
    with open(config_file, 'w+') as out_file:
        json.dump(mq_configuration, out_file, indent=2)

    # Generate k8s secret
    generate_secret("mq-config", {"mq_config.json": json.dumps(mq_configuration)},
                    join(output_dir, "k8s_secret_mq-config.yml"))
def write_rabbit_config(api: RabbitMQAPI, config_file: Optional[str] = None):
    """
    Export the broker's definitions and write the RabbitMQ config files used
    to restore state on the next run, plus a matching k8s ConfigMap.
    :param api: logged-in RabbitMQAPI instance to export definitions from
    :param config_file: optional output path (default=NEON_CONFIG_PATH/rabbit_mq_config.json)
    """
    if not config_file:
        config_file = join(getenv("NEON_CONFIG_PATH", "~/.config/neon"),
                           "rabbit_mq_config.json")
    config_file = expanduser(config_file)
    output_dir = dirname(config_file)
    if not exists(output_dir):
        os.makedirs(output_dir)

    definitions = api.get_definitions()
    LOG.info(f"Exporting Rabbit MQ configuration to {config_file}")
    with open(config_file, "w+") as exported:
        json.dump(definitions, exported, indent=2)

    # rabbitmq.conf points the broker at the exported definitions file.
    rmq_conf_contents = f"load_definitions = /config/{basename(config_file)}"
    with open(join(output_dir, "rabbitmq.conf"), 'w+') as rabbit:
        rabbit.write(rmq_conf_contents)

    # Generate k8s config
    generate_config_map("rabbitmq", {"rabbit_mq_config.json": json.dumps(definitions),
                                     "rabbitmq.conf": rmq_conf_contents},
                        join(output_dir, "k8s_config_rabbitmq.yml"))
| StarcoderdataPython |
6579671 | <gh_stars>1-10
import unittest
from hypothesis import given
from electionguard.group import ElementModQ
from electionguard.hash import hash_elems
from tests.property.test_group import elements_mod_p, elements_mod_q
class TestHash(unittest.TestCase):
    """Property-based sanity checks for hash_elems."""

    @given(elements_mod_p(), elements_mod_q())
    def test_same_answer_twice_in_a_row(self, a: ElementModQ, b: ElementModQ):
        # Hashing must be deterministic — otherwise it is not a function.
        first_hash = hash_elems(a, b)
        second_hash = hash_elems(a, b)
        self.assertEqual(first_hash, second_hash)

    @given(elements_mod_q(), elements_mod_q())
    def test_basic_hash_properties(self, a: ElementModQ, b: ElementModQ):
        hash_a = hash_elems(a)
        hash_b = hash_elems(b)
        # Equal inputs must hash equally; distinct hashes imply distinct inputs.
        if a == b:
            self.assertEqual(hash_a, hash_b)
        if hash_a != hash_b:
            self.assertNotEqual(a, b)
| StarcoderdataPython |
4871155 | # -*- coding: utf-8 -*-
#
# Copyright 2018-2021 - Swiss Data Science Center (SDSC)
# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
# Eidgenössische Technische Hochschule Zürich (ETHZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Represent provenance graph."""
import json
from pathlib import Path
from typing import Dict, List, Optional, Union
from marshmallow import EXCLUDE
from rdflib import ConjunctiveGraph
from renku.core.management.command_builder.command import inject
from renku.core.metadata.database import Database
from renku.core.models.calamus import JsonLDSchema, Nested, schema
from renku.core.models.provenance.activity import Activity, ActivityCollection, ActivitySchema
class ProvenanceGraph:
    """A graph of all executions (Activities).

    Holds a flat list of ``Activity`` objects and can expose them either as
    plain Python objects (loaded from a metadata database or a JSON-LD file)
    or as an RDFLib ``ConjunctiveGraph`` for SPARQL querying.
    """
    def __init__(self, activities: List[Activity] = None):
        self.activities: List[Activity] = activities or []
        # Extra prefix -> namespace bindings applied to the RDF graph.
        self._custom_bindings: Dict[str, str] = {}
        # Lazily-built RDFLib graph; see _create_rdf_graph().
        self._graph: Optional[ConjunctiveGraph] = None
        self._loaded: bool = False
        # TODO: Remove _order and rely on Activity's ended_at_time and started_at_time for ordering
        self._order: int = len(self.activities) + 1
        self._path: Optional[Path] = None
    @property
    def custom_bindings(self) -> Dict[str, str]:
        """Return custom bindings."""
        return self._custom_bindings
    @custom_bindings.setter
    def custom_bindings(self, custom_bindings: Dict[str, str]):
        """Set custom prefix to namespace bindings."""
        self._custom_bindings = custom_bindings
    def add(self, node: Union[Activity, ActivityCollection]) -> None:
        """Add an Activity/ActivityCollection to the graph.

        Raises ``AssertionError`` if an activity with the same id already
        exists.  Each added activity gets the next sequential ``order``.
        """
        # Normalize a single Activity into a one-element collection.
        activity_collection = node if isinstance(node, ActivityCollection) else ActivityCollection(activities=[node])
        for activity in activity_collection.activities:
            assert not any([a for a in self.activities if a.id == activity.id]), f"Identifier exists {activity.id}"
            activity.order = self._order
            self._order += 1
            self.activities.append(activity)
        # NOTE(review): looks like a ZODB/persistent-style dirty flag — confirm
        # this class is actually tracked by the persistence layer.
        self._p_changed = True
    @classmethod
    @inject.autoparams()
    def from_database(cls, database: Database):
        """Return an instance from a metadata database."""
        activity_tree = database.get("activities")
        activities = list(activity_tree.values())
        self = ProvenanceGraph(activities=activities)
        # NOTE: If we sort then all ghost objects will be loaded which is not what we want
        # self.activities.sort(key=lambda e: e.order)
        return self
    @classmethod
    def from_json(cls, path: Union[Path, str], lazy: bool = False) -> "ProvenanceGraph":
        """Return an instance from a JSON file.

        When ``lazy`` is true (or the file is missing) an empty graph is
        returned and the file is only parsed later, on demand.
        """
        if Path(path).exists():
            if not lazy:
                with open(path) as file_:
                    data = json.load(file_)
                    self = cls.from_jsonld(data=data) if data else ProvenanceGraph(activities=[])
                    self.activities.sort(key=lambda e: e.order)
                    self._loaded = True
            else:
                self = ProvenanceGraph(activities=[])
                self._loaded = False
        else:
            self = ProvenanceGraph(activities=[])
            self._loaded = True
        self._path = Path(path)
        return self
    @classmethod
    def from_jsonld(cls, data) -> "ProvenanceGraph":
        """Create an instance from JSON-LD data.

        ``data`` must be a flattened JSON-LD list (or an existing instance,
        which is returned unchanged); anything else raises ``ValueError``.
        """
        if isinstance(data, cls):
            return data
        elif not isinstance(data, list):
            raise ValueError(data)
        self = ProvenanceGraphSchema(flattened=True).load(data)
        self._loaded = True
        return self
    def to_jsonld(self):
        """Create JSON-LD."""
        return ProvenanceGraphSchema(flattened=True).dump(self)
    def to_json(self, path=None):
        """Write an instance to file (defaults to the path it was loaded from)."""
        path = path or self._path
        data = self.to_jsonld()
        with open(path, "w", encoding="utf-8") as file_:
            json.dump(data, file_, ensure_ascii=False, sort_keys=True, indent=2)
    @property
    def rdf_graph(self):
        """Create an RDFLib ConjunctiveGraph."""
        self._create_rdf_graph()
        return self._graph
    def _create_rdf_graph(self):
        """Build and cache the RDF graph from the JSON-LD file at ``_path``."""
        if self._graph:
            return
        self._graph = ConjunctiveGraph()
        if not self._path.exists():
            # No serialized graph yet: keep an empty ConjunctiveGraph.
            return
        self._graph.parse(location=str(self._path), format="json-ld")
        self._graph.bind("foaf", "http://xmlns.com/foaf/0.1/")
        self._graph.bind("oa", "http://www.w3.org/ns/oa#")
        self._graph.bind("prov", "http://www.w3.org/ns/prov#")
        self._graph.bind("renku", "https://swissdatasciencecenter.github.io/renku-ontology#")
        self._graph.bind("schema", "http://schema.org/")
        self._graph.bind("wf", "http://www.w3.org/2005/01/wf/flow#")
        self._graph.bind("wfprov", "http://purl.org/wf4ever/wfprov#")
        for prefix, namespace in self._custom_bindings.items():
            self._graph.bind(prefix, namespace)
    def get_latest_plans_usages(self):
        """Return a list of tuples with path and check of all Usage paths."""
        plan_orders = self.query(LATEST_PLAN_EXECUTION_ORDER)
        usages = self.query(ALL_USAGES)
        # Keep only usages belonging to the latest execution of each plan.
        latest_usages = (u for u in usages for o in plan_orders if u[1] == o[1])
        return [(str(u[0]), str(u[-2]), str(u[-1])) for u in latest_usages]
    def query(self, query):
        """Run a SPARQL query and return the result."""
        self._create_rdf_graph()
        return self._graph.query(query)
class ProvenanceGraphSchema(JsonLDSchema):
    """ProvenanceGraph schema.

    Calamus/marshmallow schema mapping a ProvenanceGraph to/from JSON-LD,
    typed as a ``schema:Collection`` whose parts are the activities.
    """
    class Meta:
        """Meta class."""
        rdf_type = [schema.Collection]
        model = ProvenanceGraph
        unknown = EXCLUDE  # ignore unexpected JSON-LD fields instead of erroring
    # Activities are serialized as schema:hasPart members of the collection.
    activities = Nested(schema.hasPart, ActivitySchema, many=True, missing=None)
LATEST_PLAN_EXECUTION_ORDER = """
SELECT ?plan (MAX(?order) AS ?maxOrder)
WHERE
{
?activity a prov:Activity .
?activity prov:qualifiedAssociation/prov:hadPlan ?plan .
?activity renku:order ?order
}
GROUP BY ?plan
"""
ALL_USAGES = """
SELECT ?plan ?order ?usage ?path ?checksum
WHERE
{
?activity a prov:Activity .
?activity prov:qualifiedAssociation/prov:hadPlan ?plan .
?activity renku:order ?order .
?activity prov:qualifiedUsage ?usage .
?usage prov:entity ?entity .
?entity prov:atLocation ?path .
?entity renku:checksum ?checksum .
}
"""
LATEST_USAGES = """
SELECT ?path ?checksum ?order ?maxOrder
WHERE
{
{
SELECT ?path ?checksum ?order
WHERE
{
?activity a prov:Activity .
?entity renku:checksum ?checksum .
?entity prov:atLocation ?path .
?entity (prov:qualifiedGeneration/prov:activity) ?activity .
?activity renku:order ?order
}
}
.
{
SELECT ?path (MAX(?order_) AS ?maxOrder)
WHERE
{
SELECT ?path ?order_
WHERE
{
?activity a prov:Activity .
?entity prov:atLocation ?path .
?entity (prov:qualifiedGeneration/prov:activity) ?activity .
?activity renku:order ?order_
}
}
GROUP BY ?path
}
FILTER(?order = ?maxOrder)
}
"""
| StarcoderdataPython |
3509081 | <gh_stars>1-10
import pygame,sys
from opensimplex import OpenSimplex
# --- pygame bootstrap: clock, audio mixer, window and shared globals ---
mainClock = pygame.time.Clock()
from pygame.locals import *
# Pre-configure the mixer before pygame.init() to reduce sound latency.
pygame.mixer.pre_init(44100, -16, 2, 512)
pygame.init() # initiates pygame
pygame.mixer.set_num_channels(64)
# Only queue the event types the game actually handles.
pygame.event.set_allowed([QUIT, KEYDOWN, KEYUP,MOUSEBUTTONDOWN])
pygame.display.set_caption("Easy Engine Version 4")
WINDOW_SIZE = (1000,800)
screen = pygame.display.set_mode(WINDOW_SIZE,RESIZABLE|DOUBLEBUF)
# Off-screen surface rendered at the fixed logical size, then scaled to fit.
display = screen.copy()
font = pygame.font.Font('data/fonts/font.ttf',50)
click = False
def draw_text(text, font, color, surface, x, y):
    """Render *text* with *font* in *color* and blit it onto *surface* at (x, y)."""
    rendered = font.render(text, 1, color)
    bounds = rendered.get_rect()
    bounds.topleft = (x, y)
    surface.blit(rendered, bounds)
def WINDOW_RESIZED(screen_size, WINDOW_SIZE):
    """Letterbox the logical WINDOW_SIZE inside the physical screen_size.

    Scales by the limiting axis so the whole logical window fits, and
    centres it along the other axis.  Returns
    ``(scaled_size, top_left_offset, rect)`` for the scaled display area.
    """
    width_ratio = screen_size[0] / WINDOW_SIZE[0]
    height_ratio = screen_size[1] / WINDOW_SIZE[1]
    # The smaller ratio is the one that fits; the other axis gets bars.
    factor = min(width_ratio, height_ratio)
    display_size = (WINDOW_SIZE[0] * factor, WINDOW_SIZE[1] * factor)
    if width_ratio > height_ratio:
        offset = ((screen_size[0] - display_size[0]) / 2, 0)
    else:
        offset = (0, (screen_size[1] - display_size[1]) / 2)
    return display_size, offset, pygame.Rect(offset, display_size)
import libs.engine as e
import random
def generate_chunk(CHUNK_SIZE,noise,x,y):
scale = 10
chunk_data = {}
for y_pos in range(CHUNK_SIZE):
for x_pos in range(CHUNK_SIZE):
target_x = x * CHUNK_SIZE + x_pos
target_y = y * CHUNK_SIZE + y_pos
value = int(noise.noise2d(target_x*0.1,0)*scale)
tile_type = "air"
attribute = {}
if target_y > 8 - value: tile_type="dirt"
elif target_y == 8 - value:
tile_type="grass"
elif target_y == 8 - value -1:
if random.randint(1,5) > 1:
tile_type="plant"
elif random.randint(1,3) == 1:
tile_type="torch"
chunk_data[target_x,target_y] = {"type":tile_type,"attribute":attribute}
return chunk_data
from PIL import Image, ImageFilter
def draw_tile():
    """Render a blurred, torch-lit terrain surface to use as the menu background.

    Generates a fresh random world each call, draws its tiles, overlays a
    darkness layer punched through by torch light, then Gaussian-blurs the
    result via PIL.  Returns a pygame Surface of WINDOW_SIZE.
    """
    map_data = {}
    TILE_SIZE = 24
    CHUNK_SIZE = 7
    SEED = random.randint(-999999,999999)
    noise = OpenSimplex(SEED)
    tile_database = e.load_images(TILE_SIZE)
    img = pygame.Surface(WINDOW_SIZE)
    light_display = pygame.Surface(WINDOW_SIZE, pygame.SRCALPHA)
    tile_light = []  # [( (x, y), radius_scale ), ...] one entry per torch
    # 6x7 chunks around a fixed camera offset (0, -200) cover the window.
    for y in range(6):
        for x in range(7):
            target_x = x - 1 + int(round(0/(CHUNK_SIZE*TILE_SIZE)))
            target_y = y - 1 + int(round(-200/(CHUNK_SIZE*TILE_SIZE)))
            target_chunk = str(target_x) + ';' + str(target_y)
            if target_chunk not in map_data:
                map_data[target_chunk] = generate_chunk(CHUNK_SIZE,noise,target_x,target_y)
            for tile in map_data[target_chunk]:
                pos_x = tile[0]*TILE_SIZE
                pos_y = tile[1]*TILE_SIZE+200
                tile_data = map_data[target_chunk][tile]
                img.blit(tile_database[tile_data["type"]],(pos_x,pos_y))
                if tile_data["type"] == "torch":
                    tile_light.append(((pos_x+10,pos_y+7),0.4))
    # Darken the scene, then carve concentric translucent rings around torches.
    light_display.fill((0,0,0))
    light_display.set_alpha(150)
    for distance in reversed(range(10)):
        for light in tile_light:
            pygame.draw.circle(light_display,(0,0,0,distance*25),light[0],int(light[1]*distance*10)*distance*0.2,int(light[1]//10))
    img.blit(light_display,(0,0))
    #Pil Blurred
    # Round-trip through PIL for the Gaussian blur, then back to pygame.
    pil_string_image = pygame.image.tostring(img,"RGBA",False)
    im = Image.frombytes("RGBA",WINDOW_SIZE,pil_string_image)
    im = im.filter(ImageFilter.GaussianBlur(radius=2))
    data = im.tobytes()
    surface = pygame.image.fromstring(data, im.size, im.mode)
    return surface
def main_menu():
    """Run the main-menu loop: draw the background, title and button at 60 FPS.

    Handles quit/escape, left-click detection, and window resizing (which
    recomputes the letterboxed display rect via WINDOW_RESIZED).  Never
    returns; exits the process on QUIT or Escape.
    """
    display_size = WINDOW_SIZE
    display_pos = (0,0)
    display_rect = pygame.Rect(display_pos,display_size)
    screen.fill((0,0,0))
    background = draw_tile()
    while 1:
        display.blit(background,(0,0))
        draw_text("Main Menu",font,(255,255,255),display,400,100)
        mx,my = pygame.mouse.get_pos()
        # Placeholder menu button shape.
        pygame.draw.polygon(display,(100,100,100),((100,100),(80,140),(200,140),(220,100)))
        click = False
        for event in pygame.event.get():
            if event.type == QUIT:
                pygame.quit()
                sys.exit()
            if event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    pygame.quit()
                    sys.exit()
            if event.type == MOUSEBUTTONDOWN:
                if event.button == 1:
                    click = True
            if event.type == WINDOWRESIZED:
                # Recompute the letterbox and clear the bars.
                display_size,display_pos,display_rect = WINDOW_RESIZED(screen.get_size(),WINDOW_SIZE)
                screen.fill((0,0,0))
                pygame.display.update()
        # Scale the logical display into the letterboxed area and present it.
        screen.blit(pygame.transform.scale(display,display_size),display_pos)
        pygame.display.update(display_rect)
        mainClock.tick(60)
3588223 | import os, re, openpyxl
from openpyxl import load_workbook
# Cross-references signal indices in workbook D04 against the SOE point lists
# in 20200414SOE and writes "<name>-<description>" labels back into columns
# 26-28 of the data sheet.
wb1 = load_workbook("test/D04.xlsx")
wb2 = load_workbook("test/20200414SOE.xlsx")
ws11 = wb1['資料']
ws21 = wb2['AI_LIST']
ws22 = wb2['DI-1-SOE-MCD']
ws23 = wb2['DI-2-1BIT-2BIT']
ws24 = wb2['CO_LIST']
index = 0
index2 = 0
index3 = 0
# NOTE(review): range(2, ws11.max_row) excludes the last populated row because
# max_row is inclusive — confirm whether the final row should be processed.
for i in range(2,ws11.max_row):
    # print(ws.cell(row=i, column=5).value)
    # DI rows: col 23 holds the point index (leading zeros stripped).
    if ('DI' in str(ws11.cell(row=i, column=16).value) and ws11.cell(row=i, column=23).value != None and ws11.cell(row=i, column=23).value.strip(" ") != ''):
        if ws11.cell(row=i, column=23).value.lstrip("0") != '':
            index = ws11.cell(row=i, column=23).value.lstrip("0")
        else:
            index = 0
        if ws11.cell(row=i, column=24).value != None:
            if ws11.cell(row=i, column=24).value.lstrip("0") != '':
                index2 = ws11.cell(row=i, column=24).value.lstrip("0")
            else:
                index2 = 0
        #print(int(index))
        # Indices 0-511 live on the SOE sheet, 512+ on the 1BIT/2BIT sheet.
        if int(index)<=511:
            for j in range(6,ws22.max_row):
                if index==ws22.cell(row=j, column=9).value:
                    ws11.cell(row=i, column=26).value=str(ws22.cell(row=j, column=3).value).strip(" ")+'-'+str(ws22.cell(row=j, column=5).value).strip(" ")
        if int(index)>=512:
            for j in range(6,ws23.max_row):
                if index==ws23.cell(row=j, column=9).value:
                    ws11.cell(row=i, column=26).value=str(ws23.cell(row=j, column=3).value).strip(" ")+'-'+str(ws23.cell(row=j, column=5).value).strip(" ")
        # Secondary index (col 24) maps onto column 27.
        if int(index2)!=0:
            for j in range(6,ws23.max_row):
                if index2==ws23.cell(row=j, column=9).value:
                    ws11.cell(row=i, column=27).value=str(ws23.cell(row=j, column=3).value).strip(" ")+'-'+str(ws23.cell(row=j, column=5).value).strip(" ")
    # AI rows map against the AI_LIST sheet into column 26.
    if ('AI' in str(ws11.cell(row=i, column=16).value) and ws11.cell(row=i, column=23).value != None and ws11.cell(row=i, column=23).value.strip(" ") != ''):
        if ws11.cell(row=i, column=23).value.lstrip("0") != '':
            index = ws11.cell(row=i, column=23).value.lstrip("0")
        else:
            index = 0
        #print(int(index))
        for j in range(6,ws21.max_row):
            if index==ws21.cell(row=j, column=8).value:
                ws11.cell(row=i, column=26).value=str(ws21.cell(row=j, column=2).value).strip(" ")+'-'+str(ws21.cell(row=j, column=4).value).strip(" ")
    # Control-output index (col 25) maps against CO_LIST into column 28.
    if (ws11.cell(row=i, column=25).value != None and ws11.cell(row=i, column=25).value.strip(" ") != ''):
        if ws11.cell(row=i, column=25).value.lstrip("0") != '':
            index3 = ws11.cell(row=i, column=25).value.lstrip("0")
        else:
            index3 = 0
        for j in range(8,ws24.max_row):
            if index3==ws24.cell(row=j, column=8).value:
                ws11.cell(row=i, column=28).value=str(ws24.cell(row=j, column=2).value).strip(" ")+'-'+str(ws24.cell(row=j, column=4).value).strip(" ")
# Write results to new files, leaving the originals untouched.
wb1.save("test/D04_M.xlsx")
wb2.save("test/20200414SOE_M.xlsx")
1616623 | #coding:utf8
import logging
import json
from base_handler import BaseHandler
from utils import decimal_default,get_linenumber
from utils import RoundDown
from .proxy import EthereumProxy
from constants import ETH_IP_ADDR,ETH_RPC_PORT,ETH_BLK_BUFFER_SIZE
import sql
from constants import ERC20_CONTRACTS_MAP #ERC20合约地址
#设置精度
import decimal
from decimal import Decimal
from decimal import getcontext
getcontext().prec = 30
ip_addr, port = ETH_IP_ADDR,ETH_RPC_PORT
class ETH_GetBalance(BaseHandler):
    """Handler returning ETH and ERC-20 balances for an address."""
    @staticmethod
    def get_balance(rpc_connection,addr, block="latest"):
        """Return the raw (wei) balance of ``addr`` at ``block``."""
        balance = rpc_connection.eth_getBalance(addr, block)
        #return balance/float(10**18)
        return balance
    @classmethod
    def get_all_balance(cls, rpcconn, addr, symbol='', block='latest'):
        """Return a ``{symbol: balance-string}`` map for ``addr``.

        Always includes 'ETH' (8-decimal string).  With an empty ``symbol``
        every configured ERC-20 token is queried; otherwise only that token.
        'USDT' is reported under the key 'ERC20-USDT'.
        """
        str_eth_balance = rpcconn.eth_getBalance(addr, block)
        # Convert wei -> ether with Decimal to avoid float rounding.
        dbalance = Decimal(str_eth_balance) / Decimal(10**18)
        dbalance = RoundDown(dbalance)
        retData = {}
        retData['ETH'] = "%.8f" % dbalance
        if symbol.upper() == 'ETH':
            pass
        elif len(symbol) == 0:
            # Check balances of all configured ERC-20 tokens.
            for contract_addr in ERC20_CONTRACTS_MAP.values():
                strSymbol = rpcconn.eth_erc20_symbol(contract_addr)
                strBalance = rpcconn.eth_erc20_balanceOf(contract_addr, addr, True)
                retData[strSymbol] = strBalance
        else:
            contract_addr = ERC20_CONTRACTS_MAP[symbol] if symbol != 'ERC20-USDT' else ERC20_CONTRACTS_MAP['USDT']
            strSymbol = rpcconn.eth_erc20_symbol(contract_addr)
            strBalance = rpcconn.eth_erc20_balanceOf(contract_addr, addr, True)
            retData[strSymbol] = strBalance
        # Normalize the on-chain symbol 'USDT' to the API name 'ERC20-USDT'.
        if 'USDT' in retData:
            retData['ERC20-USDT'] = retData['USDT']
            del (retData['USDT'])
        return retData
    def post(self):
        """POST handler: ``address`` (42-char hex) -> all balances as JSON."""
        rpcconn = EthereumProxy(ip_addr, port)
        try:
            address = self.get_argument("address")
            if len(address) != 42:
                self.write(json.dumps(BaseHandler.error_ret_with_data("arguments error")))
                return
            # symbol = self.get_argument('symbol')
            # print("symbol:{}".format(symbol))
            # balance = ETH_GetBalance.get_balance(rpc_connection,address)
            data = ETH_GetBalance.get_all_balance(rpcconn, address)
            self.write(json.dumps(BaseHandler.success_ret_with_data( data ), default=decimal_default))
        except Exception as e:
            self.write(json.dumps(BaseHandler.error_ret_with_data("error: %s"%e)))
            logging.error("ETH_GetBalance error:{0} in {1}".format(e,get_linenumber()))
class ETH_PendingTransactions(BaseHandler):
    """Handler returning the node's currently pending transactions."""

    def get(self):
        proxy = EthereumProxy(ip_addr, port)
        try:
            pending = proxy.eth_pendingTransactions()
            payload = BaseHandler.success_ret_with_data(pending)
            self.write(json.dumps(payload, default=decimal_default))
        except Exception as e:
            self.write(json.dumps(BaseHandler.error_ret_with_data("error: %s"%e)))
            logging.error("ETH_PendingTransactions error:{0} in {1}".format(e,get_linenumber()))
class ETH_SendRawTransaction(BaseHandler):
    """Broadcast a signed raw transaction, deduplicated by order id."""
    def get_order_from_db(self, order_id):
        """Return ``(True, txid)`` if ``order_id`` was already broadcast, else ``(False, "")``."""
        import sql
        # SECURITY NOTE(review): order_id comes from the request and is
        # string-formatted into SQL — use parameterized queries if sql.run
        # supports them.
        sqlRet = sql.run("select * from tb_eth_broadcast where order_id='{0}';".format(order_id))
        if len(sqlRet) == 0: return (False, "")
        txid = sqlRet[0]['txid']
        return (True, txid)
    def insert_txid_into_db(self, order_id, txid):
        """Record a broadcast (order_id, txid) pair so it is never re-sent."""
        import sql
        strSql = """insert into tb_eth_broadcast(order_id, txid) values('{}','{}');""".format(order_id, txid)
        logging.info('sql: {}'.format(strSql))
        sqlRet = sql.run(strSql)
    def post(self):
        """POST handler: ``data`` (signed RLP hex) + ``orderId`` -> ``{txid, orderId}``."""
        rpc_connection = EthereumProxy(ip_addr, port)
        try:
            data = str(self.get_argument("data"))
            order_id = str(self.get_argument('orderId'))
            flag, txid = self.get_order_from_db(order_id)
            if flag: # already broadcast once: return the stored txid, do not rebroadcast
                rspData = {'txid':txid, 'orderId':order_id}
                self.write(json.dumps(BaseHandler.success_ret_with_data(rspData), default=decimal_default))
                return
            # 0x checking
            rlpdata = "0x" + data if "0x" not in data else data
            # sending raw transaction
            txid = rpc_connection.eth_sendRawTransaction(rlpdata)
            self.insert_txid_into_db(order_id, txid)
            rspData = {'txid':txid, 'orderId':order_id}
            self.write(json.dumps(BaseHandler.success_ret_with_data(rspData), default=decimal_default))
        except Exception as e:
            self.write(json.dumps(BaseHandler.error_ret_with_data("error: %s"%e)))
            logging.error("ETH_SendRawTransaction error:{0} in {1}".format(e,get_linenumber()))
class ETH_ListAccounts(BaseHandler):
    """Handler listing all locally-tracked account addresses."""
    @staticmethod
    def addresses():
        """Return all addresses from the t_eth_accounts table, stripped of whitespace."""
        from sql import run
        accounts = run("""select address from t_eth_accounts;""")
        return [account['address'].strip() for account in accounts]
    def get(self):
        """GET handler: return the tracked address list as JSON."""
        try:
            data = ETH_ListAccounts.addresses()
            self.write(json.dumps(BaseHandler.success_ret_with_data(data), default=decimal_default))
        except Exception as e:
            self.write(json.dumps(BaseHandler.error_ret_with_data("error: %s"%e)))
            logging.error("ETH_Accounts error:{0} in {1}".format(e,get_linenumber()))
class ETH_BlockNumber(BaseHandler):
    """Handler exposing the node's current chain height."""

    @staticmethod
    def latest(rpc_connection):
        """Return the latest block number as an int."""
        return int(rpc_connection.eth_blockNumber())

    def get(self):
        proxy = EthereumProxy(ip_addr, port)
        try:
            height = ETH_BlockNumber.latest(proxy)
            payload = BaseHandler.success_ret_with_data(height)
            self.write(json.dumps(payload, default=decimal_default))
        except Exception as e:
            self.write(json.dumps(BaseHandler.error_ret_with_data("error: %s"%e)))
            logging.error("ETH_BlockNumber error:{0} in {1}".format(e,get_linenumber()))
class ETH_GetBlockTransactionCount(BaseHandler):
    """Handler returning the number of transactions in a block."""
    @staticmethod
    def fromGetBlock(rpc_connection,blknumber):
        """Count transactions by fetching the whole block body (0 if missing)."""
        blkheader = rpc_connection.eth_getBlockByNumber(blknumber)
        return len(blkheader['transactions']) if blkheader else 0
    @staticmethod
    def process(rpc_connection,blknumber):
        """Count transactions via the dedicated RPC call (0 if missing)."""
        blknumber = rpc_connection.eth_getBlockTransactionCountByNumber(blknumber)
        return int(blknumber) if blknumber else 0
    def get(self):
        """GET handler: ``blknumber`` (defaults to latest) -> transaction count."""
        rpc_connection = EthereumProxy(ip_addr, port)
        try:
            blknumber = int(self.get_argument("blknumber")) if self.get_argument("blknumber") else int(ETH_BlockNumber.latest(rpc_connection))
            data = ETH_GetBlockTransactionCount.fromGetBlock(rpc_connection,blknumber)
            self.write(json.dumps(BaseHandler.success_ret_with_data(data), default=decimal_default))
        except Exception as e:
            self.write(json.dumps(BaseHandler.error_ret_with_data("error: %s"%e)))
            logging.error("ETH_GetBlockTransactionCount error:{0} in {1}".format(e,get_linenumber()))
class ETH_GetTransactionFromBlock(BaseHandler):
    """Handler returning one transaction of a block, normalized for the API."""
    @staticmethod
    def process(rpc_connection,blknumber,txindex):
        """Fetch tx ``txindex`` of block ``blknumber`` and normalize its fields.

        Renames 'hash' to 'txid', adds blocktime/confirmations/blockNumber,
        converts hex quantities to decimal strings and formats 'value' in
        ether with 8 decimals.  Returns False when the tx is missing.
        """
        txdata = rpc_connection.eth_getTransactionByBlockNumberAndIndex(blknumber,txindex)
        blockData = rpc_connection.eth_getBlockByNumber(blknumber)
        txdata["blocktime"] = blockData["timestamp"] if blockData and "timestamp" in blockData else 0
        txdata["confirmations"] = ETH_BlockNumber.latest(rpc_connection) - blknumber
        txdata["blockNumber"] = blknumber
        from utils import filtered,alterkeyname
        retData = filtered(alterkeyname(txdata,'hash','txid'),["confirmations", "blocktime",
            "blockNumber","nonce","txid","from","to","value","gas","gasPrice"]) if txdata else False
        for key in ["nonce", "gas", "value", "gasPrice", "blocktime"]:
            if "0x" in retData[key]: retData[key] = str(int(retData[key], 16))
            getcontext().prec = 30
            # NOTE(review): dValue is computed on every key but only applied
            # to "value" — presumably the wei->ether conversion is meant for
            # "value" alone; confirm the extra computations are intentional.
            dValue = RoundDown(Decimal(retData[key]) / Decimal(10**18 ))
            if key in ["value"]: retData[key] = "%.8f" % dValue
        return retData
    def get(self):
        """GET handler: ``blknumber`` (default latest) + ``txindex`` (default 0)."""
        rpc_connection = EthereumProxy(ip_addr, port)
        try:
            blknumber = int(self.get_argument("blknumber")) if self.get_argument("blknumber") else int(ETH_BlockNumber.latest(rpc_connection))
            txindex = int(self.get_argument("txindex")) if self.get_argument("txindex") else 0
            ret = ETH_GetTransactionFromBlock.process(rpc_connection,blknumber,txindex)
            if not ret:
                self.write(json.dumps(BaseHandler.error_ret_with_data("no corresponding transaction or block body not found!!!")))
                return
            self.write(json.dumps(BaseHandler.success_ret_with_data(ret), default=decimal_default))
        except Exception as e:
            self.write(json.dumps(BaseHandler.error_ret_with_data("error: %s"%e)))
            logging.error("ETH_GetTransactionFromBlock error:{0} in {1}".format(e,get_linenumber()))
class ETH_GetBlockTransactions(BaseHandler):
    """Handler listing a block's transactions that touch tracked accounts."""
    @staticmethod
    def process(rpc_connection,blknumber,txcount):
        """Collect the block's transactions whose 'to' or 'from' is a tracked address."""
        txlist = []
        for index in range(txcount):
            txdata = ETH_GetTransactionFromBlock.process(rpc_connection,blknumber,index)
            if not txdata:
                # Missing tx means we've run past the end of the block.
                break
            if any(txdata[address] in ETH_ListAccounts.addresses() for address in ['to','from']):
                txlist.append(txdata)
        return txlist
    def post(self):
        """POST handler: ``blknumber`` (default latest) -> relevant tx list."""
        rpc_connection = EthereumProxy(ip_addr, port)
        try:
            blknumber = int(self.get_argument("blknumber")) if self.get_argument("blknumber") else ETH_BlockNumber.latest(rpc_connection)
            txcount = ETH_GetBlockTransactionCount.fromGetBlock(rpc_connection,blknumber)
            data = ETH_GetBlockTransactions.process(rpc_connection,blknumber,txcount)
            self.write(json.dumps(BaseHandler.success_ret_with_data(data), default=decimal_default))
        except Exception as e:
            self.write(json.dumps(BaseHandler.error_ret_with_data("error: %s"%e)))
            logging.error("ETH_GetBlockTransactions error:{0} in {1}".format(e,get_linenumber()))
#2019-05-01 yqq
#获取用户充币信息的接口, 直接从数据库中获取交易数据
#不再临时扫描区块
class ETH_CrawlTxData(BaseHandler):
    """Deposit-crawling endpoint: serves deposit records from the database
    instead of scanning blocks on demand (2019-05-01 yqq)."""
    def GetTxDataFromDB(self, nBegin, nEnd, symbol='ETH'):
        """Return deposit records for ``symbol`` with block_number in [nBegin, nEnd].

        Returns [] on bad argument types, unexpected query results, or any
        database error.  'USDT' rows are re-labeled as 'ERC20-USDT'.
        """
        try:
            if not (isinstance(nBegin, int) and isinstance(nEnd, int) ):
                logging.error("nBegin or nEnd is not int type.")
                return []
            txRet = []
            # strSql = """SELECT txdata FROM t_eth_charge WHERE height >= {0} and height <= {1};""".format(nBegin, nEnd)
            strSql = """SELECT * FROM tb_eth_series_deposit WHERE symbol='{}' and block_number>={} and block_number<={}; """.format(symbol, nBegin, nEnd)
            logging.info("sql : {}".format(strSql))
            #print(strSql)
            sqlRet = sql.run(strSql)
            # print(sqlRet)
            if not isinstance(sqlRet, list):
                return []
            for item in sqlRet:
                # Reshape each DB row into the API transaction schema.
                tx = {}
                tx['symbol'] = item['symbol']
                tx["txid"] = item['txid']
                tx["from"] = item["from"]
                tx["to"] = item["to"]
                tx["nonce"] = item['nonce']
                tx["blocktime"] = item['block_time']
                tx["confirmations"] = item['confirmations']
                tx["blockNumber"] = item['block_number']
                tx["value"] = item['value']
                if symbol == 'USDT': tx['symbol'] = 'ERC20-USDT'
                txRet.append(tx)
            return txRet
        except Exception as e:
            logging.error("GetTxDataInfoDB(nBegin, nEnd, symbol):{}".format( e))
            return []
        pass
    #@staticmethod
    def process(self, nStart, symbol='ETH'):
        """Return all deposits for ``symbol`` from block ``nStart`` onward."""
        txRet = self.GetTxDataFromDB(nStart, (1<<64) - 1, symbol)
        return txRet
    def post(self):
        """POST handler: derive the token symbol from the URI path and
        return deposits from block ``blknumber`` onward."""
        try:
            logging.info('URI: {}'.format( self.request.uri) )
            strURI = self.request.uri
            symbol = strURI [ strURI.find('/', 0) + 1 : strURI.rfind('/') ] # e.g.: /link/crawltransactions
            if symbol.upper() == 'ERC20-USDT': symbol = 'USDT'
            symbol = symbol.upper()
            # print("symbol:{}".format(symbol))
            nStart = int(self.get_argument("blknumber"))
            data = self.process(nStart, symbol)
            logging.info('data : {}'.format(data))
            self.write(json.dumps(BaseHandler.success_ret_with_data(data), default=decimal_default))
        except Exception as e:
            self.write(json.dumps(BaseHandler.error_ret_with_data("error: %s"%e)))
            logging.error("ETH_CrawlTxData error:{0} in {1}".format(e,get_linenumber()))
    def get(self):
        """GET is an alias for POST."""
        self.post()
class ETH_GetTransactionCount(BaseHandler):
    """Handler returning the pending-state nonce for an address."""

    def post(self):
        proxy = EthereumProxy(ip_addr, port)
        try:
            address = self.get_argument("address")
            # "pending" already accounts for queued transactions, so no +1 is needed.
            nonce = proxy.eth_getTransactionCount(address, "pending")
            payload = BaseHandler.success_ret_with_data(str(nonce))
            self.write(json.dumps(payload, default=decimal_default))
        except Exception as e:
            self.write(json.dumps(BaseHandler.error_ret_with_data("error: %s"%e)))
            logging.error("ETH_GetTransactionCount error:{0} in {1}".format(e,get_linenumber()))
class ETH_GetBlockByNumber(BaseHandler):
    """Handler returning the full block body for a block number."""
    def get(self):
        """GET handler: ``number`` (hex '0x…' or decimal) -> block JSON."""
        rpc_connection = EthereumProxy(ip_addr, port)
        try:
            block_number = str(self.get_argument("number"))
            # Accept both hex ("0x...") and decimal block numbers.
            block_number = int(block_number,16) if '0x' in block_number else int(block_number)
            tx_infos = rpc_connection.eth_getBlockByNumber(block_number)
            self.write(json.dumps(BaseHandler.success_ret_with_data(tx_infos), default=decimal_default))
        except Exception as e:
            self.write(json.dumps(BaseHandler.error_ret_with_data("error: %s"%e)))
            logging.error("ETH_GetBlockByNumber Error:{0} in {1}".format(e,get_linenumber()))
class ETH_GetTransactionByHash(BaseHandler):
    """Handler that looks a transaction up by its hash."""

    def post(self):
        proxy = EthereumProxy(ip_addr, port)
        try:
            requested_hash = self.get_argument("tx_hash")
            info = proxy.eth_getTransactionByHash(requested_hash)
            payload = BaseHandler.success_ret_with_data(info)
            self.write(json.dumps(payload, default=decimal_default))
        except Exception as e:
            self.write(json.dumps(BaseHandler.error_ret_with_data("error: %s"%e)))
            logging.error("ETH_GetTransactionByHash error:{0} in {1}".format(e,get_linenumber()))
class ETH_CollectionQueryEx(BaseHandler):
    """Fund-collection helper: list active addresses with balances and nonces."""
    def QueryBalanceAndNonce(self, rpcconn, addrs, symbol):
        """For each address, fetch live balances and the pending nonce.

        Addresses whose ``symbol`` balance is below 0.001 are skipped.
        """
        retList = []
        for addr in addrs:
            all_balance = ETH_GetBalance.get_all_balance(rpcconn, addr, symbol,"latest")
            logging.info(all_balance)
            if len(all_balance) > 0 and Decimal(all_balance[symbol]) < 0.001:
                logging.info("skip : {}".format(all_balance))
                continue
            nNonce = rpcconn.eth_getTransactionCount(addr, "pending") # latest nonce; no +1 needed
            # nNonce = rpcconn.eth_getTransactionCount(addr, "latest") # latest nonce; no +1 needed
            retList.append({'address': addr, 'balances': all_balance, 'nonce': nNonce})
        return retList
    def proccess(self, rpcconn , symbol):
        """Return balance/nonce info for the top active addresses of ``symbol``."""
        # 1. Fetch active addresses from the DB, richest first (top 100).
        strSql = """SELECT address FROM tb_eth_series_active_addrs WHERE `symbol`='{}' AND `balance` > 0.0001 ORDER BY `balance` DESC LIMIT 100;""".format(symbol)
        sqlRet = sql.run(strSql)
        addrs = []
        for item in sqlRet:
            if "address" in item:
                if item['address'] not in addrs: addrs.append(item["address"])
        # 2. Query each address's live balance and nonce.
        tmpSymbol = 'ERC20-USDT' if symbol == 'USDT' else symbol
        return self.QueryBalanceAndNonce(rpcconn, addrs, tmpSymbol)
        # 3. Caller serializes the returned data.
    def get(self):
        """GET handler: optional ``symbol`` (default 'eth') -> active address info."""
        try:
            symbol = self.get_argument("symbol")
        except :
            logging.warning('no symbol arg, default is eth')
            symbol = "eth"
            pass
        rpcconn = EthereumProxy(ip_addr, port)
        try:
            if symbol.upper() == 'ERC20-USDT':
                symbol = 'USDT'
            elif not symbol.isalpha() : # reject non-alphabetic symbols to prevent SQL injection
                raise Exception("invalid symbol, must token symbol")
            symbol = symbol.upper()
            if symbol != 'ETH' and symbol not in ERC20_CONTRACTS_MAP.keys():
                retData = []
            else:
                retData = self.proccess(rpcconn, symbol) # gather info from the active-address table
            self.write(json.dumps(BaseHandler.success_ret_with_data(retData), default=decimal_default))
        except Exception as e:
            self.write(json.dumps(BaseHandler.error_ret_with_data("error: %s"%e)))
            logging.error(" ETH_CollectionQueryEx error:{0} in {1}".format(e,get_linenumber()))
class ETH_GasPrice(BaseHandler):
    """Handler returning the node's suggested gas price, in wei, as a string.

    Added 2020-08-18 (yqq) to support dynamic transaction fees.
    """

    def get(self):
        try:
            # Use the configured RPC endpoint.  A leftover debug line used to
            # hard-code port 8545 here, silently overriding ETH_RPC_PORT.
            rpcconn = EthereumProxy(ip_addr, port)
            gas_price = rpcconn.eth_gasPrice()
            self.write(json.dumps(BaseHandler.success_ret_with_data(str(gas_price)), default=decimal_default))
        except Exception as e:
            self.write(json.dumps(BaseHandler.error_ret_with_data("error: %s"%e)))
            # Fixed: the log line previously mislabeled this handler as
            # ETH_CollectionQueryEx (copy-paste error).
            logging.error("ETH_GasPrice error:{0} in {1}".format(e,get_linenumber()))
| StarcoderdataPython |
3574952 | <reponame>AlexandrosKyriakakis/StochasticProcesses<filename>Lab5_2020/simple_markov_chain_lib.py
from bisect import bisect_left
from random import random
import networkx as nx # get communication classes
import numpy as np
from scipy.sparse import csr_matrix
class markov_chain:
def __init__(self, markov_table, init_dist=None):
"""
Constructs a Markov Chain from a transition matrix.
The initial distribution can be provided or setted aftewards.
"""
# Attributes
self.running_state = None
self.steps = 0
self.visits = {state: 0 for state in markov_table}
size = len(markov_table)
# Set up state transition probs
self._states = {state: self._partial_sums(dist)
for state, dist in markov_table.items()}
for state, dist in self._states.items():
if not np.isclose(dist[-1][0], 1.0):
msg = "State {} transitions do not add up to 1.0".format(state)
raise ValueError(msg)
self._probs_state = np.array([0] * size)
# Adjacency Matrix
data, rows, cols = [], [], []
for row, dist in markov_table.items():
col, pval = zip(*[(s, p) for s, p in dist.items() if p > 0])
rows += [row] * len(col)
cols += col
data += pval
# make sure they are in the right order
enum = {state: i for i, state in enumerate(self._states)}
rows = [enum[r] for r in rows]
cols = [enum[c] for c in cols]
self._adj = csr_matrix((data, (rows, cols)), shape=(size, size))
# Communication Classes
classes = {'Closed': [], 'Open': []}
g = nx.MultiDiGraph(self._adj)
scc = list(nx.strongly_connected_components(g))
g = nx.condensation(g) # SCCs collapse to single nodes
for n in g:
if g.out_degree(n) == 0:
classes["Closed"].append(scc[n])
else:
classes["Open"].append(scc[n])
self.communication_classes = classes
# Set Initial State
self._init_dist = None
if init_dist is not None:
self.init_dist = init_dist
def __len__(self):
"""The cardinality of the state-space"""
return len(self._states)
@property
def probs_matrix(self):
"""The transition probability matrix"""
return self._adj.toarray()
@property
def probs_state(self):
"""
Computes analytically the probability of being in every state at
currentn step. Returns a vector of state probabilities
"""
init_dist = np.array([self.init_dist.get(state, 0.0)
for state in self._states])
probs = init_dist @ (self._adj ** self.steps)
return dict(zip(self._states, probs))
@property
def init_dist(self):
"""The initial distribution of the chain"""
return self._init_dist
@init_dist.setter
def init_dist(self, dist):
if not np.isclose(sum(dist.values()), 1.0):
msg = "The transition probabilities of init_dist must add up to 1.0"
raise ValueError(msg)
self._init_dist = dist
self._state0 = self._partial_sums(dist)
self.running_state = None
@property
def eigenvalues(self):
"""Returns the eigenvalues of the transition table"""
return list(np.sort(np.linalg.eigvals(self.probs_matrix)))
def _partial_sums(self, dist):
"""
Takes as input a row of the probability matrix (dist)
and generates its partial sums.
These are cached as tuples (sum, state) to be sampled.
"""
states, probs = zip(*[(s, p) for s, p in dist.items() if p > 0])
probs = np.cumsum(probs)
return list(zip(probs, states))
def _next_state(self, state):
"""Selects a new state based on the transition probabilities"""
return state[bisect_left(state, (random(), ))][1]
def start(self):
"""First step of the chain choosen from the initial distribution"""
# Initiate walk
self.steps = 0
for state in self._states:
self.visits[state] = 0
# Initialize the state distribution - to be updated as we walk
self.running_state = self._next_state(self._state0)
self.visits[self.running_state] = 1
    def move(self):
        """Moves to the next state and updates all relevant fields."""
        # self._states is indexed by the current state here, which implies it
        # maps each state to its cached (partial_sum, state) row —
        # TODO confirm against the constructor, which is outside this chunk.
        transition_probs = self._states[self.running_state]
        self.running_state = self._next_state(transition_probs)
        self.steps += 1
        self.visits[self.running_state] += 1
| StarcoderdataPython |
284724 | furryshit = ["Rawr x3",
"nuzzles",
"how are you",
"pounces on you",
"you're so warm o3o",
"notices you have a bulge o:",
"someone's happy :wink:",
"nuzzles your necky wecky~ murr~",
"hehehe rubbies your bulgy wolgy",
"you're so big :oooo ",
"rubbies more on your bulgy wolgy",
"it doesn't stop growing ·///·",
"kisses you and lickies your necky",
"daddy likies (;",
"nuzzles wuzzles I hope daddy really likes $:",
"wiggles butt and squirms",
"I want to see your big daddy meat~",
"wiggles butt I have a little itch o3o",
"wags tail can you please get my itch~",
"puts paws on your chest nyea~",
"its a seven inch itch",
"rubs your chest can you help me pwease",
"squirms pwetty pwease sad face",
"I need to be punished runs paws down your chest and bites lip",
"like I need to be punished really good~",
"paws on your bulge as I lick my lips I'm getting thirsty.",
"I can go for some milk",
"unbuttons your pants as my eyes glow",
"you smell so musky :v",
"licks shaft mmmm~"]
# "Facts" replies, one string per entry.
facts = ["Fun fact, pandas are so fucking stupid that they roll over onto their babies and fucking kill them, they kill their own fucking babies. Fucking stupid, fucking impish brained animals. There is no point in their existence just become extinct already, fuck off.",
         "Now, according to world population studies there has been approximately 108 BILLION people on this planet so far. The average lifespan for people is 0-25 years. If we multiply the average lifespan (25) by the 108 billion people, there has been 2.7 TRILLION years of life. If we multiply that by how many days are within a year (365) there has been approximately 985 TRILLION years of life. And not ONCE, in any of those days, did anybody ask.",
         "Fun fact, people who are able to perform self-fellatio report that it feels more like sucking a dick, rather than getting your dick sucked. Why?\n\nThe reason is simple: neurological latency. The nerves for your mouth are closer to your brain than the nerves of your cock. So your body feels the sensation from the mouth before the sensations of the cock. The mouth sensations over-shadow the pleasurable sensations, which is why those who self-fellate feel like they’re sucking a dick, rather than getting their dick sucked.",
         "A group of ravens is called a murder.",
         "Bus seats are designed so that you cannot tell how dirty they really are.",
         "Your intestines will “wriggle” themselves back into the correct position.\nDoctors who do any type of intestinal surgery don’t have to worry (too much) about how they put the intestines back in.",
         "A certain type of angler fish reproduce via the male burrowing into the side of the female, eventually fusing. The males life is lost in the process.",
         "When you get a sunburn, it's actually your cells dying so they don't get tumorous.",
         "Horses can't throw up.\nSo if they eat something bad or get a bad gas bubble, they just lay down and die.",
         "In the United States roughly 1/3 of all food is thrown away each year.",
         "There are 8 unaccounted for American nukes and literally an unknown quantity of missing soviet nukes that range all the way from warheads to suitcase bombs.",
         # BUG FIX: a trailing comma was missing on the next entry, which
         # silently merged it with the following one via implicit string
         # concatenation (the list had 25 items instead of 26).
         "The lemmings are not suicidal and the documentary that filmed it was actually showing lemmings getting thrown down a cliff because they needed the scene but couldn't make the animals do it.",
         "Live Chat support agents can see what you type before you send it, so they can reply quicker.",
         "You can smell your own lungs. Your brain just filters out the smell.",
         "In Australia there is a plant called the Gympie-Gympie which has such a severe sting that horses who brush against it throw themselves off cliffs because they’d rather die than continue to experience the pain.",
         "If a hamster gets too stressed, it will eats its kids.",
         "According to the World Bank Group, the world produces about 2 billion tons of garbage every year.",
         "There is a non-zero maximum threshold for the amount of cockroach that can be present in ground coffee because it is literally impossible to keep them out entirely.",
         "The threat of a deadly bird flu spreading to humans is always there. It takes just a little bit of negligence in screening chickens for this to happen.",
         "The reason dogs love squeaky toys is because they sound like small animals dying.",
         "There is a whale called 52 Blue that only sings at their frequency meaning it can't communicate with other whales. It is nicknamed the loneliest whale on the planet.",
         "The FBI estimates there are between 25-50 active serial killers in the US at any given time.",
         "No one went to prison over the Panama Papers.",
         "Sometimes you're the bad guy.",
         "If 2 male flat worms meet, they will sword fight with their dicks until one loses. The loser will become female and they will mate.",
         "There's something called a gamma ray burst, basically some stars will periodically produce a burst of gamma rays with no warning. If this happens to a star close enough to the earth and hits the earth(not all that unlikely, they spread out quite a bit while still being deadly) we'll be hit by a burst of gamma radiation with no warning. Every living thing on the side of the earth it hits will die and earth's atmosphere will be permanently damaged, this could lead to most of not all of the population of the other side of the planet also dying.\n\nIt's a civilisation ending event and there's nothing we can do to defend against or predict it."]
1848570 | <gh_stars>1-10
import unittest
import requests
class UnitTestsIbanfoxAPI(unittest.TestCase):
    """Smoke tests against the live ibanfox REST API (network required)."""

    def _call_and_dump(self, label, url):
        # Shared flow of both tests: announce the test, call the endpoint
        # with an (empty) API-key header, and print the raw response body.
        print(label)
        headers = {
            'x-api-key': '',
        }
        response = requests.get(url, headers=headers)
        print(response.text)

    # https://ibanfox.com/docs/api-reference/#operation/validateIban
    def test_get_validateIban(self):
        self._call_and_dump(
            'test_get_validateIban',
            'https://api.ibanfox.com/v1/ibans/DE89370400440532013000/validity')

    # https://ibanfox.com/docs/api-reference/#operation/getIbanDetails
    def test_get_getIbanDetails(self):
        self._call_and_dump(
            'test_get_getIbanDetails',
            'https://api.ibanfox.com/v1/ibans/DE89370400440532013000')
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
3583511 | <reponame>PseudotsugaMenziesii/e-napalm
from collections import Counter
from flask_login import current_user
from git import Repo
from io import BytesIO
from logging import info
from os import environ
from sqlalchemy import and_
from subprocess import Popen
from threading import Thread
from uuid import uuid4
from werkzeug.utils import secure_filename
from xlrd import open_workbook
from xlrd.biffh import XLRDError
from xlwt import Workbook
from eNMS.controller.base import BaseController
from eNMS.controller.ssh import SshConnection
from eNMS.database import db
from eNMS.models import models, model_properties, property_types
class InventoryController(BaseController):
    """Inventory endpoints: device terminals (web/desktop SSH), device logs,
    git-backed configuration history, topology import/export and pools."""

    # Round-robin cursor over the configured SSH port range; see get_ssh_port.
    ssh_port = -1
    # Device attributes treated as "network data", mapped to their UI labels.
    configuration_properties = {"configuration": "Configuration"}

    def get_ssh_port(self):
        """Return the next port in [start_port, end_port), round-robin.

        NOTE(review): the in-place increment is not synchronized — presumably
        fine in a single process; confirm for multi-worker deployments.
        """
        self.ssh_port += 1
        start = self.settings["ssh"]["start_port"]
        end = self.settings["ssh"]["end_port"]
        return start + self.ssh_port % (end - start)

    def web_connection(self, device_id, **kwargs):
        """Spawn a gotty web terminal connected to the device.

        Returns a dict with the device name, the allocated port, the port
        redirection setting and the server address for the frontend.
        """
        device = db.fetch("device", id=device_id, rbac="connect")
        cmd = [str(self.path / "files" / "apps" / "gotty"), "-w"]
        port, protocol = self.get_ssh_port(), kwargs["protocol"]
        address = getattr(device, kwargs["address"])
        cmd.extend(["-p", str(port)])
        if "accept-once" in kwargs:
            cmd.append("--once")
        if "multiplexing" in kwargs:
            cmd.extend(f"tmux new -A -s gotty{port}".split())
        if self.settings["ssh"]["bypass_key_prompt"]:
            options = "-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null"
        else:
            options = ""
        if protocol == "telnet":
            nested_cmd = f"telnet {address}"
        elif "authentication" in kwargs:
            # Credentials come from the device record, the logged-in user, or
            # the submitted form; the password reaches ssh via sshpass -e
            # (the SSHPASS environment variable).
            login, environ["SSHPASS"] = (
                (device.username, self.get_password(device.password))
                if kwargs["credentials"] == "device"
                else (current_user.name, self.get_password(current_user.password))
                if kwargs["credentials"] == "user"
                else (kwargs["username"], kwargs["password"])
            )
            nested_cmd = f"sshpass -e ssh {options} {login}@{address} -p {device.port}"
        else:
            nested_cmd = f"ssh {options} {address} -p {device.port}"
        if "multiplexing" in kwargs:
            cmd.append(nested_cmd)
        else:
            cmd.extend(nested_cmd.split())
        Popen(cmd)
        return {
            "device": device.name,
            "port": port,
            "redirection": self.settings["ssh"]["port_redirection"],
            "server_addr": self.settings["app"]["address"],
        }

    def get_device_logs(self, device_id):
        """Return newline-joined syslog entries originating from the device.

        NOTE(review): db.fetch("device", ...) runs once per log entry inside
        the filter; hoisting it out of the comprehension would avoid the
        repeated lookups.
        """
        device_logs = [
            log.name
            for log in db.fetch_all("log")
            if log.source == db.fetch("device", id=device_id).ip_address
        ]
        return "\n".join(device_logs)

    def desktop_connection(self, id, **kwargs):
        """Start an SSH session for a desktop client.

        Records a `session` entry and returns the connection parameters, or
        an {"error": ...} dict if the SSH connection could not be started.
        """
        device = db.fetch("device", id=id, rbac="connect")
        credentials = (
            (device.username, self.get_password(device.password))
            if kwargs["credentials"] == "device"
            else (current_user.name, self.get_password(current_user.password))
            if kwargs["credentials"] == "user"
            else (kwargs["username"], kwargs["password"])
        )
        uuid, port = str(uuid4()), self.get_ssh_port()
        session = db.factory(
            "session",
            name=uuid,
            user=current_user.name,
            timestamp=self.get_time(),
            device=device.id,
        )
        db.session.commit()
        try:
            ssh_connection = SshConnection(
                device.ip_address, *credentials, session.id, uuid, port
            )
            Thread(
                target=ssh_connection.start_session, args=(session.id, uuid, port),
            ).start()
            return {
                "port": port,
                "username": uuid,
                "device_name": device.name,
                "device_ip": device.ip_address,
            }
        except Exception as exc:
            return {"error": exc.args}

    def get_git_history(self, device_id):
        """List git commits touching each network-data file of the device."""
        device = db.fetch("device", id=device_id)
        repo = Repo(self.path / "network_data")
        path = self.path / "network_data" / device.name
        return {
            data_type: [
                {"hash": str(commit), "date": commit.committed_datetime}
                for commit in list(repo.iter_commits(paths=path / data_type))
            ]
            for data_type in self.configuration_properties
        }

    def get_git_network_data(self, device_name, hash):
        """Read the device's network-data files as of the given git commit."""
        tree, result = Repo(self.path / "network_data").commit(hash).tree, {}
        for property in self.configuration_properties:
            file = tree / device_name / property
            with BytesIO(file.data_stream.read()) as f:
                result[property] = f.read().decode("utf-8")
        return result

    def get_device_network_data(self, device_id):
        """Return the current (database) value of each network-data property."""
        device = db.fetch("device", id=device_id)
        return {p: getattr(device, p) for p in self.configuration_properties}

    def get_session_log(self, session_id):
        """Return the recorded content of an SSH session."""
        return db.fetch("session", id=session_id).content

    def counters(self, property, type):
        """Count instances of `type` grouped by the string value of `property`."""
        return Counter(
            str(getattr(instance, property)) for instance in db.fetch_all(type)
        )

    def export_topology(self, **kwargs):
        """Export all devices and links to an .xls spreadsheet."""
        workbook = Workbook()
        filename = kwargs["export_filename"]
        if "." not in filename:
            filename += ".xls"
        for obj_type in ("device", "link"):
            sheet = workbook.add_sheet(obj_type)
            for index, property in enumerate(model_properties[obj_type]):
                # Skip internal properties excluded from migration.
                if property in db.dont_migrate[obj_type]:
                    continue
                sheet.write(0, index, property)
                for obj_index, obj in enumerate(db.fetch_all(obj_type), 1):
                    value = getattr(obj, property)
                    if type(value) == bytes:
                        # Encrypted fields are stored as bytes; export them decrypted.
                        value = str(self.decrypt(value), "utf-8")
                    sheet.write(obj_index, index, str(value))
        workbook.save(self.path / "files" / "spreadsheets" / filename)

    def topology_import(self, file):
        """Import devices and links from an uploaded spreadsheet.

        Returns a human-readable status string; rows that fail to import are
        logged and downgrade the status to a partial import.
        """
        book = open_workbook(file_contents=file.read())
        status = "Topology successfully imported."
        for obj_type in ("device", "link"):
            try:
                sheet = book.sheet_by_name(obj_type)
            except XLRDError:
                # Missing sheet: nothing to import for this object type.
                continue
            properties = sheet.row_values(0)
            for row_index in range(1, sheet.nrows):
                values = {"dont_update_pools": True}
                for index, property in enumerate(properties):
                    if not property:
                        continue
                    # Convert the cell to the property's declared type.
                    func = db.field_conversion[property_types.get(property, "str")]
                    values[property] = func(sheet.row_values(row_index)[index])
                try:
                    db.factory(obj_type, **values).serialized
                except Exception as exc:
                    info(f"{str(values)} could not be imported ({str(exc)})")
                    status = "Partial import (see logs)."
        db.session.commit()
        # Pool updates were skipped per row; recompute all pools once here.
        for pool in db.fetch_all("pool"):
            pool.compute_pool()
        self.log("info", status)
        return status

    def import_topology(self, **kwargs):
        """Handle a topology upload; optionally wipe existing devices first."""
        file = kwargs["file"]
        if kwargs["replace"]:
            db.delete_all("device")
        if self.allowed_file(secure_filename(file.filename), {"xls", "xlsx"}):
            result = self.topology_import(file)
        else:
            # BUG FIX: `result` was previously unbound on this path, so a
            # disallowed extension raised NameError instead of reporting.
            result = "Invalid file extension (only .xls / .xlsx are allowed)."
        info("Inventory import: Done.")
        return result

    def save_pool_objects(self, pool_id, **kwargs):
        """Set a pool's devices and links from comma-separated names or ids."""
        pool = db.fetch("pool", id=pool_id)
        for obj_type in ("device", "link"):
            string_objects = kwargs[f"string_{obj_type}s"]
            if string_objects:
                # Comma-separated names take precedence over the id list.
                objects = []
                for name in [obj.strip() for obj in string_objects.split(",")]:
                    obj = db.fetch(obj_type, allow_none=True, name=name)
                    if not obj:
                        return {
                            "alert": f"{obj_type.capitalize()} '{name}' does not exist."
                        }
                    if obj not in objects:
                        objects.append(obj)
            else:
                objects = db.objectify(obj_type, kwargs[f"{obj_type}s"])
            setattr(pool, f"{obj_type}_number", len(objects))
            setattr(pool, f"{obj_type}s", objects)
        pool.last_modified = self.get_time()
        return pool.serialized

    def update_pool(self, pool_id):
        """Recompute the membership of a single pool."""
        db.fetch("pool", id=int(pool_id)).compute_pool()

    def update_all_pools(self):
        """Recompute the membership of every pool."""
        for pool in db.fetch_all("pool"):
            pool.compute_pool()

    def get_view_topology(self):
        """Return all devices and links with their view properties."""
        return {
            "devices": [d.view_properties for d in db.fetch_all("device")],
            "links": [d.view_properties for d in db.fetch_all("link")],
        }

    def view_filtering(self, **kwargs):
        """Filter devices/links for the view; kwargs maps object type -> form."""
        return {
            obj_type: [
                d.view_properties
                for d in db.session.query(models[obj_type])
                .filter(and_(*self.build_filtering_constraints(obj_type, **form)))
                .all()
            ]
            for obj_type, form in kwargs.items()
        }
| StarcoderdataPython |
6418520 | #
# Solution to Project Euler problem 55
# Copyright (c) Project Nayuki. All rights reserved.
#
# https://www.nayuki.io/page/project-euler-solutions
# https://github.com/nayuki/Project-Euler-solutions
#
def compute():
    """Count numbers below 10000 that are Lychrel candidates (Project Euler 55)."""
    count = 0
    for candidate in range(10000):
        if is_lychrel(candidate):
            count += 1
    return str(count)
def is_lychrel(n):
    """Return True if n never yields a palindrome within 50 reverse-and-add steps."""
    value = n
    for _ in range(50):
        value += int(str(value)[::-1])
        digits = str(value)
        if digits == digits[::-1]:
            return False
    return True
# Print the answer when executed as a script.
if __name__ == "__main__":
    print(compute())
| StarcoderdataPython |
82555 | <reponame>schnusch/nimble2nix
#!/usr/bin/env nix-shell
#!nix-shell -i python3 -p git nim nix-prefetch-git python3Packages.packaging
import argparse
import contextlib
from functools import cache
import itertools
import json
import logging
import os
import shlex
import subprocess
import sys
import tempfile
import urllib.parse
from urllib.request import urlopen
from packaging.version import parse as parse_version
def run(cmd, *args, check=True, **kwargs):
    """Run *cmd* via subprocess.run, logging it first as a shell-style line.

    `check` defaults to True so a non-zero exit raises CalledProcessError.
    """
    quoted = ' '.join(map(shlex.quote, cmd))
    logging.getLogger('run').debug('$ %s', quoted)
    return subprocess.run(cmd, *args, check=check, **kwargs)
def parse_nimble(nimble):
    """Parse the .nimble file *nimble* via `nimble dump --json` and return the dict."""
    # nimble seems to write warnings to stdout instead of stderr, so we use --silent
    p = run(['nimble', '--silent', 'dump', '--json', nimble],
            stdout=subprocess.PIPE, encoding='utf-8')
    return json.loads(p.stdout)
def find_nimble(dir):
    """Return the path of the single *.nimble file inside *dir*.

    Raises ValueError when the directory does not contain exactly one
    .nimble file. (The original used `assert`, which is stripped under
    ``python -O`` and produced no useful message.)
    """
    nimbles = [x for x in os.listdir(dir) if x.endswith('.nimble')]
    if len(nimbles) != 1:
        raise ValueError('expected exactly one .nimble file in %r, found %d'
                         % (dir, len(nimbles)))
    return os.path.join(dir, nimbles[0])
class GitCache(object):
    """ Temporary directory helper that runs git clone """

    logger = logging.getLogger('GitCache')

    def __init__(self):
        self._temp = None   # TemporaryDirectory object (live inside the context)
        self._root = None   # filesystem path of the temporary directory
        self._paths = {}    # url -> clone path, so each URL is cloned once
        self.cloned = 0     # number of actual clones (statistics, logged on exit)
        self.reused = 0     # number of avoided redundant clones

    def __enter__(self):
        self._temp = tempfile.TemporaryDirectory(prefix='nimble2nix.')
        self._root = self._temp.__enter__()
        self.logger.debug("cloning git repos to %s", self._root)
        return self

    def __exit__(self, type, value, traceback):
        # Delegate cleanup (directory removal) to the TemporaryDirectory.
        self._temp.__exit__(type, value, traceback)
        self._temp = None
        self._root = None
        self.logger.debug("cloned %d repositories, avoided %d redundant clones",
                          self.cloned, self.reused)

    def get_path(self, name, url):
        """Reserve a fresh directory name for *url*, suffixing -2, -3, ... on clashes."""
        counter = 1
        name = name.replace(os.sep, '_')
        while True:
            suffix = '' if counter == 1 else '-' + str(counter)
            path = os.path.join(self._root, name + suffix)
            if not os.path.exists(path):
                self._paths[url] = path
                return path
            counter += 1

    def clone(self, url, name):
        """Clone *url* into the cache, or return the path of an earlier clone."""
        try:
            path = self._paths[url]
            self.logger.debug('reusing %r for %r', path, url)
            self.reused += 1
            return path
        except KeyError:
            pass
        path = self.get_path(name, url)
        run(['git', 'clone', '--', url, path])
        self.cloned += 1
        return path
class Packages(object):
    """The nimble package index (packages.json), with a per-name lookup cache."""

    def __init__(self, name=None):
        if name is None:
            logging.info("downloading packages.json...")
            with urlopen('https://github.com/nim-lang/packages/raw/master/packages.json') as resp:
                self.packages = json.loads(resp.read().decode('utf-8'))
        else:
            logging.info("using %s...", name)
            with open(name, 'r', encoding='utf-8') as fp:
                self.packages = json.load(fp)
        # Per-instance memo for get(). This replaces functools.cache on the
        # method, which keys on `self` and keeps every instance alive for
        # the lifetime of the cache (flake8 B019).
        self._cache = {}

    def get(self, name):
        """Return the index entry for *name*, or a bare git-URL fallback."""
        try:
            return self._cache[name]
        except KeyError:
            pass
        result = None
        for pkg in self.packages:
            if pkg['name'] == name:
                result = pkg
                break
        if result is None:
            # Unknown name: treat it as a raw git URL, as nimble itself does.
            result = {
                'url': name,
                'method': 'git',
            }
        self._cache[name] = result
        return result
def check_version_range(version_range, version):
    """Return True when *version* (a parsed Version) satisfies *version_range*.

    *version_range* is nimble's version AST: a dict with a 'kind' key —
    'verAny', 'verIntersect' (with 'verILeft'/'verIRight' sub-ranges), or a
    comparison kind plus a 'ver' version string.
    """
    kind = version_range['kind']
    if kind == 'verAny':
        return True
    if kind == 'verIntersect':
        # Both sub-ranges must hold.
        return (check_version_range(version_range['verILeft'], version)
                and check_version_range(version_range['verIRight'], version))
    try:
        bound = parse_version(version_range['ver'])
        if kind == 'verLater':
            return version > bound
        elif kind == 'verEqLater':
            return version >= bound
        elif kind == 'verEarlier':
            return version < bound
        elif kind == 'verEqEarlier':
            return version <= bound
        elif kind == 'verEq':
            return version == bound
        raise KeyError(kind)
    except KeyError:
        logging.error("version range %r not supported", version_range)
        raise
def intersect_version_range(a, b):
    """Combine two nimble version ranges into their conjunction.

    No simplification is attempted (original TODO): the result is simply a
    'verIntersect' node whose evaluation is deferred to check_version_range.
    """
    return {
        'kind': 'verIntersect',
        'verILeft': a,
        'verIRight': b,
    }
def format_version_range(version_range):
    """Render a nimble version-range AST back into nimble's textual syntax."""
    operators = {
        'verLater': '>',
        'verEqLater': '>=',
        'verEarlier': '<',
        'verEqEarlier': '<=',
        'verTilde': '~=',
        'verCaret': '^=',
        'verEq': '',
    }
    kind = version_range['kind']
    if kind == 'verAny':
        return '*'
    if kind == 'verIntersect':
        left = format_version_range(version_range['verILeft'])
        right = format_version_range(version_range['verIRight'])
        return '%s %s' % (left, right)
    if kind == 'verSpecial':
        return version_range['spe']
    return operators[kind] + version_range['ver']
class Requirement(object):
    """A nimble requirement: package name plus version range, resolved to a
    concrete git revision via the package index and a GitCache."""

    # Requirements never looked up in the index (provided by the toolchain).
    skip = {'nim'}  # FIXME respect the nim version requirements

    @classmethod
    def from_nimble_file(cls, nimble_file, packages, git_cache):
        """Parse *nimble_file* and return its requirements (minus `skip`)."""
        reqs = []
        for req in parse_nimble(nimble_file)['requires']:
            if req['name'] not in cls.skip:
                reqs.append(cls(req, packages, git_cache))
        return reqs

    def __init__(self, req, packages, git_cache):
        self.name = req['name']
        self.version = req['ver']  # nimble version-range AST (see check_version_range)
        self._packages = packages
        self._git_cache = git_cache

    @property
    @cache
    def pkg(self):
        """The package-index entry for this requirement.

        NOTE(review): functools.cache on a method keys on `self` and keeps
        every instance alive for the cache's lifetime (flake8 B019).
        """
        return self._packages.get(self.name)

    def find_latest_rev(self):
        """Resolve the version range to a concrete commit in the cached clone.

        Returns (commit_hash, tag_or_None); the second element is set only
        when the tag must be re-created in the pruned checkout (see prefetch).
        """
        assert self.pkg['method'] == 'git', "%r not supported, currently the only supported method is 'git'" % self.pkg['method']
        git_dir = self._git_cache.clone(self.pkg['url'], self.name)
        rev = None
        add_tag = False
        kind = self.version['kind']
        if kind == 'verSpecial':
            # "#rev" in the nimble file pins a specific git revision.
            assert self.version['spe'].startswith('#')
            rev = self.version['spe'][1:]
            # TODO what about nim's `head`
            # keep the original to rev from the nimble file so we can re-add it
            # to the git repo later
            add_tag = True
        else:
            # get latest tag that satisfies the version range
            tags = run(['git', '-C', git_dir, 'tag', '--list'],
                       stdout=subprocess.PIPE, encoding='utf-8')
            tags = tags.stdout.split()
            for tag in tags:
                parsed_tag = parse_version(tag)
                if check_version_range(self.version, parsed_tag):
                    if rev is None or parsed_tag > parse_version(rev):
                        rev = tag
            if rev is None:
                # see if nimble file in HEAD has a required version
                logging.warning("%s: %s does not provide any tags, so we check if HEAD satisfies the version",
                                self.name, self.pkg['url'])
                info = parse_nimble(find_nimble(git_dir))
                if check_version_range(self.version, parse_version(info['version'])):
                    rev = 'HEAD'
        if rev is None:
            raise RuntimeError("%s: cannot satisfy %r" % (self.name, format_version_range(self.version)))
        # nix-prefetch-git does not work with remote branches and such, so we
        # convert rev to a commit hash
        try:
            commit_hash = run(['git', '-C', git_dir, 'rev-parse', rev],
                              stdout=subprocess.PIPE, encoding='utf-8').stdout.strip()
        except subprocess.CalledProcessError:
            # try again with remote branches
            commit_hash = run(['git', '-C', git_dir, 'rev-parse', 'remotes/origin/' + rev],
                              stdout=subprocess.PIPE, encoding='utf-8').stdout.strip()
        logging.info("%s: %s%s", self.name, commit_hash,
                     ' (%s)' % rev if rev != commit_hash else '')
        # do not add rev from nimble file to the git repo because it is
        # unimportant or an abbreviated commit hash
        if not add_tag or commit_hash.startswith(rev):
            rev = None
        return (commit_hash, rev)

    def prefetch(self):
        """Run nix-prefetch-git on the resolved revision and return its JSON
        info, augmented with the checkout hook that restores the tag."""
        commit_hash, rev = self.find_latest_rev()
        # re-add rev from the nimble file to the git repository,
        # nix-prefetch-git removes almost everything and otherwise nimble will
        # not find the commit
        add_tag = '' if rev is None else \
            'git -C "$dir" tag -f %s %s >&2' % (shlex.quote(rev), shlex.quote(commit_hash))
        env = dict(os.environ)
        env['NIX_PREFETCH_GIT_CHECKOUT_HOOK'] = add_tag
        p = run(['nix-prefetch-git',
                 '--fetch-submodules',
                 '--leave-dotGit',
                 '--rev', commit_hash,
                 '--url', self.pkg['url']],
                env=env, stdout=subprocess.PIPE, encoding='utf-8')
        info = json.loads(p.stdout)
        info['NIX_PREFETCH_GIT_CHECKOUT_HOOK'] = add_tag
        return info
def dot_quote(x):
    """Quote *x* as a double-quoted DOT string ID, escaping backslashes and quotes."""
    escaped = x.replace('\\', '\\\\').replace('"', '\\"')
    return '"%s"' % escaped
def collect_requirements(nimble, write_dot, url=None, *, collected=None, **kwargs):
    """Recursively collect the requirements of the .nimble file *nimble*.

    *collected* maps package URL -> Requirement and is also the return
    value; *write_dot* receives one DOT edge per dependency; **kwargs must
    carry `packages` and `git_cache` for Requirement.
    """
    # we will index requirements by their URL, whenever a requirement is
    # encountered store it, if it is already known we update its version range
    # and re-run the process on it
    # TODO thinking about it, this might add sub-requirements that a no longer
    # needed because their parent-dependencies are of another version
    if collected is None:
        collected = {}
    if url is None:
        url = 'file://' + urllib.parse.quote(nimble)
    for req in Requirement.from_nimble_file(nimble, **kwargs):
        write_dot('\t%s -> %s [label=%s];\n' % (dot_quote(url),
                                                dot_quote(req.pkg['url']),
                                                dot_quote(format_version_range(req.version))))
        inserted = collected.setdefault(req.pkg['url'], req)
        if inserted.version != req.version:
            # package URL is already known, update the version range and re-run
            inserted.version = intersect_version_range(inserted.version, req.version)
            logging.info("common requirement %s with %r",
                         req.pkg['url'], format_version_range(inserted.version))
        del req
        inserted.prefetched = inserted.prefetch()
        collect_requirements(find_nimble(inserted.prefetched['path']),
                             url=inserted.pkg['url'], write_dot=write_dot,
                             collected=collected, **kwargs)
    return collected
def nix_dump(x):
    """Serialize a Python scalar or list as a Nix expression literal.

    JSON syntax for scalars coincides with Nix syntax (including lowercase
    true/false); lists are rendered space-separated inside brackets.
    """
    if isinstance(x, (bool, int, float, str)):
        return json.dumps(x)
    if isinstance(x, list):
        items = [nix_dump(item) for item in x]
        return ' '.join(['['] + items + [']'])
    raise TypeError('cannot convert %r to a nix value' % x)
def to_nimble_nix(requirements, fp):
    """Write a nix expression producing a nimble packages.json for
    *requirements* (the dict returned by collect_requirements) to *fp*."""
    logging.info("creating nimble.nix...")
    fp.write('''\
{ fetchgit, writeText }:
let
  packages = [
''')
    for req in requirements.values():
        # Start from the index entry; fill license/description from the
        # .nimble file when the index does not provide them.
        pkg = {'tags': []}
        pkg.update(req.pkg)
        pkg['name'] = req.name
        if 'license' not in pkg or 'description' not in pkg:
            info = parse_nimble(find_nimble(req.prefetched['path']))
            pkg.setdefault('license', info['license'])
            pkg.setdefault('description', info['desc'])
        fp.write(' {\n')
        for k, v in pkg.items():
            fp.write(' %s = ' % k)
            if k == 'url':
                # The URL is replaced by a fetchgit store path so nimble can
                # resolve the package offline from the nix store.
                fp.write('''"file://" + (fetchgit {
      url = %s;
      rev = %s;
      sha256 = %s;
      fetchSubmodules = true;
      leaveDotGit = true;
    })''' % (nix_dump(req.prefetched['url']),
             nix_dump(req.prefetched['rev']),
             nix_dump(req.prefetched['sha256'])))
                if req.prefetched['NIX_PREFETCH_GIT_CHECKOUT_HOOK']:
                    fp.write('''.overrideAttrs ({ ... }: {
      # re-add rev from the nimble file to the git repository,
      # nix-prefetch-git removes almost everything and otherwise nimble will
      # not find the commit
      NIX_PREFETCH_GIT_CHECKOUT_HOOK = %s;
    })''' % nix_dump(req.prefetched['NIX_PREFETCH_GIT_CHECKOUT_HOOK']))
            else:
                fp.write(nix_dump(v))
            fp.write(';\n')
        fp.write(' }\n')
    fp.write(''' ];
in
writeText "packages.json" (builtins.toJSON packages)
''')
def main(argv=None):
    """Command-line entry point: resolve requirements and emit the .nix file.

    NOTE(review): the *argv* parameter is accepted but never passed to
    parse_args(), so the real sys.argv is always used — confirm intent.
    """
    p = argparse.ArgumentParser(description="Collect nimble requirements",
            epilog="This tool creates a nix derivation that creates a nimble "
            "package.json. The created package.json includes the requirements "
            "of the given nimble files recursively with their `url` pointing "
            "to the nix store. By creating a symlink from "
            "$nimbleDir/packages_global.json to the created package.json "
            "nimble can fetch the requirements when sandboxed. Because only "
            "one version of a requirement is supported this may not always be "
            "able to resolve the dependencies.")
    p.add_argument('-o', '--output',
                   required=True,
                   help="Nix derivation that creates the package.json")
    p.add_argument('-P', '--packages',
                   help="use custom packages.json instead of downloading")
    p.add_argument('--dot',
                   help="output DOT graph of the requirements")
    p.add_argument('-v', '--verbose',
                   action='store_const',
                   default=logging.INFO,
                   const=logging.DEBUG,
                   help="verbose logging")
    p.add_argument('nimble_file', nargs='+')
    args = p.parse_args()
    # Colorize (green) the log lines when stderr is a terminal.
    logging.basicConfig(format=('\x1b[32m%s\x1b[39m' if sys.stderr.isatty() else '%s')
                               % '[%(asctime)s] %(levelname)-8s %(name)-8s %(message)s',
                        stream=sys.stderr, level=args.verbose)
    with contextlib.ExitStack() as stack:
        packages = Packages(args.packages)
        git_cache = stack.enter_context(GitCache())
        # write DOT graph
        if args.dot is None:
            write_dot = lambda x: None
        else:
            fp = stack.enter_context(open(args.dot, 'w', encoding='utf-8', buffering=1))
            fp.write('''digraph {
	node [fontname=monospace];
	edge [fontname=monospace];
''')
            stack.callback(fp.write, '}\n')
            write_dot = fp.write
            logging.debug("writing dependency graph to %r...", args.dot)
        collected = {}
        for nimble in args.nimble_file:
            collect_requirements(nimble, write_dot, collected=collected,
                                 packages=packages, git_cache=git_cache)
        with open(args.output, 'w', encoding='utf-8') as fp:
            to_nimble_nix(collected, fp)
| StarcoderdataPython |
4804714 | <reponame>jungyoonoh/baekjoon-1
# Authored by : cieske
# Co-authored by : -
# Link : http://boj.kr/4c2449f200e440f39d4ba8e1e71601b6
import sys
def input():
return sys.stdin.readline().rstrip()
n, k = map(int, input().split())
coin = set([int(input()) for _ in range(n)]) #중복 동전 제거
dp = [0]*(k+1)
for i in range(1, k+1):
possible = []
for c in coin:
if i-c >= 0 and dp[i-c] >= 0: # i-c원 경우에 c원 동전을 추가해서 i원을 만들 수 있는 경우
possible.append(dp[i-c])
if possible:
dp[i] = min(possible) + 1 # optimal 값 + 1
else:
dp[i] = -1 #불가능!
print(dp[k])
| StarcoderdataPython |
250854 | from __future__ import absolute_import
from .. import idiokit
from ..xmlcore import Element
from .jid import JID
DISCO_INFO_NS = "http://jabber.org/protocol/disco#info"
DISCO_ITEMS_NS = "http://jabber.org/protocol/disco#items"
class DiscoItem(object):
    """A single item from a disco#items response (XEP-0030)."""

    def __init__(self, jid, node=None, name=None):
        # Normalize to a JID instance; node and name are optional metadata.
        self.jid = JID(jid)
        self.node = node
        self.name = name
class DiscoIdentity(object):
    """A category/type identity from a disco#info response (XEP-0030)."""

    def __init__(self, category, type, name=None):
        self.category = category
        self.type = type
        self.name = name
class DiscoInfo(object):
    """The identities and features advertised by an entity (disco#info)."""

    def __init__(self, identities, features):
        self.identities = set(identities)
        self.features = set(features)
class Disco(object):
    """XMPP service discovery (XEP-0030): answers incoming disco#info /
    disco#items IQs for this client and queries remote entities."""

    def __init__(self, xmpp):
        self.xmpp = xmpp
        self.features = set()     # features advertised for the root node
        self.identities = set()   # identities advertised for the root node
        self.nodes = dict()       # node name (or None for the root) -> handler
        self.xmpp.core.iq_handler(self._info_iq, "query", DISCO_INFO_NS)
        self.xmpp.core.iq_handler(self._items_iq, "query", DISCO_ITEMS_NS)
        self.add_feature(DISCO_INFO_NS)
        self.add_node(None, self._node_handler)
        self.add_identity("client", "bot")

    def _node_handler(self):
        """Root-node handler: our features, identities and one item per node."""
        items = [DiscoItem(self.xmpp.jid, node) for node in self.nodes]
        return self.features, self.identities, items

    def _items_iq(self, element, payload):
        """Answer an incoming disco#items IQ; returns False if the node is unknown."""
        node = payload.get_attr("node", None)
        handler = self.nodes.get(node, None)
        if handler is None:
            return False
        _, _, items = handler()
        result = Element("query", xmlns=DISCO_ITEMS_NS)
        for item in items:
            item_element = Element("item", jid=item.jid)
            if item.node is not None:
                item_element.set_attr("node", item.node)
            if item.name is not None:
                item_element.set_attr("name", item.name)
            result.add(item_element)
        self.xmpp.core.iq_result(element, result)
        return True

    def _info_iq(self, element, payload):
        """Answer an incoming disco#info IQ; returns False if the node is unknown."""
        # NOTE(review): the next two assignments are dead — both names are
        # unconditionally rebound from handler() below before any use.
        identities = self.identities
        features = self.features
        node = payload.get_attr("node", None)
        handler = self.nodes.get(node, None)
        if handler is None:
            return False
        features, identities, _ = handler()
        result = Element("query", xmlns=DISCO_INFO_NS)
        for identity in identities:
            id_element = Element("identity")
            id_element.set_attr("category", identity.category)
            id_element.set_attr("type", identity.type)
            if identity.name is not None:
                id_element.set_attr("name", identity.name)
            result.add(id_element)
        for feature in features:
            result.add(Element("feature", var=feature))
        self.xmpp.core.iq_result(element, result)
        return True

    def add_node(self, node, handler):
        """Register *handler* for *node*; it must return (features, identities, items)."""
        self.nodes[node] = handler

    def add_feature(self, feature):
        """Advertise a feature (namespace string) on the root node."""
        self.features.add(feature)

    def add_identity(self, category, type, name=None):
        """Advertise an identity on the root node."""
        identity = DiscoIdentity(category, type, name)
        self.identities.add(identity)

    @idiokit.stream
    def info(self, jid, node=None):
        """Query a remote entity's disco#info; stops with a DiscoInfo."""
        query = Element("query", xmlns=DISCO_INFO_NS)
        if node is not None:
            query.set_attr("node", node)
        elements = yield self.xmpp.core.iq_get(query, to=jid)
        query = elements.children("query", DISCO_INFO_NS)
        identities = list()
        for identity in query.children("identity").with_attrs("category", "type"):
            category = identity.get_attr("category")
            type = identity.get_attr("type")
            name = identity.get_attr("name", None)
            identities.append(DiscoIdentity(category, type, name))
        features = list()
        for feature in query.children("feature").with_attrs("var"):
            features.append(feature.get_attr("var"))
        idiokit.stop(DiscoInfo(identities, features))

    @idiokit.stream
    def items(self, jid, node=None):
        """Query a remote entity's disco#items; stops with a list of DiscoItem."""
        query = Element("query", xmlns=DISCO_ITEMS_NS)
        if node is not None:
            query.set_attr("node", node)
        elements = yield self.xmpp.core.iq_get(query, to=jid)
        query = elements.children("query", DISCO_ITEMS_NS)
        items = list()
        for item in query.children("item").with_attrs("jid"):
            jid = item.get_attr("jid")
            node = item.get_attr("node", None)
            name = item.get_attr("name", None)
            items.append(DiscoItem(jid, node, name))
        idiokit.stop(items)
| StarcoderdataPython |
9717427 | <filename>env_check.py
# Script Name : env_check.py
# Author : <NAME>
# Created : 14th May 2012
# Last Modified : 14 February 2016
# Version : 1.0.1
# Modifications : 1.0.1 - Tidy up comments and syntax
# Description : This script will check to see if all of the environment variables I require are set
import os

# Read the list of required environment-variable names from the config file
# ($my_config/env_check.conf, one name per line) and report each one's
# current value.

confdir = os.getenv("my_config")  # directory that holds the config file
if confdir is None:
    # Fail early with a clear message instead of the opaque TypeError that
    # os.path.join(None, ...) would otherwise raise.
    raise SystemExit("The 'my_config' environment variable is not set")

conffile = 'env_check.conf'  # Name of the config file
conffilename = os.path.join(confdir, conffile)  # Full path to the config file

# Use a context manager so the file is always closed (the original left the
# handle open).
with open(conffilename) as config:
    for env_check in config:
        env_check = env_check.strip()  # drop trailing newline/whitespace
        print('[{}]'.format(env_check))  # Format the output in square brackets
        newenv = os.getenv(env_check)  # current value in this process's environment
        if newenv is None:
            print(env_check, 'is not set')
        else:
            print('Current Setting for {}={}\n'.format(env_check, newenv))
| StarcoderdataPython |
1923835 | <gh_stars>0
from django.db import models
class Color(models.Model):
    """A color that an apple cultivar can have."""

    name = models.CharField('色', max_length=10)

    def __str__(self):
        # Human-readable label for the Django admin and shell instead of
        # the default "Color object (pk)".
        return self.name
class Apple(models.Model):
    """An apple cultivar and its basic catalogue data."""

    name = models.CharField('品種', max_length=50)
    color = models.ForeignKey('sort_app.Color', on_delete=models.CASCADE)
    breeding = models.CharField('交配', max_length=100)
    season = models.IntegerField('旬')  # presumably a month number — TODO confirm
    born_in_nagano = models.BooleanField('長野県生まれ')

    def __str__(self):
        # Human-readable label for the Django admin and shell instead of
        # the default "Apple object (pk)".
        return self.name
| StarcoderdataPython |
4892380 | <reponame>badouralix/adventofcode-2018<gh_stars>10-100
from tool.runners.python import SubmissionPy
class BebertSubmission(SubmissionPy):
    def run(self, s):
        """Advent of Code 2018 day 7 part 1.

        Parses lines of the form "Step X must be finished before step Y
        can begin." and returns the alphabetically-first valid execution
        order of all steps as a single string.
        """
        # Words 1 and 7 of each line are the two step letters.
        edges = []
        for line in s.splitlines():
            words = line.split(" ")
            edges.append((words[1], words[7]))
        # Map each step to the list of steps that must run before it.
        prereqs = {}
        for before, after in edges:
            prereqs.setdefault(before, [])
            prereqs.setdefault(after, []).append(before)
        # Sorting the (step, prereqs) pairs once gives the alphabetical
        # tie-break for free in the scan below.
        ordered_tasks = sorted(prereqs.items())
        completed = set()
        order = []
        while len(completed) != len(ordered_tasks):
            # Pick the first ready step (all prerequisites done).
            for step, reqs in ordered_tasks:
                if step not in completed and all(r in completed for r in reqs):
                    completed.add(step)
                    order.append(step)
                    break
        return "".join(order)
| StarcoderdataPython |
5029149 | import unittest
from mock import MagicMock, call
from malcolm.core import Context
from malcolm.modules.ADPandABlocks.parts import PandABlocksDriverPart
class TestPandaABoxDriverPart(unittest.TestCase):
    """Unit tests for PandABlocksDriverPart with fully mocked child blocks."""

    def setUp(self):
        # Mock the child block and context so no real PandA process is
        # needed; block_view() always hands back the mocked child.
        self.child = MagicMock()
        self.context = MagicMock(spec=Context)
        self.context.block_view.return_value = self.child
        # Minimal fake params the part reads in its constructor.
        self.params = MagicMock()
        self.params.name = "drv"
        self.params.mri = "mri"
        self.params.readoutTime = 0.002
        self.o = PandABlocksDriverPart(self.params)
        # Exhaust the (presumably lazy) attribute-model factory so the
        # part is fully initialised -- TODO confirm it is a generator.
        list(self.o.create_attribute_models())

    def test_abort(self):
        # abort() should look up the child block by mri and stop it.
        self.o.abort(self.context)
        assert self.context.mock_calls == [
            call.block_view("mri"),
            call.block_view().stop()]
| StarcoderdataPython |
1791995 | from core import misc
def load_modules():
    """Load the optional private module package, then the fixed public set.

    The private package is best-effort: deployments that ship without
    ``private.modules`` are valid, so a failed import is silently ignored.
    """
    try:
        misc.loader.load_package('private.modules')
    except ImportError:
        # ModuleNotFoundError is a subclass of ImportError, so one clause
        # covers both "package absent" and "import failed".
        pass
    misc.loader.load_packages(f"modules.{item}" for item in [
        'base',  # User management and base Middlewares
        'nologi',  # Plotim plotim
        'get',  # GETs in chats
        'captcha_button',  # Captcha for new joined users
        'voteban',  # Voteban in chats
        'admin',  # Small admin commands
        'nsfw',  # Checking media for nsfw
        'amd_builds',  # AMD Builds from reddit
        'tail',  # Handle all unhandled actions
    ])
| StarcoderdataPython |
4886330 | <reponame>levtelyatnikov/radiomixer
from radiomixer.sampler.segment.segment_samplers import EqualSegmentSampler
from radiomixer.sampler.segment.segment import SegmentGenerator
from radiomixer.transforms.transform import TransformType
class EqualSegmentSamplerWrapper(SegmentGenerator):
    """SegmentGenerator adapter around EqualSegmentSampler."""

    def __init__(self, configs):
        super().__init__(TransformType.EQUALSEGMENTSAMPLER)
        self.SegmentSampler = EqualSegmentSampler(configs)

    def _sampler(self, signals: list) -> list:
        """Attach an equal-length segment to every signal in *signals*.

        Each signal's ``parameters`` dict gains a ``'segment'`` entry (the
        sampled (start, end) pair) and a ``'segment_length'`` entry
        (end - start). The updated signals are returned as a new list.
        """
        segments = self.SegmentSampler.process(signals)
        updated = []
        for sig, seg in zip(signals, segments):
            start, end = seg[0], seg[1]
            sig.parameters['segment'] = seg
            sig.parameters['segment_length'] = end - start
            updated.append(sig)
        return updated
6700868 | <filename>specifications/migrations/0002_categoryspecifications.py
# Generated by Django 3.1.7 on 2021-05-28 12:51
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Creates CategorySpecifications, a join table linking a main.Category
    to a specifications.SpecificationPreset."""

    dependencies = [
        ('main', '0002_auto_20210524_1449'),
        ('specifications', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='CategorySpecifications',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Deleting a category or preset removes the link rows too.
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='main.category')),
                ('spec_name', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='specifications.specificationpreset')),
            ],
        ),
    ]
| StarcoderdataPython |
322778 | <filename>word2vec/nlp/tokenizer.py<gh_stars>0
import re
UNK = "<UNK"
def naive_tokenizer(s):
    """Split *s* into whitespace-delimited tokens.

    Uses ``\\s+`` so that runs of consecutive whitespace count as a single
    delimiter; the previous ``\\s`` pattern produced empty-string tokens
    whenever two whitespace characters were adjacent.
    """
    return re.split(r"\s+", s.strip())
| StarcoderdataPython |
1746860 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'x1ang.li'
import logging, os, argparse, textwrap
import time
import chardet
# Default configuration will take effect when corresponding input args are missing.
# Feel free to change this for your convenience.
DEFAULT_CONF = {
    # Only those files ending with extensions in this list will be scanned or converted.
    'exts' : ['txt'],
    'overwrite' : False,  # keep a timestamped .bak copy of each converted file by default
    'add_BOM' : False,  # write plain utf-8 (no BOM signature) by default
    'convert_UTF' : False,  # skip files that are already UTF-encoded by default
    'confi_thres' : 0.8,  # minimum chardet confidence to trust a detection
}
# Minimum confidence threshold: only encoding-detection results returned by
# chardet that score above this level are accepted.
# See https://github.com/x1angli/convert2utf/issues/4 for further details
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
log = logging.getLogger(__name__)
class Convert2Utf8:
    """Re-encodes text files to UTF-8 in place.

    Walks a directory (or takes a single file), detects each file's
    encoding with chardet, and rewrites matching files in the target
    encoding from ``args.target_encoding`` ('utf-8' or 'utf-8-sig'),
    optionally keeping timestamped ``.bak`` copies of the originals.
    """

    def __init__(self, args):
        # args: parsed argparse namespace providing exts, overwrite,
        # convert_utf, target_encoding and root.
        self.args = args

    def walk_dir(self, dirname):
        """Recursively convert every file under *dirname* whose extension
        matches ``args.exts`` (or everything when 'all' is listed)."""
        for root, dirs, files in os.walk(dirname):
            for name in files:
                extension = os.path.splitext(name)[1][1:].strip().lower()
                # On linux there is a newline at the end which will cause the match to fail, so we just 'strip()' the '\n'
                # Also, add 'lower()' to ensure matching
                if ( ('all' in self.args.exts) or (extension in self.args.exts)):
                    fullname = os.path.join(root, name)
                    try:
                        self.convert_file(fullname)
                    except IOError:
                        log.error("Unable to read or write the file: %s. Please check the file's permission.", fullname)
                    except KeyboardInterrupt:
                        log.warning("Interrupted by keyboard (e.g. Ctrl+C)")
                        exit()
                    # else:
                    #     log.error("Unable to process the file: %s. Please check.", fullname)
                    #     traceback.print_stack()

    def convert_file(self, filename):
        """Detect *filename*'s encoding and rewrite it as UTF-8 in place.

        Skips empty files, files whose encoding cannot be detected with
        enough confidence, ASCII files, and (unless ``convert_utf`` is set)
        files that are already UTF-encoded. File access/creation times are
        preserved on the rewritten file.
        """
        with open(filename, 'rb') as f: # read under the binary mode
            bytedata = f.read()
        if len(bytedata) == 0:
            log.info("Skipped empty file %s", filename)
            return
        chr_res = chardet.detect(bytedata)
        # NOTE(review): the threshold is read from DEFAULT_CONF rather than
        # self.args -- confirm whether it was meant to be configurable.
        if chr_res['encoding'] == None or chr_res['confidence'] < DEFAULT_CONF['confi_thres']:
            log.warning("Ignoring %s, since its encoding is unable to detect.", filename)
            return
        src_enc = chr_res['encoding'].lower()
        log.debug("Scanned %s, whose encoding is %s ", filename, src_enc)
        if (src_enc == 'ascii'):
            # ASCII is already valid UTF-8; nothing to do.
            log.info("Skipped %s, whose encoding is %s", filename, src_enc)
            return
        if (not self.args.convert_utf) and src_enc.startswith('utf'):
            log.info("Skipped %s, whose encoding is %s", filename, src_enc)
            return
        # chardet reports all GB-based encodings as 'gb2312', so decoding can
        # fail when the file contains certain special characters. To be more
        # tolerant we upgrade the codec to 'gb18030', a superset of gb2312.
        if src_enc.lower() == 'gb2312':
            src_enc = 'gb18030'
        try:
            strdata = bytedata.decode(src_enc)
        except UnicodeDecodeError as e:
            log.error("Unicode error for file %s", filename)
            print(e)
            return
        # preserving file time information (modification time and access time)
        src_stat = os.stat(filename)
        # if the 'overwrite' flag is 'False', we would make a backup of the original text file.
        if not self.args.overwrite:
            backup_name = filename + '.' + str(int(round(time.time() * 1000))) + '.bak'
            log.info("Renaming %s to %s", filename, backup_name)
            os.rename(filename, backup_name)
        tgt_enc = self.args.target_encoding
        log.debug("Writing the file: %s in %s", filename, tgt_enc)
        with open(filename, 'wb') as f: # write under the binary mode
            f.write(strdata.encode(tgt_enc))
        log.info("Converted the file: %s from %s to %s", filename, src_enc, tgt_enc)
        # setting the new file's time to the old file
        os.utime(filename, times = (src_stat.st_atime, src_stat.st_ctime))
    # end of def convert_file(self, filename)

    def run(self):
        """Entry point: dispatch to walk_dir() or convert_file() depending
        on whether ``args.root`` is a directory or a single file."""
        root = self.args.root
        if not os.path.exists(root):
            log.error("The file specified %s is neither a directory nor a regular file", root)
            return
        log.info("Start working now!")
        if os.path.isdir(root):
            log.info("The root is: %s. ", root)
            log.info("Files with these extension names will be inspected: %s", self.args.exts)
            self.walk_dir(root)
        else:
            log.info("Wow, only a single file will be processed: %s", root)
            self.convert_file(root)
        log.info("Finished all.")
    # end of def run(self, root):
def clean_backups(dirname):
    """Delete every ``.bak`` file created within the last 40 minutes
    anywhere under *dirname*. Does nothing (but logs an error) when
    *dirname* is not a directory."""
    if not os.path.isdir(dirname):
        log.error("The file specified %s is not a directory ", dirname)
        return
    # Only backups newer than this creation-time cutoff are removed.
    cutoff = time.time() - 40 * 60
    log.info("Removing all newly-created .bak files under %s", dirname)
    for folder, _subdirs, filenames in os.walk(dirname):
        for filename in filenames:
            if os.path.splitext(filename)[1][1:] != 'bak':
                continue
            path = os.path.join(folder, filename)
            if os.path.getctime(path) > cutoff:
                os.remove(path)
                log.info("Removed the file: %s", path)
def cli():
    """Command-line entry point.

    Parses the arguments and either runs the converter, or -- with the
    --cleanbak flag -- removes recently created backup files instead.
    """
    parser = argparse.ArgumentParser(
        prog='cvt2utf8',
        description="A tool that converts non-UTF-encoded text files UTF-8 encoded files.",
        epilog="You can use this tool to remove BOM from .php source code files, or convert other target_encoding into UTF-8")
    # Positional: file or directory to process.
    parser.add_argument(
        'root',
        metavar = "filename",
        help = textwrap.dedent('''\
            the path pointing to the file or directory.
            If it's a directory, files contained in it with specified extensions will be converted to UTF-8.
            Otherwise, if it's a file, only that file will be converted to UTF-8.''')
    )
    parser.add_argument(
        '-e',
        '--exts',
        nargs = '+', # '+'. Just like '*', all command-line args present are gathered into a list.
        default = DEFAULT_CONF['exts'],
        help = "the list of file extensions. Only those files ending with extensions in this list will be converted.",
    )
    parser.add_argument(
        '-o',
        '--overwrite',
        action = 'store_true',
        default = DEFAULT_CONF['overwrite'],
        help = "Danger! If you turn this switch on, it would directly overwrite existing file without creating any backups.",
    )
    parser.add_argument(
        '-u',
        '--cvtutf',
        action = 'store_true',
        dest = 'convert_utf',
        default = DEFAULT_CONF['convert_UTF'],
        help = "By default, we will skip files whose encodings are UTF (including UTF-8 and UTF-16), and BOM headers in these files will remain unchanged. "
               "But, if you want to change BOM headers for these files, you could utilize this option to change their signatures.",
    )
    parser.add_argument(
        '-b',
        '--addbom',
        action = 'store_true',
        dest = 'add_bom',
        default = DEFAULT_CONF['add_BOM'],
        help = "If this command line argument is missing, we convert files to UTF-8 without BOM (i.e. the target encoding would be just 'utf-8'). "
               "But with this flag, we would add BOM in encoded text files (i.e. the target encoding would be 'utf-8-sig').",
    )
    parser.add_argument(
        '-c',
        '--cleanbak',
        action = 'store_true',
        dest = 'clean_bak',
        default = False,
        help = textwrap.dedent('''Clean all .bak files generated within last 40 minutes.
            When enabled, no files will be converted to UTF-8. Use this flag with extra caution! '''),
    )
    args = parser.parse_args()
    if args.clean_bak:
        clean_backups(args.root)
    else:
        # 'utf-8-sig' writes a BOM signature; plain 'utf-8' does not.
        args.target_encoding = 'utf-8-sig' if args.add_bom else 'utf-8'
        cvt2utf8 = Convert2Utf8(args)
        cvt2utf8.run()
if __name__ == '__main__':
    cli()  # script entry point
| StarcoderdataPython |
6545795 | """
VerseBot for Slack
By <NAME>
Adapted from VerseBot for Reddit by <NAME>
Copyright (c) 2016 <NAME> (MIT License)
"""
import asyncio
import websockets
import json
import signal
import logging
import time
import re
from slacker import Slacker
import sys
import threading
import books
import webparser
from regex import find_verses
from response import Response
from verse import Verse
from webparser import WebParser
# Seconds to wait for an incoming RTM message before sending a keep-alive ping
TIMEOUT = 3
class VerseBot(threading.Thread):
    """Background thread connecting one Slack workspace to the RTM API and
    replying to messages that mention the bot with the Bible verses they
    reference."""

    def __init__(self, token):
        """Create a bot authenticated by the given Slack API *token*."""
        super(VerseBot, self).__init__()
        logging.getLogger('requests').setLevel(logging.WARNING)
        self.log = logging.getLogger('versebot')
        self.log.addHandler(logging.FileHandler('versebot_log.txt'))
        self.parser = WebParser()
        self.slack = Slacker(token)
        self.next_id = 1                # id for the next outgoing RTM message
        self.unacked_messages = set()   # ids sent but not yet acknowledged
        self.user_id = self._get_user_id()

    def _get_user_id(self):
        """Return this bot's own Slack user id (used to detect mentions)."""
        data = self.slack.auth.test()
        if data.successful:
            return data.body['user_id']
        else:
            raise Exception

    async def connect(self):
        """Open the RTM websocket and reconnect forever on dropped links."""
        rtm_response = self.slack.rtm.start()
        if rtm_response.successful:
            url = rtm_response.body['url']
            while True:
                try:
                    await self.listen(url)
                except websockets.ConnectionClosed:
                    # Normal link drop -- simply reconnect.
                    pass
                except Exception as e:
                    self.log.error('caught ' + str(type(e)) + ':' + str(e))
                    pass
        else:
            self.log.error('Failed to connect to rtm')

    def run(self):
        """Thread entry point: run the bot's own asyncio event loop."""
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        asyncio.ensure_future(self.connect())
        loop.run_forever()

    async def listen(self, url):
        """Consume RTM events, answering mentions; ping when idle."""
        async with websockets.connect(url) as websocket:
            while True:
                try:
                    msg = await asyncio.wait_for(websocket.recv(), TIMEOUT)
                    msg = json.loads(msg)
                    if msg.get('type', '') == 'message' and \
                            msg.get('subtype', '') != 'bot_message':
                        if msg.get('text', '').find('@' + self.user_id) != -1:
                            await self.send_verses_response(msg, websocket)
                            # Throttle without stalling the event loop --
                            # the previous time.sleep(1) blocked every
                            # coroutine sharing this loop for a second.
                            await asyncio.sleep(1)
                    elif msg.get('type', '') == 'error':
                        self.log.error(
                            'error message received: %s' % (json.dumps(msg)))
                        return
                    # TODO handle rtm response. check if ok
                    else:
                        pass
                except asyncio.TimeoutError:
                    # No traffic for TIMEOUT seconds -- keep the link alive.
                    await self.ping(websocket)

    async def send_verses_response(self, msg, websocket):
        """Parse one mention and reply with the verses it references."""
        user = msg['user']
        channel = msg['channel']
        body = msg['text']
        # A quoted phrase triggers a Bible Gateway search instead of a
        # direct verse lookup.
        match = re.search(r'["“].*"', body)
        if match is not None:
            reference = self.search(match.group(0))
            if reference is None:
                pass
            else:
                body = '[' + reference + ']'
        verses = find_verses(body)
        await self.send_verses(body, verses, user, channel, websocket)

    async def ping(self, websocket):
        """Send an RTM ping and wait for the corresponding pong."""
        ping_message = json.dumps({"id": self.next_id, "type": "ping",
                                   "time": time.time()})
        self.next_id += 1
        await websocket.send(ping_message)
        pong = await websocket.recv()
        # eventually validate or something here

    async def send_message(self, text, channel, websocket):
        """Send a chat message and remember its id until acknowledged."""
        msg_id = self.next_id
        self.next_id += 1
        data = {'id': msg_id, 'type': 'message', 'channel': channel,
                'text': text}
        # Track the id that was actually sent; the previous code recorded
        # the *next* id, so acknowledgements could never be matched up.
        self.unacked_messages.add(msg_id)
        await websocket.send(json.dumps(data))

    async def send_verses(self, body, verses, user, channel, websocket):
        """Build a Response from the parsed verses and post it to *channel*."""
        if verses is not None:
            response = Response(body, self.parser)
            for verse in verses:
                book_name = books.get_book(verse[0])
                if book_name is not None:
                    v = Verse(book_name,
                              verse[1],
                              verse[3],
                              user,
                              channel,
                              verse[2])
                    if not response.is_duplicate_verse(v):
                        response.add_verse(v)
            if len(response.verse_list) != 0:
                message_response = response.construct_message()
                if message_response is not None:
                    await self.send_message(message_response, channel,
                                            websocket)
        else:
            pass

    def search(self, search_terms):
        """Search Bible Gateway and return a canonical verse reference."""
        return webparser.search_bible_gateway(search_terms)
def handle_sigint(sig, frame):
    """SIGINT handler: forward the signal to every bot thread, then exit.

    Relies on the module-level ``threads`` list populated in __main__.
    """
    for thread in threads:
        signal.pthread_kill(thread.ident, signal.SIGINT)
    sys.exit(0)
threads = []  # one VerseBot thread per token in tokens.dat
if __name__ == '__main__':
    signal.signal(signal.SIGINT, handle_sigint)
    # tokens.dat holds one Slack API token per line.
    with open('tokens.dat') as tokens:
        for token in tokens.readlines():
            token = token.strip()
            vb = VerseBot(token)
            vb.start()
            threads.append(vb)
    for t in threads:
        t.join()
| StarcoderdataPython |
5049627 | <gh_stars>1-10
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    """LeetCode 637 -- Average of Levels in Binary Tree."""

    def averageOfLevels(self, root):
        """Return the average node value of each level, top level first.

        :type root: TreeNode
        :rtype: List[float]
        """
        ans = []
        self.dfs(ans, 1, root)
        # Use a comprehension instead of map(): on Python 3, map() returns
        # a lazy iterator, not the List[float] the contract promises.
        return [sum(level) * 1.0 / len(level) for level in ans]

    def dfs(self, ans, depth, node):
        """Collect node values into ans[depth-1], one bucket per level."""
        if not node:
            return
        if len(ans) < depth:
            # First node seen on this level -- start its bucket.
            ans.append([node.val])
        else:
            ans[depth - 1].append(node.val)
        self.dfs(ans, depth + 1, node.left)
        self.dfs(ans, depth + 1, node.right)
| StarcoderdataPython |
78232 | <filename>non_semantic_speech_benchmark/eval_embedding/finetune/models_test.py<gh_stars>1-10
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for non_semantic_speech_benchmark.eval_embedding.finetune.models."""
from absl.testing import absltest
from absl.testing import parameterized
import tensorflow as tf
from non_semantic_speech_benchmark.eval_embedding.finetune import models
class ModelTest(parameterized.TestCase):
    """Smoke tests for models.get_keras_model across cluster/alpha configs."""

    @parameterized.parameters(
        {'num_clusters': 0, 'alpha_init': 0},
        {'num_clusters': 4, 'alpha_init': 0},
        {'num_clusters': 0, 'alpha_init': 1.0},
    )
    def test_basic_model(self, num_clusters, alpha_init):
        # Build the model and run a zero batch through it; only the output
        # shape (batch=4, num_classes=5) is checked.
        m = models.get_keras_model(num_classes=5, input_length=16000,
                                   num_clusters=num_clusters, alpha_init=alpha_init)
        o = m(tf.zeros([4, 16000], dtype=tf.float32))
        self.assertEqual(o.shape, (4, 5))
if __name__ == '__main__':
    absltest.main()  # run the tests when executed directly
| StarcoderdataPython |
6632984 | from __future__ import division, print_function, absolute_import
from numpy.testing import assert_equal, assert_
from scipy.misc import pade, logsumexp, face, ascent
from scipy.special import logsumexp as sc_logsumexp
from scipy.interpolate import pade as i_pade
def test_logsumexp():
    """scipy.misc.logsumexp must be the same object as scipy.special.logsumexp."""
    # make sure logsumexp can be imported from either scipy.misc or
    # scipy.special
    assert_(logsumexp is sc_logsumexp)
def test_pade():
    """scipy.misc.pade must alias scipy.interpolate.pade."""
    assert_(pade is i_pade)
def test_face():
    """The sample 'face' image has the expected 768x1024 RGB shape."""
    assert_equal(face().shape, (768, 1024, 3))
def test_ascent():
    """The sample 'ascent' image is a 512x512 grayscale array."""
    assert_equal(ascent().shape, (512, 512))
| StarcoderdataPython |
3254104 | <reponame>migvill5/pharmanager<filename>src/inventory/migrations/0009_auto_20210524_1601.py
# Generated by Django 3.1.7 on 2021-05-24 21:01
from django.db import migrations
class Migration(migrations.Migration):
    """Removes the 'description' field from the Laboratory and Supplier models."""

    dependencies = [
        ('inventory', '0008_auto_20210524_1600'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='laboratory',
            name='description',
        ),
        migrations.RemoveField(
            model_name='supplier',
            name='description',
        ),
    ]
| StarcoderdataPython |
5057519 | <gh_stars>10-100
# -*- coding: utf-8 -*-
from .base import XmlServiceTransport
from .database import DatabaseTransport
from .direct import DirectTransport
from .http import HttpTransport
from .ssh import SshTransport
# Public transport classes re-exported by this package.
__all__ = [
    'XmlServiceTransport',
    'DatabaseTransport',
    'DirectTransport',
    'HttpTransport',
    'SshTransport',
]
| StarcoderdataPython |
229928 | __author__ = 'sharvey'
| StarcoderdataPython |
167014 | <gh_stars>1-10
from __future__ import unicode_literals
import frappe
def execute():
    """Patch: rename the 'Account Type'/'Account Subtype' DocTypes to their
    'Bank Account ...' equivalents, then reload the Bank Account doctype so
    the schema picks up the renamed links."""
    frappe.rename_doc('DocType', 'Account Type', 'Bank Account Type', force=True)
    frappe.rename_doc('DocType', 'Account Subtype', 'Bank Account Subtype', force=True)
    frappe.reload_doc('accounts', 'doctype', 'bank_account')
5171287 | from src import OpCode
from src import Statement
from src import Type
from src import VoidOp
from utils.patched_dataclass import dataclass
@dataclass
class OpExtension(OpCode, VoidOp):
    """SPIR-V OpExtension: declares use of a named extension; yields no result (VoidOp)."""
    # Extension name string, e.g. "SPV_KHR_...".
    name: str = None
@dataclass
class OpExtInstImport(OpCode):
    """SPIR-V OpExtInstImport: imports an extended instruction set by name."""
    # Instruction-set name, e.g. "GLSL.std.450".
    name: str = None
@dataclass
class OpExtInst(Statement):
    """SPIR-V OpExtInst: invoke *instruction* from an imported extended
    instruction set, applied to *operands*, producing a value of *type*."""
    type: Type = None
    extension_set: OpExtInstImport = None
    instruction: OpCode = None
    # NOTE(review): a variadic tuple is conventionally annotated
    # tuple[Statement, ...]; kept as-is to avoid changing the annotation.
    operands: tuple[Statement] = None
| StarcoderdataPython |
4832376 | __author__ = 'mehdibenchoufi'
| StarcoderdataPython |
9616275 | <filename>Influxdb2MagicBlue.py
# -*- coding: utf-8 -*-
from magicbluelib import MagicBlue
import matplotlib.pyplot as plt
import time
import numpy as np
import sys, getopt
from influxdb import InfluxDBClient
__author__ = '<EMAIL>'
def cmap2hex(mag=0.5,cmax=1,cmin=0,cmap='jet'):
    """Map magnitude *mag* within [cmin, cmax] onto an RGB triple via a
    matplotlib colormap.

    Returns a flat numpy int array [r, g, b] with components in 0..255.
    NOTE(review): raises ZeroDivisionError when cmax == cmin (unguarded).
    """
    color = plt.get_cmap(cmap)
    # normalize mag into [0, 1] relative to [cmin, cmax]
    x = float(mag - cmin) / float(cmax - cmin)
    # take the RGB channels, dropping alpha (still normalized 0..1)
    rgbNorm=color(x)[:3]
    # scale up to the 0..255 range
    rgbFull=np.mat(rgbNorm)*255
    # round the floats and convert them to int
    rgbInt= np.rint(rgbFull).astype(int)
    # flatten the matrix into a plain 1-D array
    return np.asarray(rgbInt).reshape(-1)
def connect():
    """Open and return a connection to the MagicBlue bulb at the
    module-level *mac* address."""
    bulb = MagicBlue(mac)
    bulb.connect()
    return bulb
def change_color(num=100):
    """Turn the bulb on and set its colour from *num* on a 0..100 scale."""
    bulb=connect()
    bulb.turn_on()
    # map the magnitude onto the default 'jet' colormap (0..100 -> RGB)
    colorRgb=cmap2hex(num,100)
    bulb.set_color(colorRgb)
    bulb.disconnect()
def read_edge_influxdb(hostA, hostB, db='sdn', value='bw'):
    """Return the most recent *value* sample for the edge hostA<->hostB.

    Relies on the module-level connection globals (address, port, user,
    password, table, topology) set in __main__.
    NOTE(review): the query is built by string concatenation; if src/dst
    ever come from untrusted input this is injectable -- consider
    parameterized/bound queries.
    """
    client = InfluxDBClient(address, port, user, password, db)
    result = client.query('select '+value+' from ' + table + ' where topology = \'' + topology + '\' and ((src=\'' + hostA + '\' and dst=\'' + hostB + '\') or (src = \'' + hostB + '\' and dst = \'' + hostA + '\')) order by time desc limit 1;')
    valores = result._get_series()[0].get('values')
    print("Last input {}".format(valores[0][1]))
    # get value
    return valores[0][1]
def main():
    """Read the latest edge metric from InfluxDB and map it onto the bulb
    colour. One-shot: a polling loop used to be sketched here but was
    commented out."""
    try:
        bw = read_edge_influxdb(src, dst, db)
        change_color(bw)
    except Exception as exc:
        # A bare 'except:' previously swallowed even KeyboardInterrupt and
        # SystemExit; catch only real errors and report the cause.
        print('\nEl programa se ha cerrado inesperadamente')
        print(exc)
if __name__ == "__main__":
    # Defaults; every one of them can be overridden on the command line.
    address = 'localhost'      # InfluxDB host
    port = '8086'              # InfluxDB port
    user = 'admin'
    password = '<PASSWORD>'
    db = 'sdn'                 # database holding the edge metrics
    table = 'edges'
    topology = 'ATT'
    mac = 'c4:bd:7c:27:89:bf'  # MagicBlue bulb MAC address
    src = '0'                  # edge endpoints to monitor
    dst = '2'
    try:
        myopts, args = getopt.getopt(sys.argv[1:], "ha:P:u:p:D:t:T:s:d:m:")
    except getopt.GetoptError as e:
        print(str(e))
        # Fixed usage text: '-p' is the port and '-P' the password (the old
        # message listed '-P' twice, contradicting the option handling below).
        print("Usage: %s -a address -p port -u user -P password -D database -t table -T topology -s source -d destination -m mac" % sys.argv[0])
        sys.exit(2)
    for o, a in myopts:
        if o == '-h':
            print('Influxdb2MagicBlue.py -a <address> -p <port> -u <user> -P <password> -D <database> -t <table> -T <topology> -s <source> -d <destination> -m <mac>')
            sys.exit()
        elif o == '-a':
            address = a
        elif o == '-p':
            port = a
        elif o == '-u':
            user = a
        elif o == '-P':
            password = a
        elif o == '-D':
            db = a
        elif o == '-t':
            table = a
        elif o == '-T':
            topology = a
        elif o == '-s':
            src = a
        elif o == '-d':
            dst = a
        elif o == '-m':
            mac = a
    main()
| StarcoderdataPython |
5023291 | #!/usr/bin/env python
# imrt.py
import numpy as num
from satvap import satvap
from integral import integral
def imrt(alt,temp,rh):
    """imt = imrt(alt,temp,rh)
    compute integrated mean radiating temperature
        = Integral(p_h2o / T) / Integral(p_h2o / T^2)
    Inputs:
      alt  : altitude profile in km
      temp : temperature profile in K
      rh   : relative humidity profile in %
    Returns the scalar mean radiating temperature in K.
    DCT 11-2-99
    """
    # reshape the profiles into column vectors
    temp = num.reshape(temp,(-1,1))
    alt = num.reshape(alt,(-1,1))
    rh = num.reshape(rh,(-1,1))
    # saturation vapor pressure
    psat = satvap(temp)
    # H2O partial pressure
    ph2o = psat * rh
    # ratio of the two vertical integrals gives the mean radiating temp
    # (removed an unused 'npts = alt.size' local and stray semicolons)
    arg1 = integral(alt,ph2o/temp)
    arg2 = integral(alt,ph2o/(temp**2))
    imt = arg1/arg2
    return imt[0]
if __name__ == '__main__':
    from e2rh import e2rh
    from hypsometric import hypsometric
    from dp2e import dp2e
    print(imrt.__doc__)
    # Demo sounding on 41 levels: pressure p [hPa], temperature t [C -> K],
    # dew point td [K], mixing ratio r [g/kg] -- presumably a mid-latitude
    # profile; TODO confirm the data source.
    p = num.array(
    ( 1012.0, 991.3, 969.1, 945.5, 920.4, 893.8, 865.7, 836.1, 805.1, 772.8,
    739.5, 705.2, 670.3, 635.0, 599.7, 564.5, 529.8, 495.7, 462.6, 430.7,
    400.0, 370.8, 343.0, 316.7, 292.0, 266.8, 247.2, 227.0, 208.2, 190.8,
    174.7, 159.9, 146.2, 133.6, 121.9, 111.3, 101.5, 92.6, 84.4, 76.9,
    70.0 ))
    t = num.array(
    ( 24.54, 23.16, 21.67, 20.23, 18.86, 17.49, 16.10, 14.69, 13.22, 11.52,
    9.53, 7.24, 4.80, 2.34, 0.04, -2.29, -4.84, -7.64,-10.66,-13.95,
    -17.54,-21.45,-25.58,-29.90,-34.33,-38.94,-43.78,-48.80,-53.94,-58.79,
    -63.27,-67.32,-70.74,-73.62,-75.74,-77.07,-77.43,-76.63,-75.06,-73.14,
    -71.43 ))
    t = t + 273.15
    td = num.array(
    ( 295.99569524, 294.88592297, 293.58149854, 292.11729779, 290.51490282,
    288.80633219, 287.25337561, 285.56579921, 283.86054795, 281.99074887,
    279.96863936, 278.00807838, 276.00353817, 273.74197577, 271.36371593,
    268.74827599, 265.5596088, 261.9472149, 258.46973102, 255.00425602,
    251.12242488, 247.15405877, 243.22262393, 238.86585074, 233.8823144,
    228.4539335, 223.20007008, 217.86176743, 212.95046128, 209.08799585,
    203.25047643, 202.6535621, 197.18886555, 196.61856765, 196.0340168,
    195.44634221, 194.83729251, 194.21361376, 193.57543455, 192.93607596,
    196.90293301))
    r = num.array(
    ( 17.78, 16.92, 15.93, 14.87, 13.78, 12.70, 11.84, 10.96, 10.15, 9.31,
    8.46, 7.73, 7.05, 6.32, 5.62, 4.91, 4.10, 3.30, 2.67, 2.15,
    1.66, 1.26, 0.95, 0.68, 0.45, 0.28, 0.17, 0.10, 0.06, 0.04,
    0.02, 0.02, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01, 0.01,
    0.02 ))
    # vapour pressure from dew point, then RH and geopotential heights,
    # and finally the integrated mean radiating temperature
    e = dp2e(t,td)
    (rh1,rh2) = e2rh(p,t,e)
    h = hypsometric(p,t,r,0.001)
    imt = imrt(h,t,rh1)
    print(imt)
| StarcoderdataPython |
371548 | <reponame>jbaumann/attiny_daemon
import logging
import os
import sys
import time
import smbus
import struct
from typing import Tuple, Any
from configparser import ConfigParser
from argparse import ArgumentParser, Namespace
from collections.abc import Mapping
from pathlib import Path
class ATTiny:
    """I2C client for the ATTiny Daemon UPS firmware.

    Every register access is retried up to ``num_retries`` times and is
    protected by an 8-bit CRC (polynomial 0x31) computed over the register
    number followed by the payload bytes. 16-bit registers are transferred
    little-endian and interpreted as signed values.
    """

    # --- register map (must match the firmware) ---
    REG_LAST_ACCESS = 0x01
    REG_BAT_VOLTAGE = 0x11
    REG_EXT_VOLTAGE = 0x12
    REG_BAT_V_COEFFICIENT = 0x13
    REG_BAT_V_CONSTANT = 0x14
    REG_EXT_V_COEFFICIENT = 0x15
    REG_EXT_V_CONSTANT = 0x16
    REG_TIMEOUT = 0x21
    REG_PRIMED = 0x22
    REG_SHOULD_SHUTDOWN = 0x23
    REG_FORCE_SHUTDOWN = 0x24
    REG_LED_OFF_MODE = 0x25
    REG_RESTART_VOLTAGE = 0x31
    REG_WARN_VOLTAGE = 0x32
    REG_UPS_SHUTDOWN_VOLTAGE = 0x33
    REG_TEMPERATURE = 0x41
    REG_T_COEFFICIENT = 0x42
    REG_T_CONSTANT = 0x43
    REG_UPS_CONFIG = 0x51
    REG_PULSE_LENGTH = 0x52
    REG_SW_RECOVERY_DELAY = 0x53
    REG_VEXT_OFF_IS_SHUTDOWN = 0x54
    REG_PULSE_LENGTH_ON = 0x55
    REG_PULSE_LENGTH_OFF = 0x56
    REG_VERSION = 0x80
    REG_FUSE_LOW = 0x81
    REG_FUSE_HIGH = 0x82
    REG_FUSE_EXTENDED = 0x83
    REG_INTERNAL_STATE = 0x84
    REG_UPTIME = 0x85
    REG_MCU_STATUS_REG = 0x86
    REG_INIT_EEPROM = 0xFF

    # generator polynomial for the CRC-8 used by the firmware
    _POLYNOME = 0x31

    def __init__(self, bus_number, address, time_const, num_retries):
        """Remember the bus number, device address, per-access settle time
        (seconds) and retry count; no I2C traffic happens here."""
        self._bus_number = bus_number
        self._address = address
        self._time_const = time_const
        self._num_retries = num_retries

    def addCrc(self, crc, n):
        """Fold one byte ``n`` into the running CRC-8 value ``crc``."""
        for bitnumber in range(0, 8):
            if (n ^ crc) & 0x80:
                crc = (crc << 1) ^ self._POLYNOME
            else:
                crc = (crc << 1)
            n = n << 1
        return crc & 0xFF

    def calcCRC(self, register, read, len):
        """CRC-8 over the register number followed by ``len`` bytes of ``read``."""
        crc = self.addCrc(0, register)
        for elem in range(0, len):
            crc = self.addCrc(crc, read[elem])
        return crc

    # --- 8-bit register setters -----------------------------------------
    def set_timeout(self, timeout):
        return self.set_8bit_value(self.REG_TIMEOUT, timeout)

    def set_primed(self, primed):
        return self.set_8bit_value(self.REG_PRIMED, primed)

    def init_eeprom(self):
        return self.set_8bit_value(self.REG_INIT_EEPROM, 1)

    def set_should_shutdown(self, value):
        return self.set_8bit_value(self.REG_SHOULD_SHUTDOWN, value)

    def set_force_shutdown(self, value):
        return self.set_8bit_value(self.REG_FORCE_SHUTDOWN, value)

    def set_led_off_mode(self, value):
        return self.set_8bit_value(self.REG_LED_OFF_MODE, value)

    def set_ups_configuration(self, value):
        return self.set_8bit_value(self.REG_UPS_CONFIG, value)

    def set_vext_off_is_shutdown(self, value):
        return self.set_8bit_value(self.REG_VEXT_OFF_IS_SHUTDOWN, value)

    def set_8bit_value(self, register, value):
        """Write ``value`` to an 8-bit register, verify by reading it back.

        Returns True on verified success, False after exhausting retries.
        """
        crc = self.addCrc(0, register)
        crc = self.addCrc(crc, value)
        arg_list = [value, crc]
        for x in range(self._num_retries):
            bus = smbus.SMBus(self._bus_number)
            time.sleep(self._time_const)
            try:
                bus.write_i2c_block_data(self._address, register, arg_list)
            except Exception as e:
                logging.debug("Couldn't set 8 bit register " + hex(register) + ". Exception: " + str(e))
                continue
            finally:
                # Always release the bus handle, even when the write raises
                # (previously it leaked on every failed attempt).
                bus.close()
            if (self.get_8bit_value(register)) == value:
                return True
        logging.warning("Couldn't set 8 bit register after " + str(x) + " retries.")
        return False

    # --- 16-bit register setters ----------------------------------------
    def set_restart_voltage(self, value):
        return self.set_16bit_value(self.REG_RESTART_VOLTAGE, value)

    def set_warn_voltage(self, value):
        return self.set_16bit_value(self.REG_WARN_VOLTAGE, value)

    def set_ups_shutdown_voltage(self, value):
        return self.set_16bit_value(self.REG_UPS_SHUTDOWN_VOLTAGE, value)

    def set_bat_v_coefficient(self, value):
        return self.set_16bit_value(self.REG_BAT_V_COEFFICIENT, value)

    def set_bat_v_constant(self, value):
        return self.set_16bit_value(self.REG_BAT_V_CONSTANT, value)

    def set_t_coefficient(self, value):
        return self.set_16bit_value(self.REG_T_COEFFICIENT, value)

    def set_t_constant(self, value):
        return self.set_16bit_value(self.REG_T_CONSTANT, value)

    def set_ext_v_coefficient(self, value):
        return self.set_16bit_value(self.REG_EXT_V_COEFFICIENT, value)

    def set_ext_v_constant(self, value):
        return self.set_16bit_value(self.REG_EXT_V_CONSTANT, value)

    def set_pulse_length(self, value):
        return self.set_16bit_value(self.REG_PULSE_LENGTH, value)

    def set_switch_recovery_delay(self, value):
        return self.set_16bit_value(self.REG_SW_RECOVERY_DELAY, value)

    def set_pulse_length_on(self, value):
        return self.set_16bit_value(self.REG_PULSE_LENGTH_ON, value)

    def set_pulse_length_off(self, value):
        return self.set_16bit_value(self.REG_PULSE_LENGTH_OFF, value)

    def set_16bit_value(self, register, value):
        """Write a 16-bit signed value (little-endian + CRC) and verify.

        Returns True on verified success, False after exhausting retries.
        """
        # we interpret every value as a 16-bit signed value
        vals = value.to_bytes(2, byteorder='little', signed=True)
        crc = self.calcCRC(register, vals, 2)
        arg_list = [vals[0], vals[1], crc]
        for x in range(self._num_retries):
            bus = smbus.SMBus(self._bus_number)
            time.sleep(self._time_const)
            try:
                bus.write_i2c_block_data(self._address, register, arg_list)
            except Exception as e:
                logging.debug("Couldn't set 16 bit register " + hex(register) + ". Exception: " + str(e))
                continue
            finally:
                bus.close()  # release the handle on both success and failure
            if (self.get_16bit_value(register)) == value:
                return True
        logging.warning("Couldn't set 16 bit register after " + str(x) + " retries.")
        return False

    # --- 16-bit register getters ----------------------------------------
    def get_last_access(self):
        return self.get_16bit_value(self.REG_LAST_ACCESS)

    def get_bat_voltage(self):
        return self.get_16bit_value(self.REG_BAT_VOLTAGE)

    def get_ext_voltage(self):
        return self.get_16bit_value(self.REG_EXT_VOLTAGE)

    def get_bat_v_coefficient(self):
        return self.get_16bit_value(self.REG_BAT_V_COEFFICIENT)

    def get_bat_v_constant(self):
        return self.get_16bit_value(self.REG_BAT_V_CONSTANT)

    def get_ext_v_coefficient(self):
        return self.get_16bit_value(self.REG_EXT_V_COEFFICIENT)

    def get_ext_v_constant(self):
        return self.get_16bit_value(self.REG_EXT_V_CONSTANT)

    def get_restart_voltage(self):
        return self.get_16bit_value(self.REG_RESTART_VOLTAGE)

    def get_warn_voltage(self):
        return self.get_16bit_value(self.REG_WARN_VOLTAGE)

    def get_ups_shutdown_voltage(self):
        return self.get_16bit_value(self.REG_UPS_SHUTDOWN_VOLTAGE)

    def get_temperature(self):
        return self.get_16bit_value(self.REG_TEMPERATURE)

    def get_t_coefficient(self):
        return self.get_16bit_value(self.REG_T_COEFFICIENT)

    def get_t_constant(self):
        return self.get_16bit_value(self.REG_T_CONSTANT)

    def get_pulse_length(self):
        return self.get_16bit_value(self.REG_PULSE_LENGTH)

    def get_switch_recovery_delay(self):
        return self.get_16bit_value(self.REG_SW_RECOVERY_DELAY)

    def get_pulse_length_on(self):
        return self.get_16bit_value(self.REG_PULSE_LENGTH_ON)

    def get_pulse_length_off(self):
        return self.get_16bit_value(self.REG_PULSE_LENGTH_OFF)

    def get_16bit_value(self, register):
        """Read a CRC-checked 16-bit signed value.

        Returns the sentinel 0xFFFFFFFF after exhausting retries.
        """
        for x in range(self._num_retries):
            bus = smbus.SMBus(self._bus_number)
            time.sleep(self._time_const)
            try:
                read = bus.read_i2c_block_data(self._address, register, 3)
            except Exception as e:
                logging.debug("Couldn't read 16 bit register " + hex(register) + ". Exception: " + str(e))
                continue
            finally:
                bus.close()  # release the handle on both success and failure
            # we interpret every value as a 16-bit signed value
            val = int.from_bytes(read[0:2], byteorder='little', signed=True)
            if read[2] == self.calcCRC(register, read, 2):
                return val
            logging.debug("Couldn't read 16 bit register " + hex(register) + " correctly.")
        logging.warning("Couldn't read 16 bit register after " + str(x) + " retries.")
        return 0xFFFFFFFF

    # --- 8-bit register getters -----------------------------------------
    def get_timeout(self):
        return self.get_8bit_value(self.REG_TIMEOUT)

    def get_primed(self):
        return self.get_8bit_value(self.REG_PRIMED)

    def should_shutdown(self):
        return self.get_8bit_value(self.REG_SHOULD_SHUTDOWN)

    def get_force_shutdown(self):
        return self.get_8bit_value(self.REG_FORCE_SHUTDOWN)

    def get_led_off_mode(self):
        return self.get_8bit_value(self.REG_LED_OFF_MODE)

    def get_ups_configuration(self):
        return self.get_8bit_value(self.REG_UPS_CONFIG)

    def get_vext_off_is_shutdown(self):
        return self.get_8bit_value(self.REG_VEXT_OFF_IS_SHUTDOWN)

    def get_fuse_low(self):
        return self.get_8bit_value(self.REG_FUSE_LOW)

    def get_fuse_high(self):
        return self.get_8bit_value(self.REG_FUSE_HIGH)

    def get_fuse_extended(self):
        return self.get_8bit_value(self.REG_FUSE_EXTENDED)

    def get_internal_state(self):
        return self.get_8bit_value(self.REG_INTERNAL_STATE)

    def get_mcu_status_register(self):
        return self.get_8bit_value(self.REG_MCU_STATUS_REG)

    def get_8bit_value(self, register):
        """Read a CRC-checked 8-bit value.

        Returns the sentinel 0xFFFF after exhausting retries.
        """
        for x in range(self._num_retries):
            bus = smbus.SMBus(self._bus_number)
            time.sleep(self._time_const)
            try:
                read = bus.read_i2c_block_data(self._address, register, 2)
            except Exception as e:
                logging.debug("Couldn't read 8 bit register " + hex(register) + ". Exception: " + str(e))
                continue
            finally:
                bus.close()  # release the handle on both success and failure
            val = read[0]
            if read[1] == self.calcCRC(register, read, 1):
                return val
            logging.debug("Couldn't read register " + hex(register) + " correctly: " + hex(val))
        logging.warning("Couldn't read 8 bit register after " + str(x) + " retries.")
        return 0xFFFF

    def get_version(self):
        """Return the firmware (major, minor, patch) version tuple, or
        (0xFFFF, 0xFFFF, 0xFFFF) after exhausting retries."""
        for x in range(self._num_retries):
            bus = smbus.SMBus(self._bus_number)
            time.sleep(self._time_const)
            try:
                read = bus.read_i2c_block_data(self._address, self.REG_VERSION, 5)
            except Exception as e:
                logging.debug("Couldn't read version information. Exception: " + str(e))
                continue
            finally:
                bus.close()  # release the handle on both success and failure
            if read[4] == self.calcCRC(self.REG_VERSION, read, 4):
                major = read[2]
                minor = read[1]
                patch = read[0]
                return (major, minor, patch)
            logging.debug("Couldn't read version information correctly.")
        logging.warning("Couldn't read version information after " + str(x) + " retries.")
        return (0xFFFF, 0xFFFF, 0xFFFF)

    def get_uptime(self):
        """Return the firmware uptime counter, or 0xFFFFFFFFFFFF after
        exhausting retries."""
        for x in range(self._num_retries):
            bus = smbus.SMBus(self._bus_number)
            time.sleep(self._time_const)
            try:
                read = bus.read_i2c_block_data(self._address, self.REG_UPTIME, 5)
            except Exception as e:
                logging.debug("Couldn't read uptime information. Exception: " + str(e))
                continue
            finally:
                bus.close()  # release the handle on both success and failure
            if read[4] == self.calcCRC(self.REG_UPTIME, read, 4):
                # NOTE(review): only bytes 0..2 of the 4-byte payload are
                # used although the CRC covers all 4 -- confirm against the
                # firmware whether read[0:4] was intended.
                uptime = int.from_bytes(read[0:3], byteorder='little', signed=False)
                return uptime
            logging.debug("Couldn't read uptime information correctly.")
        logging.warning("Couldn't read uptime information after " + str(x) + " retries.")
        return 0xFFFFFFFFFFFF
| StarcoderdataPython |
1921408 | from django_elasticsearch_model_binder.mixins import ESQuerySetMixin
from django_elasticsearch_model_binder.models import ESBoundModel
from django_elasticsearch_model_binder.utils import ExtraModelFieldBase
__all__ = ['ESBoundModel', 'ESQuerySetMixin', 'ExtraModelFieldBase']
| StarcoderdataPython |
1861779 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
from nltk.stem.snowball import SnowballStemmer
from base import BaseObject
stemmer = SnowballStemmer("english", ignore_stopwords=False)
class EntityDictGenerator(BaseObject):
    """Build an entity dictionary from a provenance-annotated DataFrame.

    Each row contributes one entry keyed by a normalized id, carrying the
    original label, a normalized type and the expanded list of variations.
    """
    def __init__(self, some_df):
        """:param some_df: DataFrame with 'label', 'type', 'param', 'scope',
        'variations' and 'prov' columns."""
        BaseObject.__init__(self, __name__)
        self.df = some_df
    def generate_tuple_l2(self, tokens):
        """Return a single stemmed pair, or [] when both stems collide."""
        first = self.stem(tokens[0])
        second = self.stem(tokens[1])
        return [(first, second)] if first != second else []
    def generate_tuple_l3(self, tokens):
        """Return a single stemmed triple."""
        return [tuple(self.stem(token) for token in tokens[:3])]
    def generate_tuple_l4(self, tokens):
        """Return a single stemmed 4-tuple."""
        return [tuple(self.stem(token) for token in tokens[:4])]
    def get_variation(self, some_token):
        """Expand a '+'-joined token into stemmed tuples; plain tokens pass through.

        Raises ValueError for '+'-joined tokens of unsupported arity.
        """
        if "+" not in some_token:
            return [some_token]
        parts = [piece.lower().strip() for piece in some_token.split("+")]
        builders = {
            2: self.generate_tuple_l2,
            3: self.generate_tuple_l3,
            4: self.generate_tuple_l4,
        }
        builder = builders.get(len(parts))
        if builder is None:
            raise ValueError(
                "Unrecognized Tuple (value = {0})".format(some_token))
        return builder(parts)
    @staticmethod
    def add_to_set(the_set, the_value):
        """Add a normalized value to the_set; None and empty strings are dropped."""
        if the_value is None:
            return
        if not isinstance(the_value, tuple):
            the_value = the_value.strip().lower()
            if not the_value:
                return
        the_set.add(the_value)
    def get_expanded_variations(self, some_label, some_list):
        """Collect the normalized label plus every parseable variation."""
        if some_list.startswith('"') and some_list.endswith('"'):
            some_list = some_list[1:-1]
        the_set = set()
        # NOTE(review): str.split(" ") never yields an empty list, so the
        # stemming branch below is dead and every label is simply
        # lower-cased; confirm whether `1 ==` (stem unigram labels) was
        # intended before changing it.
        if 0 == len(some_label.split(" ")):
            the_variant = stemmer.stem(some_label.lower())
            self.add_to_set(the_set, the_variant)
        else:
            self.add_to_set(the_set, some_label.lower())
        for token in some_list.split(","):
            try:
                for variant in self.get_variation(token):
                    self.add_to_set(the_set, variant)
            except ValueError:
                continue
        return list(the_set)
    @staticmethod
    def get_params(row):
        """Parse the row's 'param' column into a dict; default to {type: label}."""
        param = row["param"].strip()
        if not param:
            # default param to 'type=label'
            key = row["type"].strip().lower()
            return {key: row["label"].strip().lower()}
        d = {}
        for piece in param.split(","):
            tokens = [part.lower().strip() for part in piece.split("=")]
            d[tokens[0]] = tokens[1]
        return d
    def process(self):
        """Return {id: {label, type, variations}} across all non-empty provenances."""
        the_master_dict = {}
        for prov in (value for value in self.df.prov.unique() if len(value) > 0):
            subset = self.df[self.df.prov == prov]
            for _, row in subset.iterrows():
                the_label = row["label"].strip()
                the_params = self.get_params(row)      # currently unused downstream
                the_scope = row["scope"].strip()       # currently unused downstream
                the_master_dict[self.key(the_label)] = {
                    "label": the_label,
                    "type": self.key(row["type"].strip()),
                    "variations": self.get_expanded_variations(
                        some_label=the_label,
                        some_list=row["variations"].strip()),
                }
        return the_master_dict
    @staticmethod
    def key(value):
        """Normalize a display string into a dictionary key."""
        return value.replace(" ", "_").lower().strip()
    @staticmethod
    def stem(some_token):
        """Pass tokens through unchanged (Snowball stemming is disabled).

        Only unigrams were ever candidates for stemming: tokens containing
        whitespace, pre-compounded tokens (containing '_', e.g.
        'contract_number') and patterns (leading '$') are always returned
        as-is.
        """
        if " " in some_token:
            return some_token
        if "_" in some_token:
            return some_token
        if some_token.startswith("$"):
            return some_token
        # entity matching is supposed to have both "exact" and "partial"
        # matching, but the partial matching may have been removed by
        # accident, so the stemmer call stays commented out:
        # return stemmer.stem(some_token.strip())
        return some_token
| StarcoderdataPython |
3221276 | <gh_stars>0
import requests
import json
import csv
from collections import deque
import time
import datetime
def getsince(csv_file):
    """Return the first column of the last row of *csv_file*."""
    with open(csv_file, 'r') as handle:
        last_row = deque(csv.reader(handle), 1)[0]
    return last_row[0]
def getprices():
    """Fetch 1-minute BTC/USD candles from Bitfinex and append them as CSV.

    Downloads up to 1000 candles in ascending order and appends rows of
    (timestamp_s, open, low, high, close, volume) to the output file.
    No return value; network/HTTP errors propagate to the caller.
    """
    #urlbfx='https://api.bitfinex.com/v2/candles/trade:1m:tLTCBTC/hist'
    #file='Bitfinex1minLTCBTC.csv'
    urlbfx='https://api.bitfinex.com/v2/candles/trade:1m:tBTCUSD/hist'
    # NOTE(review): backslash path only works on Windows; `file` also
    # shadows a builtin name.
    file='Data\BitfinexBTCUSD.csv'
    params = {'limit':1000, 'sort':1}
    response = requests.get(urlbfx, params=params)
    bfxjson = json.loads(response.text)
    bfxohlc = []
    # Bitfinex candle order is [mts, open, close, high, low, volume];
    # this reorders to (seconds, open, low, high, close, volume).
    for i in range(0, len(bfxjson)):
        appendline=bfxjson[i][0]/1000,bfxjson[i][1],bfxjson[i][3],bfxjson[i][4],bfxjson[i][2],bfxjson[i][5]
        bfxohlc.append(appendline)
    with open(file,'a',newline='') as f:
        writer = csv.writer(f)
        writer.writerows(bfxohlc)
    #print(bfxohlc)
    # The triple-quoted block below is disabled backfill code (paginates from
    # the last saved timestamp up to "now"); it is inert at runtime.
    '''
    start = (float(getsince(file))+60)*1000
    now = int(datetime.datetime.timestamp(datetime.datetime.now()))
    datevalue = datetime.datetime.utcfromtimestamp(start/1000).replace(tzinfo=datetime.timezone.utc)
    print('last:', datevalue)
    limit = 10000
    count = 0
    while count < limit:
        params = {'limit':1000, 'start':start, 'sort':1}
        try:
            response = requests.get(urlbfx, params=params)
            bfxjson = json.loads(response.text)
            bfxohlc = []
            for i in range(0, len(bfxjson)):
                appendline=bfxjson[i][0]/1000,bfxjson[i][1],bfxjson[i][3],bfxjson[i][4],bfxjson[i][2],bfxjson[i][5]
                bfxohlc.append(appendline)
            with open(file,'a',newline='') as f:
                writer = csv.writer(f)
                writer.writerows(bfxohlc)
            start = (float(getsince(file))+60)*1000
            datevalue = datetime.datetime.utcfromtimestamp(start/1000).replace(tzinfo=datetime.timezone.utc)
            print(count+1)
            print('last:', datevalue)
            count += 1
            time.sleep(5)
        except Exception as e:
            continue
        if start/1000 >= now:
            break
    '''
# Module runs a single fetch when executed/imported.
getprices()
| StarcoderdataPython |
6481668 | <filename>djbr/urls.py
from django.conf.urls import url
from . import views
# URL namespace used when reversing, e.g. {% url 'djbr:index' %}.
app_name = 'djbr'
urlpatterns = [
    url(r'^$', views.index, name='index'),
    # Book detail and its reviews; numeric ids are captured as named groups.
    url(r'^book/(?P<book_id>[0-9]+)/$', views.book, name='book'),
    url(r'^book/(?P<book_id>[0-9]+)/reviews/$', views.reviews, name='reviews'),
    url(r'^book/(?P<book_id>[0-9]+)/review/(?P<review_id>[0-9]+)$',
        views.review, name='review'),
    # Same view without a review id (e.g. create/new review for the book).
    url(r'^book/(?P<book_id>[0-9]+)/review/$', views.review, name='review'),
    url(r'^author/(?P<author_id>[0-9]+)/$', views.author, name='author'),
    # Profile with an explicit user id, or without one (current user).
    url(r'^user/(?P<user_id>[0-9]+)/$', views.profile, name='profile'),
    url(r'^user/$', views.profile, name='profile'),
    url(r'^login/$', views.login_view, name='login'),
    url(r'^logout/$', views.logout_view, name='logout'),
]
| StarcoderdataPython |
5072468 | from .EKSSOneHundredPlantsMarginPipeline import EKSSOneHundredPlantsMarginPipeline
from .GRASTAAutoMPGPipeline import GRASTAAutoMPGPipeline
from .GRASTAAutoPricePipeline import GRASTAAutoPricePipeline
from .KSSOneHundredPlantsMarginPipeline import KSSOneHundredPlantsMarginPipeline
from .OWLRegressionAutoPricePipeline import OWLRegressionAutoPricePipeline
from .SSCADMMOneHundredPlantsMarginPipeline import SSCADMMOneHundredPlantsMarginPipeline
from .SSCCVXOneHundredPlantsMarginPipeline import SSCCVXOneHundredPlantsMarginPipeline
from .SSCOMPOneHundredPlantsMarginPipeline import SSCOMPOneHundredPlantsMarginPipeline
| StarcoderdataPython |
276668 | <filename>mythril/ethereum/interface/rpc/constants.py
"""This file contains constants used used by the Ethereum JSON RPC
interface."""
# Symbolic block tags accepted by the Ethereum JSON-RPC "default block
# parameter" in place of a concrete block number.
BLOCK_TAG_EARLIEST = "earliest"  # the genesis block
BLOCK_TAG_LATEST = "latest"  # the most recently mined block
BLOCK_TAG_PENDING = "pending"  # the pending state/transactions
# All valid tags, for membership checks.
BLOCK_TAGS = (BLOCK_TAG_EARLIEST, BLOCK_TAG_LATEST, BLOCK_TAG_PENDING)
| StarcoderdataPython |
4817288 | import time
from mw import api
from ..types import StateMarker, Timestamp
from ..types.events import Event, Match
class RCListener:
    """Iterator that polls the MediaWiki recent-changes API and yields events.

    Polls in batches of ``rcs_per_request``, advancing ``state_marker`` as
    documents are consumed so restarts do not re-emit events.  When a batch
    comes back short (i.e. we are caught up), it sleeps the remainder of
    ``max_wait`` before polling again.  Iteration ends when ``stop()``
    returns True.
    """
    def __init__(self, session, *, state_marker, events,
                 max_wait, rcs_per_request, stop):
        self.session = session
        self.state_marker = state_marker
        self.events = events  # None means "yield every known event type"
        self.max_wait = max_wait
        self.rcs_per_request = rcs_per_request
        self.stop = stop
        # Base query parameters; 'rccontinue' is threaded through per request.
        self.kwargs = {
            'limit': rcs_per_request,
            'properties': API.RC_EVENT_PROPS,
            'direction': "newer",
            'start': self.state_marker.last_event
        }
        # Restrict the rc 'type' filter to just the types we can emit.
        if self.events is None:
            self.kwargs['type'] = set(m.rc_type
                                      for m in Event.MATCH_GROUPS.keys())
        else:
            self.kwargs['type'] = set(m.rc_type
                                      for e in self.events
                                      for m in e.MATCHES)
    def __iter__(self):
        while not self.stop():
            start = time.time()
            rc_docs, self.kwargs['rccontinue'] = \
                self.session.recent_changes._query(**self.kwargs)
            for rc_doc in rc_docs:
                # Only yield documents newer than the state marker, but always
                # advance the marker so progress is recorded.
                if self.state_marker.is_after(Timestamp(rc_doc['timestamp']),
                                              rc_doc.get('rcid'),
                                              rc_doc.get('revid'),
                                              rc_doc.get('logid')):
                    for event in Event.from_rc_doc(rc_doc):
                        if self.events is None or type(event) in self.events:
                            yield event
                self.state_marker.update(Timestamp(rc_doc['timestamp']),
                                         rc_doc.get('rcid'),
                                         rc_doc.get('revid'),
                                         rc_doc.get('logid'))
            if len(rc_docs) < self.rcs_per_request:
                # fix: clamp at zero — when the request itself took longer
                # than max_wait, the original passed a negative duration to
                # time.sleep(), which raises ValueError
                time.sleep(max(0.0, self.max_wait - (time.time() - start)))
class API:
    """
    Constructs a source of :class:`mwevents.Event` that connects to a MediaWiki
    API (api.php).
    """
    # Recent-changes properties requested for every event document.
    RC_EVENT_PROPS = {'user', 'userid', 'comment', 'timestamp', 'title', 'ids',
                      'sizes', 'loginfo', 'sha1'}
    def __init__(self, session):
        # session: an mw.api.Session (see from_api_url()).
        self.session = session
    def listener(self, state_marker=None, events=None, max_wait=5,
                 rcs_per_request=100, direction="newer",
                 properties=RC_EVENT_PROPS, stop=lambda: False):
        """
        Returns an :class:`RCListener` that yields events as they happen.

        :param state_marker: resume point; defaults to the wiki's current state
        :param events: set of event classes to yield (None = all)
        :param max_wait: seconds to wait between polls when caught up
        :param rcs_per_request: batch size per API request
        :param stop: zero-argument callable; iteration ends when it is True

        NOTE(review): `direction` and `properties` are accepted but not
        forwarded to RCListener in this implementation — confirm intent.

        :Example:
            .. code-block:: python
                from mwevents.sources import API
                from mwevents import RevisionSaved, PageCreated
                api_source = \
                    API.from_api_url("http://en.wikipedia.org/w/api.php")
                listener = \
                    api_source.listener(events={RevisionSaved, PageCreated})
                for event in listener:
                    if isinstance(event, RevisionSaved):
                        print(event.revision)
                    else: # isinstance(event, PageCreated):
                        print(event.page)
        """
        state_marker = StateMarker(state_marker) \
                       if state_marker is not None \
                       else self._get_current_state()
        events = set(events) if events is not None else None
        max_wait = float(max_wait)
        rcs_per_request = int(rcs_per_request)
        if not callable(stop):
            raise TypeError("'stop' must be a callable function")
        return RCListener(self.session,
                          state_marker=state_marker,
                          events=events,
                          max_wait=max_wait,
                          rcs_per_request=rcs_per_request,
                          stop=stop)
    def _get_current_state(self):
        # Fetch the single newest recent-changes doc to seed a StateMarker;
        # an empty wiki yields an empty (default) marker.
        docs = list(self.session.recent_changes.query(properties={'ids',
                                                                  'timestamp'},
                                                      limit=1))
        if len(docs) > 0:
            return StateMarker(Timestamp(docs[0]['timestamp']), docs[0]['rcid'])
        else:
            return StateMarker()
    def query(self, *args, **kwargs): raise NotImplementedError()  # not yet supported for this source
    @classmethod
    def from_api_url(cls, url):
        # Alternate constructor: build the session from an api.php URL.
        return cls(api.Session(url))
| StarcoderdataPython |
195591 | from __future__ import absolute_import, division, print_function
from trakt.core.errors import ERRORS
class RequestFailedError(Exception):
    """Raised when a request could not be performed at all (no response available)."""
    pass
class RequestError(Exception):
    """Error for a completed request, carrying the HTTP response.

    Resolves the status code to a (name, description) pair via ERRORS and
    uses that pair as the exception message.
    """
    def __init__(self, response):
        self.response = response
        if response is not None:
            self.status_code = response.status_code
        else:
            self.status_code = None
        self.error = ERRORS.get(self.status_code, ('Unknown', 'Unknown'))
        # Call super class with message
        super(RequestError, self).__init__('%s - "%s"' % self.error)
class ClientError(RequestError):
    """Request failure attributed to the client (presumably 4xx — confirm at raise sites)."""
    pass
class ServerError(RequestError):
    """Request failure attributed to the server (presumably 5xx — confirm at raise sites)."""
    pass
| StarcoderdataPython |
11248516 | <reponame>tomsinger/TimeInc-Radeks-LeavingCard
from __future__ import unicode_literals
import hashlib
import os
import errno
import codecs
import zipfile
import shutil
import glob
import subprocess
from chardet.universaldetector import UniversalDetector
from ftfy import fix_file
def get_hash(file_handler):
    """Return the MD5 hex digest of the remaining contents of *file_handler*.

    The handle must be opened in binary mode; it is consumed in 8 KiB chunks.
    """
    digest = hashlib.md5()
    chunks = iter(lambda: file_handler.read(8192), b'')
    for chunk in chunks:
        digest.update(chunk)
    return digest.hexdigest()
def detect_encoding(in_path, encoding='utf8'):
    """Guess the text encoding of *in_path* using chardet's UniversalDetector.

    Feeds roughly the first 300 bytes' worth of lines to the detector and
    returns its guess, falling back to *encoding* when detection fails.
    """
    # fix: the file handle was opened and never closed (resource leak);
    # read the sample inside a context manager instead.
    with open(in_path, 'r') as file_handler:
        sample = file_handler.readlines(300)
    # NOTE(review): UniversalDetector.feed expects bytes on Python 3; opening
    # in 'rb' would be more robust — confirm before changing the mode.
    detector = UniversalDetector()
    for line in sample:
        detector.feed(line)
        if detector.done:
            break
    detector.close()
    result = detector.result
    if result['encoding']:
        encoding = result['encoding']
    return encoding
def fix_encoding(in_path, in_encoding, out_encoding='utf8'):
    """Attempt to clean up some of the more common encoding screw-ups."""
    # Rewrites *in_path* in place: reads it as *in_encoding* (ignoring
    # undecodable characters), runs it through ftfy.fix_file, writes the
    # result to a sibling temp file, then renames over the original.
    in_fh = codecs.open(in_path, 'r+', in_encoding, errors='ignore')
    in_name = in_fh.name
    tmp_name = os.path.join(os.path.dirname(in_fh.name), 'converting.tmp')
    out_fh = codecs.open(tmp_name, 'w+', out_encoding)
    # 'with' ensures both handles close even if ftfy raises mid-stream.
    with in_fh, out_fh:
        for line in fix_file(in_fh):
            out_fh.write(line)
    # NOTE(review): os.rename over an existing file fails on Windows;
    # os.replace would be portable — confirm target platforms.
    os.rename(tmp_name, in_name)
def purge_dir(directory):
    """Delete the visible contents of *directory*, preserving dot-entries.

    Hidden files are left alone, and hidden directories are neither deleted
    nor descended into.  (Original TODO retained: this still needs defined
    exception behaviour.)
    """
    for dirpath, dirnames, filenames in os.walk(directory):
        visible_files = [name for name in filenames if name[0] != '.']
        # Prune hidden directories in place so walk() never enters them.
        dirnames[:] = [name for name in dirnames if name[0] != '.']
        for name in visible_files:
            os.unlink(os.path.join(dirpath, name))
        for name in dirnames:
            shutil.rmtree(os.path.join(dirpath, name))
def ensure_exists(directory):
    """Ensure that a directory exists, creating it (and parents) if it does not.

    Args:
        directory: A string of the desired directory path.
    Raises:
        OSError for any failure other than the path already existing.
    """
    try:
        os.makedirs(directory)
    except OSError as exc:
        if exc.errno == errno.EEXIST:
            return
        raise
def all_zipped_files_startwith(zip_file, prefix):
    """Return True iff every member of *zip_file* would extract under *prefix*.

    Guards against zip-slip: a member like '../x' resolves outside *prefix*
    and makes this return False.  *prefix* should be an absolute path.
    """
    return all(
        os.path.abspath(os.path.join(prefix, member)).startswith(prefix)
        for member in zip_file.namelist()
    )
def safely_unzip(destination, zipfile_path):
    """Extract *zipfile_path* into *destination* only if no member escapes it.

    Returns True after extraction, False when any member would resolve
    outside *destination* (zip-slip); *destination* should be absolute.
    """
    # fix: the flag was misspelled 'sucess' on initialization, so the
    # rejection path returned an unbound name (NameError) instead of False
    success = False
    with zipfile.ZipFile(zipfile_path, allowZip64=True) as zip_file:
        if all_zipped_files_startwith(zip_file, destination):
            zip_file.extractall(destination)
            success = True
    return success
def split_file_by_size(file_path, part_size_in_bytes):
    """Split *file_path* into parts of at most *part_size_in_bytes* bytes.

    Shells out to the POSIX ``split`` utility (must be on PATH, so not stock
    Windows).  Parts are written next to the original as
    ``<name>.part.<suffix>`` and the sorted list of part paths is returned.
    Raises subprocess.CalledProcessError if ``split`` exits non-zero.
    """
    prefix = os.path.join(
        os.path.dirname(file_path),
        '%s.part.' % os.path.basename(file_path)
    )
    # List form (no shell) keeps arbitrary filenames safe from shell parsing.
    cmd = ['split', '-b%s' % part_size_in_bytes, file_path, prefix]
    subprocess.check_call(cmd)
    return sorted(glob.glob('%s*' % prefix))
| StarcoderdataPython |
12846217 | <gh_stars>1-10
from fastapi import Request
class MockedHTTPRequest(Request):
    """Test double for fastapi.Request with canned body and headers.

    NOTE(review): on the real Request, ``body()`` is an async method and
    ``headers`` is a Headers mapping; here both are overridden as plain
    properties returning the supplied dicts — confirm callers expect that.
    NOTE(review): the mutable ``{}`` defaults are shared across instances
    that omit the arguments.
    """
    def __init__(self, body: dict = {}, headers: dict = {}):
        # Minimal ASGI scope so the base Request initializes.
        super().__init__({"type": "http"})
        self.__body = body
        self.__headers = headers
    @property
    def body(self) -> dict:
        # Canned request body supplied at construction time.
        return self.__body
    @property
    def headers(self) -> dict:
        # Canned request headers supplied at construction time.
        return self.__headers
| StarcoderdataPython |
11279472 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
class Polygon:
    """Utilities for multi-ring polygon coordinates (list of rings of (x, y))."""
    def get_min_max(self,coordinates):
        """Return the bounding box of all points as a dict of min/max x/y.

        All four values are None when *coordinates* is empty.
        """
        min_x = None
        min_y = None
        max_x = None
        max_y = None
        for i in range(len(coordinates)):
            for j in range(len(coordinates[i])):
                actual_x = coordinates[i][j][0]
                actual_y = coordinates[i][j][1]
                if min_x is None or min_x > actual_x:
                    min_x = actual_x
                if min_y is None or min_y > actual_y:
                    min_y = actual_y
                if max_x is None or max_x < actual_x:
                    max_x = actual_x
                if max_y is None or max_y < actual_y:
                    max_y = actual_y
        return {
            'min_x':min_x,
            'min_y':min_y,
            'max_x':max_x,
            'max_y':max_y,
        }
    def to_canvas(self, coordinates, size, flip_horrizontaly):
        """Build an SVG-like path string ("M x,y L x,y ... z") in pixel space.

        *size* must supply 'orig_left', 'orig_top', 'x_transform_factor',
        'y_transform_factor' and (when flipping) 'image_height_px'.
        Each ring after the first is separated with " Z M".
        """
        path = "M"
        previous_x_in_px = None
        previous_y_in_px = None
        first_point = ""
        for i in range(len(coordinates)):
            if i > 0 and path[-4:] != " Z M":
                path = path + " Z M"
            for j in range(len(coordinates[i])):
                actual_x = coordinates[i][j][0]
                actual_y = coordinates[i][j][1]
                # Scale world coordinates into pixel coordinates.
                x_in_px = round((actual_x - size["orig_left"])*size["x_transform_factor"])
                y_in_px = round((actual_y - size["orig_top"])*size["y_transform_factor"])
                if flip_horrizontaly:
                    y_in_px = size["image_height_px"] - y_in_px
                # NOTE(review): `and` skips any point sharing EITHER pixel
                # coordinate with the previous one, which drops axis-aligned
                # segments; `or` would only skip exact duplicates — confirm
                # which behaviour is intended.
                if x_in_px != previous_x_in_px and y_in_px != previous_y_in_px :
                    if path[-1:] != "M":
                        path = path + " L"
                    path = path + " " + str(x_in_px) + "," + str(y_in_px)
                    previous_x_in_px = x_in_px
                    previous_y_in_px = y_in_px
                if j == 0:
                    first_point = str(x_in_px) + "," + str(y_in_px)
            # Close the ring by drawing back to its first point.
            path = path + " L " + first_point + " z"
        return path
| StarcoderdataPython |
12850807 | <reponame>IVIGOR13/print_chat<gh_stars>1-10
#
# Author: <NAME>
# 2019
#
import time
import os
from termcolor import colored
from datetime import datetime
import colorama
colorama.init()
"""
Small print tool for implementing chat in the terminal
"""
class print_chat:
    """Terminal chat renderer.

    Keeps an ordered list of message dicts (sender/text/time/skip/mark) plus
    a registry of sender colors, and redraws slices of the conversation in
    place using ANSI cursor-movement escapes.  Message numbers used
    throughout are 1-based, counted from the NEWEST message (number 1 is the
    most recently printed message).
    """
    def _clear_screen(self):
        # Clear the whole terminal (Windows vs. POSIX command).
        os.system('cls' if os.name == 'nt' else 'clear')
    def clear_row(self):
        # Blank the current row and return the cursor to its start.
        print('\r' + ' ' * os.get_terminal_size().columns + '\r', end='')
    def up_on_rows(self, number):
        # Move the cursor up `number` rows, blanking each one.
        self.clear_row()  # fix: was `self.clear_row` (attribute access, never called)
        print(('\x1b[A\r' + ' ' * os.get_terminal_size().columns + '\r') * number, end='')
    def up_on_message(self, number):
        # Move the cursor up to the start of message `number`.
        n = self.__get_lines(number)
        self.up_on_rows(n)
    def up_on_occupied_rows(self, len_str):
        # Move up over a string of length `len_str`, accounting for wrapping.
        lines = ((len_str-1) // os.get_terminal_size().columns) + 1
        self.up_on_rows(lines)
    def down_on_rows(self, number):
        # Move the cursor down `number` rows, blanking each one.
        self.clear_row()
        print(('\n\r' + ' ' * os.get_terminal_size().columns + '\r') * number, end='')
    def get_num_messages(self):
        return(len(self.MESSAGES))
    def get_messages_from(self, sender):
        # All message dicts posted by `sender`, oldest first.
        out = []  # fix: was a tuple `()`, which has no append()
        for i in self.MESSAGES:
            if i['sender'] == sender:
                out.append(i)
        return out
    def get_messages(self):
        return self.MESSAGES
    def get_message(self, number):
        # Message dict `number`, counting back from the newest; None if out of range.
        if number <= len(self.MESSAGES):
            return self.MESSAGES[len(self.MESSAGES) - number]
    def get_senders(self):
        # Names of all senders seen so far.
        out = []  # fix: was a tuple `()`, which has no append()
        # fix: self.senders is a list of dicts, so .keys() raised AttributeError
        for entry in self.senders:
            out.append(entry['sender'])
        return out
    def set_colors(self, colors):
        # colors: iterable of (sender,) or (sender, color) tuples.
        for color in colors:
            # fix: `found` was reset only once before the loop, so every
            # sender after the first match was silently dropped
            found = False
            for i in range(len(self.senders)):
                if self.senders[i]['sender'] == color[0]:
                    self.senders[i]['color'] = color[1]
                    found = True
            if not found:
                if len(color) == 1:
                    self.senders.append({
                        'sender': color[0],
                        'color': 'grey',
                    })
                else:
                    self.senders.append({
                        'sender': color[0],
                        'color': color[1],
                    })
    def get_time(self):
        # Current wall-clock label, short or full depending on construction.
        if not self.time_full:
            return datetime.today().strftime("%H:%M")
        else:
            return datetime.today().strftime("%d.%m.%y %H:%M")
    def set_header(self, text):
        # Header lines shown above the conversation; redraws immediately.
        self.header = text.split('\n')
        self._print_header()
    def _print_header(self):
        self._clear_screen()
        for i in self.header:
            print(i)
    # returns the number of lines that must be passed to move the cursor to the specified message
    def __get_lines(self, number):
        lines = 0
        for i in range(number):
            # counting the number of lines occupied by a message
            m = self.MESSAGES[(len(self.MESSAGES)-1) - i]
            l = (len(m['sender']) + len(m['text']) + len(m['mark']) + self.len_frame)
            # count the number of lines occupied by a skip
            s = 0
            for j in m['skip']:
                j = str(j)
                if isinstance(j, str):
                    for k in j.split('\n'):
                        s += ((len(k)-1) // os.get_terminal_size().columns) + 1
                else:
                    # unreachable after the str() conversion above; kept as-is
                    s += ((len(j)-1) // os.get_terminal_size().columns) + 1
            lines += (((l-1) // os.get_terminal_size().columns) + 1) + s
        return lines
    def _print_mess(self, sender, text, time, skip, mark):
        if self.is_time:
            print('[{}] '.format(time), end='')
        # color selection for printing sender name
        c0, c1 = 'white', 'grey'
        found = False
        for i in self.senders:
            if i['sender'] == sender:
                c = i['color']
                if c == 'grey':
                    c0, c1 = 'white', 'grey'
                else:
                    c0, c1 = 'grey', c
                # fix: was placed after `break` and never executed, so known
                # senders were re-appended to self.senders on every message
                found = True
                break
        if not found:
            self.senders.append({
                'sender': sender,
                'color': 'grey',
            })
        print(colored('[' + sender + ']', c0, ('on_' + c1)) + ': ', end='')
        print('{}{}'.format(text, ''.join(mark)), end='\n')
        for i in skip:
            print(i)
    def add_mark(self, number, mark):
        # Append a mark (e.g. a reaction) to message `number` and redraw.
        if not mark == '' and number > 0 and number <= len(self.MESSAGES):
            self.up_on_message(number)
            m = self.MESSAGES[len(self.MESSAGES)-number]['mark']
            if not m:
                self.MESSAGES[len(self.MESSAGES)-number].update({
                    'mark': [str(mark)]
                })
            else:
                m.append(str(mark))
                self.MESSAGES[len(self.MESSAGES)-number].update({
                    'mark': m
                })
            self._load(number)
    def edit_mark(self, number, mark):
        # Replace all marks of message `number` with one mark ('' removes).
        if number > 0 and number <= len(self.MESSAGES):
            if mark == '':
                self.remove_mark(number)
            else:
                n = len(self.MESSAGES) - number
                self.up_on_message(number)
                self.MESSAGES[n].update({
                    'mark': [str(mark)]
                })
                self._load(number)
    def remove_mark(self, number):
        # Clear all marks of message `number` and redraw.
        if number > 0 and number <= len(self.MESSAGES):
            n = len(self.MESSAGES) - number
            self.up_on_message(number)
            self.MESSAGES[n].update({
                'mark': []
            })
            self._load(number)
    def has_mark(self, number):
        n = len(self.MESSAGES) - number
        if self.MESSAGES[n]['mark'] == []:
            return False
        else:
            return True
    def get_mark(self, number):
        # Return the list of marks attached to message `number`.
        # (A shadowed duplicate definition earlier in the class was removed;
        # this one matches the runtime behaviour of the original.)
        n = len(self.MESSAGES) - number
        return self.MESSAGES[n]['mark']
    def add_skip(self, number, text):
        # Append an extra text line ("skip") under message `number` and redraw.
        if not text == '' and number > 0 and number <= len(self.MESSAGES):
            self.up_on_message(number)
            m = self.MESSAGES[len(self.MESSAGES)-number]['skip']
            if not m:
                self.MESSAGES[len(self.MESSAGES)-number].update({
                    'skip': [str(text)]
                })
            else:
                m.append(str(text))
                self.MESSAGES[len(self.MESSAGES)-number].update({
                    'skip': m
                })
            self._load(number)
    def edit_skip(self, number, text):
        # Replace all skips of message `number` with one line ('' removes).
        if number > 0 and number <= len(self.MESSAGES):
            if text == '':
                self.remove_skip(number)
            else:
                self.up_on_message(number)
                self.MESSAGES[len(self.MESSAGES) - number].update({
                    'skip': [str(text)]
                })
                self._load(number)
    def remove_skip(self, number):
        # Clear all skips of message `number` and redraw.
        if number > 0 and number <= len(self.MESSAGES):
            self.up_on_message(number)
            self.MESSAGES[len(self.MESSAGES) - number].update({
                'skip': []
            })
            self._load(number)
    def has_skip(self, number):
        if self.MESSAGES[len(self.MESSAGES) - number]['skip'] == []:
            return False
        else:
            return True
    # reprints the specified number of messages
    def reload(self, number):
        if number > 0 and number < len(self.MESSAGES):
            self.up_on_message(number)
            self._load(number)
        elif number == len(self.MESSAGES):
            self._clear_screen()
            self._print_header()
            self._load(number)
    def _load(self, number):
        # Print the newest `number` messages in order.
        if number > 0 and number <= len(self.MESSAGES):
            for m in self.MESSAGES[len(self.MESSAGES)-number:len(self.MESSAGES)]:
                self._print_mess(m['sender'], m['text'], m['time'], m['skip'], m['mark'])
    def remove(self, number):
        # Delete message `number` and redraw what followed it.
        if number > 0 and number <= len(self.MESSAGES):
            self.up_on_message(number)
            self._load(number-1)
            self.MESSAGES.pop(len(self.MESSAGES) - number)
    def edit(self, number, text):
        # Replace the text of message `number` ('' deletes it) and redraw.
        if number > 0 and number <= len(self.MESSAGES):
            if text == '':
                self.remove(number)
            else:
                n = len(self.MESSAGES) - number
                self.up_on_message(number)
                self.MESSAGES[n].update({
                    'text': text
                })
                self._load(number)
    def add_message_top(self, sender, text, time='', skip=None, mark=None, prnt=True):
        # Insert a message at the TOP (oldest position) of the history.
        # fix: mutable default arguments ([]) were shared between all
        # messages, so marks/skips added to one message leaked into others
        skip = [] if skip is None else skip
        mark = [] if mark is None else mark
        text = " ".join(str(text).split())
        if text != '':
            if time == '':
                time = self.get_time()
            self.MESSAGES.insert(0, {
                'sender': sender,
                'text': text,
                'time': time,
                'skip': skip,
                'mark': mark,
            })
            if prnt:
                self.up_on_message(self.get_num_messages() - 1)
                self._print_mess(sender, text, time, skip, mark)
                self._load(self.get_num_messages()-1)
    def add_message(self, sender, text, time='', skip=None, mark=None):
        # Append a message at the BOTTOM (newest position) and print it.
        # fix: same shared-mutable-default problem as add_message_top
        skip = [] if skip is None else skip
        mark = [] if mark is None else mark
        text = " ".join(str(text).split())
        if text != '':
            if time == '':
                time = self.get_time()
            self.MESSAGES.append({
                'sender': sender,
                'text': text,
                'time': time,
                'skip': skip,
                'mark': mark,
            })
            self._print_mess(sender, text, time, skip, mark)
    def close(self, clr=False):
        # Drop all state; optionally clear the screen as well.
        self.MESSAGES.clear()
        self.senders.clear()
        print('\x1b[A\r', end='')
        if clr:
            self._clear_screen()
    def __init__(self, time=False):
        # time: False (no timestamps), 'short' (HH:MM) or 'full' (date+time).
        self.MESSAGES = []
        self.senders = []
        self.header = []
        self.is_time = False
        self.time_full = False
        # len_frame approximates the fixed per-message decoration width:
        # brackets/colon (4) + short time (8) + date prefix (9).
        if time == 'short':
            self.len_frame = 4 + 8
            self.is_time = True
        elif time == 'full':
            self.len_frame = 4 + 8 + 9
            self.is_time = True
            self.time_full = True
        else:
            self.len_frame = 4
        self._clear_screen()
| StarcoderdataPython |
9720778 | #!/usr/bin/env python
#
# soaplib - Copyright (C) Soaplib contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
import unittest
class FaultTests(unittest.TestCase):
    """Tests for soaplib.core.model.exception.Fault construction and (de)serialization.

    Modernized to use assertTrue/assertFalse: the failUnless/failIf aliases
    are deprecated and were removed in Python 3.12.
    """
    def _getTargetClass(self):
        # Imported lazily so collection does not require soaplib.
        from soaplib.core.model.exception import Fault
        return Fault
    def _makeOne(self, *args, **kw):
        return self._getTargetClass()(*args, **kw)
    def test_ctor_defaults(self):
        fault = self._makeOne()
        self.assertEqual(fault.faultcode, 'senv:Server')
        self.assertEqual(fault.faultstring, 'Fault')
        self.assertEqual(fault.faultactor, '')
        self.assertEqual(fault.detail, None)
        self.assertEqual(repr(fault), "senv:Server: 'Fault'")
    def test_ctor_faultcode_w_senv_prefix(self):
        fault = self._makeOne(faultcode='senv:Other')
        self.assertEqual(fault.faultcode, 'senv:Other')
        self.assertEqual(repr(fault), "senv:Other: 'Fault'")
    def test_ctor_explicit_faultstring(self):
        fault = self._makeOne(faultstring='Testing')
        self.assertEqual(fault.faultstring, 'Testing')
        self.assertEqual(repr(fault), "senv:Server: 'Testing'")
    def test_ctor_no_faultstring_overridden_get_type_name(self):
        # The default faultstring comes from get_type_name().
        class Derived(self._getTargetClass()):
            def get_type_name(self):
                return 'Overridden'
        fault = Derived()
        self.assertEqual(fault.faultstring, 'Overridden')
        self.assertEqual(repr(fault), "senv:Server: 'Overridden'")
    def test_to_parent_element_wo_detail(self):
        from lxml.etree import Element
        from soaplib.core.namespaces import ns_soap_env
        element = Element('testing')
        fault = self._makeOne()
        cls = self._getTargetClass()
        cls.to_parent_element(fault, 'urn:ignored', element)
        (child,) = element.getchildren()
        self.assertEqual(child.tag, '{%s}Fault' % ns_soap_env)
        self.assertEqual(child.find('faultcode').text, 'senv:Server')
        self.assertEqual(child.find('faultstring').text, 'Fault')
        self.assertEqual(child.find('faultactor').text, '')
        self.assertFalse(child.findall('detail'))
    def test_to_parent_element_w_detail(self):
        from lxml.etree import Element
        element = Element('testing')
        detail = Element('something')
        fault = self._makeOne(detail=detail)
        cls = self._getTargetClass()
        cls.to_parent_element(fault, 'urn:ignored', element)
        (child,) = element.getchildren()
        # The detail element must be embedded by reference, not copied.
        self.assertTrue(child.find('detail').find('something') is detail)
    def test_add_to_parent_element(self):
        from lxml.etree import Element
        from soaplib.core.namespaces import ns_soap_env
        element = Element('testing')
        fault = self._makeOne()
        cls = self._getTargetClass()
        fault.add_to_parent_element('urn:ignored', element)
        (child,) = element.getchildren()
        self.assertEqual(child.tag, '{%s}Fault' % ns_soap_env)
        self.assertEqual(child.find('faultcode').text, 'senv:Server')
        self.assertEqual(child.find('faultstring').text, 'Fault')
        self.assertEqual(child.find('faultactor').text, '')
        self.assertFalse(child.findall('detail'))
    def test_from_xml_wo_detail(self):
        from lxml.etree import Element
        from lxml.etree import SubElement
        from soaplib.core.namespaces import ns_soap_env
        element = Element('{%s}Fault' % ns_soap_env)
        fcode = SubElement(element, 'faultcode')
        fcode.text = 'senv:other'
        fstr = SubElement(element, 'faultstring')
        fstr.text = 'Testing'
        actor = SubElement(element, 'faultactor')
        actor.text = 'phreddy'
        fault = self._getTargetClass().from_xml(element)
        self.assertEqual(fault.faultcode, 'senv:other')
        self.assertEqual(fault.faultstring, 'Testing')
        self.assertEqual(fault.faultactor, 'phreddy')
        self.assertEqual(fault.detail, None)
    def test_from_xml_w_detail(self):
        from lxml.etree import Element
        from lxml.etree import SubElement
        from soaplib.core.namespaces import ns_soap_env
        element = Element('{%s}Fault' % ns_soap_env)
        fcode = SubElement(element, 'faultcode')
        fcode.text = 'senv:other'
        fstr = SubElement(element, 'faultstring')
        fstr.text = 'Testing'
        actor = SubElement(element, 'faultactor')
        actor.text = 'phreddy'
        detail = SubElement(element, 'detail')
        fault = self._getTargetClass().from_xml(element)
        self.assertTrue(fault.detail is detail)
    def test_add_to_schema_no_extends(self):
        from soaplib.core.namespaces import ns_xsd
        class cls(self._getTargetClass()):
            @classmethod
            def get_type_name_ns(self, app):
                return 'testing:My'
        schema = DummySchemaEntries(object())
        cls.add_to_schema(schema)
        self.assertEqual(len(schema._complex_types), 1)
        c_cls, c_elt = schema._complex_types[0]
        self.assertTrue(c_cls is cls)
        self.assertEqual(c_elt.tag, '{%s}complexType' % ns_xsd)
        self.assertEqual(c_elt.get('name'), 'FaultFault')
        self.assertEqual(len(c_elt), 1)
        seq = c_elt[0]
        self.assertEqual(seq.tag, '{%s}sequence' % ns_xsd)
        self.assertEqual(len(schema._elements), 1)
        e_cls, e_elt = schema._elements[0]
        self.assertTrue(e_cls is cls)
        self.assertEqual(e_elt.tag, '{%s}element' % ns_xsd)
        self.assertEqual(e_elt.get('name'), 'Fault')
        self.assertEqual(e_elt.get('{%s}type' % ns_xsd), 'testing:MyFault')
        self.assertEqual(len(e_elt), 0)
    def test_add_to_schema_w_extends(self):
        from soaplib.core.namespaces import ns_xsd
        class base(self._getTargetClass()):
            @classmethod
            def get_type_name_ns(self, app):
                return 'testing:Base'
        class cls(self._getTargetClass()):
            __extends__ = base
            @classmethod
            def get_type_name_ns(self, app):
                return 'testing:My'
        schema = DummySchemaEntries(object())
        cls.add_to_schema(schema)
        self.assertEqual(len(schema._complex_types), 1)
        c_cls, c_elt = schema._complex_types[0]
        self.assertTrue(c_cls is cls)
        self.assertEqual(c_elt.tag, '{%s}complexType' % ns_xsd)
        self.assertEqual(c_elt.get('name'), 'FaultFault')
        self.assertEqual(len(c_elt), 1)
        cc_elt = c_elt[0]
        self.assertEqual(cc_elt.tag, '{%s}complexContent' % ns_xsd)
        self.assertEqual(len(cc_elt), 1)
        e_elt = cc_elt[0]
        self.assertEqual(e_elt.tag, '{%s}extension' % ns_xsd)
        self.assertEqual(e_elt.get('base'), 'testing:Base')
        self.assertEqual(len(e_elt), 1)
        seq = e_elt[0]
        self.assertEqual(seq.tag, '{%s}sequence' % ns_xsd)
        self.assertEqual(len(schema._elements), 1)
        e_cls, e_elt = schema._elements[0]
        self.assertTrue(e_cls is cls)
        self.assertEqual(e_elt.tag, '{%s}element' % ns_xsd)
        self.assertEqual(e_elt.get('name'), 'Fault')
        self.assertEqual(e_elt.get('{%s}type' % ns_xsd), 'testing:MyFault')
        self.assertEqual(len(e_elt), 0)
class DummySchemaEntries:
    """Test double that records schema registrations as (cls, element) pairs."""
    def __init__(self, app):
        self.app = app
        self._complex_types, self._elements = [], []
    def add_complex_type(self, cls, ct):
        """Record a registered complexType element."""
        self._complex_types.append((cls, ct))
    def add_element(self, cls, elt):
        """Record a registered element declaration."""
        self._elements.append((cls, elt))
self._elements.append((cls, elt))
# Allow running this test module directly.
if __name__ == '__main__': #pragma NO COVERAGE
    unittest.main()
| StarcoderdataPython |
1721606 | <reponame>blackbotinc/AWS-Attack<filename>ttp/src/cloudtrail_download_event_history_src.py
#!/usr/bin/env python3
import datetime
import argparse
import json
import time
def main(args, awsattack_main):
    """Download the full CloudTrail event history for each target region.

    Paginates lookup_events (50 events per page) and dumps each region's
    events to sessions/<name>/downloads/ as timestamped JSON.

    :param args: parsed CLI args; args.regions is a comma-separated region
        list, or None meaning "every region supporting cloudtrail".
    :param awsattack_main: framework object providing the active session,
        print wrapper, region enumeration, and boto3 client helpers.
    :return: dict mapping region name -> number of events downloaded,
        or None when no supported region is available.
    """
    session = awsattack_main.get_active_session()
    # Deliberately shadows the builtin so output goes through the
    # framework's print wrapper.
    print = awsattack_main.print
    get_regions = awsattack_main.get_regions
    summary_data = {}
    if args.regions is None:
        regions = get_regions('cloudtrail')
        if regions is None or regions == [] or regions == '' or regions == {}:
            print('This module is not supported in any regions specified in the current sessions region set. Exiting...')
            return
    else:
        regions = args.regions.split(',')
    for region in regions:
        events = []
        print('Downloading logs from {}:'.format(region))
        print('  This may take a while...')
        client = awsattack_main.get_boto3_client('cloudtrail', region)
        event_history = client.lookup_events(
            MaxResults=50,
        )
        events += event_history['Events']
        # Follow the pagination token until CloudTrail stops returning one.
        while 'NextToken' in event_history:
            print('  Processing additional results...')
            event_history = client.lookup_events(
                MaxResults=50,
                NextToken=event_history['NextToken']
            )
            events += event_history['Events']
        summary_data[region] = len(events)
        #print('Finished enumerating {}'.format(region))
        # Timestamp keeps repeated runs from overwriting earlier downloads.
        now = time.time()
        with open('sessions/{}/downloads/cloudtrail_{}_event_history_{}.json'.format(session.name, region, now), 'w+') as json_file:
            # default=str handles datetime objects inside event records.
            json.dump(events, json_file, indent=2, default=str)
        print('  Events written to ./sessions/{}/downloads/cloudtrail_{}_event_history_{}.json'.format(session.name, region, now))
    return summary_data
| StarcoderdataPython |
1966460 | # density processing classes and functions
import numpy as np
import matplotlib.pyplot as plt
# define class
class Density(object):
    """Time-series density data for one or more named locations.

    Attributes
    ----------
    t : time in units of [s]
    d : density in units of [kg/m^3]
    p : the name of the locations
    """

    def __init__(self, time, data, positions):
        """Bind the time vector, density array, and location names."""
        self.p = positions
        self.t = time
        self.d = data
# take data from csv file and convert into Density class to be used by other functions
def data_to_density(filename, positions):
    """ Read a CSV of time/density data and wrap it in a Density object.
    Parameters
    ----------
    filename : string
        Path of the CSV file; first row is a header, first column is time.
    positions : string
        Names of the measurement locations (one per data column).
    Returns
    -------
    data : Density
        Parsed time vector and density array from the CSV file.
    """
    raw = np.genfromtxt(filename, delimiter=',')
    # Skip the header row; column 0 is time, remaining columns are density.
    return Density(raw[1:, 0], raw[1:, 1:], positions)
# plot data from Density class
def plot_data(data, filename):
    """ Plots Density data and saves PNG, SVG, and PDF copies.
    One curve is drawn per location in data.p, against data.t on the x axis.
    Output files are written under astronomer/plots/ with a _plot suffix.
    Parameters
    ----------
    data : Density
        Time series to plot.
    filename : string
        Base name of the output files (extension is appended).
    """
    plt.rcParams['font.family'] = 'serif'
    plt.rcParams['mathtext.fontset'] = 'dejavuserif'
    plt.figure(facecolor='w', edgecolor='k', dpi=200)
    for k in range(len(data.p)):
        plt.plot(data.t, data.d[:, k], label=data.p[k])
    plt.xlabel('Time (s)')
    plt.ylabel('Density (kg/m^3)')
    plt.figlegend(loc='upper left', bbox_to_anchor=(0.2, 0.8))
    # Pass visibility positionally: the keyword was renamed b -> visible in
    # Matplotlib 3.5 and 'b' was removed in 3.6, so b=True breaks there;
    # the positional form works on all versions.
    plt.grid(True, which='major', axis='both')
    plt.savefig('astronomer/plots/'+filename+'_plot.png', transparent=True)
    plt.savefig('astronomer/plots/'+filename+'_plot.svg', transparent=True)
    plt.savefig('astronomer/plots/'+filename+'_plot.pdf', transparent=True)
# function to convert units to atom/b-cm for MCNP
def density_to_atomdensity(data_in):
    """ Converts helium-3 density from kg/m^3 to atom/(barn*cm).
    Parameters
    ----------
    data_in : Density
        Density over time in kg/m^3 (not modified).
    Returns
    -------
    data_out : Density
        Same times and positions, density in atom/b-cm.
    """
    # kg/m^3 -> g/cm^3, times Avogadro's number, times cm^2 -> barn,
    # divided by the He-3 molar mass (3.016029 g/mol).
    converted = data_in.d * 1000 * 6.022e23 * 1e-6 * 1e-24 / 3.016029
    return Density(data_in.t, converted, data_in.p)
# function to get time step data for MCNP input
def get_time_step_data(data_in, time_step):
    """ Down-samples the series to the user-requested time steps.
    For each requested time, keeps the first input sample whose time
    reaches it; the original first and last samples are always kept as
    the output endpoints.
    Parameters
    ----------
    data_in : Density
        Data to be condensed into time steps
    time_step : sequence of float
        Requested time-step boundaries, assumed ascending.
    Returns
    -------
    data_out : Density
        Data at the specified time steps (len(time_step) + 2 rows).
    """
    # One output row per requested step, plus the first/last input samples.
    data_out = Density(np.zeros((len(time_step)+2)), np.zeros((len(time_step)+2, len(data_in.p))), data_in.p)
    data_out.t[0] = data_in.t[0]
    data_out.t[-1] = data_in.t[-1]
    data_out.d[0] = data_in.d[0]
    data_out.d[-1] = data_in.d[-1]
    data_index = 0
    time = 0
    for k in range(len(time_step)):
        # Advance through the input until we reach (or pass) the k-th
        # requested time, or exhaust the samples.
        while time < time_step[k] and data_index < len(data_in.t):
            time = data_in.t[data_index]
            data_index += 1
        data_out.t[k+1] = time
        # data_index was advanced one past the selected sample above.
        data_out.d[k+1] = data_in.d[data_index-1]
    return data_out
# write Density class data to file
def writeDensity(data, filename):
    """ Writes Density data to a CSV file with a 'Time,<positions>' header.
    Parameters
    ----------
    data : Density
        Series to serialize; data.t fills column 0, data.d the rest.
    filename : string
        Path of the CSV file to create (overwritten if present).
    """
    n_rows = len(data.t)
    n_cols = len(data.d[0, :]) + 1
    table = np.zeros((n_rows, n_cols))
    table[:, 0] = data.t
    table[:, 1:] = data.d
    header = 'Time,' + ','.join(data.p) + '\n'
    with open(filename, 'w') as f:
        f.write(header)
        np.savetxt(f, table, delimiter=',', newline='\n')
| StarcoderdataPython |
8179849 | <reponame>Botomatik/JackBot
#!/usr/bin/python
"""
Script to extract all wiki page names a certain HTML file points to in
interwiki-link format
The output can be used as input to interwiki.py.
This script takes a single file name argument, the file should be a HTML file
as captured from one of the wikipedia servers.
Arguments:
-bare Extract as internal links: [[Title]] instead of [[Family:xx:Title]]
-sorted Print the pages sorted alphabetically (default: the order in which
they occur in the HTML file)
"""
#
# (C) <NAME>, <NAME>, 2003-2005
#
# Distributed under the terms of the MIT license.
#
__version__='$Id$'
#
import sys,re
import codecs
import wikipedia as pywikibot
# This bot does not contact the Wiki, so no need to get it on the list
pywikibot.stopme()
# Matches the page title inside href="/wiki/<title>" attributes.
R = re.compile('/wiki/(.*?)" *')
# NOTE(review): Python 2 script (print statements). 'fn' starts as an
# empty list purely as a falsy "no file yet" sentinel; 'sorted' and
# 'list' shadow the builtins of the same name.
fn = []
sorted = False
list = []
complete = True
# Parse command-line flags; the first non-flag argument is the HTML file.
for arg in pywikibot.handleArgs():
    if arg.startswith("-sorted"):
        sorted = True
    elif arg.startswith("-bare"):
        complete = False
    elif fn:
        print "Ignoring argument %s"%arg
    else:
        fn = arg
if not fn:
    print "No file specified to get the links from"
    sys.exit(1)
mysite = pywikibot.getSite()
# Slurp the captured HTML page.
f=open(fn,'r')
text=f.read()
f.close()
# Collect every /wiki/ link: interwiki form via mysite.linkto() by
# default, or bare internal [[Title]] form when -bare was given.
for hit in R.findall(text):
    if complete:
        list.append(mysite.linkto(hit))
    else:
        list.append("[[%s]]"%hit)
if sorted:
    list.sort()
for page in list:
    print page
| StarcoderdataPython |
5066727 | from netapp.netapp_object import NetAppObject
class LunSnapUsageLunInfo(NetAppObject):
    """
    Details of the LUN backed by specified snapshot.
    Plain property-holder mapped onto the ONTAPI
    'lun-snap-usage-lun-info' structure; all three fields are required
    strings. NOTE(review): 'basestring' below makes this Python 2-only.
    """
    # Backing field for the 'path' property.
    _path = None
    @property
    def path(self):
        """
        Path of the LUN.
        """
        return self._path
    @path.setter
    def path(self, val):
        if val != None:
            # validate() is inherited from NetAppObject; only non-None
            # values are validated before being stored.
            self.validate('path', val)
        self._path = val
    # Backing field for the 'snapshot' property.
    _snapshot = None
    @property
    def snapshot(self):
        """
        Name of the snapshot in which the LUN exists.
        """
        return self._snapshot
    @snapshot.setter
    def snapshot(self, val):
        if val != None:
            self.validate('snapshot', val)
        self._snapshot = val
    # Backing field for the 'backing_store' property.
    _backing_store = None
    @property
    def backing_store(self):
        """
        Path of the LUN serving as the backing store.
        """
        return self._backing_store
    @backing_store.setter
    def backing_store(self, val):
        if val != None:
            self.validate('backing_store', val)
        self._backing_store = val
    @staticmethod
    def get_api_name():
        """Return the ONTAPI name of this structure."""
        return "lun-snap-usage-lun-info"
    @staticmethod
    def get_desired_attrs():
        """Return the attribute names requested from the API by default."""
        return [
            'path',
            'snapshot',
            'backing-store',
        ]
    def describe_properties(self):
        """Map each property to its deserialization metadata."""
        return {
            'path': { 'class': basestring, 'is_list': False, 'required': 'required' },
            'snapshot': { 'class': basestring, 'is_list': False, 'required': 'required' },
            'backing_store': { 'class': basestring, 'is_list': False, 'required': 'required' },
        }
| StarcoderdataPython |
12859997 | from unittest import TestCase
from leetcodepy.edit_distance import *
solution1 = Solution1()
word11 = "horse"
word12 = "ros"
expected1 = 3
word21 = "intention"
word22 = "execution"
expected2 = 5
class TestEditDistance(TestCase):
def test1(self):
self.assertEqual(expected1, solution1.minDistance(word11, word12))
self.assertEqual(expected2, solution1.minDistance(word21, word22))
| StarcoderdataPython |
9645660 | <filename>doit/api.py<gh_stars>1000+
"""Definition of stuff that can be used directly by a user in a dodo.py file."""
import sys
from doit.cmd_base import ModuleTaskLoader
from doit.doit_cmd import DoitMain
def run(task_creators):
    """run doit using task_creators

    @param task_creators: module or dict containing task creators
    """
    loader = ModuleTaskLoader(task_creators)
    status = DoitMain(loader).run(sys.argv[1:])
    sys.exit(status)
| StarcoderdataPython |
3380119 | <reponame>rajansh87/Algorithms-Implementations<filename>HASHING/2 sum.py
def two_sum(nums, target):
    """Return 1-based indices of the first pair in *nums* summing to *target*.

    Single pass with a value -> index hash table, O(n) time / O(n) space.
    Returns [] when nums has fewer than two elements or no pair matches.
    (Fixes the original script, which printed [] for short input but then
    kept running, and which kept scanning after a match was found.)
    """
    if len(nums) < 2:
        return []
    seen = {}  # value -> index of its first occurrence
    for i, value in enumerate(nums):
        complement = target - value
        if complement in seen:
            return [seen[complement] + 1, i + 1]
        seen[value] = i
    return []


# Demo input from the original script; prints [1, 2] as before.
arr = [2, 7, 11, 15]
target = 9
print(two_sum(arr, target))
| StarcoderdataPython |
11262266 | <filename>mitm/extra/data_processor/out_socket/data_processor/group.py
from mitm.extra.data_processor.out_socket.data_processor import DataProcessor
class DataProcessorGroup(DataProcessor):
    """Composite processor that fans each input string out to children."""

    def __init__(self, *processors):
        # Children are kept in the order they were supplied.
        self.processors = processors

    def process(self, string):
        """Forward *string* to every child processor, in order."""
        for child in self.processors:
            child.process(string)
| StarcoderdataPython |
8178059 | """add ip_addr column
Revision ID: 353ae4db3695
Revises: 80a54752ff01
Create Date: 2018-02-17 03:01:59.381491
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '353ae4db3695'
down_revision = '80a54752ff01'
branch_labels = None
depends_on = None
def upgrade():
    """Add the nullable ip_addr VARCHAR(100) column to the interest table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('interest', sa.Column('ip_addr', sa.String(length=100), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop the ip_addr column from interest."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('interest', 'ip_addr')
    # ### end Alembic commands ###
| StarcoderdataPython |
4840330 | from django.conf.urls import patterns, url
from . import views
# Route table for the "offline" app: /home and /stub map to the two views.
# NOTE(review): django.conf.urls.patterns() was deprecated in Django 1.8
# and removed in 1.10, so this module targets older Django releases.
urlpatterns = patterns('',
    url('^home$', views.home, name='offline.home'),
    url('^stub$', views.stub, name='offline.stub'),
)
| StarcoderdataPython |
9662262 | <filename>src/wrappers/python/pygroupsig/blindsig.py
from _groupsig import lib, ffi
from . import constants
import base64
def blindsig_export(sig):
    """
    Exports the given blinded signature to a Base64 string.
    Parameters:
        sig: The blinded signature (native object) to export.
    Returns:
        The produced Base64 string. On error, an Exception is thrown.
    """
    # Output parameters for the native call: a byte buffer and its size.
    bsig = ffi.new("byte_t **")
    bsig[0] = ffi.NULL
    size = ffi.new("uint32_t *")
    if lib.groupsig_blindsig_export(bsig, size, sig) == constants.IERROR:
        raise Exception('Error exporting blindsig.')
    b64sig = base64.b64encode(ffi.buffer(bsig[0],size[0]))
    b64sig = b64sig.decode('utf-8').replace('\n', '')
    # NOTE(review): the native buffer is never freed (the free call below
    # is commented out) -- possible leak; confirm ownership with the C API.
    # lib.free(bsig[0])
    return b64sig
def blindsig_import(code, b64sig):
    """
    Imports a blinded signature from a Base64 string.
    Parameters:
        code: The groupsig scheme code the signature belongs to.
        b64sig: The Base64 string to import the blinded signature from.
    Returns:
        The imported blinded signature. On error, an Exception is thrown.
    """
    b = base64.b64decode(b64sig)
    sig = lib.groupsig_blindsig_import(code, b, len(b))
    if sig == ffi.NULL:
        raise Exception('Error importing blindsig.')
    return sig
def blindsig_to_string(sig):
    """
    Returns a human readable string corresponding to the given blinded signature.
    Parameters:
        sig: The blinded signature to print.
    Returns:
        The produced string. On error, an Exception is thrown.
    """
    # NOTE(review): this ffi.new allocation is immediately overwritten by
    # the call below, so it is dead code.
    _str = ffi.new("char *")
    _str = lib.groupsig_blindsig_to_string(sig)
    if _str == ffi.NULL:
        raise Exception('Error converting blindsig to string.')
    # Copy the returned C string into a Python str.
    return ffi.string(_str).decode('utf8')
| StarcoderdataPython |
3527960 | <reponame>mocobk/pymock<gh_stars>1-10
# -*- coding:utf-8 -*-
# __auth__ = mocobk
# email: <EMAIL>
import setuptools
# Use the README as the long description shown on PyPI.
with open("README.md", "r", encoding='utf-8') as fh:
    long_description = fh.read()
setuptools.setup(
    name="py-mock",
    version="1.2.1",
    author="mocobk",
    author_email="<EMAIL>",
    description="Mock.js for Python3",
    long_description=long_description,
    long_description_content_type="text/markdown",
    keywords="pymock,Mock,Mock.js,better-mock",
    url="https://github.com/mocobk/pymock",
    packages=['pymock'],
    # py-mini-racer embeds a JS engine used to run the bundled Mock.js.
    install_requires=['py-mini-racer==0.4.0'],
    classifiers=[
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    # Ship the JavaScript assets alongside the package code.
    package_data={'pymock': ['js/*.js']},
)
| StarcoderdataPython |
4939203 | from sw_data_loader.SWDataClassFile import SWData
# Smoke-test script: load image data files from a local folder and report
# what was loaded. NOTE(review): the Windows path below is machine-specific.
print("run home")
data = SWData()
data.load_img_datafiles("I:/RES/TestFolder")
print(data.base_path)
print((len(data.data_classes)))
print(data.is_datafiles_all_same_dim())
4848288 | <reponame>JasonKarle/rpi_weather
#!/usr/bin/env python
"""
This is the core program enabling the local weather and date and time
to be read for a location and parsed. This is intended to provide
the base data to then be graphically displayed on a RPi 3" LCD screen,
which will (may) be executed in a separate module
"""
__author__ = "<NAME>"
__copyright__ = "Copyright 2020, The RPi Weather Project"
__credits__ = ["nil"]
__license__ = "MIT"
__version__ = "0.1.0"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Development"
#test
# module imports
import requests
import datetime
# variables
# OpenWeatherMap 5-day forecast endpoint for city id 6324729, metric units.
# NOTE(review): the API key is hard-coded into the URL; move it to an env
# var or config file before publishing this script.
url = "http://api.openweathermap.org/data/2.5/forecast?id=6324729&APPID=a93c2213b87caa16add92c8d0470c01d&units=metric"
# gets current DTG (local time of the machine running the script)
now = datetime.datetime.now()
# defines degree symbology (degreeF is currently unused)
degree = "\u00B0"
degreeC = "\u2103"
degreeF = "\u2109"
json_data = requests.get(url).json() # grabs weather data from OpenWeatherMap.org in JSON format
# extracts the first forecast entry ('list'[0]) into dedicated variables
current_temp = round(json_data['list'][0]['main']['temp'])
feels_like_temp = round(json_data['list'][0]['main']['feels_like'])
weather_desc = json_data['list'][0]['weather'][0]['description']
icon = json_data['list'][0]['weather'][0]['icon'] # place holder for graphic symbol once GUI is implemented
clouds = json_data['list'][0]['clouds']['all']
wind_speed = json_data['list'][0]['wind']['speed']
# Convert m/s to km/h: multiply by 3600 s/h, divide by 1000 m/km.
wind_kph = round(wind_speed * ((60*60)/1000))
wind_dir = json_data['list'][0]['wind']['deg']
time = now.strftime("%H:%M")
date = now.strftime(("%d %b %Y"))
# prints the desired information to screen
print(f"The time is: {time}, {date}\n")
print(f"The current temperature is: {current_temp}{degreeC}")
print(f"But it feels like: {feels_like_temp}{degreeC}")
print(f"It is currently: {weather_desc}")
print(f"{icon} {clouds}% cloud coverage")
print(f"The wind is coming from {wind_dir}{degree} at {wind_kph} kph")
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.