seq_id stringlengths 4 11 | text stringlengths 113 2.92M | repo_name stringlengths 4 125 ⌀ | sub_path stringlengths 3 214 | file_name stringlengths 3 160 | file_ext stringclasses 18
values | file_size_in_byte int64 113 2.92M | program_lang stringclasses 1
value | lang stringclasses 93
values | doc_type stringclasses 1
value | stars int64 0 179k ⌀ | dataset stringclasses 3
values | pt stringclasses 78
values |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
19951561410 | counter = 0
# Repeatedly prompt for student records and accumulate the total tuition owed.
totalowed = 0  # running sum; initialized so the summary print works even if no students are entered
response = input("Do you want to calculate tution owed Yes or No")
while response == "Yes":
    counter = counter + 1
    lastname = input("Enter lastname")
    credits = input("Enter credits taken")
    district = input("Enter district code I or O")
    # In-district ("I") students pay a lower per-credit rate than everyone else.
    if district == "I":
        tuition = 250.0
    else:
        tuition = 500.0  # fixed typo: was assigned to misspelled 'tution', leaving 'tuition' undefined/stale
    tuitionowed = float(credits) * float(tuition)
    totalowed = totalowed + tuitionowed  # fixed: previously doubled the current student instead of accumulating
    print(lastname, credits, tuitionowed)
    response = input("Do you want to calculate tution owed Yes or No")
print("Sum of all tuition owed: ", totalowed)
print("Number of students: ", counter)
| Dbalboaaaa/CIS-106-W65-Spring-2022 | PS6P5.py | PS6P5.py | py | 623 | python | en | code | 0 | github-code | 36 |
71685390185 | __author__ = 'apple'
from turtle import *
colors=["blue","orange"]
N=400
def posadzka(n):
    """Draw an n x n checkerboard ("posadzka" = floor) of alternating tiles.

    Tile colors alternate using the module-level `colors` list; each tile is
    rendered by shape().
    """
    k = N / n  # tile side; use the board-size constant N instead of a hard-coded 400
    set_starting_point(n)
    for i in range(n):
        for j in range(n):
            color = colors[(i + j) % 2]  # parity of row+column gives the checker pattern
            shape(k, color)
            fd(k)
        # end of a row: return to the row start, then shift one tile sideways to the next row
        bk(k * n)
        rt(90)
        fd(k)
        lt(90)
def shape(k,color):
    """Draw one filled tile of side ~k in the given fill color.

    The turtle is offset inward first, a closed filled path with a small
    square() traced at each corner is drawn, and the turtle is returned to
    its starting position and heading so tiles can be placed side by side.
    """
    fillcolor(color)
    l=k/4
    # step in by l along both axes before starting the fill
    fd(l)
    rt(90)
    fd(l)
    lt(90)
    begin_fill()
    for _ in range(4):
        fd(2*l)
        square(l)  # small square drawn at each corner of the tile
        rt(90)
    end_fill()
    # undo the initial offset: this returns the turtle to its start pose
    bk(l)
    lt(90)
    fd(l)
    rt(90)
def square(k):
    """Trace a square of side k, turning left at each corner."""
    sides_left = 4
    while sides_left:
        fd(k)
        lt(90)
        sides_left -= 1
def set_starting_point(n):
    """Move the turtle (pen up) from the canvas centre to a board corner so
    the N x N board is centred on screen.

    NOTE(review): the parameter n is unused — the offset depends only on the
    module constant N. The pen is never lowered again afterwards, so only the
    fills drawn by shape() appear, not outlines — confirm this is intended.
    """
    penup()
    bk(N/2)
    lt(90)
    fd(N/2)
    rt(90)
    speed(0)  # fastest animation setting
posadzka(3)
done() | chinski99/minilogia | 2009/etap 2/posadzka.py | posadzka.py | py | 746 | python | en | code | 0 | github-code | 36 |
36493183781 | """archetypal StructureInformation."""
import collections
from validator_collection import validators
from archetypal.template.constructions.base_construction import ConstructionBase
from archetypal.template.materials.opaque_material import OpaqueMaterial
class MassRatio(object):
    """Handles the properties of the mass ratio for building template structure."""

    # Backing fields for the HighLoadRatio, Material and NormalRatio properties.
    __slots__ = ("_high_load_ratio", "_material", "_normal_ratio")

    def __init__(self, HighLoadRatio=None, Material=None, NormalRatio=None, **kwargs):
        """Initialize a MassRatio object with parameters.

        Args:
            HighLoadRatio (float): Mass ratio under high load [kg/m2]; must be >= 0.
            Material (OpaqueMaterial): The structural material.
            NormalRatio (float): Mass ratio under normal load [kg/m2]; must be >= 0.
        """
        # **kwargs is accepted but ignored (tolerates surplus keys from dict-based constructors).
        self.HighLoadRatio = HighLoadRatio
        self.Material = Material
        self.NormalRatio = NormalRatio

    @property
    def HighLoadRatio(self):
        """Get or set the high load ratio [kg/m2]."""
        return self._high_load_ratio

    @HighLoadRatio.setter
    def HighLoadRatio(self, value):
        # validators.float rejects negatives; None handling is delegated to
        # validator_collection — TODO confirm whether None is allowed through.
        self._high_load_ratio = validators.float(value, minimum=0)

    @property
    def Material(self):
        """Get or set the structure OpaqueMaterial."""
        return self._material

    @Material.setter
    def Material(self, value):
        assert isinstance(
            value, OpaqueMaterial
        ), f"Material must be of type OpaqueMaterial, not {type(value)}"
        self._material = value

    @property
    def NormalRatio(self):
        """Get or set the normal load ratio [kg/m2]."""
        return self._normal_ratio

    @NormalRatio.setter
    def NormalRatio(self, value):
        self._normal_ratio = validators.float(value, minimum=0)

    def __hash__(self):
        """Return the hash value of self."""
        return hash(self.__key__())

    def __key__(self):
        """Get a tuple of attributes. Useful for hashing and comparing."""
        return (
            self.HighLoadRatio,
            self.Material,
            self.NormalRatio,
        )

    def __eq__(self, other):
        """Assert self is equivalent to other."""
        if not isinstance(other, MassRatio):
            return NotImplemented
        else:
            return self.__key__() == other.__key__()

    def __iter__(self):
        """Iterate over attributes. Yields tuple of (keys, value)."""
        for k, v in self.mapping().items():
            yield k, v

    def to_dict(self):
        """Return MassRatio dictionary representation."""
        # The material is serialized as a "$ref" to its id, not inlined.
        return collections.OrderedDict(
            HighLoadRatio=self.HighLoadRatio,
            Material={"$ref": str(self.Material.id)},
            NormalRatio=self.NormalRatio,
        )

    def mapping(self):
        """Get a dict based on the object properties, useful for dict repr."""
        return dict(
            HighLoadRatio=self.HighLoadRatio,
            Material=self.Material,
            NormalRatio=self.NormalRatio,
        )

    def get_unique(self):
        """Return the first of all the created objects that is equivalent to self."""
        # NOTE(review): trivial implementation — returns self without any
        # deduplication against previously created objects.
        return self

    @classmethod
    def generic(cls):
        """Create generic MassRatio object."""
        # Generic steel material; values taken from the Boston template library.
        mat = OpaqueMaterial(
            Name="Steel General",
            Conductivity=45.3,
            SpecificHeat=500,
            SolarAbsorptance=0.4,
            ThermalEmittance=0.9,
            VisibleAbsorptance=0.4,
            Roughness="Rough",
            Cost=0,
            Density=7830,
            MoistureDiffusionResistance=50,
            EmbodiedCarbon=1.37,
            EmbodiedEnergy=20.1,
            TransportCarbon=0.067,
            TransportDistance=500,
            TransportEnergy=0.94,
            SubstitutionRatePattern=[1],
            SubstitutionTimestep=100,
            DataSource="BostonTemplateLibrary.json",
        )
        return cls(HighLoadRatio=305, Material=mat, NormalRatio=305)

    def duplicate(self):
        """Get copy of self."""
        return self.__copy__()

    def __copy__(self):
        """Create a copy of self (the Material reference is shared, not copied)."""
        return self.__class__(self.HighLoadRatio, self.Material, self.NormalRatio)
class StructureInformation(ConstructionBase):
    """Building Structure settings.

    .. image:: ../images/template/constructions-structure.png
    """

    # Registry of every StructureInformation instance created.
    _CREATED_OBJECTS = []

    __slots__ = ("_mass_ratios",)

    def __init__(self, Name, MassRatios, **kwargs):
        """Initialize object.

        Args:
            Name: Name of the object; forwarded to ConstructionBase.
            MassRatios (list of MassRatio): MassRatio object.
            **kwargs: keywords passed to the ConstructionBase constructor.
        """
        super(StructureInformation, self).__init__(Name, **kwargs)
        self.MassRatios = MassRatios

        # Only at the end append self to _CREATED_OBJECTS
        self._CREATED_OBJECTS.append(self)

    @property
    def MassRatios(self):
        """Get or set the list of MassRatios."""
        return self._mass_ratios

    @MassRatios.setter
    def MassRatios(self, value):
        # Only the container type is checked here; elements are assumed to be
        # MassRatio objects — TODO confirm callers never pass raw dicts.
        assert isinstance(value, list), "mass_ratio must be of a list of MassRatio"
        self._mass_ratios = value

    @classmethod
    def from_dict(cls, data, materials, **kwargs):
        """Create StructureInformation from a dictionary.

        Args:
            data (dict): A python dictionary. NOTE: mutated in place — the
                "MassRatios" and "$id" keys are popped.
            materials (dict): A dictionary of python OpaqueMaterials with their id as
                keys.
            **kwargs: keywords passed to parent constructors.
        """
        mass_ratio_ref = data.pop("MassRatios")
        # Resolve each "$ref" material id against the provided materials dict.
        mass_ratios = [
            MassRatio(
                HighLoadRatio=massratio["HighLoadRatio"],
                Material=materials[massratio["Material"]["$ref"]],
                NormalRatio=massratio["NormalRatio"],
            )
            for massratio in mass_ratio_ref
        ]
        _id = data.pop("$id")
        return cls(MassRatios=mass_ratios, id=_id, **data, **kwargs)

    def to_dict(self):
        """Return StructureInformation dictionary representation."""
        self.validate()  # Validate object before trying to get json format

        data_dict = collections.OrderedDict()

        data_dict["$id"] = str(self.id)
        data_dict["MassRatios"] = [mass.to_dict() for mass in self.MassRatios]
        data_dict["AssemblyCarbon"] = self.AssemblyCarbon
        data_dict["AssemblyCost"] = self.AssemblyCost
        data_dict["AssemblyEnergy"] = self.AssemblyEnergy
        data_dict["DisassemblyCarbon"] = self.DisassemblyCarbon
        data_dict["DisassemblyEnergy"] = self.DisassemblyEnergy
        data_dict["Category"] = self.Category
        data_dict["Comments"] = validators.string(self.Comments, allow_empty=True)
        data_dict["DataSource"] = self.DataSource
        data_dict["Name"] = self.Name

        return data_dict

    def validate(self):
        """Validate object and fill in missing values."""
        # Currently a no-op: nothing is validated or filled in.
        return self

    def mapping(self, validate=False):
        """Get a dict based on the object properties, useful for dict repr.

        Args:
            validate (bool): If True, try to validate object before returning the
                mapping.
        """
        if validate:
            self.validate()

        return dict(
            MassRatios=self.MassRatios,
            AssemblyCarbon=self.AssemblyCarbon,
            AssemblyCost=self.AssemblyCost,
            AssemblyEnergy=self.AssemblyEnergy,
            DisassemblyCarbon=self.DisassemblyCarbon,
            DisassemblyEnergy=self.DisassemblyEnergy,
            Category=self.Category,
            Comments=self.Comments,
            DataSource=self.DataSource,
            Name=self.Name,
        )

    def duplicate(self):
        """Get copy of self."""
        return self.__copy__()

    def __hash__(self):
        """Return the hash value of self."""
        # NOTE(review): hashing is by id while __eq__ compares attributes, so
        # two equal objects may hash differently — confirm this is intended.
        return hash(self.id)

    def __eq__(self, other):
        """Assert self is equivalent to other."""
        if not isinstance(other, StructureInformation):
            return NotImplemented
        else:
            # Name/Category/Comments are not part of the comparison.
            return all(
                [
                    self.AssemblyCarbon == other.AssemblyCarbon,
                    self.AssemblyCost == other.AssemblyCost,
                    self.AssemblyEnergy == other.AssemblyEnergy,
                    self.DisassemblyCarbon == other.DisassemblyCarbon,
                    self.DisassemblyEnergy == other.DisassemblyEnergy,
                    self.MassRatios == other.MassRatios,
                ]
            )

    def __copy__(self):
        """Create a copy of self."""
        return self.__class__(**self.mapping(validate=False))

    @property
    def children(self):
        """Tuple of the OpaqueMaterials referenced by this structure's MassRatios."""
        return tuple(m.Material for m in self.MassRatios)
| samuelduchesne/archetypal | archetypal/template/structure.py | structure.py | py | 8,696 | python | en | code | 11 | github-code | 36 |
2134410901 | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
# Re-use everything from the development config, then override below.
from pelicanconf import *
SITEURL = 'http://egel.pl'
# Following items are often useful when publishing
# We wants absolute URLs
RELATIVE_URLS = False
# Feeds: full site, per-category, per-tag and per-translation Atom feeds.
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/category/%s.atom.xml'
TAG_FEED_ATOM = 'feeds/tag/%s.atom.xml'
TRANSLATION_FEED_ATOM = 'feeds/all-%s.atom.xml'
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# don't delete our .git submodule dir
DELETE_OUTPUT_DIRECTORY = False
# Third-party integrations: Disqus comments and Google Analytics tracking.
DISQUS_SHORT_NAME = "egel"
GOOGLE_ANALYTICS = 'UA-26456669-2'
# minify
MINIFY = {
    'remove_comments': True,
    'remove_all_empty_space': True,
    'remove_optional_attribute_quotes': False
}
PLUGINS.append('minify')
| egel/blog | publishconf.py | publishconf.py | py | 946 | python | en | code | 0 | github-code | 36 |
39754184637 | # coding: utf-8
from ..objecter_core._SmartTemplate import template
from ..objecter_core._Base import _Base
from ..objecter_core._Smart import Translater
from ..objecter_core._common_classes import _NumberInt
from .url import url, Urler, to_exps
class Common(Translater):
    """Translater with rules shared across target languages."""

    class NumberInt(_NumberInt):
        # Output type name used by _NumberInt — TODO confirm exact semantics
        # in objecter_core._common_classes.
        TYPE_OUT = 'Int'
class BasePython(Urler):
    """Translation templates for core Python constructs.

    Each class attribute is a `template(...)` rule: `IN` is the Python
    pattern to match, `INDEX` its parser priority/category.
    NOTE(review): INDEX/locals semantics come from objecter_core._Base and
    are not visible here — comments below are inferred from the IN patterns.
    """

    # Literal `None` (attribute name 'Nonnne' kept as-is; rules may be looked
    # up by attribute name — TODO confirm before renaming).
    Nonnne = template(
        IN='None')

    If = template(
        IN='if <EXP>:',
        INDEX=_Base.FULL_LINE_PARENTER,
        locals=lambda self:self.parent.get_locals())

    # List displays: arbitrary list / exactly three expressions.
    ExpList = template(
        IN='[<EXP:LIST>]')
    Exp3List = template(
        IN='[<EXP>, <EXP>, <EXP>]')
    # ------
    Else = template(
        IN='else:',
        INDEX=_Base.FULL_LINE_PARENTER)
    Print = template(
        IN='print(<EXP>)',
        INDEX=_Base.FULL_LINE_PARENTER)
    Range = template(
        IN='range(<EXP>, <EXP>)',
        INDEX=_Base.IN_LINE_PARENTER)
    # Trailing and full-line comments.
    Comment = template(
        IN='<EXP>#<EXP:TEXT>',
        INDEX=_Base.FULL_LINE_PARENTER)
    CommentFull = template(
        IN='#<EXP:TEXT>',
        INDEX=_Base.FULL_LINE_PARENTER)
    # Subscription, e.g. name[expr].
    Index = template(
        IN='<EXP:NAME>[<EXP>]',
        INDEX=_Base.IN_LINE_CHILD_LAST + 1)
    Eval = template(
        IN='eval(<EXP>)',
        INDEX=_Base.FULL_LINE_PARENTER
    )
    # -----
    List = template(
        IN='[<EXP>]',
        INDEX=_Base.IN_LINE_CHILD_LAST)
    # for-loops over one or two loop variables.
    ForIn = template(
        IN='for <EXP:NAME> in <EXP:^var>:',
        INDEX=_Base.FULL_LINE_PARENTER)
    ForIn2 = template(
        IN='for <EXP:NAME>, <EXP:NAME> in <EXP:^var>:',
        INDEX=_Base.FULL_LINE_PARENTER)
| awini/coup | coup/common/all.py | all.py | py | 1,645 | python | en | code | 0 | github-code | 36 |
13413427634 | import numpy as np
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA
from matplotlib.lines import Line2D
import pandas as pd
##############################################################
# Change these lines to apply on your custom datasets
##############################################################
features = 'scraping/test_data_evaluation/r-mac_features.npy'
dataset = 'scraping/test_dataset.csv'
landmark_colors = {'holstentor': 'b', 'rathaus': 'tab:purple', 'sternschanze': 'g', 'michaelis': 'tab:olive',
'elbphilharmonie': 'tab:orange', 'random': 'tab:brown'}
class_list = ['holstentor', 'rathaus', 'sternschanze', 'michaelis', 'elbphilharmonie', 'random']
save = False
##############################################################
# End of hardcoded parameters
##############################################################
def vis_pca_features(pca_result: np.array, query_id: int = None, answer_id: np.array = None, save_name: str = None,
                     title: str = ''):
    """
    Plot the downprojected features vectors in 2D

    Relies on the module-level globals `test_data`, `class_list` and
    `landmark_colors` for class membership and coloring.

    :param pca_result: The 2D projected version of the vectors
    :param query_id: Index of image that was used as query
    :param answer_id: Array of indices of retrieved images
    :param save_name: Filename to save the plot
    :param title: Title of the plot
    :return:
    """
    fig, ax = plt.subplots(1)
    for l in class_list:
        idxs = test_data[test_data.landmark == l].index.values  # find all images of that class
        ax.scatter(pca_result[idxs, 0], pca_result[idxs, 1], label=l,
                   color=landmark_colors[l])
        ax.scatter(np.average(pca_result[idxs, 0]), np.average(pca_result[idxs, 1]), label=l, marker='*',
                   color=landmark_colors[l])  # plot the class average as star marker
    if query_id is not None:
        ax.scatter(pca_result[query_id, 0], pca_result[query_id, 1], color='r', marker='x', label='query')
    if answer_id is not None:
        # answer_id[1:] skips the first retrieved index — presumably the query
        # itself is ranked first; TODO confirm against the retrieval code.
        ax.scatter(pca_result[answer_id[1:], 0], pca_result[answer_id[1:], 1], color='r', marker=2, label='answer')
    # summarize legend entries of same landmark
    handles, labels = plt.gca().get_legend_handles_labels()
    labels, ids = np.unique(labels, return_index=True)
    handles = [handles[i] for i in ids]
    # insert legend dummy for average star
    avg_patch = Line2D([0], [0], marker='*', color='grey', label='Class average', markersize=9, linestyle='None')
    handles.insert(-1, avg_patch)
    plt.legend(handles, np.insert(labels, -1, 'Class average'), loc='best')
    # hide axis ticks: PCA coordinates carry no interpretable units here
    ax.set_xticks([])
    ax.set_yticks([])
    plt.title(title)
    if save_name:
        plt.savefig(save_name + '.pdf')
    plt.show()
print("Loading features..")
# NOTE(review): 'features' is rebound from the path string to the loaded array.
features = np.load(features)
test_data = pd.read_csv(dataset)
# NOTE(review): class_order is computed but never used in this file — confirm.
class_order = test_data.landmark.values
print("Projecting features..")
pca = PCA(n_components=2)
pca_result = pca.fit_transform(features)
savepath = 'vis_pca_features.pdf' if save else None
vis_pca_features(pca_result, save_name=savepath)
| MiriUll/multimodal_ABSA_Elbphilharmonie | pca_vis_img_features.py | pca_vis_img_features.py | py | 3,060 | python | en | code | 2 | github-code | 36 |
13030737473 | import sys
from PySide6.QtCore import Qt, QTimer, QSettings, QThread, QRegularExpression
from PySide6.QtGui import QIcon, QAction, QPixmap, QIntValidator, QRegularExpressionValidator
from PySide6.QtWidgets import QApplication, QSystemTrayIcon, QMenu, \
QLabel, QWidgetAction, QWidget, QHBoxLayout, QMessageBox, QFormLayout, QLineEdit, QPushButton
import qdarktheme
from modules.flask_thread import FlaskThread
from modules.flask_factory import create_flask_app
import resources
__version__ = "0.1.1"
class IconLabel(QWidget):
    """A horizontal icon-plus-text widget used as a tray-menu header."""

    # Gap inserted between the icon and the text (negative pulls them closer).
    HorizontalSpacing = -2

    def __init__(self, text):
        super().__init__()
        row = QHBoxLayout()
        row.setContentsMargins(2, 0, 0, 0)
        self.setLayout(row)

        icon = QLabel()
        icon.setPixmap(QPixmap(":/icons/feather/life-buoy.svg").scaledToWidth(15))
        icon.setMaximumWidth(20)
        icon.setMaximumHeight(25)
        row.addWidget(icon)

        row.addSpacing(self.HorizontalSpacing)

        caption = QLabel(text)
        caption.setStyleSheet("QLabel {background: rgba(41.000, 42.000, 45.000, 1.000)}")
        row.addWidget(caption)
class ButtonLabel(QWidget):
    """Wraps a checkable ON/OFF toggle button for embedding in the tray menu."""

    horizontal_spacing = 0

    def __init__(self):
        super().__init__()
        toggle_css = """
        QPushButton {
            background-color: darkred;
        }
        QPushButton:checked {
            background-color: green;
        }
        """
        row = QHBoxLayout()
        row.setContentsMargins(1, 5, 8, 5)
        self.setLayout(row)

        self.button = QPushButton()
        self.button.setText("OFF")
        self.button.setCheckable(True)
        self.button.setMinimumWidth(60)
        self.button.setStyleSheet(toggle_css)
        row.addWidget(self.button)
        self.button.clicked.connect(self.on_click)

    def on_click(self):
        """Keep the button caption in sync with its checked state."""
        self.button.setText("ON" if self.button.isChecked() else "OFF")
class LabelEdit(QWidget):
    """A labelled QLineEdit bound to a QSettings key; edits persist immediately."""

    HorizontalSpacing = 2

    def __init__(self, label_txt, key, settings):
        """Build the label + edit row.

        Args:
            label_txt (str): Text shown next to the edit field.
            key (str): QSettings key this field reads and writes.
            settings (QSettings): Settings store backing the field.
        """
        super().__init__()
        self.key = key
        self.settings = settings
        value = self.settings.value(key)

        layout = QFormLayout()
        layout.setContentsMargins(5, 2, 5, 2)
        self.setLayout(layout)

        label = QLabel(label_txt)
        label.setMinimumWidth(70)

        self.edit = QLineEdit()
        self.edit.setValidator(self.get_validator())
        self.edit.setText(value)
        self.edit.setMaximumWidth(70)
        layout.addRow(label, self.edit)
        self.edit.textChanged.connect(self.on_change)

    def get_validator(self):
        """Return the input validator for this field.

        The institution code must be three uppercase letters followed by four
        digits (e.g. ABC1234); every other setting is an integer.
        """
        # Removed stray debug print of self.key.
        if self.key == "institution":
            rx = QRegularExpression()
            rx.setPattern(r"[A-Z]{3}\d{4}")  # raw string, same pattern as before
            return QRegularExpressionValidator(rx, self)
        return QIntValidator()

    def on_change(self):
        """Persist the edited value to QSettings as the user types."""
        value = self.edit.text()
        self.settings.setValue(self.key, value)
class SysTrayApp:
    """System-tray application controlling a Flask relay server.

    Builds the tray icon and its menu (header, ON/OFF toggle, settings
    submenu, exit action), starts/stops the Flask thread and reports status.
    """

    # Human-readable descriptions for HTTP status codes, passed to the Flask
    # relay for building response pages.
    status_desc = {
        100: 'Continue',
        101: 'Switching Protocols',
        102: 'Processing',
        200: 'OK',
        201: 'Created',
        202: 'Accepted',
        203: 'Non-authoritative Information',
        204: 'No Content',
        205: 'Reset Content',
        206: 'Partial Content',
        207: 'Multi-Status',
        208: 'Already Reported',
        226: 'IM Used',
        300: 'Multiple Choices',
        301: 'Moved Permanently',
        302: 'Found',
        303: 'See Other',
        304: 'Not Modified',
        305: 'Use Proxy',
        307: 'Temporary Redirect',
        308: 'Permanent Redirect',
        400: 'Bad Request',
        401: 'Unauthorized',
        402: 'Payment Required',
        403: 'Forbidden',
        404: 'Not Found',
        405: 'Method Not Allowed',
        406: 'Not Acceptable',
        407: 'Proxy Authentication Required',
        408: 'Request Timeout',
        409: 'Conflict',
        410: 'Gone',
        411: 'Length Required',
        412: 'Precondition Failed',
        413: 'Payload Too Large',
        414: 'Request-URI Too Long',
        415: 'Unsupported Media Type',
        416: 'Requested Range Not Satisfiable',
        417: 'Expectation Failed',
        418: 'Im a teapot',
        421: 'Misdirected Request',
        422: 'Unprocessable Entity',
        423: 'Locked',
        424: 'Failed Dependency',
        426: 'Upgrade Required',
        428: 'Precondition Required',
        429: 'Too Many Requests',
        431: 'Request Header Fields Too Large',
        444: 'Connection Closed Without Response',
        451: 'Unavailable For Legal Reasons',
        499: 'Client Closed Request',
        500: 'Internal Server Error',
        501: 'Not Implemented',
        502: 'Bad Gateway',
        503: 'Service Unavailable',
        504: 'Gateway Timeout',
        505: 'HTTP Version Not Supported',
        506: 'Variant Also Negotiates',
        507: 'Insufficient Storage',
        508: 'Loop Detected',
        510: 'Not Extended',
        511: 'Network Authentication Required',
        599: 'Network Connect Timeout Error'
    }

    def __init__(self, app):
        """Build the tray UI and run the Qt event loop.

        Args:
            app (QApplication): The application instance to attach to.
        """
        self.app = app
        self.settings = QSettings("Region Västerbotten", "getmod")
        self.thread = QThread()
        self.timer = QTimer()
        self.timer.timeout.connect(self.check_flask_status)

        self.tray = QSystemTrayIcon(QIcon(":/icons/feather/life-buoy.svg"), self.app)
        self.menu = QMenu()
        self.menu.setMinimumWidth(80)
        self.menu.setContentsMargins(10, 2, 2, 2)

        header = IconLabel("- GetMod " + __version__ + " -")
        header.setStyleSheet("margin-left: 0px; margin-top: 0px; margin-bottom: 5px")
        header_action = QWidgetAction(self.menu)
        header_action.setDefaultWidget(header)
        self.menu.addAction(header_action)

        # ON/OFF toggle that starts/stops the Flask server.
        self.action_onoff = ButtonLabel()
        action_onoff = QWidgetAction(self.menu)
        action_onoff.setDefaultWidget(self.action_onoff)
        self.menu.addAction(action_onoff)
        self.action_onoff.button.clicked.connect(self.onoff_clicked)

        # Settings submenu: each entry persists directly to QSettings.
        self.submenu_settings = self.menu.addMenu("Settings")
        self.submenu_settings.setMaximumWidth(200)

        listen = LabelEdit("Listen port", "listen_port", self.settings)
        listen_action = QWidgetAction(self.submenu_settings)
        listen_action.setDefaultWidget(listen)
        self.submenu_settings.addAction(listen_action)

        target = LabelEdit("Target port", "target_port", self.settings)
        target_action = QWidgetAction(self.submenu_settings)
        target_action.setDefaultWidget(target)
        self.submenu_settings.addAction(target_action)

        apikey = LabelEdit("API key", "apikey", self.settings)
        apikey_action = QWidgetAction(self.submenu_settings)
        apikey_action.setDefaultWidget(apikey)
        self.submenu_settings.addAction(apikey_action)

        institution = LabelEdit("Institution", "institution", self.settings)
        institution_action = QWidgetAction(self.submenu_settings)
        institution_action.setDefaultWidget(institution)
        self.submenu_settings.addAction(institution_action)

        self.action_exit = QAction("Exit")
        self.action_exit.triggered.connect(self.exit)
        self.menu.addAction(self.action_exit)

        self.tray.setToolTip("GetMod - get request modifier")
        self.tray.setContextMenu(self.menu)
        self.tray.setVisible(True)
        self.tray.show()
        self.app.setStyleSheet(qdarktheme.load_stylesheet())
        # NOTE(review): the constructor blocks here running the Qt event loop
        # and exits the process itself, so the sys.exit(app.exec()) in the
        # __main__ guard is effectively unreachable.
        sys.exit(self.app.exec())

    def exit(self):
        """Stop the Flask thread, hide the tray icon and quit the app."""
        self.thread.terminate()
        self.thread.wait()
        self.tray.hide()
        self.app.exit()

    def check_flask_status(self):
        """Verify the Flask thread came up; reset the toggle and warn if not."""
        if not self.thread.isRunning():
            # Fixed: ButtonLabel has no setChecked(); the checkable widget is
            # its inner QPushButton. The caption is restored too, because
            # setChecked() does not emit clicked().
            self.action_onoff.button.setChecked(False)
            self.action_onoff.button.setText("OFF")
            msgBox = QMessageBox()
            msgBox.setIcon(QMessageBox.Critical)
            msgBox.setWindowIcon(QIcon(":/icons/feather/life-buoy.svg"))
            msgBox.setWindowTitle("Critical Error")
            msgBox.setText("Houston, the flask server did not start!")
            msgBox.exec()

    def onoff_clicked(self):
        """Start the Flask server when toggled on; terminate it when off."""
        if self.action_onoff.button.isChecked():
            self.start_flask()
        else:
            self.thread.terminate()
            print("Flask off!")

    def start_flask(self):
        """Create the Flask app from the current settings and run it in a thread."""
        apikey = self.settings.value('apikey')
        institution = self.settings.value('institution')
        listen_port = self.settings.value('listen_port')
        target_port = self.settings.value('target_port')
        flask_app = create_flask_app(apikey, institution, target_port, self.status_desc)
        self.thread = FlaskThread(flask_app, listen_port)
        self.thread.start()
        # Give the server a moment to boot, then confirm it is alive.
        self.timer.singleShot(1000, self.check_flask_status)
if __name__ == "__main__":
    app = QApplication(sys.argv)
    tray = SysTrayApp(app)
    # NOTE(review): SysTrayApp.__init__ already runs the event loop and calls
    # sys.exit itself, so the line below is only reached if that changes.
    sys.exit(app.exec())
| gmc-norr/getmod | getmod.py | getmod.py | py | 13,431 | python | en | code | 0 | github-code | 36 |
19800687308 | import sys
# 0/1 knapsack: N items, capacity K (BOJ 12865).
N, K = [int(n) for n in sys.stdin.readline().split()]
# 1-based weight/value arrays: index 0 is a dummy so items are 1..N.
W = [0]
V = [0]
for _ in range(N):
    w, v = [int(n) for n in sys.stdin.readline().split()]
    W.append(w)
    V.append(v)
# dp[i][j] = best value achievable with capacity i using only items 1..j.
dp = [[0] * (N + 1) for _ in range(K+1)]
for i in range(1, K+1):
    for j in range(1, N+1):
        if i < W[j]:
            # item j does not fit in capacity i
            dp[i][j] = dp[i][j-1]
        else:
            # skip item j, or take it and fall back to capacity i - W[j]
            dp[i][j] = max(dp[i][j-1], dp[i-W[j]][j-1] + V[j])
# dp[K] is non-decreasing in j, so max(dp[K]) equals dp[K][N].
print(max(dp[K]))
class Solution:
    def maxMoves(self, grid: list[list[int]]) -> int:
        """Return the maximum number of rightward moves starting from any cell
        in column 0, where each move goes to an adjacent cell in the next
        column (up-right, right, down-right) holding a strictly larger value.
        """
        # Local import makes the snippet self-contained; the original relied
        # on LeetCode's harness to provide `cache` (and `List`).
        from functools import cache

        m, n = len(grid), len(grid[0])

        @cache
        def dp(i: int, j: int) -> int:
            """Longest chain of valid moves starting at cell (i, j)."""
            if i < 0 or i >= m or j < 0 or j >= n:
                return 0
            best = 0
            cur = grid[i][j]
            # The three reachable neighbours are all in column j + 1.
            for di, dj in ((-1, 1), (0, 1), (1, 1)):
                ni, nj = i + di, j + dj
                if 0 <= ni < m and 0 <= nj < n and grid[ni][nj] > cur:
                    best = max(best, dp(ni, nj) + 1)
            return best

        # Starts are restricted to column 0.
        return max(dp(i, 0) for i in range(m))
11361181001 | import sys
# SWEA "글자수": for each test case, find the character of str1 that occurs
# most often in str2 and print that count.
sys.stdin = open('글자수.txt')
T = int(input())
for tc in range(1, T+1):
    str1 = input()
    str2 = input()
    # str.count replaces the original hand-written nested index loops;
    # max() replaces the manual maximum scan. Output is unchanged.
    counts = [str2.count(ch) for ch in str1]
    print('#{} {}'.format(tc, max(counts)))
16248087762 | from .base import Scoring
from math import pi
import torch
__all__ = ["ComplEx"]
class ComplEx(Scoring):
    """ComplEx (complex-valued) scoring function.

    Embeddings store their real and imaginary parts concatenated along the
    last dimension. The score is the real part of the Hermitian trilinear
    product of head, relation and tail, summed over the embedding dimension.
    """

    def __init__(self):
        super().__init__()

    def __call__(self, head, relation, tail, mode, **kwargs):
        """Compute the score of given facts (heads, relations, tails).

        Parameters
        ----------
        head: Embeddings of heads.
        relation: Embeddings of relations.
        tail: Embeddings of tails.
        mode: "head-batch" or "tail-batch"; selects which pair of factors is
            combined first so the batched side broadcasts correctly.
        """
        h_re, h_im = torch.chunk(head, 2, dim=2)
        r_re, r_im = torch.chunk(relation, 2, dim=2)
        t_re, t_im = torch.chunk(tail, 2, dim=2)

        if mode == "head-batch":
            # Combine relation and tail first, then project onto the head.
            part_re = r_re * t_re + r_im * t_im
            part_im = r_re * t_im - r_im * t_re
            score = h_re * part_re + h_im * part_im
        else:
            # Combine head and relation first, then project onto the tail.
            part_re = h_re * r_re - h_im * r_im
            part_im = h_re * r_im + h_im * r_re
            score = part_re * t_re + part_im * t_im

        return score.sum(dim=2)
| raphaelsty/ckb | ckb/scoring/complex.py | complex.py | py | 2,492 | python | en | code | 20 | github-code | 36 |
2037492885 | #!/usr/bin/env python3
import argparse
import datetime
import importlib
import re
import site
import traceback
from pathlib import Path
import yaml
SECRET_FILENAME = "secrets.yaml"
SECRET_REGEX = re.compile(r"!secret\s(\w+)")
def main():
    """CLI entry point: import each waste-collection source, run its
    TEST_CASES through Source.fetch() and report the entry counts."""
    parser = argparse.ArgumentParser(description="Test sources.")
    parser.add_argument(
        "-s", "--source", action="append", help="Test given source file"
    )
    parser.add_argument(
        "-l", "--list", action="store_true", help="List retrieved entries"
    )
    parser.add_argument(
        "-i", "--icon", action="store_true", help="Show waste type icon"
    )
    parser.add_argument("--sorted", action="store_true", help="Sort output by date")
    parser.add_argument("--weekday", action="store_true", help="Show weekday")
    parser.add_argument(
        "-t",
        "--traceback",
        action="store_true",
        help="Print exception information and stack trace",
    )
    args = parser.parse_args()
    # read secrets.yaml
    secrets = {}
    try:
        with open(SECRET_FILENAME) as stream:
            try:
                secrets = yaml.safe_load(stream)
            except yaml.YAMLError as exc:
                print(exc)
    except FileNotFoundError:
        # ignore missing secrets.yaml
        pass
    package_dir = Path(__file__).resolve().parents[2]
    source_dir = package_dir / "waste_collection_schedule" / "source"
    # add module directory to path
    site.addsitedir(str(package_dir))
    # -s/--source limits the run to the named sources; default is all of them.
    if args.source is not None:
        files = args.source
    else:
        files = filter(
            lambda x: x != "__init__",
            map(lambda x: x.stem, source_dir.glob("*.py")),
        )
    for f in sorted(files):
        # iterate through all *.py files in waste_collection_schedule/source
        print(f"Testing source {f} ...")
        module = importlib.import_module(f"waste_collection_schedule.source.{f}")
        # get all names within module
        names = set(dir(module))
        # test if all mandatory names exist
        assert "TITLE" in names
        assert "DESCRIPTION" in names
        assert "URL" in names
        assert "TEST_CASES" in names
        # run through all test-cases
        for name, tc in module.TEST_CASES.items():
            # replace secrets in arguments
            replace_secret(secrets, tc)
            # create source
            try:
                source = module.Source(**tc)
                result = source.fetch()
                count = len(result)
                if count > 0:
                    print(
                        f"  found {bcolors.OKGREEN}{count}{bcolors.ENDC} entries for {name}"
                    )
                else:
                    print(
                        f"  found {bcolors.WARNING}0{bcolors.ENDC} entries for {name}"
                    )
                # test if source is returning the correct date format
                if (
                    len(
                        list(
                            filter(lambda x: type(x.date) is not datetime.date, result)
                        )
                    )
                    > 0
                ):
                    print(
                        f"{bcolors.FAIL}  ERROR: source returns invalid date format (datetime.datetime instead of datetime.date?){bcolors.ENDC}"
                    )
                if args.list:
                    result = (
                        sorted(result, key=lambda x: x.date) if args.sorted else result
                    )
                    for x in result:
                        icon_str = f" [{x.icon}]" if args.icon else ""
                        weekday_str = x.date.strftime("%a ") if args.weekday else ""
                        print(
                            f"    {x.date.isoformat()} {weekday_str}: {x.type}{icon_str}"
                        )
            except KeyboardInterrupt:
                exit()
            except Exception as exc:
                # one failing test case must not abort the whole run
                print(f"  {name} {bcolors.FAIL}failed{bcolors.ENDC}: {exc}")
                if args.traceback:
                    print(indent(traceback.format_exc(), 4))
def replace_secret(secrets, d):
    """Recursively replace ``!secret <name>`` string values in *d*, in place.

    String values matching SECRET_REGEX are substituted with the matching
    entry from *secrets*; nested dicts are processed recursively. Unknown
    secret names are reported and left unchanged.
    """
    # Iterate items() instead of keys()+indexing; only values are replaced,
    # never keys, so in-place mutation while iterating is safe.
    for key, value in d.items():
        if isinstance(value, dict):
            replace_secret(secrets, value)
        elif isinstance(value, str):
            match = SECRET_REGEX.fullmatch(value)
            if match is not None:
                name = match.group(1)  # renamed from 'id' (shadowed the builtin)
                if name in secrets:
                    d[key] = secrets[name]
                else:
                    print(f"identifier '{name}' not found in {SECRET_FILENAME}")
def indent(s, count):
    """Return *s* with every line (including empty ones) prefixed by *count* spaces.

    Unlike textwrap.indent, empty lines are prefixed too, which keeps
    multi-line traceback blocks visually aligned.
    """
    prefix = " " * count  # renamed: the local previously shadowed the function name
    return "\n".join(prefix + line for line in s.split("\n"))
class bcolors:
    """ANSI escape sequences for coloring terminal output."""
    HEADER = "\033[95m"
    OKBLUE = "\033[94m"
    OKCYAN = "\033[96m"
    OKGREEN = "\033[92m"
    WARNING = "\033[93m"
    FAIL = "\033[91m"
    ENDC = "\033[0m"  # reset all attributes
    BOLD = "\033[1m"
    UNDERLINE = "\033[4m"
if __name__ == "__main__":
main()
| geNAZt/home-assistant | custom_components/waste_collection_schedule/waste_collection_schedule/test/test_sources.py | test_sources.py | py | 5,022 | python | en | code | 0 | github-code | 36 |
class Solution:
    def compareVersion(self, version1: str, version2: str) -> int:
        """Compare two dotted version strings level by level.

        Returns 1 if version1 is larger, -1 if smaller, 0 if equal.
        Levels are compared numerically; missing levels count as zero.
        """
        parts1 = [int(p) for p in version1.split('.')]
        parts2 = [int(p) for p in version2.split('.')]
        # Walk the longer of the two; pad the shorter one with zeros.
        for idx in range(max(len(parts1), len(parts2))):
            left = parts1[idx] if idx < len(parts1) else 0
            right = parts2[idx] if idx < len(parts2) else 0
            if left > right:
                return 1
            if left < right:
                return -1
        return 0
| korynewton/code-challenges | leetcode/CompareVersionNumbers/solution.py | solution.py | py | 1,025 | python | en | code | 0 | github-code | 36 |
7683932011 | # ----------------------------------------------------------------------------------------
# prepare environment (boilerplate)
# import the required packages using their usual aliases
import dash
from dash import dcc, html, Input, Output, State
import dash_bootstrap_components as dbc
import plotly.graph_objects as go
import plotly.express as px
import pandas as pd
import humanize
import os
# read token string with your access mapbox token from a hidden file
# saved in environment's root directory same as where this app.py file is
# if you're using GitHub make sure to add '*.mapbox_token' to your .gitignore file
# to prevent your private credentials from being publicly viewed or uploaded to GitHub
mapbox_access_token = os.environ.get('MAPBOX_ACCESS_TOKEN')
# ----------------------------------------------------------------------------------------
# -- call the data
# -- read the food trade matrix data into pandas from CSV file of 2019 export quantities (exported from analysis in Jupyter Notebook)
# prepared using original dataset FAOSTAT Detailed trade matrix: All Data Normalized from https://fenixservices.fao.org/faostat/static/bulkdownloads/Trade_DetailedTradeMatrix_E_All_Data_(Normalized).zip
# with appended key demographics from FAOSTAT Key dataset (in Jupyter Notebook)
# # full dataset
dffood = pd.read_csv('./data/dffood.csv')
# -- read the 4.5 depth soil organic carbon density (%) measurements pre-filtered for audience China's and U.S.'s food's trade export Reporter Countries (exported from analysis in Jupyter Notebook)
# prepared using original dataset Soil organic carbon density: SOCD5min.zip from http://globalchange.bnu.edu.cn/research/soilw
# with appended country name and ISO3 code from GeoPandas embedded World dataset
dfsoil = pd.read_csv('./data/dfsoil_subUSCN_prod.csv')
# ----------------------------------------------------------------------------------------
# create (instantiate) the app,
# using the Bootstrap MORPH theme, Slate (dark) or Flatly (light) theme or Darkly (its dark counterpart) to align with my llc website in development with Flatly (dadeda.design)
app = dash.Dash(__name__, external_stylesheets=[dbc.themes.MORPH],
meta_tags=[{'name': 'viewport',
# initial-scale is the initial zoom on each device on load
'content': 'width=device-width, initial-scale=1.0, maximum-scale=1.2, minimum-scale=0.5'}]
)
server = app.server
app.title = 'Sustain-Our-Soil-for-Our-Food'
# ----------------------------------------------------------------------------------------
# named variables for the app's layout
navbar = dbc.NavbarSimple(
children=[
dbc.DropdownMenu(
children=[
dbc.DropdownMenuItem("Email", href="mailto:kathryn@dadeda.design?subject=Sustain our Soil for our Food", target='_blank'), # mailto link, github issues, and/or "http://kathrynhurchla.com/", target="_blank"),
# submit a gitHub issue (with options of feature request or bug report active at time of prototype deployment)
dbc.DropdownMenuItem("Submit issues or Ideas", href="https://github.com/khurchla/sustain-our-soil-for-our-food/issues/new/choose", target='_blank'),
# link to gitHub repository for readme caveats, data preparation, or to recreate app/opensource code
dbc.DropdownMenuItem("View source code", href="https://github.com/khurchla/sustain-our-soil-for-our-food", target='_blank')
],
nav=True,
in_navbar=True,
label="Contact",
),
dbc.DropdownMenu(
children=[
# placeholder for Twitter button javascript embed # <a href="https://twitter.com/share?ref_src=twsrc%5Etfw" class="twitter-share-button" data-text="Organic carbon occurs naturally in soil, but whether it presents a threat or a service to humans depends on YOU." data-via="khurchla" data-hashtags="dataviz" data-show-count="false">Tweet</a><script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>
dbc.DropdownMenuItem("Tweet", href="#"),
# placeholder for popular Chinese social media Weibo share URL: http://service.weibo.com/share/share.php?url=http://example.com&appkey=&title=Organic carbon occurs naturally in soil, but whether it presents a threat or a service to humans depends on YOU.&pic=&ralateUid=&language=zh_cn
dbc.DropdownMenuItem("Weibo", href="#"),
],
nav=True,
in_navbar=True,
label="Share",
),
],
brand='Sustain Our Soil for Our Food',
color='#483628', # "dark", #hex code color matching text in graphs, a dark orange brown; "dark" is MORPH theme option and a dark charcoal
dark=True,
class_name="fixed-top",
)
appSubheading = dbc.Container(
html.Div([
html.H5("Organic carbon occurs naturally in soil, but whether it presents a threat or a service to humans depends on YOU.")
])
)
# # empty card to push the info tooltip to the far right
# controlsSpacer = dbc.CardBody(
# html.Div()
# )
tooltip = dbc.CardFooter(
html.Div(children=[
dbc.Button(
"info",
id="info-toolbar-tooltip",
# class_name="mx-2",
n_clicks=0,
size="sm"
),
dbc.Tooltip(
"Use the in toolbar in the upper right corner of the map to zoom, move around, or reset your view.",
target="info-toolbar-tooltip",
placement="left"
),
],
))
learnMore = html.Div(children=[
dbc.Button("Learn more about soil health, and how you can help.", id="learn-more-button", n_clicks=0, color="link", size="md", class_name="btn btn-link"),
dbc.Modal(children=[
dbc.ModalHeader(dbc.ModalTitle("Take Your Curiosity a Step Further.")),
dbc.ModalBody(children=['Copy these suggested key terms by clicking the paper icon beside them or by selecting and copying them directly from within the text area below, and then paste them into your preferred search engine. There are many excellent resources to learn more on your journey as a soil stakeholder.',
html.Br(),
html.Br(),
dcc.Textarea(
id="search_terms_textarea_id",
value='"soil health" OR "soil carbon" OR "soil organic carbon" OR "regenerative agriculture" OR "regenerative grazing"',
style={"heaight": '100%',
"width": 300,
"overflow": "auto"},
),
dcc.Clipboard(
target_id="search_terms_textarea_id",
title="copy",
style={
"display": "inline-block",
"fontSize": 20,
"color": '#483628',
"verticalAlign": "top"
}
)
]
),
dbc.ModalFooter(
dbc.Button(
"Close", id="learn-more-close", className="ms-auto", n_clicks=0
)
),
],
id="modal",
size="lg",
is_open=False,
centered=True,
style={"color": '#483628'}
)
])
whyCarbon = dbc.Card(
html.Div(children=[
html.H5("Carbon has a superpower.",
style={'text-align': 'left'}
),
html.P("Often called the element or giver of life, carbon is critical to life supporting processes because it can bond to many other elements essentially as a building block of large and complex compounds that make up living things––including soil, and the plants and animals in the food chain. Soil organic carbon is left in the soil by the processes collectively called the Carbon Cycle, which includes both the growth and death of plants, animals, and other organisms.",
style={'text-align': 'left'}
),
html.P("Soil organic carbon (SOC) indicates soil's ability to hold water and nutrients that sustain plants in natural and farming settings. As an indicator of soil's overall organic matter, it also builds soil structure that reduces erosion leading to improved water quality and greater resilience from storms.",
style={'text-align': 'left'}
),
html.P("Including its mineral inorganic carbon parts, our soil holds the largest amount of carbon in Earth's ecosystem, and its release––through mismanagement from a lack of knowledge and the removal of forests and wetlands––is a great risk to increasing carbon dioxide in the atmosphere and speeding up climate change.",
style={'text-align': 'left'}
),
html.P("Whether your food comes from across the globe or your own garden, you have an opportunity to restore and ensure soil health to fill bellies all over the world with nutritious foods for years to come. By learning more, you can have an impact on soil health, and together we may even save the world one plate at a time.",
style={'text-align': 'left'}
)
]),
body=True,
color="light",
class_name="card bg-light mb-3"
)
dropdownReporterCountry = dbc.CardBody(
html.Div(children=[
# add a brief instructive subheading as a label
dbc.Label('Choose a trade partner.', style={'text-align': 'left'}
),
# add a dropdown for audience member using app to select a reporter country (their partner who exports the food they've chosen to their country)
dcc.Dropdown(id='reporter_country_dropdown',
options=[{'label': country, 'value': country}
# series values needed to be sorted first before taking unique to prevent errors
for country in dfsoil['Reporter_Country_name'].sort_values().unique()],
placeholder='Trade Partner',
searchable=True,
clearable=True, # shows an 'X' option to clear selection once selection is made
persistence=True, # True is required to use a persistence_type
persistence_type='session', # remembers dropdown value selection until browser tab is closed (saves after refresh)
multi=False, # do not allow multiple country selections (default); doing so would require more code development in callback function
style={"width": "75%"}
)
])
)
controls = html.Div(children=[
dbc.CardGroup([dropdownReporterCountry, tooltip], class_name="card border-primary bg-light mb-2")
]
)
mapExplorer = dbc.Card([
html.Div(children=[
html.P('Explore how much of the soil where your food comes from is made up of organic carbon.',
className="lead"
),
html.Div(controls),
# # map format without spinner for reference
# html.Div(id='map-socd',
# ),
# add a loading spinner to the map
dbc.Spinner(id='map-socd', size="lg", color="primary", type="border", fullscreen=False
),
]),
html.Br(),
html.Div(children=[
html.P("Dots on the map vary in size by the location's soil organic carbon density (SOCD), which can be understood as how much of the soil is made up of organic carbon, from the ground surface down to 4.5 centimeters deep. These density estimates are by global leading scientists from the available worldwide soil data––collected and mathematically modelled––and are expressed in metric tonnes per hectare (t ha-1), which are equal to about 1,000 kilograms or aproximately 2,205 pounds.",
style={'text-align': 'left'}),
html.P("Read more about carbon's importance in soil below.",
style={'text-align': 'left'}),
html.P(children=[
"Data source: Shangguan, W., Dai, Y., Duan, Q., Liu, B. and Yuan, H., 2014. A Global Soil Data Set for Earth System Modeling. Journal of Advances in Modeling Earth Systems, ",
html.A("6: 249-263.",
href='https://agupubs.onlinelibrary.wiley.com/doi/full/10.1002/2013MS000293',
target='_blank' # opens link in new tab or window
)
],
style={'text-align': 'left'}),
]),
# html.Br()
], body=True)
# --------------------------SOIL BAR graph--------------------------
# take the mean SOCD by grouping soil dataframe by Country and append the mean as a column
dfsoil['SOCDcountryMean'] = dfsoil['Reporter_Country_SOCD_depth4_5'].groupby(dfsoil['Reporter_Country_name']).transform('mean')
# drop the raw SOCD values from the subset of soil data; used in density ranges bar chart
dfsoilMeans = dfsoil.drop_duplicates(subset=['Reporter_Country_name', 'Reporter_Country_continent', 'SOCDcountryMean', 'Reporter_Country_pop_est']).drop(['Reporter_Country_SOCD_depth4_5'], axis=1).sort_values(by=['SOCDcountryMean', 'Reporter_Country_continent', 'Reporter_Country_name'], ascending=(False, True, True))
dfsoilMeansMaxOrder = ['Africa', 'Oceania', 'South America', 'Asia', 'North America', 'Europe']
# make numbers into a more human readable format, e.g., transform 12345591313 to '12.3 billion' for hover info
dfsoilMeans['humanPop'] = dfsoilMeans['Reporter_Country_pop_est'].apply(lambda x: humanize.intword(x))
# make a bar chart showing range of mean by countries, overlay countries within continent group to retain mean y axis levels
rangeSOCDfig = px.bar(dfsoilMeans, x='Reporter_Country_continent', y='SOCDcountryMean', color='SOCDcountryMean', barmode='overlay',
# set bolded title in hover text, and make a list of columns to customize how they appear in hover text
custom_data=['Reporter_Country_name',
'Reporter_Country_continent',
'SOCDcountryMean',
'humanPop'
],
color_continuous_scale=px.colors.sequential.speed, # alternately use turbid for more muted yellows to browns (speed for yellow to green to black scale)
# a better label that will display over color legend
labels={'SOCDcountryMean': 'Avg.<br>SOCD'},
# lower opacity to help see variations of color between countries as means change
opacity=0.20
)
# sort bars by mean SOCD, and suppress redundant axis titles, instead of xaxis={'categoryorder': 'mean ascending'} I pre-sorted the dataframe above, but still force sort here by explicit names
rangeSOCDfig.update_layout(xaxis={'categoryorder': 'array', 'categoryarray': dfsoilMeansMaxOrder},
xaxis_title=None, yaxis_title=None, # removed xaxis_tickangle=-45, # used to angle longer/more xaxis labels
paper_bgcolor='#e8ece8', # next tint variation up from a low tint of #dadeda
plot_bgcolor='#f7f5fc', # violet tone of medium purple to help greens pop forward
yaxis={'gridcolor': '#e8ece8'}, # match grid lines shown to background to appear as showing through
font={'color': '#483628'}) # a dark shade of orange that appears dark brown
rangeSOCDfig.update_traces(
hovertemplate="<br>".join([
"<b>%{customdata[0]} </b><br>", # bolded hover title included, since the separate hover_name is superseced by hovertemplae
"%{customdata[1]}", # Continent value with no label
"Average SOCD: %{customdata[2]:.1f} t ha<sup>−1</sup>", # with html <sup> superscript tag in abbr. metric tonnes per hectare (t ha-1) t ha<sup>−1</sup> formatted to 2 decimals
"Estimated Population (2019): %{customdata[3]} people" # in humanized format
])
)
densityRanges = dbc.Card([
html.Div(children=[
html.H5("Range of Average Soil Organic Carbon Density (SOCD) Worldwide"
),
dcc.Graph(figure=rangeSOCDfig,
id="SOCD-bar-chart",
config={'displayModeBar': True, 'scrollZoom': True}
)
]),
html.Br(),
html.Div(children=[
html.P("Bars show the range of soil organic carbon density on land as a mean average within each country in metric tonnes per hectare (t ha-1), which are equal to about 1,000 kilograms or aproximately 2,205 pounds. Hover over any bar to view details for specific countries.",
style={'text-align': 'left'}),
html.P(children=[
"Data source: Shangguan, W., Dai, Y., Duan, Q., Liu, B. and Yuan, H., 2014. A Global Soil Data Set for Earth System Modeling. Journal of Advances in Modeling Earth Systems, ",
html.A("6: 249-263.",
href='https://agupubs.onlinelibrary.wiley.com/doi/full/10.1002/2013MS000293',
target='_blank' # opens link in new tab or window
)
],
style={'text-align': 'left'}),
]),
html.Br()
], body=True)
# --------------------------FOOD TRADE graph--------------------------
# take the sum total of exported tonnes by grouping food dataframe by Partner (importing) Country and append the sum as a column
dffood['Export_Quantity_Sum'] = dffood['Export_Quantity_2019_Value_tonnes'].groupby(dffood['Partner_Country_name']).transform('sum')
# take the distinct count of exported items by grouping food dataframe by Reporter (exporting) Country and append the count as a column
dffood['Export_Items_Count'] = dffood['Item'].groupby(dffood['Partner_Country_name']).transform('nunique')
# make numbers into a more human readable format, e.g., transform 12345591313 to '12.3 billion' for hover info
dffood['tradeVolume'] = dffood['Export_Quantity_Sum'].apply(lambda x: humanize.intword(x))
# food data scatterplot points
RiskFoodsFig = px.scatter(dffood, x='Export_Items_Count', y='Export_Quantity_Sum', size='Export_Quantity_Sum',
custom_data=['Partner_Country_name', # 'Reporter_Country_name_x',
'Export_Quantity_Sum',
'Export_Items_Count'
]
)
# sort bars by mean SOCD, and suppress redundant axis titles, instead of xaxis={'categoryorder': 'mean ascending'} I pre-sorted the dataframe above, but still force sort here by explicit names
RiskFoodsFig.update_layout(
xaxis_title='Diversity of Foods Imported (How many unique items?)', # Exported (How many unique items?)',
# move yaxis text to title area for readability; add empty line above it so it appears below the plotly toolbar options
title={
'text': 'Volume as Total Quantity of Foods Imported (tonnes)',
'xref': 'container',
},
yaxis_title='', # moved to title attribute for readability
paper_bgcolor='#e8ece8', # next tint variation up from a low tint of #dadeda
plot_bgcolor='#f7f5fc', # violet tone of medium purple to help greens pop forward
yaxis={'gridcolor': '#e8ece8'}, # match grid lines shown to background to appear as showing through
font={'color': '#483628'}) # a dark shade of orange that appears dark brown
RiskFoodsFig.update_traces(
# hard code single point color
marker=dict(
color='#a99e54',
sizemin=10
),
# set bolded title in hover text, and make a list of columns to customize how they appear in hover text
hovertemplate="<br>".join([
"<b>%{customdata[0]} </b><br>", # bolded hover title included, since the separate hover_name is superseced by hovertemplae
"Trade Volume: %{customdata[1]:,} tonnes imported", # %{customdata[2]:,} tonnes exported", # note html tags can be used in string; comma sep formatted; note with tradeVolume use format .1f to 1 decimals
"Trade Diversity: %{customdata[2]:} unique food products imported" # %{customdata[3]:} unique food products exported",
])
)
riskFoods = dbc.Card([
html.Div(children=[
html.H5("Food Security Risk Analysis by Volume & Diversity of Food Trade Reliance"
),
dcc.Graph(figure=RiskFoodsFig,
id="food-quadrant-chart",
config={'displayModeBar': True, 'scrollZoom': True}
)
]),
html.Br(),
html.Div(children=[
html.P("Points show where each country falls in relation to these two major trade metrics as indicators of risk for a country's ability to feed its population. Countries in the upper right corner can generally be understood to be most at risk if food trade lines are affected by decreased production.",
style={'text-align': 'left'}),
html.P("All food products traded between countries are included in the total summary of items imported, in 2019, as measured in metric tonnes (vertical axis showing range with M representing millions of tonnes). While soil organic carbon content is a major factor determining agricultural productivity, those levels are not directly shown in this graph and there are many factors that can lead to trade volatility", # The major grid lines dividing the four sections are set at the median, in other words the middle, of that range of global values as a benchmark to divide high or low in population and trade dependency, in relation to other countries.",
style={'text-align': 'left'}),
html.P(children=["Food and Agriculture Organization of the United Nations. (2020). FAOSTAT Detailed trade matrix: All Data Normalized. ",
html.A('https://www.fao.org/faostat/en/#data/TM',
href='https://www.fao.org/faostat/en/#data/TM',
target="_blank" # opens link in new tab or window
)
],
style={'text-align': 'left'}
)
]),
html.Br()
], body=True)
tab1 = dbc.Tab([densityRanges], label="Density Ranges")
tab2 = dbc.Tab([riskFoods], label="At Risk Foods")
tab3 = dbc.Tab([whyCarbon], label="Why Carbon?")
tabs = dbc.Tabs(children=[tab1, tab2, tab3])
# create the app's layout with the named variables
app.layout = dbc.Container(
[
dbc.Row(
[
dbc.Col(navbar,
width=12)
]
),
dbc.Row(
[
dbc.Col(appSubheading,
width={"size": "auto", "offset": 0},
md={"size": "auto", "offset": 1},
xxl={"size": "auto", "offset": 2}
),
],
justify="left",
style={"padding-top": 95, "padding-bottom": 0}
),
dbc.Row(
[
dbc.Col(mapExplorer,
width={"size": 11, "offset": 0}
)
],
justify="center",
style={"padding-top": 10, "padding-bottom": 25}
),
dbc.Row(
[
dbc.Col(learnMore,
width={'size': 9, 'offset': 2}, md={'size': 5, 'offset': 6}
)
],
style={"padding-top": 10, "padding-bottom": 10}
),
dbc.Row(
[
dbc.Col(html.Br(),
width=12
)
]
),
dbc.Row(
[
dbc.Col(
dbc.Container(
tabs),
width={"size": 11, "offset": 0}
)
],
justify="center",
),
dbc.Row(
html.Div(children=[
html.Br(),
html.Br(),
html.Footer(children=[
html.A(u"\u00A9"+" Kathryn Hurchla 2021",
href="http://kathrynhurchla.com",
target="_blank",
style={'width': '100%', 'display': 'flex', 'align-items': 'center', 'justify-content': 'center'}
),
], className="text-muted",
),
],
),
),
],
fluid=True,
className="dbc"
)
# ----------------------------------------------------------------------------------------
# callback decorators and functions
# connecting the Dropdown values to the graph
# simple selection on country directly
@app.callback(
Output('map-socd', 'children'),
[Input('reporter_country_dropdown', 'value')]
)
def update_selected_reporter_country(selected_reporter_country):
    """Rebuild the SOCD scatter map for the selected exporting country.

    Parameters
    ----------
    selected_reporter_country : str or None
        Current value of the 'Trade Partner' dropdown.

    Returns
    -------
    dcc.Graph
        Scattermapbox of the country's sample points; marker size and
        color both encode soil organic carbon density (SOCD) at 0-4.5 cm.
    """
    # Boolean indexing returns a new dataframe, so the module-level dfsoil
    # is never mutated by this callback (the original's alias + comment
    # about "always make a copy" was misleading).
    dfsoil_sub = dfsoil[dfsoil['Reporter_Country_name'] == selected_reporter_country]
    # create figure variables for the graph object
    locations = [go.Scattermapbox(
        name='SOCD at Surface Depth to 4.5cm',
        lon=dfsoil_sub['Reporter_Country_lon'],
        lat=dfsoil_sub['Reporter_Country_lat'],
        mode='markers',
        marker=go.scattermapbox.Marker(
            # BUGFIX: size and color previously came from the *unfiltered*
            # dataframe while lon/lat came from the filtered one, so the
            # marker arrays did not correspond to the plotted coordinates.
            size=dfsoil_sub['Reporter_Country_SOCD_depth4_5'],
            # sequential color scale in bright hues for contrast against
            # the map background layer, to differentiate points on the map
            color=dfsoil_sub['Reporter_Country_SOCD_depth4_5'],
            colorscale='Agsunset_r',
            # show a colorbar for this colorscale range
            showscale=True,
            colorbar=dict(title="SOCD"
                          ),
            opacity=0.8,  # float or integer range between 0 and 1
        ),
        hovertemplate="Longitude: %{lon}<br>" + "Latitude: %{lat}<br><extra></extra>"  # hide secondary tag with empty extra tag
    )
    ]
    # add a mapbox image layer below the data
    layout = go.Layout(
        # uirevision intentionally omitted so the map resets its zoom level
        # to the default whenever the selection changes
        # match background behind color legend to the page area graph sits on
        paper_bgcolor='#e4ebf5',  # Morph theme card background color,
        font=dict(color='#483628'),  # a dark shade of orange that appears dark brown
        clickmode='event+select',
        hovermode='closest',
        hoverdistance=2,
        mapbox=dict(
            accesstoken=mapbox_access_token,
            style='white-bg'
        ),
        autosize=True,
        margin=dict(l=0, r=0, t=35, b=0),
        mapbox_layers=[
            {
                'below': 'traces',
                'sourcetype': 'raster',
                'source': [
                    "https://basemap.nationalmap.gov/arcgis/rest/services/USGSImageryOnly/MapServer/tile/{z}/{y}/{x}"
                ]
            }
        ]
    )
    # Return the assembled figure wrapped in a Graph component.
    return dcc.Graph(config={'displayModeBar': True, 'scrollZoom': True},
                     figure={
                         'data': locations,
                         'layout': layout
                     })
# connect theLearn More button and modal with user interactions
@app.callback(
Output("modal", "is_open"),
[Input("learn-more-button", "n_clicks"), Input("learn-more-close", "n_clicks")],
[State("modal", "is_open")],
)
def toggle_modal(n1, n2, is_open):
    """Flip the modal's open state whenever either button registers a click."""
    clicked = bool(n1) or bool(n2)
    return (not is_open) if clicked else is_open
# ----------------------------------------------------------------------------------------
# run the app
if __name__ == '__main__':
    # Debug mode enables hot reload; disable it for production deployments.
    app.run_server(debug=True) # if inside Jupyter Notebook, add use_reloader=False inside parens to turn off reloader
| khurchla/sustain-our-soil-for-our-food-prod | app.py | app.py | py | 29,394 | python | en | code | 1 | github-code | 36 |
# Build the two ranges directly instead of filling them with append loops.
listA = list(range(3, 18, 2))  # odd numbers 3..17
listB = list(range(2, 17, 2))  # even numbers 2..16
# Print every (x, y) pairing of the two lists.
for x in listA:
    for y in listB:
        print(x, y)
7003913528 | from tkinter import *
import tkinter.messagebox as msg
def order():
    # Button callback: confirm the currently selected radio choice.
    # `var` is the module-level StringVar bound to the radio buttons below.
    msg.showinfo("Order Received!", f"We have received your order for {var.get()}. Thanks for ordering")

# Build the main window.
top = Tk()
top.geometry('400x200')
top.title('Tkinter - Radio Button')
Label(top, text = "What would you like to have sir?",font="lucida 19 bold",
justify=LEFT, padx=14).pack()
# var = IntVar()
# Shared variable holding the selected radio value.
# NOTE(review): the initial value "Radio" matches none of the option
# values, so no button appears selected until the user clicks one.
var = StringVar()
var.set("Radio")
# var.set(1)
# NOTE(review): .pack() returns None, so `radio` is always None; the
# widgets are still created and packed, only the reference is lost.
radio = Radiobutton(top, text='Dosa',variable=var, value='Dosa').pack(anchor="w")
radio = Radiobutton(top, text='Idly',variable=var, value='Idly').pack(anchor="w")
radio = Radiobutton(top, text='Somosa',variable=var, value='Somosa').pack(anchor="w")
radio = Radiobutton(top, text='paratha',variable=var, value='paratha').pack(anchor="w")
Button(top, text="Order Now", command=order).pack()
# Enter the Tk event loop (blocks until the window is closed).
top.mainloop()
| salmansaifi04/python | chapter18(tkinter)/17_radio_button.py | 17_radio_button.py | py | 837 | python | en | code | 0 | github-code | 36 |
21119755777 | from typing import Counter, List
class Solution:
    def mergeSimilarItems(self, items1: List[List[int]], items2: List[List[int]]) -> List[List[int]]:
        """Sum the weights of items sharing a value across both lists and
        return [value, weight] pairs sorted by value."""
        # `totals` avoids shadowing the builtin `map` used in the original.
        totals = Counter()
        for pair_list in (items1, items2):
            for value, weight in pair_list:
                totals[value] += weight
        return sorted([value, weight] for value, weight in totals.items())
if __name__ == '__main__':
    # The first two test cases in the original were dead code: each
    # assignment was immediately overwritten, so only the last pair was
    # ever exercised. They are kept here as comments for reference.
    # items1 = [[1,1],[4,5],[3,8]]; items2 = [[3,1],[1,5]]
    # items1 = [[1,1],[3,2],[2,3]]; items2 = [[2,1],[3,2],[1,3]]
    items1 = [[1,3],[2,2]]
    items2 = [[7,1],[2,2],[1,4]]
    rtn = Solution().mergeSimilarItems(items1, items2)
    print(rtn)
| plattanus/leetcodeDAY | python/2363. 合并相似的物品.py | 2363. 合并相似的物品.py | py | 625 | python | en | code | 0 | github-code | 36 |
31598739479 |
# Uses the same dynamics as the 6Dof (just a reduced state) and different model to compute Force and Torque because there are only 3 blades
# Receive user input for the objective position and attitude (later on will be received from a subscriber to pose of aruco pkg)
# Computes the necessary rotations per second on each of ACROBAT's blades to reach that position and attitude
# Based on 2 papers:
# - "A multi-objective optimization approach to the design of a free-flyer space robot for in-orbit manufacturing and assembly" by Vale, Rocha, Leite and Ventura
# - "Towards an autonomous free-flying robot fleet for intra-vehicular transportation of loads in unmanned space stations" by Ventura, Roque and Ekal
#!python2
import numpy as np # Import Numpy library
import math
import sys
def get_rotation_matrix_from_euler_angles(euler_angles):
    """Build the 3x3 rotation matrix for the given Euler angles (radians).

    The angle order and axis convention follow the original controller
    code (first element paired with the yaw-like terms).
    """
    x, y, z = euler_angles[0], euler_angles[1], euler_angles[2]
    # Precompute the trig terms once instead of repeating them per entry.
    cx, sx = np.cos(x), np.sin(x)
    cy, sy = np.cos(y), np.sin(y)
    cz, sz = np.cos(z), np.sin(z)
    row0 = [cy * cx, -cz * sx + sz * cx * sy, sz * sx + cz * sy * cx]
    row1 = [cy * sx, cz * cx + sz * sy * sx, -sz * cx + cz * sy * sx]
    row2 = [-sy, sz * cy, cz * cy]
    return np.array([row0, row1, row2])
# Calculates rotation matrix to euler angles
def get_euler_anles_from_rotation_matrix(R):
    """Recover Euler angles (radians) from the rotation matrix *R*.

    Returns np.array([x, y, z]); near a gimbal-lock (singular) pose the
    z angle is fixed to 0 and x is taken from a different matrix entry.
    """
    # Magnitude of the first column's projection onto the XY plane; a
    # value near zero indicates the singular (gimbal-lock) configuration.
    xy_norm = math.sqrt(R[0, 0] ** 2 + R[1, 0] ** 2)
    # y is computed the same way in both branches, so hoist it.
    y = math.atan2(-R[2, 0], xy_norm)
    if xy_norm < 1e-6:
        # singular case
        x = math.atan2(-R[1, 2], R[1, 1])
        z = 0
    else:
        x = math.atan2(R[2, 1], R[2, 2])
        z = math.atan2(R[1, 0], R[0, 0])
    return np.array([x, y, z])
# --- Physical parameters of the ACROBAT free-flyer (placeholder values) ---
FREE_FLYER_MASS = 0.340 # Kg, Random testing value... Still have to search for the exact mass of the ACROBAT
FREE_FLYER_MOMENT_OF_INERTIA = np.array((0.1348056, 0.1902704, 0.1435024)) # Kg.m^2 ... Still have to search for exact moment of inertia vector of the ACROBAT
FREE_FLYER_BLADE_MAX_RPS = 568 # ACROBAT propellers rotations per second

# --- Target pose relative to the ArUco tag ---
# The aruco library estimates the position of the tag relative to the camera, where the tag's coordinate system has z pointing towards us and Y up, X right
# NOTE(review): the comments disagree on the stand-off distance (30 cm in
# the original note vs 0.2 m in code vs "10cm" in the trailing comment) --
# confirm the intended value.
DESIRED_POSITION = np.array((0, 0, 0.2)) # distance in front of the AR tag, along the tag's Z axis
DESIRED_LINEAR_VELOCITY = np.array((0, 0, 0)) # We want the robot to be stopped in the end
DESIRED_ATTITUDE = np.array((0, 0, 0)) # To be aligned with the AR tag. TODO: confirm this does not make the front of the robot face backwards
DESIRED_ROTATION_MATRIX = get_rotation_matrix_from_euler_angles(DESIRED_ATTITUDE)
DESIRED_ANGULAR_VELOCITY = np.array((0, 0, 0)) # We want the robot to be stopped in the end
DESIRED_ANGULAR_ACCELERATION = np.array((0, 0, 0))

# Controller Gains TODO: Need to be calibrated
K_x = 4 # Controller Proportional Gain (Translational part)
K_v = 0.1 # Controller Derivative Gain (Translational part)
K_r = 2 # Controller Proportional Gain (Rotational part)
K_w = 0.1 # Controller Derivative Gain (Rotational part)

# Actuation matrix: each column is one blade's contribution to the reduced
# planar wrench (a stale commented-out 6-DoF allocation table was removed;
# see version history for the original a1..a6 vectors).
#Considering 2 degrees of freedom x, y and rotation z
a1 = np.array((0.2588, -0.9659, 0.8528812)).T
a2 = np.array((-0.9659, 0.2588, -0.85289255)).T
a3 = np.array((0.7071, 0.7071, -0.85290402)).T

# A maps per-blade forces to the planar wrench; its inverse allocates a
# desired wrench back to the 3 blades.
A = np.column_stack((a1, a2, a3))
A_inverse = np.linalg.inv(A)
def compute_force_and_torque(current_position, current_attitude):
    """PD pose controller: compute the force/torque commands that drive the
    robot toward DESIRED_POSITION / DESIRED_ATTITUDE.

    Parameters
    ----------
    current_position : array-like of 3 floats -- position relative to the
        tag (presumably in the tag frame; confirm against the caller).
    current_attitude : array-like of 3 Euler angles in radians.

    Returns
    -------
    (force, torque) : planar force np.array([fx, fz]) and the scalar
        torque about the middle axis (reduced 3-DoF model).
    """
    # ************* Testing values, will be erased later ************* Should be received from IMU (?)
    current_linear_velocity = np.array((0, 0, 0))
    current_angular_velocity = np.array((0, 0, 0))
    # ****************************************************************
    attitude_rotation_matrix = get_rotation_matrix_from_euler_angles(current_attitude)
    # Translational Part: PD law on position/velocity error, rotated into
    # the body frame and scaled by the mass (F = m * R * a).
    error_x = current_position - DESIRED_POSITION
    error_v = current_linear_velocity - DESIRED_LINEAR_VELOCITY # current_velocity has to be somehow received by the ACROBAT sensors (subscribe to topic)
    acceleration = -K_x * error_x - K_v * error_v # K_x and K_v are the proportionate and derivative gains (constants) and error_x and error_v the position and velocity errors
    force = np.dot( (FREE_FLYER_MASS * attitude_rotation_matrix), acceleration)
    # Rotational Part: attitude error extracted from the relative rotation
    # matrix (see the papers cited in the file header), then a PD law plus
    # feed-forward terms on the desired angular velocity/acceleration.
    inverse_of_S_w = get_inverse_S_w( (np.dot(DESIRED_ROTATION_MATRIX.T, attitude_rotation_matrix) - np.dot(attitude_rotation_matrix.T, DESIRED_ROTATION_MATRIX)) )
    error_r = ( 1 / (2*np.sqrt(1 + np.trace( np.dot(DESIRED_ROTATION_MATRIX.T, attitude_rotation_matrix ))) )) * inverse_of_S_w
    error_w = current_angular_velocity - np.dot(np.dot( attitude_rotation_matrix.T, DESIRED_ROTATION_MATRIX), DESIRED_ANGULAR_VELOCITY)
    S_w_matrix = get_S_w( np.dot( np.dot(attitude_rotation_matrix.T, DESIRED_ROTATION_MATRIX), DESIRED_ANGULAR_VELOCITY ) )
    # NOTE(review): FREE_FLYER_MOMENT_OF_INERTIA is a 1-D vector, so the
    # np.dot terms below contract it as a vector rather than applying a
    # full 3x3 inertia tensor -- confirm this matches the intended model.
    torque = -K_r * error_r - K_w * error_w + np.dot(np.dot(np.dot(np.dot(S_w_matrix, FREE_FLYER_MOMENT_OF_INERTIA), attitude_rotation_matrix.T), DESIRED_ROTATION_MATRIX), DESIRED_ANGULAR_VELOCITY) + np.dot(np.dot(np.dot(FREE_FLYER_MOMENT_OF_INERTIA, attitude_rotation_matrix.T), DESIRED_ROTATION_MATRIX), DESIRED_ANGULAR_ACCELERATION)
    # Reduce the full 3-D wrench to the planar model: keep the x/z force
    # components and the torque about the middle axis.
    force = np.array((force[0], force[2]))
    torque = np.array((torque[1]))
    return force, torque
# Matrix operations that recovers angular velocity vector from a skew-symmetrix matrix (Check paper)
# S(w) = [0, -w_z, w_y; w_z, 0, -w_x; -w_y, w_x, 0]
def get_inverse_S_w(matrix):
angular_velocity = (matrix[2][1], matrix[0][2], matrix[1][0])
return np.array(angular_velocity)
def get_S_w(vect):
s_matrix = np.array([[0, -vect[2], vect[1]], [vect[2], 0, -vect[0]], [-vect[1], vect[0], 0]])
return s_matrix
# Converts from force and torque to pwm signals to each of the propellers
def compute_pwm_control(force, torque):
input_vect = force
input_vect = np.append(input_vect, torque)
q = np.dot(A_inverse, input_vect)
rpm = forces_to_rpm(q)
q = map_rpm_to_pulsewidth(rpm)
return np.array(q)
# The ACROBAT papers says that F_max = 2 and M_Max = 2
# Pulse = 0 ==> OFF; Pulse = 1000 ==> Safe anti-clockwise
# Pulse = 1500 ==> Centre; Pulse = 2000 ==> Safe clockwise
def map_rpm_to_pulsewidth(rpm_vector):
rpm_vector = rpm_vector
difference = 1000.0
for idx in range(len(rpm_vector)):
rpm_vector[idx] = 1500 + rpm_vector[idx] * difference / 2.0
return rpm_vector
def forces_to_rpm(forces_vector):
rpm_vector = []
for force in forces_vector:
if force < 0:
rpm_vector.append(0.0 - math.sqrt(-force))
else:
rpm_vector.append(math.sqrt(force))
return rpm_vector | Guilherme-Viegas/PositionAndAttitudeEstimation3DoF_Free_Flyer | controller.py | controller.py | py | 7,527 | python | en | code | 1 | github-code | 36 |
12780766468 | def is_divisible(n, d):
while n >= d:
n = n - d
return n == 0
def is_premier(n):
for d in range(2, n):
if is_divisible(n, d):
return False
return True
def affiche_nombres_premiers_jusqua(n):
for i in range(2, n+1):
if is_premier(i):
print(i)
print(is_divisible(10, 3))
print(is_divisible(16, 2))
print(is_premier(45))
print(is_premier(31))
print(is_premier(91))
affiche_nombres_premiers_jusqua(100000) | janoscoder/experiments | incubator/nombres_premiers.py | nombres_premiers.py | py | 474 | python | en | code | 0 | github-code | 36 |
412173435 | import nltk
def init_wfst(tokens, grammar):
"""Updates diagonal elements of chart
Arguments:
---------
tokens (list):
List of words in input sentence
grammar (list):
List of production rules in the grammar
"""
num_tokens = len(tokens)
wfst = [[None for i in range(num_tokens+1)] for j in range(num_tokens+1)]
for i in range(num_tokens):
productions = grammar.productions(rhs=tokens[i])
wfst[i][i+1] = [production.lhs() for production in productions]
return wfst
def complete_wfst(wfst, tokens, grammar, trace=False):
"""Updates non-diagonal elements of chart
Arguments:
---------
wfst
tokens (list):
List of words in input sentence
grammar (list):
List of production rules in the grammar
"""
index = dict((p.rhs(), p.lhs()) for p in grammar.productions())
num_tokens = len(tokens)
for span in range(2, num_tokens+1):
for start in range(num_tokens+1-span):
end = start + span
temp = []
for mid in range(start+1, end):
nt1s, nt2s = wfst[start][mid], wfst[mid][end]
for nt1 in nt1s:
for nt2 in nt2s:
if nt1 and nt2 and (nt1, nt2) in index:
temp.append(index[(nt1, nt2)])
wfst[start][end] = list(set(temp))
return wfst
def display(wfst, tokens):
"""Updates non-diagonal elements of chart
Arguments:
---------
wfst
tokens (list):
List of words in input sentence
"""
print('\nWFST ' + ' '.join(("%-4d" % i) for i in range(1, len(wfst))))
for i in range(len(wfst)-1):
print("%d " % i, end=" ")
for j in range(1, len(wfst)):
print("%-4s" % (wfst[i][j] or '.'), end=" ")
print()
# MAIN FUNCTION
groucho_grammar1 = nltk.CFG.fromstring("""
S -> NP VP
PP -> P NP
NP -> Det N | Det N PP | 'I'
VP -> V NP | VP PP
Det -> 'an' | 'my'
N -> 'elephant' | 'pajamas'
V -> 'shot'
P -> 'in'
""")
groucho_grammar2 = nltk.CFG.fromstring("""
S -> NP VP
PP -> P NP
NP -> Det N | Det X | 'I'
X -> N PP
VP -> V NP | VP PP
Det -> 'an' | 'my'
N -> 'elephant' | 'pajamas'
V -> 'shot'
P -> 'in'
""")
tokens = "I shot an elephant in my pajamas".split()
initial_wfst = init_wfst(tokens, groucho_grammar2)
print('Displaying Initial Chart Parser Table for Groucho Grammar...')
display(initial_wfst, tokens)
final_wfst = complete_wfst(initial_wfst, tokens, groucho_grammar2)
print('Displaying Complete Chart Parser Table for Groucho Grammar...')
display(final_wfst, tokens)
| aashishyadavally/MS_AI_Coursework | CS6900/Assignment06/homework6_1.py | homework6_1.py | py | 2,755 | python | en | code | 0 | github-code | 36 |
4399694937 | #!/usr/bin/env python
# coding: utf-8
from codecs import open # to use a consistent encoding
from os import path
from subprocess import check_output
from setuptools import setup, find_packages
def get_version():
cmd = "git describe"
try:
result = check_output(
cmd.split(),
).decode('utf-8').strip()
except:
result = "?"
return result
def get_long_description():
here = path.abspath(path.dirname(__file__))
with open(path.join(here, "README.md"), encoding="utf-8") as f:
long_description = f.read()
return long_description
setup(
name="swmclient",
version=get_version(),
description="Python bindings for swm-core user REST API",
long_description=get_long_description(),
long_description_content_type="text/markdown",
url="https://github.com/openworkload/swm-python-client",
author="Taras Shapovalov",
author_email="taras@iclouds.net",
packages=find_packages(),
license="BSD",
include_package_data=True,
install_requires=["httpx"],
python_requires=">=3.9, <4",
platforms="Linux, Mac OS X, Windows",
keywords=[
"HPC",
"High Performance Computing",
"Cloud Computing",
"Open Workload",
"Sky Port"
],
classifiers=[
"Development Status :: 3 - Alpha",
"Programming Language :: Python :: 3",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3 :: Only",
],
project_urls={
"Bug Reports": "https://github.com/openworkload/swm-python-client/issues",
"Source": "https://github.com/openworkload/swm-python-client",
},
)
| openworkload/swm-python-client | setup.py | setup.py | py | 1,828 | python | en | code | 1 | github-code | 36 |
72905658983 | from .utils import *
@pytest.fixture
def client():
app.config.from_object(TestingConfig)
db.create_all()
yield app.test_client()
db.session.remove()
db.drop_all()
class TestMain:
def test_home_page_shows(self, client):
response = client.get('/')
assert response.status_code == 200
assert b'Welcome to Habit Tracker' in response.data
def test_404_error_renders_proper(self, client):
response = client.get('/not_exists', follow_redirects=True)
assert response.status_code == 404
assert b'What you were looking for is just not here.' in response.data
def test_home_page_if_user_logged_off(self, client):
response = client.get('/', follow_redirects=True)
assert b'Login' in response.data
assert b'Signup' in response.data
def test_navbar_if_user_logged_off(self, client):
response = client.get('/', follow_redirects=True)
assert b'Home' in response.data
def test_home_page_changes_if_user_logged(self, client):
register(client, 'John', 'johndoe@wp.pl', 'mysecret', 'mysecret')
login(client, 'John', 'mysecret')
response = client.get('/', follow_redirects=True)
assert b'Login' not in response.data
assert b'Signup' not in response.data
assert b'Logout' in response.data
def test_navbar_changes_if_user_logged(self, client):
register(client, 'John', 'johndoe@wp.pl', 'mysecret', 'mysecret')
login(client, 'John', 'mysecret')
response = client.get('/', follow_redirects=True)
assert b'Habits' in response.data
| KiTroNik/HabitTracker | tests/test_main.py | test_main.py | py | 1,625 | python | en | code | 0 | github-code | 36 |
30382090301 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 7 13:32:28 2018
@author: jon
"""
#import sys
#from pyuvdata import UVData
from pynfft import NFFT
import numpy as np
import matplotlib.pyplot as plt
from scipy import constants
from mslib import MS_jon
def singleFrequency():
imsize = (256, 256)
cell = np.asarray([0.5, -0.5]) / 3600.0 # #arcseconds. revert v axis because it is empirically right. the axes in the ms are not really standardized
cell = np.radians(cell)
ms = MS_jon()
ms.read_ms("simkat64-default.ms")
# 4 polarizations are XX, XY, YX and YY
#Intensity image should be XX + YY
wavelengths = ms.freq_array[0, 0] / constants.c
uvw_wavelengths = np.dot(ms.uvw_array, np.diag(np.repeat(wavelengths, 3)))
uv = np.multiply(uvw_wavelengths[:,0:2], cell)
plan = NFFT(imsize, uv.shape[0])
plan.x = uv.flatten()
plan.precompute()
plan.f = ms.data_array[:,:,0,0]
dirty = plan.adjoint() / uv.shape[0]
plt.imshow(np.flipud(np.transpose(np.real(dirty))))
def allFrequencies():
imsize = (256, 256)
cell = np.asarray([0.5, -0.5]) / 3600.0 # #arcseconds. revert v axis because it is empirically right. the axes in the ms are not really standardized
cell = np.radians(cell)
ms = MS_jon()
ms.read_ms("simkat64-default.ms")
wavelengths = ms.freq_array[0] / constants.c
offset = ms.uvw_array.shape[0]
start = 0
end = offset
uv = np.zeros((ms.uvw_array.shape[0] * wavelengths.size, 2))
vis = np.zeros(ms.uvw_array.shape[0] * wavelengths.size, dtype=np.complex128)
for i in range(0, wavelengths.size):
uvw_wavelengths = np.dot(ms.uvw_array, np.diag(np.repeat(wavelengths[i], 3)))
#skip w component
uv[start:end] = uvw_wavelengths[:, 0:2]
#add the XX and YY Polarization to get an intensity
vis[start:end] = ms.data_array[:, 0, i, 0] + ms.data_array[:, 0, i, 3]
start += offset
end += offset
uv = np.multiply(uv, cell)
plan = NFFT(imsize, uv.shape[0])
plan.x = uv.flatten()
plan.precompute()
plan.f = vis
dirty = plan.adjoint() / uv.shape[0] / 2
plt.imshow(np.real(dirty))
print(np.max(np.real(dirty)))
return 0
allFrequencies() | lord-blueberry/p8-pipeline | sandbox/img_test/pynfft_test.py | pynfft_test.py | py | 2,295 | python | en | code | 0 | github-code | 36 |
8735286229 | import requests
def linkCheck(linksFound):
goodLinks = []
badLinks = []
for link in linksFound:
res = requests.get(link)
if res.status_code == 200:
print(link + " <<<<<<<<<< 200")
goodLinks.append(link)
else:
badLink = res.status_code
badLinks.append(link)
print(link + " <<<<<<<<<< link broken. Status: " + str(badLink))
return goodLinks, badLinks
| zipinel/Selenium_and_BeautifulSoup | Base/linkChecker.py | linkChecker.py | py | 467 | python | en | code | 0 | github-code | 36 |
35753678574 | import xlsxwriter
workbook = xlsxwriter.Workbook("1.xlsx")
mySheet = workbook.add_worksheet()
mySheet.write("A1", "t_value")
mySheet.write("B1", "y1_value")
mySheet.write("C1", "y2_value")
t = 0
t1 = 0
y2 = 0
t1_value = []
y1_value = []
y2_value = []
while int(t) != 2:
t += 0.1
y1 = (5 * t) + ((2 * t) ** 2)
t = ("{:.2f}".format(t))
t1_value.append(t)
y1_value.append(y1)
t = float(t)
print(y1)
j = 0
for i in range(2, len(t1_value)+2):
mySheet.write(("A"+str(i)), t1_value[j])
j += 1
j = 0
for i in range(2, len(y1_value)+2):
mySheet.write("B"+str(i), y1_value[j])
j += 1
while int(t1) != 2:
t1 += 0.1
y2 = 30 + (10 * t1) - ((5 * t1) ** 2)
t1 = ("{:.2f}".format(t1))
y2_value.append(y2)
t1 = float(t1)
j = 0
for i in range(2, len(y2_value)+2):
mySheet.write(("C"+str(i)), y2_value[j])
j += 1
workbook.close()
| toni7891/magshimimHW_10grade | selfProjects/physics/phisycaProg1.py | phisycaProg1.py | py | 893 | python | en | code | 3 | github-code | 36 |
39498051069 | from __future__ import absolute_import
__author__ = "Angelo Ziletti"
__copyright__ = "Angelo Ziletti"
__maintainer__ = "Angelo Ziletti"
__email__ = "ziletti@fhi-berlin.mpg.de"
__date__ = "14/08/18"
import unittest
from ai4materials.models.clustering import design_matrix_to_clustering
import numpy as np
import sklearn.manifold
np.random.seed(42)
class TestClustering(unittest.TestCase):
def setUp(self):
pass
def test_design_matrix_to_clustering(self):
n_samples = 100
n_dim = 5
design_matrix = np.random.rand(n_samples, n_dim)
# test for pre-selected method without user-defined parameters and no probabilities
labels, labels_prob, clustering = design_matrix_to_clustering(design_matrix, clustering_method='kmeans')
self.assertIsInstance(labels, np.ndarray)
self.assertIs(labels_prob, None)
# test for pre-selected method without user-defined parameters and with probabilities
# use gaussian_mixture model since it returns also probabilities
labels, labels_prob, clustering = design_matrix_to_clustering(design_matrix,
clustering_method='gaussian_mixture')
self.assertIsInstance(labels, np.ndarray)
self.assertGreaterEqual(np.amin(labels_prob), 0.0)
self.assertLessEqual(np.amax(labels_prob), 1.0)
# test for pre-selected method without user-defined parameters
n_clusters = 4
labels, labels_prob, clustering = design_matrix_to_clustering(design_matrix, clustering_method='kmeans',
clustering_params={'n_clusters': n_clusters})
actual_n_clusters = clustering.get_params()['n_clusters']
self.assertEqual(actual_n_clusters, n_clusters)
self.assertIsInstance(labels, np.ndarray)
# test when a clustering object is directly passed
dbscan = sklearn.cluster.DBSCAN(eps=0.5, min_samples=50, leaf_size=10)
clustering_labels, prob_labels, clustering = design_matrix_to_clustering(design_matrix, clustering_class=dbscan)
self.assertIsInstance(clustering_labels, np.ndarray)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestClustering)
unittest.TextTestRunner(verbosity=2).run(suite)
| angeloziletti/ai4materials | tests/test_clustering.py | test_clustering.py | py | 2,367 | python | en | code | 36 | github-code | 36 |
25050897663 | # coding: utf-8
from typing import Any, Dict, List, Optional, Tuple, Union
import matplotlib.pyplot as plt
from matplotlib.axes import Axes
from matplotlib.colors import Colormap
from matplotlib.figure import Figure as mplFigure
from plotly.graph_objects import Trace
from plotly.graph_objs import Figure as plotlyFigure
from plotly.subplots import make_subplots
# <=== Utility functions for Both Plotting modules ===
def get_colorList(
n: int, cmap: Optional[Union[str, Colormap]] = None, style: str = "matplotlib"
) -> List[Tuple[float, float, float, float]]:
"""Get a color List using matplotlib's colormaps. See `Choosing Colormaps in Matplotlib <https://matplotlib.org/stable/tutorials/colors/colormaps.html>` for details.
Args:
n (int) : The number of samples
cmap (Optional[Union[str,Colormap]], optional) : A ``Colormap`` object or a color map name. Defaults to ``None``.
style (str) : How to express colors (Please choose from ``"matplotlib"``, or ``"plotly"``)
Returns:
List[Tuple[float,float,float,float]]: Color List
Examples:
>>> import matplotlib
>>> from matplotlib.cm import _cmap_registry
>>> from teilab.utils import get_colorList
>>> get_colorList(n=3, cmap="bwr")
[(0.6666666666666666, 0.6666666666666666, 1.0, 1.0),
(1.0, 0.6666666666666667, 0.6666666666666667, 1.0),
(1.0, 0.0, 0.0, 1.0)]
>>> get_colorList(n=3, cmap=_cmap_registry["bwr"])
[(0.6666666666666666, 0.6666666666666666, 1.0, 1.0),
(1.0, 0.6666666666666667, 0.6666666666666667, 1.0),
(1.0, 0.0, 0.0, 1.0)]
>>> get_colorList(n=3)
[(0.190631, 0.407061, 0.556089, 1.0),
(0.20803, 0.718701, 0.472873, 1.0),
(0.993248, 0.906157, 0.143936, 1.0)]
>>> matplotlib.rcParams['image.cmap'] = "bwr"
>>> get_colorList(n=3)
[(0.6666666666666666, 0.6666666666666666, 1.0, 1.0),
(1.0, 0.6666666666666667, 0.6666666666666667, 1.0),
(1.0, 0.0, 0.0, 1.0)]
>>> get_colorList(n=3, cmap="bwr", style="plotly")
['rgba(170,170,255,1.0)', 'rgba(255,170,170,1.0)', 'rgba(255,0,0,1.0)']
"""
cmap = plt.get_cmap(name=cmap)
colors = [cmap((i + 1) / n) for i in range(n)]
if style in ["plotly", "rgba"]:
colors = [
f'rgba({",".join([str(int(e*255)) if i<3 else str(e) for i,e in enumerate(color)])})' for color in colors
]
return colors
def subplots_create(
nrows: int = 1,
ncols: int = 1,
sharex: Union[bool, str] = False,
sharey: Union[bool, str] = False,
style: str = "matplotlib",
**kwargs,
) -> Union[Tuple[mplFigure, Axes], plotlyFigure]:
"""Create subplots for each plot style.
Args:
nrows (int, optional) : Number of rows of the subplot grid. Defaults to ``1``.
ncols (int, optional) : Number of columns of the subplot grid. Defaults to ``1``.
sharex (Union[bool,str], optional) : Controls sharing of properties among x-axes. Defaults to ``False``.
sharey (Union[bool,str], optional) : Controls sharing of properties among y-axes. Defaults to ``False``.
style (str, optional) : Plot style. Please choose from ``"matplotlib"``, or ``"plotly"`` . Defaults to ``"matplotlib"``.
Returns:
Union[Tuple[mplFigure,Axes],plotlyFigure]: Subplots to suit each plot style.
Examples:
>>> from teilab.utils import subplots_create
>>> fig,axes = subplots_create(nrows=3, style="matplotlib")
>>> fig.__class__
>>> "<class 'matplotlib.figure.Figure'>"
>>> str(axes[0].__class__)
>>> "<class 'matplotlib.axes._subplots.AxesSubplot'>"
>>> fig = subplots_create(nrows=3, style="plotly")
>>> str(fig.__class__)
>>> "<class 'plotly.graph_objs._figure.Figure'>"
"""
if style == "plotly":
return make_subplots(rows=nrows, cols=ncols, shared_xaxes=sharex, shared_yaxes=sharey, **kwargs)
else:
return plt.subplots(nrows=nrows, ncols=ncols, sharex=sharex, sharey=sharey, **kwargs)
# === Utility functions for Both Plotting modules ===>
# <=== Utility functions for "plotly" ===
def trace_transition(from_fig: plotlyFigure, to_fig: plotlyFigure, row: int = 1, col: int = 1) -> plotlyFigure:
"""Trace ``Figure`` which is created by ``plotly.express``
Args:
from_fig (Figure) : Move the trace that exists in this ``Figure``.
to_fig (Figure) : Move trace to this ``Figure``
row (int, optional) : Row of subplots. Defaults to ``1``.
col (int, optional) : Column of subplots. Defaults to ``1``.
Returns:
Figure: ``to_fig`` with ``from_fig`` 's traces.
"""
def transition(trace: Trace):
"""Move the ``Trace`` from ``from_fig`` to ``to_fig``"""
trace.legendgroup = f"{col}-{row}"
to_fig.add_trace(trace=trace, row=row, col=col)
from_fig.for_each_trace(fn=transition)
return to_fig
# === Utility functions for "plotly" ===>
# <=== Utility functions for "matplotlib" ===
# === Utility functions for "matplotlib" ===>
| iwasakishuto/TeiLab-BasicLaboratoryWork-in-LifeScienceExperiments | teilab/utils/plot_utils.py | plot_utils.py | py | 5,241 | python | en | code | 0 | github-code | 36 |
19027441425 | from django.db import models
class SocialNetwork(models.Model):
"""Social Network model definitions"""
DEFAULT_SOCIALNETWORKS = (
(0, 'FaceBook'),
(1, 'Instagram'),
(2, 'Linkedin'),
(3, 'Twitter'),
(4, 'YouTube'),
)
title = models.CharField(
verbose_name='Rede Social',
max_length=50,
choices=DEFAULT_SOCIALNETWORKS
)
url = models.URLField(
unique=True, null=True, blank=True,
verbose_name='URL do Perfil',
help_text='Link do Perfil na rede social escolhida.',
)
class Meta:
ordering = ['title']
verbose_name = 'Rede Social'
verbose_name_plural = 'Redes Sociais'
def __str__(self):
return self.title
| ag-castro/brazil-ongs-mapping | ressonantes/core/models/social_network.py | social_network.py | py | 762 | python | en | code | 1 | github-code | 36 |
7737385160 | # -*- coding=utf8
import web, random, string
from StringIO import StringIO
from PIL import Image, ImageDraw, ImageFont, ImageFilter
#生成验证码的位数
vcodeLength = 4
#生成验证码图片的尺寸
vcodeSize = (60, 25)
#背景颜色, 默认白色
vcodeBgcolor = (238, 238, 238)
#字体颜色, 蓝色
vcodeFontcolor = (0, 0, 255)
#干扰线, 红色
vcodeLinecolor = (255, 0, 0)
#是否要加干扰线
isDrawLine = True
#加入干扰线的上下限
vcodeLineNumber = (1, 5)
#随机字符串
def gen_text():
source = list(string.letters)
for index in range(0, 10):
source.append(str(index))
validateCode = ''.join(random.sample(source, 4))
web.config._session['validateCode'] = validateCode
return validateCode
#绘制干扰线
def gen_line(draw, width, height):
begin = (random.randint(0, width), random.randint(0, height))
end = (random.randint(0, width), random.randint(0, height))
draw.line([begin, end], fill = vcodeLinecolor)
#生成验证码图片
def gen_code():
width, height = vcodeSize
image = Image.new('RGBA', (width, height), vcodeBgcolor)
font = ImageFont.truetype("arial.ttf", 25)
draw = ImageDraw.Draw(image)
text = gen_text()
font_width, font_height = font.getsize(text)
draw.text(((width - font_width) / vcodeLength, (height - font_height) / vcodeLength), text, font = font, fill = vcodeFontcolor)
if isDrawLine:
gen_line(draw, width, height)
image = image.transform((width + 20, height + 10), Image.AFFINE, (1, -0.3, 0, -0.1, 1, 0), Image.BILINEAR)
#image = image.filter(ImageFilter.EDGE_ENHANCE_MORE)
out = StringIO()
image.save(out, 'png', quality=75)
return out.getvalue()
class index:
def GET(self):
agent = web.ctx.env['HTTP_USER_AGENT'].lower()
if 'android' in agent or 'iphone' in agent:
return web.template.frender('indexMobile.html')()
else:
return web.template.frender('index.html')()
class get_validate_code:
def GET(self):
web.header('Content-Type', 'image/png')
return gen_code()
class check_validate_code:
def GET(self):
if 'validateCode' in web.config._session:
return web.config._session['validateCode'] == web.input().validateCode
else:
return None
| kungfucode-rex/jlgjg-admin | server/web/controller/Index_C.py | Index_C.py | py | 2,315 | python | en | code | 0 | github-code | 36 |
20777673952 | from pandas import read_csv
X = read_csv('./datasets/wine.csv')
blockSize=2000
scanned=0
for i in range(blockSize,len(X.index),blockSize):
dfAux=X[i-blockSize:i]
dfAux.to_csv(index=False,path_or_buf='./datasets/wine'+str(i)+".csv")
scanned+=blockSize
dfAux=X[scanned:]
dfAux.to_csv(index=False,path_or_buf='./datasets/wine'+str(X.shape[0])+".csv")
| ggonzalere19/OptimizacionProyecto | splitter.py | splitter.py | py | 363 | python | en | code | 0 | github-code | 36 |
17225040 | from pwn import *
p = remote('chal.2020.sunshinectf.org', 30002)
#p = process('./chall_02')
e = ELF('./chall_02')
p.readline()
input('...')
p.send('a'*0x12)
print('[INFO] launch /bin/sh 0x%.8x' %(e.symbols['win']))
line = b'a'*0x3e
line += p64(e.symbols['win'])
line += b'\n'
p.send(line)
p.interactive()
'''
[+] Opening connection to chal.2020.sunshinectf.org on port 30002: Done
[*] '/vagrant/junkdrawer/sunshine/speedrun/chall_02'
Arch: i386-32-little
RELRO: Partial RELRO
Stack: No canary found
NX: NX enabled
PIE: No PIE (0x8048000)
...
[INFO] launch /bin/sh 0x080484d6
[*] Switching to interactive mode
$ ls
chall_02
flag.txt
$ cat flag.txt
sun{warmness-on-the-soul-3b6aad1d8bb54732}
'''
| hjlbs/ctf | 2020/sunshine/speedrun/02-exp.py | 02-exp.py | py | 745 | python | en | code | 0 | github-code | 36 |
26745748297 | """
Workhorse file to perform analysis on data taken by Andor Cameras using CSPY
Author : Juan Bohorquez
Created on : 06/04/2021
Last Modified : 06/04/2021
"""
import h5py
import os
import numpy as np
import warnings
from typing import Tuple
from HamamatsuH5 import HMROI
def load_data(
results_file: h5py.File,
roi: HMROI
) -> np.array:
"""
Loads data from an Andor camera into a numpy array
results are indexed as follows
> results = array[iterations,measurements,shots,horizontal_pixels, vertical_pixels]
Args:
results_file: h5file object corresponding to results.hdf5 file
roi: region of interest from which to extract pixel data
Returns:
5D numpy array holding all of the data taken by the hamamatsu during the experiment
indexed [iteration,measurement,shot,horizontal_pixel,vertical_pixel]
"""
num_its = len(results_file['iterations'])
measurements = results_file['settings/experiment/measurementsPerIteration'][()] + 1
shots_per_measurement = 1
andr_pix = np.zeros(
(num_its, measurements, shots_per_measurement, roi.bottom - roi.top, roi.right - roi.left,),
dtype=int
)
for iteration, i_group in results_file['experiments/0/iterations'].items():
# print(f"iteration : {iteration} : {type(iteration)}")
for measurement, m_tup in enumerate(i_group['measurements'].items()):
m_group = m_tup[1]
# print(f"\tmeasurement : {measurement} : {type(measurement)}")
for shot, s_group in m_group['data/Andor_1026/shots'].items():
try:
# print(f"\t\tshot : {shot} : {type(shot)}")
andr_pix[int(iteration), int(measurement), int(shot)] = s_group[()][roi.slice]
except IndexError as e:
warnings.warn(
f"{e}\n iteration : {iteration} measurement : {measurement} shot {shot}"
)
continue
except ValueError as ve:
warnings.warn(
f"{ve}\n iteration : {iteration} measurement : {measurement} shot {shot}"
)
return andr_pix | JuanBohorquez3/Hybrid_H5 | H5_python3/AndorH5.py | AndorH5.py | py | 2,231 | python | en | code | 0 | github-code | 36 |
71712982823 | import requests
from behave import *
from hamcrest import *
@when('Make a get request')
def make_get_request_to_api(context):
context.resp= requests.get("https://reqres.in/api/users?page=2")
assert_that(context.resp.status_code, equal_to(200))
@then('Check if users list is returned')
def check_user_list(context):
assert_that(len(context.resp.json()),greater_than(5))
@then("Check if user's data is correct")
def check_user_list(context):
expected_data={
"id":7
}
actual_data={
"id":context.resp.json().get('data')[0].get("id")
}
assert_that(expected_data,equal_to(actual_data)) | HarshDevSingh/docker_python_bdd | features/steps/rest_api.py | rest_api.py | py | 631 | python | en | code | 0 | github-code | 36 |
70077277545 | from path import Path
import sys, os
def rec(fname):
os.system('alsamixer')
os.system('sox -t alsa default "{fname}"'.format_map(vars()))
print('playback command: ~$ aplay {fname}'.format_map(vars()))
def initialize():
path = os.path.join(os.getcwd(), 'wavs')
os.path.exists(path) or os.mkdir(path)
return path
def normalize(fname):
ext = Path(fname).ext
if not ext:
fname += '.wav'
return fname
def sanity_check():
try:
assert list(os.popen('which sox'))
except AssertionError:
os.system('sudo apt-get update')
os.system('sudo apt-get install sox')
def main():
sanity_check()
try:
fname = os.path.join(initialize(), normalize(sys.argv[1]))
assert not os.path.exists(fname)
except AssertionError:
print('{fname} already exists.'.format_map(vars()))
except IndexError:
print('You must specify an output filename.')
else:
rec(fname)
if __name__ == '__main__':
main()
| chris-hamberg/system_utils | alsa_record.py | alsa_record.py | py | 1,013 | python | en | code | 0 | github-code | 36 |
29144047296 | #!/usr/bin/python3
import numpy as np
from scipy.io import loadmat
from func import displayData, nnCostFunction, sigmoidGradient, randInitializeWeights,\
checkNNGradients, fmin_nn, fmin_nn1, predict
import matplotlib.pyplot as plt
def main():
# Setup the parameters you will use for this exercise
input_layer_size = 400 # mnist dataset 20x20
hidden_layer_size = 25
num_labels = 10
## Part 1: Loading and Visualizing Data
print("Loading and Visualizing Data ...")
dat = loadmat("./ex4data1.mat")
X = dat['X']
y = dat['y']
m = X.shape[0]
# Randomly select 100 data points to display
rand_indices = np.random.permutation(m)
sel = X[rand_indices[:100], :]
displayData(sel)
## Part 2: Loading Parameters
# Load the weights into variables Theta1 and Theta2
dat1 = loadmat("./ex4weights.mat")
Theta1 = dat1["Theta1"]
Theta2 = dat1["Theta2"]
# Unroll parameters
nn_params = np.vstack([Theta1.reshape(-1, 1), Theta2.reshape(-1, 1)])
## Part 3: Compute Cost (Feedforward)
print("\nFeedforward Using Neural Network ...")
# Weight regularization parameter
lmbd = 0
J, _ = nnCostFunction(nn_params, input_layer_size, hidden_layer_size, num_labels, X, y, lmbd)
print("Cost at parameters (loaded from ex4weights): {}\n\
(this value should be about 0.2877629)".format(J))
## Part 4: Implement Regularization
print("\nChecking Cost Function (w/ Regularization) ...")
lmbd = 1
J, _ = nnCostFunction(nn_params, input_layer_size, hidden_layer_size, num_labels, X, y, lmbd)
print("Cost at parameters (loaded from ex4weights): {}\n\
(this value should be about 0.383770)".format(J))
## Part 5: Sigmoid Gradient
print("\nEvaluationg sigmoid gradient...")
g = sigmoidGradient(np.array([-1, -0.5, 0, 0.5, 1]))
print("Sigmoid gradient evaluated at [-1, -0.5, 0, 0.5, 1]:")
print(g)
print("\n")
## Part 6: Initializing Parameters
print("\nInitializing Neural Network Parameters ...")
# initial_Theta1 = randInitializeWeights(input_layer_size, hidden_layer_size)
# initial_Theta2 = randInitializeWeights(hidden_layer_size, num_labels)
# Unroll parameters
# initial_nn_params = np.vstack([initial_Theta1.reshape(-1, 1), initial_Theta2.reshape(-1, 1)])
## Part 7: Implement Backpropagation
print("\nChecking Backpropagation...")
checkNNGradients()
## Part 8: Implement Regularization
print("\nChecking Backpropagation (w/ Regularization) ...")
# Check gradients by running checkNNGradients
lmbd = 3
checkNNGradients(lmbd)
# Also output the costFunction debugging values
debug_J, _ = nnCostFunction(nn_params, input_layer_size, hidden_layer_size, num_labels, X, y, lmbd)
print("\n\nCost at (fixed) debugging parameters (w/ lambda = {}): {}"\
"\n(for lambda = 3, this value should be about 0.576051)\n".format(lmbd, debug_J))
## Part 8: Training NN
print("\nTraining Neural Network...")
lmbd = 1 # TODO optimize() can't not work with regularization now, should be 1 here
nn_params, _ = fmin_nn1(input_layer_size, hidden_layer_size, num_labels, X, y, lmbd)
Theta1 = nn_params[:hidden_layer_size*(input_layer_size+1)].reshape(hidden_layer_size, (input_layer_size+1))
Theta2 = nn_params[hidden_layer_size*(input_layer_size+1):].reshape(num_labels, (hidden_layer_size+1))
## Part 9: Visualize Weights
print("\nVisualizing Neural Network ...")
displayData(Theta1[:, 1:])
## Part 10: Implement Predict
pred = predict(Theta1, Theta2, X)
pred[pred==0] = 10 # label 10 is set to 0 in the nn model
print("\nTraining Set Accuracy: {}".format(np.mean(np.double(pred == y.ravel())) * 100))
plt.show()
if __name__ == "__main__":
main()
| rossihwang/Coursera_ML_homework_with_python | week5/ex4.py | ex4.py | py | 3,839 | python | en | code | 1 | github-code | 36 |
43111990750 | import aws_cdk as cdk
from constructs import Construct
from aws_cdk import (aws_apigateway as apigateway,
aws_lambda as lambda_,
aws_dynamodb)
class TasksService(Construct):
def __init__(self, scope: Construct, id: str):
super().__init__(scope, id)
task_table = aws_dynamodb.Table(
self,
"task_table",
partition_key=aws_dynamodb.Attribute(
name="id",
type=aws_dynamodb.AttributeType.STRING
)
)
task_lambda = lambda_.Function(
self,
"TaskLambda",
runtime=lambda_.Runtime.PYTHON_3_9,
code=lambda_.Code.from_asset("resources"),
handler="tasks.main"
)
task_lambda.add_environment("TABLE_NAME", task_table.table_name)
task_table.grant_read_write_data(task_lambda)
taks_integration = apigateway.LambdaIntegration(task_lambda)
api = apigateway.RestApi(
self, "widgets-api",
rest_api_name="Widget Service",
description="This service serves widgets."
)
api.root.add_method("ANY", taks_integration)
| basv98/api-dynamodb | tasks/tasks_service.py | tasks_service.py | py | 1,208 | python | en | code | 0 | github-code | 36 |
25607794751 | from math import gcd
n,r = map(int,input().split())
p=1
d=1
if n-r<r:
r=n-r
if r!=0:
while r:
p=p*n
d=d*r
gcdval=gcd(p,d)
p=p//gcdval
d=d//gcdval
print(p,d)
n-=1
r-=1
print(p/d)
| Nirmalkumarvs/programs | Math Algorithms/NCR.py | NCR.py | py | 277 | python | en | code | 0 | github-code | 36 |
2893340600 | import re
from RegExp import *
from Detector import *
class CLanguageCS:
    """Syntax constants for the C# language.

    Groups the comment markers, code-block delimiters and single-character
    tokens that the detector uses when scanning C# source text.
    """

    # Comment markers
    SINGLE_LINE_COMMENT_PREFIX = "//"
    MULTI_LINE_COMMENT_PREFIX = "/*"
    MULTI_LINE_COMMENT_SUFFIX = "*/"

    # Code-block delimiters
    BLOCK_PREFIX = "{"
    BLOCK_SUFFIX = "}"

    # Single-character tokens, in scanning order:
    # terminator, braces, brackets, parens, angle brackets,
    # logical operators, arithmetic operators, identifier underscore.
    TOKENS = list(";{}[]()<>&|+-*/_")
class CRegExpCS:
    """Regular-expression fragments and pre-compiled patterns used to detect
    C# statements (blocks, conditions, loops) and actions in source text.

    Built from the generic pieces exposed by ``CRegExp`` (``SPACES``,
    ``CONTENT``, ``ROUND_BRACKETS``, ``CURLY_BRACKETS``) from the RegExp
    module — TODO confirm their exact definitions against RegExp.py.
    """

    # Command terminator.
    # NOTE: attribute name kept as-is ("SEPERATOR") for backward compatibility.
    COMMAND_SEPERATOR = ";"
    # Token Section: member-access delimiter, escaped for regex use
    # (raw string avoids the invalid "\." escape warning).
    FIELD_DELIMETER = r"\."
    # Block Section
    BLOCK = CRegExp.CURLY_BRACKETS
    DO_BLOCK = "do" + CRegExp.SPACES + CRegExp.CURLY_BRACKETS
    ENUM_BLOCK = "enum" + CRegExp.SPACES + CRegExp.CURLY_BRACKETS
    # Code Lines Section: arbitrary content terminated by ';'
    CODE_LINE = CRegExp.SPACES + CRegExp.CONTENT + COMMAND_SEPERATOR
    # Conditions Section
    IF_CONDITION = "if" + CRegExp.SPACES + CRegExp.ROUND_BRACKETS
    ELSE_CONDITION = "else" + CRegExp.SPACES + CRegExp.CURLY_BRACKETS
    # BUGFIX: "else if" is followed by a parenthesized condition
    # ("else if (...)"), mirroring IF_CONDITION above — the previous
    # CURLY_BRACKETS pattern could never match valid C# and left
    # else-if branches undetected.
    ELSE_IF_CONDITION = "else if" + CRegExp.SPACES + CRegExp.ROUND_BRACKETS
    # Loops Section: each loop keyword is followed by a parenthesized header
    FOR_LOOP = "for" + CRegExp.SPACES + CRegExp.ROUND_BRACKETS
    FOREACH_LOOP = "foreach" + CRegExp.SPACES + CRegExp.ROUND_BRACKETS
    WHILE_LOOP = "while" + CRegExp.SPACES + CRegExp.ROUND_BRACKETS
    # Actions Section: member access such as "obj.Field"
    FIELD_ACTION = CRegExp.CONTENT + FIELD_DELIMETER + CRegExp.CONTENT
    # Compiled Statements Section: patterns pre-compiled once at class
    # creation so detection loops avoid per-call compilation overhead
    COMPILED_BLOCK = re.compile(BLOCK)
    COMPILED_IF_CONDITION = re.compile(IF_CONDITION)
    COMPILED_ELSE_CONDITION = re.compile(ELSE_CONDITION)
    COMPILED_ELIF_CONDITION = re.compile(ELSE_IF_CONDITION)
    COMPILED_FOR_LOOP = re.compile(FOR_LOOP)
    COMPILED_FOREACH_LOOP = re.compile(FOREACH_LOOP)
    COMPILED_WHILE_LOOP = re.compile(WHILE_LOOP)
    COMPILED_ENUM_BLOCK = re.compile(ENUM_BLOCK)
    COMPILED_DO_BLOCK = re.compile(DO_BLOCK)
    # All statement patterns, in the order the detector applies them
    COMPILED_STATEMENTS = [
        COMPILED_IF_CONDITION, COMPILED_ELSE_CONDITION, COMPILED_ELIF_CONDITION, \
        COMPILED_FOR_LOOP, COMPILED_FOREACH_LOOP, COMPILED_WHILE_LOOP, \
        COMPILED_DO_BLOCK, COMPILED_ENUM_BLOCK \
    ]
    # Compiled Actions Section
    COMPILED_FIELD_ACTION = re.compile(FIELD_ACTION)
    COMPILED_ACTIONS = [
        COMPILED_FIELD_ACTION
    ]
class CKeyWordsCS:
KEYWORDS = [
"abstract", "base", "bool", "break", "byte", "case", "catch", "char", "checked", "class", "const", "continue", "decimal", "default", "delegate", "double", "enum", "explicit", "extern", "false", "finally", "float", "goto", "implicit", "int", "interface", "internal", "lock", "long", "namespace", "new", "null", "object", "operator", "override", "params", "private", "protected", "public", "readonly", "ref", "return", "sbyte", "sealed", "short", "sizeof", "stackalloc", "static", "string", "struct", "switch", "this", "throw", "true", "try", "typeof", "uint", "ulong", "unchecked", "unsafe", "ushort", "using", "using static", "virtual", "void", "volatile"
]
FUNCTIONS = [
# Common
"Main(", "ToString(", "Clone(", "ToObject(", "CompateTo(", "Equals(", "GetHashCode(", "GetName(", "GetNames(", "GetType(", "GetTypeCode(", "GetUnderlyingType(", "GetValues(", "HasFlag(", "IsDefined(", "Parse(", \
# Array
"AsReadOnly<", "BinarySearch(", "BinarySearch<", "Clear(", "ConstrainedCopy(", "ConvertAll<TInput,TOutpu", "Copy(", "CopyTo(", "CreateInstance(", "Empty<", "Equals(", "Exists<", "Find<", "FindAll<", "FindIndex<", "FindLast<", "FindLastIndex<", "ForEach<", "GetEnumerator(", "GetHashCode(", "GetLength(", "GetLongLength(", "GetLowerBound(", "GetType(", "GetUpperBound(", "GetValue(", "IndexOf(", "IndexOf<", "Initialize(", "LastIndexOf(", "LastIndexOf<", "MemberwiseClone(", "Resize<", "Reverse(", "SetValue(", "Sort(", "Sort<", "Sort<TKey,TValue>(", "TrueForAll<", \
# Console
"Beep(", "Clear(", "MoveBufferArea(", "OpenStandardError(", "OpenStandardInput(", "OpenStandardOutput(", "Read(", "ReadKey(", "ReadLine(", "ResetColor(", "SetBufferSize(", "SetCursorPosition(", "SetError(", "SetIn(", "SetOut(", "SetWindowPosition(", "SetWindowSize(", "Write(", "WriteLine(", \
# ICollection, IList, IDictionary
"Contains(", "Add(", "Remove(", "Clear(", "IsReadOnly(", "IndexOf(", "Insert(", "RemoveAt(", "GetEnumerator(", "AsParallel(", "Cast<", "OfType<", "AsQueryable(",
]
DATA_STRUCTURES = [
# Data Structures
"ArrayList(", "List<", "LinkedList<", "Dictionary<", "HashSet<", "KeyValuePair<", "Queue<", "SortedDictionary<", "SortedList<", "SoretdSet<", "Stack<", "SynchronizedCollection<", "SynchronizedKeyedCollection<", "SynchronizedReadOnlyCollection<",
]
LIBRARIES = [
"System", "Collections"
]
# Static Analyse
keywords = CKeyWordsCS.KEYWORDS
libraries = CKeyWordsCS.LIBRARIES
functions = CKeyWordsCS.FUNCTIONS
statements = CRegExpCS.COMPILED_STATEMENTS
actions = CRegExpCS.COMPILED_ACTIONS
tokens = TOKENS
data_structures = CKeyWordsCS.DATA_STRUCTURES
commnad_seperator = CRegExpCS.COMMAND_SEPERATOR
case_sensitivity = True
class CDetectorCS(CDetectorBlockCurlyBrackets):
    """Concrete language detector for C# source code.

    `CDetectorBlockCurlyBrackets` comes from the project's Detector module
    (star import); presumably it implements the generic detection logic for
    curly-brace-delimited languages -- TODO confirm.
    """
    DETECTOR_NAME = "C#"
    LANGUAGE = CLanguageCS()
    # Ctor
    def __init__(self):
        # Hand the static C# language description to the generic base detector.
        super(CDetectorCS, self).__init__(CDetectorCS.LANGUAGE)
| AvivYaniv/FireWall | hw5/proxy/DetectorCS.py | DetectorCS.py | py | 8,586 | python | en | code | 1 | github-code | 36 |
class Solution:
    def twoOutOfThree(
        self, nums1: List[int], nums2: List[int], nums3: List[int]
    ) -> List[int]:
        """Return the distinct values present in at least two of the arrays.

        Order of the returned list is unspecified (set iteration order).
        """
        a, b, c = set(nums1), set(nums2), set(nums3)
        # A value qualifies iff it belongs to at least two of the three
        # sets, i.e. to the union of the pairwise intersections.
        return list((a & b) | (a & c) | (b & c))
| hellojukay/leetcode-cn | src/two-out-of-three.py | two-out-of-three.py | py | 507 | python | en | code | 3 | github-code | 36 |
37349410877 | """Test all electron density for right interpretation of coreholes"""
import pytest
from ase.build import molecule
from ase.units import Bohr
from gpaw import GPAW, PoissonSolver
from gpaw.mixer import Mixer
from gpaw.test import gen
@pytest.mark.later
def test_aed_with_corehole_li():
    """Compare number of electrons for different channels with corehole"""
    # Li PAW setup with a 1s core hole (corehole=(n=1, l=0, occupation=1)).
    li_setup = gen('Li', name='fch1s', corehole=(1, 0, 1), xcname='PBE')
    grf = 1  # grid refinement factor for the all-electron density
    atoms = molecule('Li2')
    atoms.center(vacuum=2.5)
    # charge=-1: presumably compensates the electron removed by the core
    # hole so the total electron count stays at 6 -- TODO confirm.
    calc = GPAW(xc='PBE',
                mixer=Mixer(),
                setups={0: li_setup},
                charge=-1,
                poissonsolver=PoissonSolver('fd'))
    atoms.calc = calc
    atoms.get_potential_energy()
    # Spin-paired: integrating the all-electron density over the grid
    # (converted to Bohr^3) must give all 6 electrons of Li2.
    n_sg = calc.get_all_electron_density(gridrefinement=grf)
    ne_sz = calc.density.gd.integrate(
        n_sg, global_integral=False) * (Bohr / grf)**3
    assert ne_sz == pytest.approx(6.0, abs=1e-5)
    # Spin-polarized restart: each spin channel must hold 3 electrons.
    atoms.set_initial_magnetic_moments([0.66, .34])
    calc = calc.new(spinpol=True)
    atoms.calc = calc
    atoms.get_potential_energy()
    for sz in range(2):
        n_sg = calc.get_all_electron_density(spin=sz, gridrefinement=grf)
        ne_sz = calc.density.gd.integrate(
            n_sg, global_integral=False) * (Bohr / grf)**3
        assert ne_sz == pytest.approx(3.0, abs=1e-5)
| f-fathurrahman/ffr-learns-gpaw | my_gpaw/test/corehole/test_li2.py | test_li2.py | py | 1,336 | python | en | code | 0 | github-code | 36 |
37486834753 | from collection import deque
def fill(point, canvas, color):
    """Recursively flood-fill `canvas` with `color`, starting at `point`.

    `point` is an (x, y) pair; `canvas` is a grid indexed as canvas[y][x].

    Bug fixes vs. the original: `point` is now unpacked *before* x/y are
    used (the original referenced x and y before assigning them, raising
    NameError), and the bounds check now tests indices against the grid
    dimensions (the original tested value membership in the rows).

    NOTE(review): like the original, this recolors every reachable cell
    that is not already `color`, regardless of its starting color; a
    classic paint-bucket would stop at cells whose color differs from the
    start cell's -- confirm which behavior is intended.
    """
    x, y = point
    # Out of bounds: nothing to do.
    if not (0 <= y < len(canvas) and 0 <= x < len(canvas[y])):
        return
    # Already the target color: stop (this also terminates the recursion).
    if canvas[y][x] == color:
        return
    canvas[y][x] = color
    # Recurse into the four orthogonal neighbors.
    fill((x + 1, y), canvas, color)
    fill((x - 1, y), canvas, color)
    fill((x, y + 1), canvas, color)
    fill((x, y - 1), canvas, color)
def fill_bfs(point, canvas, color):
    """Iterative (BFS) flood fill of `canvas` with `color` from `point`.

    `point` is an (x, y) pair; `canvas` is a grid indexed as canvas[y][x].
    Returns None.

    Bug fixes vs. the original: the bounds checks now test index ranges
    (the original's `x2 not in canvas` tested value membership in the
    grid), and deque is imported from the correctly-spelled `collections`
    module (the module-level `from collection import deque` is a typo).
    """
    # Local import works around the broken module-level import.
    from collections import deque

    x, y = point
    if not (0 <= y < len(canvas) and 0 <= x < len(canvas[y])):
        return None
    frontier = deque()
    frontier.append((x, y))
    while frontier:
        x, y = frontier.popleft()
        canvas[y][x] = color
        # Enqueue any in-bounds neighbor that is not yet the target color.
        for x2, y2 in [(x - 1, y), (x + 1, y), (x, y - 1), (x, y + 1)]:
            if not (0 <= y2 < len(canvas) and 0 <= x2 < len(canvas[y2])):
                continue
            if canvas[y2][x2] != color:
                frontier.append((x2, y2))
    return None
| tvl-fyi/depot | users/wpcarro/scratch/facebook/recursion-and-dynamic-programming/paint-fill.py | paint-fill.py | py | 918 | python | en | code | 0 | github-code | 36 |
29572532106 | """ Notes
-Need to have opencv built with gstreamer support
print(cv2.getBuildInformation())
-Set Xavier to max power:
    (do it manually, or provide the sudo password via the -sudopassword script argument)
sudo nvpmodel -m 0
sudo jetson_clocks
-JTOP - helpful activity monitor
sudo apt-get install python3-pip -y
sudo python3 -m pip install --upgrade pip
sudo pip3 install -U jetson-stats
sudo reboot
-testcard output to MUX - NVENC chip should light up
gst-launch-1.0 videotestsrc ! video/x-raw ! nvvidconv ! nvv4l2h264enc maxperf-enable=1 ! h264parse ! flvmux streamable=true ! queue ! rtmpsink location='rtmp://global-live.mux.com:5222/app/51bc0427-ad29-2909-4979-11ee335d2b53'
-to read
https://github.com/dusty-nv/jetson-inference/blob/master/docs/aux-image.md
https://github.com/Fuell-ai/acm/blob/jetcam_bits/jetcam/functions/nvidia_gpu_buff_share.py
"""
import cv2
import time
import math
import vpi
import numpy as np
from contextlib import contextmanager
import math
from jetson_inference import detectNet
import jetson_utils
import threading
import queue
import copy
import json
from datetime import datetime
import subprocess
import argparse
@contextmanager
def time_it(comment):
    """Context manager that prints the elapsed wall-clock time of its body.

    The message is printed even if the body raises, in the form
    "<comment>: <ms>ms".
    """
    start = time.perf_counter()
    try:
        yield
    finally:
        elapsed = time.perf_counter() - start
        print(f"{comment}: {1000*(elapsed):.3f}ms")
#print(" ")
def gstreamer_out():
    """Build the GStreamer output pipeline string for cv2.VideoWriter.

    Hardware-encodes H.264 on the Jetson (nvv4l2h264enc) and pushes the
    stream to MUX over RTMP.  `queue leaky=downstream` throws away old
    frames (default queue depth is 5); `sync=false` may help with live
    sources.  Not tested with real cameras.
    """
    stages = [
        "appsrc",
        "videoconvert",
        "video/x-raw, framerate=(fraction)25/1, format=RGBA",
        "nvvidconv",
        "nvv4l2h264enc",
        "h264parse",
        "flvmux",
        "queue leaky=downstream",
        "rtmpsink location=rtmp://global-live.mux.com:5222/app/eb27591f-6aa1-aaf9-8be8-978237205f5a sync=false",
    ]
    return " ! ".join(stages)
def detector_cuda(inbox, outbox, ID):
    """Thread worker: run object detection on images popped from `inbox`.

    Runs forever.  Pops jetson.utils.cudaImage objects from `inbox`,
    copies them into a reusable GPU buffer, runs detectNet, serializes
    the detections to JSON and puts the JSON string on `outbox` (only
    when `outbox` is empty, so stale results are dropped rather than
    queued).  `ID` is a label used to prefix the timing printouts.
    """
    # this is default ssd - example of how we will load
    net = detectNet(model="/home/jetcam/mb1025_voc1501/ssd-mobilenet.onnx",
                    input_blob="input_0",
                    output_cvg="scores",
                    output_bbox="boxes",
                    threshold=0.1)
    # net = detectNet(
    #     "ssd-mobilenet-v2",
    #     threshold=0.3)
    cuda_buff = None
    while True:
        if inbox.empty() is False:
            # blocking call here to see how long it takes to
            # pop image off queue
            with time_it(f"{ID}: get object off queue"):
                cuda_obj = inbox.get(block=True)
            #image type is 'jetson.utils.cudaImage'
            # Lazily allocate one mapped GPU buffer matching the first
            # image's geometry; reused for every subsequent frame.
            if cuda_buff is None:
                with time_it(f"{ID} create GPU buffer (once only):"):
                    cuda_buff = jetson_utils.cudaAllocMapped(
                        width=cuda_obj.width,
                        height=cuda_obj.height,
                        format=cuda_obj.format)
            with time_it(f"{ID}::::::::::: total time :::::::"):
                # copy image or something goes weird
                # allocate this outside of loop
                with time_it(f"{ID} copy GPU buffer:"):
                    jetson_utils.cudaMemcpy(cuda_buff, cuda_obj)
                with time_it(f"{ID} detectnet"):
                    detections = net.Detect(cuda_buff)
                with time_it(f"{ID}: feedback dects"):
                    all_dects = {}
                    dectdeets = None
                    # output is <class 'jetson.inference.detectNet.Detection'>
                    # single object is <detectNet.Detection object>
                    #{'ClassID': 2, 'Left': 555.9375, 'Top': 181.142578125,
                    # 'Right': 759.375, 'Bottom': 324.580078125,
                    # 'Confidence': 0.168701171875, 'index': '21'}
                    # Flatten each Detection into a plain dict so it can
                    # be JSON-serialized for the consumer thread.
                    for index, deect in enumerate(detections):
                        dectdeets = {}
                        dectdeets["ClassID"] = deect.ClassID
                        dectdeets["Left"] = deect.Left
                        dectdeets["Top"] = deect.Top
                        dectdeets["Right"] = deect.Right
                        dectdeets["Bottom"] = deect.Bottom
                        dectdeets["Confidence"] = deect.Confidence
                        dectdeets["index"] = str(index)
                        all_dects[index]=copy.deepcopy(dectdeets)
                    output = json.dumps(all_dects)
                    # Drop the result rather than block if the consumer
                    # hasn't taken the previous one yet.
                    if outbox.empty():
                        outbox.put(output)
        else:
            print(f"{ID}: Waiting for image")
            time.sleep(0.02)
def main_videocap():
    """Main capture/warp/annotate/stream loop (runs forever).

    Starts detector worker thread(s), loads a test image as a looping
    video source, applies a time-varying perspective warp on the GPU via
    VPI, overlays a timestamp and any detection boxes, and writes frames
    to the MUX RTMP endpoint through a GStreamer-backed VideoWriter.
    """
    # Bounded queues so a slow consumer drops work instead of backing up.
    _in_box = queue.Queue(maxsize=3)
    _dects_box = queue.Queue(maxsize=3)
    workers = []
    # NOTE(review): `id` shadows the builtin; only one worker is started
    # here (range(0, 1)).
    for id in range (0,1):
        workers.append(threading.Thread(
            target=detector_cuda,
            args=(_in_box, _dects_box, f"IF{id}", )))
        workers[-1].start()
    input_size = (1920, 1080) #(3840, 2160)
    output_size = (1920, 1080)
    # Prepare a resized copy of the test image to use as the video source.
    file_path = "/home/jetcam/tensorrt_hello/jetson-inference/data/images/humans_0.jpg"
    img_people = cv2.imread(file_path)
    img_people = cv2.resize(img_people, input_size)
    file_path_save = file_path.replace(".jpg", "_copy.jpg")
    cv2.imwrite(file_path_save, img_people)
    # more args etc
    # https://github.com/dusty-nv/jetson-inference/blob/master/docs/aux-streaming.md#source-code
    # videosource returns its own GPU buffer so don't have to
    # define one with cudalloc
    # NOTE(review): `input` shadows the builtin.
    input = jetson_utils.videoSource(file_path_save, ["--loop=-1"])
    img_people = input.Capture(format='rgb8')
    #img_people = resize(img_people, input_size)
    # set up parallel process streams
    streamLeft = vpi.Stream()
    streamRight = vpi.Stream()
    # using gstreamer instead of FFMPEG, Nvidia doesn't
    # support FFMPEG 100% for hardware dec/enc
    #ensure opencv is built with gstreamer support
    out_stream = cv2.VideoWriter(
        filename=gstreamer_out(),
        apiPreference=cv2.CAP_GSTREAMER,
        fourcc=0,
        fps=25.0,
        frameSize=output_size)
    # not in loop while pulling from images - disc read time
    input_img_1 = input.Capture(format='rgb8')
    input_img_2 = input.Capture(format='rgb8')
    cnt = 0
    while True:
        cnt +=1
        # time-based moving transform
        # Homography with an oscillating horizontal shear term.
        hom = np.array([
            [1, (math.sin(cnt/10)), 0],
            [0, 1, 0],
            [0, 0, 1]])
        print("------")
        with time_it("VC: upload to GPU (2)"):
            with vpi.Backend.CUDA:
                # upload image into GPU
                with streamLeft:
                    frame1 = vpi.asimage(input_img_1)#.convert(vpi.Format.RGB8)
                with streamRight:
                    frame2 = vpi.asimage(input_img_2)#.convert(vpi.Format.RGB8)
        with time_it("VC: perp processing & sync (2)"):
            with vpi.Backend.CUDA:
                # VIC processor can be used here - need to convert
                # image to correct format (NVIDIA VPI doc page)
                # but not much performance gain
                # if we run out of GPU it will be useful
                # https://docs.nvidia.com/vpi/algo_persp_warp.html#algo_persp_warp_perf
                with streamLeft:
                    frame1 = frame1.perspwarp(hom)
                with streamRight:
                    frame2 = frame2.perspwarp(hom)
            # wait for GPU streams to finish their tasks
            streamLeft.sync()
            streamRight.sync()
        # Non-blocking pickup of the latest detection results, if any.
        result_dict = None
        if _dects_box.empty() is False:
            with time_it("VC: get detections off queue"):
                try:
                    result_dict = _dects_box.get(block=False)
                    result_dict = json.loads(result_dict)
                except queue.Empty:
                    pass
        with time_it("VC: output GPU to CPU (1)"):
            # lock GPU memory to pull out buffer
            # here it is assumed the payload is
            # 1080p
            with frame1.rlock_cpu() as data:
                img_copy = data.copy()
        # Feed the detector only when its queue is drained (frame dropping).
        if _in_box.empty() :
            with time_it("VC: put image on queue (2)"):
                _in_box.put(input_img_1)
                _in_box.put(input_img_2)
        #time.sleep(1000)
        with time_it("VC: draw on rectangles"):
            # Timestamp overlay plus one rectangle per detection.
            ts = str(datetime.now().strftime("%H:%M:%S"))
            cv2.putText(
                img_copy,
                ts,
                (80, 80),
                cv2.FONT_HERSHEY_SIMPLEX,
                fontScale=3,
                color=(255, 0, 0),
                thickness=4)
            if result_dict is not None:
                print(f"detections found?{len(result_dict.values())}")
                for dect in result_dict.values():
                    print(dect)
                    cv2.rectangle(
                        img_copy,
                        (int(dect["Left"]),int(dect["Top"])),
                        (int(dect["Right"]),int(dect["Bottom"])),
                        (255, 0, 0),
                        3)
        with time_it("VC: output to mux"):
            #print(img_copy.shape)
            out_stream.write(img_copy)
def xavier_power_settings(sudo_pass):
    """Put the Jetson Xavier into a high-power mode via nvpmodel/jetson_clocks.

    Pipes `sudo_pass` to `sudo -S` for each command, then queries the
    active power mode and raises if it does not contain '20W'.

    SECURITY: the sudo password is handled in plain text (echoed through a
    pipe and visible in the process list) -- acknowledged by the author as
    quick-and-dirty testing only; do not use outside a trusted dev box.

    Raises:
        Exception: if the queried power mode does not mention '20W'.
    """
    # obviously not secure - for quick and dirty testing
    sudo_password = sudo_pass
    commands = ['sudo nvpmodel -m 8', 'sudo jetson_clocks']
    check_pwr_mode = 'sudo nvpmodel -q'
    for command in commands:
        command = command.split()
        print("command" , command)
        # echo the password into `sudo -S`, which reads it from stdin.
        cmd1 = subprocess.Popen(['echo', sudo_password], stdout=subprocess.PIPE)
        cmd2 = subprocess.Popen(['sudo', '-S'] + command, stdin=cmd1.stdout, stdout=subprocess.PIPE)
        print(cmd2.stdout.read().decode())
        time.sleep(2)
    print("checking power mode")
    cmd1 = subprocess.Popen(['echo', sudo_password], stdout=subprocess.PIPE)
    cmd2 = subprocess.Popen(['sudo', '-S'] + check_pwr_mode.split(), stdin=cmd1.stdout, stdout=subprocess.PIPE)
    capture = (cmd2.stdout.read().decode())
    print(capture)
    #if 'MODE_15W_2CORE' not in capture:
    #    raise Exception("XAVIER not in max power mode - try again with correct sudo pass")
    if '20W' not in capture:
        raise Exception("XAVIER not in max power mode - try again with correct sudo pass")
if __name__ == '__main__':
    # Entry point: require the sudo password so the Xavier can be switched
    # to its high-power mode before the capture loop starts.
    parser = argparse.ArgumentParser(
        description="",
        formatter_class=argparse.RawTextHelpFormatter,
    )
    parser.add_argument(
        '-sudopassword',
        help='sudo password to enable power settings',
        required=True)
    args = parser.parse_args()
    xavier_power_settings(sudo_pass=args.sudopassword)
    # Runs forever (capture / warp / detect / stream loop).
    main_videocap()
| LiellPlane/DJI_UE4_poc | Source/lumotag/mobilenet_inference_tidied.py | mobilenet_inference_tidied.py | py | 10,889 | python | en | code | 0 | github-code | 36 |
6811791128 | #!/usr/bin/env python3
from random import random
from z3 import *
import numpy as np
import time
from math import *
from statistics import *
from random_lib import *
from matplotlib import pyplot as plt
from matplotlib.patches import Rectangle
from collections import defaultdict
import heapq
import faulthandler
faulthandler.enable()  # dump a traceback on hard crashes (segfault, hang signals)
# bad code goes here
import sys
# Vis_Cluster.traverse is recursive over grid cells, so the default
# recursion limit is far too small for large maps.
sys.setrecursionlimit(10**6)
# Module-level cluster state shared between the clustering helpers and SMT:
# connected components of visible (0.5) cells and of unexplored (0.0) cells.
vis_clusters = []
unexp_clusters = []
class Vis_Cluster:
    """Group visible cells (grid value 0.5) into connected components.

    Uses depth-first recursion over 4-neighbors.  Relies on module-level
    globals: `map` (the occupancy grid, shadows the builtin; presumably
    defined elsewhere in this file -- TODO confirm), `visible_cells`
    (list of (x, y) visible cells) and `vis_clusters` (reset here and
    filled with one numpy array of (x, y) pairs per component).
    """
    def __init__(self,m,n):
        # Get the dimensions of the grid
        self.rows = m
        self.cols = n
        self.visited_map = np.zeros((m,n), dtype=bool)
        global vis_clusters
        vis_clusters = []
        # Maps "x+y" key strings to the index of the cluster owning that cell.
        self.cell2cluster = dict()
        self.vis_cells_per_cluster = []
    def traverse(self,r, c):
        """Recursive DFS: add (c, r) to the current component if visible."""
        # Check if the current cell is out of bounds or has already been visited
        if r < 0 or r >= self.rows or c < 0 or c >= self.cols or self.visited_map[r][c]:
            return
        # Check if the current cell is a 0
        if map[r][c] != 0.5:
            return
        # Mark the current cell as visited
        self.visited_map[r][c] = True
        self.component.append((c,r))
        (x,y) = (c,r)
        key = str(x)+'+'+str(y)
        self.cell2cluster[key] = len(vis_clusters)
        # Recursively traverse the neighbors of the current cell
        self.traverse(r + 1, c) # right
        self.traverse(r - 1, c) # left
        self.traverse(r, c + 1) # down
        self.traverse(r, c - 1) # up
    def make_clusters(self):
        """Build all visible clusters; return (cells-per-cluster, cell2cluster)."""
        for (x,y) in visible_cells:
            (r,c) = (y,x)
            # Skip cells that have already been visited
            if self.visited_map[r][c]:
                continue
            # Initialize a new connected component as a list of coordinates
            self.component = []
            # Traverse the connected component and add the coordinates of each cell to the list
            self.traverse(r, c)
            # Add the connected component to the list of components
            vis_clusters.append(np.array(self.component))
            self.vis_cells_per_cluster.append(len(self.component))
        return np.array(self.vis_cells_per_cluster),self.cell2cluster
class Unexp_Cluster:
    """Group unexplored cells (grid value 0.0) into connected components.

    Uses BFS over 4-neighbors.  Relies on module globals `map` (the grid)
    and `unexp_clusters` (reset here).  NOTE(review): `bfs` only ever
    appends the seed cell to `component`, so `unexp_clusters` holds
    single-cell arrays; full membership is only recorded via
    `cell2cluster`/`count` -- confirm that is intended.
    """
    def __init__(self,m,n):
        # Get the dimensions of the grid
        self.rows = m
        self.cols = n
        global unexp_clusters
        unexp_clusters = []
        self.visited = np.zeros((m,n), dtype=bool)
        # Maps "x+y" key strings to the index of the owning unexplored cluster.
        self.cell2cluster = dict()
        self.cells_per_cluster = []
    def bfs(self,r, c):
        """BFS from (r, c); return (seed component list, cell count)."""
        component = []
        count = 0
        # Create a queue to store the cells to visit
        component.append((c,r))
        # NOTE(review): list.pop(0) is O(n); collections.deque would be faster.
        queue = [(r,c)]
        while queue:
            r, c = queue.pop(0)
            (x,y) = (c,r)
            if not self.visited[r][c]:
                count+=1
                key = str(x)+'+'+str(y)
                self.cell2cluster[key] = len(unexp_clusters)
                self.visited[r][c] = True
                # Check the four neighboring cells
                for r_, c_ in [(r-1, c), (r+1, c), (r, c-1), (r, c+1)]:
                    if 0 <= r_ < len(map) and 0 <= c_ < len(map[0]) and map[r_][c_] == 0.0:
                        queue.append((r_, c_))
        return component,count
    def make_clusters(self):
        """Build all unexplored clusters; return (counts, cell2cluster)."""
        for r in range(self.rows):
            for c in range(self.cols):
                # Skip cells that have already been visited
                if map[r][c] == 0.0 and not self.visited[r][c]:
                    # Traverse the connected component and add the coordinates of each cell to the list
                    component,count = self.bfs(r, c)
                    # Add the connected component to the list of components
                    unexp_clusters.append(np.array(component))
                    self.cells_per_cluster.append(count)
        return np.array(self.cells_per_cluster),self.cell2cluster
class SMT:
    def __init__(self,R,T,Dx,Dy,vis_dist,n_neighbors):
        """Set up the z3 optimizer and decision variables.

        Args:
            R: number of robots.
            T: planning horizon (timesteps per robot path).
            Dx, Dy: grid dimensions (x and y extents).
            vis_dist: per-step reachability radius used by collect_reachables.
            n_neighbors: how many nearest visible cells to consider in
                distance/reward heuristics.
        """
        self.s = Optimize()
        self.R = R
        self.T = T
        self.Dx = Dx
        self.Dy = Dy
        # Grid shape taken from the module-level map (columns, rows).
        self.m = len(map[0])
        self.n = len(map)
        self.vis_dist = vis_dist
        self.n_neighbors = n_neighbors
        # Objective weights (previous values kept in trailing comments).
        self.W_Cost = 1000 # 100
        self.W_Reward = 1000 # 10
        self.WR_Cov = max(Dx,Dy)*(10+0.1) # 300
        self.WR_Vis = max(Dx,Dy)*(10+0.1) # 300
        self.reachable_cells = [[] for i in range(R)]
        # z3 decision variables: position X/Y per robot per timestep,
        # per-step cost C and reward Re (T-1 transitions).
        self.X = [[Int("x%s_%s" % (i, j)) for j in range(T)] for i in range(R)]
        self.Y = [[Int("y%s_%s" % (i, j)) for j in range(T)] for i in range(R)]
        self.C = [[Real("c_%s_%s" % (i, j)) for j in range(T-1)] for i in range(R)]
        self.Re = [[Real("re_%s_%s" % (i, j)) for j in range(T-1)] for i in range(R)]
        self.total_cost = Real('total_cost')
        self.total_reward = Real('total_reward')
        self.Obj_func = Real('Obj_func')
    def new_assign_Bots2Clusters(self,bot_loc):
        """Assign robots to visible-cell clusters (heuristic, greedy).

        Stages:
          1. A robot adjacent (distance 1) to a cluster is assigned to it
             ("near" assignment; prefers an empty small cluster, else the
             largest adjacent one).
          2. Remaining island clusters with no robot get the closest free
             robot with an unobstructed (no visible cell in between) path
             ("far" assignment).
          3. Any leftover robots are assigned greedily to the cluster with
             the most cells per assigned robot, subject to line-of-approach
             and remaining-work checks; a second relaxed pass drops the
             remaining-work check.

        Populates self.clusters_assigned, near_b2c, far_b2c, bots_assigned,
        D (cluster-robot min distances), D_min_idx and bots_per_cluster.
        """
        nc = len(vis_clusters)
        nr = self.R
        self.clusters_assigned = [[] for c in range(nc)]
        self.near_b2c = [[] for c in range(nc)]
        self.far_b2c = [[] for c in range(nc)]
        self.bots_assigned = [[] for r in range(nr)]
        self.D = np.zeros((nc,nr))
        self.D_min_idx = np.zeros((nc,nr))
        count_clusters_unassigned = nc
        count_bots_unassigned = nr
        # Stage 1: distance matrix + "near" (adjacent) assignments.
        for r in range(nr):
            c_near = []
            (rx,ry) = bot_loc[r]
            for c in range(nc):
                # Manhattan distances from robot r to every cell of cluster c.
                dist = abs(rx-vis_clusters[c][:,0]) + abs(ry-vis_clusters[c][:,1])
                idxs = dist.argsort()[:self.n_neighbors]
                d_min = dist[idxs[0]]
                #d_avg = np.average(dist[idxs])
                self.D_min_idx[c][r] = idxs[0]
                self.D[c][r] = d_min
                if d_min==1:
                    c_near.append(c)
            if len(c_near)>0:
                c_near_cells = np.array(self.vis_cells_per_cluster[c_near])
                idxs = c_near_cells.argsort()
                c_near_min = c_near[idxs[0]]
                c_near_max = c_near[idxs[len(idxs)-1]]
                c = 0
                # Prefer the smallest adjacent cluster if it has no robot
                # yet, otherwise take the largest adjacent cluster.
                if len(self.clusters_assigned[c_near_min]) == 0:
                    c = c_near_min
                else:
                    c = c_near_max
                self.clusters_assigned[c].append(r)
                self.near_b2c[c].append(r)
                self.bots_assigned[r].append(c)
                count_bots_unassigned += -1
                count_clusters_unassigned += -1
        Idx_sorted =self.D.argsort(axis=None)
        # Assigning island of visible cells which have zero assigned robots
        for idx in Idx_sorted:
            c = idx // nr
            r = idx % nr
            if len(self.clusters_assigned[c]) == 0 and len(self.bots_assigned[r]) == 0 and self.inbetween_vis(r,c)==0:
                self.clusters_assigned[c].append(r)
                self.far_b2c[c].append(r)
                self.bots_assigned[r].append(c)
                count_bots_unassigned += -1
                count_clusters_unassigned += -1
        # Stage 3a: greedy assignment by cells-per-robot workload, requiring
        # both a clear approach and positive remaining work.
        self.bots_per_cluster = np.array([ len(bots) for bots in self.clusters_assigned])
        self.cells_per_cluster = self.vis_cells_per_cluster + self.unexp_cells_per_cluster
        cells_per_bot = np.array([self.cells_per_cluster[c]/ self.bots_per_cluster[c] if self.bots_per_cluster[c]>0 else 0 for c in range(nc)])
        bots_unassigned = []
        for r in range(self.R) :
            if len(self.bots_assigned[r]) == 0:
                bots_unassigned.append(r)
        while not(count_bots_unassigned == 0):
            c = np.argmax(cells_per_bot)
            dist = self.D[c][bots_unassigned]
            idxs = dist.argsort()
            assigned = False
            for i in idxs :
                r = bots_unassigned[i]
                if (self.inbetween_vis(r,c)==0 and self.cells_2_cover(c,r)>0 ):
                    bots_unassigned.pop(i)
                    self.clusters_assigned[c].append(r)
                    self.bots_assigned[r].append(c)
                    self.bots_per_cluster[c]+=1
                    cells_per_bot[c] = self.cells_per_cluster[c]/self.bots_per_cluster[c]
                    count_bots_unassigned += -1
                    assigned = True
                    break
            if(assigned == False):
                # No robot fits this cluster: retire it from consideration.
                cells_per_bot[c] = 0
            cls_canot_be_assigned_count = 0
            for c in range(nc):
                if cells_per_bot[c] == 0:
                    cls_canot_be_assigned_count+=1
            if cls_canot_be_assigned_count == nc:
                break
        #print(cls_canot_be_assigned)
        # Stage 3b: relaxed second pass (drops the cells_2_cover check) for
        # any robots still unassigned.
        self.bots_per_cluster = np.array([ len(bots) for bots in self.clusters_assigned])
        self.cells_per_cluster = self.vis_cells_per_cluster + self.unexp_cells_per_cluster
        cells_per_bot = np.array([self.cells_per_cluster[c]/ self.bots_per_cluster[c] if self.bots_per_cluster[c]>0 else 0 for c in range(nc)])
        bots_unassigned = []
        for r in range(self.R) :
            if len(self.bots_assigned[r]) == 0:
                bots_unassigned.append(r)
        while (count_bots_unassigned != 0):
            c = np.argmax(cells_per_bot)
            dist = self.D[c][bots_unassigned]
            idxs = dist.argsort()
            assigned = False
            for i in idxs :
                r = bots_unassigned[i]
                if (self.inbetween_vis(r,c)==0 ):
                    bots_unassigned.pop(i)
                    self.clusters_assigned[c].append(r)
                    self.bots_assigned[r].append(c)
                    self.bots_per_cluster[c]+=1
                    cells_per_bot[c] = self.cells_per_cluster[c]/self.bots_per_cluster[c]
                    count_bots_unassigned += -1
                    assigned = True
                    break
            if(assigned == False):
                cells_per_bot[c] = 0
    def cells_2_cover(self,c,r):
        """Estimate work left in cluster c if far robot r is also assigned.

        Starts from the cluster's total cell count and subtracts cells the
        already-near robots can cover while each far robot travels in.

        NOTE(review): the loop iterates sorted *index values* from argsort
        but uses them as positions (new_far_b2c[i], new_far_b2c[i-1]);
        that only matches the sorted order when idxs happens to be
        [0, 1, ...] -- confirm intended.
        """
        near_bots = len(self.near_b2c[c])
        cells_2_cover = self.cells_per_cluster[c]
        new_far_b2c = self.far_b2c[c] + [r]
        dist = self.D[c][new_far_b2c]
        idxs = np.argsort(dist)
        for i in idxs :
            if(i==0):
                r1 = new_far_b2c[i]
                cells_2_cover -= (self.D[c][r1]-1)*(near_bots)
            else:
                r1 = new_far_b2c[i-1]
                r2 = new_far_b2c[i]
                cells_2_cover -= (self.D[c][r2]-self.D[c][r1])*(near_bots+i)
        return cells_2_cover
    def new2_assign_Bots2Clusters(self,bot_loc):
        """Alternative robot-to-cluster assignment (coverage-estimate based).

        Stages 1-2 are the same near/island assignment as
        new_assign_Bots2Clusters; leftover robots are then assigned by
        computing, for every (cluster, free robot) pair, an estimate of
        cells left to cover and picking the pair with the largest estimate.

        NOTE(review): `idx = len(Idx_sorted_)-1` uses the *last position*,
        not `Idx_sorted_[-1]` (the index of the largest estimate) -- this
        looks like a bug; confirm.  Also `nr` is rebound to
        len(bots_unassigned) inside the loop, changing the meaning of the
        later // and % arithmetic.
        """
        nc = len(vis_clusters)
        nr = self.R
        self.clusters_assigned = [[] for c in range(nc)]
        self.near_b2c = [[] for c in range(nc)]
        self.far_b2c = [[] for c in range(nc)]
        self.bots_assigned = [[] for r in range(nr)]
        D = np.zeros((nc,nr))
        self.D_min_idx = np.zeros((nc,nr))
        count_clusters_unassigned = nc
        count_bots_unassigned = nr
        # Stage 1: distance matrix + near (adjacent) assignments.
        for r in range(nr):
            c_near = []
            (rx,ry) = bot_loc[r]
            for c in range(nc):
                dist = abs(rx-vis_clusters[c][:,0]) + abs(ry-vis_clusters[c][:,1])
                idxs = dist.argsort()[:self.n_neighbors]
                d_min = dist[idxs[0]]
                #d_avg = np.average(dist[idxs])
                self.D_min_idx[c][r] = idxs[0]
                self.D[c][r] = d_min
                if d_min==1:
                    c_near.append(c)
            if len(c_near)>0:
                c_near_cells = np.array(self.vis_cells_per_cluster[c_near])
                idxs = c_near_cells.argsort()
                c_near_min = c_near[idxs[0]]
                c_near_max = c_near[idxs[len(idxs)-1]]
                c = 0
                if len(self.clusters_assigned[c_near_min]) == 0:
                    c = c_near_min
                else:
                    c = c_near_max
                self.clusters_assigned[c].append(r)
                self.near_b2c[c].append(r)
                self.bots_assigned[r].append(c)
                count_bots_unassigned += -1
                count_clusters_unassigned += -1
        # NOTE(review): local D is still all zeros here (distances were
        # written into self.D above) -- confirm intended.
        Idx_sorted = D.argsort(axis=None)
        # Assigning island of visible cells which have zero assigned robots
        for idx in Idx_sorted:
            c = idx // nr
            r = idx % nr
            if len(self.clusters_assigned[c]) == 0 and len(self.bots_assigned[r]) == 0 and self.inbetween_vis(r,c)==0:
                self.clusters_assigned[c].append(r)
                self.far_b2c[c].append(r)
                self.bots_assigned[r].append(c)
                count_bots_unassigned += -1
                count_clusters_unassigned += -1
        # If any bots left , then assign according to the algo
        cells_per_cluster = self.vis_cells_per_cluster+self.unexp_cells_per_cluster
        bots_unassigned = []
        for r in range(nr) :
            if len(self.bots_assigned[r]) == 0:
                bots_unassigned.append(r)
        # Effective travel distances: blocked approaches get a huge penalty.
        D_ = np.zeros((nc,nr))
        for c in range(nc):
            for r in range(nr):
                if self.inbetween_vis(r,c)==0:
                    D_[c][r]= self.D[c][r]-1
                else:
                    D_[c][r] = 1000000
        while (count_bots_unassigned != 0):
            nr = len(bots_unassigned)
            cells_2_cover = np.zeros((nc,nr))
            for c in range(nc):
                near_bots = len(self.near_b2c[c])
                for ri in range(nr):
                    r = bots_unassigned[ri]
                    new_far_b2c = self.far_b2c[c] + [r]
                    dist = D_[c][new_far_b2c]
                    idx = np.argsort(dist)
                    cells_2_cover[c][ri] = cells_per_cluster[c]
                    for i in idx :
                        if(i==0):
                            r = new_far_b2c[i]
                            cells_2_cover[c][ri] -= D_[c][r]*(near_bots)
                        else:
                            r1 = new_far_b2c[i-1]
                            r2 = new_far_b2c[i]
                            cells_2_cover[c][ri] -= (D_[c][r2]-D_[c][r1])*(near_bots+i)
            Idx_sorted_ = cells_2_cover.argsort(axis=None)
            idx = len(Idx_sorted_)-1
            c = idx // nr
            i = idx % nr
            r = bots_unassigned[i]
            bots_unassigned.pop(i)
            self.clusters_assigned[c].append(r)
            self.far_b2c[c].append(r)
            self.bots_assigned[r].append(c)
            count_bots_unassigned += -1
            count_clusters_unassigned += -1
    def inbetween_vis(self,r,c):
        """Count visible cells lying between robot r and its closest cell
        of cluster c.

        A cell "counts" when it sits in the same quadrant direction as the
        target (sign-matched dx/dy) and is strictly closer in Manhattan
        distance; axis-aligned targets restrict to the same row/column.
        A return value of 0 means the approach is unobstructed.

        NOTE(review): `copy` and `visible_cells` are not imported/defined
        in this file's visible imports -- presumably provided by the
        `from random_lib import *` star import; confirm.
        """
        vis_array = copy.copy(visible_cells)
        vis_array = np.array(vis_array)
        (rx,ry) = self.bot_loc[r]
        # Closest cell of cluster c to robot r (precomputed in D_min_idx).
        (vx,vy) = vis_clusters[c][int(self.D_min_idx[c][r])]
        dx = rx-vx
        dy = ry-vy
        dx_array = rx-vis_array[:,0]
        dy_array = ry-vis_array[:,1]
        filtered_idxs = (dx*dx_array >= 0) & (dy*dy_array >= 0) & (abs(dx_array)+abs(dy_array) < abs(dx)+abs(dy))
        if(dx==0):
            filtered_idxs = filtered_idxs & (dx_array==0)
        elif(dy==0):
            filtered_idxs = filtered_idxs & (dy_array==0)
        vis_array = vis_array[filtered_idxs]
        return len(vis_array)
    def shortest_distance(self,r,c):
        """Dijkstra shortest path (unit edges) from robot r to cluster c.

        NOTE(review): `grid[i][j] in cluster` compares a *cell value*
        (0.0/0.5/1.0) against an array of (x, y) coordinate pairs, and
        `dist[cell]` indexes the (i, j)-keyed dist map with cluster
        coordinate pairs -- both look inconsistent; this method may be
        unused/dead.  Confirm before relying on it.
        """
        grid, uav_pos, cluster = map, self.bot_loc[r],vis_clusters[c]
        # Create an adjacency list representation of the grid
        adj_list = defaultdict(list)
        for i in range(len(grid)):
            for j in range(len(grid[i])):
                if grid[i][j] in cluster:
                    if i > 0 and grid[i-1][j] != -1:
                        adj_list[(i, j)].append((i-1, j))
                    if i < len(grid) - 1 and grid[i+1][j] != -1:
                        adj_list[(i, j)].append((i+1, j))
                    if j > 0 and grid[i][j-1] != -1:
                        adj_list[(i, j)].append((i, j-1))
                    if j < len(grid[i]) - 1 and grid[i][j+1] != -1:
                        adj_list[(i, j)].append((i, j+1))
        # Initialize the distance and visited arrays
        dist = {(i, j): float('inf') for i in range(len(grid)) for j in range(len(grid[i]))}
        visited = {(i, j): False for i in range(len(grid)) for j in range(len(grid[i]))}
        dist[uav_pos] = 0
        # Create a priority queue to store the nodes to visit
        pq = []
        heapq.heappush(pq, (0, uav_pos))
        while pq:
            current_dist, current_node = heapq.heappop(pq)
            if visited[current_node]:
                continue
            visited[current_node] = True
            # Update the distances of the neighboring nodes
            for neighbor in adj_list[current_node]:
                if dist[neighbor] > current_dist + 1:
                    dist[neighbor] = current_dist + 1
                    heapq.heappush(pq, (dist[neighbor], neighbor))
        # Find the minimum distance from the UAV to the cluster
        min_dist = float('inf')
        for cell in cluster:
            if dist[cell] < min_dist:
                min_dist = dist[cell]
        return min_dist
def unexplored_per_cluster(self):
unexp_cells_per_cluster = [0 for c in range(len(vis_clusters))]
unexp_cluster_per_cluster = [[] for c in range(len(vis_clusters))]
d = 1
for c in range(len(vis_clusters)):
for cell in vis_clusters[c]:
(vx,vy) = cell
xl = int (max (0, vx-d))
xh = int (min (self.Dx, vx+d+1))
yl = int (max (0, vy-d))
yh = int (min (self.Dy, vy+d+1))
for x in range (xl, xh):
for y in range (yl, yh):
if map[y][x] == 0.0:
key = str(x)+'+'+str(y)
unexp_c = self.unexpcell2cluster[key]
if unexp_c not in unexp_cluster_per_cluster[c]:
unexp_cluster_per_cluster.append(unexp_c)
unexp_cells_per_cluster[c] += self.unexp_cells_per_unexpcluster[unexp_c]
return np.array(unexp_cells_per_cluster)
    def make_and_assign_clusters(self,bot_loc):
        """Rebuild visible/unexplored clusters and assign robots to them.

        Stores `bot_loc` (list of (x, y) robot positions), regenerates the
        module-level vis_clusters/unexp_clusters via the clustering
        helpers, counts adjacent unexplored work per visible cluster, and
        runs the greedy robot-to-cluster assignment.
        """
        self.bot_loc = bot_loc
        self.vis_cells_per_cluster, self.viscell2cluster = Vis_Cluster(self.m,self.n).make_clusters()
        self.unexp_cells_per_unexpcluster,self.unexpcell2cluster = Unexp_Cluster(self.m,self.n).make_clusters()
        self.unexp_cells_per_cluster = self.unexplored_per_cluster()
        self.new_assign_Bots2Clusters(bot_loc)
    def init_bot_loc(self,bot_loc):
        """Pin each robot's t=0 position in the solver and collect its
        reachable cells within vis_dist."""
        self.bot_loc = bot_loc
        for r in range (self.R):
            (x,y) = bot_loc[r]
            self.s.add (And (self.X[r][0] == int (x), self.Y[r][0] == int (y))) # Assign the initial x and y coordinates
            self.collect_reachables(r,self.vis_dist) # collect other reachable locations available
##########
    def collect_reachables(self,r,d):
        """Collect cells within Manhattan distance d of robot r that are
        visible (0.5) or covered (1.0), into self.reachable_cells[r]."""
        (rx,ry) = self.bot_loc[r]
        # Clamp the scan window to the grid.
        xl = int (max (0, rx-d))
        xh = int (min (self.Dx, rx+d+1))
        yl = int (max (0, ry-d))
        yh = int (min (self.Dy, ry+d+1))
        for x in range (xl, xh):
            for y in range (yl, yh):
                # Collect all reachable cells from visible cells according to path length T (reachability)
                if ((map[y][x] == 0.5 or map[y][x] == 1.0) and (abs (x - rx) + abs (y - ry) <= d) ) :
                    self.reachable_cells[r].append((x,y))
    def motion_primitive(self):
        """Encode the 5 motion primitives (stay/right/left/up/down) as z3
        constraints linking P (action), X/Y (position) and C (cost).

        NOTE(review): `self.P` is not created in the visible __init__
        (only X, Y, C, Re are); either P is defined further down the class
        or this method is dead/legacy -- confirm before calling.
        """
        for r in range (self.R):
            for t in range (self.T-1):
                self.s.add (And (self.P[r][t] <= 4, self.P[r][t] >= 0)) # Only 5 motion primitives are allowed
                self.s.add (Or (self.C[r][t] == 1 , self.C[r][t] == 3 )) # Only 2 cost values are allowed
                # For robot r at time t , If we choose an allowed value of P then the corresponding cost and next state allowed is defined
                self.s.add(Implies(self.P[r][t] == 0, And(self.X[r][t+1] == self.X[r][t], self.Y[r][t+1] == self.Y[r][t], self.C[r][t] == 3))) # same
                self.s.add(Implies(self.P[r][t] == 1, And(self.X[r][t+1] == self.X[r][t]+1,self.Y[r][t+1] == self.Y[r][t], self.C[r][t] == 1))) # right
                self.s.add(Implies(self.P[r][t] == 2, And(self.X[r][t+1] == self.X[r][t]-1,self.Y[r][t+1] == self.Y[r][t], self.C[r][t] == 1))) # left
                self.s.add(Implies(self.P[r][t] == 3, And(self.X[r][t+1] == self.X[r][t], self.Y[r][t+1] == self.Y[r][t]+1,self.C[r][t] == 1))) # up
                self.s.add(Implies(self.P[r][t] == 4, And(self.X[r][t+1] == self.X[r][t], self.Y[r][t+1] == self.Y[r][t]-1,self.C[r][t] == 1))) # down
def action_cost(self,current_loc,next_loc):
same_cell_cost = 3
different_cell_cost = 1
if current_loc == next_loc:
return same_cell_cost
else:
return different_cell_cost
##########
    def reward(self,r,current_loc,next_loc):
        """Reward for robot r moving to next_loc.

        - Visible cell (0.5): positive reward weighted by WR_Vis if the
          cell belongs to the robot's assigned cluster, else -1000.
        - Covered cell (1.0): proximity-to-frontier reward via near(),
          weighted by WR_Cov.
        - Anything else (unexplored/obstacle): -1000.

        NOTE(review): `self.surroundings` is not defined in the visible
        part of this class -- presumably declared further down; confirm.
        """
        (nx,ny) = (int(next_loc[0]),int(next_loc[1]))
        if(map[ny][nx] == 0.5):
            key = str(nx)+'+'+str(ny)
            c = self.viscell2cluster[key]
            cov = self.surroundings(next_loc,1)
            # return (cov + 1/len(vis_clusters[c]))*self.WR_Vis #cell_age[ny][nx]+
            if(self.bots_assigned[r][0]==c):
                return (cov + 1/len(vis_clusters[c]))*self.WR_Vis #cell_age[ny][nx]+
            else:
                return -1000
        elif(map[ny][nx] == 1.0):
            return self.near(r,current_loc, next_loc)*self.WR_Cov
        else:
            return -1000
    def near(self,r,current_loc,next_loc):
        """Proximity score of next_loc to robot r's assigned cluster.

        Takes the n_neighbors visible cells of the assigned cluster that
        are closest to current_loc and returns k / sum(Manhattan distances
        from next_loc) -- larger means next_loc is nearer the frontier.
        Returns 0 when the assigned cluster is empty.
        """
        (nx,ny) = (next_loc[0],next_loc[1])
        (rx,ry) = (current_loc[0],current_loc[1])
        # if(len(self.visible_cells)==0):
        #     return 0
        # visible_cells = np.array (self.visible_cells)
        if(len(vis_clusters[self.bots_assigned[r][0]])==0):
            return 0
        np_visible_cells = np.array (vis_clusters[self.bots_assigned[r][0]])
        dist = abs (np_visible_cells[:,0] - rx) + abs (np_visible_cells[:,1] - ry)
        idxs = dist.argsort ()[:self.n_neighbors]
        safe_visible_cells = np_visible_cells[idxs]
        k = len(safe_visible_cells)
        total_d = 0
        for loc in safe_visible_cells:
            d = abs (loc[0] - nx) + abs (loc[1] - ny)
            total_d += d
        return k/total_d
##########
# def near(self,r,current_loc,next_loc):
# (nx,ny) = (next_loc[0],next_loc[1])
# (rx,ry) = (current_loc[0],current_loc[1])
# total_w_d = 0
# key = str(rx)+'+'+str(ry)
# k = len(self.nearest_vis_cells.get(key))
# Total_W = 0
# for loc in self.nearest_vis_cells[key]:
# d = abs (loc[0] - nx) + abs (loc[1] - ny)
# (x,y) = (loc[0],loc[1])
# key = str(x)+'+'+str(y)
# common_ratio = self.R/self.vis_common.get(key)
# w = pow(common_ratio,3)
# Total_W += w
# total_w_d += d*w
# return Total_W/total_w_d
##########
    def Visible_cells_common_count(self):
        """Record each robot's nearest visible cells and how many robots
        share each such cell.

        Populates self.nearest_vis_cells (robot position key -> nearest
        cells of its assigned cluster) and self.vis_common (cell key ->
        number of robots that list it among their nearest cells).

        NOTE(review): returns 0 early (aborting the remaining robots) if
        any robot's assigned cluster is empty, and the `key` variable is
        reused for both robot and cell keys -- confirm intended.
        """
        self.vis_common = dict({})
        self.nearest_vis_cells = dict({})
        for r in range(self.R):
            (rx,ry) = self.bot_loc[r]
            key = str(rx)+'+'+str(ry)
            self.nearest_vis_cells[key] = []
            if(len(vis_clusters[self.bots_assigned[r][0]])==0):
                return 0
            visible_cells = np.array (vis_clusters[self.bots_assigned[r][0]])
            dist = abs (visible_cells[:,0] - rx) + abs (visible_cells[:,1] - ry)
            idxs = dist.argsort ()[:self.n_neighbors]
            self.nearest_vis_cells[key] = visible_cells[idxs]
            for cell in self.nearest_vis_cells[key]:
                (x,y) = (cell[0],cell[1])
                key = str(x)+'+'+str(y)
                if self.vis_common.get(key) == None :
                    self.vis_common[key] = 1
                else:
                    self.vis_common[key] += 1
def collision_avoidance(self):
for t in range(self.T-1):
for r1 in range (self.R):
for r2 in range (r1+1,self.R):
# Both x and y coordinates of r1 & r2 at time t+1 cannot be equal
self.s.add (Not( And(self.X[r1][t+1] == self.X[r2][t+1], self.Y[r1][t+1] == self.Y[r2][t+1])))
# Head on collision or Swaping position collision
self.s.add(Not (And(And(self.X[r1][t+1] == self.X[r2][t],self.Y[r1][t+1] == self.Y[r2][t]),And(self.X[r2][t+1] == self.X[r1][t],self.Y[r2][t+1] == self.Y[r1][t]))))
def d_bots(self,r1,r2):
(x1,y1) = self.bot_loc[r1]
(x2,y2) = self.bot_loc[r2]
return abs(x1-x2)+abs(y1-y2)
    def new_collision_avoidance(self):
        """Distance-pruned variant of collision avoidance.

        Only adds collision constraints for robot pairs that are currently
        close enough to possibly collide within one step: same-destination
        constraints when the pair is within Manhattan distance 2, swap
        constraints only when adjacent (distance 1).
        """
        for t in range(self.T-1):
            for r1 in range (self.R):
                for r2 in range(r1+1,self.R):
                    # Robots within distance 2 could reach the same cell at t+1.
                    if(self.d_bots(r1,r2)<=2):
                        self.s.add (Not( And(self.X[r1][t+1] == self.X[r2][t+1], self.Y[r1][t+1] == self.Y[r2][t+1])))
                    # Adjacent robots could swap positions (head-on collision).
                    if(self.d_bots(r1,r2)==1):
                        self.s.add(Not (And(And(self.X[r1][t+1] == self.X[r2][t],self.Y[r1][t+1] == self.Y[r2][t]),And(self.X[r2][t+1] == self.X[r1][t],self.Y[r2][t+1] == self.Y[r1][t]))))
def obstacle_avoidance(self,obst_loc):
self.obst_loc = obst_loc
for r in range (self.R):
for t in range (1,self.T):
for obst in obst_loc:
# Both the x & y coordinates of r at time t cannot be equal to that of obstacle coordinates
self.s.add (Not( And (self.X[r][t] == obst[0], self.Y[r][t] == obst[1])))
# stay within the grid bounds
self.s.add (And (self.X[r][t] < self.Dx, self.X[r][t] >= 0))
self.s.add (And (self.Y[r][t] < self.Dy, self.Y[r][t] >= 0))
    def visit_reachable_cells(self):
        """Constrain each robot to move among its reachable cells and tie
        the reward/cost variables to the chosen transition.

        For every timestep the next position must be one of the robot's
        precomputed ``reachable_cells``; an implication per candidate move
        binds ``Re[r][t]`` and ``C[r][t]`` to that move's reward and cost.
        """
        #self.Visible_cells_common_count()
        for r in range (self.R):
            for t in range (self.T-1):
                # A robot r at time t must choose a cell from all the reachable cells
                self.s.add (Or ([And (self.X[r][t+1] == x, self.Y[r][t+1] == y) for (x,y) in self.reachable_cells[r]]))
                # NOTE(review): `curr` stays at the robot's start location for
                # every t, so rewards are only bound for transitions out of the
                # initial cell -- confirm this is intended for T > 2.
                curr = self.bot_loc[r]
                for next in self.reachable_cells[r]:
                    cx,cy = curr
                    nx,ny = next
                    self.s.add(Implies(And(And (self.X[r][t] == int(cx), self.Y[r][t] == int(cy)),And (self.X[r][t+1] == int(nx), self.Y[r][t+1] == int(ny))),And(self.Re[r][t] == self.reward(r,[cx,cy],[nx,ny]),self.C[r][t] == self.action_cost([cx,cy],[nx,ny]))))
                    #self.s.add(Implies(Or(Not(And (self.X[r][t] == int(cx), self.Y[r][t] == int(cy))),Not(And (self.X[r][t+1] == int(nx), self.Y[r][t+1] == int(ny)))),self.Re[r][t] == -1000))
def check_smt(self):
TC = []
TR = []
for r in range(self.R):
TC+= self.C[r]
TR+= self.Re[r]
self.total_cost = Sum(TC)
self.total_reward = Sum(TR)
self.s.add(self.Obj_func == self.W_Cost*self.total_cost - self.W_Reward*self.total_reward)
h = self.s.minimize(self.Obj_func)
check = str(self.s.check())
return check
def add_visible_cells(self,loc,d):
(rx,ry) = loc
xl = int (max (0, rx-d))
xh = int (min (self.Dx, rx+d+1))
yl = int (max (0, ry-d))
yh = int (min (self.Dy, ry+d+1))
for x in range (xl, xh):
for y in range (yl, yh): # For another condition to select visible cells
if (map[y][x] == 0.0): # and d < self.vis_dist):
self.new_visible_cells.append((x,y))
map[y][x] = 0.5
    def return_all_vars(self):
        """Extract the solved trajectories from the SMT model and update the
        global coverage state.

        Marks every visited cell as covered (1.0) in the global ``map``,
        counts newly covered frontier cells, expands visibility around each
        visited cell, and rebuilds the global ``visible_cells`` frontier.

        Returns (next_start_loc, current_traj, count): the robots' final
        positions for the next horizon, their per-robot trajectories
        (excluding t=0), and how many frontier cells were covered.
        """
        global visible_cells
        #global Unexp_cells
        model = self.s.model()
        next_start_loc = []
        current_traj = []
        self.new_visible_cells = []
        covered_visible_cells = []
        count = 0
        for r in range(self.R):
            bot_traj = []
            for t in range(self.T):
                # Read the solved integer coordinates for robot r at time t.
                rx = int (str (model[self.X[r][t]]))
                ry = int (str (model[self.Y[r][t]]))
                if map[ry][rx] == 0.5 :
                    # A previously visible (frontier) cell got covered.
                    covered_visible_cells.append((rx,ry))
                    #cell_age[ry][rx] = 0
                    count+=1
                if(t>0):
                    bot_traj.append((rx,ry))
                self.add_visible_cells([rx,ry],self.vis_dist)
                if(t==self.T-1):
                    next_start_loc.append((rx,ry))
                map[ry][rx] = 1.0
            current_traj.append(bot_traj)
        # Drop covered cells from the frontier and append the new ones.
        filtered_cells = []
        for cell in visible_cells:
            if cell not in covered_visible_cells:
                filtered_cells.append(cell)
        visible_cells = filtered_cells + self.new_visible_cells
        # new_unexp_cells = []
        # for cell in Unexp_cells:
        #     if cell not in self.new_visible_cells:
        #         new_unexp_cells.append(cell)
        # Unexp_cells = new_unexp_cells
        return next_start_loc,current_traj,count
def surroundings(self,loc,d=1):
(vx,vy) = (int(loc[0]),int(loc[1]))
cov = 0
vis = 0
n = self.Dx
m = self.Dy
for x in range (vx-d, vx+d+1):
for y in range (vy-d, vy+d+1):
if (x==vx and y==vy):
continue
if(x<0 or y<0 or x>=n or y>=m):
cov+=1
else:
if map[y][x]==1:
cov+=1
return cov
#------------------------------------------------------------------------------------------------------------------------
def Init_visible_cells(init_loc, Dx, Dy, d):
    """Initialise the global visibility frontier.

    For every robot start location, mark each unexplored cell within a
    square of radius *d* as visible (0.5 in the global ``map``) and collect
    it in the global ``visible_cells`` list.
    """
    global map
    global visible_cells
    visible_cells = []
    for cx, cy in init_loc:
        x_lo = int(max(0, cx - d))
        x_hi = int(min(Dx, cx + d + 1))
        y_lo = int(max(0, cy - d))
        y_hi = int(min(Dy, cy + d + 1))
        for x in range(x_lo, x_hi):
            for y in range(y_lo, y_hi):
                if map[y][x] == 0.0:
                    visible_cells.append((x, y))
                    map[y][x] = 0.5
# def update_age():
# for cell in visible_cells:
# (x,y) = cell
# cell_age[y][x]+=1
def make_map(R, Dx, Dy, init_pos, obst_pos):
    """Create the global occupancy map.

    Cell values: 0.0 = unexplored, 1.0 = covered (robot start cells),
    -1.0 = obstacle.  *R* is unused but kept for caller compatibility.
    """
    global map
    map = np.full((Dy, Dx), 0.0)
    for x, y in init_pos:
        map[y][x] = 1.0
    for x, y in obst_pos:
        map[y][x] = -1.0
def main(R,T,Dx,Dy,plots_dir,wp_dir,init_pos,obst_pos,vis = False):
    """Run the receding-horizon SMT coverage loop until the grid is covered.

    Each iteration solves a T-step plan, appends every robot's waypoints
    (one "y x" line per step) to its file in *wp_dir*, and advances the
    start positions.  Returns (number_of_horizons, total_solver_seconds).
    """
    # num_obst = int (Dx*Dy/10)
    # obst_pos = [] # random_obst_positions (Dx, Dy, num_obst)
    # init_pos,map = random_init_positions (Dx, Dy, R, obst_pos)
    make_map(R,Dx,Dy,init_pos,obst_pos)
    vis_dist = 1
    cells_need_to_be_covered = Dx*Dy-len(obst_pos)
    cells_covered = R  # start cells count as covered
    n_neighbors = 5
    Init_visible_cells(init_pos,Dx,Dy,vis_dist)
    #global cell_age
    # cell_age = np.full ((Dy,Dx), 0)
    # update_age()
    # One waypoint file per robot; write the initial position first.
    files = []
    for r in range (R):
        filename = 'robot_' + str (r)
        filepath = os.path.join(wp_dir, filename)
        f = open (filepath, 'w+')
        files.append (f)
        x,y = init_pos[r]
        s = str (y) + " " + str (x) + "\n"
        files[r].write (s)
        files[r].flush ()
    k=0
    total_time = 0
    while True:
        # Done when everything is covered and no visibility frontier remains.
        if cells_covered>=cells_need_to_be_covered and len(visible_cells)==0:
            break
        k+=1
        tic = time.time()
        smt = SMT(R,T,Dx,Dy,vis_dist,n_neighbors)
        smt.make_and_assign_clusters(init_pos)
        toc1 = time.time()
        smt.init_bot_loc(init_pos)
        #smt.motion_primitive()
        smt.new_collision_avoidance()
        #smt.obstacle_avoidance(obst_pos)
        toc2 = time.time()
        smt.visit_reachable_cells()
        toc3 = time.time()
        if(smt.check_smt()=='unsat'):
            break
        tocF = time.time()
        # Per-phase solver timings (clustering / collision / reach / check).
        dt1 = round(toc1 - tic,3)
        dt2 = round(toc2 - toc1,3)
        dt3 = round(toc3 - toc2,3)
        dt4 = round(tocF - toc3,3)
        dt = round(tocF - tic,3)
        total_time+= dt
        init_pos,current_traj,count = smt.return_all_vars()
        cells_covered+=count
        no_visible = len(visible_cells)
        no_uncovered_cells = Dx*Dy -cells_covered-no_visible
        print("For horizon {} : Total time taken : {} sec , Total cells covered : {} , Visible cells : {} , Uncovered cells : {}\n".format(k,dt,cells_covered,no_visible,no_uncovered_cells))
        if dt<60:
            print("            Total time taken : {} sec, cluster : {} sec, collision : {} sec, visit_reach : {} sec, SMT_check : {} sec\n".format(dt,dt1,dt2,dt3,dt4))
        else:
            print("            Total time taken : {} min, cluster : {} min, collision : {} min, visit_reach : {} sec, SMT_check : {} min\n".format(dt/60,dt1/60,dt2/60,dt3/60,dt4/60))
        #update_age()
        # Append this horizon's trajectory to each robot's waypoint file.
        for r in range(R):
            for loc in current_traj[r]:
                x,y = loc
                s = str (y) + " " + str (x) + "\n"
                files[r].write (s)
                files[r].flush ()
    if (cells_covered<cells_need_to_be_covered):
        print("SMT not Satisfied")
    print("Total no of horizons needed : {} \n".format(k))
    if total_time<60:
        print("Total Time taken : {} sec\n".format(total_time))
    else:
        print("Total Time taken : {} min\n".format(total_time/60))
    return k,round(total_time,3)
import argparse

# Command-line interface.
parser = argparse.ArgumentParser()
parser.add_argument('-r', dest='num_robots', type=int, help='Number of robots')
parser.add_argument('-d', dest='dimension', type=int, help='Size of workspace')
parser.add_argument('-t', dest='tests', type=int, help='No of tests')
parser.add_argument('-it', default=1, dest='init_test', type=int, help='Initial test location')
parser.add_argument('-v', dest='vis', type=int, help='should visualize or not')
parser.add_argument('-f', dest='filename', type=str, help='Name of the file to save')
args = parser.parse_args()
D = int(args.dimension)
R = int(args.num_robots)
it = int(args.init_test)
# Default output directory name, e.g. "10x10_4bots"; overridden by -f.
filename = str(D)+'x'+str(D)+'_'+str(R)+'bots'
init_loc_file = "INITIAL_LOCATIONS-"+str(1)
# Bug fix: the original tested `args.filename == ""`, so a name passed with
# -f was never used (argparse defaults to None). Honour any non-empty -f.
if args.filename:
    filename = args.filename
if not os.path.isdir (filename):
    os.mkdir (filename)
Dx = D
Dy = D
T = 2  # planning horizon per SMT call
do_test = 0
vis = False
if args.vis == 1:
    vis = True
if(args.tests):
    do_test = args.tests
# Either run a single test (-t absent) or a batch of `do_test` tests.
if (do_test==0):
    # Single run: read the initial robot locations for test index `it`.
    test_dir = os.path.join(filename,'TEST'+str(it))
    if not os.path.isdir (test_dir):
        os.mkdir (test_dir)
    plots_dir = os.path.join(test_dir, 'plots')
    if not os.path.isdir (plots_dir):
        os.mkdir (plots_dir)
    wp_dir = os.path.join(test_dir, 'WPts')
    if not os.path.isdir (wp_dir):
        os.mkdir (wp_dir)
    path = os.path.join(init_loc_file,str(D)+'x'+str(D)+'_'+str(R),'TEST-'+str(it),'robot_init_locs')
    file = open(path,'r')
    init_pos = []
    obst_pos = []
    for r in range(R):
        NewLine = file.readline()
        y,x = int (NewLine.split(' ')[0]), int (NewLine.split(' ')[1])
        init_pos.append((x,y))
    k,total_time = main(R,T,Dx,Dy,plots_dir,wp_dir,init_pos,obst_pos,vis)
#######################################
else:
    # Batch mode: run `tests` experiments and report mean/stdev statistics.
    tests = do_test
    Avg_k = 0
    Avg_time = 0
    K = []
    Time = []
    for i in range(tests):
        print("TEST : ",i+1)
        test_dir = os.path.join(filename,'TEST'+str(i+1))
        if not os.path.isdir (test_dir):
            os.mkdir (test_dir)
        plots_dir = os.path.join(test_dir, 'plots')
        if not os.path.isdir (plots_dir):
            os.mkdir (plots_dir)
        wp_dir = os.path.join(test_dir, 'WPts')
        if not os.path.isdir (wp_dir):
            os.mkdir (wp_dir)
        path = os.path.join(init_loc_file,str(D)+'x'+str(D)+'_'+str(R),'TEST-'+str(i+1),'robot_init_locs')
        file = open(path,'r')
        init_pos = []
        for r in range(R):
            NewLine = file.readline()
            y,x = int (NewLine.split(' ')[0]), int (NewLine.split(' ')[1])
            init_pos.append((x,y))
        k,total_time = main(R,T,Dx,Dy,plots_dir,wp_dir,init_pos,obst_pos=[])
        K.append(k)
        Time.append(total_time)
    for i in range(tests):
        if (Time[i]< 60):
            print("TEST {} --------------> No of horizons : {} ,Computation time : {} sec\n".format(i+1,K[i],Time[i]))
        else:
            print("TEST {} --------------> No of horizons : {} ,Computation time : {} min\n".format(i+1,K[i],Time[i]/60))
    Avg_k = mean(K)
    Avg_time = mean(Time)
    sd_k = stdev(K)
    sd_time = stdev(Time)
    print("For {}x{} grid & {} robots in {} tests ------>>>> Average no of horizons needed : {} , Standard Deviation : {}\n".format(D,D,R,tests,Avg_k,sd_k))
    if (Avg_time< 60):
        # Bug fix: this line printed the computation time under the label
        # "Average no of horizons needed"; corrected to the right label.
        print("For {}x{} grid & {} robots in {} tests ------>>>> Average Computation time needed : {} sec, Standard Deviation : {} sec\n".format(D,D,R,tests,Avg_time,sd_time))
    else:
        print("For {}x{} grid & {} robots in {} tests ------>>>> Average Computation time needed : {} min, Standard Deviation : {} sec\n".format(D,D,R,tests,Avg_time/60,sd_time))
| Luckykantnayak/uav-project-2 | lucky_smt_v5.py | lucky_smt_v5.py | py | 38,555 | python | en | code | 0 | github-code | 36 |
19192518817 | # -*- coding: utf-8 -*-
"""
Created on Tue Feb 16 21:51:03 2021
@author: jyotm
"""
import numpy as np
from math import sqrt
import math
import warnings
warnings.filterwarnings("ignore")
test = True
#this is a continuation of programming drill exercises and quantum simulator of 4.2.1
#we are going to implement the list of eigen values of observable,
# the probability that state will transition to each one of the eigenstate
if test:
numStates = 2
else:
#take input from the user, number of discrete values or total states
numStates = int(input("enter the total number of quantum system: "))
if test:
# inputStates = [complex(-1,0), complex(-1,-1)]
inputStates = [complex(1/np.sqrt(2),0), complex(0,1/sqrt(2))]
else:
inputStates = []
for ind in range(numStates):
inputStates = complex(input(f"enter the amplitude associated with basis state x{ind} for start state"))
inputStates = np.array(inputStates).T
print(f"input states are {inputStates}")
#input observable
#create matrix of appropriate size
A = np.zeros((numStates,numStates),dtype = complex)
if test:
# A[0][0] = -1
# A[0][1] = - 1j
# A[1][0] = 1j
# A[1][1] = 1
A[0][0] = 1
A[0][1] = -1j
A[1][0] = 1j
A[1][1] = 2
print(f'The observable matrix is \n {A} \n')
else:
for i in range(numStates):
for j in range(numStates):
A[i][j] = complex(input(f"enter your Ovversation matrix value at {i} {j}"))
#check whether the matrix is hermitian or not
#print(A,'\n',A.conj().T)
assert (A == A.conj().T).all()
#get the eigenvalues of corresponding observable
eigenstates = np.linalg.eigh(A)
eigenValues = eigenstates[0]
eigenVectors = eigenstates[1]
print(f"the eigenvalues of the observable is {eigenValues}")
#now we will find out the probability of collapsing input state into each eigenvectors,
probabilities = np.zeros((numStates))
for i in range(numStates):
#probability of input state collapsing into eigen vector is the square of dot product of both (projection of input state in eigne vector)
eigenVector = np.array(eigenVectors[:,i],dtype = complex)
probabilities[i] = np.square(np.dot(inputStates,eigenVector))
print(f"the probability for each eigenvector measuring is: {probabilities}") | jeromepatel/Quantum-Computing-for-Computer-Scientists | Programming_drill_4_3_1.py | Programming_drill_4_3_1.py | py | 2,316 | python | en | code | 3 | github-code | 36 |
4593625107 | import cv2
import json
import numpy as np
import matplotlib.pyplot as plt
from itertools import count
def put_speed_on_video(mp4_path, pred_text_path, act_text_path):
    """Overlay predicted speed and prediction error on each video frame and
    write the annotated video to ./docs/demos/demo.mp4."""
    predictions = np.around(np.loadtxt(pred_text_path), decimals=1)
    # Ground truth is shifted by one frame to align with the predictions.
    ground_truth = np.around(np.loadtxt(act_text_path), decimals=1)[1:]
    capture = cv2.VideoCapture(mp4_path)
    capture.set(1, 1)
    font = cv2.FONT_HERSHEY_SIMPLEX
    writer = cv2.VideoWriter('./docs/demos/demo.mp4', 0x7634706d, 20, (640, 480))
    frame_idx = 0
    while frame_idx < len(predictions):
        ok, frame = capture.read()
        if not ok:
            break
        speed = predictions[frame_idx]
        actual = ground_truth[frame_idx]
        cv2.putText(frame,
                    f'Speed (m/s): {speed}',
                    (50, 50),
                    font,
                    0.7,
                    (242, 23, 161),
                    2,
                    cv2.LINE_4)
        cv2.putText(frame,
                    f'Error: {round(speed - actual, 1)}',
                    (50, 80),
                    font,
                    0.7,
                    (82, 51, 255),
                    2,
                    cv2.LINE_4)
        writer.write(frame)
        if cv2.waitKey(1) & 0xFF == ord('q'):
            break
        frame_idx += 1
    capture.release()
    writer.release()
    cv2.destroyAllWindows()
def parse_logs(log_file_path):
    """Read a training log (one dict literal per line) and return the
    per-epoch train and eval losses as two parallel lists."""
    train_losses, eval_losses = [], []
    with open(log_file_path, 'r') as log_file:
        for raw_line in log_file:
            # The logs use single-quoted Python dict repr; convert to JSON.
            record = json.loads(raw_line.replace("\'", "\""))
            train_losses.append(record['train_epoch_loss'])
            eval_losses.append(record['eval_epoch_loss'])
    return train_losses, eval_losses
def graph_loss(log_file_path_farn, log_file_path_pwc):
    """Plot train/eval loss curves for the Farneback and PWC runs and save
    the figure to ./docs/readme_media/loss.png."""
    farn_train, farn_eval = parse_logs(log_file_path_farn)
    pwc_train, pwc_eval = parse_logs(log_file_path_pwc)
    curves = [
        (farn_train, 'Farneback Train Loss'),
        (farn_eval, 'Farneback Eval Loss'),
        (pwc_train, 'PWC Train Loss'),
        (pwc_eval, 'PWC Eval Loss'),
    ]
    with plt.style.context('seaborn-muted'):
        _, ax = plt.subplots(figsize=(20, 6))
        # parse_logs returns parallel lists, so every curve of a run shares
        # the same epoch axis.
        for losses, label in curves:
            ax.plot(range(1, len(losses) + 1), losses, alpha=0.7, linewidth=3, label=label)
        ax.set_xticks(range(1, len(pwc_train) + 1))
        ax.set_xlabel('Epochs')
        ax.set_ylabel('MSE Loss')
        ax.legend()
        plt.savefig('./docs/readme_media/loss.png')
if __name__ == '__main__':
    # Generate the annotated demo video; uncomment to regenerate the loss plot.
    put_speed_on_video('./data/train/train.mp4', './docs/demos/pred_test.txt', './data/train/train.txt')
    # graph_loss('./training_logs/farneback.log', './training_logs/pwc.log')
| antoninodimaggio/Voof | demo_utils.py | demo_utils.py | py | 2,797 | python | en | code | 65 | github-code | 36 |
74062226345 | import argparse
from fauxcaml import build
def create_parser():
    """Build the command-line parser for the fauxcamlc compiler."""
    arg_parser = argparse.ArgumentParser(
        prog="fauxcamlc",
        description="Compiles an OCaml source file to an x86-64 executable.",
        epilog="project homepage: https://github.com/eignnx/fauxcaml",
    )
    # Positional argument: the OCaml source file to compile.
    arg_parser.add_argument(
        "source_file", metavar="SRC", type=str, help="the file to compile"
    )
    # Optional output path; defaults to None (derived later by the build step).
    arg_parser.add_argument(
        "-o",
        dest="exe_file",
        metavar="EXE",
        type=str,
        default=None,
        help="the name of the executable to create",
    )
    return arg_parser
if __name__ == "__main__":
ap = create_parser()
args = ap.parse_args()
build.compile_from_source_file(args.source_file, args.exe_file)
| eignnx/fauxcaml | fauxcaml/__main__.py | __main__.py | py | 757 | python | en | code | 2 | github-code | 36 |
18252831721 | from typing import List
class Solution:
    def reconstructQueue(self, people: List[List[int]]) -> List[List[int]]:
        """Rebuild the queue from (height, people-in-front) pairs.

        Sort tallest-first (ties by smaller k first), then insert each
        person at index k: every earlier insertion involves only people of
        greater-or-equal height, so k stays correct.
        """
        queue = []
        people.sort(key=lambda person: (-person[0], person[1]))
        for person in people:
            queue.insert(person[1], person)
        return queue
# Smoke test for the solution above (LeetCode 406 sample case).
solution = Solution()
people = [[7,0],[4,4],[7,1],[5,0],[6,1],[5,2]]
assert solution.reconstructQueue(people) == [[5,0],[7,0],[5,2],[6,1],[4,4],[7,1]], "Should be [[5,0],[7,0],[5,2],[6,1],[4,4],[7,1]]"
26635576564 | angka_angka = [2, 3, 5, 5, 4, 2, 6, 5, 7, 8, 3]
# Remove duplicates while keeping the first-seen order.
angka_angka = [2, 3, 5, 5, 4, 2, 6, 5, 7, 8, 3]
angka_unik = []
for nilai in angka_angka:
    if nilai in angka_unik:
        continue
    angka_unik.append(nilai)
print(angka_unik)
# Unpacking a list into separate variables.
koordinat = [1, 2, 3]
x, y, z = koordinat
print(z)
# Unpacking a tuple works the same way.
koordinat = (4, 5, 6)
a, b, c = koordinat
print(a)
# Read a phone number and spell out each digit in Indonesian.
ponsel = input("Masukan nomor ponsel: ")
digits_mapping = {
    "1": "satu",
    "2": "dua",
    "3": "tiga",
    "4": "empat",
    "5": "lima",
    "6": "enam",
    "7": "tujuh",
    "8": "delapan",
    "9": "sembilan",
    "0": "nol"
}
output = ""
for ch in ponsel:
    # Characters that are not digits map to "!".
    output += digits_mapping.get(ch, "!") + " "
print(output)
| Noorwahid717/PythonOop | list.py | list.py | py | 689 | python | jv | code | 0 | github-code | 36 |
12573047520 | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def invertTree(self, root):
"""
:type root: TreeNode
:rtype: TreeNode
"""
if root == None:
return None
container = [root]
while container:
node = container.pop(0)
temp = node.right
node.right = node.left
node.left = temp
if node.left:
container.append(node.left)
if node.right:
container.append(node.right)
return root
'''
class Solution(object):
def invertTree(self, root):
"""
:type root: TreeNode
:rtype: TreeNode
"""
if root:
if root.left or root.right:
temp = root.left
root.left = root.right
root.right = temp
self.invertTree(root.left)
self.invertTree(root.right)
return root
else:
return root
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def invertTree(self, root):
"""
:type root: TreeNode
:rtype: TreeNode
"""
if root:
temp = root.left
root.left = self.invertTree(root.right)
root.right = self.invertTree(temp)
return root
'''
| AG-Systems/programming-problems | Leetcode/Invert-Binary-Tree.py | Invert-Binary-Tree.py | py | 1,657 | python | en | code | 10 | github-code | 36 |
14581993222 | # -*- coding: utf-8 -*-
# @Author : DevinYang(pistonyang@gmail.com)
import numpy as np
import random
from torchtoolbox.transform import *
from torchtoolbox.transform.functional import to_tensor
# Full augmentation pipeline exercised by the test below; p=1 forces every
# probabilistic transform to fire on each call.
trans = Compose([
    # CV2 transforms
    Resize(500),
    CenterCrop(300),
    Pad(4),
    RandomCrop(255, 255),
    RandomHorizontalFlip(p=1),
    RandomVerticalFlip(p=1),
    RandomResizedCrop(100),
    ColorJitter(0.2, 0.2, 0.2),
    RandomRotation(15),
    RandomAffine(0),
    RandomPerspective(p=1),
    RandomGaussianNoise(p=1),
    RandomPoissonNoise(p=1),
    RandomSPNoise(p=1),
    Cutout(p=1),
    ToTensor(),
    # Tensor transforms
    Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
    RandomErasing(p=1),
])
def _genener_fake_img(size=None):
if size is None:
size = (400, 400, 3)
return np.random.randint(0, 255, size=size, dtype='uint8')
def test_transform():
    # Run a random image through the full augmentation pipeline; the test
    # passes as long as no transform raises.
    img = _genener_fake_img()
    trans(img)
| PistonY/torch-toolbox | tests/test_transform.py | test_transform.py | py | 960 | python | en | code | 409 | github-code | 36 |
17559817941 | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import turtle
def koch(size, n):
    """Draw one Koch-curve segment of length *size* at recursion depth *n*."""
    if n == 0:
        turtle.fd(size)
        return
    # Replace the segment with four sub-segments turned by 0/60/-120/60 deg.
    for angle in (0, 60, -120, 60):
        turtle.left(angle)
        koch(size / 3, n - 1)
def main():
    # Set up a 600x600 window and move to the start point without drawing.
    turtle.setup(600,600)
    turtle.penup()
    turtle.goto(-200,100)
    turtle.pendown()
    turtle.pensize(2)
    turtle.hideturtle()
    for i in range(3): # three Koch curves form the snowflake's triangle
        koch(100,3)
        turtle.right(180)
    turtle.done()
main()
40962236742 | #!/usr/bin/env python
# _*_ coding:utf-8 _*_
import json
from flask import Flask,request
from base.base import *
app = Flask(__name__)
@app.before_request
def before_request():
    """Serve a cached search result before hitting the view function.

    Returning a value here short-circuits the normal request handling.
    Falls through (refreshing the cache) when ``ran(5)`` is falsy.
    """
    if request.method == 'POST' and request.form.get("name"):
        name=request.form.get("name")
        if existfile(name):
            if ran(5):
                return readcache(name)
    elif request.args.get("name"):
        name = request.args.get("name")
        if existfile(name):
            if ran(5):  # per original note: ~5% chance to refresh the cache instead
                return readcache(name),{"Content-Type":"application/json","server":"qq","time":"Hello"}
@app.after_request
def after_request(environ):
    """Write the response body to the file cache, keyed by the requested
    video name, then pass the response through unchanged."""
    payload = environ.data.decode('UTF-8')
    if request.method == 'POST' and request.form.get("name"):
        writecache(request.form.get("name"), payload)
    elif request.args.get("name"):
        writecache(request.args.get("name"), payload)
    return environ
@app.route('/')
def hello_world():
    """Index route: document how to call the search API."""
    usage = '调用方式/api?name=视频名称[GET|POST] return JSON'
    return usage
@app.route('/api',methods=['GET','POST'])
def api():
    """Search endpoint: look up the requested video name via run() and
    return the result serialized as JSON."""
    if request.method=='POST':
        name=request.form.get("name")
        data=json.dumps(run(name))
        return data
    else:
        name = request.args.get("name")
        data=run(name)
        jsondata = json.dumps(data)
        # GET responses also carry explicit content-type headers.
        return jsondata,{"Content-Type":"application/json","server":"qq"}
if __name__ == '__main__':
    # Local development server; switch to the 0.0.0.0 line for deployment.
    app.run(host='127.0.0.1',port=8080,threaded=True,debug=True)
    #app.run(host='0.0.0.0', port=8081, threaded=True, debug=False)
21088767071 | from structures.list import List
__all__ = ['Queue']
class Queue:
    """FIFO queue backed by the project ``List`` structure.

    ``enqueue`` appends at the tail, ``dequeue`` removes from the head, and
    the queue refuses to grow beyond ``max_size`` elements.
    """

    def __init__(self):
        self.max_size = 100
        self._list = List()

    def enqueue(self, value):
        """Add *value* at the end of the queue; raise on overflow.

        The capacity is checked before appending so a failed enqueue does
        not leave the extra element behind (the original appended first and
        raised afterwards).
        """
        if len(self) >= self.max_size:
            raise Exception('Queue overflow!')
        self._list.append(value)

    def dequeue(self):
        """Remove and return the first element of the queue."""
        val = self._list[0]
        del self._list[0]
        return val

    def __len__(self):
        return len(self._list)

    def __str__(self):
        # Bug fix: the original __str__ dequeued every element, so merely
        # printing the queue destroyed its contents. Render by index instead.
        text = '< '
        for i in range(len(self._list)):
            text += str(self._list[i])
            text += ' '
        return text
if __name__ == '__main__':
    # Small manual smoke test of the queue operations.
    q = Queue()
    q.enqueue(10)
    q.enqueue(20)
    q.enqueue(40)
    q.enqueue(50)
    q.dequeue()
    q.enqueue(150)
    print(q)
| AlekseySh/computer_science | structures/queue.py | queue.py | py | 861 | python | en | code | 0 | github-code | 36 |
12829538864 | import json
class Participant:
    """Tracks one participant's scoring statistics.

    Custom classes must be reduced to dicts/lists to be JSON-serializable,
    which is what ``toJSON`` does via ``__dict__``.
    """

    def __init__(
        self,
        points=0,
        total_points=0,
        problems_solved=0,
        easy=0,
        medium=0,
        hard=0,
        won=0,
        first=0,
    ) -> None:
        self.points = points
        self.total_points = total_points
        self.problems_solved = problems_solved  # also days committed
        self.easy = easy
        self.medium = medium
        self.hard = hard
        self.won = won
        self.first = first

    def toJSON(self):
        """Serialize this participant's attributes as pretty-printed JSON."""
        return json.dumps(self, default=lambda obj: obj.__dict__, sort_keys=True, indent=2)

    def to_string(self):
        """Return a human-readable multi-line summary of the stats."""
        lines = [
            "Total Points: " + str(self.total_points),
            "Problems Solved: " + str(self.problems_solved),
            "Easy Problems Solved: " + str(self.easy),
            "Medium Problems Solved: " + str(self.medium),
            "Hard Problems Solved: " + str(self.hard),
            "Competitions Won: " + str(self.won),
            "First Submissions: " + str(self.first),
        ]
        return "".join("\n" + line for line in lines)

    def get_points(self):
        return self.points

    def clear_points(self):
        self.points = 0

    def update_win(self):
        self.won += 1

    def update_stats(self, difficulty: str, points_recieved: int, was_first: bool):
        """Record one solved problem of the given difficulty."""
        counter_attr = {"hard": "hard", "med": "medium", "easy": "easy"}.get(difficulty)
        if counter_attr is not None:
            setattr(self, counter_attr, getattr(self, counter_attr) + 1)
        self.points += points_recieved
        self.problems_solved += 1
        self.total_points += points_recieved
        if was_first:
            self.first += 1

    def get_badge_title(self):
        """Pick the participant's badge based on accumulated stats."""
        PROBLEM_THRESHOLD = 20
        POINT_THRESHOLD = 100
        DIFFICULTY_PERCENTAGE_THRESHOLD = 45

        badge_title = "No badge... Do some problems to earn a badge!"
        if self.problems_solved < PROBLEM_THRESHOLD:
            return badge_title

        def pct(amount):
            # Share of solved problems at a given difficulty, in percent.
            return (amount / self.problems_solved) * 100

        if self.won >= PROBLEM_THRESHOLD:
            badge_title = "🥇 *Standing on the shoulder of giants. And your hard work.*"
        elif self.first >= PROBLEM_THRESHOLD:
            badge_title = (
                "💨 *Well, would you look at the time. Lack there of obviously.*"
            )
        elif pct(self.hard) >= DIFFICULTY_PERCENTAGE_THRESHOLD:
            badge_title = "🏆 *The highest honor. Not using Stack Overflow.*"
        elif pct(self.medium) >= DIFFICULTY_PERCENTAGE_THRESHOLD:
            badge_title = "🍪 *Here's a cookie for all your efforts.*"
        elif pct(self.easy) >= DIFFICULTY_PERCENTAGE_THRESHOLD:
            badge_title = "🐒 *If rock and monke, then create fire.*"
        elif self.total_points >= POINT_THRESHOLD:
            badge_title = "🦾 *Point King*"
        elif self.problems_solved >= PROBLEM_THRESHOLD:
            badge_title = (
                "👨🌾 *Living the simple life. Eat. Solve a programming problem. Sleep.*"
            )
        return badge_title
| misslame/BroncoderBot | participant_data_handling/participant.py | participant.py | py | 3,927 | python | en | code | 9 | github-code | 36 |
30467610617 | import collections
class Solution:
    def accountsMerge(self, accounts: List[List[str]]) -> List[List[str]]:
        """Merge accounts that share an email (LeetCode 721).

        Builds an undirected graph whose nodes are (name, email) pairs,
        linking consecutive emails of the same account, then collects each
        connected component via BFS as one merged account of the form
        [name, sorted emails...].
        """
        node_to_neighbor = {}
        # build graph: link each email to the next one in the same account
        for account in accounts:
            name = account[0]
            for i in range(1, len(account)):
                cur_email = account[i]
                cur = UGNode(name, cur_email)
                if cur not in node_to_neighbor:
                    node_to_neighbor[cur] = []
                if i == len(account) - 1:
                    continue  # last email has no successor to link
                after_email = account[i + 1]
                after = UGNode(name, after_email)
                node_to_neighbor[cur].append(after)
                if after not in node_to_neighbor:
                    node_to_neighbor[after] = []
                node_to_neighbor[after].append(cur)
        result = []
        # walk graph: each unvisited node seeds one connected component
        visited = set()
        for email, neighbor in node_to_neighbor.items():
            start = email
            if start in visited:
                continue
            group = self.bfs(node_to_neighbor, start, visited)
            res = []
            res.append(start.name)
            res.extend(sorted(group))
            result.append(res)
        return result

    def bfs(self, graph, start, visited):
        """Collect the email addresses of *start*'s connected component,
        marking every reached node in *visited*."""
        emails = set()
        queue = collections.deque()
        queue.append(start)
        while len(queue) > 0:
            cur = queue.popleft()
            if cur in visited:
                continue
            visited.add(cur)
            emails.add(cur.email)
            # children
            if cur in graph:
                neighbors = graph[cur]
            else:
                neighbors = []
            for neighbor in neighbors:
                queue.append(neighbor)
        return emails
class UGNode:
    """Graph node identified by an (owner name, email) pair.

    Equality and hashing are defined on the pair so nodes can live in sets
    and serve as dictionary keys.
    """

    def __init__(self, name, email):
        self.name = name
        self.email = email

    def _key(self):
        # Identity tuple shared by __hash__ and __eq__.
        return (self.name, self.email)

    def __hash__(self):
        return hash(self._key())

    def __eq__(self, other):
        return self._key() == other._key()
| dundunmao/LeetCode2019 | 721. Accounts Merge.py | 721. Accounts Merge.py | py | 2,068 | python | en | code | 0 | github-code | 36 |
43046784986 | from datetime import datetime
from discord.ext import commands
import discord
from discordbot.errors import ErrorMessage
class UserInfo(commands.Cog):
    """Discord cog exposing user information commands (messages are in German)."""
    def __init__(self, bot):
        self.bot = bot
        self.color = 0xffffff
    @commands.command(
        brief="Erhalte Benutzerinfos",
        description="Erhalte den Standardavatar, Avatar und das Alter eines Discordaccounts",
        aliases=["avatar", "defaultavatar", "accountage"],
        help="Benutze /userinfo <User> und du erhältst Informationen über diesen Discord Account",
        usage="<User>"
    )
    async def userinfo(self, ctx, user: discord.User):
        """Reply with an embed showing the account's id, creation date/age,
        default avatar link, and avatar thumbnail."""
        # Account age as a timedelta from the creation timestamp.
        d = datetime.now()-user.created_at
        await ctx.sendEmbed(
            title="Benutzerinformationen",
            description=f"Infos über den Benutzer {user.mention}",
            fields=[
                ("ID", str(user.id)),
                ("Account erstellt am", f"<t:{int(datetime.timestamp(user.created_at))}>"),
                ("Account erstellt vor", f"{d.days} Tag(en)"),
                ("Standardavatar", f"[{user.default_avatar}]({user.default_avatar_url})"),
            ],
            inline=False,
            thumbnailurl=str(user.avatar_url))
    @commands.command(
        brief='Stalke musikhörende Leute',
        description='Erhalte Links zu dem Song, welcher jemand gerade hört',
        aliases=[],
        help="Benutze /usersong <Member> um den Song zu erhalten",
        usage="<Member>"
    )
    @commands.guild_only()
    async def usersong(self, ctx, member: discord.Member):
        """Reply with an embed linking the Spotify track the member is
        currently listening to; raises ErrorMessage when no song is found."""
        found = False
        for activity in member.activities:
            # Only listening activities carry track metadata.
            if str(activity.type) == "ActivityType.listening":
                try:
                    await ctx.sendEmbed(title="Spotify Song", fields=[
                        ("Titel", activity.title),
                        ("Künstler", activity.artist),
                        ("Link", ("[Spotify](https://open.spotify.com/track/"+activity.track_id+")")),
                        ("Benutzer", member.mention)])
                except AttributeError:
                    # Listening activity without Spotify fields (e.g. other apps).
                    raise ErrorMessage(
                        message="Scheinbar hört dieser Benutzer keinen richtigen Song.")
                found = True
        if not found:
            raise ErrorMessage(message="Dieser Benutzer hört keinen Song!")
def setup(bot):
    # discord.py extension entry point: register the cog with the bot.
    bot.add_cog(UserInfo(bot))
| AlexeiSur/bot12345 | discordbot/botcmds/userinfo.py | userinfo.py | py | 2,397 | python | de | code | 0 | github-code | 36 |
33512505076 | # -*- coding: utf8 -*-
from collective.contact.core.behaviors import IRelatedOrganizations
from collective.contact.core.testing import INTEGRATION
from ecreall.helpers.testing.base import BaseTest
from z3c.relationfield.relation import RelationValue
from zope.component import getUtility
from zope.interface import alsoProvides
from zope.intid.interfaces import IIntIds
import unittest
class TestSearch(unittest.TestCase, BaseTest):
    """Tests related organizations"""

    layer = INTEGRATION

    def setUp(self):
        # Resolve the directory fixture objects used by the tests.
        super(TestSearch, self).setUp()
        self.portal = self.layer['portal']
        self.mydirectory = self.portal['mydirectory']
        self.armeedeterre = self.mydirectory['armeedeterre']
        self.corpsa = self.armeedeterre['corpsa']
        self.divisionalpha = self.corpsa['divisionalpha']
        self.divisionbeta = self.corpsa['divisionbeta']

    def test_related_searchable_text(self):
        # A related organization's words must flow into SearchableText.
        pc = self.portal.portal_catalog
        index = pc._catalog.getIndex("SearchableText")
        rid = pc(UID=self.divisionalpha.UID())[0].getRID()
        indexed = index.getEntryForObject(rid, default=[])
        self.assertListEqual(indexed, ['armee', 'de', 'terre', 'corps', 'a', 'division', 'alpha'])
        intids = getUtility(IIntIds)
        # Relate divisionbeta to divisionalpha and reindex.
        alsoProvides(self.divisionalpha, IRelatedOrganizations)
        self.divisionalpha.related_organizations = [
            RelationValue(intids.getId(self.divisionbeta)),
        ]
        self.divisionalpha.reindexObject()
        indexed = index.getEntryForObject(rid, default=[])
        self.assertListEqual(indexed, ['armee', 'de', 'terre', 'corps', 'a', 'division', 'beta', 'armee', 'de',
                                       'terre', 'corps', 'a', 'division', 'alpha'])
| collective/collective.contact.core | src/collective/contact/core/tests/test_related.py | test_related.py | py | 1,764 | python | en | code | 6 | github-code | 36 |
5547127979 | """
Voting 12/04/2022.
1. Refund previous depositor' spending to finance multisig 0x48F300bD3C52c7dA6aAbDE4B683dEB27d38B9ABb
with 254.684812629886507249 stETH.
2. Fund depositor bot multisig 0x5181d5D56Af4f823b96FE05f062D7a09761a5a53 with 130 stETH.
Vote passed & executed on Apr-15-2022 05:34:30 PM +UTC, block #14591317.
TX URL: https://etherscan.io/tx/0x3b5fff376df823e26857e68a468e161a8ed818afd29410983f0680c2d18042f5
"""
import time
from typing import (Dict, Tuple, Optional)
from brownie.network.transaction import TransactionReceipt
from utils.voting import confirm_vote_script, create_vote
from utils.finance import make_steth_payout
from utils.evm_script import encode_call_script
from utils.config import (
get_deployer_account,
get_is_live
)
def start_vote(
    tx_params: Dict[str, str],
    silent: bool = False
) -> Tuple[int, Optional[TransactionReceipt]]:
    """Prepare and run voting."""
    # Item 1: refund the previous depositor spending to the finance multisig.
    refund_payout = make_steth_payout(
        target_address='0x48F300bD3C52c7dA6aAbDE4B683dEB27d38B9ABb',
        steth_in_wei=254_684_812_629_886_507_249,
        reference='Refund depositor\'s spending'
    )
    # Item 2: fund the dedicated depositor bot multisig with 130 stETH.
    bot_payout = make_steth_payout(
        target_address='0x5181d5D56Af4f823b96FE05f062D7a09761a5a53',
        steth_in_wei=130 * (10 ** 18),
        reference='Fund depositor bot multisig'
    )
    encoded_call_script = encode_call_script([refund_payout, bot_payout])
    vote_desc = (
        'Omnibus vote: '
        '1) Refund previous depositor\' spending to finance multisig with 254.684812629886507249 stETH; '
        '2) Fund depositor bot multisig 0x5181d5D56Af4f823b96FE05f062D7a09761a5a53 with 130 stETH.'
    )
    # Short-circuit: skip vote creation entirely if the script preview is rejected.
    return confirm_vote_script(encoded_call_script, silent) and create_vote(
        vote_desc=vote_desc,
        evm_script=encoded_call_script,
        tx_params=tx_params
    )
def main():
    """Entry point: build transaction parameters and submit the vote.

    On a live network, explicit EIP-1559 fee caps are attached so the
    transaction is not priced by the node's defaults.
    """
    tx_params = {'from': get_deployer_account()}
    if get_is_live():
        tx_params['max_fee'] = '300 gwei'
        tx_params['priority_fee'] = '2 gwei'
    vote_id, _ = start_vote(tx_params=tx_params)
    # Explicit `if` instead of the original `vote_id >= 0 and print(...)` short-circuit.
    if vote_id >= 0:
        print(f'Vote created: {vote_id}.')
    time.sleep(5)  # hack for waiting thread #2.
| lidofinance/scripts | archive/scripts/vote_2022_04_12.py | vote_2022_04_12.py | py | 2,423 | python | en | code | 14 | github-code | 36 |
31428823001 | # 2021.09.09
# 2309
# 일곱 난쟁이
ls = []
for _ in range(9):
ls.append(int(input()))
target = sum(ls) - 100
for i in range(9):
flag = False
for j in range(i + 1, 9):
if (ls[i] + ls[j]) == target:
ls.pop(i)
ls.pop(j-1) # 하나 삭제되기 때문에 하나 줄여줘야 함
flag = True
break
# 찾았으면 거기서 멈추자
if flag:
break
ls.sort()
for i in ls:
print(i)
| Minkeyyyy/OJ | BaekJoon/All/2309.py | 2309.py | py | 471 | python | ko | code | 0 | github-code | 36 |
23728224660 | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from StyleFrame import StyleFrame, utils
# read excel file to usable numpy arrays
def load_multispectral_data(excel_file):
    """Load the NIR and red bands from the 'Multispectral Image' sheet.

    Rows 0-19 hold one band, rows 22-41 the other; the first column is a
    label column and is dropped. Returns (nir, red) as numpy arrays.
    """
    sheet = pd.read_excel(excel_file, 'Multispectral Image')
    nir_band = sheet.iloc[0:20, 1:].to_numpy()
    red_band = sheet.iloc[22:42, 1:].to_numpy()
    return nir_band, red_band
def load_training_data(excel_file):
    """Read the colour-coded 'Training Samples' sheet into a 20x20 array of class labels.

    Cell background colours are mapped to class names by get_classes_from_colors;
    the first column is dropped to mirror load_multispectral_data.
    """
    sf = StyleFrame.read_excel(excel_file, sheet_name='Training Samples', read_style=True, use_openpyxl_styles=False)
    # dtype=str: np.str was merely a deprecated alias of the builtin and was
    # removed in NumPy 1.24, so the original `dtype=np.str` breaks on modern NumPy.
    return StyleFrame(sf.applymap(get_classes_from_colors)).data_df[0:20].iloc[:, 1:].to_numpy(dtype=str)
def get_classes_from_colors(cell):
    """Map a styled cell's background colour to its land-cover class label."""
    bg = cell.style.bg_color
    if bg in {utils.colors.green, 'FF92D050'}:
        return 'vegetation'
    if bg in {utils.colors.red, 'FFFF0000'}:
        return 'bare ground'
    if bg in {utils.colors.blue, '3275c8'}:
        return 'water'
    return 'unclassified'
def calculate_ndvi(nir, red):
    """Compute NDVI = (NIR - red) / (NIR + red) element-wise.

    Pixels where NIR + red == 0 are set to 0 instead of NaN, and the
    division is skipped there via `where=`, so no RuntimeWarning is
    emitted (the original divided unconditionally and patched NaNs up
    afterwards with nan_to_num).
    """
    nir = np.asarray(nir, dtype=float)
    red = np.asarray(red, dtype=float)
    total = nir + red
    return np.divide(nir - red, total, out=np.zeros_like(total), where=total != 0)
def plot_histogram(ndvi, interval, filename):
    """Draw a bar-chart histogram of NDVI values over [-1, 1) and save it.

    Bins are half-open [lo, lo + interval); a value of exactly 1.0 falls
    outside every bin and is not counted.
    """
    bin_edges = np.arange(-1, 1, interval)
    counts = [np.logical_and(lo <= ndvi, ndvi < lo + interval).sum() for lo in bin_edges]
    plt.bar(bin_edges, counts, width=interval, align='edge', edgecolor='white', color='grey')
    plt.title('Histogram of NDVI values')
    plt.xlabel('Range of NDVI values')
    plt.ylabel('Amount of values within range')
    plt.savefig(filename)
def plot_scatter(red, nir, colors='Grey'):
    """Scatter-plot red vs. NIR values on the current matplotlib axes.

    Does not save or show the figure; callers layer several calls and then
    save/clear the figure themselves.
    """
    plt.scatter(red, nir, c=colors)
    plt.title('Relationship of red and near-infrared channels')
    plt.xlabel('Red channel')
    plt.ylabel('Near-infrared channel')
def mv_cov_covinv(ar1, ar2):
    """Return mean vector, covariance matrix and its pseudo-inverse for two 1-D samples.

    The two samples are stacked as the columns of an (n, 2) observation
    matrix, so the covariance matrix is 2x2.  pinv is used because the
    covariance matrix may be singular for degenerate samples.
    """
    obs_vectors = np.ma.vstack((ar1, ar2)).T
    mean_vector = np.mean(obs_vectors, axis=0)
    # rowvar=False: each row is one observation, each column one variable.
    # The original np.cov(obs_vectors) treated every observation as a
    # variable, producing an n x n matrix instead of the intended 2 x 2.
    covariance_matrix = np.cov(obs_vectors, rowvar=False)
    covariance_matrix_inv = np.linalg.pinv(covariance_matrix)
    return {'mean vector': mean_vector,
            'covariance matrix': covariance_matrix,
            'inverse covariance matrix': covariance_matrix_inv}
def minimum_distance_to_mean(vec, means):
    """Return (1-based index of the closest mean, Euclidean distance to it)."""
    distances = [np.linalg.norm(vec - mean) for mean in means]
    best = np.argmin(distances)
    return (best + 1, distances[best])
if __name__ == '__main__':
    ### assignment 1
    fn = './Multispectral Classification.xlsx'
    # Single read: the original called load_multispectral_data(fn) twice,
    # loading and parsing the workbook redundantly.
    nir, red = load_multispectral_data(fn)
    ndvi = calculate_ndvi(nir, red)
    fig = plt.figure(figsize=(6, 3.2))
    ax = fig.add_subplot(111)
    plt.imshow(ndvi)
    plt.colorbar(orientation='vertical')
    plt.title('NDVI values of 20x20 area')
    plot_histogram(ndvi, 0.2, 'histogram.jpg')
    ### assignment 2
    plot_scatter(red, nir)
    plt.savefig('scatter.jpg')
    plt.close()
    ### assignment 3
    training_classes = load_training_data(fn)
    # get masks for each class (invert=True: mask is False where the class matches)
    water_mask = np.isin(training_classes, 'water', invert=True)
    bg_mask = np.isin(training_classes, 'bare ground', invert=True)
    veg_mask = np.isin(training_classes, 'vegetation', invert=True)
    unc_mask = np.isin(training_classes, 'unclassified', invert=True)
    # plot each class with a different color
    plot_scatter(red[~unc_mask], nir[~unc_mask], colors='lightgrey')
    plot_scatter(red[~water_mask], nir[~water_mask], colors='blue')
    plot_scatter(red[~bg_mask], nir[~bg_mask], colors='red')
    plot_scatter(red[~veg_mask], nir[~veg_mask], colors='green')
    plt.savefig('scatter_f.jpg')
    plt.close()
    ### assignment 5
    # compute mean vector, covariance matrix and inverse of covariance matrix
    water_stats = mv_cov_covinv(red[~water_mask], nir[~water_mask])
    bg_stats = mv_cov_covinv(red[~bg_mask], nir[~bg_mask])
    veg_stats = mv_cov_covinv(red[~veg_mask], nir[~veg_mask])
    ### assignment 6
    # Minimum-distance-to-mean classification: class ids 1=veg, 2=bare ground, 3=water.
    obs_vecs = np.array((red, nir)).T
    means = (veg_stats['mean vector'], bg_stats['mean vector'], water_stats['mean vector'])
    classified = np.zeros(ndvi.shape)
    distances = np.zeros(ndvi.shape)
    for i in range(len(obs_vecs)):
        for j in range(len(obs_vecs[i])):
            pixel_class = minimum_distance_to_mean(obs_vecs[i][j], means)
            classified[j][i] = pixel_class[0]
            distances[j][i] = pixel_class[1]
    # threshold distance values: outliers beyond 2 sigma become NaN (unclassified)
    classified[distances > 2*np.std(distances)] = None
    # write to excel file
    df = pd.DataFrame(classified)
    df.to_excel('classified.xlsx', index=False)
    plt.imshow(classified)
    plt.show()
    colors = {0:'lightgrey', 1: 'green', 2: 'red', 3:'blue'}
    # assignment 7: per-class scatter plus per-class NDVI range after 1-sigma trimming
    for i in range(0, 4):
        plt.scatter(red[classified == i], nir[classified == i], c=colors[i])
        in_class = ndvi[classified == i]
        in_class[abs(in_class - np.mean(in_class)) > np.std(in_class)] = None
        ndvi_range = (np.nanmin(in_class), np.nanmax(in_class))
    plt.show()
| maxvanschendel/Geomatics | GEO1001/assignment_5.py | assignment_5.py | py | 5,084 | python | en | code | 0 | github-code | 36 |
72506677225 | from BetterDirectGui.DirectGui import *
def test():
    """Button `command` callback: log a click to stdout."""
    print("click")
def test_setup1():
    """Build a manual test scene: standalone buttons plus two frames with
    nested children (one frame two levels deep) to exercise parenting,
    scaling and positioning. Commented-out lines are kept as toggles for
    interactive experiments."""
    # Standalone button with a click callback.
    b1 = DirectButton(text="button1", command=test) # , suppressMouse=0, frameTexture="models/maps/circle.png")
    b1.setScale(0.2)
    b1.setPos(-0.7, 0, 0)
    # b1["scale"] = 0.2
    # b1["pos"] = (-0.7, 0, 0)
    # b1["hpr"] = (30, 30, 30)
    # b1["suppressMouse"] = 1
    # b1["suppressKeys"] = 1
    # b1["transparency"] = 1
    # Centre frame f0 with a button and a nested frame f11 inside it.
    f0 = DirectFrame(frameSize=(-1, 1, -1, 1))
    f0.setScale(0.2)
    f0.setPos(0, 0, 0)
    b11 = DirectButton(text="button1", parent=f0)
    b11.setScale(0.2)
    b11.setPos(0, 0, 0.7)
    f11 = DirectFrame(frameSize=(-1, 1, -1, 1), parent=f0)
    f11.setScale(0.6)
    f11.setPos(0, 0, 0)
    # Two buttons nested two levels deep (inside f11 inside f0).
    b14 = DirectButton(text="button4", parent=f11)
    b14.setScale(0.8)
    b14.setPos(0, 0, 0.3)
    b15 = DirectButton(text="button5", parent=f11)
    b15.setScale(0.8)
    b15.setPos(0, 0, -0.7)
    # Right-hand frame f1 with two direct children.
    f1 = DirectFrame(frameSize=(-1, 1, -1, 1))
    f1.setScale(0.2)
    f1.setPos(0.7, 0, 0)
    b4 = DirectButton(text="button4", parent=f1)
    b4.setScale(0.2)
    b4.setPos(0, 0, 0)
    b5 = DirectButton(text="button5", parent=f1)
    b5.setScale(0.2)
    b5.setPos(0, 0, -0.3)
    # Two more standalone buttons below the frames.
    b2 = DirectButton(text="button2")
    b2.setScale(0.2)
    b2.setPos(-0.7, 0, -0.5)
    b3 = DirectButton(text="button3")
    b3.setScale(0.2)
    b3.setPos(0.7, 0, -0.5)
    # Per-widget-class theme overrides; currently unused (set_theme call below
    # is commented out), kept for interactive testing.
    theme = {
        "DirectButton": dict(
            borderWidth=(0.2, 0.2),
            frameColor=(.2, 1.0, 1.0, 1.0),
            pad=(0.2, 0.2),
            pos=(0, 0, 0),
            hpr=(0, 0, -30),
            scale=(0.1, 0.1, 0.1),
            text='button',
        ),
        "DirectFrame": dict(
            # frameSize=(-1, 1, -1, 1),
            frameColor=(.2, 1, 1, 1),
            text=""
        )
    }
    # b1.override_navigation_map("f", b2)
    # f1.set_theme(theme, 1)
    # f1.clear_theme()
| Augustifolia/BetterDirectGui | tests/nesting_test.py | nesting_test.py | py | 1,901 | python | en | code | 0 | github-code | 36 |
24851114046 | #!/usr/bin/env python
import numpy as np
import pandas as pd
import lightgbm
# Input/output directories for the .npy artifacts read and written below.
path_in = 'sample/'
path_out = 'sample/'
# Number of distinct class labels encoded in the data matrices (values 0..8).
classes = 9
def make_features_weighted(data, num_classes=9):
    """Build per-row summary features from a matrix of class-valued columns.

    Columns are weighted linearly by position (weight i normalised by the
    sum 0..n-1), so later columns contribute more. For each row the result
    holds, in order: mean of the non-zero weighted values, mean of all
    weighted values, std of the non-zero weighted values, std of all
    weighted values, then one weighted occurrence count per class label
    0..num_classes-1.

    The num_classes parameter replaces the former hidden dependency on the
    module-level ``classes`` global; the default of 9 keeps old behaviour.

    NOTE(review): assumes data has at least 2 columns — with a single
    column the weight normalisation divides by zero. Confirm upstream.
    """
    weights = np.arange(data.shape[1], dtype=float)
    weights /= np.sum(weights)
    # Weighted per-class occurrence counts, computed on the raw class
    # labels before the matrix itself is re-weighted below.
    counters = pd.concat(
        [((data == label) * weights).sum(axis=1) for label in range(num_classes)],
        axis=1)
    weighted = data * weights
    return pd.concat((
        weighted[weighted != 0].mean(axis=1),
        weighted.mean(axis=1),
        weighted[weighted != 0].std(axis=1),
        weighted.std(axis=1),
        counters),
        axis=1)
# Load the raw train/test matrices and labels produced by an earlier step.
train_labels = np.load(path_in + 'train_labels.npy')
train_data = pd.DataFrame(np.load(path_in + 'train_data.npy'))
test_data = pd.DataFrame(np.load(path_in + 'test_data.npy'))
# Shallow, heavily-regularised gradient-boosting classifier.
model = lightgbm.LGBMClassifier(
    n_estimators=400, learning_rate=0.01, n_jobs=-1, max_depth=6)
# Fit and predict on the engineered weighted features, then persist predictions.
model.fit(make_features_weighted(train_data), train_labels)
test_labels = model.predict(make_features_weighted(test_data))
np.save(path_out + 'test_labels_lgbm.npy', test_labels)
| eugenbobrov/pzad2017 | task1/simple_lgbm.py | simple_lgbm.py | py | 1,023 | python | en | code | 0 | github-code | 36 |
20115987747 |
# Tutorial script: types, if/elif branches, ternaries, loops and break/continue.
print(type("334"))
print(type(44.22))
score = int(input("请输入你的分数:"))
# elif = else if
if 100 >= score >= 90:
    print('A')
elif 90 > score >=60:
    print('B')
elif 60 > score >=0:
    print('C')
else:
    print("输入错误!")
# Ternary (conditional) expression
x,y=4,5
small = x if x < y else y
print(small)
# assert: deliberately crashes the program when a condition fails, guaranteeing the condition holds
favorite = 'liuly'
for i in favorite:
    print(i, end=' ')
# Lists
print('------------------------')
member = ['白猫','黑猫','大脸猫','蓝皮鼠']
for item in member:
    print(item,len(item),end=' ')
# range(): the third argument is the step
range(5)
for i in range(5):
    print(i)
for i in range(1,10,2):
    print(i)
bingo = '我自己'
answer = input('说,你喜欢谁:')
while True:
    if answer == bingo:
        break
    answer = input('猜错了,重新说')
print("yes")
# continue skips the rest of the body; the i += 2 below never runs for odd i
for i in range(10):
    if i%2 != 0:
        print(i)
        continue
    i += 2
    print(i)
| Linka39/pythonStudy | branch.py | branch.py | py | 1,007 | python | en | code | 0 | github-code | 36 |
71160406824 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import math
import sys
def normal(x, mean, var):
    """Gaussian pdf N(mean, var) evaluated at x; a zero variance is clamped to 0.2."""
    if var == 0:
        var = 0.2
    exponent = -((x - mean) ** 2) / (2.0 * var)
    return math.e ** exponent / (2.0 * math.pi * var) ** 0.5
class Naive():
    """Naive Bayes image classifier over per-pixel features.

    option == 0: discrete mode — per (label, pixel) histograms of pixel
    intensities bucketed into bin_n bins (assumes 8-bit pixels, 0..255).
    option != 0: continuous mode — per (label, pixel) Gaussian with mean
    and variance estimated from the training data.
    """
    def __init__(self, label_n, pixel_n, bin_n=32, option=0):
        # label_count[i]: number of training samples seen for label i (filled by fit).
        self.label_count = [0 for i in range(label_n)]
        self.label_n = label_n
        self.pixel_n = pixel_n
        self.bin_n = bin_n
        self.option = option
    def fit(self, images, labels):
        """Accumulate per-label statistics from training images.

        images: iterable of pixel sequences (length pixel_n); labels: the
        matching label index per image.
        """
        self.total = len(labels)
        if self.option == 0:
            # table[label][pixel][bin]: count of training pixels in that intensity bin.
            self.table = [[[0 for k in range(self.bin_n)] for j in range(self.pixel_n)] for i in range(self.label_n)]
            for i, image in enumerate(images):
                self.label_count[labels[i]] += 1
                for j, pixel in enumerate(image):
                    self.table[labels[i]][j][int(pixel / (256 / self.bin_n))] += 1
        else:
            # table[label][pixel]: running sums, converted to mean/variance below.
            self.table = [[{"mean": 0, "var": 0} for j in range(self.pixel_n)] for i in range(self.label_n)]
            for i, image in enumerate(images):
                self.label_count[labels[i]] += 1
                for j, pixel in enumerate(image):
                    self.table[labels[i]][j]["mean"] += pixel
                    self.table[labels[i]][j]["var"] += pixel**2
            for i in range(self.label_n):
                for j in range(self.pixel_n):
                    # Population variance via E[X^2] - E[X]^2.
                    self.table[i][j]["mean"] /= self.label_count[i]
                    self.table[i][j]["var"] = float(self.table[i][j]["var"]) / float(self.label_count[i]) - self.table[i][j]["mean"]**2
    def score(self, images, labels):
        """Classify each image by maximum posterior and return the accuracy in [0, 1]."""
        if self.option == 0:
            correct = 0
            for k, image in enumerate(images):
                probabilities = []
                for i in range(self.label_n):
                    probabilities.append(1)
                    for j, pixel in enumerate(image):
                        # `or 1` substitutes 1 for empty bins to avoid zeroing the product.
                        probabilities[-1] *= self.table[i][j][int(pixel / (256 / self.bin_n))] or 1
                    probabilities[-1] *= self.label_count[i]
                # print(probabilities)
                m = 0
                for i in range(self.label_n):
                    if m == i:
                        continue
                    # Cross-multiplied comparison: equivalent to comparing the
                    # products after dividing each bin count by label_count**pixel_n,
                    # without doing the (precision-losing) divisions.
                    if probabilities[m] * self.label_count[i]**self.pixel_n < probabilities[i] * self.label_count[m]**self.pixel_n:
                        m = i
                if m == labels[k]:
                    correct += 1
            return float(correct) / float(len(labels))
        else:
            correct = 0
            for k, image in enumerate(images):
                probabilities = []
                for i in range(self.label_n):
                    probabilities.append(0.0)
                    for j, pixel in enumerate(image):
                        # Standard-normal density of the z-scored pixel; std clamped to 0.1
                        # and density floored at 0.01 to keep log() finite.
                        p = normal((pixel - self.table[i][j]["mean"]) / (self.table[i][j]["var"]**0.5 or 0.1), 0, 1)
                        probabilities[-1] += math.log(p or 0.01)
                    # Add the log-prior for label i.
                    probabilities[-1] += math.log(float(self.label_count[i]) / float(self.total))
                # print(labels[k], probabilities)
                if max((v, i) for i, v in enumerate(probabilities))[1] == labels[k]:
                    correct += 1
            return float(correct) / float(len(labels))
| chhu0830/NCTU_106-2_machine-learning | lab2/classifier.py | classifier.py | py | 3,307 | python | en | code | 0 | github-code | 36 |
70003678825 | from discord.ext import commands
import logging, traceback, discord
from collections import Counter
import datetime
import asyncio, aioredis
import os, sys, time
import random
from multiprocessing import Queue
from queue import Empty as EmptyQueue
import json
import hashlib
import config
import rethinkdb as r
import aiohttp
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(8)
RESET_SEQ = "\033[0m"
COLOR_SEQ = "\033[1;%dm"
BOLD_SEQ = "\033[1m"
TIME_SEQ = COLOR_SEQ % (30 + MAGENTA)
NAME_SEQ = COLOR_SEQ % (30 + CYAN)
FORMAT = "[$TIME_SEQ%(asctime)-3s$RESET]" \
"[$NAME_SEQ$BOLD%(name)-2s$RESET]" \
"[%(levelname)-1s]" \
"[%(message)s]" \
"[($BOLD%(filename)s$RESET:%(lineno)d)]"
def formatter_message(message: str, colored: bool = True) -> str:
    """Expand the $RESET/$BOLD/$TIME_SEQ/$NAME_SEQ placeholders in *message*.

    With colored=True the placeholders become their ANSI escape sequences;
    with colored=False they are stripped. (The original stripped only
    $RESET/$BOLD in the non-colored branch, leaving literal "$TIME_SEQ" /
    "$NAME_SEQ" text in the resulting format string.)
    """
    # Conditional expressions keep the module-level escape constants
    # unevaluated on the colored=False path.
    replacements = {
        "$RESET": RESET_SEQ if colored else "",
        "$BOLD": BOLD_SEQ if colored else "",
        "$TIME_SEQ": TIME_SEQ if colored else "",
        "$NAME_SEQ": NAME_SEQ if colored else "",
    }
    for placeholder, value in replacements.items():
        message = message.replace(placeholder, value)
    return message
class ColoredFormatter(logging.Formatter):
    """Formatter that wraps the level name and message in ANSI colour codes.

    NOTE(review): format() mutates the LogRecord in place (levelname and msg),
    so other handlers formatting the same record afterwards see the coloured
    text — confirm this is intended.
    """
    def __init__(self, msg, use_color=True):
        logging.Formatter.__init__(self, msg)
        self.use_color = use_color
    def format(self, record):
        level_name = record.levelname
        # Colour the level name per the module-level COLORS mapping.
        if self.use_color and level_name in COLORS:
            level_name_color = COLOR_SEQ % (30 + COLORS[level_name]) + level_name + RESET_SEQ
            record.levelname = level_name_color
        message = record.msg
        # Colour the message text itself (always blue for known levels).
        if self.use_color and level_name in COLORS:
            message_color = COLOR_SEQ % (30 + BLUE) + message + RESET_SEQ
            record.msg = message_color
        return logging.Formatter.format(self, record)
class ColoredLogger(logging.Logger):
    """Logger subclass pinned to INFO level; installed via logging.setLoggerClass."""
    def __init__(self, name):
        super().__init__(name, logging.INFO)
# Level-name -> ANSI colour index used by ColoredFormatter.
COLORS = {
    'WARNING': YELLOW,
    'INFO': BLUE,
    'DEBUG': WHITE,
    'CRITICAL': YELLOW,
    'ERROR': RED
}
# Root logger: coloured console output at INFO.
logger = logging.getLogger()
logger.setLevel(logging.INFO)
color_format = formatter_message(FORMAT, True)
logging.setLoggerClass(ColoredLogger)
color_formatter = ColoredFormatter(color_format)
console = logging.StreamHandler()
console.setFormatter(color_formatter)
logger.addHandler(console)
# Separate logger for per-command timing records.
commandLog = logging.getLogger("commandLog")
commandLog.setLevel(logging.INFO)
# On Linux, also write timestamped log files (one general, one for commands).
# NOTE(review): the filename embeds datetime.utcnow() including spaces/colons;
# assumes logs/ exists — confirm.
if sys.platform == "linux":
    file = logging.FileHandler(filename=f'logs/{datetime.datetime.utcnow()}.log', encoding='utf-8', mode='w')
    file.setFormatter(color_formatter)
    logger.addHandler(file)
    file = logging.FileHandler(filename="logs/{}-commands.log".format(datetime.datetime.utcnow()), encoding="utf-8", mode="w")
    file.setFormatter(color_formatter)
    commandLog.addHandler(file)
async def _prefix_callable(bot, msg):
    """Resolve command prefixes: the user's stored Redis prefix (if any) plus the n!/N! defaults."""
    stored = await bot.redis.get(f"{msg.author.id}-prefix")
    if stored:
        prefixes = [stored.decode("utf8"), "n!", "N!"]
    else:
        prefixes = ['n!', 'N!']
    return commands.when_mentioned_or(*prefixes)(bot, msg)
class NekoBot(commands.AutoShardedBot):
    """Sharded Discord bot instance: loads extension modules, tracks usage
    counters in Redis, handles an inter-process command queue, and awards
    chat XP in RethinkDB."""
    def __init__(self, instance, instances, shard_count, shard_ids, pipe, ipc_queue: Queue, **kwargs):
        super().__init__(command_prefix=_prefix_callable,
                         description="NekoBot",
                         pm_help=None,
                         shard_ids=shard_ids,
                         shard_count=shard_count,
                         status=discord.Status.idle,
                         fetch_offline_members=False,
                         max_messages=kwargs.get("max_messages", 105),
                         help_attrs={"hidden": True})
        # Process-wide counters (messages_read / commands_used) and per-command usage.
        self.counter = Counter()
        self.command_usage = Counter()
        self.instance = instance
        self.instances = instances
        self.pipe = pipe
        self.ipc_queue = ipc_queue
        self.shard_ids = shard_ids
        # Deferred connection setup: scheduled on the event loop, so self.redis /
        # self.r_conn only exist once these tasks have run.
        async def _init_redis():
            self.redis = await aioredis.create_redis(address=("localhost", 6379), loop=self.loop)
        async def _init_rethink():
            r.set_loop_type("asyncio")
            self.r_conn = await r.connect(host="localhost",
                                          db="nekobot")
        self.loop.create_task(_init_rethink())
        self.loop.create_task(_init_redis())
        # Load every module in modules/ as an extension; failures are logged, not fatal.
        for file in os.listdir("modules"):
            if file.endswith(".py"):
                name = file[:-3]
                try:
                    self.load_extension(f"modules.{name}")
                except:  # NOTE(review): bare except also swallows KeyboardInterrupt
                    logger.warning("Failed to load {}.".format(name))
                    traceback.print_exc()
        self.loop.create_task(self.ipc())
        # Blocking call: starts the bot immediately from the constructor.
        self.run()
    async def ipc(self):
        """Poll the multiprocessing queue every 30s for reload/load/unload commands."""
        while True:
            try:
                data = self.ipc_queue.get_nowait()
                if data:
                    data = json.loads(data)
                    if data["op"] == "reload":
                        self.unload_extension("modules.{}".format(data["d"]))
                        self.load_extension("modules.{}".format(data["d"]))
                        logger.info("Reloaded {}".format(data["d"]))
                    elif data["op"] == "load":
                        self.load_extension("modules.{}".format(data["d"]))
                        logger.info("Loaded {}".format(data["d"]))
                    elif data["op"] == "unload":
                        self.unload_extension("modules.{}".format(data["d"]))
                        logger.info("Unloaded {}".format(data["d"]))
            except EmptyQueue:
                pass  # nothing queued this cycle
            except Exception as e:
                logger.error("IPC Failed, {}".format(e))
            await asyncio.sleep(30)
    async def get_language(self, ctx):
        """Return the user's stored language, or None for the default (English)."""
        data = await self.redis.get("%s-lang" % ctx.author.id)
        if not data:
            return None
        dec = data.decode("utf8")
        # "english" is the default; drop the redundant key and report None.
        if dec == "english":
            await self.redis.delete("%s-lang" % ctx.author.id)
            return None
        return dec
    async def on_command_error(self, context, exception):
        # Silently ignore unknown commands; everything else is also dropped here.
        if isinstance(exception, commands.CommandNotFound):
            return
    async def on_command_completion(self, ctx):
        """Log how long a command took, using the timestamp stored by on_command."""
        data = await self.redis.get("{}:{}:{}".format(ctx.author.id, ctx.channel.id, ctx.message.id))
        if data:
            completion = int(time.time()) - int(data)
            commandLog.info("{} executed {} in {}s".format(ctx.author.id, ctx.command.name, completion))
            if completion >= 30:
                commandLog.warning("{} took over 30 seconds to execute".format(ctx.command.name))
    async def on_command(self, ctx):
        """Bump usage counters and stamp the command start time (1h TTL) in Redis."""
        self.counter["commands_used"] += 1
        self.command_usage[ctx.command.name] += 1
        await self.redis.incr(ctx.command.name)
        await self.redis.set("{}:{}:{}".format(ctx.author.id, ctx.channel.id, ctx.message.id), int(time.time()), expire=3600)
    async def send_cmd_help(self, ctx):
        """Send formatted help for the invoked (sub)command to the channel."""
        if ctx.invoked_subcommand:
            pages = await self.formatter.format_help_for(ctx, ctx.invoked_subcommand)
            for page in pages:
                await ctx.send(page)
        else:
            pages = await self.formatter.format_help_for(ctx, ctx.command)
            for page in pages:
                await ctx.send(page)
    async def __level_handler(self, message):
        """Randomly award XP for guild messages, rate-limited to one award per 120s.

        1-in-15 chance to award global XP; failing that, another 1-in-15
        chance to award per-guild XP.
        """
        if not isinstance(message.channel, discord.TextChannel):
            return
        # Ignore empty or very short messages.
        if message.content == "" or not len(message.content) > 5:
            return
        if random.randint(1, 15) == 1:
            author = message.author
            user_data = await r.table("levelSystem").get(str(author.id)).run(self.r_conn)
            if not user_data:
                # First sighting: create the user's row with zero XP.
                data = {
                    "id": str(author.id),
                    "xp": 0,
                    "lastxp": "0",
                    "blacklisted": False,
                    "lastxptimes": []
                }
                return await r.table("levelSystem").insert(data).run(self.r_conn)
            if user_data.get("blacklisted", False):
                return
            # Rate limit: at most one XP award per 120 seconds.
            if (int(time.time()) - int(user_data["lastxp"])) >= 120:
                lastxptimes = user_data["lastxptimes"]
                lastxptimes.append(str(int(time.time())))
                xp = user_data["xp"] + random.randint(1, 30)
                data = {
                    "xp": xp,
                    "lastxp": str(int(time.time())),
                    "lastxptimes": lastxptimes
                }
                await r.table("levelSystem").get(str(author.id)).update(data).run(self.r_conn)
        elif random.randint(1, 15) == 1:
            guildXP = await r.table("guildXP").get(str(message.guild.id)).run(self.r_conn)
            if not guildXP or not guildXP.get(str(message.author.id)):
                data = {
                    str(message.author.id): {
                        "lastxp": str(int(time.time())),
                        "xp": 0
                    }
                }
                if not guildXP:
                    data["id"] = str(message.guild.id)
                # NOTE(review): when the guild row does not exist yet this issues
                # update() on a missing document — verify it actually inserts.
                return await r.table("guildXP").get(str(message.guild.id)).update(data).run(self.r_conn)
            # Same 120-second rate limit, but per guild member.
            if (int(time.time()) - int(guildXP.get(str(message.author.id))["lastxp"])) >= 120:
                xp = guildXP.get(str(message.author.id))["xp"] + random.randint(1, 30)
                data = {
                    str(message.author.id): {
                        "xp": xp,
                        "lastxp": str(int(time.time()))
                    }
                }
                await r.table("guildXP").get(str(message.guild.id)).update(data).run(self.r_conn)
    async def on_message(self, message):
        """Count every message, then run commands and the XP handler for non-bots."""
        self.counter["messages_read"] += 1
        if message.author.bot:
            return
        await self.process_commands(message)
        await self.__level_handler(message)
    async def close(self):
        # Tear down DB connections before the discord client shuts down.
        self.r_conn.close()
        self.redis.close()
        await super().close()
    async def on_ready(self):
        """Announce readiness, then (once) start the 5-minute stats-posting loop."""
        if not hasattr(self, "uptime"):
            self.uptime = datetime.datetime.utcnow()
        # Ping the status webhook so operators see this instance come up.
        async with aiohttp.ClientSession() as cs:
            await cs.post(config.status_smh, json={
                "content": "instance {} ready smh".format(self.instance)
            })
        print(" _ _ _ \n"
              " | | | | | | \n"
              " _ __ ___| | _____ | |__ ___ | |_ \n"
              " | '_ \ / _ \ |/ / _ \| '_ \ / _ \| __|\n"
              " | | | | __/ < (_) | |_) | (_) | |_ \n"
              " |_| |_|\___|_|\_\___/|_.__/ \___/ \__|\n"
              " \n"
              " ")
        logger.info("Ready OwO")
        logger.info(f"Shards: {self.shard_count}")
        logger.info(f"Servers {len(self.guilds)}")
        logger.info(f"Instance {self.instance}")
        logger.info(f"Users {len(set(self.get_all_members()))}")
        await self.change_presence(status=discord.Status.idle)
        # Guard so reconnects don't spawn a second posting loop; the loop then
        # keeps on_ready alive indefinitely, publishing stats every 5 minutes.
        if not hasattr(self, "instancePoster"):
            self.instancePoster = True
            while self.instancePoster:
                await self.redis.set("instance%s-guilds" % self.instance, len(self.guilds))
                await self.redis.set("instance%s-users" % self.instance, sum([x.member_count for x in self.guilds]))
                await self.redis.set("instance%s-messages" % self.instance, self.counter["messages_read"])
                await self.redis.set("instance%s-commands" % self.instance, self.counter["commands_used"])
                await self.redis.set("instance%s-channels" % self.instance, len(set(self.get_all_channels())))
                logger.info(f"Updated Instance {self.instance}'s Guild Count with {len(self.guilds)}")
                await asyncio.sleep(300)
    def run(self):
        # Start the bot with the token from config; blocks until shutdown.
        super().run(config.token)
| harumaki4649/nekobot | shardedBot.py | shardedBot.py | py | 12,244 | python | en | code | 0 | github-code | 36 |
13160204578 | #!/usr/bin/python
import main.database as maria
import csv
# Shared database connection used by create_csv.
db = maria.MySQLDatabase()
def create_csv():
    """Dump the entire PROBES table to persons.csv: a 'PROBES' title row,
    a header row of column names, then one row per record.

    NOTE(review): opening with mode 'wb' for csv.writer is Python-2 style;
    under Python 3 this raises on write — confirm the target interpreter.
    """
    sql = "SELECT * FROM PROBES;"
    db.mycursor.execute(sql)
    # Column names come from the cursor description of the executed query.
    output_description = tuple([field[0] for field in db.mycursor.description])
    m_list = db.mycursor.fetchall()
    with open('persons.csv', 'wb') as csvfile:
        filewriter = csv.writer(csvfile, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
        filewriter.writerow(['PROBES'])
        filewriter.writerow(output_description)
        for row in m_list:
            filewriter.writerow(row)
def example_csv():
    """Write a small hard-coded persons.csv (header plus three sample rows)."""
    rows = [
        ['Name', 'Profession'],
        ['Derek', 'Software Developer'],
        ['Steve', 'Software Developer'],
        ['Paul', 'Manager'],
    ]
    with open('persons.csv', 'wb') as csvfile:
        writer = csv.writer(csvfile, delimiter=',',
                            quotechar='|', quoting=csv.QUOTE_MINIMAL)
        writer.writerows(rows)
create_csv()
# example_csv() | pdeesawat4887/python-cgi-monitor | tuesday-service-server/create_csv.py | create_csv.py | py | 1,033 | python | en | code | 0 | github-code | 36 |
4108486387 | import sys
# Fast input for competitive programming.
input = sys.stdin.readline
n, q = [int(x) for x in input().split()]
arr = [int(x) for x in input().split()]
# Prefix/suffix running maxima over arr, 1-indexed with sentinel slots:
#   maxL[i]  = max of arr[0..i-1], freqL[i] = how many times that max occurs there
#   maxR[i]  = max of arr[i-1..n-1], freqR[i] = its multiplicity in that suffix
maxL = [0] * (n + 2)
maxR = [0] * (n + 2)
freqL = [1] * (n + 2)
freqR = [1] * (n + 2)
for i in range(n):
    maxL[i + 1] = max(arr[i], maxL[i])
    freqL[i + 1] = freqL[i]
    if arr[i] == maxL[i]:
        # Ties the existing prefix max: one more occurrence.
        freqL[i + 1] += 1
    elif arr[i] == maxL[i + 1]:
        # Strictly new prefix max: restart the count.
        freqL[i + 1] = 1
maxR[n] = arr[n - 1]
for i in range(n - 2, -1, -1):
    maxR[i + 1] = max(arr[i], maxR[i + 2])
    freqR[i + 1] = freqR[i + 2]
    if arr[i] == maxR[i + 2]:
        freqR[i + 1] += 1
    elif arr[i] == maxR[i + 1]:
        freqR[i + 1] = 1
# print(maxL)
# print(freqL)
# print(maxR)
# print(freqR)
# Each query (l, r): report the maximum value outside positions l..r
# (prefix before l combined with suffix after r) and its multiplicity.
for _ in range(q):
    l, r = [int(x) for x in input().split()]
    l -= 1
    r += 1
    if maxL[l] == maxR[r]:
        # Same max on both sides: multiplicities add up.
        print(maxL[l], freqL[l] + freqR[r])
    elif maxL[l] > maxR[r]:
        print(maxL[l], freqL[l])
    else:
        print(maxR[r], freqR[r])
| AAZZAZRON/DMOJ-Solutions | gfssoc2j5.py | gfssoc2j5.py | py | 963 | python | en | code | 1 | github-code | 36 |
19565250442 | from django.db import models
from users.models import User
from .validators import validate_year
class Category(models.Model):
    """Title category (e.g. film, book), addressable by unique slug."""
    name = models.CharField(max_length=256)
    slug = models.SlugField(max_length=50, unique=True)
    def __str__(self):
        return self.name
    class Meta:
        ordering = ('name',)
class Genre(models.Model):
    """Title genre, addressable by unique slug; titles may have several genres."""
    name = models.CharField(max_length=256)
    slug = models.SlugField(max_length=50, unique=True)
    def __str__(self):
        return self.name
    class Meta:
        ordering = ('name',)
class Title(models.Model):
    """A reviewable work: name, release year (validated), optional description,
    many genres and at most one category."""
    name = models.CharField(max_length=256)
    # validate_year rejects out-of-range release years.
    year = models.IntegerField(validators=[validate_year])
    description = models.TextField(
        null=True,
        blank=True
    )
    genre = models.ManyToManyField(Genre)
    # SET_NULL keeps titles when their category is deleted.
    category = models.ForeignKey(
        Category, on_delete=models.SET_NULL,
        null=True
    )
    def __str__(self):
        return self.name
    class Meta:
        ordering = ('name',)
class Review(models.Model):
    """A user's scored review of a title; one review per (author, title) pair."""
    title = models.ForeignKey(
        Title,
        on_delete=models.CASCADE,
        related_name='reviews'
    )
    text = models.TextField(
        'Текст отзыва'
    )
    # Score restricted to the integers 1..10 via choices.
    score = models.IntegerField(
        'Оценка',
        choices=list(zip(range(1, 11), range(1, 11))),
    )
    author = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        related_name='reviews'
    )
    pub_date = models.DateTimeField(
        auto_now_add=True,
        db_index=True
    )
    class Meta:
        # DB-level guarantee: a user may review each title only once.
        constraints = [
            models.UniqueConstraint(
                fields=['author', 'title'],
                name='unique_author_title'
            )
        ]
    def __str__(self):
        return self.text
class Comment(models.Model):
    """A user's comment on a review, ordered oldest-first by publication date."""
    review = models.ForeignKey(
        Review,
        on_delete=models.CASCADE,
        related_name='comments'
    )
    text = models.TextField(
        'Текст комментария'
    )
    author = models.ForeignKey(
        User,
        on_delete=models.CASCADE,
        related_name='comments'
    )
    pub_date = models.DateTimeField(
        auto_now_add=True,
        db_index=True
    )
    class Meta:
        ordering = ('pub_date',)
    def __str__(self):
        return self.text
| Daniil-lev/infra_sp2 | api_yamdb/reviews/models.py | models.py | py | 2,321 | python | en | code | 3 | github-code | 36 |
31609478117 | s = list(input())
def solve(chars):
    """Return the alphabetic characters of *chars* sorted ascending, followed
    by the sum of its digits (e.g. "K1KA5CB7" -> "ABCKK13").

    Uses the same ASCII-range checks as the original: '0'-'9' for digits,
    'a'-'z' / 'A'-'Z' for letters; anything else is ignored.
    """
    digit_sum = 0
    letters = []
    for ch in chars:
        if 47 < ord(ch) < 58:  # digit
            digit_sum += int(ch)
        elif 96 < ord(ch) < 123 or 64 < ord(ch) < 91:  # letter
            letters.append(ch)
    return ''.join(sorted(letters)) + str(digit_sum)


if __name__ == "__main__":
    # `s` is the character list read at module top; print letters then digit sum.
    print(solve(s))
# K1KA5CB7
# AJKDLSI412K4JSJ9D | dongho108/ThisIsCodingTestByPython | implement/replaceStr.py | replaceStr.py | py | 351 | python | en | code | 0 | github-code | 36 |
30321707707 | from unittest.mock import patch
import unittest
import os
import uuid
from datetime import datetime
import cmr.util.common as com
# ******************************************************************************
class TestSearch(unittest.TestCase):
    """Test suite for cmr.util.common helper functions."""
    # **********************************************************************
    # Util methods
    # **********************************************************************
    # Tests
    def test_conj(self):
        """Test the conj function"""
        self.assertEqual([3, 4], com.conj(None, [3, 4]), 'src was None')
        self.assertEqual([1, 2, 3, 4], com.conj([1, 2], [3, 4]), 'good src, lists')
        self.assertEqual((4, 3, 1, 2), com.conj((1, 2), (3, 4)), 'good src, tuples')
        self.assertEqual({'a': 'A', 'b': 'B'}, com.conj({'a':'A'}, {'b':'B'}), 'good src, dict')
    def test_always(self):
        """Test the always function: coerce any value to the requested container type."""
        self.assertEqual({}, com.always("wrong type"), 'wrong thing')
        self.assertEqual({}, com.always([]), 'wrong type')
        self.assertEqual({}, com.always({}), 'same type')
        self.assertEqual({'a':'b'}, com.always({'a':'b'}), 'populated dict, assumed')
        self.assertEqual({'a':'b'}, com.always({'a':'b'}, otype=dict), 'populated dict')
        self.assertEqual(['a', 'b'], com.always(['a','b'], otype=list), 'populated list')
        self.assertEqual((1,2,3), com.always((1,2,3), otype=tuple), 'populated tuple')
        self.assertEqual((1,2,3), com.always((1,2,3), tuple), 'populated tuple, positional')
        self.assertEqual('', com.always(None, str), 'not populated string, positional')
        self.assertEqual('', com.always('', str), 'empty string, positional')
        self.assertEqual('text', com.always('text', str), 'populated string, positional')
        # None use cases
        self.assertEqual({}, com.always(None), 'assumed, none, dict')
        self.assertEqual({}, com.always(None, otype=dict), 'None, dict')
        self.assertEqual([], com.always(None, otype=list), 'None, list')
        self.assertEqual((), com.always(None, otype=tuple), 'None, tuple')
        self.assertEqual((), com.always(None, tuple), 'None, tuple, positional')
    def test_drop_key_safely(self):
        """Test that values can be dropped safely"""
        def tester (expected, src, key, msg):
            return self.assertEqual(expected, com.drop_key_safely(src, key), msg)
        tester({}, {}, "Not existing", "Empty dictionary")
        tester({"key":"value"}, {"key": "value"}, "not found", "wrong key, no drop")
        tester({}, {"key":"value"}, "key", "drop found key")
    def test_write_read_round_trip(self):
        """
        Test the read and write functions by doing a full round trip test. Save
        some text to a temp file, then read it back, testing both functions at once
        """
        # Random path and payload to avoid collisions between test runs.
        path = "/tmp/" + str(uuid.uuid4())
        expected = str(uuid.uuid4())
        com.write_file(path, expected)
        actual = com.read_file(path)
        os.remove(path) # cleanup now
        self.assertEqual(expected, actual, "Write-Read round trip")
    def test_execute_command(self):
        """Execute will run any command, test that it behaves as expected"""
        def tester (expected, given, msg):
            return self.assertEqual(expected, com.execute_command(given), msg)
        tester("", "true", "Test a single command response")
        tester("_result_", ["printf", '_%s_', 'result'], "Test a command with properties")
    @patch('cmr.util.common.execute_command')
    def test_security_call(self, execute_command_mock):
        """
        test that the code will call an external command and respond as expected
        """
        # Happy path: surrounding whitespace is stripped from the command output.
        execute_command_mock.return_value = " response info "
        self.assertEqual("response info", com.call_security("account", "service"), "Good response")
        # Failure path: a missing keychain entry raises TypeError.
        execute_command_mock.return_value = None
        try:
            com.call_security("account", "service")
        except TypeError as err:
            self.assertEqual('account not found in keychain', str(err), "Bad response")
    def test_help_format_lambda(self):
        """Test that the lambda function performs as expected"""
        cmd = com.help_format_lambda()
        self.assertTrue("str(object='') -> str" in cmd("str", ""))
    def test_mask_string(self):
        """Test that the mask_string function will clean out sensitive info"""
        def tester(expected, given, msg):
            return self.assertEqual(expected, com.mask_string(given), msg)
        tester("", None, "None sent")
        tester("", "", "No Letters")
        tester("0", "0", "One letter")
        tester("01", "01", "Two Letters")
        tester("0*2", "012", "Three Letters")
        tester('EDL-U123********34567890', 'EDL-U12345678901234567890', "Real example")
    def test_mask_dictionary(self):
        """Test that the mask_dictionary function will clean out sensitive info"""
        data = {'ignore': 'this',
            'token': '012345687', 'cmr-token': 'EDL-U12345678901234567890'}
        expected1 = {'ignore': 'this',
            'token': '012345687', 'cmr-token': 'EDL-U123********34567890'}
        expected2 = {'ignore': 'this',
            'token': '012***687', 'cmr-token': 'EDL-U12345678901234567890'}
        expected3 = {'ignore': 'this',
            'token': '012345687', 'cmr-token': 'EDL-U12345678901234567890'}
        expected4 = {'ignore': 'this',
            'token': '012***687', 'cmr-token': 'EDL-U123********34567890'}
        # Only exact key names are masked ('cmr' alone matches nothing).
        self.assertEqual(expected1, com.mask_dictionary(data, 'cmr-token'))
        self.assertEqual(expected1, com.mask_dictionary(data, ['cmr-token']))
        self.assertEqual(expected2, com.mask_dictionary(data, 'token'))
        self.assertEqual(expected2, com.mask_dictionary(data, ['token']))
        self.assertEqual(expected3, com.mask_dictionary(data, 'cmr'))
        self.assertEqual(expected3, com.mask_dictionary(data, ['cmr']))
        self.assertEqual(expected4, com.mask_dictionary(data, ['token', 'cmr-token']))
        self.assertEqual(data, com.mask_dictionary(data, ''))
        self.assertEqual(data, com.mask_dictionary(data, []))
    def test_now(self):
        """
        The now function is provided to allow tests to patch it for returning a
        fixed time. This function should normally return the same value as
        datetime.now(). Test that the value is within 1 second of a direct call
        to datetime.now()
        """
        actual = datetime.now().timestamp()
        managed = com.now().timestamp()
        dif = managed - actual
        self.assertTrue(dif < 1.0, "time returned should be close to the real thing")
| nasa/eo-metadata-tools | CMR/python/test/cmr/util/test_common.py | test_common.py | py | 6,735 | python | en | code | 25 | github-code | 36 |
11784656842 | from aiogram import Dispatcher
from aiogram.types import Message
from database.database import GAME_USERS
from lexicon.lexicon_ru import LEXICON_RU
async def send_reverse_answer(message: Message):
    """Reply with the reversed message text, unless the user is mid-game."""
    user_id = message.from_user.id
    if user_id in GAME_USERS and GAME_USERS[user_id]['in_game']:
        # Players with an active game get a "we are playing" notice instead.
        await message.reply(text=LEXICON_RU['we_are_playing'])
        return
    reversed_text = message.text[::-1]
    await message.reply('\n'.join([reversed_text, LEXICON_RU['smile']]))
def register_other_handlers(dp: Dispatcher):
    # Catch-all handler: registered without filters, so it receives any
    # message that earlier handlers did not claim.
    dp.register_message_handler(send_reverse_answer)
| faralost/ichiraku-telegram-bot | handlers/other_handlers.py | other_handlers.py | py | 662 | python | en | code | 2 | github-code | 36 |
21477740913 | import sys
direction = [(-1, 0), (0, 1), (1, 0), (0, -1)] # 시계방향순
# Flood-fill that counts how many iceberg cells are connected to one cell.
def check(r, c):
    """Count the iceberg cells reachable from (r, c); result kept in global cnt."""
    global cnt
    cnt = 1
    seen = [[False] * m for _ in range(n)]
    seen[r][c] = True
    pending = [(r, c)]
    while pending:
        r, c = pending.pop()
        for dr, dc in direction:
            nr, nc = r + dr, c + dc
            # No bounds check: the input grid has a sea (0) border --
            # TODO confirm against the problem statement.
            if arr[nr][nc] != 0 and not seen[nr][nc]:
                pending.append((nr, nc))
                seen[nr][nc] = True
                cnt += 1
    return cnt
# If the flood-fill count equals the number of remaining iceberg cells, the
# iceberg is still in one piece; advance the year and melt each cell by its
# number of adjacent sea (0) cells.
# If the counts differ, the iceberg has split into two or more pieces.
n, m = map(int, sys.stdin.readline().split())
arr = [list(map(int, sys.stdin.readline().split())) for i in range(n)]
melt = [[0] * m for _ in range(n)]
# ice: positions of all remaining iceberg cells.
ice = []
for i in range(1, n-1):
    for j in range(1, m-1):
        if arr[i][j] != 0:
            ice.append((i, j))
ans = 0
cnt = 0
year = 0
while ice:
    # Split detected: report the year at which it happened.
    if len(ice) != check(ice[0][0], ice[0][1]):
        ans = year
        break
    year += 1
    melt_co = []
    # Iterate backwards so indices collected in melt_co stay valid for pop().
    for i in range(len(ice) -1, -1, -1):
        x, y = ice[i]
        # NOTE(review): `dir` shadows the builtin of the same name.
        for dir in range(4):
            nx = x + direction[dir][0]
            ny = y + direction[dir][1]
            if arr[nx][ny] == 0:
                melt[x][y] += 1
        if melt[x][y] > 0:
            melt_co.append((x, y, i))
    # Apply all melting simultaneously after measuring every cell.
    for x, y, i in melt_co:
        arr[x][y] -= melt[x][y]
        if arr[x][y] <= 0:
            arr[x][y] = 0
            ice.pop(i)
        melt[x][y] = 0
print(ans)
26804643066 | # -*- coding: utf-8 -*-
import pymysql
import itertools
#search all of the entities from db and remove duplicated entries.
def pre_deal():
    """Load all identifier strings from imdb_entity and group duplicates.

    Returns an itertools.groupby iterator over the sorted, comma-split
    identifier lists.
    """
    db = pymysql.connect("localhost", "root", "302485", "imdb", charset='utf8')
    cursor = db.cursor()
    # BUG FIX: the original statement was "search identifiers from
    # imdb_entity", which is not valid SQL.
    search_sql = """SELECT identifiers FROM imdb_entity"""
    rows = ()
    try:
        cursor.execute(search_sql)
        # BUG FIX: fetch before the cursor is closed in `finally`; the
        # original called fetchall() after cursor.close(), which raises.
        rows = cursor.fetchall()
    except Exception as e:
        db.rollback()
        print(str(e))
    finally:
        cursor.close()
        db.close()
    identify_groups = []
    # BUG FIX: fetchall() yields one tuple per row; the identifier string is
    # column 0 (the original called .split() on the whole tuple).
    for (identify,) in rows:
        identify_groups.append(identify.split(","))
    identify_groups.sort()
    # groupby over the sorted lists collapses duplicated entries.
    id_distincts = itertools.groupby(identify_groups)
    return id_distincts
#search relationships between identifiers
def get_relation(identifiers):
    """Collect every directed relation found between each pair of identifiers."""
    count = len(identifiers)
    triples = []
    for i in range(0, count):
        for j in range(i + 1, count):
            # Query both directions; empty string means no relation found.
            triple_one = get_triple(identifiers[i], identifiers[j])
            triple_two = get_triple(identifiers[j], identifiers[i])
            if triple_one != '':
                triples.append(triple_one)
            if triple_two != '':
                triples.append(triple_two)
    return triples
def get_triple(identfier_one, identifier_two):
    """Ask the SPARQL endpoint for a predicate linking the two entities.

    Returns the local name of the predicate, or '' when none exists.
    """
    # BUG FIX: requests was used but never imported at module level.
    import requests
    url = 'http://192.168.0.196:9999/bigdata/namespace/wdq/sparql'
    query = """
    SELECT ?item_one ?predicate ?item_two
    WHERE
    {
      ?item_one ?predicate ?item_two.
      BIND(%s AS ?item_one).
      BIND(%s AS ?item_two).
      SERVICE wikibase:label { bd:serviceParam wikibase:label bd:serviceParam wikibase:language "[AUTO_LANGUAGE]". }
    }
    """ % (identfier_one, identifier_two)
    query = """
    SELECT ?item_one ?predicate ?item_two
    WHERE
    {
      ?item_one ?predicate ?item_two.
      BIND(%s AS ?item_one).
      BIND(%s AS ?item_two).
      SERVICE wikibase:label { bd:serviceParam wikibase:language "[AUTO_LANGUAGE]". }
    }
    """ % (identfier_one, identifier_two)
    r = requests.get(url, params={'format': 'json', 'query': query})
    data = r.json()
    identifier = ''
    bindings = data['results']['bindings']
    if bindings:
        # BUG FIX: the query binds ?item_one/?predicate/?item_two -- there is
        # no 'item' variable, so the original raised KeyError on any result.
        identifier = bindings[0]['predicate']['value'].split('/')[-1]
    return identifier
if __name__ == "__main__":
    # BUG FIX: this guard originally sat at the top of the module, before
    # pre_deal was defined, so running the script raised NameError.
    pre_deal()
| LYunCoder/imdb_analysis | subgraph_wikidata/construct_relations.py | construct_relations.py | py | 2,079 | python | en | code | 0 | github-code | 36 |
26755881531 | import random
class MT19937:
    """Pure-Python MT19937 (32-bit Mersenne Twister) pseudo-random generator."""
    # Standard MT19937 parameters (Matsumoto & Nishimura reference values).
    W, N, M, R = 32, 624, 397, 31
    A = 0x9908B0DF
    U, D = 11, 0xFFFFFFFF
    S, B = 7, 0x9D2C5680
    T, C = 15, 0xEFC60000
    L = 18
    F = 1812433253
    index = N + 1
    lower_mask = (1 << R) - 1
    # BUG FIX: the original used `(not lower_mask)`, which is the boolean
    # False (== 0), so upper_mask was always 0 and twist() mixed the state
    # words incorrectly.  The reference algorithm uses the bitwise complement
    # restricted to the lowest W bits.
    upper_mask = (~lower_mask) & ((1 << W) - 1)
    def __init__(self, seed):
        self.mt = [0] * self.N
        self.seed(seed)
    def seed(self, seed):
        """(Re)initialise the state array from a 32-bit integer seed."""
        self.mt[0] = seed
        self.index = self.N
        for i in range(1, self.index):
            self.mt[i] = (self.F * (self.mt[i - 1] ^ (self.mt[i - 1] >> (self.W - 2))) + i) & ((1 << self.W) - 1)
    def extract_number(self):
        """Return the next tempered 32-bit output, twisting when exhausted."""
        if self.index >= self.N:
            if self.index > self.N:
                # Generator used before seeding.
                raise Exception
            self.twist()
        y = self.mt[self.index]
        # Tempering transform.
        y ^= (y >> self.U) & self.D
        y ^= (y << self.S) & self.B
        y ^= (y << self.T) & self.C
        y ^= y >> self.L
        self.index += 1
        return y & ((1 << self.W) - 1)
    def twist(self):
        """Regenerate all N state words from the current state."""
        for i in range(self.N):
            x = (self.mt[i] & self.upper_mask) + (self.mt[(i + 1) % self.N] & self.lower_mask)
            x_a = x >> 1
            if x % 2 != 0:
                x_a = x_a ^ self.A
            self.mt[i] = self.mt[(i + self.M) % self.N] ^ x_a
        self.index = 0
def main():
    """Smoke test: print the first ten outputs for seed 8."""
    custom_rng = MT19937(8)
    for i in range(10):
        print(custom_rng.extract_number())
if __name__ == "__main__":
    main()
| dominicle8/cryptopals | 3_21.py | 3_21.py | py | 1,466 | python | en | code | 0 | github-code | 36 |
import sys
# Coefficients of a*x^2 + b*x + c = 0, taken from the command line.
# NOTE(review): fewer than three integer arguments raise IndexError/ValueError
# at import time -- confirm whether validation is wanted.
a = int(sys.argv[1])
b = int(sys.argv[2])
c = int(sys.argv[3])
def solve_equation(a, b, c):
    """Solve a*x^2 + b*x + c = 0 and return the roots as a newline-joined string.

    Returns "Нет корней" (no roots) when the discriminant is negative.
    Roots are truncated to int to match the expected output format.
    """
    d = b * b - 4 * a * c
    if d < 0:
        return ("Нет корней")
    # BUG FIX: the original computed `(-b + d ** 0.5) / 2 * a`, which divides
    # by 2 and then *multiplies* by a; the quadratic formula needs /(2*a).
    x1 = (-b + d ** 0.5) / (2 * a)
    x2 = (-b - d ** 0.5) / (2 * a)
    # When d == 0 both expressions give the single repeated root.
    return ("{0}\n{1}".format(int(x1), int(x2)))
if __name__ == "__main__":
    # Print the roots (or "Нет корней") for the command-line coefficients.
    print(solve_equation(a, b, c))
| AlexanderdeI/python_coursera | week_01/solution_03.py | solution_03.py | py | 463 | python | en | code | 0 | github-code | 36 |
# If two of the three input values are equal, print the remaining one;
# otherwise print 0.
a,b,c = (int(x) for x in input().split())
lis= [a,b,c]
# NOTE(review): `sum` shadows the builtin of the same name.
sum = a+b+c
for i in range(len(lis)-1):
    for j in range(i+1,len(lis)):
        if lis[i] == lis[j]:
            # Two equal values found: total minus both of them is the third.
            print(sum - (lis[i]*2))
            exit()
print(0)
| MasaIshi2001/atcoder | ABC/ABC203_1.py | ABC203_1.py | py | 226 | python | en | code | 0 | github-code | 36 |
27281502055 | """project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Import the include() function: from django.conf.urls import url, include
3. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.contrib import admin
from project import settings
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    # Root URLs are handled by the base app.
    url(r'', include('apps.base.urls', namespace='base')),
    url(r'^legoteka/', include('apps.legoteka.urls', namespace='legoteka')),
    url(r'^humanitarian_aid/', include('apps.aid.urls', namespace='humanitarian_aid')),
    url(r'^library/', include('apps.library.urls', namespace='library')),
    url(r'^financial/', include('apps.financial.urls', namespace='financial')),
    url(r'^api/', include('api.urls')),
    # NOTE(review): a dotted-string view reference; these were removed in
    # Django 1.10 -- confirm the project stays on 1.9 or import the view.
    url(r'^pages/(\S+)/$', 'apps.pages.views.pages_detail', name='pages_detail'),
    url(r'^redactor/', include('redactor.urls')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)  # serve uploaded media
| mikha1lov/headway | project/urls.py | urls.py | py | 1,515 | python | en | code | 0 | github-code | 36 |
8290193445 | from lxml import etree as ET
def parse_params_xmlfile(params_xml_file):
    """Read the <global> and <train> sections of a parameter XML file.

    Returns a flat dict keyed by parameter name; numeric fields are cast to
    float/int, string fields are returned as-is.
    """
    tree = ET.parse(params_xml_file)
    root = tree.getroot()

    def _float(section, tag):
        # All thresholds/rates are stored as element text.
        return float(section.find(tag).text)

    parameter = dict()
    global_parameter = root.find('global')
    for tag in ('rpn_nms_thresh', 'rpn_fg_iou_thresh', 'rpn_bg_iou_thresh',
                'box_nms_thresh', 'box_fg_iou_thresh', 'box_bg_iou_thresh'):
        parameter[tag] = _float(global_parameter, tag)
    input_parameter = root.find('train')
    parameter['batchsize'] = int(input_parameter.find('batchsize').text)
    parameter['num_workers'] = int(input_parameter.find('num_workers').text)
    parameter['learning_rate'] = _float(input_parameter, 'learning_rate')
    parameter['backbone'] = input_parameter.find('backbone').text
    parameter['test_dir'] = input_parameter.find('test_dir').text
    parameter['resume_from'] = input_parameter.find('resume_from').text
    # test_ratio is optional: an empty element yields None instead of a float.
    test_ratio_text = input_parameter.find('test_ratio').text
    parameter['test_ratio'] = float(test_ratio_text) if test_ratio_text else None
    parameter['save_log_path'] = input_parameter.find('save_log_path').text
    return parameter
def parse_params_xmlfile_test(params_xml_file):
parameter = dict()
from lxml import etree as ET
tree = ET.parse(params_xml_file)
root = tree.getroot()
global_parameter = root.find('global')
parameter['rpn_nms_thresh'] = float(global_parameter.find('rpn_nms_thresh').text)
parameter['rpn_fg_iou_thresh'] = float(global_parameter.find('rpn_fg_iou_thresh').text)
parameter['rpn_bg_iou_thresh'] = float(global_parameter.find('rpn_bg_iou_thresh').text)
parameter['box_nms_thresh'] = float(global_parameter.find('box_nms_thresh').text)
parameter['box_fg_iou_thresh'] = float(global_parameter.find('box_fg_iou_thresh').text)
parameter['box_bg_iou_thresh'] = float(global_parameter.find('box_bg_iou_thresh').text)
input_parameter = root.find('test')
parameter['batchsize'] = int(input_parameter.find('batchsize').text)
parameter['num_workers'] = int(input_parameter.find('num_workers').text)
parameter['gap'] = int(input_parameter.find('gap').text)
parameter['backbone'] = input_parameter.find('backbone').text
parameter['mask_threshold'] = float(input_parameter.find('mask_threshold').text)
parameter['nms_name'] = input_parameter.find('nms_name').text
parameter['iou_threshold'] = float(input_parameter.find('iou_threshold').text)
parameter['score_threshold'] = float(input_parameter.find('score_threshold').text)
return parameter | PauliKarl/shipdet | shipdet/datasets/parse.py | parse.py | py | 2,919 | python | en | code | 1 | github-code | 36 |
29976190660 | #!/usr/bin/env python3
import numpy as np
import matplotlib.pyplot as pl
from numba import autojit
import time
import sys
@autojit
def stochastic(t, eta, amplitude, frequency):
    """
    Create a time series of stochastically excited, damped oscillations for a
    given damping rate (eta), amplitude and frequency.
    From De Ridder et al. 2006.
    Usage:
    t - array: time stamps given in units of seconds
    eta - float: damping rate
    amplitude - float: amplitude of oscillations
    frequency - float: frequency of oscillations
    Author: grd349
    Edited by jsk389
    """
    # Compute cadence from time stamps
    dt = (t.max()-t.min()) / float(len(t))
    # Compute time between kicks for a given damping rate
    dtkick = 1.0 / eta / 100.0
    # If time between kicks is less than cadence set equal to cadence
    if dtkick < dt:
        dtkick = dt
    # Standard deviation of white noise component
    sigmae = amplitude * np.sqrt(eta * dtkick)
    N_noise = np.round((t.max() - t.min()) / dtkick + 1).astype(int)
    # Compute white noise components (one independent draw per kick).
    bnoise = sigmae * np.random.randn(N_noise)
    cnoise = sigmae * np.random.randn(N_noise)
    bn, cn = np.zeros(N_noise), np.zeros(N_noise)
    # Damped recursion on the quadrature amplitudes.
    # NOTE: at i == 0, bn[i-1] reads bn[-1] (the last, still-zero entry),
    # so the recursion effectively starts from zero.
    coeff = np.exp(-eta * dtkick)
    for i in range(N_noise):
        bn[i] = coeff * bn[i-1] + bnoise[i]
        cn[i] = coeff * cn[i-1] + cnoise[i]
    # Generate signal: n[i] selects the kick interval each time stamp falls in.
    N_time = len(t)
    output = np.zeros(N_time)
    n = np.floor(t / dtkick).astype(int)
    #output = np.exp(-eta * (t - (n*dtkick))) * (\
    #    bn * np.sin(2.0*np.pi*frequency*t) + \
    #    cn * np.cos(2.0*np.pi*frequency*t))
    for i in range(N_time):
        first = bn[n[i]] * np.sin(2.0 * np.pi * frequency * t[i])
        second = cn[n[i]] * np.cos(2.0 * np.pi * frequency * t[i])
        output[i] = np.exp(-eta * (t[i] - (n[i] * dtkick))) * \
            (first + second)
    return output
@autojit
def lorentzian(t, linewidth, amplitude, frequency):
    """
    Generate stochastic oscillations parameterised by the Lorentzian profile
    seen in the power spectrum (often easier to think in than damping rates).
    The damping rate eta relates to the linewidth via eta = pi * linewidth.
    Usage:
    t - array: time stamps
    linewidth - array: linewidth of Lorentzian profile
    amplitude - array: amplitude of Lorentzian
    frequency - array: central frequency of Lorentzian (Hertz)
    """
    damping_rate = np.pi * linewidth
    return stochastic(t, damping_rate, amplitude, frequency)
if __name__ == "__main__":
    # Quick example: generate a long synthetic time series and time it.
    cadence = 40.0                      # seconds per sample
    days = 100.0 * 1.0 * 73.0
    npts = days * 24.0 * 3600.0 / cadence
    linewidth = 1.0e-6
    amplitude = 100.0
    frequency = 200e-6
    # BUG FIX: np.linspace requires an integer sample count; passing the
    # float `npts` raises TypeError on modern NumPy.
    t = np.linspace(0, npts * cadence, int(npts))
    s = time.time()
    y = lorentzian(t, linewidth, amplitude, frequency)
    # BUG FIX: the original passed the values as extra print() arguments
    # instead of calling str.format, so the {0}/{1} placeholders never filled.
    print("Time taken for dataset of length {0} days is {1} s".format(
        int(days), time.time() - s))
| jsk389/Stochastic-Simulations | Oscillations/oscillations.py | oscillations.py | py | 3,024 | python | en | code | 1 | github-code | 36 |
24835307446 | import aiologger
from aiologger.handlers.streams import AsyncStreamHandler
from aiologger.handlers.files import AsyncFileHandler
import logging
class MyFormatter(logging.Formatter):
    """Minimal formatter producing "<created> - <name> - <level> - <message>"."""

    def format(self, record):
        # BUG FIX: use getMessage() so %-style lazy arguments passed to the
        # logging call are interpolated; record.msg alone is the raw template.
        return f"{record.created} - {record.name} - {record.levelname} - {record.getMessage()}"
def setup_async_logger():
    """Build the application's async logger with console and file outputs.

    Both handlers emit at DEBUG level and share the custom MyFormatter.
    """
    app_logger = aiologger.Logger.with_default_handlers(
        name="my_app",
        level=logging.DEBUG
    )
    formatter = MyFormatter()
    to_console = AsyncStreamHandler(level=logging.DEBUG)
    to_console.formatter = formatter
    to_file = AsyncFileHandler(
        filename="my_app.log",
        mode="a",
        encoding="utf-8"
    )
    to_file.formatter = formatter
    app_logger.add_handler(to_console)
    app_logger.add_handler(to_file)
    return app_logger
| bucin98/fast_api_coin_price | app/get_logger.py | get_logger.py | py | 830 | python | en | code | 0 | github-code | 36 |
15903331919 | """
CP1404/CP5632 Practical
A testing area for subject_reader.py
"""
FILENAME = "subject_data.txt"
def main():
    """Load the subject data file and display the parsed rows."""
    data = get_data()
    print(data)
def get_data(filename=None):
    """Read data from file formatted like: subject,lecturer,number of students.

    :param filename: file to read; defaults to the module-level FILENAME.
    :return: list of [subject, lecturer, number_of_students] field lists.
    """
    if filename is None:
        filename = FILENAME
    things = []
    # BUG FIX: the original built `things` but never returned it (so callers
    # got None), left debug prints in, and did not close the file on error.
    with open(filename) as input_file:
        for line in input_file:
            parts = line.strip().split(',')
            things.append(parts)
    return things
main()
| azariahpundari1/cp1404practicals | prac_04/subject_reader_test.py | subject_reader_test.py | py | 517 | python | en | code | 0 | github-code | 36 |
21301309349 | from celery.decorators import task
from tracker import celery_app
from api.models import User, Tracker
from core_listing_scraper import get_current_listings, make_dict
from mailgun_email_api.mailgun_email_api import send_confirmation_message, send_email_for_new_or_updated_listings
@task(name='create_tracker')
def create_tracker(user_email, results_page_url):
    """Celery task: register a tracker for a listings results page.

    Creates the User row if needed, snapshots the current listings for the
    URL onto a new Tracker, and emails the user a confirmation containing
    the initial listings.
    """
    user, created = User.objects.get_or_create(email=user_email)
    user.save()
    data = get_current_listings(results_page_url)
    tracker = Tracker(user=user, results_page_url=results_page_url, listings=data)
    tracker.save()
    # send initial email with current listings
    send_confirmation_message(user_email, results_page_url, data)
@celery_app.task(name='api.update_trackers')
def update_trackers():
    """Celery task: re-scrape every tracker and email users about changes.

    For each tracker, compares the stored listings snapshot with a fresh
    scrape; any new or modified listings are emailed to the owner, then the
    snapshot is replaced with the fresh one.
    """
    users = User.objects.all()
    for user in users:
        trackers = user.tracker_set.all()
        for tracker in trackers:
            results_page_url = tracker.results_page_url
            outdated_listings = tracker.listings
            current_listings = get_current_listings(results_page_url)
            new_or_updated_listings = get_new_or_updated_listings(outdated_listings, current_listings)
            if new_or_updated_listings:
                send_email_for_new_or_updated_listings(user.email, results_page_url, new_or_updated_listings)
            # Persist the fresh snapshot as the new comparison baseline.
            tracker.listings = current_listings
            tracker.save()
def get_new_or_updated_listings(outdated_listings, current_listings):
    """Return {craig_id: make_dict(listing)} for listings that are new or changed.

    Listings present in both snapshots with no tracked-field changes are
    skipped.
    """
    new_or_updated_listings = {}
    # PORTABILITY FIX: dict.iteritems() does not exist on Python 3; .items()
    # behaves equivalently here on both Python 2 and 3.
    for craig_id, current_listing in current_listings.items():
        outdated_listing = outdated_listings.get(craig_id)
        if listing_did_not_exist(outdated_listing):
            new_or_updated_listings[craig_id] = make_dict(current_listing)
        elif listing_has_been_updated(outdated_listing, current_listing):
            new_or_updated_listings[craig_id] = make_dict(current_listing)
        # otherwise the listing is unchanged; nothing to record
    return new_or_updated_listings
def listing_has_been_updated(outdated_listing, current_listing):
    """True when any tracked field differs between the two listing snapshots."""
    tracked_fields = ('title', 'price', 'absolute_url', 'last_modified_at')
    return any(
        outdated_listing.get(field) != current_listing.get(field)
        for field in tracked_fields
    )
def listing_did_not_exist(outdated_listing):
    """True when there was no previous snapshot for this listing."""
    # IDIOM FIX: identity comparison with None (PEP 8) instead of `== None`.
    return outdated_listing is None
| brianleungwh/tracker | api/tasks.py | tasks.py | py | 2,626 | python | en | code | 0 | github-code | 36 |
21420497711 | import torch
import torch.nn as nn
import torch.nn.functional as F
## Defining the network
Hidden_layer = 64        # base channel count; deeper levels use 2x and 4x
Conv_kernel = 3          # spatial kernel size
Conv_kerenl_time = 3     # temporal kernel size (name keeps the original typo)
Padd_space = 1           # spatial padding, preserves size with kernel 3
Padd_time = 1            # temporal padding, preserves size with kernel 3
drop_out_level = 0.15    # dropout probability used throughout the network
Bias = True              # whether conv layers carry a bias term
class Net(nn.Module):
    """3D U-Net-style encoder/decoder with two pooling levels.

    Two conv+BN blocks per level, max-pooled twice on the way down and
    upsampled with transposed convolutions plus skip connections on the way
    up.  forward() returns the input plus a learned correction (residual
    output).  Module names are fixed by add_module calls, so renaming them
    would break existing state_dict checkpoints.
    """
    def __init__(self):
        super(Net,self).__init__()
        #down layer 1: 1 -> Hidden_layer channels
        self.conv_DL1 = torch.nn.Sequential()
        self.conv_DL1.add_module("Conv_DL1",nn.Conv3d(1,Hidden_layer,(Conv_kernel,Conv_kernel,Conv_kerenl_time), padding = (Padd_space,Padd_space,Padd_time), stride = 1,bias = Bias))
        self.conv_DL1.add_module("BN1_DL1",nn.BatchNorm3d(Hidden_layer))
        self.DropOut1 = nn.Dropout3d(p=drop_out_level,inplace=True)
        self.conv_DL1_v2 = torch.nn.Sequential()
        self.conv_DL1_v2.add_module("Conv_DL1_v2",nn.Conv3d(Hidden_layer,Hidden_layer,(Conv_kernel,Conv_kernel,Conv_kerenl_time), padding = (Padd_space,Padd_space,Padd_time), stride = 1,bias = Bias))
        self.conv_DL1_v2.add_module("BN1_DL1_v2",nn.BatchNorm3d(Hidden_layer))
        self.DropOut2 = nn.Dropout3d(p=drop_out_level,inplace=True)
        # max pooling layer: halves every dimension
        self.conv_MP1 = torch.nn.Sequential()
        self.conv_MP1.add_module("Max Pool 1",nn.MaxPool3d((2,2,2),stride = (2,2,2)))
        #down layer 2: Hidden_layer -> 2x channels
        self.conv_DL2 = torch.nn.Sequential()
        self.conv_DL2.add_module("Conv_DL2",nn.Conv3d(Hidden_layer,Hidden_layer*2,(Conv_kernel,Conv_kernel,Conv_kerenl_time), padding = (Padd_space,Padd_space,Padd_time), stride = 1,bias = Bias))
        self.conv_DL2.add_module("BN1_DL2",nn.BatchNorm3d(Hidden_layer*2))
        self.DropOut3 = nn.Dropout3d(p=drop_out_level,inplace=True)
        self.conv_DL2_v2 = torch.nn.Sequential()
        self.conv_DL2_v2.add_module("Conv_DL2_v2",nn.Conv3d(Hidden_layer*2,Hidden_layer*2,(Conv_kernel,Conv_kernel,Conv_kerenl_time), padding = (Padd_space,Padd_space,Padd_time), stride = 1,bias = Bias))
        self.conv_DL2_v2.add_module("BN1_DL2_v2",nn.BatchNorm3d(Hidden_layer*2))
        self.DropOut4 = nn.Dropout3d(p=drop_out_level,inplace=True)
        # max pooling layer
        self.conv_MP2 = torch.nn.Sequential()
        self.conv_MP2.add_module("Max Pool 2",nn.MaxPool3d((2,2,2),stride = (2,2,2)))
        #bottleneck ("down layer 3"): 2x -> 4x channels
        self.conv_DL3 = torch.nn.Sequential()
        self.conv_DL3.add_module("Conv_DL3",nn.Conv3d(Hidden_layer*2,Hidden_layer*4,(Conv_kernel,Conv_kernel,Conv_kerenl_time), padding = (Padd_space,Padd_space,Padd_time), stride = 1,bias = Bias))
        self.conv_DL3.add_module("BN1_DL3",nn.BatchNorm3d(Hidden_layer*4))
        self.DropOut5 = nn.Dropout3d(p=drop_out_level,inplace=True)
        self.conv_DL3_v2 = torch.nn.Sequential()
        self.conv_DL3_v2.add_module("Conv_DL3_v2",nn.Conv3d(Hidden_layer*4,Hidden_layer*4,(Conv_kernel,Conv_kernel,Conv_kerenl_time), padding = (Padd_space,Padd_space,Padd_time), stride = 1,bias = Bias))
        self.conv_DL3_v2.add_module("BN1_DL3_v2",nn.BatchNorm3d(Hidden_layer*4))
        self.DropOut6 = nn.Dropout3d(p=drop_out_level,inplace=True)
        # Conv Transpose: upsample 4x -> 2x channels
        self.convT1 = nn.ConvTranspose3d(Hidden_layer*4,Hidden_layer*2,(2,2,2),stride = (2,2,2))
        #up layer 1 (input is skip + upsampled = 4x channels)
        self.conv_UP1 = torch.nn.Sequential()
        self.conv_UP1.add_module("Conv_UP1",nn.Conv3d(Hidden_layer*4,Hidden_layer*2,(Conv_kernel,Conv_kernel,Conv_kerenl_time), padding = (Padd_space,Padd_space,Padd_time), stride = 1,bias = Bias))
        self.conv_UP1.add_module("BN1_UP1",nn.BatchNorm3d(Hidden_layer*2))
        self.DropOut7 = nn.Dropout3d(p=drop_out_level,inplace=True)
        self.conv_UP1_v2 = torch.nn.Sequential()
        self.conv_UP1_v2.add_module("Conv_UP1_v2",nn.Conv3d(Hidden_layer*2,Hidden_layer*2,(Conv_kernel,Conv_kernel,Conv_kerenl_time), padding = (Padd_space,Padd_space,Padd_time), stride = 1,bias = Bias))
        self.conv_UP1_v2.add_module("BN1_UP1_v2",nn.BatchNorm3d(Hidden_layer*2))
        self.DropOut8 = nn.Dropout3d(p=drop_out_level,inplace=True)
        # Conv Transpose: upsample 2x -> 1x channels
        self.convT2 = nn.ConvTranspose3d(Hidden_layer*2,Hidden_layer,(2,2,2),stride = (2,2,2))
        #up layer 2 (input is skip + upsampled = 2x channels)
        self.conv_UP2 = torch.nn.Sequential()
        self.conv_UP2.add_module("Conv_UP2",nn.Conv3d(Hidden_layer*2,Hidden_layer,(Conv_kernel,Conv_kernel,Conv_kerenl_time), padding = (Padd_space,Padd_space,Padd_time), stride = 1,bias = Bias))
        self.conv_UP2.add_module("BN1_UP2",nn.BatchNorm3d(Hidden_layer))
        self.DropOut9 = nn.Dropout3d(p=drop_out_level,inplace=True)
        self.conv_UP2_v2 = torch.nn.Sequential()
        self.conv_UP2_v2.add_module("Conv_UP2_v2",nn.Conv3d(Hidden_layer,Hidden_layer,(Conv_kernel,Conv_kernel,Conv_kerenl_time), padding = (Padd_space,Padd_space,Padd_time), stride = 1,bias = Bias))
        self.conv_UP2_v2.add_module("BN1_UP2_v2",nn.BatchNorm3d(Hidden_layer))
        self.DropOut10 = nn.Dropout3d(p=drop_out_level,inplace=True)
        #Final layer: 1x1x1 conv back to a single channel
        self.conv_final = torch.nn.Sequential()
        self.conv_final.add_module("Conv Final", nn.Conv3d(Hidden_layer,1,(1,1,1),padding = (0,0,0),stride = 1,bias = Bias))
    def forward(self,x):
        """Encoder/decoder pass with skip connections; returns x + correction."""
        x_down1 = F.relu(self.DropOut1(self.conv_DL1.forward(x)))
        x_down1_v2 = F.relu(self.DropOut2(self.conv_DL1_v2.forward(x_down1)))
        x_MaxPool = self.conv_MP1.forward(x_down1_v2)
        x_down2 = F.relu(self.DropOut3(self.conv_DL2.forward(x_MaxPool)))
        x_down2_v2 = F.relu(self.DropOut4(self.conv_DL2_v2.forward(x_down2)))
        x_MaxPool_v2 = self.conv_MP2.forward(x_down2_v2)
        x_down3 = F.relu(self.DropOut5(self.conv_DL3.forward(x_MaxPool_v2)))
        x_down3_v2 = F.relu(self.DropOut6(self.conv_DL3_v2.forward(x_down3)))
        # output_size pins the transposed conv to the skip tensor's shape.
        x_up1_ConvT = self.convT1(x_down3_v2,output_size = x_down2_v2.size())
        x_down2_up1_stack = torch.cat((x_down2_v2,x_up1_ConvT),1)
        x_up1 = F.relu(self.DropOut7(self.conv_UP1.forward(x_down2_up1_stack)))
        x_up1_v2 = F.relu(self.DropOut8(self.conv_UP1_v2.forward(x_up1)))
        x_up2_ConvT = self.convT2(x_up1_v2,output_size = x_down1_v2.size())
        x_down1_up2_stack = torch.cat((x_down1_v2,x_up2_ConvT),1)
        x_up2 = F.relu(self.DropOut9(self.conv_UP2.forward(x_down1_up2_stack)))
        x_up2_v2 = F.relu(self.DropOut10(self.conv_UP2_v2.forward(x_up2)))
        # Residual formulation: predict a correction on top of the input.
        output = x+self.conv_final.forward(x_up2_v2)
        return output
return output | HMS-CardiacMR/DRAPR | InLineIntegration/network_arch.py | network_arch.py | py | 6,361 | python | en | code | 15 | github-code | 36 |
class Solution:
    def findWords(self, words: List[str]) -> List[str]:
        """Return the words typeable using letters from a single keyboard row,
        preserving input order (LeetCode 500).

        IDIOM: replaces the manual per-character loop with a set subset test.
        """
        rows = (
            set('qwertyuiopQWERTYUIOP'),
            set('asdfghjklASDFGHJKL'),
            set('zxcvbnmZXCVBNM'),
        )
        ans = []
        for word in words:
            # Locate the row holding the first letter, then check the rest.
            row = next(r for r in rows if word[0] in r)
            if set(word) <= row:
                ans.append(word)
        return ans
23458756442 | import logging
import os.path
import schedule
import time
import threading
import requests
import ip_provider
# Central server location; host overridable via the KIOSK_SERVER env var.
SERVER_ADDRESS = "http://{}:8080".format(os.getenv("KIOSK_SERVER", "localhost"))
CONNECTOR_SERVICE_ADDRESS = "/kiosksConnector"   # keep-alive endpoint path
AUTHENTICATION_HEADER_KEY = "Authentication"     # carries the JWT
SERVICE_CALL_INTERVAL_IN_SECONDS = 30            # keep-alive cadence
SENT_FROM_IP_HEADER_KEY = 'X-From-Ip'            # reports this kiosk's address
def worker_job(jwt, controller_service_port):
    """Blocking loop: schedule the periodic keep-alive call and run forever.

    Intended for a background thread (see start_status_update_worker);
    this function never returns.
    """
    schedule.every(SERVICE_CALL_INTERVAL_IN_SECONDS).seconds.do(call_create_method, jwt=jwt,
                                                                controller_service_port=controller_service_port)
    while True:
        schedule.run_pending()
        time.sleep(1)
def start_status_update_worker(jwt, controller_service_port):
    """Launch worker_job on a background thread and return immediately."""
    threading.Thread(target=worker_job, args=(jwt, controller_service_port)).start()
def call_create_method(jwt, controller_service_port):
    """POST a keep-alive to the server so it refreshes our last-online time.

    Sends the JWT plus this kiosk's ip:port; a 202 is the expected success
    status.  NOTE(review): only ConnectionError is caught -- timeouts and
    other request failures propagate; confirm that is intended.
    """
    try:
        session = requests.Session()
        session.headers.update({AUTHENTICATION_HEADER_KEY: jwt})
        session.headers.update({SENT_FROM_IP_HEADER_KEY: ip_provider.get_ip() + ":" + str(controller_service_port)})
        response = session.post(SERVER_ADDRESS + CONNECTOR_SERVICE_ADDRESS)
        if response.status_code != 202:
            logging.warning(
                "Error status code returned while updating last online time, status code {}".format(
                    response.status_code))
        else:
            logging.info("Updating last online time finished successfully")
        session.close()
    except requests.exceptions.ConnectionError:
        logging.warning("Connection error while updating last online time")
| z13z/Kiosks | kiosk-worker/alive.py | alive.py | py | 1,645 | python | en | code | 0 | github-code | 36 |
# Standard recipe: one batch makes 12 sugar cookies.
standardCookieBatch = 12.0
standardCupsOfSugar = .33
standardCupsOfButter = .50
standardCupsOfFlour = 1.00
standardCostofSugar = .10
standardCostofButter = .25
standardCostofFlour = .10
# Prompt user for how many cookies to scale the recipe to.
userNumberOfCookies = float(input("How many cookies do you want to make?: "))
# Scale amounts and costs by the ratio of requested cookies to a standard batch
# (hoisted: the original recomputed this ratio six times).
batchRatio = userNumberOfCookies / standardCookieBatch
modifiedAmountOfSugar = batchRatio * standardCupsOfSugar
modifiedAmountOfButter = batchRatio * standardCupsOfButter
modifiedAmountOfFlour = batchRatio * standardCupsOfFlour
modifiedCostofSugar = batchRatio * standardCostofSugar
modifiedCostofButter = batchRatio * standardCostofButter
modifiedCostofFlour = batchRatio * standardCostofFlour
totalCost = modifiedCostofSugar + modifiedCostofButter + modifiedCostofFlour
# Print the amount and cost of each ingredient, and the total cost.
print("For " + format(userNumberOfCookies, ".0f") + " sugar cookies " + "you need " '\n'
      + format(modifiedAmountOfSugar, ".2f") + " cups of sugar " + '\n'
      "at a cost of $" + format(modifiedCostofSugar, ".2f") + '\n'
      "and " + format(modifiedAmountOfButter, ".2f") + " cups of butter " '\n'
      "at a cost of $" + format(modifiedCostofButter, ".2f") + '\n'
      "and " + format(modifiedAmountOfFlour, ".2f") + " cups of flour " + '\n'
      "at a cost of $" + format(modifiedCostofFlour, ".2f") + '\n'
      "your total cost will be $" + format(totalCost, ".2f") )
# Blank space between sections.
print('''

''')
# Print the detailed recipe link.
print("For detailed recipe instructions go to" + '\n'
      "http://www.delish.com/cooking/recipe-ideas/recipes/a45306/3-ingredient-sugar-cookies/")
print('''

''')
# Print a funny message about the user's feelings toward cookies.
# BUG FIX: corrected user-facing typos ("enojoy" -> "enjoy",
# "nuetral" -> "neutral").
if userNumberOfCookies >= 96.0:
    print("You must really love cookies like even more than unicorns!")
elif userNumberOfCookies >= 48.0:
    print("You must love cookies like alot!")
elif userNumberOfCookies >= 24.0:
    print("You must at least enjoy cookies somewhat.")
elif userNumberOfCookies >= 12.0:
    print("You must feel somewhat neutral regarding cookies.")
else:
    print("You hate cookies.")
| FPU-CIS03/CIS312-Project2 | Mini-Project_LukeGiffen_2.py | Mini-Project_LukeGiffen_2.py | py | 2,377 | python | en | code | 0 | github-code | 36 |
28779315671 | """
Game of Life
author: Manny egalli64@gmail.com
info: http://thisthread.blogspot.com/2017/01/codeeval-game-of-life.html
https://www.codeeval.com/open_challenges/161/
"""
import sys
STEPS = 10    # number of generations simulated
ALIVE = '*'   # grid cell markers used in input and output
DEAD = '.'
def local_population(matrix, i, j):
    """Count the ALIVE cells among the 8 neighbours of cell (i, j)."""
    result = 0
    for row in [i-1, i, i+1]:
        for col in [j-1, j, j+1]:
            # BUG FIX: the column bound was len(matrix) (the row count), which
            # is wrong for non-square grids; use the row's own width.
            if 0 <= row < len(matrix) and 0 <= col < len(matrix[row]) \
                    and not (row == i and col == j) \
                    and matrix[row][col] == ALIVE:
                result += 1
    return result
def next_iteration(matrix):
    """Apply Conway's rules once and return the next generation as a new grid."""
    successor = []
    for i in range(len(matrix)):
        new_row = []
        for j in range(len(matrix[0])):
            neighbours = local_population(matrix, i, j)
            if matrix[i][j] == ALIVE:
                # Survival requires 2 or 3 live neighbours.
                new_row.append(ALIVE if 1 < neighbours < 4 else DEAD)
            else:
                # A dead cell with exactly 3 live neighbours is born.
                new_row.append(ALIVE if neighbours == 3 else DEAD)
        successor.append(new_row)
    return successor
def solution(data):
    """Run STEPS generations on the grid encoded in `data` and render the result."""
    grid = [list(row) for row in data.rstrip().split('\n')]
    for _ in range(STEPS):
        grid = next_iteration(grid)
    rendered = [''.join(row) for row in grid]
    return '\n'.join(rendered) + '\n'
if __name__ == '__main__':
    if len(sys.argv) == 2:
        # FIX: `with` guarantees the file is closed even if solution() raises;
        # the original left the handle open on any exception.
        with open(sys.argv[1], 'r') as data_file:
            print(solution(data_file.read()))
    else:
        print('Data filename expected as argument!')
| egalli64/pythonesque | ce/c161.py | c161.py | py | 1,459 | python | en | code | 17 | github-code | 36 |
9044071393 | """Sensor platform for Ambrogio Robot."""
from __future__ import annotations
from homeassistant.core import HomeAssistant
from homeassistant.const import (
ATTR_LOCATION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
)
from homeassistant.components.device_tracker import SOURCE_TYPE_GPS
from homeassistant.components.device_tracker.config_entry import TrackerEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .const import (
DOMAIN,
)
from .coordinator import AmbrogioDataUpdateCoordinator
from .entity import AmbrogioRobotEntity
# One tracker entity per robot: its GPS position shown on the map.
ENTITY_DESCRIPTIONS = (
    EntityDescription(
        key="location",
        name="Robot Location",
        icon="mdi:robot-mower",
        translation_key="location",
    ),
)
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_devices: AddEntitiesCallback
):
    """Set up the device_tracker platform: one entity per robot/description."""
    coordinator: AmbrogioDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]
    async_add_devices(
        [
            AmbrogioRobotDeviceTracker(
                coordinator=coordinator,
                entity_description=entity_description,
                robot_imei=robot_imei,
                robot_name=robot_name,
            )
            for robot_imei, robot_name in coordinator.robots.items()
            for entity_description in ENTITY_DESCRIPTIONS
        ],
        # Fetch fresh state before the entities first appear.
        update_before_add=True,
    )
class AmbrogioRobotDeviceTracker(AmbrogioRobotEntity, TrackerEntity):
    """Ambrogio Robot Device Tracker: exposes the mower's GPS position."""
    def __init__(
        self,
        coordinator: AmbrogioDataUpdateCoordinator,
        entity_description: EntityDescription,
        robot_imei: str,
        robot_name: str,
    ) -> None:
        """Initialize the tracker entity for one robot."""
        super().__init__(
            coordinator=coordinator,
            robot_imei=robot_imei,
            robot_name=robot_name,
            entity_type="device_tracker",
            entity_key=entity_description.key,
        )
        self.entity_description = entity_description
    @property
    def latitude(self) -> float | None:
        """Return latitude value of the device."""
        # NOTE(review): the truthiness check maps a coordinate of exactly
        # 0.0 to None as well -- confirm that is intended.
        location = self._get_attribute(ATTR_LOCATION, {}).get(ATTR_LATITUDE, None)
        return location if location else None
    @property
    def longitude(self) -> float | None:
        """Return longitude value of the device."""
        # Same truthiness caveat as latitude above.
        location = self._get_attribute(ATTR_LOCATION, {}).get(ATTR_LONGITUDE, None)
        return location if location else None
    @property
    def source_type(self):
        """Return the source type, eg gps or router, of the device."""
        return SOURCE_TYPE_GPS
    @property
    def device_class(self):
        """Return Device Class (none for a plain GPS tracker)."""
        return None
| sHedC/homeassistant-ambrogio | custom_components/ambrogio_robot/device_tracker.py | device_tracker.py | py | 2,892 | python | en | code | 2 | github-code | 36 |
10762317060 | from action_msgs.msg import GoalStatus
import rclpy
from rclpy.action import ActionClient
from rclpy.node import Node
from handy_msgs.action import Nav
from nav_msgs.msg import Path
from geometry_msgs.msg import PoseStamped
class MinimalActionClient(Node):
    """ROS 2 node that sends a waypoint path to the '/navigation' action
    server and logs progress feedback until the goal finishes."""
    def __init__(self):
        super().__init__('nav_instructor')
        self._action_client = ActionClient(self, Nav, '/navigation')
        # Index of the last waypoint reported reached; None until first feedback.
        self.at_point = None
    def goal_response_callback(self, future):
        """Handle the server's accept/reject decision for the sent goal."""
        goal_handle = future.result()
        if not goal_handle.accepted:
            self.get_logger().info('Goal rejected :(')
            return
        self.get_logger().info('Goal accepted :)')
        self._get_result_future = goal_handle.get_result_async()
        self._get_result_future.add_done_callback(self.get_result_callback)
    def feedback_callback(self, feedback):
        """Log each newly reached waypoint and its reported position."""
        # Initialise on the first message, then only log when the index changes.
        self.at_point = feedback.feedback.wp_reached if self.at_point is None else self.at_point
        if feedback.feedback.wp_reached != self.at_point:
            self.at_point = feedback.feedback.wp_reached
            self.get_logger().info(f'Point >> {feedback.feedback.wp_reached} << reached')
            self.get_logger().info(f'Latitude: {feedback.feedback.latitude}, Longitude: {feedback.feedback.longitude}')
    def get_result_callback(self, future):
        """Log the final result, then shut down rclpy (ends the spin in main)."""
        result = future.result().result
        status = future.result().status
        if status == GoalStatus.STATUS_SUCCEEDED:
            self.get_logger().info('Goal succeeded! Result: {0}'.format(result.plan_result))
        else:
            self.get_logger().info('Goal failed with status: {0}'.format(status))
        rclpy.shutdown()
    def send_goal(self, pose_array):
        """Build a nav_msgs/Path from *pose_array* and send it as a Nav goal.

        Each element of *pose_array* is a 7-element sequence
        [x, y, z, qx, qy, qz, qw] expressed in the 'map' frame.
        """
        self.get_logger().info('Waiting for action server...')
        self._action_client.wait_for_server()
        goal_msg = Nav.Goal()
        path_msg = Path()
        path_msg.header.frame_id = 'map'
        path_msg.header.stamp = self.get_clock().now().to_msg()
        # Add poses from the pose array
        for pose_data in pose_array:
            pose = PoseStamped()
            pose.header.frame_id = 'map'
            pose.pose.position.x = pose_data[0]
            pose.pose.position.y = pose_data[1]
            pose.pose.position.z = pose_data[2]
            pose.pose.orientation.x = pose_data[3]
            pose.pose.orientation.y = pose_data[4]
            pose.pose.orientation.z = pose_data[5]
            pose.pose.orientation.w = pose_data[6]
            path_msg.poses.append(pose)
        goal_msg.initial_path = path_msg
        self.get_logger().info('Sending goal request...')
        self._send_goal_future = self._action_client.send_goal_async(
            goal_msg,
            feedback_callback=self.feedback_callback)
        self._send_goal_future.add_done_callback(self.goal_response_callback)
def main(args=None):
    """Send a fixed rectangular waypoint loop and spin until shutdown."""
    rclpy.init(args=args)
    action_client = MinimalActionClient()
    # Waypoints as [x, y, z, qx, qy, qz, qw]: a 5 x 2 rectangle that starts
    # and ends at the origin, identity orientation throughout.
    poses = [
        [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
        [5.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
        [5.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0],
        [0.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0],
        [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0],
    ]
    action_client.send_goal(poses)
    # Spins until get_result_callback calls rclpy.shutdown().
    rclpy.spin(action_client)
if __name__ == '__main__':
main() | bresilla/webo | webots_ros2_pioneer3at/webots_ros2_pioneer3at/path_server/instruct.py | instruct.py | py | 3,278 | python | en | code | 0 | github-code | 36 |
21428748001 |
import util.utilcube as utilcube
import util.spicube as spicube
import numpy as np
import time
import itertools
class blocks:
    """LED-cube animation that cycles every gradient colour across all LEDs."""

    def __init__(self):
        # Flag polled by the animation loop; set via exit() to stop it.
        self.ex = False

    def exit(self):
        """Request that a running animation loop terminate."""
        self.ex = True

    def full_color_change(self):
        """Cycle through the gradient palette, one colour per second,
        until exit() is called; returns "exit" on termination."""
        palette = utilcube.get_grad_array()
        while True:
            for step in palette:
                red, green, blue = utilcube.grad_to_rgb(step)
                frame = utilcube.alle(red, green, blue)
                spicube.schreiben(frame)
                time.sleep(1)
                if self.ex:
                    return "exit"
| ThomasMoellerR/11_02_rpi_cube | animations/blocks.py | blocks.py | py | 550 | python | en | code | 0 | github-code | 36 |
35713960656 | from mercurial.i18n import _
from mercurial.node import nullid, short
from mercurial import commands, cmdutil, hg, util, url, error
from mercurial.lock import release
def fetch(ui, repo, source='default', **opts):
    '''pull changes from a remote repository, merge new changes if needed.
    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository.
    If the pulled changes add a new branch head, the head is
    automatically merged, and the result of the merge is committed.
    Otherwise, the working directory is updated to include the new
    changes.
    When a merge occurs, the newly pulled changes are assumed to be
    "authoritative". The head of the new changes is used as the first
    parent, with local changes as the second. To switch the merge
    order, use --switch-parent.
    See :hg:`help dates` for a list of formats valid for -d/--date.
    Returns 0 on success.
    '''
    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)
    # Refuse to run unless the working dir sits at its branch tip and no
    # uncommitted merge is in progress.
    parent, p2 = repo.dirstate.parents()
    branch = repo.dirstate.branch()
    branchnode = repo.branchtags().get(branch)
    if parent != branchnode:
        raise util.Abort(_('working dir not at branch tip '
                           '(use "hg update" to check out branch tip)'))
    if p2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))
    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        mod, add, rem, del_ = repo.status()[:4]
        if mod or add or rem:
            raise util.Abort(_('outstanding uncommitted changes'))
        if del_:
            raise util.Abort(_('working directory is missing some files'))
        # Only topological heads (changesets without children) count here.
        bheads = repo.branchheads(branch)
        bheads = [head for head in bheads if len(repo[head].children()) == 0]
        if len(bheads) > 1:
            raise util.Abort(_('multiple heads in this branch '
                               '(use "hg heads ." and "hg merge" to merge)'))
        other = hg.repository(hg.remoteui(repo, opts),
                              ui.expandpath(source))
        ui.status(_('pulling from %s\n') %
                  url.hidepassword(ui.expandpath(source)))
        revs = None
        if opts['rev']:
            try:
                revs = [other.lookup(rev) for rev in opts['rev']]
            except error.CapabilityError:
                err = _("Other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise util.Abort(err)
        # Are there any changes at all?
        modheads = repo.pull(other, heads=revs)
        if modheads == 0:
            return 0
        # Is this a simple fast-forward along the current branch?
        newheads = repo.branchheads(branch)
        newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
        if len(newheads) == 1:
            if newchildren[0] != parent:
                return hg.clean(repo, newchildren[0])
            else:
                return 0
        # Are there more than one additional branch heads?
        newchildren = [n for n in newchildren if n != parent]
        newparent = parent
        if newchildren:
            newparent = newchildren[0]
            hg.clean(repo, newparent)
        newheads = [n for n in newheads if n != newparent]
        if len(newheads) > 1:
            # Too many heads to auto-merge; leave it to the user.
            ui.status(_('not merging with %d other new branch heads '
                        '(use "hg heads ." and "hg merge" to merge them)\n') %
                      (len(newheads) - 1))
            return 1
        # Otherwise, let's merge.
        err = False
        if newheads:
            # By default, we consider the repository we're pulling
            # *from* as authoritative, so we merge our changes into
            # theirs.
            if opts['switch_parent']:
                firstparent, secondparent = newparent, newheads[0]
            else:
                firstparent, secondparent = newheads[0], newparent
            ui.status(_('updating to %d:%s\n') %
                      (repo.changelog.rev(firstparent),
                       short(firstparent)))
            hg.clean(repo, firstparent)
            ui.status(_('merging with %d:%s\n') %
                      (repo.changelog.rev(secondparent), short(secondparent)))
            err = hg.merge(repo, secondparent, remind=False)
        if not err:
            # we don't translate commit messages
            message = (cmdutil.logmessage(opts) or
                       ('Automated merge with %s' %
                        url.removeauth(other.url())))
            editor = cmdutil.commiteditor
            if opts.get('force_editor') or opts.get('edit'):
                editor = cmdutil.commitforceeditor
            n = repo.commit(message, opts['user'], opts['date'], editor=editor)
            ui.status(_('new changeset %d:%s merges remote changes '
                        'with local\n') % (repo.changelog.rev(n),
                                           short(n)))
        return err
    finally:
        # Always release both locks, innermost first.
        release(lock, wlock)
# Mercurial command table: registers "hg fetch [SOURCE]" with its options
# (pull specific revisions, edit the commit message, switch merge parent
# order) plus the standard commit and remote option groups.
cmdtable = {
    'fetch':
        (fetch,
         [('r', 'rev', [],
           _('a specific revision you would like to pull'), _('REV')),
          ('e', 'edit', None, _('edit commit message')),
          ('', 'force-editor', None, _('edit commit message (DEPRECATED)')),
          ('', 'switch-parent', None, _('switch parents when merging')),
         ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
         _('hg fetch [SOURCE]')),
}
| helloandre/cr48 | bin/mercurial-1.7.5/hgext/fetch.py | fetch.py | py | 5,611 | python | en | code | 41 | github-code | 36 |
22524801892 | from .extentions import (
login_manager,
db,
moment,
bootstrap,
avatarTeam,
avatarUser,
coverPost,
imgTeam,
coverUser,
commonImage,
ckeditor,
nav,
mail
)
from flask_uploads import patch_request_class, configure_uploads
from .config import config
from flask import Flask, redirect, url_for, render_template, flash
from flask_login import current_user
from .tools.photo import resize
import logging
def create_app(config_name):
    """Application factory.

    Creates and configures the Flask instance, initialises all extensions,
    registers the blueprints and top-level routes, and installs file
    logging and error handlers.

    :param config_name: key into ``config`` selecting the settings class
    :return: the configured Flask application
    """
    #create instance
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)
    #initial extention with flask instance
    db.init_app(app)
    moment.init_app(app)
    bootstrap.init_app(app)
    login_manager.init_app(app)
    ckeditor.init_app(app)
    nav.init_app(app)
    mail.init_app(app)
    # image upload config
    configure_uploads(app, (avatarUser, avatarTeam, coverPost, imgTeam, coverUser, commonImage))
    patch_request_class(app, 10*1024*1024)
    #register blueprint
    from .auth import auth
    app.register_blueprint(auth)
    from .user import user
    app.register_blueprint(user)
    from .admin import admin
    app.register_blueprint(admin)
    from .team import team
    app.register_blueprint(team)
    from .pay import pay
    app.register_blueprint(pay)
    # logger
    handler = logging.FileHandler('flask.log', encoding='UTF-8')
    handler.setLevel(logging.DEBUG)
    logging_format = logging.Formatter(
        '%(asctime)s - %(levelname)s - %(filename)s - %(funcName)s - %(lineno)s - %(message)s')
    handler.setFormatter(logging_format)
    app.logger.addHandler(handler)
    @app.context_processor
    def inject_vars():
        """Expose registration/volunteer/province lookups to all templates."""
        from .models.activity import registration_way, volunteer_type, RegistrationWay
        from .models.tools import province
        return dict(RegistrationWay=registration_way, Province=province, VolunteerType=volunteer_type,Registration_Way=RegistrationWay)
    #add main router
    @app.route('/')
    def index():
        """Home page: carousel activities, outdoor-type list, team previews."""
        from .models.activity import Activity
        from .models.outdoorType import OutdoorType
        carousel_items = Activity.get_activities_home()
        collection = OutdoorType.show_list()
        activities = Activity.get_activities_home_panel()
        from .models.team import Team
        teams = Team.query.limit(10).all()
        return render_template('home.html',
                               carousel_items = carousel_items,
                               collection=collection,
                               activities=activities,
                               teams=teams)
    @app.route('/invest', methods=['GET', 'POST'])
    def invest():
        """Sponsorship enquiry form; stores a Demand row on valid submit."""
        from .models.demand import Demand
        from .forms.demand import DemandForm
        form = DemandForm()
        if form.validate_on_submit():
            demand = Demand(company=form.company.data,
                            contact = form.contact.data,
                            phone = form.phone.data,
                            image = form.image.data,
                            brand = form.brand.data,
                            product = form.product.data,
                            market = form.market.data,
                            other = form.other.data)
            # Tie the demand to the submitting account when logged in.
            if current_user.is_authenticated:
                demand.user_id = current_user.id
            db.session.add(demand)
            flash('您已经提交了您的需求,稍后会与您联系')
            return redirect(url_for('invest'))
        return render_template('invest.html', form=form)
    # ----------------- CKEditor image upload -----------------
    @app.route('/ckupload/', methods=['POST'])
    def ckupload():
        """Receive a CKEditor image upload and answer with its callback JS."""
        from flask import request, make_response
        from .tools.string_tools import get_rnd_filename_w_ext
        import os
        error = ''
        url = ''
        callback = request.args.get("CKEditorFuncNum")
        if request.method == 'POST' and 'upload' in request.files:
            fileobj = request.files['upload']
            rnd_name = get_rnd_filename_w_ext(fileobj.filename)
            filepath = os.path.join(app.static_folder,'images', 'upload', rnd_name)
            # Create the upload directory if it does not exist yet.
            dirname = os.path.dirname(filepath)
            if not os.path.exists(dirname):
                try:
                    os.makedirs(dirname)
                except:
                    error = 'ERROR_CREATE_DIR'
            elif not os.access(dirname, os.W_OK):
                error = 'ERROR_DIR_NOT_WRITEABLE'
            if not error:
                #fileobj.save(filepath)
                # No upload size limit, but the image is resized to at most
                # 1200 pixels before being stored.
                resize(fileobj, filepath, 1200)
                url = url_for('static', filename='%s/%s' % ('images/upload/', rnd_name))
        else:
            error = 'post error'
        res = """<script type="text/javascript">
        window.parent.CKEDITOR.tools.callFunction(%s, '%s', '%s');
        </script>""" % (callback, url, error)
        response = make_response(res)
        response.headers["Content-Type"] = "text/html"
        return response
    # --------- error handlers ---------
    @app.errorhandler(404)
    def page_not_fount(e):
        return render_template('404.html'), 404
    @app.errorhandler(500)
    def internal_server_error(e):
        # Log the full traceback for server errors before rendering the page.
        app.logger.exception('error 500:%s', e)
        app.logger.error(e)
        return render_template('500.html', e=e), 500
    @app.errorhandler(403)
    def internal_server_error(e):
        return render_template('403.html'), 403
    @app.errorhandler(413)
    def internal_server_error(e):
        return render_template('413.html'), 413
    return app
| Honglin-Li/TravelPlatform | app/__init__.py | __init__.py | py | 6,069 | python | en | code | 0 | github-code | 36 |
38380439299 | import matplotlib.pyplot as plt
import numpy as np
import netCDF4
def plot( file,ofile=None):
    """Contour-plot the primary variable of a CMIP-style netCDF file.

    The variable name is taken from the filename prefix (the text before
    the first underscore).  If the variable has more than two dimensions,
    only the first slice along the leading axis is plotted -- presumably
    the leading axis is time; TODO confirm.  The figure is written to
    *ofile* when given, otherwise shown interactively.
    """
    nc = netCDF4.Dataset( file )
    fn = file.rpartition("/")[-1]
    label = fn.split("_")[0]
    var = nc.variables[label]
    long_name = var.long_name
    units = var.units
    if len(var.shape) > 2:
        print ( var.dimensions )
        var = var[0,:,:]
    lat = nc.variables["lat"]
    lon = nc.variables["lon"]
    fig = plt.figure(figsize=(6,5))
    left, bottom, width, height = 0.1, 0.1, 0.8, 0.8
    ax = fig.add_axes([left, bottom, width, height])
    # Build the 2-D coordinate grids matching the variable slice.
    X, Y = np.meshgrid(lon, lat )
    cp = plt.contourf(X[:], Y[:], var[:])
    plt.colorbar(cp)
    ax.set_title("%s: %s [%s]" % (label,long_name,units))
    ax.set_xlabel('Longitude')
    ax.set_ylabel('Latitude')
    if ofile != None:
        plt.savefig( ofile )
    else:
        plt.show()
if __name__ == "__main__":
    # CLI: plot2.py [infile [outfile]]; with no arguments a bundled sample
    # land-fraction file is plotted interactively.
    import sys
    if len(sys.argv) == 1:
        plot( "../esgf_fetch/data_files/sftlf_fx_MIROC-ES2L_historical_r1i1p1f2_gn.nc" )
    else:
        file = sys.argv[1]
        ofile = None
        if len(sys.argv) == 3:
            ofile = sys.argv[2]
        plot ( file, ofile=ofile )
74572928422 | from flask import Flask,render_template
from os import path
from flask_misaka import markdown,Misaka
from LocalStorageBackend import folderlist
##### this is the Template jinja stuff for the webpage
app = Flask(__name__, template_folder="views")
### need this line for the Misaka markdown rendering
# NOTE(review): fenced_code is passed as the string "true" rather than the
# boolean True; this behaves the same only because any non-empty string is
# truthy -- confirm against flask-misaka's API.
Misaka(app,fenced_code="true")
def get_namespaceslocal(filepath):
    """Return the directory entries under *filepath* as a list."""
    return list(folderlist(filepath))
@app.route('/') # to list all the module namespaces
def index():
    """List every namespace found under the local module store."""
    filepath = f'./v1/modules/'
    namespaces = get_namespaceslocal(filepath)
    return render_template('index.html',modules=namespaces, filepath=filepath)
@app.route('/v1/modules/<namespace>/', methods=['GET']) # list all the modules in a namespace
def namespaceselect(namespace):
    """List the modules available inside one namespace."""
    filepath = f'./v1/modules/{namespace}'
    namespaces = get_namespaceslocal(filepath)
    return render_template('namespace.html',modules=namespaces, filepath=filepath)
@app.route('/v1/modules/<namespace>/<name>/', methods=['GET']) # list the providers of a particular module
def moduleselect(namespace,name):
    """List the providers available for one module."""
    filepath = f'./v1/modules/{namespace}/{name}'
    namespaces = get_namespaceslocal(filepath)
    return render_template('modules.html',modules=namespaces, filepath=filepath)
@app.route('/v1/modules/<namespace>/<name>/<provider>/', methods=['GET']) # list the versions of a module for a given provider
def providerselect(namespace,name,provider):
    """List the published versions of a module for one provider."""
    filepath = f'./v1/modules/{namespace}/{name}/{provider}'
    namespaces = get_namespaceslocal(filepath)
    return render_template('provider.html',modules=namespaces, filepath=filepath)
# Renders the readme.md from the version folder
@app.route('/v1/modules/<namespace>/<name>/<provider>/<version>/', methods=['GET'])
def load_readme(namespace, name, provider, version):
    """Render the readme.md stored for one module version."""
    filepath = f'./v1/modules/{namespace}/{name}/{provider}/{version}'
    # Read explicitly as UTF-8: module readmes are markdown, and the platform
    # default encoding (e.g. cp1252 on Windows) could corrupt or reject them.
    with open(f'{filepath}/readme.md', 'r', encoding='utf-8') as f:
        content = f.read()
    return render_template("readme.html",text=content, title=f'Readme for {namespace}/{name}/{provider}/{version}')
| gabrielmccoll/Simple-Terraform-Registry | LocalStorageGUI.py | LocalStorageGUI.py | py | 2,087 | python | en | code | 4 | github-code | 36 |
24706362299 | import pygame
from Snake import Snake
from Segment import Segment
class Player(Snake):
    """Mouse-controlled snake: steers toward the cursor every frame."""

    def __init__(self, x, y, w, h, filePath, winDims):
        super().__init__(x, y, w, h, filePath)
        # Window dimensions, needed to convert mouse (screen) coordinates
        # into world coordinates (the camera is centred on the player).
        self.winDims = winDims

    def update(self, orbs, snakes):
        """Re-aim at the mouse, then run the base snake update."""
        self.calculateDirection()
        return super().update(snakes)

    def calculateDirection(self):
        """Point self.direction (a unit vector) from the player at the mouse.

        BUG FIX: when the cursor sits exactly on the player the direction
        vector has zero length; the previous code divided by it and raised
        ZeroDivisionError.  We now keep the previous heading instead.
        """
        mousePos = pygame.mouse.get_pos()
        # Translate screen-space mouse position into world space.
        worldPos = (mousePos[0] - self.winDims[0] / 2 + self.rect.x, mousePos[1] - self.winDims[1] / 2 + self.rect.y)
        dx = worldPos[0] - self.rect.x
        dy = worldPos[1] - self.rect.y
        length = (dx ** 2 + dy ** 2) ** (1 / 2)
        if length == 0:
            return  # mouse on top of the player: keep the current direction
        self.direction = [dx / length, dy / length]
9659112930 | #!/usr/bin/env python
# coding: utf-8
# @Author: lapis-hong
# @Date : 2018/5/3
"""Prob 21. Merge Two Sorted Lists
https://leetcode.com/problems/merge-two-sorted-lists/description/
Description:
Merge two sorted linked lists and return it as a new list. The new list should be made by splicing together the nodes of the first two lists.
Example:
Input: 1->2->4, 1->3->4
Output: 1->1->2->3->4->4
"""
# Definition for singly-linked list.
class ListNode(object):
    """A singly-linked list node: a value plus a link to the next node."""
    def __init__(self, x):
        self.val = x  # node payload
        self.next = None  # following node, or None at the tail
def mergeTwoLists1(a, b):
    """Recursively merge two sorted linked lists; returns the merged head."""
    if not a or not b:
        # One side exhausted: the remainder of the other is already sorted.
        return a or b
    # Keep the smaller head in `a` so it leads the merged list.
    if a.val > b.val:
        a, b = b, a
    a.next = mergeTwoLists1(a.next, b)
    return a
def mergeTwoLists2(a, b):
    """Recursive merge; swaps so `a` always holds the smaller (or only) head."""
    needs_swap = a is None or (b is not None and a.val > b.val)
    if needs_swap:
        a, b = b, a
    if a is not None:
        a.next = mergeTwoLists2(a.next, b)
    return a
# iteratively
def mergeTwoLists3(l1, l2):
    """Iteratively merge two sorted linked lists using a dummy head node."""
    dummy = ListNode(0)
    tail = dummy
    while l1 is not None and l2 is not None:
        if l1.val < l2.val:
            tail.next, l1 = l1, l1.next
        else:
            tail.next, l2 = l2, l2.next
        tail = tail.next
    # Splice in the leftover tail from whichever list is non-empty.
    tail.next = l1 if l1 is not None else l2
    return dummy.next
# recursively
def mergeTwoLists4(l1, l2):
    """Recursively merge two sorted linked lists (l2 wins ties)."""
    if l1 is None or l2 is None:
        # Base case: whatever remains is already sorted.
        return l1 if l1 is not None else l2
    if l1.val < l2.val:
        smaller, rest_a, rest_b = l1, l1.next, l2
    else:
        smaller, rest_a, rest_b = l2, l1, l2.next
    smaller.next = mergeTwoLists4(rest_a, rest_b)
    return smaller
# in-place, iteratively
def mergeTwoLists5(l1, l2):
    """In-place iterative merge: splices l2's nodes into l1 as needed."""
    if l1 is None or l2 is None:
        return l1 if l1 is not None else l2
    dummy = ListNode(0)
    dummy.next = l1
    cur = dummy
    # Invariant: cur.next is the l1 node currently under consideration.
    while l1 is not None and l2 is not None:
        if l1.val < l2.val:
            # l1's node is already in place; just advance.
            l1 = l1.next
        else:
            # Splice the l2 node in front of the current l1 node.
            follower = l2.next
            l2.next = cur.next
            cur.next = l2
            l2 = follower
        cur = cur.next
    cur.next = l1 if l1 is not None else l2
    return dummy.next
5807342443 | import heapq
class puzzle:
    """A* solver for the classic 3x3 sliding (8-)puzzle.

    States are flat lists of 9 ints in row-major order, 0 marking the
    blank tile.
    """

    def __init__(self):
        # Blank-tile moves: up, down, left, right as (row, col) deltas.
        self.moves = [(-1, 0), (1, 0), (0, -1), (0, 1)]

    def input(self, state):
        """Interactively read 9 tile values from stdin, appending to *state*."""
        for i in range(9):
            state.append(int(input(f"Enter element in position {i + 1}: ")))
        return state

    def hamming_distance(self, current_state, goal_state):
        """Count misplaced tiles, blank excluded (an admissible heuristic)."""
        distance = 0
        for i in range(9):
            if goal_state[i] != 0 and current_state[i] != goal_state[i]:
                distance += 1
        return distance

    def solve_puzzle(self, initial_state, goal_state):
        """Return the minimum number of moves from initial to goal state.

        Implements A* with priority f = g + h (g = moves so far, h =
        Hamming distance).  BUG FIX: the previous version prioritised by
        h alone, i.e. greedy best-first search, which does not guarantee
        a minimal move count despite the caller printing "Minimum number
        of moves".  Returns None if the goal is unreachable.
        """
        h0 = self.hamming_distance(initial_state, goal_state)
        # Heap entries: (f = g + h, g, state).
        open_list = [(h0, 0, initial_state)]
        closed_set = set()
        while open_list:
            _, g, current_state = heapq.heappop(open_list)
            if current_state == goal_state:
                return g
            key = tuple(current_state)
            if key in closed_set:
                # Already expanded via a cheaper-or-equal path; skip.
                continue
            closed_set.add(key)
            zero_index = current_state.index(0)
            x, y = divmod(zero_index, 3)
            for dx, dy in self.moves:
                new_x, new_y = x + dx, y + dy
                if 0 <= new_x < 3 and 0 <= new_y < 3:
                    new_index = new_x * 3 + new_y
                    new_state = current_state[:]
                    new_state[zero_index], new_state[new_index] = new_state[new_index], new_state[zero_index]
                    if tuple(new_state) not in closed_set:
                        h = self.hamming_distance(new_state, goal_state)
                        heapq.heappush(open_list, (g + 1 + h, g + 1, new_state))
        return None

    def check(self, goal_state):
        """Return the inversion-count parity of a state (0 or 1).

        For a 3x3 puzzle with a fixed blank convention, matching parity
        between two states means one is reachable from the other.
        """
        tiles = [num for num in goal_state if num != 0]
        inversions = 0
        for i in range(len(tiles)):
            for j in range(i + 1, len(tiles)):
                if tiles[i] > tiles[j]:
                    inversions += 1
        return inversions % 2
if __name__ == "__main__":
    # Demo: solve one fixed 8-puzzle instance.
    initial_state = [1,3,4,0,5,8,7,2,6]
    goal_state = [1,2,3,4,5,6,7,8,0]
    p = puzzle()
    valid = p.check(goal_state)
    # NOTE(review): `check` returns the inversion *parity* (0 or 1), not the
    # raw inversion count, so the message below is misleading.  It also only
    # inspects the goal state, whereas solvability depends on the parities of
    # BOTH states matching -- confirm intended behaviour.
    print(f"The no. of inversions are: {valid}")
    if valid == 1:
        print("Error, Puzzle Not Solvable")
        exit()
    steps = p.solve_puzzle(initial_state, goal_state)
    print(f"Minimum number of moves to solve the puzzle: {steps}")
21489617325 | import os
import pickle
from autoencoder.models import Encoder, Classifier
PARAM_LIMIT = 5e6
SIZE_LIMIT_MB = 20
ACC_THRESHOLD = 0.5
def load_model(model_path):
    """Load a pickled classifier saved by ``save_model``.

    Rebuilds the ``Encoder`` from its stored hyper-parameters and input
    size, wraps it in a ``Classifier``, and restores the classifier's
    weights.

    NOTE(review): the stored "encoder_state_dict" is never loaded here --
    presumably the encoder weights are contained in the classifier's
    state_dict; confirm.
    """
    model_dict = pickle.load(open(model_path, "rb"))["classifier_pt1"]
    encoder = Encoder(
        model_dict["encoder_hparam"],
        model_dict["encoder_inputsize"],
        model_dict["encoder_latent_dim"],
    )
    model = Classifier(model_dict["hparams"], encoder)
    model.load_state_dict(model_dict["state_dict"])
    return model
def save_model(model, file_name, directory="models"):
    """Serialize a classifier (and its encoder metadata) to directory/file_name.

    The model is moved to the CPU first so the pickled tensors are not
    GPU-bound.  The payload layout mirrors what ``load_model`` expects.
    """
    model = model.cpu()
    model_dict = {
        "classifier_pt1": {
            "state_dict": model.state_dict(),
            "hparams": model.hparams,
            "encoder_hparam": model.encoder.hparams,
            "encoder_inputsize": model.encoder.input_size,
            "encoder_latent_dim": model.encoder.latent_dim,
            "encoder_state_dict": model.encoder.state_dict(),
        }
    }
    # exist_ok avoids the check-then-create race of the previous version.
    os.makedirs(directory, exist_ok=True)
    # BUG FIX: the old call `open(..., "wb", 4)` passed 4 as open()'s
    # *buffering* argument; the intent was pickle protocol 4.  Also close
    # the file deterministically via a context manager.
    with open(os.path.join(directory, file_name), "wb") as fh:
        pickle.dump(model_dict, fh, protocol=4)
| chloeskt/deep_learning_topics | autoencoder/autoencoder/utils.py | utils.py | py | 1,119 | python | en | code | 0 | github-code | 36 |
40186857847 | # import community
import numpy as np
import networkx as nx
import matplotlib as mpl
from matplotlib.pyplot import imshow
from matplotlib import pyplot as plt
import matplotlib.image as mpimg
import pygraphviz
from networkx.drawing.nx_agraph import write_dot, graphviz_layout
import random
import pydoc
from ds import McmcTree as Tree
from utils import ColorPrint as _
import sys
sys.path.append("..")
from datasets.synthetic.generator import TreeGenerator
# Plot styling applied to all figures produced by this experiment.
font = {'weight' : 'normal',
        'size'   : 24}
mpl.rc('font', **font)
### load random data
# Synthetic-tree generator hyper-parameters (M genes, N cells; alpha/beta
# are the error rates also passed to the MCMC tree below).
M = 20
N = 40
ZETA = 1
Gamma = 0.15
alpha = 0.01
beta = 0.01
MR = 0.005
tg = TreeGenerator(
    M = M,
    N = N,
    ZETA = ZETA,
    Gamma = Gamma,
    alpha = alpha,
    beta = beta,
    MR = MR,
)
(gt_E, gt_D, D, gt_T) = tg.generate()
gensNames = list( str(i) for i in range(M) )
print(gensNames)
# D is genes x cells (rows = mutations/genes, columns = cells).
C_num = D.shape[1]
G_num = D.shape[0]
_.print_warn( 'There is {} cells and {} mutations at {} genes in this dataset.'.format(C_num, G_num, len(gensNames)) )
# ### fill missed data
# def tf(m,c):
#     os = len(np.where(D[:,c]==1.))*1.
#     zs = len(np.where(D[:,c]==0.))*1.
#     return 1. if np.random.rand() < os/(os+zs) else 0.
# for m in range(G_num):
#     for c in range(C_num):
#         if D[m,c] == 3.:
#             D[m,c] = tf(m,c)
### Run
dl = list(d for d in D)
# The ground-truth tree's unique in-degree-0 node is its root.
root = [n for n,d in gt_T.in_degree() if d==0][0]
print('ROOT:', root)
T = Tree(gensNames, D, data_list=dl, root=str(root), alpha=alpha, beta=beta)
T.set_ground_truth(gt_D, gt_E, gt_T=gt_T)
T.randomize()
T.plot_best_T('initial T')
# T.plot('T0')
# Run up to 1000 MCMC steps; T.next() returning truthy signals convergence.
for i in range(1000):
    if T.next():
        break
T.plot_all_results()
| afshinbigboy/itmt | src/test.py | test.py | py | 1,675 | python | en | code | 0 | github-code | 36 |
31422423491 | from __future__ import barry_as_FLUFL, print_function, division
__version__ = '0.1'
__author__ = 'Maryam Najafian'
"""
* Implementing Part of Speech (POS) tagging
* Using RNN in Tensorflow
structure: Embedding --> GRU --> Dense
* INPUTs are one hot encoded words and OUTPUTs are tags
* Measure F1-score and accuracy
* Note: in TF (unlike Theano) all sequences should have equal length
Anything that is shorter than the longest sequence is 0 padded
You can think of your data as a NXTXD
* N samples
* samples of length T
* D is the dimensionality of each word vector
This allows us to process our data in batches
which is more difficult in Theano where you are
going to have variable length sequences
"""
from builtins import range
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
import os
import sys
sys.path.append(os.path.abspath('..'))
from pos_baseline import get_data
from sklearn.utils import shuffle
from util import init_weight
from datetime import datetime
from sklearn.metrics import f1_score
from tensorflow.contrib.rnn import static_rnn as get_rnn_output
from tensorflow.contrib.rnn import BasicRNNCell, GRUCell
import config
def get_data(split_sequences=False):
    """Load the chunking train/test data as word- and tag-index sequences.

    Returns (Xtrain, Ytrain, Xtest, Ytest, word2idx).  With
    split_sequences=True the X/Y containers are lists of per-sentence
    index lists; otherwise they are single flat lists.  Indices start at
    1 so that 0 can be reserved for padding; test words unseen during
    training all map to the next free word index (a single unknown id).
    """
    train_text = config.CHUNKING_DATA + '/train.txt'
    test_text = config.CHUNKING_DATA + '/test.txt'
    if not os.path.exists(config.CHUNKING_DATA):
        print("Please create a folder in your local directory called 'chunking'")
        print("train.txt and test.txt should be stored in there.")
        print("Please check the comments to get the download link.")
        exit()
    elif not os.path.exists(train_text):
        print("train.txt is not in chunking/train.txt")
        print("Please check the comments to get the download link.")
        exit()
    elif not os.path.exists(test_text):
        print("test.txt is not in chunking/test.txt")
        print("Please check the comments to get the download link.")
        exit()
    word2idx = {}
    tag2idx = {}
    # unlike the Theano version of this code the index starts
    # from 1 because TF needs all input seq. to be the same size
    # and it does 0 padding and 0 is a special number that we can't
    # use for anything else
    word_idx = 1
    tag_idx = 1
    # X/Ytrain: each element is a sample and each sample is a list containing word or tag indices
    Xtrain = []
    Ytrain = []
    # currentX/Y: contain a list of words and tags in the current sentence
    currentX = []
    currentY = []
    # each line contains one word and one tag
    # each sentence is separated by a blank line
    for line in open(train_text):
        line = line.rstrip()
        if line: # check if the line is blank
            r = line.split()
            word, tag, _ = r
            if word not in word2idx:
                word2idx[word] = word_idx
                word_idx += 1
            currentX.append(word2idx[word])
            if tag not in tag2idx:
                tag2idx[tag] = tag_idx
                tag_idx += 1
            currentY.append(tag2idx[tag])
        elif split_sequences: # add the whole list as a sample
            Xtrain.append(currentX)
            Ytrain.append(currentY)
            currentX = []
            currentY = []
    if not split_sequences:
        Xtrain = currentX
        Ytrain = currentY
    # load and score test data
    Xtest = []
    Ytest = []
    currentX = []
    currentY = []
    for line in open(test_text):
        line = line.rstrip()
        if line:
            r = line.split()
            word, tag, _ = r
            if word in word2idx:
                currentX.append(word2idx[word])
            else:
                currentX.append(word_idx) # use this as unknown
            currentY.append(tag2idx[tag])
        elif split_sequences:
            Xtest.append(currentX)
            Ytest.append(currentY)
            currentX = []
            currentY = []
    if not split_sequences:
        Xtest = currentX
        Ytest = currentY
    return Xtrain, Ytrain, Xtest, Ytest, word2idx
def flatten(l):
    """Collapse one level of nesting: [[a, b], [c]] -> [a, b, c]."""
    flat = []
    for sub in l:
        flat.extend(sub)
    return flat
# get the data
# NOTE(review): this uses the TF1 graph-mode API (placeholder / Session /
# InteractiveSession) -- it requires tensorflow<2 or tf.compat.v1.
# get the data
Xtrain, Ytrain, Xtest, Ytest, word2idx = get_data(split_sequences=True)
V = len(word2idx) + 2 # vocab size (+1 for including an index for unknown, +1 for starting from 1 rather than 0)
K = len(set(flatten(Ytrain)) | set(
    flatten(Ytest))) + 1 # num classes (assumption no unknown index, +1 for starting from 1 rather than 0)
# training config
epochs = 20
learning_rate = 1e-2
mu = 0.99
batch_size = 32
hidden_layer_size = 10
embedding_dim = 10
sequence_length = max(len(x) for x in Xtrain + Xtest)
# pad sequences (left-pad with 0 to the longest sentence length)
Xtrain = tf.keras.preprocessing.sequence.pad_sequences(Xtrain, maxlen=sequence_length)
Ytrain = tf.keras.preprocessing.sequence.pad_sequences(Ytrain, maxlen=sequence_length)
Xtest = tf.keras.preprocessing.sequence.pad_sequences(Xtest, maxlen=sequence_length)
Ytest = tf.keras.preprocessing.sequence.pad_sequences(Ytest, maxlen=sequence_length)
print("Xtrain.shape:", Xtrain.shape)
print("Ytrain.shape:", Ytrain.shape)
# inputs
inputs = tf.placeholder(tf.int32, shape=(None, sequence_length))
targets = tf.placeholder(tf.int32, shape=(None, sequence_length))
num_samples = tf.shape(inputs)[0] # useful for later
# word embedding matrix
We = np.random.randn(V, embedding_dim).astype(np.float32)
# weight and bias of the final dense layer (output layer)
Wo = init_weight(hidden_layer_size, K).astype(np.float32)
bo = np.zeros(K).astype(np.float32)
# make them tensorflow variables
tfWe = tf.Variable(We)
tfWo = tf.Variable(Wo)
tfbo = tf.Variable(bo)
# make the rnn unit
rnn_unit = GRUCell(num_units=hidden_layer_size, activation=tf.nn.relu)
# pass the inputs through the embedding layer to get the output and from that build the cost
x = tf.nn.embedding_lookup(tfWe, inputs)
# TF doesn't like 3D objects, so unstack converts x from a tensor of shape N x T x M
# into a list of length T, where each element is a tensor of shape N x M
x = tf.unstack(x, sequence_length, 1)
# get the rnn output
outputs, states = get_rnn_output(rnn_unit, x, dtype=tf.float32)
# outputs are now of size (T, N, M)
# so make it (N, T, M)
outputs = tf.transpose(outputs, (1, 0, 2))
outputs = tf.reshape(outputs, (sequence_length * num_samples, hidden_layer_size)) # NT x M
# final dense layer
logits = tf.matmul(outputs, tfWo) + tfbo # we need to flatten our data because matmul only works on 2D tensor
# objects: NT x K
predictions = tf.argmax(logits, 1)
predict_op = tf.reshape(predictions, (num_samples, sequence_length))
labels_flat = tf.reshape(targets, [-1])
# we need to flatten our data because cross entropy cost function only works on 2D tensor objects and doesn't like
# 3D tensors
cost_op = tf.reduce_mean(
    tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=logits,
        labels=labels_flat
    )
)
train_op = tf.train.AdamOptimizer(learning_rate).minimize(cost_op)
# init stuff
sess = tf.InteractiveSession()
init = tf.global_variables_initializer()
sess.run(init)
# training loop
costs = []
n_batches = len(Ytrain) // batch_size
for i in range(epochs):
    n_total = 0
    n_correct = 0
    t0 = datetime.now()
    Xtrain, Ytrain = shuffle(Xtrain, Ytrain)
    cost = 0
    for j in range(n_batches):
        x = Xtrain[j * batch_size:(j + 1) * batch_size]
        y = Ytrain[j * batch_size:(j + 1) * batch_size]
        # get the cost, predictions, and perform a gradient descent step
        c, p, _ = sess.run(
            (cost_op, predict_op, train_op),
            feed_dict={inputs: x, targets: y})
        cost += c
        # calculate the accuracy
        for yi, pi in zip(y, p):
            # we don't care about the padded entries so ignore them:
            # 0 means padding, so filter out all the entries where the target is 0
            yii = yi[yi > 0]
            pii = pi[yi > 0]
            n_correct += np.sum(yii == pii)
            n_total += len(yii)
        # print stuff out periodically
        if j % 10 == 0:
            sys.stdout.write(
                "j/N: %d/%d correct rate so far: %f, cost so far: %f\r" %
                (j, n_batches, float(n_correct) / n_total, cost)
            )
            sys.stdout.flush()
    # get test acc. too
    p = sess.run(predict_op, feed_dict={inputs: Xtest, targets: Ytest})
    n_test_correct = 0
    n_test_total = 0
    for yi, pi in zip(Ytest, p):
        yii = yi[yi > 0]
        pii = pi[yi > 0]
        n_test_correct += np.sum(yii == pii)
        n_test_total += len(yii)
    test_acc = float(n_test_correct) / n_test_total
    print(
        "i:", i, "cost:", "%.4f" % cost,
        "train acc:", "%.4f" % (float(n_correct) / n_total),
        "test acc:", "%.4f" % test_acc,
        "time for epoch:", (datetime.now() - t0)
    )
    costs.append(cost)
plt.plot(costs)
plt.show()
| MaryamNajafian/Tea_Maryam_NLP | Code/pos_tf.py | pos_tf.py | py | 8,941 | python | en | code | 0 | github-code | 36 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.