id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7
values |
|---|---|---|
62645 | <reponame>krishotte/env_data2<gh_stars>0
from orator.migrations import Migration
class AddDataPressure(Migration):
    """Orator migration: add a float ``pressure`` column to the ``data`` table."""

    def up(self):
        """
        Run the migrations.
        """
        # Existing rows get NULL for the new column.
        with self.schema.table('data') as table:
            table.float('pressure')

    def down(self):
        """
        Revert the migrations.
        """
        # Exact reverse of up(): drop the column added above.
        with self.schema.table('data') as table:
            table.drop_column('pressure')
| StarcoderdataPython |
18990 | <reponame>sladinji/blousebrothers<gh_stars>1-10
from django.db import migrations
from decimal import Decimal
from dateutil.relativedelta import relativedelta
from datetime import date
def fix_subscription(apps, schema_editor):
    """Data migration: normalise presale subscriptions and the "abo" product.

    Pushes every Subscription's due date 12 months out, forces them all onto
    subscription type 5, deletes every other SubscriptionType, then attaches
    bonus / email_msg / bonus_sponsor attributes to the "abo" product.
    """
    Subscription = apps.get_model('confs', 'Subscription')
    SubscriptionType = apps.get_model('confs', 'SubscriptionType')
    # update presale sub past due date
    pdate = date.today() + relativedelta(months=+12)
    Subscription.objects.all().update(date_over=pdate)
    Subscription.objects.all().update(type_id=5)
    # Keep only type 5; everything else is obsolete after the presale.
    SubscriptionType.objects.exclude(id=5).delete()
    sub = SubscriptionType.objects.first()
    if sub:
        sub.bonus = Decimal('2')
        sub.save()
    else:
        # No subscription type left: bail out.
        # NOTE(review): this also skips the product-attribute setup below —
        # presumably intentional for empty databases; confirm.
        return
    # Update Abo product type
    Product = apps.get_model('catalogue', 'Product')
    ProductAttribute = apps.get_model('catalogue', 'ProductAttribute')
    ProductAttributeValue = apps.get_model('catalogue', 'ProductAttributeValue')
    # .get() raises unless exactly one product title contains "abo".
    abo = Product.objects.filter(title__icontains='abo').get()
    pclass = abo.product_class
    bonus = ProductAttribute(name='bonus', code='bonus', type='text')
    bonus.save()
    email_msg = ProductAttribute(name='email_msg', code='email_msg', type='richtext')
    email_msg.save()
    bonus_sponsor = ProductAttribute(name='bonus_sponsor', code='bonus_sponsor', type='text')
    bonus_sponsor.save()
    pclass.attributes.add(bonus, email_msg, bonus_sponsor)
    pclass.save()
    # add a 2€ bonus attribute to presale subscription
    mybonus = ProductAttributeValue(attribute=bonus, value_text='2.00', product=abo)
    mybonus.save()
    abo.attribute_values.add(mybonus)
    abo.save()
class Migration(migrations.Migration):
    """Wires fix_subscription into Django's migration graph."""

    dependencies = [
        ('confs', '0043_auto_20170206_0855'),
    ]
    operations = [
        # omit reverse_code=... if you don't want the migration to be reversible.
        migrations.RunPython(fix_subscription),
    ]
| StarcoderdataPython |
3349715 | <reponame>InfernalAzazel/dragon
import time
from fastapi import APIRouter, Request, BackgroundTasks
from loguru import logger
from robak import Jdy, JdySerialize
from conf import Settings
from func.jd_web_hook.models import WebHookItem
doc = '''
客户自销量奖励核算申请 -> 流程完成 -> 触发
目标表单:
U8应收单过渡表
条件
支付客户合计(含补差)> 0
'''
def register(router: APIRouter):
    """Attach the webhook endpoint for this business flow to *router*."""

    @router.post('/customer_business_accounting_apply', tags=['客户自销量奖励核算申请-U8应收单过渡表'], description=doc)
    async def customer_business_accounting_apply(whi: WebHookItem, req: Request, background_tasks: BackgroundTasks):
        # Verify the webhook signature before trusting the payload.
        if req.headers['x-jdy-signature'] != Jdy.get_signature(
                nonce=req.query_params['nonce'],
                secret=Settings.JD_SECRET,
                timestamp=req.query_params['timestamp'],
                payload=bytes(await req.body()).decode('utf-8')):
            return 'fail', 401
        # Queue the heavy work so the webhook can return immediately.
        background_tasks.add_task(business, whi, str(req.url))
        return '2xx'
# Business handler (runs as a background task after the webhook is accepted).
async def business(whi: WebHookItem, url):
    """Sync an approved reward-accounting entry into the U8 receivables
    transition form: update the record matching this source_no if one
    exists, otherwise create it (starting its workflow)."""

    async def errFn(e):
        # Forward any Jdy API error to the central log service; no-op on None.
        if e is not None:
            await Settings.log.send(
                level=Settings.log.ERROR,
                url=url,
                secret=Settings.JD_SECRET,
                err=e,
                data=whi.dict(),
                is_start_workflow=True
            )
        return

    # Start timing.
    start = Settings.log.start_time()
    # Only act on completed-flow updates with a positive payout flagged for U8.
    if whi.data['flowState'] == 1 and whi.op == 'data_update':
        if whi.data['money'] is None:
            pass
        else:
            if whi.data['money'] > 0 and whi.data['tb_u8'] == '是':
                jdy = Jdy(
                    app_id=Settings.JD_APP_ID_BUSINESS,
                    entry_id='5facec6b40e1cb00079e03cb',
                    api_key=Settings.JD_API_KEY,
                )
                remarks = whi.data['remarks']
                source_no = whi.data['source_no']
                filling_time = whi.data['filling_time']
                customer_name = whi.data['customer_name']
                customer_code = whi.data['customer_code']
                money = whi.data['money']
                qujili_u8_code = whi.data['qujili_u8_code']
                zhuguan_no = whi.data['zhuguan_no']
                abstract = f'{whi.data["source_no"]} {remarks}'
                # Receivable sub-form row (debit entry against account 236).
                subform = [
                    {
                        '_id': '60bf5e91ff26fc00000e0000',
                        'r_fx': '借',
                        'r_km_code': '236',
                        'r_money': money,
                        'r_bm_code': qujili_u8_code,
                        'r_zhuguan_no': zhuguan_no,
                        'r_abstract': abstract,
                    }
                ]
                # Look for an existing record with the same source_no.
                res, err = await jdy.get_form_data(
                    data_filter={
                        "cond": [
                            {
                                "field": 'source_no',
                                "type": 'text',
                                "method": "eq",
                                "value": whi.data['source_no']  # application batch number
                            }
                        ]
                    })
                await errFn(err)
                if res:
                    # Existing record: overwrite it and mark it handled.
                    _, err = await jdy.update_data(
                        dataId=res[0]['_id'],
                        data={
                            'source_no': {'value': source_no},
                            'is_handle': {'value': '是'},
                            'source_form': {'value': '新客户实销量奖励核算申请'},
                            'filling_time': {'value': filling_time},
                            'customer_name': {'value': customer_name},
                            'customer_code': {'value': customer_code},
                            'km_code': {'value': '122'},
                            'money': {'value': money},
                            'qujili_u8_code': {'value': qujili_u8_code},
                            'zhuguan_no': {'value': zhuguan_no},
                            'abstract': {'value': abstract},
                            'receivable': JdySerialize.subform('receivable', subform)['receivable'],
                        })
                    await errFn(err)
                else:
                    # No record yet: create one and kick off its workflow.
                    _, err = await jdy.create_data(
                        data={
                            'source_no': {'value': source_no},
                            'is_handle': {'value': '否'},
                            'source_form': {'value': '新客户实销量奖励核算申请'},
                            'filling_time': {'value': filling_time},
                            'customer_name': {'value': customer_name},
                            'customer_code': {'value': customer_code},
                            'km_code': {'value': '122'},
                            'money': {'value': money},
                            'qujili_u8_code': {'value': qujili_u8_code},
                            'zhuguan_no': {'value': zhuguan_no},
                            'abstract': {'value': abstract},
                            'receivable': JdySerialize.subform('receivable', subform)['receivable'],
                        },
                        is_start_workflow=True
                    )
                    await errFn(err)
    # Stop timing and report.
    elapsed = (time.perf_counter() - start)
    logger.info(f'[+] 程序处理耗时 {elapsed}s')
| StarcoderdataPython |
1699237 | <reponame>ftomassetti/pydiffparser
from model import *
# Diff Parser, produce Diff representations.
class Parser:
    """Recursive-descent parser for unified diff text (uses model.Diff/Section/Hunk)."""

    class SectionParser:
        def eat(self, lines, i, diff):
            """Consume one '---/+++' file-header pair starting at line *i*,
            append a Section to *diff*, then hand off to HunkParser."""
            if len(lines)==i:
                # End of input: nothing more to parse.
                return
            if not lines[i].startswith("--- "):
                raise Exception("Section expected at line %i, found: %s" % (i, lines[i]) )
            fname_old = lines[i][4:].strip()
            if len(lines)==i+1:
                raise Exception("Section not complete, second file declaration expected at line %i EOF found" % (i) )
            if not lines[i+1].startswith("+++ "):
                raise Exception("Section not complete, second file declaration expected at line %i, found: %s" % (i, lines[i+1]) )
            fname_new = lines[i+1][4:].strip()
            section = Section()
            section.fname_old = fname_old
            section.fname_new = fname_new
            diff.sections.append(section)
            Parser.HunkParser().eat(lines, i+2, section, diff)

    class HunkParser:
        def eat(self, lines, i, section, diff):
            """Consume one '@@ ... @@' hunk starting at line *i*; if the line
            is not a hunk header, fall back to SectionParser (next file)."""
            if len(lines)==i:
                return
            if not lines[i].startswith("@@ "):
                Parser.SectionParser().eat(lines, i, diff)
                return
            # Header shaped like '@@ -a,b +c,d @@'; strip '@@ -' and ' @@\n'.
            # NOTE(review): in unified diff the second number of each pair is
            # a line *count*, yet it is stored as end_old/end_new — confirm
            # Hunk.lines_old()/lines_new() account for that.
            parts = lines[i][4:-4].split("+")
            hunk = Hunk()
            hunk.start_old = int(parts[0].split(",")[0])
            hunk.end_old = int(parts[0].split(",")[1])
            hunk.start_new = int(parts[1].split(",")[0])
            hunk.end_new = int(parts[1].split(",")[1])
            section.hunks.append(hunk)
            old_toread = hunk.lines_old()
            new_toread = hunk.lines_new()
            cur = i+1
            next_old = hunk.start_old
            next_new = hunk.start_new
            # Walk the hunk body: ' ' = context, '+' = added, '-' = removed.
            while old_toread>0 or new_toread>0:
                if lines[cur].startswith(" "):
                    old_toread -= 1
                    new_toread -= 1
                    hunk.store_old(next_old, False, lines[cur][1:])
                    hunk.store_new(next_new, False, lines[cur][1:])
                    next_old += 1
                    next_new += 1
                elif lines[cur].startswith("+"):
                    new_toread -= 1
                    hunk.store_new(next_new, True, lines[cur][1:])
                    next_new += 1
                elif lines[cur].startswith("-"):
                    old_toread -= 1
                    hunk.store_old(next_old, True, lines[cur][1:])
                    next_old += 1
                else:
                    raise Exception("Line %i" % cur)
                cur+=1
            # Recurse: the next line may be another hunk or a new section.
            self.eat(lines, cur, section, diff)

    class DiffLineParser:
        # Placeholder for a finer-grained per-line parser.
        pass

    def parse(self, fname):
        """Parse the diff file at *fname* and return a populated Diff."""
        with open(fname) as f:
            lines = f.readlines()
        diff = Diff()
        Parser.SectionParser().eat(lines, 0, diff)
        return diff
140414 | from django.apps import AppConfig
class SharingmylinkfrontendConfig(AppConfig):
    """App configuration for the SharingMyLinkFrontEnd Django app."""
    # 64-bit auto-increment primary keys by default (Django 3.2+).
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'SharingMyLinkFrontEnd'
| StarcoderdataPython |
4841834 | from unittest import skip
import numpy as np
from VariableUnittest import VariableUnitTest
from gwlfe.AFOS.nonGrazingAnimals.Losses import NGLostBarnNSum
class TestNGLostBarnNSum(VariableUnitTest):
    """Compares the vectorised (_f) and reference NGLostBarnNSum outputs."""

    @skip('Not Ready Yet.')
    def test_NGLostBarnNSum(self):
        # z fixture comes from VariableUnitTest; currently unused while skipped.
        z = self.z
        np.testing.assert_array_almost_equal(
            NGLostBarnNSum.NGLostBarnNSum_f(),
            NGLostBarnNSum.NGLostBarnNSum(), decimal=7)
| StarcoderdataPython |
# Prompt until the user enters a valid integer N > 100, then for each
# i in 11..N print the value (i - 1) + i*i.
#
# Fixes over the original: removed the unused variable `s`, the rebinding
# of the loop variable `i`, and the dead `if i < 11: i = 10` clamp — the
# expression (i - 1) + i*i is at least 131 for every i >= 11, so the clamp
# could never fire.
while True:
    try:
        n = int(input("Enter N: "))
    except ValueError:
        print("Enter correct number!")
    else:
        if n <= 100:
            print("Error: N must be greater than 100!")
        else:
            for i in range(11, n + 1):
                print((i - 1) + i * i)
            break
| StarcoderdataPython |
161725 | <reponame>shahparth123/deploying-research
from flask import Flask, redirect, url_for, request
app = Flask(__name__)
@app.route('/calculator/add/<a>/<b>')
def add1(a, b):
    """Add the two integer path components and report the sum."""
    total = int(a) + int(b)
    return 'answer is %s' % total
@app.route('/calculator/mul/<a>/<b>')
def mul1(a, b):
    """Multiply the two integer path components and report the product."""
    product = int(a) * int(b)
    return 'answer is %s' % product
@app.route('/calculator/sub/<a>/<b>')
def sub1(a, b):
    """Subtract integer path component b from a and report the difference."""
    difference = int(a) - int(b)
    return 'answer is %s' % difference
@app.route('/calculator/div/<a>/<b>')
def div1(a, b):
    """Divide integer path component a by b (true division).

    Fix: a zero divisor previously raised an unhandled ZeroDivisionError
    (HTTP 500); it now returns an explanatory message instead.
    """
    divisor = int(b)
    if divisor == 0:
        return 'error: division by zero'
    ans = int(a) / divisor
    return 'answer is %s' % ans
'''
@app.route('/calculator/<a>/<b>')
def success(a,b):
ans = int(a)+int(b)
return 'answer is %s' % ans
'''
@app.route('/calculator2', methods=['POST', 'GET'])
def calc():
    """Add parameters 'a' and 'b' from a POSTed form or GET query string."""
    if request.method == 'POST':
        first, second = request.form['a'], request.form['b']
        return str(int(first) + int(second))
    elif request.method == 'GET':
        first, second = request.args.get('a'), request.args.get('b')
        return str(int(first) + int(second))
@app.route('/jsondemo', methods=['POST', 'GET'])
def jsondemo():
    """Add fields 'a' and 'b' of a JSON request body; return the sum as text."""
    payload = request.get_json(silent=True, force=True)
    total = int(payload['a']) + int(payload['b'])
    return str(total)
# Development entry point: listen on every interface, port 9090, with the
# debugger/reloader enabled (not suitable for production).
if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0', port=9090)
| StarcoderdataPython |
83452 | <filename>app.py
#!/usr/bin/env python3
# local imports
from os import name
from os.path import abspath, dirname, join
import time
import logging
import sqlite3
from sqlite3 import Error
import requests
from flask import flash, Flask, Markup, redirect, render_template, url_for, request
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.orm import relationship
# create the flask application object
app = Flask(__name__)
app.config.from_object(__name__)
# create/connect to the db
_cwd = dirname(abspath(__file__))
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + join(_cwd, 'flask-database.db')
app.config['SQLALCHEMY_ECHO'] = True
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db = SQLAlchemy(app)
# database models
class Category(db.Model):
    """A delegate category used to group delegates on the listing pages."""
    __tablename__ = 'category'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String)

    def __repr__(self):
        return '<Category {:d} {}>'.format(self.id, self.name)

    def __str__(self):
        return self.name
class Delegate(db.Model):
    """An exhibitor/delegate attending the convention."""
    __tablename__ = 'delegate'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String)
    location = db.Column(db.String)
    description = db.Column(db.String)
    category_id = db.Column(db.Integer, db.ForeignKey('category.id'))
    # internalurl is the slug used in /delegate/<internalURL> routes.
    internalurl = db.Column(db.String)
    externalurl = db.Column(db.String)
    # NOTE(review): two relationships to Category are declared; the views use
    # `category` (eager-joined) — confirm whether `categoryRelationship` is
    # still needed.
    categoryRelationship = relationship("Category",backref="Category",uselist=True)
    category = relationship("Category",lazy="joined")

    def __repr__(self):
        return '<Delegate {:d} {}>'.format(self.id, self.name)

    def __str__(self):
        return self.name
def returnOrderByField(querystringParameter):
    """Map a ?sort= query-string value onto the Delegate column to order by."""
    # Only 'location' is recognised; anything else falls back to name order.
    return Delegate.location if querystringParameter == "location" else Delegate.name
# Every page that is on the website will need an app.route defined here
@app.route('/')
def get_home():
return render_template('home.html', title='Home', description='This is the meta-description.')
@app.route('/about')
def get_about():
return render_template('about.html', title='More about our Convention', description='')
@app.route('/acknowledgements')
def get_acknowledgements():
return render_template('acknowledgements.html', title='Acknowledgements of the Convention', description='')
@app.route("/delegates/", defaults={"internalURL": None})
@app.route('/delegates/<string:internalURL>')
def get_delegates_filtered(internalURL):
if internalURL is None:
#Simply return all the delegate records - sorted if necessary
query = Delegate.query.filter(Delegate.id >= 0).order_by(returnOrderByField(request.args.get('sort', default = 'name', type = str)))
builtDescription=""
filteredView=0
else:
#Filter the delegate records to only those whose category name matches the filter.
#We have to replace the dashes (-) back to spaces, that were removed in the template files, for this to work.
query = Delegate.query.filter(Delegate.category.has(name=internalURL.replace("-"," "))).order_by(returnOrderByField(request.args.get('sort', default = 'name', type = str)))
filteredView=1
builtDescription=internalURL.replace("-"," ")
return render_template('delegates.html', title='Delegates attending the Convention', description=builtDescription, filteredView=filteredView,rows=query.all())
@app.route('/delegate/<string:internalURL>')
def get_delegate(internalURL):
    """Detail page for a single delegate looked up by its URL slug (404 if absent)."""
    query = Delegate.query.filter_by(internalurl=internalURL).first_or_404()
    # In this instance, the meta title and description values must come from the database.
    return render_template('delegate.html', title='', description='', row=query)
@app.route('/feedback')
def get_feedback():
    """Render the (work-in-progress) feedback form page."""
    # TO DO - Create the form itself.
    # TO DO - Create a new route to send the form contents to the database.
    return render_template('feedback.html', title='Feedback Form', description='')

@app.route('/map')
def get_map():
    return render_template('map.html', title='Location Map of the Convention', description='')

@app.route('/news')
def get_news():
    return render_template('news.html', title='Latest news from the Careers Convention', description='')

@app.errorhandler(404)
def page_not_found(error):
    # Custom 404 page; the tuple's second element sets the HTTP status code.
    return render_template('error404.html', title = 'Page not found'), 404
# start the server with the 'run()' method - debug=True for testing - NOT LIVE
if __name__ == '__main__':
    app.debug = True
    # Create any missing tables before serving requests.
    db.create_all(app=app)
    db.init_app(app=app)
    app.run()
    #app.run(debug=True)
| StarcoderdataPython |
3274399 | <reponame>c-yan/atcoder
def f(data, total, targets):
    """Minimum number of problems needed to earn at least *total* points.

    data[i] = [num_problems, completion_bonus_score, per_problem_score,
    full_category_total]; *targets* is the tuple of category indices still
    available for full completion.  Uses the module-level `calced` set as a
    visited-state memo and `INF` as the "unreachable" sentinel.
    """
    if total <= 0:
        # Target already met: no further problems required.
        return 0
    result = INF
    for i in range(len(targets)):
        p, _, score, subtotal = data[targets[i]]
        # Option 1: partially solve category i (no completion bonus);
        # t = ceil(total / score), only valid if it stays below p problems.
        t = (total + score - 1) // score
        if t < p and t < result:
            result = t
        # Option 2: fully solve category i (bonus included), recurse on rest.
        nt = tuple(targets[0:i] + targets[i + 1:])
        if nt not in calced:
            # NOTE(review): `calced` only marks target-sets as visited; it
            # skips recomputation rather than caching results — confirm this
            # pruning cannot discard a cheaper path.
            calced.add(nt)
            t = p + f(data, total - subtotal, nt)
            if t < result:
                result = t
    calced.add(targets)
    return result
# Problem input: D score categories and a target score G.
INF = float('inf')
D, G = map(int, input().split())
data = []
calced = set()
for i in range(D):
    p, c = map(int, input().split())
    # [num problems, completion bonus, per-problem score, total incl. bonus]
    data.append([p, c, (i + 1) * 100, (i + 1) * 100 * p + c])
print(f(data, G, tuple(range(len(data)))))
| StarcoderdataPython |
3335038 | from distutils.core import setup
import py2exe
# py2exe build configuration: bundle rawpymal.py and its dependencies into
# a single optimised console executable named "Mal".
setup(
    name = "Mal",
    description = "Python-based App",
    version = "1.0",
    console=["rawpymal.py"],
    options = {
        "py2exe": {
            "unbuffered": True,      # do not buffer stdout/stderr
            "optimize": 2,           # compile with -OO (strips docstrings)
            "bundle_files": 1,       # everything into one file
            "packages":"ctypes",
            "includes": "base64,sys,socket,struct,time,code,platform,getpass,shutil"
        }
    }
)
1793006 | <gh_stars>0
import array
import os
from MSTClustering import *
from optparse import OptionParser
# Python 2 analysis script: run MST clustering on calorimeter crystals from a
# recon ROOT file and write per-event cluster statistics into an ntuple.
# NOTE(review): `sys`, `ROOT`, `ReconReader` and `MSTClustering` are assumed
# to come from the wildcard `from MSTClustering import *` — confirm.
usage = 'test_MST.py reconFile [opt] <reconFile>'
parser = OptionParser()
parser.add_option('-o', '--output-file', type = str, dest = 'o',
                  default = None,
                  help = 'path to the output file')
parser.add_option('-x', '--max-num-xtals', type = int, dest = 'x',
                  default = 10000,
                  help = 'maximum number of xtal for running the MST')
parser.add_option('-m', '--max-num-clusters', type = int, dest = 'm',
                  default = 5,
                  help = 'maximum number of clusters in the output tree')
parser.add_option('-w', '--max-edge-weight', type = float, dest = 'w',
                  default = None,
                  help = 'threshold length for the MST clustering (in mm)')
parser.add_option('-n', '--num-events', type = int, dest = 'n',
                  default = 10000000,
                  help = 'number of events to be processed')
(opts, args) = parser.parse_args()
# Exactly one or two positional arguments are accepted; the first is the
# recon input file.
if len(args) == 0:
    print 'Usage: %s' % usage
    sys.exit('Please provide a recon input root file.')
elif len(args) > 2:
    print 'Usage: %s' % usage
    sys.exit('Too many arguments.')
inputFilePath = args[0]
# Default output path: derive it from the input file name.
outputFilePath = opts.o or inputFilePath.replace('recon.root', 'MSTClu.root')
if os.path.exists(outputFilePath):
    sys.exit('Output file %s exists, remove it first.' % outputFilePath)
MAX_NUM_XTALS = opts.x
WEIGHT_THRESHOLD = opts.w
MAX_NUM_CLUSTERS = opts.m
reader = ReconReader(inputFilePath)
# A negative -n means "process every event in the file".
numEvents = min(opts.n, reader.getEntries())
if numEvents < 0:
    numEvents = reader.getEntries()
outputFile = ROOT.TFile(outputFilePath, 'RECREATE')
outputTree = ROOT.TTree('MSTTuple', 'MSTTuple')
arrayDict = {}
# Branch name -> (array typecode, number of elements).
BRANCH_DICT = {'EvtRun'              : ('i', 1),
               'EvtEventId'          : ('i', 1),
               'CalEnergyRaw'        : ('f', 1),
               'McEnergy'            : ('f', 1),
               'TkrNumTracks'        : ('f', 1),
               'CalCsIRLn'           : ('f', 1),
               'NumXtals'            : ('i', 1),
               'NumClusters'         : ('i', 1),
               'UberClusterNumXtals' : ('f', 1),
               'UberClusterEnergy'   : ('f', 1),
               'UberClusterMeanW'    : ('f', 1),
               'UberClusterRmsW'     : ('f', 1),
               'UberClusterMaxW'     : ('f', 1),
               'ClusterNumXtals'     : ('i', MAX_NUM_CLUSTERS),
               'ClusterEnergy'       : ('f', MAX_NUM_CLUSTERS),
               'ClusterMeanW'        : ('f', MAX_NUM_CLUSTERS),
               'ClusterRmsW'         : ('f', MAX_NUM_CLUSTERS),
               'ClusterMaxW'         : ('f', MAX_NUM_CLUSTERS)
               }
# Create one ROOT branch per entry, backed by a Python array buffer.
for (branchName, (branchType, branchSize)) in BRANCH_DICT.items():
    a = array.array(branchType, [0]*branchSize)
    arrayDict[branchName] = a
    if branchSize == 1:
        branchTitle = '%s/%s' % (branchName, branchType.upper())
    else:
        branchTitle = '%s[%d]/%s' %\
                      (branchName, branchSize, branchType.upper())
    outputTree.Branch(branchName, a, branchTitle)
# Event loop: copy merit variables and, when the crystal count is tractable,
# run the MST clustering and record per-cluster statistics.
for i in xrange(numEvents):
    reader.getEntry(i)
    print '\nProcessing event %d/%d...' % (i, numEvents)
    xtalCol = reader.getCalXtalRecCol()
    numXtals = reader.getNumCalXtals()
    arrayDict['EvtRun'][0] = reader.getMeritVariable('EvtRun')
    arrayDict['EvtEventId'][0] = reader.getMeritVariable('EvtEventId')
    arrayDict['CalEnergyRaw'][0] = reader.getMeritVariable('CalEnergyRaw')
    arrayDict['McEnergy'][0] = reader.getMeritVariable('McEnergy')
    arrayDict['TkrNumTracks'][0] = reader.getMeritVariable('TkrNumTracks')
    arrayDict['CalCsIRLn'][0] = reader.getMeritVariable('CalCsIRLn')
    arrayDict['NumXtals'][0] = numXtals
    if numXtals <= MAX_NUM_XTALS:
        clustering = MSTClustering(xtalCol, WEIGHT_THRESHOLD)
        numClusters = clustering.getNumClusters()
        arrayDict['NumClusters'][0] = numClusters
        uberCluster = clustering.getUberCluster()
        arrayDict['UberClusterNumXtals'][0] = uberCluster.getNumNodes()
        arrayDict['UberClusterEnergy'][0] = uberCluster.EnergySum
        arrayDict['UberClusterMeanW'][0] = uberCluster.getMeanEdgeWeight()
        arrayDict['UberClusterRmsW'][0] = uberCluster.getRmsEdgeWeight()
        arrayDict['UberClusterMaxW'][0] = uberCluster.getMaxEdgeWeight()
        for cId in xrange(MAX_NUM_CLUSTERS):
            if cId < numClusters:
                c = clustering.getCluster(cId)
                arrayDict['ClusterNumXtals'][cId] = c.getNumNodes()
                arrayDict['ClusterEnergy'][cId] = c.EnergySum
                arrayDict['ClusterMeanW'][cId] = c.getMeanEdgeWeight()
                arrayDict['ClusterRmsW'][cId] = c.getRmsEdgeWeight()
                arrayDict['ClusterMaxW'][cId] = c.getMaxEdgeWeight()
            else:
                # Zero-pad the unused cluster slots.
                arrayDict['ClusterNumXtals'][cId] = 0
                arrayDict['ClusterEnergy'][cId] = 0.0
                arrayDict['ClusterMeanW'][cId] = 0.0
                arrayDict['ClusterRmsW'][cId] = 0.0
                arrayDict['ClusterMaxW'][cId] = 0.0
    else:
        # Too many crystals: skip clustering and write zeros.
        arrayDict['NumClusters'][0] = 0
        arrayDict['UberClusterNumXtals'][0] = 0
        arrayDict['UberClusterEnergy'][0] = 0.0
        arrayDict['UberClusterMeanW'][0] = 0.0
        arrayDict['UberClusterRmsW'][0] = 0.0
        arrayDict['UberClusterMaxW'][0] = 0.0
        for cId in xrange(MAX_NUM_CLUSTERS):
            arrayDict['ClusterNumXtals'][cId] = 0
            arrayDict['ClusterEnergy'][cId] = 0.0
            arrayDict['ClusterMeanW'][cId] = 0.0
            arrayDict['ClusterRmsW'][cId] = 0.0
            arrayDict['ClusterMaxW'][cId] = 0.0
    outputTree.Fill()
outputFile.Write()
outputFile.Close()
| StarcoderdataPython |
193387 | <reponame>sos1sos2Sixteen/tool-shack<filename>tests/basic.py
import traceback
import argparse
import tool_shack.debug as debug
import tool_shack.core as core
import tool_shack.scripting as scripting
import tool_shack.data as data
from tool_shack.debug import testcase
def tprint(*args, **kwargs):
    """print() variant that prefixes the line with a space (indented test output)."""
    print(' ', end='')
    print(*args, **kwargs)
@testcase()
def test_package():
    # Smoke test: the package imports and its sanity helper works.
    assert core._sanity_check(2) == 3

@testcase()
def test_aggregator():
    """AggregateVars should collect the values bound inside its block."""
    import os.path as osp
    root_dir = 'some_where'
    with scripting.AggregateVars() as agg:
        folder_1 = osp.join(root_dir, 'pretrained')
        subfolder = osp.join(folder_1, 'designations')
        folder_2 = osp.join(root_dir, 'events')
    for dir in agg:
        tprint(f'{dir} exists: {osp.exists(dir)}')

@testcase(should_fail=True)
def failed_successfully():
    # Intentionally raises to exercise the should_fail path of @testcase.
    raise NotImplementedError()

@testcase()
def agg_funcs():
    """AggregateVars should also collect function objects defined in the block."""
    with scripting.AggregateVars() as agg:
        def f():
            return 0
        def g():
            return 1
    for h in agg:
        tprint(h.__name__, h())
@testcase()
def test_bigand():
    # big_and folds a boolean iterable; contains_all checks subset membership.
    assert core.big_and((True, True, True)) == True
    assert core.big_and([False, True, True]) == False
    assert core.contains_all(
        (1, 2), [1, 3, 4, 5]
    ) == False
    assert core.contains_all(
        [1, 2], (1, 2, 3)
    ) == True

@testcase()
def test_null():
    # nullcall must accept any combination of arguments without raising.
    core.nullcall()
    core.nullcall(1, 2, 3, abc=3)

@testcase()
def test_unique():
    # unique_filter drops duplicates (order preserved); take_every samples
    # every 3rd element.  core.tell prints the value and passes it through.
    xs = [1,1,1,1,1,2,2,2,2,6,7,8]
    assert [1,2,6,7,8] == core.tell(tprint, list(data.unique_filter(xs)))
    assert [1,2,2,8] == core.tell(tprint, data.take_every(3, xs))
@testcase()
def test_indices():
    """index_by maps key -> last item, group_by maps key -> list of items,
    reduce_group folds each group with a reducer."""
    import copy
    xs = [
        (1, 'one'),
        (2, 'two'),
        (3, 'three')
    ]
    dxs = copy.copy(xs)
    dxs.append((3, 'another three'))
    getter = lambda x: x[0]
    index_res = {
        1 : (1, 'one'),
        2 : (2, 'two'),
        3 : (3, 'three')
    }
    group_res = {
        1 : [(1, 'one')],
        2 : [(2, 'two')],
        3 : [(3, 'three'), (3, 'another three')]
    }
    assert core.tell(tprint, data.index_by(getter, xs)) == index_res
    assert data.group_by(getter, dxs) == group_res
    assert data.reduce_group(
        lambda xs: sum([x[0] for x in xs]), group_res
    ) == {
        1: 1,
        2: 2,
        3: 6
    }

def test_find_attr():
    """find_attr should match attributes by substring or compiled regex."""
    import re
    class A():
        def test_a(self):
            '''doc string for method `a`'''
            pass
        def test_b(self):
            '''doc string for method b'''
            pass
    debug.find_attr(A(), 'test')
    debug.find_attr(A(), re.compile('test'))
def main():
    """Run every test case, counting failures; -v prints stack traces."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-v', '--verbose', action='store_true', default=False, help='print stack trace on error')
    args = parser.parse_args()
    n_failed = 0
    test_cases = [
        test_package,
        test_aggregator,
        failed_successfully,
        agg_funcs,
        test_bigand,
        test_null,
        test_unique,
        test_indices,
        test_find_attr,
    ]
    for case in test_cases:
        try:
            case()
        except Exception as e:
            # Keep going after a failure; report the exception type.
            n_failed += 1
            print(f'case failed with {type(e)}')
            if args.verbose:
                print(traceback.format_exc())
    if n_failed == 0:
        print(f'all tests passed succesfully!')
    else :
        print(f'{n_failed} tests failed, rerun with -v flag for a stack trace')

if __name__ == '__main__':
    main()
    pass
| StarcoderdataPython |
137542 | <filename>modules/show_case/src/pages/examplebutton/examplebutton.py
from kivy.uix.screenmanager import Screen
from kivy.lang import Builder
from kivy.uix.floatlayout import FloatLayout
from kivy.clock import Clock
from kivy_modules.kivyapi import kivyapi
class ExampleButton(Screen):
    """Showcase screen whose layout is loaded from its .kv file at init time."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Path is relative to the app's working directory.
        # NOTE(review): Builder.load_string runs on every instantiation, so
        # constructing this screen twice would register the kv rules twice —
        # confirm the screen is only ever created once.
        with open('./src/pages/examplebutton/examplebutton.kv', 'r', encoding = 'utf-8') as screen:
            Builder.load_string(screen.read())
3307293 | <reponame>RohanTej/restaurant-manager
from django.shortcuts import render
from django.http import HttpResponse
from .models import Post
from django.urls import reverse
from django.views.generic import ListView, DetailView, CreateView
def home_view(request):
    """Function-based view: render the blog home page with every post."""
    context = {
        'posts': Post.objects.all()
    }
    return render(request, 'blog/home.html', context)
class PostListView(ListView):
    """Class-based listing of posts, newest first."""
    model = Post
    template_name = 'blog/home.html'  # <app>/<model>_<viewtype>.html
    context_object_name = 'posts'
    ordering = ['-date_posted']


class PostDetailView(DetailView):
    """Detail page for a single Post (default template and context)."""
    model = Post
class PostCreateView(CreateView):
    """Form view for creating a Post; the author comes from the request."""
    model = Post
    fields = ['title', 'content']

    def form_valid(self, form):
        # Stamp the logged-in user as the author before saving.
        # NOTE(review): no LoginRequiredMixin — an anonymous request would
        # set an AnonymousUser here; confirm access is restricted elsewhere.
        form.instance.author = self.request.user
        return super().form_valid(form)

    def get_success_url(self):
        # Redirect to the newly created post's detail page.
        return reverse('blog-detail', kwargs={'pk': self.object.id})
1795849 | import os, sys
import tensorflow as tf
# pass in model path as arg (eg - /tf-output/latest_model)
# python score-model.py '../tf-output/latest_model'
# Score a retrained TF1 image-classification graph over a fixed set of
# test images and report the mean top-1 confidence.
#
# Fixes over the original: the average divided by a hard-coded 10 instead
# of len(images), and the output-tensor lookup was repeated inside the
# per-image loop although it never changes.
model_path = sys.argv[1]
# One class label per line, ordered to match the graph's output units.
label_lines = [line.rstrip() for line in tf.gfile.GFile(model_path + "/got_retrained_labels.txt")]
with tf.gfile.FastGFile(model_path + "/got_retrained_graph.pb", 'rb') as f:
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(f.read())
    tf.import_graph_def(graph_def, name='')
total_score = 0
images = ['jon-snow.jpg', 'night-king.jpg', 'cersei.jpg', 'robert-baratheon.jpg',
          'theon-greyjoy.jpg', 'daenerys-targaryen.jpg', 'drogon.jpg',
          'hodor.jpg', 'samwell.jpg', 'tyrion.jpg']
with tf.Session() as sess:
    # The output tensor is the same for every image; fetch it once.
    softmax_tensor = sess.graph.get_tensor_by_name('final_result:0')
    for image in images:
        image_data = tf.gfile.FastGFile(image, 'rb').read()
        predictions = sess.run(softmax_tensor, {'DecodeJpeg/contents:0': image_data})
        # Class indices sorted from most to least probable.
        top_k = predictions[0].argsort()[-len(predictions[0]):][::-1]
        score = predictions[0][top_k[0]]
        character = label_lines[top_k[0]]
        print(character + ': ' + str(score))
        total_score = total_score + score
# NOTE: this is the mean top-1 confidence over the test images, not a
# labelled accuracy — no ground truth is checked here.
avg_score = total_score / len(images)
print('---')
print('average model accuracy: ' + str(avg_score))
1640968 | <filename>code/ThreadDTO.py<gh_stars>0
class ThreadDTO:
    """Plain data-transfer object describing a discussion-board thread."""

    def __init__(self, id, subject, author, comment, fileurl, published, sticky, closed):
        """Capture the thread fields.

        *published* must be datetime-like: it is formatted immediately with
        strftime('%c'), so the DTO stores a display string, not a datetime.
        """
        self.id = id
        self.subject = subject
        self.author = author
        self.comment = comment
        self.fileurl = fileurl
        self.published = published.strftime('%c')
        self.sticky = sticky
        self.closed = closed

    def to_JSON(self):
        """Serialise this DTO to a JSON object of its attributes.

        Fix: the module never imported `json`, so this method raised
        NameError at runtime; the import is now done locally.
        """
        import json  # local import: the module has no top-level imports
        return json.dumps(self, default=lambda o: o.__dict__)
1610886 | # Copyright 2018 PerfKitBenchmarker Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs memtier benchmark against memcached on cloud virtual machines.
Memcached is an in-memory key-value store for small chunks of arbitrary
data (strings, objects) from results of database calls, API calls, or page
rendering.
Memcached homepage: https://memcached.org/
Memtier_benchmark is a load generator created by RedisLabs to benchmark
NoSQL key-value databases.
Memtier_benchmark homepage: https://github.com/RedisLabs/memtier_benchmark
Memtier_benchmark usage:
https://redislabs.com/blog/memtier_benchmark-a-high-throughput-benchmarking-tool-for-redis-memcached/
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
from perfkitbenchmarker import configs
from perfkitbenchmarker import flags
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.linux_packages import memcached_server
from perfkitbenchmarker.linux_packages import memtier
FLAGS = flags.FLAGS
BENCHMARK_NAME = 'memcached_memtier'
BENCHMARK_CONFIG = """
memcached_memtier:
description: Run memtier against a memcached installation.
vm_groups:
server:
vm_spec: *default_single_core
vm_count: 1
client:
vm_spec: *default_single_core
vm_count: 1
"""
def GetConfig(user_config):
  """Load this benchmark's config, merging in any user-supplied overrides."""
  return configs.LoadConfig(BENCHMARK_CONFIG, user_config, BENCHMARK_NAME)
def _InstallMemtier(vm):
  """Install the memtier_benchmark load generator on a client VM."""
  vm.Install('memtier')


def _InstallMemcached(vm):
  """Install the memcached server package on a server VM."""
  vm.Install('memcached_server')
def Prepare(benchmark_spec):
  """Prepare the virtual machines to run memtier against memcached.

  Installs memtier on the client VMs and memcached on the server VMs
  (installs run in parallel), then configures and starts the servers.

  Args:
    benchmark_spec: The benchmark specification. Contains all data that is
        required to run the benchmark.
  """
  client = benchmark_spec.vm_groups['client']
  server = benchmark_spec.vm_groups['server']
  vm_util.RunThreaded(_InstallMemtier, client)
  vm_util.RunThreaded(_InstallMemcached, server)
  vm_util.RunThreaded(memcached_server.ConfigureAndStart, server)
def Run(benchmark_spec):
  """Runs memtier against memcached and gathers the results.

  Args:
    benchmark_spec: The benchmark specification. Contains all data that is
        required to run the benchmark.

  Returns:
    A list of sample.Sample instances.
  """
  # Single client driving a single server over its internal address.
  client = benchmark_spec.vm_groups['client'][0]
  server = benchmark_spec.vm_groups['server'][0]
  server_ip = server.internal_ip
  metadata = {'memcached_version': memcached_server.GetVersion(server),
              'memcached_server_size': FLAGS.memcached_size_mb}
  logging.info('Start benchmarking memcached using memtier.')
  samples = memtier.Run(client, server_ip, memcached_server.MEMCACHED_PORT)
  # Tag every sample with the server-side metadata for later analysis.
  for sample in samples:
    sample.metadata.update(metadata)
  return samples
def Cleanup(unused_benchmark_spec):
  """Nothing benchmark-specific to clean up; VM teardown handles the rest."""
  pass
| StarcoderdataPython |
1712634 | <reponame>EHilly/dreamnet
import random
import os
import requests
import pattern.en as en
from dreamnet.english_cnet import *
current_dir = os.path.dirname(__file__)
dream_ideas = open(os.path.join(current_dir, "nounlist.txt")).read().splitlines()
def locationItems(concept, min_weight=2, min_items=3):
    """Return nodes strongly associated with being located at concept.
    If not enough nodes can be found, return None.

    Queries ConceptNet for AtLocation edges ending at *concept*, keeps start
    terms whose edge weight is at least *min_weight* and that offer at least
    one interaction option, then returns a random sample of exactly
    *min_items* of them.
    """
    edges = requests.get("http://api.conceptnet.io/query?end={}&" \
        "rel=/r/AtLocation&limit=1000".format(concept)).json()['edges']
    items = [x['start']['term'] for x in edges if x['weight'] >= min_weight]
    # only include items that actually let the user do something with them
    items = list(filter(lambda x: len(conceptOptions(x)) > 0, items))
    if len(items) < min_items: return None
    return random.sample(items, min_items)
def generateOptions(concept):
    """Generate and print a numbered list of ways the user can interact
    with an item.  Returns the option dicts (labels rewritten to
    infinitive form), or None when no options exist."""
    options = conceptOptions(concept)
    if not len(options):
        print("You can't think of anything to do with {}.".format(
            termToReadable(concept)))
        return None
    readable = termToReadable(concept)
    for opt_num, opt in enumerate(options, 1):
        # Normalise the verb phrase so it reads as "Use X to <do thing>".
        opt['label'] = conjugateVerbs(opt['label'], "INFINITIVE")
        print('{}. Use {} to {}'.format(opt_num, readable, opt['label']))
    return options
def conceptOptions(concept):
    """Query what the concept is capable of, required for, and used
    for, and return up to three random samples from the results."""
    # Pool the three relation queries, then sample at most three options.
    allOptions = (query(concept, 'CapableOf', all_edges=True) +
                  query(concept, 'HasPrerequisite', all_edges=True,
                        reverse=True) +
                  query(concept, 'UsedFor', all_edges=True))
    return random.sample(allOptions, min(3, len(allOptions)))
def describeObject(concept, max_descriptions=4):
    """Print up to *max_descriptions* facts describing an object concept."""
    # Relation name -> sentence template ({0}=concept, {1}=related label).
    query_messages = {"IsA" : "{} is a type of {}.",
                      "HasProperty" : "{} is {}.",
                      "HasA" : "{} has {}.",
                      "Desires" : "{} wants to {}.",
                      "NotDesires" : "{} doesn't want to {}.",
                      "PartOf" : "{} is part of {}.",
                      "CausesDesire" : "{} makes you want to {}.",
                      "RecievesAction" : "{} can be {}."}
    describeConcept(concept, query_messages)

def describeLocation(concept):
    """Print a few facts describing a location concept."""
    query_messages = {"LocationOf" : "{} is located at {}.",
                      "HasA" : "{} has a {}.",
                      "IsA" : "{} is a type of {}."}
    describeConcept(concept, query_messages)
def describeConcept(concept, query_messages, max_descriptions=4):
    """Accept a dict of relation names mapped to message templates.
    Query a few of these relations, printing the templates with
    information about the concept."""
    readable = termToReadable(concept)
    # Fallback line, shown only if no relation produced a description.
    default_description = "There's not much to say about {}.".format(readable)
    descriptions_given = 0
    for q, m in query_messages.items():
        if descriptions_given >= max_descriptions: break
        result = query(concept, q)
        if result:
            print(m.format(readable, result['label']).capitalize(), end=" ")
            default_description = ""
            descriptions_given += 1
    # Prints the fallback (or just the trailing newline if facts were found).
    print(default_description)
def optionByproducts(opt):
    """Given a concept node that represents a verb phrase, figure out
    what new items will be established in the scene after carrying out
    that phrase. Print each item."""
    direct_object = findDirectObject(opt['label'])
    byproducts = [direct_object] if direct_object else []
    caused = query(opt['term'], 'Causes')
    created_by = query(opt['term'], 'CreatedBy', reverse=True)
    # Only keep results that themselves support further interaction.
    if caused and conceptOptions(caused['term']):
        byproducts.append(caused['term'])
    if created_by and conceptOptions(created_by['term']):
        byproducts.append(created_by['term'])
    byproducts = [attemptSingularization(term) for term in byproducts]
    for term in byproducts:
        print("There is now {} in the scene.".format(
            en.referenced(termToReadable(term))))
    return byproducts
class Scene:
    """Game state: the current location term, the set of item terms
    present in the scene, the player's inventory, and the numbered
    options produced by the last interaction."""

    def __init__(self, location="", items=None, inventory=None):
        """
        :param location: concept term for the current location
        :param items: set of concept terms present in the scene
        :param inventory: list of concept terms carried by the player
        """
        # Bug fix: the old defaults ``items=set(), inventory=[]`` were
        # mutable default arguments shared across every Scene instance.
        self.location = location
        self.items = set() if items is None else items
        self.inventory = [] if inventory is None else inventory
        self.options = []

    def handleInput(self, inp):
        """Dispatch a raw line of user input to the matching handler."""
        if not inp: return
        words = inp.lower().split()
        command = words[0]
        if command[0] == "\'":
            command = command.strip("\'")
            print("Don't actually type the quotation marks, silly.")
        if inp.startswith("the name of an object"): print("Very funny.")
        if self.options and command[0].isnumeric():
            self.selectOption(command)
        elif command == "look":
            self.look(words[1:])
            self.options = None
        elif command == "dream":
            self.dream(' '.join(words[1:]))
            self.options = None
        else:
            self.interact(inp)

    def interact(self, concept):
        """Handle input that does not start with a special
        command word like 'look', 'dream' or '2'."""
        asTerm = readableToTerm(concept)
        if asTerm not in self.items:
            if self.options:
                print("If you're trying to select an option," \
                    " you just have to type '1' or '2' or '3'.")
            print("There is no {} here.".format(concept))
            print("Type 'help' to see the command formats.")
            return
        describeObject(asTerm)
        self.options = generateOptions(asTerm)

    def selectOption(self, opt_num):
        """Print the consequences of the chosen course of action.
        Add any new items created by it to the scene."""
        try:
            n = int(opt_num)
            if n < 1 or n > 3: raise ValueError("Improper option index")
            opt = self.options[n - 1]
        except (ValueError, IndexError, TypeError):
            # ValueError: non-integer / out-of-range input;
            # IndexError: fewer options than requested;
            # TypeError: options unexpectedly None.  The old bare
            # ``except:`` also silenced KeyboardInterrupt and friends.
            print("Not a valid option.")
            return
        print('You {}.'.format(opt['label']))
        sub = query(opt['term'], 'HasSubevent')
        if sub:
            print('As a result of {}, you {}.'.format(
                conjugateVerbs(opt['label'], "continuous"),
                conjugateVerbs(sub['label'], "infinitive")))
        self.items.update(optionByproducts(opt))

    def dream(self, concept):
        """Create a new scene based off of the input concept."""
        new_items = []
        concept = attemptSingularization(readableToTerm(concept))
        potential_loc = concept
        while True:
            # see if this can be a location (contains enough associated items)
            new_items = locationItems(potential_loc, min_items=3)
            if new_items:
                self.location = potential_loc
                break
            # Conversely, this concept might be an item found within a location
            foundAt = query(potential_loc, "AtLocation")
            if foundAt: potential_loc = foundAt['term']
            # if concept is neither a location nor found at a good location,
            # just pick a random common word and repeat the process
            else: potential_loc = readableToTerm(random.choice(dream_ideas))
        if richness(concept) > 0 and self.location != concept:
            new_items.append(concept)
        self.items = set([attemptSingularization(i) for i in new_items])
        self.lookAround()

    def look(self, words):
        """Parse the input and call lookAround, or lookItem if the user
        specified an item to look at."""
        if not words: return self.lookAround()
        if words[0] == 'around' or words[0] == self.location:
            print("In the future, you can just say 'look'")
            return self.lookAround()
        if words[0] == 'at':
            print("""You don't need to say 'look at the item';
            just 'look item' will work.""")
            words = words[2:] if words[1] == 'the' else words[1:]
        item = readableToTerm(' '.join(words))
        if item in self.items:
            describeObject(item)
        else:
            print("There's no {} here.".format(termToReadable(item)))

    def lookAround(self):
        """Print a description of the scene location, including a list
        of all the items it contains."""
        readable = termToReadable(self.location)
        print("You are in a {}.".format(readable), end=" ")
        describeLocation(self.location)
        item_names = [termToReadable(x) for x in self.items]
        print("There's {} in the {}.".format(en.quantify(item_names), readable))
17076 | <reponame>allanwright/media-classifier
'''Defines a pipeline step which prepares training and test data for
named entity recognition.
'''
import ast
import json
import pickle
from mccore import EntityRecognizer
from mccore import ner
from mccore import persistence
import pandas as pd
from sklearn.utils import resample
from src.step import Step
class PrepareNerData(Step):
    '''Defines a pipeline step which prepares training and test data for
    named entity recognition.
    '''

    def __init__(self):
        # Modern zero-argument super() replaces the legacy
        # super(PrepareNerData, self) spelling.
        super().__init__()
        # Input/output paths consumed and produced by this step.
        self.input = {
            'processed': 'data/interim/processed.csv',
            'ner_labelled_csv': 'data/interim/ner_labelled.csv',
        }
        self.output = {
            'stacked': 'data/interim/stacked.csv',
            'ner_labelled_tsv': 'data/interim/ner_labelled.tsv',
            'ner_labelled_json': 'data/interim/ner_labelled.json',
            'ner_labelled_pickle': 'data/processed/ner_labelled.pickle',
        }

    def run(self):
        '''Runs the pipeline step.
        '''
        # Process data for named entity recognition labelling
        self.__process_data_for_ner()

        # Process labelled named entity recognition data (if any)
        self.__process_labelled_ner_data()

    def __process_data_for_ner(self):
        '''Filters, downsamples, auto-labels and word-stacks the
        processed filename dataset, writing the result to the
        ``stacked`` output for later human verification.
        '''
        df = pd.read_csv(self.input['processed'])
        self.print('Processing data for named entity recognition ({rows} rows)', rows=df.shape[0])

        # Drop anything other than movies and tv shows
        categories = [1, 2]
        df = df[df['category'].isin(categories)]

        # Drop subtitle files
        df.drop(df[df['ext'] == 'srt'].index, inplace=True)

        # Drop anything that contains unwanted words.
        # NOTE(review): 'Ã' looks like a mojibake artifact kept on
        # purpose to filter badly encoded names — confirm.
        blacklist = [
            'tamilrockers',
            'www',
            'hindi',
            'Ã',
            'ita',
            'french',
            'spa',
            'torrent9',
            'torrentcounter',
            'ssrmovies',
            'rus',
            'bengali',
        ]

        def contains_blacklisted_word(name):
            # True iff any whitespace-separated token is blacklisted.
            for word in name.split():
                if word in blacklist:
                    return True
            return False

        df['blacklisted'] = df['name'].apply(contains_blacklisted_word)
        df.drop(df[df['blacklisted']].index, inplace=True)

        # Downsample to a number of files that is reasonable enough for
        # human verification of the labels provided by the ner model
        self.print('Downsampling dataset ({rows} rows)', rows=df.shape[0])
        categories = [df[df.category == c] for c in df.category.unique()]
        downsampled = [resample(c,
                                replace=False,
                                n_samples=250,
                                random_state=123) for c in categories]
        df = pd.concat(downsampled)

        # Auto-label every filename with the pre-trained NER model.
        df['entities'] = ''
        nlp, _ = ner.get_model()
        nlp_bytes = persistence.bin_to_obj('models/ner_mdl.pickle')
        nlp.from_bytes(nlp_bytes)
        recognizer = EntityRecognizer(nlp)

        def get_entities(name):
            return str(list(recognizer.predict(name)))

        df['entities'] = df['name'].apply(get_entities)

        # Split the filename into individual words then stack the DataFrame
        self.print('Stacking dataset ({rows} rows)', rows=df.shape[0])
        index = [df.index, df.name, df.category, df.entities]
        df = pd.DataFrame(df['name'].str.split().tolist(), index=index).stack()
        df = df.reset_index()
        df.columns = ['index', 'name', 'category', 'entities', 'pos', 'word']

        # Add entity column: look the word up in the predicted entities.
        df['entity'] = ''

        def get_entity(row):
            # entities is the stringified list produced above; parse it
            # back with literal_eval and match case-insensitively.
            entities = ast.literal_eval(row['entities'])
            word = row['word'].upper()
            for i in entities:
                if word in (str(s).upper() for s in str(i[1]).split()):
                    return i[0]
            return ''

        df['entity'] = df.apply(get_entity, axis=1)
        df.drop(columns=['category', 'entities'], inplace=True)

        # Save interim stacked output before processing further
        df.to_csv(self.output['stacked'], index=False)

    def __process_labelled_ner_data(self):
        '''Converts the human-verified labelled CSV into tsv, json and
        finally the pickled spaCy training format.
        '''
        df = pd.read_csv(self.input['ner_labelled_csv'])

        # Keep only word and corresponding label
        df = df[['word', 'entity']]

        # Save to tsv
        df.to_csv(
            self.output['ner_labelled_tsv'],
            sep='\t',
            header=False,
            index=False)

        # Convert from tsv to json
        self.__tsv_to_json_format(
            self.output['ner_labelled_tsv'],
            self.output['ner_labelled_json'],
            'na')

        # Write out spacy file
        self.__write_spacy_file(
            self.output['ner_labelled_json'],
            self.output['ner_labelled_pickle'])

    def __tsv_to_json_format(self, input_path, output_path, unknown_label):
        '''Converts a (word, entity) TSV file into dataturks-style JSON,
        one filename record per output line.  A record ends when the
        'extension' entity is seen (the extension is the last token of
        every filename).

        :param input_path: path of the TSV file to read
        :param output_path: path of the JSON file to write
        :param unknown_label: label marking words without an entity
        '''
        try:
            # Bug fix: the file handles were previously opened without
            # ever being closed; context managers guarantee release.
            with open(input_path, 'r') as input_file, \
                 open(output_path, 'w') as output_file:
                data_dict = {}
                annotations = []
                label_dict = {}
                words = ''
                start = 0
                for line in input_file:
                    word, entity = line.split('\t')
                    words += word + " "
                    # Strip the trailing newline (kept as a slice to
                    # preserve the original behaviour exactly).
                    entity = entity[:len(entity)-1]
                    if entity != unknown_label:
                        if len(entity) != 1:
                            d = {
                                'text': word,
                                'start': start,
                                'end': start + len(word) - 1,
                            }
                            # setdefault replaces the old
                            # try/except-KeyError append pattern.
                            label_dict.setdefault(entity, []).append(d)
                    start += len(word) + 1
                    if entity == 'extension':
                        # End of one filename: emit a JSON record.
                        data_dict['content'] = words
                        words = ''
                        label_list = []
                        for ents in list(label_dict.keys()):
                            for i in range(len(label_dict[ents])):
                                if label_dict[ents][i]['text'] != '':
                                    l = [ents, label_dict[ents][i]]
                                    for j in range(i + 1, len(label_dict[ents])):
                                        if label_dict[ents][i]['text'] == label_dict[ents][j]['text']:
                                            di = {}
                                            di['start'] = label_dict[ents][j]['start']
                                            di['end'] = label_dict[ents][j]['end']
                                            di['text'] = label_dict[ents][i]['text']
                                            l.append(di)
                                            # Mark as consumed so the same
                                            # span is not emitted twice.
                                            label_dict[ents][j]['text'] = ''
                                    label_list.append(l)
                        for entities in label_list:
                            label = {}
                            label['label'] = [entities[0]]
                            label['points'] = entities[1:]
                            annotations.append(label)
                        data_dict['annotation'] = annotations
                        annotations = []
                        json.dump(data_dict, output_file)
                        output_file.write('\n')
                        data_dict = {}
                        start = 0
                        label_dict = {}
        except Exception as e:
            # Preserve the original best-effort contract: report the
            # failure and return None rather than aborting the pipeline.
            print("Unable to process file" + "\n" + "error = " + str(e))
            return None

    def __write_spacy_file(self, input_file=None, output_file=None):
        '''Converts the dataturks-style JSON file into the
        (text, {"entities": [...]}) tuples spaCy expects and pickles
        the result.

        :param input_file: path of the JSON file to read
        :param output_file: path of the pickle file to write
        '''
        try:
            training_data = []
            lines = []
            with open(input_file, 'r') as f:
                lines = f.readlines()
            for line in lines:
                data = json.loads(line)
                text = data['content']
                entities = []
                for annotation in data['annotation']:
                    # Only the first point of each annotation is used.
                    point = annotation['points'][0]
                    labels = annotation['label']
                    if not isinstance(labels, list):
                        labels = [labels]
                    for label in labels:
                        # spaCy spans are end-exclusive, hence the +1.
                        entities.append((point['start'], point['end'] + 1, label))
                training_data.append((text, {"entities" : entities}))
            with open(output_file, 'wb') as fp:
                pickle.dump(training_data, fp)
        except Exception as e:
            # Same best-effort contract as __tsv_to_json_format.
            print("Unable to process " + input_file + "\n" + "error = " + str(e))
            return None
| StarcoderdataPython |
3237452 | from aioflow.service import Service, ServiceStatus, service_deco
from aioflow.pipeline import Pipeline
from aioflow.middlewareabc import MiddlewareABC
from aioflow.mixins import PercentMixin
__author__ = "a.lemets"
| StarcoderdataPython |
3281599 | <reponame>kenahoo/recurrent<gh_stars>1-10
import unittest
import datetime
from dateutil import rrule
from recurrent.event_parser import RecurringEvent
NOW = datetime.datetime(2010, 1, 1)
class ExpectedFailure(object):
    """Marker wrapping the value a test *should* produce once the
    underlying parser bug is fixed; test_expression treats wrapped
    expectations as known failures."""
    def __init__(self, v):
        # The correct value the parse is expected to (eventually) yield.
        self.correct_value = v
# Test fixtures: (input phrase, expected parse result).  The expected
# value is an rrule parameter dict for recurring phrases, a date/datetime
# for single dates, None for non-date text, or an ExpectedFailure wrapper
# for known parser bugs.  All relative dates assume the fixed NOW above.
expressions = [
        # recurring events
        ('daily', dict(freq='daily', interval=1)),
        ('each day', dict(freq='daily', interval=1)),
        ('everyday', dict(freq='daily', interval=1)),
        ('every other day', dict(freq='daily', interval=2)),
        ('tuesdays', dict(freq='weekly', interval=1, byday='TU')),
        ('weekends', dict(freq='weekly', interval=1, byday='SA,SU')),
        ('weekdays', dict(freq='weekly', interval=1, byday='MO,TU,WE,TH,FR')),
        ('every weekday', dict(freq='weekly', interval=1, byday='MO,TU,WE,TH,FR')),
        ('tuesdays and thursdays', dict(freq='weekly', interval=1, byday='TU,TH')),
        ('weekly on wednesdays', dict(freq='weekly', interval=1, byday='WE')),
        ('weekly on wednesdays and fridays', dict(freq='weekly', interval=1, byday='WE,FR')),
        ('every sunday and saturday', dict(freq='weekly', interval=1, byday='SU,SA')),
        ('every wed', dict(freq='weekly', interval=1, byday='WE')),
        ('every wed.', dict(freq='weekly', interval=1, byday='WE')),
        # deliberate misspelling: the parser should still recognize it
        ('every wednsday', dict(freq='weekly', interval=1, byday='WE')),
        ('every week on tues', dict(freq='weekly', interval=1, byday='TU')),
        ('once a week on sunday', dict(freq='weekly', interval=1, byday='SU')),
        ('every 3 weeks on mon', dict(freq='weekly', interval=3, byday='MO')),
        ('every 3 days', dict(freq='daily', interval=3)),
        ('every 4th of the month', dict(freq='monthly', interval=1, bymonthday='4')),
        ('every 4th and 10th of the month', dict(freq='monthly', interval=1, bymonthday='4,10')),
        ('every first friday of the month', dict(freq='monthly', interval=1, byday='1FR')),
        ('first friday of every month', dict(freq='monthly', interval=1, byday='1FR')),
        ('first friday of each month', dict(freq='monthly', interval=1, byday='1FR')),
        ('first and third friday of each month', dict(freq='monthly', interval=1, byday='1FR,3FR')),
        ('yearly on the fourth thursday in november', dict(freq='yearly', interval=1,byday='4TH', bymonth='11')),
        ('every year on the fourth thursday in november', dict(freq='yearly', interval=1,byday='4TH', bymonth='11')),
        ('once a year on december 25th', dict(freq='yearly', interval=1, bymonthday='25', bymonth='12')),
        ('every july 4th', dict(freq='yearly', interval=1, bymonthday='4', bymonth='7')),

        # with start and end dates
        ('daily starting march 3rd',
            dict(dtstart='%d0303'%NOW.year, freq='daily', interval=1)),
        ('starting tomorrow on weekends',
            dict(dtstart='%d0102'%NOW.year, freq='weekly',
                interval=1, byday='SA,SU')),
        ('daily starting march 3rd until april 5th',
            dict(dtstart='%d0303'%NOW.year, until='%d0405'%NOW.year, freq='daily', interval=1)),
        ('every wed until november',
            dict(until='%d1101'%NOW.year, freq='weekly', interval=1, byday='WE')),
        ('every 4th of the month starting next tuesday',
            dict(dtstart=(NOW +
                datetime.timedelta(days=(1 - NOW.weekday())%7)).strftime('%Y%m%d'),
                freq='monthly', interval=1, bymonthday='4')),
        ('mondays and thursdays from jan 1 to march 25th',
            dict(dtstart='%d0101'%NOW.year,
            until='%d0325'%NOW.year,
            freq='weekly', interval=1, byday='MO,TH')),

        # time recurrences
        ('every 5 minutes', dict(freq='minutely', interval=5)),
        ('every 30 seconds', dict(freq='secondly', interval=30)),
        ('every other hour', dict(freq='hourly', interval=2)),
        ('every 2 hours', dict(freq='hourly', interval=2)),
        ('every 20 min', ExpectedFailure(dict(freq='minutely', interval=20))),

        # with times
        ('daily at 3pm', dict(freq='daily', interval=1, byhour='15', byminute='0')),
        ('daily at 3:00pm', dict(freq='daily', interval=1, byhour='15', byminute='0')),

        # TODO
        #('saturday through tuesday', dict(freq='daily', interval=1, byday='SA,SU,MO,TU')),
        #('every thursday for the next three weeks', dict(freq='weekly',
        #        interval=1, count=3, byday='TH')),

        # non-recurring
        ('march 3rd', datetime.datetime(NOW.year, 3, 3).date()),
        # NOTE(review): NOW.day + 1 is only valid because NOW is pinned
        # to Jan 1; it would raise for a month-end NOW.
        ('tomorrow', datetime.datetime(NOW.year, NOW.month, NOW.day +
            1).date()),
        ('mar 2 2012', datetime.datetime(2012, 3, 2).date()),
        ('this sunday',
            (NOW + datetime.timedelta(days=(6 -
                NOW.weekday())%7)).date()),
        # pdt fucks this up, does feb 18 first, then adjusts thurs
        ('thursday, february 18th',
            ExpectedFailure(datetime.datetime(NOW.year, 2, 18).date())),
        ]

time_expressions = [
        ('march 3rd at 12:15am', datetime.datetime(NOW.year, 3, 3, 0, 15)),
        ('tomorrow at 3:30', datetime.datetime(NOW.year, NOW.month, NOW.day +
            1, 15, 30)),
        # NOTE(review): replace() would raise for NOW.minute >= 30 /
        # NOW.hour >= 22; safe only because NOW is midnight Jan 1.
        ('in 30 minutes', NOW.replace(minute=NOW.minute + 30)),
        ('at 4', NOW.replace(hour=16)),
        ('2 hours from now', NOW.replace(hour=NOW.hour + 2)),
        ('sunday at 2', (NOW + datetime.timedelta(days=(6 -
            NOW.weekday())%7)).replace(hour=14)),
        ]
expressions += time_expressions

# NOTE(review): ambiguous_expressions is defined but never appended to
# ``expressions``, so these cases are not exercised — confirm intent.
ambiguous_expressions = (
        ('weekly', dict(freq='weekly', interval=1)),
        ('twice weekly', dict(freq='weekly', interval=1)),
        ('three times a week', dict(freq='weekly', interval=1)),
        ('monthly', dict(freq='monthly', interval=1)),
        ('once a month', dict(freq='monthly', interval=1)),
        ('yearly', dict(freq='yearly', interval=1)),
        )

# Phrases that contain date-like words but should NOT parse as dates.
non_dt_expressions = (
        ('Once in a while.', None),
        ('Every time i hear that i apreciate it.', None),
        ('Once every ones in', None),
        ('first time for everything. wait a minute', None),
        # Failing. parses as may
        ('may this test pass.', ExpectedFailure(None)),
        ('seconds anyone?', None),
        )

# Re-test every expression embedded in surrounding prose.
embedded_expressions = [('im available ' + s, v) for s,v in expressions] + [
        (s + ' would work best for me', v) for s,v in expressions] + [
        ('remind me to move car ' + s + ' would work best for me', v) for s,v in expressions]
expressions += embedded_expressions

expressions += non_dt_expressions
class ParseTest(unittest.TestCase):
    """Smoke tests for the return type of RecurringEvent.parse.

    Per-expression tests are attached dynamically below via
    test_expression/setattr.
    """

    def test_return_recurring(self):
        """A recurring phrase parses to an RRULE string."""
        string = 'every day'
        date = RecurringEvent()
        ret = date.parse(string)
        # assertIsInstance reports the actual type on failure, unlike
        # the previous assertTrue(isinstance(...)).
        self.assertIsInstance(ret, str)

    def test_return_non_recurring(self):
        """An explicit single date parses to a datetime."""
        string = 'march 3rd, 2001'
        date = RecurringEvent()
        ret = date.parse(string)
        self.assertIsInstance(ret, datetime.datetime)

    def test_return_non_recurring2(self):
        """A relative single date parses to a datetime."""
        string = 'next wednesday'
        date = RecurringEvent()
        ret = date.parse(string)
        self.assertIsInstance(ret, datetime.datetime)

    def test_return_non_date(self):
        """Input without date information parses to a falsy value."""
        string = 'remember to call mitchell'
        date = RecurringEvent()
        ret = date.parse(string)
        self.assertFalse(ret)

    def test_rrule_string(self):
        """get_RFC_rrule emits the expected DTSTART/RRULE lines."""
        string = 'every day starting feb 2'
        date = RecurringEvent(NOW)
        date.parse(string)
        expected = """DTSTART:20100202\nRRULE:FREQ=DAILY;INTERVAL=1"""
        self.assertEqual(expected, date.get_RFC_rrule())
def test_expression(string, expected):
    """Build a unittest method that parses ``string`` and checks it
    against ``expected`` (a param dict, a date/datetime, or None).

    An ExpectedFailure wrapper inverts the outcome: the inner assertion
    failing is tolerated, while the assertion *passing* raises, so a
    silently fixed parser bug is flagged.
    """
    def test_(self):
        date = RecurringEvent(NOW)
        val = date.parse(string)
        expected_params = expected
        known_failure = False
        if isinstance(expected, ExpectedFailure):
            known_failure = True
            expected_params = expected.correct_value
        try:
            if expected_params is None:
                # A non-date may legally yield None or a params dict
                # containing nothing but the default 'interval' key.
                self.assertTrue(val is None or list(date.get_params().keys()) == ['interval'],
                        "Non-date error: '%s' -> '%s', expected '%s'"%(
                            string, val, expected_params))
            elif isinstance(expected_params, datetime.datetime) or isinstance(expected_params, datetime.date):
                if isinstance(expected_params, datetime.datetime):
                    self.assertEqual(val, expected_params,
                        "Date parse error: '%s' -> '%s', expected '%s'"%(
                            string, val, expected_params))
                else:
                    # Expected is a bare date: compare date parts only.
                    self.assertEqual(val.date(), expected_params,
                        "Date parse error: '%s' -> '%s', expected '%s'"%(
                            string, val, expected_params))
            else:
                # Expected is an rrule parameter dict: every expected
                # key must match, and no unexpected truthy keys remain.
                actual_params = date.get_params()
                for k, v in list(expected_params.items()):
                    av = actual_params.pop(k, None)
                    self.assertEqual(av, v,
                            "Rule mismatch on rule '%s' for '%s'. Expected %s, got %s\nRules: %s" % (k, string, v, av,
                                date.get_params()))
                # make sure any extra params are empty/false
                for k, v in list(actual_params.items()):
                    self.assertFalse(v)
                # ensure rrule string can be parsed by dateutil
                rrule.rrulestr(val)
        except AssertionError as e:
            if known_failure:
                print("Expected failure:", expected_params)
                return
            raise e
        if known_failure:
            # The known-broken case now passes: surface it so the
            # ExpectedFailure wrapper can be removed.
            raise AssertionError("Known failure passed:", expected_params, string)
    return test_
# add a test for each expression: attach one generated test method per
# fixture to ParseTest so unittest discovers and reports them separately.
for i, expr in enumerate(expressions):
    string, params = expr
    setattr(ParseTest, 'test_%03d_%s' % (i, string.replace(' ', '_')), test_expression(string, params))

if __name__ == '__main__':
    # All relative-date fixtures are anchored to the fixed NOW above.
    print("Dates relative to %s" % NOW)
    unittest.main(verbosity=2)
| StarcoderdataPython |
150438 | <reponame>aagusti/sp2d<gh_stars>0
from ..models import SipkdBase, SipkdDBSession
from datetime import datetime
from sqlalchemy import (
Column,
Integer,
BigInteger,
SmallInteger,
Text,
DateTime,
Date,
String,
ForeignKey,
text,
UniqueConstraint,
Numeric,
ForeignKeyConstraint,
PrimaryKeyConstraint
)
from sqlalchemy.orm import (
relationship,backref )
class SimdaBank(SipkdBase):
    """ORM mapping for the ``ref_bank`` reference table (bank master
    data).  Column prefixes follow SIMDA naming — presumably Indonesian
    abbreviations (``kd_`` = kode/code, ``nm_`` = nama/name,
    ``no_`` = nomor/number); confirm against the SIMDA schema.
    """
    __tablename__ = 'ref_bank'
    kd_bank = Column(Integer, nullable=False, primary_key=True)  # bank code
    nm_bank = Column(String(50), nullable=False)  # bank name
    no_rekening = Column(String(50))  # account number (optional)
    # Five-level hierarchical account (rekening) classification code.
    kd_rek_1 = Column(Integer, nullable=False)
    kd_rek_2 = Column(Integer, nullable=False)
    kd_rek_3 = Column(Integer, nullable=False)
    kd_rek_4 = Column(Integer, nullable=False)
    kd_rek_5 = Column(Integer, nullable=False)
class SimdaSpm(SipkdBase):
    """ORM mapping for the ``ta_spm`` payment-order (SPM) header table.

    Composite primary key: (tahun, no_spm) — fiscal year plus SPM
    number.  Detail, info and deduction rows reference this key.
    """
    __tablename__ = 'ta_spm'
    __table_args__ = (PrimaryKeyConstraint('tahun', 'no_spm'),)
    tahun = Column(Integer, nullable=False)  # fiscal year
    no_spm = Column(String(50), nullable=False)  # SPM document number
    # Organisational unit code: urusan/bidang/unit/sub hierarchy.
    kd_urusan = Column(Integer, nullable=False)
    kd_bidang = Column(Integer, nullable=False)
    kd_unit = Column(Integer, nullable=False)
    kd_sub = Column(Integer, nullable=False)
    no_spp = Column(String(50))  # related SPP number, if any
    jn_spm = Column(Integer, nullable=False)  # SPM type code
    tgl_spm = Column(DateTime, nullable=False)  # SPM date
    uraian = Column(String(255))  # description
    # Payee details (name, bank, account, tax id).
    nm_penerima = Column(String(100))
    bank_penerima = Column(String(50))
    rek_penerima = Column(String(50))
    npwp = Column(String(20))
    bank_pembayar = Column(Integer)  # paying bank code
    # Verifier and signatory (name / NIP / title).
    nm_verifikator = Column(String(50))
    nm_penandatangan = Column(String(50))
    nip_penandatangan = Column(String(21))
    jbt_penandatangan = Column(String(75))
    kd_edit = Column(Integer)  # edit-status flag — semantics unverified
class SimdaSpmDet(SipkdBase):
    """ORM mapping for ``ta_spm_rinc``: SPM detail (line-item) rows.

    Composite primary key (tahun, no_spm, no_id); (tahun, no_spm) is a
    foreign key into the ``ta_spm`` header table.
    """
    __tablename__ = 'ta_spm_rinc'
    __table_args__ = (PrimaryKeyConstraint('tahun', 'no_spm', 'no_id'),
                      ForeignKeyConstraint(['tahun', 'no_spm'], ['ta_spm.tahun', 'ta_spm.no_spm']),)
    tahun = Column(Integer, nullable=False)  # fiscal year
    no_spm = Column(String(50), nullable=False)  # parent SPM number
    no_id = Column(Integer, nullable=False)  # line number within the SPM
    # Organisational unit code: urusan/bidang/unit/sub hierarchy.
    kd_urusan = Column(Integer, nullable=False)
    kd_bidang = Column(Integer, nullable=False)
    kd_unit = Column(Integer, nullable=False)
    kd_sub = Column(Integer, nullable=False)
    # Program / activity identifiers.
    kd_prog = Column(Integer, nullable=False)
    id_prog = Column(Integer, nullable=False)
    kd_keg = Column(Integer, nullable=False)
    # Five-level account (rekening) classification code.
    kd_rek_1 = Column(Integer, nullable=False)
    kd_rek_2 = Column(Integer, nullable=False)
    kd_rek_3 = Column(Integer, nullable=False)
    kd_rek_4 = Column(Integer, nullable=False)
    kd_rek_5 = Column(Integer, nullable=False)
    nilai = Column(Numeric, nullable=False)  # line amount
class SimdaSpmInfo(SipkdBase):
    """ORM mapping for ``ta_spm_info``: informational deduction amounts
    per SPM, keyed by deduction code from ``ref_pot_spm``.  Structurally
    identical to SimdaSpmPot — confirm the semantic difference upstream.
    """
    __tablename__ = 'ta_spm_info'
    __table_args__ = (PrimaryKeyConstraint('tahun', 'no_spm', 'kd_pot_rek'),
                      ForeignKeyConstraint(['tahun', 'no_spm'], ['ta_spm.tahun', 'ta_spm.no_spm']),)
    tahun = Column(Integer, nullable=False)  # fiscal year
    no_spm = Column(String(50), nullable=False)  # parent SPM number
    kd_pot_rek = Column(Integer, ForeignKey("ref_pot_spm.kd_pot"), nullable=False)  # deduction code
    nilai = Column(Numeric, nullable=False)  # amount
class SimdaSpmPot(SipkdBase):
    """ORM mapping for ``ta_spm_pot``: deduction (potongan) amounts per
    SPM, keyed by deduction code from ``ref_pot_spm``.
    """
    __tablename__ = 'ta_spm_pot'
    __table_args__ = (PrimaryKeyConstraint('tahun', 'no_spm', 'kd_pot_rek'),
                      ForeignKeyConstraint(['tahun', 'no_spm'], ['ta_spm.tahun', 'ta_spm.no_spm']),)
    tahun = Column(Integer, nullable=False)  # fiscal year
    no_spm = Column(String(50), nullable=False)  # parent SPM number
    kd_pot_rek = Column(Integer, ForeignKey("ref_pot_spm.kd_pot"), nullable=False)  # deduction code
    nilai = Column(Numeric, nullable=False)  # amount
class SimdaRefSpmPot(SipkdBase):
    """ORM mapping for ``ref_pot_spm``: reference list of SPM deduction
    types, referenced by SimdaSpmInfo and SimdaSpmPot.
    """
    __tablename__ = 'ref_pot_spm'
    kd_pot = Column(Integer, nullable=False, primary_key=True)  # deduction code
    nm_pot = Column(String(50), nullable=False)  # deduction name
    kd_map = Column(String(6))  # mapping/account code — semantics unverified
class SimdaSp2d(SipkdBase):
    """ORM mapping for ``ta_sp2d``: fund-disbursement orders (SP2D),
    each referencing its originating SPM via (tahun, no_spm).
    """
    __tablename__ = 'ta_sp2d'
    __table_args__ = (ForeignKeyConstraint(['tahun', 'no_spm'], ['ta_spm.tahun', 'ta_spm.no_spm']),)
    tahun = Column(Integer, nullable=False, primary_key=True)  # fiscal year
    no_sp2d = Column(String(50), nullable=False, primary_key=True)  # SP2D number
    no_spm = Column(String(50), nullable=False)  # originating SPM number
    tgl_sp2d = Column(DateTime, nullable=False)  # SP2D date
    kd_bank = Column(Integer, nullable=False)  # paying bank code
    no_bku = Column(Integer, nullable=False)  # cash-book entry number
    # Signatory (name / NIP / title).
    nm_penandatangan = Column(String(50))
    nip_penandatangan = Column(String(21))
    jbt_penandatangan = Column(String(75))
    keterangan = Column(String(255), nullable=False)  # remarks
    # Convenience relationship back to the SPM header row.
    spm = relationship(SimdaSpm, foreign_keys=[tahun, no_spm])
| StarcoderdataPython |
1681991 | <reponame>MaxStrange/ArtieInfant
"""
Online logging script to be run concurrently with make train.
"""
import functools
import itertools
import math
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import sys
import random
FILE_PATH = "log.csv"
class Plotter:
    """Live three-panel plot (loss / accuracy / F1) fed incrementally by
    a data getter object exposing ``get() -> (y1s, y2s, y3s)``.
    """
    def __init__(self, x1, y1, x2, y2, x3, y3, getter):
        # One figure with three stacked subplots, one per metric.
        self._fig = plt.figure()
        self._ax1 = self._fig.add_subplot(3, 1, 1)
        self._ax2 = self._fig.add_subplot(3, 1, 2)
        self._ax3 = self._fig.add_subplot(3, 1, 3)
        # Seed series; extended in place as new data arrives.
        self._data_y1 = y1
        self._data_x1 = x1
        self._data_y2 = y2
        self._data_x2 = x2
        self._data_y3 = y3
        self._data_x3 = x3
        self._data_getter = getter
    def animate(self):
        """Start the animation loop; blocks inside plt.show()."""
        # NOTE: the FuncAnimation reference must stay alive while
        # plt.show() runs, which holding it in a local achieves here.
        ani = animation.FuncAnimation(self._fig, self._draw, interval=1)
        plt.tight_layout()
        plt.show()
    def _update_data(self):
        """Pull new samples from the getter, append them to the stored
        series, and return just the newly fetched triples."""
        new_y1, new_y2, new_y3 = self._data_getter.get()
        self._data_y1.extend(new_y1)
        self._data_y2.extend(new_y2)
        self._data_y3.extend(new_y3)
        return new_y1, new_y2, new_y3
    def _draw(self, frame):
        """Animation callback: redraw all three panels, but only when
        new data actually arrived."""
        new_y1s, new_y2s, new_y3s = self._update_data()
        if new_y1s:
            # x values are simple running sample indices.
            new_x1s = range(len(self._data_x1), len(self._data_x1) + len(new_y1s))
            new_x2s = range(len(self._data_x2), len(self._data_x2) + len(new_y2s))
            new_x3s = range(len(self._data_x3), len(self._data_x3) + len(new_y3s))
            self._data_x1.extend(new_x1s)
            self._data_x2.extend(new_x2s)
            self._data_x3.extend(new_x3s)
            # Thin the line as the series grows so dense plots stay
            # readable; safe since the seed data keeps len >= 2.
            linewidth = max(0.005, min(1.0, 10 / math.log(len(self._data_x1))))
            self._ax1.clear()
            self._ax1.plot(self._data_x1, self._data_y1, linewidth=linewidth)
            self._ax1.set_title("Loss")
            self._ax2.clear()
            self._ax2.plot(self._data_x2, self._data_y2, linewidth=linewidth)
            self._ax2.set_title("Accuracy")
            self._ax3.clear()
            self._ax3.plot(self._data_x3, self._data_y3, linewidth=linewidth)
            self._ax3.set_title("F1Score")
class Getter:
    """
    Class that provides a 'get' function for retrieving one data point at a time.
    """
    def __init__(self, fpath):
        """
        :param fpath: The path to the file to read from.
        """
        self.fpath = fpath
        # Index of the next epoch section to consume from the log.
        self.epoch_num = 0
    def get(self):
        """Read the current epoch's section from the log file and return
        it as three parallel lists (loss, accuracy, f1).  The file is
        re-read from the start each call; epoch sections are delimited
        by lines of the form '----- N -----'."""
        epoch_str = lambda epnum : "----- " + str(epnum) + " -----"
        with open(self.fpath) as f:
            # Take only the lines after the most recent epoch
            lines = [line.strip() for line in itertools.dropwhile(lambda x: x.strip() != epoch_str(self.epoch_num), f)]
            lines = [line.strip() for line in itertools.takewhile(lambda x: x.strip() != epoch_str(self.epoch_num + 1), lines)]
            # Drop blanks and the epoch-delimiter lines themselves.
            lines = [line for line in lines if line.strip() != "" and not line.startswith('-')]
            tups = [line.split(',') for line in lines]
            data_x1 = [float(tup[0].strip()) for tup in tups]
            data_x2 = [float(tup[1].strip()) for tup in tups]
            data_x3 = [float(tup[2].strip()) for tup in tups]
            # Advance to the next epoch only once this one yielded data.
            if data_x1:
                self.epoch_num += 1
            return data_x1, data_x2, data_x3
if __name__ == "__main__":
acc = 0
loss = 1
g = Getter(FILE_PATH)
plotter = Plotter([0, 1], [0, 1], [0, 1], [0, 1], [0, 1], [0, 1], g)
plotter.animate()
| StarcoderdataPython |
27500 | import numpy as np
from pathlib import Path
import sys, os
if __name__ == "__main__":
"""
Jobs:
1) VAE (VAE loss) for data=[dsprites, celeba, chairs]
2) VAE (beta-TC loss with alpha=beta=gamma=1) for data=[dsprites, celeba, chairs]
3) beta-TCVAE for alpha=gamma=[0.5, 1, 2], for beta=[3,6], for data=[dsprites, celeba, chairs]
"""
# absolute path
my_path = Path(__file__).parent.resolve().expanduser()
main_path = my_path.parent.parent.parent
# hypars
cons_list = ["kl", "rec"]
epochs_list = [120, 800, 1200]
seed = 1234
nlat = 64
batchs = 64
lr = 1e-5
n_stddevs = 3
datasets = ["dsprites", "celeba", "chairs"]
alpha_gammas = [0.5, 1, 2]
betas = [3, 6]
# cherry-pick data samples as done in repo
cherry_picked = ["92595 339150 656090",
"88413 176606 179144 32260 191281 143307 101535 70059 87889 131612",
"40919 5172 22330", ]
# .sh filename
fname = my_path / f'run_jobs_1.sh'
# clear .sh file
os.system(f'rm {fname}')
# VAE
for data, epochs, cherries in zip(datasets, epochs_list, cherry_picked):
VAE_cmd = (
# f"python main.py qualitative/VAE_{data}_z{nlat} -s {seed} "
# f"--checkpoint-every 50 -d {data} -e {epochs} -b {batchs} "
# f"-z {nlat} -l VAE --lr {lr} "
# f'--no-progress-bar -F {str(my_path / f"VAE_{data}_z{nlat}.out")} '
# f"--record-loss-every=50 --pin-dataset-gpu \n"
f"python main_viz.py qualitative/VAE_{data}_z{nlat} all -i {cherries} "
f"-s {seed} -c 10 -r 10 -t 2 --is-show-loss --is-posterior \n"
)
alpha_gamma = 1
beta = 1
BTC_cmd = (
# f"python main.py qualitative/btcvae_{data}_z{nlat}_A{alpha_gamma}_B{beta}_G{alpha_gamma} -s {seed} "
# f"--checkpoint-every 50 -d {data} -e {epochs} -b {batchs} "
# f"-z {nlat} -l btcvae --lr {lr} --btcvae-A {alpha_gamma} --btcvae-B {beta} --btcvae-G {alpha_gamma} "
# f'--no-progress-bar -F {str(my_path / f"btcvae_{data}_z{nlat}_A{alpha_gamma}_B{beta}_G{alpha_gamma}.out")} '
# f"--record-loss-every=50 --pin-dataset-gpu \n"
f"python main_viz.py qualitative/btcvae_{data}_z{nlat}_A{alpha_gamma}_B{beta}_G{alpha_gamma} all -i {cherries} "
f"-s {seed} -c 10 -r 10 -t 2 --is-show-loss --is-posterior \n"
)
with open(fname, 'a') as f:
f.write(VAE_cmd + BTC_cmd)
# beta-TCVAE
for data, epochs, cherries in zip(datasets, epochs_list, cherry_picked):
for alpha_gamma in alpha_gammas:
for beta in betas:
BTC_cmd = (
# f"python main.py qualitative/btcvae_{data}_z{nlat}_A{alpha_gamma}_B{beta}_G{alpha_gamma} -s {seed} "
# f"--checkpoint-every 50 -d {data} -e {epochs} -b {batchs} "
# f"-z {nlat} -l btcvae --lr {lr} --btcvae-A {alpha_gamma} --btcvae-B {beta} --btcvae-G {alpha_gamma} "
# f'--no-progress-bar -F {str(my_path / f"btcvae_{data}_z{nlat}_A{alpha_gamma}_B{beta}_G{alpha_gamma}.out")} '
# f"--record-loss-every=50 --pin-dataset-gpu \n"
f"python main_viz.py qualitative/btcvae_{data}_z{nlat}_A{alpha_gamma}_B{beta}_G{alpha_gamma} all -i {cherries} "
f"-s {seed} -c 10 -r 10 -t 2 --is-show-loss --is-posterior \n"
)
with open(fname, 'a') as f:
f.write(BTC_cmd)
| StarcoderdataPython |
3361365 | """Provides the main class for Merkle-trees and related functionalites
"""
from .hashing import hash_machine
from .utils import log_2, decompose, NONE
from .nodes import Node, Leaf
from .proof import Proof
from .serializers import MerkleTreeSerializer
from .exceptions import LeafConstructionError, NoChildException, EmptyTreeException, NoPathException, InvalidProofRequest, NoSubtreeException, NoPrincipalSubrootsException, InvalidTypesException, InvalidComparison, WrongJSONFormat, UndecodableRecordError, NotSupportedEncodingError, NotSupportedHashTypeError
import json
from json import JSONDecodeError
import uuid
import os
import mmap
import contextlib
from tqdm import tqdm
NONE_BAR = '\n ' + '\u2514' + '\u2500' + NONE # └─[None]
class MerkleTree(object):
"""Class for Merkle-trees
:param \*records: [optional] The records initially stored by the Merkle-tree. If provided, the tree is constructed with
as many leafs from the beginning, storing the hashes of the inserted records in the respective order.
:type \*records: str or bytes or bytearray
:param hash_type: [optional] Defaults to ``'sha256'``. Should be included in ``hashing.HASH_TYPES`` (upper- or mixed-case
with '-' instead of '_' allowed), otherwise an exception is thrown.
:type hash_type: str
:param encoding: [optional] Defaults to ``'utf_8'``. Should be included in ``hashing.ENCODINGS`` (upper- or mixed-case
with '-' instead of '_' allowed), otherwise an exception is thrown.
:type encoding: str
:param security: [optional] Defaults to ``True``.If ``False``, defense against second-preimage attack will be disabled
:type security: bool
:raises UndecodableRecordError: if any of the provided ``records`` is a bytes-like object which cannot be decoded with
the provided encoding type
:raises NotSupportedHashTypeError: if ``hash_type`` is not contained in ``hashing.HASH_TYPES``
:raises NotSupportedEncodingType : if ``encoding`` is not contained in ``hashing.ENCODINGS``
:ivar uuid: (*str*) uuid of the Merkle-tree (time-based)
:ivar hash_type: (*str*) See the constructor's homonymous argument
:ivar encoding: (*str*) See the constructor's homonymous argument
:ivar security: (*bool*) Iff ``True``, security measures against second-preimage attack are activated
:ivar hash: (*method*) Core hash functionality of the Merkle-tree
:ivar multi_hash: (*method*) Hash functionality used by the Merkle-tree for performing inclusion tests (explicitly or
implicitly upon a request for consistency proof)
"""
def __init__(self, *records, hash_type='sha256', encoding='utf-8', security=True):
self.uuid = str(uuid.uuid1())
try:
# Hash type, encoding type and security mode configuration
machine = hash_machine(
hash_type=hash_type,
encoding=encoding,
security=security
)
except (NotSupportedEncodingError, NotSupportedHashTypeError):
raise
self.hash_type = hash_type.lower().replace('-', '_')
self.encoding = encoding.lower().replace('-', '_')
self.security = security
self.hash = machine.hash
self.multi_hash = machine.multi_hash
# Initialized here so that consistency-proof works in some edge cases
self.leaves = []
self.nodes = set()
# Tree generation
for record in records:
try:
self.update(record=record)
except UndecodableRecordError:
raise
# --------------------------- Boolean implementation ---------------------
def __bool__(self):
"""
:returns: ``False`` iff the Merkle-tree has no nodes
:rtype: bool
"""
return bool(self.nodes)
# ------------------------------------ Properties ------------------------
@property
def root(self):
"""Returns the current root of the Merkle-tree
:returns: the tree's current root
:rtype: nodes._Node
:raises EmptyTreeException: if the Merkle-tree is currently empty
"""
if not self:
raise EmptyTreeException
return self._root
@property
def rootHash(self):
"""Returns the current root-hash of the Merkle-tree, i.e., the hash stored by its current root
:returns: the tree's current root-hash
:rtype: bytes
:raises EmptyTreeException: if the Merkle-tree is currently empty
"""
try:
_root = self.root
except EmptyTreeException:
raise
return _root.digest
    @property
    def length(self):
        """Returns the Merkle-tree's current length, i.e., the number of its leaves

        :rtype: int
        """
        return len(self.leaves)
    @property
    def size(self):
        """Returns the current number of the Merkle-tree's nodes (leaves and interior nodes combined)

        :rtype: int
        """
        return len(self.nodes)
@property
def height(self):
"""Calculates and returns the Merkle-tree's current height
.. note:: Since the tree is binary *balanced*, its height coincides with the length of its leftmost branch
:rtype: int
"""
length = len(self.leaves)
if length > 0:
return log_2(length) + 1 if length != 2**log_2(length) else log_2(length)
else:
return 0
# ---------------------------------- Updating ----------------------------
    def update(self, record=None, digest=None):
        """Updates the Merkle-tree by storing the hash of the inserted record in a newly-created leaf, restructuring
        the tree appropriately and recalculating all necessary interior hashes

        :param record: [optional] The record whose hash is to be stored into a new leaf.
        :type record: str or bytes or bytearray
        :param digest: [optional] The hash to be stored by the new leaf (after encoding).
        :type digest: str

        .. warning:: Exactly *one* of *either* ``record`` *or* ``digest`` should be provided

        :raises LeafConstructionError: if both ``record`` and ``digest`` were provided
        :raises UndecodableRecordError: if the provided ``record`` is a bytes-like object which could not be decoded with
            the Merkle-tree's encoding type
        """
        if self:
            # ~ Height and root of the *full* binary subtree with maximum
            # ~ possible length containing the rightmost leaf
            last_power = decompose(len(self.leaves))[-1]
            last_subroot = self.leaves[-1].descendant(degree=last_power)
            # Store new record to new leaf
            try:
                new_leaf = Leaf(
                    hashfunc=self.hash,
                    encoding=self.encoding,
                    record=record,
                    digest=digest
                )
            except (LeafConstructionError, UndecodableRecordError):
                raise
            # Assimilate new leaf
            self.leaves.append(new_leaf)
            self.nodes.add(new_leaf)
            try:
                # Save child info before bifurcation
                old_child = last_subroot.child
            except NoChildException:  # last_subroot was previously root:
                # the tree was perfect, so a new root is created on top of
                # the previous root and the new leaf
                self._root = Node(
                    hashfunc=self.hash,
                    encoding=self.encoding,
                    left=last_subroot,
                    right=new_leaf
                )
                self.nodes.add(self._root)
            else:
                # Bifurcate
                # Create bifurcation node joining the last full subtree
                # with the freshly appended leaf
                new_child = Node(
                    hashfunc=self.hash,
                    encoding=self.encoding,
                    left=last_subroot,
                    right=new_leaf
                )
                self.nodes.add(new_child)
                # Interject bifurcation node between the old parent and the
                # detected subroot
                old_child.set_right(new_child)
                new_child.set_child(old_child)
                # Recalculate hashes only at the rightmost branch of the tree
                current_node = old_child
                while True:
                    current_node.recalculate_hash(hashfunc=self.hash)
                    try:
                        current_node = current_node.child
                    except NoChildException:  # reached the root
                        break
        else:  # Empty tree case: the new leaf becomes the root itself
            try:
                new_leaf = Leaf(
                    hashfunc=self.hash,
                    encoding=self.encoding,
                    record=record,
                    digest=digest
                )
            except (LeafConstructionError, UndecodableRecordError):
                raise
            self.leaves = [new_leaf]
            self.nodes = set([new_leaf])
            self._root = new_leaf
# ---------------------------- Audit-proof utilities ---------------------
    def audit_path(self, index):
        """Computes and returns the main body for the audit-proof requested upon the provided index

        Body of an audit-proof consists of an *audit-path* (a sequence of signed hashes) and a *proof-index* (the position
        within the above sequence where a subsequent proof-validation should start from)

        :param index: index (zero based) of the leaf where the audit-path computation should be based upon
        :type index: int
        :returns: starting position for application of hashing along with the tuple of signed hashes (pairs of the form
            *(+1/-1, bytes)*, the sign ``+1`` or ``-1`` indicating pairing with the right resp. left neighbour)
        :rtype: (int, tuple<(+1/-1, bytes)>)
        :raises NoPathException: if the provided index is out of range (including the empty Merkle-tree case)
        """
        if index < 0:
            # ~ Handle negative index case separately with NoPathException, since certain
            # ~ negative indices might otherwise be considered as valid positions
            raise NoPathException
        else:
            try:
                current_node = self.leaves[index]
            except IndexError:
                raise NoPathException  # Covers also the empty tree case
            else:
                # The initial sign is -1 iff the starting leaf is a right parent
                initial_sign = +1
                if current_node.is_right_parent():
                    initial_sign = -1
                path = [(initial_sign, current_node.digest)]
                start = 0
                # Ascend from the leaf towards the root, collecting the
                # digests of the sibling nodes encountered along the way
                while True:
                    try:
                        current_child = current_node.child
                    except NoChildException:  # reached the root
                        break
                    else:
                        if current_node.is_left_parent():
                            # Sibling lies to the right: append its digest
                            next_hash = current_child.right.digest
                            if current_child.is_left_parent():
                                path.append((+1, next_hash))
                            else:
                                path.append((-1, next_hash))
                        else:
                            # Sibling lies to the left: prepend its digest
                            # and shift the starting position accordingly
                            next_hash = current_child.left.digest
                            if current_child.is_right_parent():
                                path.insert(0, (-1, next_hash))
                            else:
                                path.insert(0, (+1, next_hash))
                            start += 1
                        current_node = current_child
                return start, tuple(path)
def auditProof(self, arg):
"""Response of the Merkle-tree to the request of providing an audit-proof based upon the provided argument
:param arg: the record (if type is *str* or *bytes* or *bytearray*) or index of leaf (if type is *int*) where the
computation of audit-proof must be based upon
:type arg: str or bytes or bytearray or int
:returns: audit-proof appropriately formatted along with its validation parameters (so that it can be passed in
as the second argument to the ``validations.validateProof()`` function)
:rtype: proof.Proof
:raises InvalidProofRequest: if the provided argument's type is not as prescribed
"""
if type(arg) not in (int, str, bytes, bytearray):
raise InvalidProofRequest
elif type(arg) is int:
index = arg
else:
# ~ arg is of type str, or bytes or bytearray; in this case, detect the index
# ~ of the first leaf having recorded the inserted argument; if no such leaf
# ~ exists (i.e., the inserted argument has not been encrypted into the tree),
# ~ set index equal to -1 so that a NoPathException be subsequently raised
index = -1
count = 0
_hash = self.hash(arg)
_leaves = (leaf for leaf in self.leaves)
while True:
try:
_leaf = next(_leaves)
except StopIteration:
break
else:
if _hash == _leaf.digest:
index = count
break
count += 1
try:
# Calculate proof path
proof_index, audit_path = self.audit_path(index=index)
except NoPathException: # Includes case of negative `arg`
return Proof(
provider=self.uuid,
hash_type=self.hash_type,
encoding=self.encoding,
security=self.security,
proof_index=-1,
proof_path=()
)
else:
return Proof(
provider=self.uuid,
hash_type=self.hash_type,
encoding=self.encoding,
security=self.security,
proof_index=proof_index,
proof_path=audit_path
)
# --------------------------- Consistency-proof utils ---------------------------
def subroot(self, start, height):
"""Returns the root of the unique *full* binary subtree of the Merkle-tree, whose leftmost leaf is located
at the provded position ``start`` and whose height is equal to the provded ``height``
:param start: index (zero based) of leaf where detection of subtree should start from
:type start: int
:param height: height of candidate subtree to be detected
:type height: int
:returns: root of the detected subtree
:rtype: nodes._Node
:raises NoSubtreeException: if no subtree does exists for the given parameters
"""
# Detect candidate subroot
try:
subroot = self.leaves[start]
except IndexError:
raise NoSubtreeException
i = 0
while i < height:
try:
next_node = subroot.child
except NoChildException:
raise NoSubtreeException
else:
if next_node.left is not subroot:
raise NoSubtreeException
subroot = subroot.child
i += 1
# ~ Verify existence of *full* binary subtree for the above
# ~ detected candidate subroot
right_parent = subroot
i = 0
while i < height:
if isinstance(right_parent, Leaf):
raise NoSubtreeException
right_parent = right_parent.right
i += 1
return subroot
    def principal_subroots(self, sublength):
        """Detects and returns in corresponding order the roots of the *successive*, *rightmost*, *full* binary
        subtrees of maximum (and thus decreasing) length, whose lengths sum up to the provided argument

        Returned nodes are prepended with a sign (``+1`` or ``-1``), carrying information used in the generation of
        consistency-proofs after extracting hashes

        :param sublength: a non-negative integer smaller than or equal to the Merkle-tree's current length, such that
            the corresponding sequence of subroots exists
        :returns: The signed roots of the detected subtrees, whose hashes are to be used for the generation
            of consistency-proofs
        :rtype: list<(+1/-1, nodes._Node)>
        :raises NoPrincipalSubrootsException: if the provided ``sublength`` does not fulfill the prescribed conditions
        """
        if sublength < 0:
            raise NoPrincipalSubrootsException  # Mask negative input case as incompatibility
        principal_subroots = []
        # Decompose sublength into decreasing powers of two; each power
        # corresponds to one full subtree of that height
        powers = decompose(sublength)
        start = 0
        for _power in powers:
            try:
                _subroot = self.subroot(start, _power)
            except NoSubtreeException:
                raise NoPrincipalSubrootsException  # Incompatibility issue detected
            else:
                try:
                    # NOTE: the grandchild access below is performed solely to
                    # trigger NoChildException when the subroot's parent is the
                    # tree's root, distinguishing the two sign conventions
                    _child = _subroot.child
                    _grandchild = _child.child
                except NoChildException:
                    if _subroot.is_left_parent():
                        principal_subroots.append((+1, _subroot))
                    else:
                        principal_subroots.append((-1, _subroot))
                else:
                    if _child.is_left_parent():
                        principal_subroots.append((+1, _subroot))
                    else:
                        principal_subroots.append((-1, _subroot))
                finally:
                    start += 2**_power
        if len(principal_subroots) > 0:
            principal_subroots[-1] = (+1, principal_subroots[-1][1])  # Modify last sign
        return principal_subroots
    def minimal_complement(self, subroots):
        """Complements optimally the subroot hashes detected by ``.principal_subroots`` with all necessary
        interior hashes of the Merkle-tree, so that a full consistency-path can be generated

        :param subroots: output of the ``.principal_subroots()`` method
        :type subroots: list<nodes._Node>
        :returns: a list of signed hashes complementing optimally provided input,
            so that a full consistency-path be generated
        :rtype: list<(+1/-1, bytes)>
        """
        if len(subroots) == 0:
            # Nothing was detected: the whole tree constitutes the complement
            return self.principal_subroots(self.length)
        complement = []
        # Ascend from the last detected subroot towards the tree's root,
        # collecting right-siblings and merging subroots into their common
        # parents along the way
        while True:
            try:
                subroots[-1][1].child
            except NoChildException:  # reached the root
                break
            else:
                _subroot = subroots[-1][1]
                if _subroot.is_left_parent():
                    if _subroot.child.is_right_parent():
                        complement.append((-1, _subroot.child.right))
                    else:
                        complement.append((+1, _subroot.child.right))
                    subroots = subroots[:-1]
                else:
                    # Merge the last two subroots into their common parent
                    subroots = subroots[:-2]
                    subroots.append((+1, _subroot.child))
        return complement
    def consistency_path(self, sublength):
        """Computes and returns the main body for the consistency-proof requested for the provided parameters

        Body of a consistency-proof consists of a *consistency-path* (a sequence of signed hashes) and a *proof-index*
        (the position within the above sequence where a subsequent proof-validation should start from)

        :param sublength: length of a presumably valid previous state of the Merkle-tree
        :type sublength: int
        :returns: starting position for application of hashing along with a tuple of hashes signed with ``-1`` (leftmost
            hashes for inclusion test to be performed by the Merkle-tree itself) and a tuple of signed hashes for
            hash test to be performed from the Client's Side (the sign ``-1``, resp. ``+1`` indicating pairing
            with the left resp. right neigbour during proof validation)
        :rtype: (int, tuple<(-1, bytes)>, tuple<(+1/-1 bytes)>)
        :raises NoPathException: if the provided ``sublength`` is non-positive or no subroot sequence corresponds to it
            (i.e., if a ``NoPrincipalSubrootsException`` is implicitely raised)
        """
        if sublength < 0 or self.length == 0:
            raise NoPathException
        try:
            left_subroots = self.principal_subroots(sublength)
        except NoPrincipalSubrootsException:
            raise NoPathException  # Incompatibility issue detected
        else:
            # Complement the detected left subroots with the interior hashes
            # required for a full consistency-path (a shallow copy is passed,
            # since minimal_complement mutates its argument)
            right_subroots = self.minimal_complement([_ for _ in left_subroots])
            all_subroots = left_subroots + right_subroots
            if right_subroots == [] or left_subroots == []:
                all_subroots = [(-1, _[1]) for _ in all_subroots]  # Reset all signs to minus
                proof_index = len(all_subroots) - 1  # Will start multi-hashing from endpoint
            else:
                proof_index = len(left_subroots) - 1  # Will start multi-hashing from midpoint
            # Collect sign-hash pairs
            left_path = tuple([(-1, _[1].digest) for _ in left_subroots])
            full_path = tuple([(_[0], _[1].digest) for _ in all_subroots])
            return proof_index, left_path, full_path
    def consistencyProof(self, oldhash, sublength):
        """Response of the Merkle-tree to the request of providing a consistency-proof for the provided parameters

        Arguments of this function amount to a presumed previous state (root-hash and length) of the Merkle-tree

        :param oldhash: root-hash of a presumably valid previous state of the Merkle-tree
        :type oldhash: bytes
        :param sublength: presumable length (number of leaves) for the above previous state of the Merkle-tree
        :type sublength: int
        :returns: consistency-proof appropriately formatted along with its validation parameters (so that it
            can be passed in as the second argument to the ``validations.validateProof()`` function)
        :rtype: proof.Proof

        .. note:: If no proof-path corresponds to the provided parameters (i.e., a ``NoPathException`` is raised
            implicitely) or the provided parameters do not correspond to a valid previous state of the Merkle-tree
            (i.e., the corresponding inclusion-test fails), then the generated proof contains an empty proof-path,
            or, equivalently a negative proof-index ``-1`` is inscribed in it, so that it is predestined to be
            found invalid.

        :raises InvalidProofRequest: if the type of any of the provided arguments is not as prescribed
        """
        if type(oldhash) is not bytes or type(sublength) is not int or sublength <= 0:
            raise InvalidProofRequest
        try:
            # Calculate proof path
            proof_index, left_path, full_path = self.consistency_path(sublength=sublength)
        except NoPathException:  # Includes the empty-tree case
            # Return a proof predestined to be found invalid
            return Proof(
                provider=self.uuid,
                hash_type=self.hash_type,
                encoding=self.encoding,
                security=self.security,
                proof_index=-1,
                proof_path=()
            )
        # Inclusion test: verify that the provided parameters indeed
        # correspond to a previous state of this Merkle-tree
        if oldhash == self.multi_hash(signed_hashes=left_path, start=len(left_path) - 1):
            return Proof(
                provider=self.uuid,
                hash_type=self.hash_type,
                encoding=self.encoding,
                security=self.security,
                proof_index=proof_index,
                proof_path=full_path
            )
        else:
            # Inclusion test failed: return a proof predestined to be found invalid
            return Proof(
                provider=self.uuid,
                hash_type=self.hash_type,
                encoding=self.encoding,
                security=self.security,
                proof_index=-1,
                proof_path=()
            )
# ------------------------------ Inclusion tests ------------------------------
    def inclusionTest(self, oldhash, sublength):
        """Verifies that the parameters provided correspond to a valid previous state of the Merkle-tree

        :param oldhash: root-hash of a presumably valid previous state of the Merkle-tree
        :type oldhash: bytes
        :param sublength: length (number of leaves) for the afore-mentioned previous state of the Merkle-tree
        :type sublength: int
        :returns: ``True`` if the appropriate path of negatively signed hashes, generated implicitely for the provided
            ``sublength``, leads indeed to the provided ``oldhash``; otherwise ``False``
        :rtype: bool
        :raises InvalidTypesException: if ``oldhash`` is not *bytes* or ``sublength`` is not a non-negative *int*
        :raises InvalidComparison: if ``sublength`` equals zero
        """
        if type(oldhash) is not bytes or type(sublength) is not int or sublength < 0:
            raise InvalidTypesException
        if sublength == 0:
            raise InvalidComparison
        if sublength <= len(self.leaves):
            # Generate corresponding path of negatively signed hashes
            left_roots = self.principal_subroots(sublength)
            left_path = tuple([(-1, _[1].digest) for _ in left_roots])
            # Perform hash-test
            return oldhash == self.multi_hash(signed_hashes=left_path, start=len(left_path) - 1)
        else:  # sublength exceeds the tree's current length (includes the empty-tree case)
            return False
# --------------------------------- Encryption ---------------------------
def encryptRecord(self, record):
"""Updates the Merkle-tree by storing the hash of the inserted record in a newly-created leaf,
restrucuring the tree appropriately and recalculating all necessary interior hashes
:param record: the record whose hash is to be stored into a new leaf
:type record: str or bytes or bytearray
:returns: ``0`` if the provided ``record`` was successfully encrypted, ``1`` othewise
:rtype: int
.. note:: Return-value ``1`` means that ``UndecodableRecordError`` has been implicitely raised
"""
try:
self.update(record=record)
except UndecodableRecordError:
return 1
return 0
def encryptFileContent(self, file_path):
"""Encrypts the provided file as a single new leaf into the Merkle-tree
More accurately, it updates the Merkle-tree with *one* newly-created leaf (cf. doc of the ``.update()`` method)
storing the digest of the provided file's content
:param file_path: relative path of the file under encryption with respect to the current working directory
:type file_path: str
:returns: ``0`` if the provided file was successfully encrypted, ``1`` othewise
:rtype: int
.. note:: Return-value ``1`` means that ``UndecodableRecordError`` has been implicitely raised
:raises FileNotFoundError: if the specified file does not exist
"""
try:
with open(os.path.abspath(file_path), mode='r') as _file:
with contextlib.closing(
mmap.mmap(
_file.fileno(),
0,
access=mmap.ACCESS_READ
)
) as _buffer:
try:
self.update(record=_buffer.read())
except UndecodableRecordError:
return 1
else:
return 0
except FileNotFoundError:
raise
def encryptFilePerLog(self, file_path):
"""Encrypts per log the data of the provided file into the Merkle-tree
More accurately, it successively updates the Merkle-tree (cf. doc of the ``.update()`` method)
with each line of the provided file in the respective order
:param file_path: relative path of the file under enryption with respect to the current working directory
:type file_path: str
:returns: ``0`` if the provided file was successfully encrypted, ``1`` othewise
:rtype: int
.. note:: Return-value ``1`` means that some line of the provided log-file is undecodable with the Merkle-tree's
encoding type (i.e., a ``UnicodeDecodeError`` has been implicitely raised)
:raises FileNotFoundError: if the specified file does not exist
"""
absolute_file_path = os.path.abspath(file_path)
try:
with open(absolute_file_path, mode='r') as _file:
buffer = mmap.mmap(
_file.fileno(),
0,
access=mmap.ACCESS_READ
)
except FileNotFoundError:
raise
else:
records = []
while True:
_record = buffer.readline()
if not _record:
break
else:
try:
_record = _record.decode(self.encoding)
except UnicodeDecodeError:
return 1
else:
records.append(_record)
tqdm.write('')
# Perform line by line encryption
for _record in tqdm(records, desc='Encrypting log file', total=len(records)):
self.update(record=_record)
tqdm.write('Encryption complete\n')
return 0
def encryptObject(self, object, sort_keys=False, indent=0):
"""Encrypts the provided object as a single new leaf into the Merkle-tree
More accurately, it updates (cf. doc of the ``.update()`` method) the Merkle-tree with *one* newly-created leaf
storing the digest of the provided object's stringified version
:param object: the JSON entity under encryption
:type objec: dict
:param sort_keys: [optional] Defaults to ``False``. If ``True``, then the object's keys get alphabetically sorted
before its stringification.
:type sort_keys: bool
:param indent: [optional] Defaults to ``0``. Specifies key indentation upon stringification of the provided object.
:type indent: int
"""
self.update(
record=json.dumps(
object,
sort_keys=sort_keys,
indent=indent
)
)
def encryptObjectFromFile(self, file_path, sort_keys=False, indent=0):
"""Encrypts the object within the provided ``.json`` file as a single new leaf into the Merkle-tree
More accurately, the Merkle-tree is updated with *one* newly-created leaf (cf. doc of the ``.update()`` method)
storing the digest of the stringified version of the object loaded from within the provided file
:param file_path: relative path of a ``.json`` file with respect to the current working directory,
containing *one* JSON entity
:type file_path: str
:param sort_keys: [optional] Defaults to ``False``. If ``True``, then the object's keys get alphabetically sorted
before its stringification
:type sort_keys: bool
:param indent: [optional] Defaults to ``0``. Specifies key indentation upon stringification of the object
under encryption
:type indent: int
:raises FileNotFoundError: if the specified file does not exist
:raises JSONDecodeError: if the specified file could not be deserialized
"""
try:
with open(os.path.abspath(file_path), 'rb') as _file:
object = json.load(_file)
except (FileNotFoundError, JSONDecodeError):
raise
else:
self.update(
record=json.dumps(
object,
sort_keys=sort_keys,
indent=indent
)
)
def encryptFilePerObject(self, file_path, sort_keys=False, indent=0):
"""Encrypts per object the data of the provided ``.json`` file into the Merkle-tree
More accurately, it successively updates the Merkle-tree (cf. doc of the ``.update()`` method) with each newly
created leaf storing the digest of the respective JSON entity in the list loaded from the provided file
:param file_path: relative path of a ``.json`` file with respect to the current working directory,
containing a *list* of JSON entities
:type file_path: str
:param sort_keys: [optional] Defaults to ``False``. If ``True``, then the all objects' keys get alphabetically sorted
before stringification
:type sort_keys: bool
:param indent: [optional] Defaults to ``0``. Specifies uniform key indentation upon stringification of objects
:type indent: int
:raises FileNotFoundError: if the specified file does not exist
:raises JSONDecodeError: if the specified file could not be deserialized
:raises WrongJSONFormat: if the JSON object loaded from within the provided file is not a list
"""
try:
with open(os.path.abspath(file_path), 'rb') as _file:
objects = json.load(_file)
except (FileNotFoundError, JSONDecodeError):
raise
if type(objects) is not list:
raise WrongJSONFormat
for _object in objects:
self.update(
record=json.dumps(
_object,
sort_keys=sort_keys,
indent=indent
)
)
# ------------------------ Export to and load from file ------------------
def export(self, file_path):
"""Creates a ``.json`` file at the provided path and exports the minimum required information into it, so that the
Merkle-tree can be reloaded in its current state from that file
The final file will store a JSON entity with keys ``header`` (containing the parameters ``hash_type``, ``encoding``,
and ``security``) and ``hashes``, mapping to the digests currently stored by the tree's leaves in respective order
.. note:: If the provided path does not end with ``.json``, then this extension is appended to it before exporting
.. warning:: If a file exists already for the provided path (after possibly extending with ``.json``, see above),
then it gets overwritten
:param file_path: relative path of the file to export to with respect to the current working directory
:type file_path: str
"""
with open('%s.json' % file_path if not file_path.endswith('.json') else file_path, 'w') as _file:
json.dump(
self.serialize(),
_file,
indent=4
)
@staticmethod
def loadFromFile(file_path):
"""Loads a Merkle-tree from the provided file, the latter being the result of an export (cf. the ``.export()`` method)
:param file_path: relative path of the file to load from with respect to the current working directory
:type file_path: str
:returns: the Merkle-tree laoded from the provided file
:rtype: tree.MerkleTree
:raises FileNotFoundError: if the specified file does not exist
:raises JSONDecodeError: if the specified file could not be deserialized
:raises WrongJSONFormat: if the JSON object loaded from within is not a Merkle-tree export (cf. the ``.export()`` method)
"""
try:
with open(file_path, 'r') as _file:
loaded_object = json.load(_file)
except (FileNotFoundError, JSONDecodeError):
raise
try:
_header = loaded_object['header']
_tree = MerkleTree(
hash_type=_header['hash_type'],
encoding=_header['encoding'],
security=_header['security']
)
except KeyError:
raise WrongJSONFormat
tqdm.write('\nFile has been loaded')
for hash in tqdm(loaded_object['hashes'], desc='Retreiving tree...'):
_tree.update(digest=hash)
tqdm.write('Tree has been retreived')
return _tree
# --------------------------------- Comparison ---------------------------
def __eq__(self, other):
"""Implements the ``==`` operator
:param other: the Merkle-tree to compare with
:type other: tree.MerkleTree
:raises InvalidComparison: if compared with an object that is not instance of the ``tree.MerkleTree`` class
"""
if not isinstance(other, self.__class__):
raise InvalidComparison
if not other:
return not self
else:
return True if not self else self.rootHash == other.rootHash
def __ne__(self, other):
"""Implements the ``!=`` operator
:param other: the Merkle-tree to compare with
:type other: tree.MerkleTree
:raises InvalidComparison: if compared with an object that is not instance of the ``tree.MerkleTree`` class
"""
if not isinstance(other, self.__class__):
raise InvalidComparison
if not other:
return self.__bool__()
else:
return True if not self else self.rootHash != other.rootHash
def __ge__(self, other):
"""Implements the ``>=`` operator
:param other: the Merkle-tree to compare with
:type other: tree.MerkleTree
:raises InvalidComparison: if compared with an object that is not instance of the ``tree.MerkleTree`` class
"""
if not isinstance(other, self.__class__):
raise InvalidComparison
if not other:
return True
else:
return False if not self else self.inclusionTest(other.rootHash, other.length)
def __le__(self, other):
"""Implements the ``<=`` operator
:param other: the Merkle-tree to compare with
:type other: tree.MerkleTree
:raises InvalidComparison: if compared with an object that is not instance of the ``tree.MerkleTree`` class
"""
if not isinstance(other, self.__class__):
raise InvalidComparison
else:
return other.__ge__(self)
def __gt__(self, other):
"""Implements the ``>`` operator
:param other: the Merkle-tree to compare with
:type other: tree.MerkleTree
:raises InvalidComparison: if compared with an object that is not instance of the ``tree.MerkleTree`` class
"""
if not isinstance(other, self.__class__):
raise InvalidComparison
if not other:
return self.__bool__()
elif not self or self.rootHash == other.rootHash:
return False
else:
return self.inclusionTest(other.rootHash, other.length)
def __lt__(self, other):
"""Implements the ``<`` operator
:param other: the Merkle-tree to compare with
:type other: tree.MerkleTree
:raises InvalidComparison: if compared with an object that is not instance of the ``tree.MerkleTree`` class
"""
if not isinstance(other, self.__class__):
raise InvalidComparison
else:
return other.__gt__(self)
# ------------------------------- Representation -------------------------
    def __repr__(self):
        """Overrides the default implementation

        Sole purpose of this function is to easily print info about the Merkle-tree by just invoking it at console

        .. warning:: Contrary to convention, the output of this implementation is *not* insertible to the ``eval()`` function
        """
        # NOTE: the backslash continuations below encode the exact multi-line
        # layout of the printed report; do not reflow them
        return '\n    uuid      : {uuid}\
                \n\
                \n    hash-type : {hash_type}\
                \n    encoding  : {encoding}\
                \n    security  : {security}\
                \n\
                \n    root-hash : {root_hash}\
                \n\
                \n    length    : {length}\
                \n    size      : {size}\
                \n    height    : {height}\n'.format(
            uuid=self.uuid,
            hash_type=self.hash_type.upper().replace('_', '-'),
            encoding=self.encoding.upper().replace('_', '-'),
            security='ACTIVATED' if self.security else 'DEACTIVATED',
            root_hash=self.rootHash.decode(self.encoding) if self else NONE,
            length=self.length,
            size=self.size,
            height=self.height
        )
def __str__(self, indent=3):
"""Overrides the default implementation.
Designed so that inserting the Merkle-tree as an argument to ``print()`` displays it in a terminal friendly way.
Resembles the output of the ``tree`` command at Unix based platforms.
:param indent: [optional] Defaults to ``3``. The horizontal depth at which each level will be indented with
respect to its previous one
:type indent: int
:rtype: str
.. note:: The left parent of each node is printed *above* the right one
"""
try:
_root = self.root
except EmptyTreeException:
return NONE_BAR
return _root.__str__(indent=indent, encoding=self.encoding)
# ------------------------------- Serialization --------------------------
def serialize(self):
""" Returns a JSON entity with the Merkle-trees's current characteristics and hashes currently stored by its leaves.
:rtype: dict
.. note:: This method does *not* serialize the tree structure itself, but only the info about the tree's fixed configs
and current state, so that the tree be retrievable
"""
return MerkleTreeSerializer().default(self)
def JSONstring(self):
"""Returns a nicely stringified version of the Merkle-tree's JSON serialized form
.. note:: The output of this method is to be passed into the ``print()`` function
:rtype: str
"""
return json.dumps(
self,
cls=MerkleTreeSerializer,
sort_keys=True,
indent=4
)
# ---------------------------------- Clearance ---------------------------
def clear(self):
"""Deletes all nodes of the Merkle-tree, setting its ``root`` equal to ``None``
"""
self.leaves = []
self.nodes = set()
self._root = None
| StarcoderdataPython |
33475 | <gh_stars>0
import torch
class GradReverse(torch.autograd.Function):
    """Gradient-reversal function: identity in the forward pass, negated gradient in the backward pass."""

    @staticmethod
    def forward(ctx, x):
        # Forward is the identity; view_as keeps autograd bookkeeping intact
        return x.view_as(x)

    @staticmethod
    def backward(ctx, grad_output):
        # Flip the sign of the incoming gradient for upstream modules
        return grad_output.neg()
class LambdaLayer(torch.nn.Module):
    """Wraps an arbitrary callable as an ``nn.Module`` so it can sit inside a model graph."""

    def __init__(self, fn):
        super().__init__()
        # Callable applied to the input in forward()
        self.fn = fn

    def forward(self, x):
        out = self.fn(x)
        return out
# ---------------------------------------------------------------------------
# Mecademic robot constants.
# NOTE(review): section groupings below are inferred from the MX_* name
# prefixes — confirm against the vendor's protocol documentation.
# ---------------------------------------------------------------------------

# Hardware limits and network defaults (TCP/UDP ports of the robot services).
MX_ROBOT_MAX_NB_ACCELEROMETERS = 1
MX_DEFAULT_ROBOT_IP = "192.168.0.100"
MX_ROBOT_TCP_PORT_CONTROL = 10000
MX_ROBOT_TCP_PORT_FEED = 10001
MX_ROBOT_UDP_PORT_TRACE = 10002
MX_ROBOT_UDP_PORT_RT_CTRL = 10003
MX_CHECKPOINT_ID_MIN = 1
MX_CHECKPOINT_ID_MAX = 8000
MX_ACCELEROMETER_UNIT_PER_G = 16000
MX_GRAVITY_MPS2 = 9.8067
MX_ACCELEROMETER_JOINT_M500 = 5
MX_EXT_TOOL_MPM500_NB_VALVES = 2
MX_EXT_TOOL_VBOX_MAX_VALVES = 6
MX_EIP_MAJOR_VERSION = 2
MX_EIP_MINOR_VERSION = 1
MX_NB_DYNAMIC_PDOS = 4

# Robot model identifiers.
MX_ROBOT_MODEL_UNKNOWN = 0
MX_ROBOT_MODEL_M500_R1 = 1
MX_ROBOT_MODEL_M500_R2 = 2
MX_ROBOT_MODEL_M500_R3 = 3
MX_ROBOT_MODEL_M1000_R1 = 10
MX_ROBOT_MODEL_SCARA_R1 = 20

# External tool types.
MX_EXT_TOOL_NONE = 0
MX_EXT_TOOL_MEGP25_SHORT = 1
MX_EXT_TOOL_MEGP25_LONG = 2
MX_EXT_TOOL_VBOX_2VALVES = 3
MX_EXT_TOOL_TYPE_INVALID = 0xFFFFFFFF

# External tool operating modes.
MX_EXT_TOOL_COMPLEMENTARY = 0
MX_EXT_TOOL_INDEPENDENT = 1
MX_EXT_TOOL_POSITION = 2
MX_EXT_TOOL_MODE_INVALID = 0xFFFFFFFF

# Valve states.
MX_VALVE_STATE_STAY = -1
MX_VALVE_STATE_CLOSE = 0
MX_VALVE_STATE_OPEN = 1

# Event severity levels.
MX_EVENT_SEVERITY_SILENT = 0
MX_EVENT_SEVERITY_WARNING = 1
MX_EVENT_SEVERITY_PAUSE_MOTION = 2
MX_EVENT_SEVERITY_CLEAR_MOTION = 3
MX_EVENT_SEVERITY_ERROR = 4
MX_EVENT_SEVERITY_INVALID = 0xFFFFFFFF

# Torque-limit detection modes.
MX_TORQUE_LIMITS_DETECT_ALL = 0
MX_TORQUE_LIMITS_DETECT_SKIP_ACCEL = 1
MX_TORQUE_LIMITS_INVALID = 0xFFFFFFFF

# Motion command type ids.
MX_MOTION_CMD_TYPE_NO_MOVE = 0
MX_MOTION_CMD_TYPE_MOVEJOINTS = 1
MX_MOTION_CMD_TYPE_MOVEPOSE = 2
MX_MOTION_CMD_TYPE_MOVELIN = 3
MX_MOTION_CMD_TYPE_MOVELINRELTRF = 4
MX_MOTION_CMD_TYPE_MOVELINRELWRF = 5
MX_MOTION_CMD_TYPE_DELAY = 6
MX_MOTION_CMD_TYPE_SETBLENDING = 7
MX_MOTION_CMD_TYPE_SETJOINTVEL = 8
MX_MOTION_CMD_TYPE_SETJOINTACC = 9
MX_MOTION_CMD_TYPE_SETCARTANGVEL = 10
MX_MOTION_CMD_TYPE_SETCARTLINVEL = 11
MX_MOTION_CMD_TYPE_SETCARTACC = 12
MX_MOTION_CMD_TYPE_SETTRF = 13
MX_MOTION_CMD_TYPE_SETWRF = 14
MX_MOTION_CMD_TYPE_SETCONF = 15
MX_MOTION_CMD_TYPE_SETAUTOCONF = 16
MX_MOTION_CMD_TYPE_SETCHECKPOINT = 17
MX_MOTION_CMD_TYPE_GRIPPER = 18
MX_MOTION_CMD_TYPE_GRIPPERVEL = 19
MX_MOTION_CMD_TYPE_GRIPPERFORCE = 20
MX_MOTION_CMD_TYPE_MOVEJOINTSVEL = 21
MX_MOTION_CMD_TYPE_MOVELINVELWRF = 22
MX_MOTION_CMD_TYPE_MOVELINVELTRF = 23
MX_MOTION_CMD_TYPE_VELCTRLTIMEOUT = 24
MX_MOTION_CMD_TYPE_SETCONFTURN = 25
MX_MOTION_CMD_TYPE_SETAUTOCONFTURN = 26
MX_MOTION_CMD_TYPE_SETTORQUELIMITS = 27
MX_MOTION_CMD_TYPE_SETTORQUELIMITSCFG = 28
MX_MOTION_CMD_TYPE_MOVEJOINTSREL = 29
MX_MOTION_CMD_TYPE_SETVALVESTATE = 30
MX_MOTION_CMD_TYPE_START_OFFLINE_PROGRAM = 100
MX_MOTION_CMD_TYPE_SETDBG = 1000

# EtherNet/IP dynamic PDO content ids.
MX_EIP_DYNAMIC_AUTO = 0
MX_EIP_DYNAMIC_CFG_FW_VERSION = 1
MX_EIP_DYNAMIC_CFG_PRODUCT_TYPE = 2
MX_EIP_DYNAMIC_CFG_ROBOT_SERIAL = 3
MX_EIP_DYNAMIC_CFG_JOINT_OFFSET = 4
MX_EIP_DYNAMIC_CFG_ROBOT_DH_MODEL_1 = 5
MX_EIP_DYNAMIC_CFG_ROBOT_DH_MODEL_2 = 6
MX_EIP_DYNAMIC_CFG_ROBOT_DH_MODEL_3 = 7
MX_EIP_DYNAMIC_CFG_ROBOT_DH_MODEL_4 = 8
MX_EIP_DYNAMIC_CFG_ROBOT_DH_MODEL_5 = 9
MX_EIP_DYNAMIC_CFG_ROBOT_DH_MODEL_6 = 10
MX_EIP_DYNAMIC_CFG_JOINT_LIMITS_CFG = 11
MX_EIP_DYNAMIC_CFG_MODEL_JOINT_LIMITS_1_2_3 = 12
MX_EIP_DYNAMIC_CFG_MODEL_JOINT_LIMITS_4_5_6 = 13
MX_EIP_DYNAMIC_CFG_JOINT_LIMITS_1_2_3 = 14
MX_EIP_DYNAMIC_CFG_JOINT_LIMITS_4_5_6 = 15
MX_EIP_DYNAMIC_MQ_CONF = 20
MX_EIP_DYNAMIC_MQ_PARAMS = 21
MX_EIP_DYNAMIC_MQ_VEL_ACCEL = 22
MX_EIP_DYNAMIC_MQ_GRIPPER_CFG = 23
MX_EIP_DYNAMIC_MQ_TORQUE_LIMITS_CFG = 24
MX_EIP_DYNAMIC_MQ_TORQUE_LIMITS = 25
MX_EIP_DYNAMIC_RT_TARGET_JOINT_POS = 30
MX_EIP_DYNAMIC_RT_TARGET_CART_POS = 31
MX_EIP_DYNAMIC_RT_TARGET_JOINT_VEL = 32
MX_EIP_DYNAMIC_RT_TARGET_JOINT_TORQ = 33
MX_EIP_DYNAMIC_RT_TARGET_CART_VEL = 34
MX_EIP_DYNAMIC_RT_TARGET_CONF = 35
MX_EIP_DYNAMIC_RT_JOINT_POS = 40
MX_EIP_DYNAMIC_RT_CART_POS = 41
MX_EIP_DYNAMIC_RT_JOINT_VEL = 42
MX_EIP_DYNAMIC_RT_JOINT_TORQ = 43
MX_EIP_DYNAMIC_RT_CART_VEL = 44
MX_EIP_DYNAMIC_RT_CONF = 45
MX_EIP_DYNAMIC_RT_ACCELEROMETER_5 = 46
MX_EIP_DYNAMIC_RT_WRF = 50
MX_EIP_DYNAMIC_RT_TRF = 51
MX_EIP_DYNAMIC_RT_EXTTOOL_STATUS = 52
MX_EIP_DYNAMIC_RT_GRIPPER_VALVE_STATE = 53
MX_EIP_DYNAMIC_FORCE_32_BITS = 0xFFFFFFFF

# Robot status codes.
# 1xxx: command rejection / error replies.
MX_ST_BUFFER_FULL = 1000
MX_ST_UNKNOWN_CMD = 1001
MX_ST_SYNTAX_ERR = 1002
MX_ST_ARG_ERR = 1003
MX_ST_NOT_ACTIVATED = 1005
MX_ST_NOT_HOMED = 1006
MX_ST_JOINT_OVER_LIMIT = 1007
MX_ST_VEL_OVER_LIMIT = 1008
MX_ST_ACCEL_OVER_LIMIT = 1009
MX_ST_BLOCKED_BY_180_DEG_PROT = 1010
MX_ST_ALREADY_ERR = 1011
MX_ST_SINGULARITY_ERR = 1012
MX_ST_ACTIVATION_ERR = 1013
MX_ST_HOMING_ERR = 1014
MX_ST_MASTER_ERR = 1015
MX_ST_OUT_OF_REACH = 1016
MX_ST_COMM_ERR = 1017
MX_ST_EOS_MISSING = 1018
MX_ST_ROBOT_NOT_LEVELED = 1019
MX_ST_BRAKES_ERR = 1020
MX_ST_DEACTIVATION_ERR = 1021
MX_ST_OFFLINE_SAVE_ERR = 1022
MX_ST_IGNORE_CMD_OFFLINE = 1023
MX_ST_MASTERING_NEEDED = 1024
MX_ST_IMPOSSIBLE_RESET_ERR = 1025
MX_ST_MUST_BE_DEACTIVATED = 1026
MX_ST_SIM_MUST_DEACTIVATED = 1027
MX_ST_NETWORK_ERR = 1028
MX_ST_OFFLINE_FULL = 1029
MX_ST_ALREADY_SAVING = 1030
MX_ST_ILLEGAL_WHILE_SAVING = 1031
MX_ST_GRIPPER_FORCE_OVER_LIMIT = 1035
MX_ST_GRIPPER_VEL_OVER_LIMIT = 1036
MX_ST_GRIPPER_RANGE_OVER_LIMIT = 1037
MX_ST_NO_GRIPPER = 1038
MX_ST_GRIPPER_TEMP_OVER_LIMIT = 1039
MX_ST_CMD_FAILED = 1040
MX_ST_NO_VBOX = 1041
# 2xxx: command acknowledgements and query replies.
MX_ST_ACTIVATED = 2000
MX_ST_ALREADY_ACTIVATED = 2001
MX_ST_HOME_DONE = 2002
MX_ST_HOME_ALREADY = 2003
MX_ST_DEACTIVATED = 2004
MX_ST_ERROR_RESET = 2005
MX_ST_NO_ERROR_RESET = 2006
MX_ST_GET_STATUS_ROBOT = 2007
MX_ST_BRAKES_OFF = 2008
MX_ST_MASTER_DONE = 2009
MX_ST_BRAKES_ON = 2010
MX_ST_GET_WRF = 2013
MX_ST_GET_TRF = 2014
MX_ST_SET_CART_VEL = 2020
MX_ST_SET_CART_ACC = 2021
MX_ST_SET_JOINT_VEL = 2022
MX_ST_SET_JOINT_ACC = 2023
MX_ST_SET_TOOL_DEF = 2024
MX_ST_SET_WRF = 2025
MX_ST_GET_JOINTS = 2026
MX_ST_GET_POSE = 2027
MX_ST_GET_AUTO_CONF = 2028
MX_ST_GET_CONF = 2029
MX_ST_GET_PHYS_CONF = 2030
MX_ST_GET_AUTO_CONF_TURN = 2031
MX_ST_SET_CORNERING = 2032
MX_ST_CLR_CORNERING = 2033
MX_ST_AUTOCONF_ON = 2034
MX_ST_AUTOCONF_OFF = 2035
MX_ST_GET_CONF_TURN = 2036
MX_ST_ACT_POS_FEED = 2038
MX_ST_DEACT_POS_FEED = 2039
MX_ST_ACT_JOINTS_FEED = 2040
MX_ST_DEACT_JOINTS_FEED = 2041
MX_ST_PAUSE_MOTION = 2042
MX_ST_RESUME_MOTION = 2043
MX_ST_CLEAR_MOTION = 2044
MX_ST_SIM_ON = 2045
MX_ST_SIM_OFF = 2046
MX_ST_EXTTOOL_SIM = 2047
MX_ST_EXTTOOL_SIM_OFF = 2048
MX_ST_RECOVERY_MODE_ON = 2049
MX_ST_RECOVERY_MODE_OFF = 2050
MX_ST_RECOVERY_VEL_CAP = 2051
MX_ST_EOM_ON = 2052
MX_ST_EOM_OFF = 2053
MX_ST_EOB_ON = 2054
MX_ST_EOB_OFF = 2055
MX_ST_START_SAVING = 2060
MX_ST_N_CMD_SAVED = 2061
MX_ST_OFFLINE_ALREADY_SAVING = 2062
MX_ST_OFFLINE_START = 2063
MX_ST_OFFLINE_LOOP_ON = 2064
MX_ST_OFFLINE_LOOP_OFF = 2065
MX_ST_START_PROGRAM_ARDY = 2066
MX_ST_SET_CART_DELTAREF_WRF = 2067
MX_ST_SET_CART_DELTAREF_TRF = 2068
MX_ST_ACTIVATION_IN_PROGRESS = 2070
MX_ST_HOMING_IN_PROGRESS = 2071
MX_ST_MASTER_IN_PROGRESS = 2072
MX_ST_GRIP_HOME = 2075
MX_ST_GRIP_ARD_HOME = 2076
MX_ST_SET_GRIP_FORCE = 2077
MX_ST_SET_GRIP_VEL = 2078
MX_ST_GET_STATUS_GRIPPER = 2079
MX_ST_GET_CMD_PENDING_COUNT = 2080
MX_ST_GET_FW_VERSION = 2081
MX_ST_GET_FW_VERSION_FULL = 2082
MX_ST_GET_ROBOT_SERIAL = 2083
MX_ST_GET_PRODUCT_TYPE = 2084
MX_ST_CMD_SUCCESSFUL = 2085
MX_ST_GET_JOINT_LIMITS = 2090
MX_ST_SET_JOINT_LIMITS = 2092
MX_ST_SET_JOINT_LIMITS_CFG = 2093
MX_ST_GET_JOINT_LIMITS_CFG = 2094
MX_ST_GET_ROBOT_NAME = 2095
MX_ST_SET_CTRL_PORT_MONIT = 2096
MX_ST_SYNC_CMD_QUEUE = 2097
MX_ST_JOINT_TORQUE = 2100
MX_ST_JOINT_SPEED = 2101
MX_ST_JOINT_POS = 2102
MX_ST_CART_POSE = 2103
MX_ST_TEMPERATURE = 2104
MX_ST_GET_ROBOT_KIN_MODEL = 2110
MX_ST_GET_ROBOT_DH_MODEL = 2111
MX_ST_GET_JOINT_OFFSET = 2112
MX_ST_GET_MODEL_JOINT_LIMITS = 2113
MX_ST_GET_MOTION_OPTIONS = 2115
MX_ST_GET_MONITORING_INTERVAL = 2116
MX_ST_GET_REAL_TIME_MONITORING = 2117
MX_ST_GET_STATUS_EVENTS = 2118
MX_ST_GET_NETWORK_OPTIONS = 2119
MX_ST_GET_RTC = 2140
MX_ST_GET_BLENDING = 2150
MX_ST_GET_VEL_TIMEOUT = 2151
MX_ST_GET_JOINT_VEL = 2152
MX_ST_GET_JOINT_ACC = 2153
MX_ST_GET_CART_LIN_VEL = 2154
MX_ST_GET_CART_ANG_VEL = 2155
MX_ST_GET_CART_ACC = 2156
MX_ST_GET_CHECKPOINT = 2157
MX_ST_GET_GRIPPER_FORCE = 2158
MX_ST_GET_GRIPPER_VEL = 2159
MX_ST_GET_TORQUE_LIMITS_CFG = 2160
MX_ST_GET_TORQUE_LIMITS = 2161
# 22xx-23xx: real-time monitoring messages.
MX_ST_RT_TARGET_JOINT_POS = 2200
MX_ST_RT_TARGET_CART_POS = 2201
MX_ST_RT_TARGET_JOINT_VEL = 2202
MX_ST_RT_TARGET_JOINT_TORQ = 2203
MX_ST_RT_TARGET_CART_VEL = 2204
MX_ST_RT_TARGET_CONF = 2208
MX_ST_RT_TARGET_CONF_TURN = 2209
MX_ST_RT_JOINT_POS = 2210
MX_ST_RT_CART_POS = 2211
MX_ST_RT_JOINT_VEL = 2212
MX_ST_RT_JOINT_TORQ = 2213
MX_ST_RT_CART_VEL = 2214
MX_ST_RT_CONF = 2218
MX_ST_RT_CONF_TURN = 2219
MX_ST_RT_ACCELEROMETER = 2220
MX_ST_RT_CHECKPOINT = 2227
MX_ST_RT_WRF = 2228
MX_ST_RT_TRF = 2229
MX_ST_RT_CYCLE_END = 2230
MX_ST_RT_EXTTOOL_STATUS = 2300
MX_ST_RT_VALVE_STATE = 2310
MX_ST_RT_GRIPPER_STATE = 2320
MX_ST_RT_GRIPPER_FORCE = 2321
MX_ST_RT_GRIPPER_POS = 2322
# 3xxx: asynchronous events.
MX_ST_CONNECTED = 3000
MX_ST_USER_ALREADY = 3001
MX_ST_UPGRADE_IN_PROGRESS = 3002
MX_ST_CMD_TOO_LONG = 3003
MX_ST_EOM = 3004
MX_ST_ERROR_MOTION = 3005
MX_ST_SEND_JOINT_RT = 3007
MX_ST_COLLISION = 3008
MX_ST_INIT_FAILED = 3009
MX_ST_SEND_POS_RT = 3010
MX_ST_CANNOT_MOVE = 3011
MX_ST_EOB = 3012
MX_ST_END_OFFLINE = 3013
MX_ST_CANT_SAVE_OFFLINE = 3014
MX_ST_OFFLINE_TIMEOUT = 3015
MX_ST_IGNORING_CMD = 3016
MX_ST_NO_OFFLINE_SAVED = 3017
MX_ST_OFFLINE_LOOP = 3018
MX_ST_JOGGING_STOPPED = 3019
MX_ST_ERROR_GRIPPER = 3025
MX_ST_MAINTENANCE_CHECK = 3026
MX_ST_INTERNAL_ERROR = 3027
MX_ST_EXCESSIVE_TRQ = 3028
MX_ST_CHECKPOINT_REACHED = 3030
MX_ST_TEXT_API_ERROR = 3031
MX_ST_PSTOP = 3032
MX_ST_NO_VALID_CFG = 3033
MX_ST_TRACE_LVL_CHANGED = 3034
MX_ST_TCP_DUMP_STARTED = 3035
MX_ST_TCP_DUMP_DONE = 3036
MX_ST_ERROR_VBOX = 3037
MX_ST_INVALID = 0xFFFFFFFF
class RobotStatusCodeInfo:
    """Metadata describing one of the robot status codes above
    (ex: ``MX_ST_BUFFER_FULL``).

    Attributes
    ----------
    code : int
        The numeric status code value (ex: 1000).
    name : str
        The symbolic constant name (ex: "MX_ST_BUFFER_FULL").
    is_error : bool
        True if this code reports an error condition.
    """

    def __init__(self, code: int, name: str, is_error: bool):
        self.code = code
        self.name = name
        self.is_error = is_error

    def __repr__(self) -> str:
        # Helpful when status codes are logged or inspected in a debugger.
        return (f"{type(self).__name__}(code={self.code!r}, "
                f"name={self.name!r}, is_error={self.is_error!r})")
# Name -> is_error flag for every status code that carries metadata.
# The previous literal dict repeated each code three times (constant,
# name string, flag); this table keeps a single source of truth per entry.
# Entry order matches the original literal definition.
_STATUS_CODE_IS_ERROR = {
    "MX_ST_BUFFER_FULL": True,
    "MX_ST_UNKNOWN_CMD": True,
    "MX_ST_SYNTAX_ERR": True,
    "MX_ST_ARG_ERR": True,
    "MX_ST_NOT_ACTIVATED": True,
    "MX_ST_NOT_HOMED": True,
    "MX_ST_JOINT_OVER_LIMIT": True,
    "MX_ST_BLOCKED_BY_180_DEG_PROT": True,
    "MX_ST_ALREADY_ERR": True,
    "MX_ST_SINGULARITY_ERR": True,
    "MX_ST_ACTIVATION_ERR": True,
    "MX_ST_HOMING_ERR": True,
    "MX_ST_MASTER_ERR": True,
    "MX_ST_OUT_OF_REACH": True,
    "MX_ST_OFFLINE_SAVE_ERR": True,
    "MX_ST_IGNORE_CMD_OFFLINE": True,
    "MX_ST_MASTERING_NEEDED": True,
    "MX_ST_IMPOSSIBLE_RESET_ERR": True,
    "MX_ST_MUST_BE_DEACTIVATED": True,
    "MX_ST_SIM_MUST_DEACTIVATED": True,
    "MX_ST_OFFLINE_FULL": True,
    "MX_ST_ALREADY_SAVING": True,
    "MX_ST_ILLEGAL_WHILE_SAVING": True,
    "MX_ST_NO_GRIPPER": True,
    "MX_ST_NO_VBOX": True,
    "MX_ST_CMD_FAILED": True,
    "MX_ST_ACTIVATED": False,
    "MX_ST_ALREADY_ACTIVATED": False,
    "MX_ST_HOME_DONE": False,
    "MX_ST_HOME_ALREADY": False,
    "MX_ST_DEACTIVATED": False,
    "MX_ST_ERROR_RESET": False,
    "MX_ST_NO_ERROR_RESET": False,
    "MX_ST_GET_STATUS_ROBOT": False,
    "MX_ST_BRAKES_OFF": False,
    "MX_ST_MASTER_DONE": False,
    "MX_ST_BRAKES_ON": False,
    "MX_ST_GET_WRF": False,
    "MX_ST_GET_TRF": False,
    "MX_ST_GET_JOINTS": False,
    "MX_ST_GET_POSE": False,
    "MX_ST_GET_AUTO_CONF": False,
    "MX_ST_GET_CONF": False,
    "MX_ST_GET_AUTO_CONF_TURN": False,
    "MX_ST_GET_CONF_TURN": False,
    "MX_ST_PAUSE_MOTION": False,
    "MX_ST_RESUME_MOTION": False,
    "MX_ST_CLEAR_MOTION": False,
    "MX_ST_SIM_ON": False,
    "MX_ST_SIM_OFF": False,
    "MX_ST_EXTTOOL_SIM": False,
    "MX_ST_EOM_ON": False,
    "MX_ST_EOM_OFF": False,
    "MX_ST_EOB_ON": False,
    "MX_ST_EOB_OFF": False,
    "MX_ST_START_SAVING": False,
    "MX_ST_N_CMD_SAVED": False,
    "MX_ST_OFFLINE_START": False,
    "MX_ST_OFFLINE_LOOP_ON": False,
    "MX_ST_OFFLINE_LOOP_OFF": False,
    "MX_ST_GET_STATUS_GRIPPER": False,
    "MX_ST_GET_CMD_PENDING_COUNT": False,
    "MX_ST_GET_FW_VERSION": False,
    "MX_ST_GET_FW_VERSION_FULL": False,
    "MX_ST_GET_ROBOT_SERIAL": False,
    "MX_ST_GET_PRODUCT_TYPE": False,
    "MX_ST_CMD_SUCCESSFUL": False,
    "MX_ST_SET_CTRL_PORT_MONIT": False,
    "MX_ST_SYNC_CMD_QUEUE": False,
    "MX_ST_GET_JOINT_LIMITS": False,
    "MX_ST_SET_JOINT_LIMITS": False,
    "MX_ST_SET_JOINT_LIMITS_CFG": False,
    "MX_ST_GET_JOINT_LIMITS_CFG": False,
    "MX_ST_GET_ROBOT_NAME": False,
    "MX_ST_GET_ROBOT_KIN_MODEL": False,
    "MX_ST_GET_ROBOT_DH_MODEL": False,
    "MX_ST_GET_JOINT_OFFSET": False,
    "MX_ST_GET_MODEL_JOINT_LIMITS": False,
    "MX_ST_GET_MOTION_OPTIONS": False,
    "MX_ST_GET_MONITORING_INTERVAL": False,
    "MX_ST_GET_REAL_TIME_MONITORING": False,
    "MX_ST_GET_STATUS_EVENTS": False,
    "MX_ST_GET_NETWORK_OPTIONS": False,
    "MX_ST_GET_RTC": False,
    "MX_ST_GET_BLENDING": False,
    "MX_ST_GET_VEL_TIMEOUT": False,
    "MX_ST_GET_JOINT_VEL": False,
    "MX_ST_GET_JOINT_ACC": False,
    "MX_ST_GET_CART_LIN_VEL": False,
    "MX_ST_GET_CART_ANG_VEL": False,
    "MX_ST_GET_CART_ACC": False,
    "MX_ST_GET_CHECKPOINT": False,
    "MX_ST_GET_GRIPPER_FORCE": False,
    "MX_ST_GET_GRIPPER_VEL": False,
    "MX_ST_GET_TORQUE_LIMITS_CFG": False,
    "MX_ST_GET_TORQUE_LIMITS": False,
    "MX_ST_RT_TARGET_JOINT_POS": False,
    "MX_ST_RT_TARGET_CART_POS": False,
    "MX_ST_RT_TARGET_JOINT_VEL": False,
    "MX_ST_RT_TARGET_JOINT_TORQ": False,
    "MX_ST_RT_TARGET_CART_VEL": False,
    "MX_ST_RT_TARGET_CONF": False,
    "MX_ST_RT_TARGET_CONF_TURN": False,
    "MX_ST_RT_JOINT_POS": False,
    "MX_ST_RT_CART_POS": False,
    "MX_ST_RT_JOINT_VEL": False,
    "MX_ST_RT_JOINT_TORQ": False,
    "MX_ST_RT_CART_VEL": False,
    "MX_ST_RT_CONF": False,
    "MX_ST_RT_CONF_TURN": False,
    "MX_ST_RT_ACCELEROMETER": False,
    "MX_ST_RT_GRIPPER_FORCE": False,
    "MX_ST_RT_EXTTOOL_STATUS": False,
    "MX_ST_RT_GRIPPER_STATE": False,
    "MX_ST_RT_VALVE_STATE": False,
    "MX_ST_RT_CHECKPOINT": False,
    "MX_ST_RT_WRF": False,
    "MX_ST_RT_TRF": False,
    "MX_ST_RT_CYCLE_END": False,
    "MX_ST_CONNECTED": False,
    "MX_ST_USER_ALREADY": True,
    "MX_ST_UPGRADE_IN_PROGRESS": False,
    "MX_ST_CMD_TOO_LONG": True,
    "MX_ST_EOM": False,
    "MX_ST_ERROR_MOTION": True,
    "MX_ST_INIT_FAILED": True,
    "MX_ST_EOB": False,
    "MX_ST_END_OFFLINE": False,
    "MX_ST_CANT_SAVE_OFFLINE": True,
    "MX_ST_IGNORING_CMD": True,
    "MX_ST_NO_OFFLINE_SAVED": True,
    "MX_ST_OFFLINE_LOOP": False,
    "MX_ST_ERROR_GRIPPER": True,
    "MX_ST_ERROR_VBOX": True,
    "MX_ST_MAINTENANCE_CHECK": True,
    "MX_ST_INTERNAL_ERROR": True,
    "MX_ST_EXCESSIVE_TRQ": True,
    "MX_ST_CHECKPOINT_REACHED": False,
    "MX_ST_TEXT_API_ERROR": True,
    "MX_ST_PSTOP": True,
    "MX_ST_NO_VALID_CFG": True,
    "MX_ST_TRACE_LVL_CHANGED": False,
    "MX_ST_TCP_DUMP_STARTED": False,
    "MX_ST_TCP_DUMP_DONE": False,
}

# Map of numeric status code -> RobotStatusCodeInfo, built from the table
# above. globals() resolves each MX_ST_* constant defined in this module.
robot_status_code_info = {
    globals()[_name]: RobotStatusCodeInfo(globals()[_name], _name, is_error=_is_error)
    for _name, _is_error in _STATUS_CODE_IS_ERROR.items()
}
| StarcoderdataPython |
3392618 | <gh_stars>1-10
import os
import json
import pickle
from datetime import date
languages = ['fi', 'sv']  # language codes of the two parallel Yle corpora


def _parse_article(art, none_id):
    """Flatten one raw Yle article JSON object into the dict format used here.

    :param art: raw article object from a dump file
    :param none_id: subject id marking a placeholder tag to be skipped
    :return: dict with ``date``, ``headline``, ``content``, ``article_id``
        and, when real tags are attached, ``subjects`` / ``subject_ids``
    """
    # 'datePublished' looks like '2013-01-05T10:00:00+0200'.
    date_parts = art['datePublished'].split("-")
    parsed_date = date(
        year=int(date_parts[0]),
        month=int(date_parts[1]),
        day=int(date_parts[-1].split("T")[0]),
    )
    # Concatenate the plain-text segments of the article body, each
    # followed by a space (matching the original behaviour).
    content = ""
    for segment in art['content']:
        if 'type' in segment and segment['type'] == 'text':
            content += segment['text'] + " "
    parsed = {
        "date": parsed_date,
        "headline": art['headline']['full'],
        "content": content,
        "article_id": art['id'],
    }
    subject_titles = []
    subject_ids = []
    for sub in art.get('subjects', []):
        # NOTE: tag titles are read from the Finnish variant for both
        # languages, as in the original implementation.
        title = sub['title']['fi']
        if sub['id'] != none_id:
            subject_titles.append(title)
            subject_ids.append(sub['id'])
    if subject_titles and subject_ids:
        parsed['subjects'] = subject_titles
        parsed['subject_ids'] = subject_ids
    return parsed


def process_articles(path="../data/yle", start_year=2012, end_year=2014):
    """Read Yle news dumps and return parsed articles per language.

    Expects the directory layout ``<path>/<lang>/<year>/<month>/*.json``,
    each file containing ``{"data": [article, ...]}``.

    :param path: root directory of the corpus
    :param start_year: first year to include (inclusive)
    :param end_year: last year to include (inclusive)
    :return: dict mapping each language in ``languages`` to a list of
        parsed article dicts (see ``_parse_article``)
    """
    articles = {lang: [] for lang in languages}
    None_id = '18-3626'  # placeholder "None" tag id present in the corpus
    for lang in languages:
        print("Processing articles in: ", lang)
        path_lang = os.path.join(path, lang)
        for y in [str(year) for year in range(start_year, end_year + 1)]:
            path_year = os.path.join(path_lang, y)
            for m in sorted(os.listdir(path_year)):
                path_month = os.path.join(path_year, m)
                # Newest files first, as in the original implementation.
                for f in sorted(os.listdir(path_month), reverse=True):
                    path_file = os.path.join(path_month, f)
                    print("Processing file:", path_file)
                    # 'with' closes the handle; the original leaked one open
                    # file descriptor per processed file.
                    with open(path_file, 'r') as json_file:
                        json_dict = json.load(json_file)
                    for art in json_dict['data']:
                        articles[lang].append(_parse_article(art, None_id))
    return articles
def align_articles_one_to_one(articles):
    """Align each Finnish article with at most one Swedish article.

    A Swedish article published within 3 days of the Finnish one is a match
    when at least 7 of the Finnish article's tags appear verbatim in the
    Swedish text; the first match wins. Non-matching pairs inside the date
    window are collected as "unmatched" for validation/testing.

    NOTE(review): the inline comments in the original claimed a 2-day
    window and a 3-tag threshold while the code uses 3 days and 7 tags;
    the code's values are preserved here — confirm which was intended.

    :param articles: dict with 'fi' and 'sv' article lists
    :return: tuple ``(aligned, unmatched)``, each a dict with parallel
        'fi' and 'sv' lists
    """
    aa = {lang: [] for lang in languages}
    unmatched = {lang: [] for lang in languages}
    aa_count = 0
    un_count = 0
    for art_fi in articles['fi']:
        date_fi = art_fi['date']
        if date_fi.year:
            if 'subjects' in art_fi.keys():
                subjects_fi = [s for s in art_fi['subjects'] if s is not None]
                for art_sv in articles['sv']:
                    day_delta = (art_sv['date'] - date_fi).days
                    # Consider Swedish articles within the 3-day window.
                    if abs(day_delta) <= 3:
                        text_sv = art_sv['content']
                        # Finnish tags mentioned verbatim in the Swedish text.
                        # (An unused set-intersection variable was removed.)
                        subjects_sv = [s for s in subjects_fi if s in text_sv]
                        if len(subjects_sv) >= 7:
                            aa['fi'].append(art_fi)
                            aa['sv'].append(art_sv)
                            aa_count += 1
                            print(date_fi)
                            print("Aligned articles:", aa_count)
                            # One-to-one: stop at the first matching article.
                            break
                        else:
                            # Store non-matching in-window pairs for later
                            # validation/testing.
                            unmatched['fi'].append(art_fi)
                            unmatched['sv'].append(art_sv)
                            un_count += 1
                    elif day_delta >= 30:
                        # Presumably the input is date-sorted, so later
                        # articles only get further away — TODO confirm.
                        break
    print("Total aligned articles: ", aa_count)
    print("Total unmatched articles: ", un_count)
    return aa, unmatched
# link one Finnish news article to one or more Swedish articles
def align_articles_one_to_many(articles):
    """Link each Finnish article to every Swedish article published within
    30 days that shares at least 3 tags.

    Bug fixed: the original referenced ``subjects_sv`` whose assignment had
    been commented out, and read ``subjects_sv2`` even when the Swedish
    article had no 'subjects' key — raising ``NameError`` (or silently
    reusing a stale value from a previous loop iteration). The match
    condition on ``subjects_sv2`` (tag-list intersection) is preserved.

    :param articles: dict with 'fi' and 'sv' article lists
    :return: dict mapping Finnish article ids to the (mutated) Finnish
        article dicts, each carrying a ``related_articles`` list
    """
    aligned_articles = {}
    for art_fi in articles['fi']:
        print("Aligning Finnish article", art_fi['article_id'])
        date_fi = art_fi['date']
        if date_fi.year and 'subjects' in art_fi:
            subjects_fi = [s for s in art_fi['subjects'] if s is not None]
            for art_sv in articles['sv']:
                day_delta = (art_sv['date'] - date_fi).days
                if abs(day_delta) <= 30:
                    # Reset per candidate article (the original left this
                    # undefined or stale when 'subjects' was missing).
                    subjects_sv2 = []
                    if 'subjects' in art_sv:
                        subjects_sv2 = list(set(art_sv['subjects']).intersection(set(subjects_fi)))
                    if len(subjects_sv2) >= 3:
                        art_fi.setdefault('related_articles', []).append(art_sv)
                elif day_delta >= 30:
                    # Presumably the input is date-sorted — TODO confirm.
                    break
        if 'related_articles' in art_fi:
            aligned_articles[art_fi['article_id']] = art_fi
    return aligned_articles
def align_articles_one_to_many_monthly(articles):
    """Link each Finnish article to every Swedish article from the same
    calendar month that shares at least 3 tags.

    Bug fixed: the original read ``subjects_sv2`` even when the Swedish
    article had no 'subjects' key — raising ``NameError`` (or silently
    reusing a stale value from a previous loop iteration). A dead
    text-containment computation whose result was never used by the match
    condition was also removed.

    :param articles: dict with 'fi' and 'sv' article lists
    :return: dict mapping Finnish article ids to the (mutated) Finnish
        article dicts, each carrying a ``related_articles`` list
    """
    aligned_articles = {}
    for art_fi in articles['fi']:
        print("Aligning Finnish article", art_fi['article_id'])
        date_fi = art_fi['date']
        if date_fi.year and 'subjects' in art_fi:
            subjects_fi = [s for s in art_fi['subjects'] if s is not None]
            for art_sv in articles['sv']:
                date_sv = art_sv['date']
                # Only pair articles from the same year and month.
                if date_fi.year == date_sv.year and date_fi.month == date_sv.month:
                    # Reset per candidate article (the original left this
                    # undefined or stale when 'subjects' was missing).
                    subjects_sv2 = []
                    if 'subjects' in art_sv:
                        subjects_sv2 = list(set(art_sv['subjects']).intersection(set(subjects_fi)))
                    if len(subjects_sv2) >= 3:
                        if 'related_articles' not in art_fi:
                            art_fi['related_articles'] = []
                        art_fi['related_articles'].append(art_sv)
        if 'related_articles' in art_fi:
            aligned_articles[art_fi['article_id']] = art_fi
    return aligned_articles
# link one Finnish news article to one or more Swedish articles
def align_articles_one_to_many2(articles):
    """Link each Finnish article to every Swedish article published within
    5 days that shares at least 3 tags (by tag mention in the text or by
    the two articles' tag lists).

    :param articles: dict with 'fi' and 'sv' article lists
    :return: tuple ``(aligned_articles, aligned_dict)`` — the first maps
        Finnish article ids to the (mutated) article dicts carrying a
        ``related_articles`` list, the second maps Finnish article ids to
        the matched Swedish article ids
    """
    aligned_articles = {}
    aligned_dict = {}
    for art_fi in articles['fi']:
        print("Aligning Finnish article", art_fi['article_id'])
        date_fi = art_fi['date']
        if date_fi.year and 'subjects' in art_fi:
            fi_tags = [tag for tag in art_fi['subjects'] if tag is not None]
            for candidate in articles['sv']:
                delta = (candidate['date'] - date_fi).days
                if abs(delta) <= 5:
                    # Finnish tags mentioned verbatim in the Swedish text...
                    shared = [tag for tag in fi_tags if tag in candidate['content']]
                    # ...merged (deduplicated) with the common tag-list entries.
                    if 'subjects' in candidate:
                        common = list(set(candidate['subjects']).intersection(set(fi_tags)))
                        shared = list(set(shared + common))
                    if len(shared) >= 3:
                        art_fi.setdefault('related_articles', []).append(candidate)
                        aligned_dict.setdefault(art_fi['article_id'], []).append(candidate['article_id'])
                elif delta >= 30:
                    break
        if 'related_articles' in art_fi:
            aligned_articles[art_fi['article_id']] = art_fi
    return aligned_articles, aligned_dict
def write_articles_to_file(path):
    """
    Read a JSON file of parallel fi/sv articles and dump their raw text to
    one file per (date, language) under data/yle/raw_text/.

    Articles sharing a date are concatenated into the same file, each
    prefixed with a ``||Article_id:N||`` header (N is the 1-based index).
    Assumes data['fi'] and data['sv'] are parallel lists of equal length.

    :param path: path to the input JSON file.
    """
    # Bug fix: the input file handle was opened but never closed.
    with open(path, "r") as fp:
        data = json.load(fp)
    languages = list(data.keys())
    text_data = {lang: {} for lang in languages}
    art_count = len(data['fi'])
    for i in range(art_count):
        print("Art count: ", i)
        date = data['fi'][i]['date']
        date = date.__str__()
        header = "||Article_id:" + str(i + 1) + "||"
        text_data_fi = data['fi'][i]['content']
        text_data_sv = data['sv'][i]['content']
        if date in text_data['fi']:
            text_data['fi'][date] += "\n" + header + text_data_fi
            text_data['sv'][date] += "\n" + header + text_data_sv
        else:
            text_data['fi'][date] = header + text_data_fi
            text_data['sv'][date] = header + text_data_sv
    # write articles to text files
    dates = text_data['fi'].keys()
    parent_dir = "data/yle/raw_text/"
    for dat in dates:
        print("Date: ", dat)
        for lang in languages:
            fname = parent_dir + dat + "_" + lang + ".txt"
            # Context manager guarantees the output handle is closed.
            with open(fname, 'w') as fp:
                fp.write(text_data[lang][dat])
            print("Saved file as: ", fname)
    print("Done writing all articles as raw text!")
def write_articles_to_file2(articles):
    """
    Dump raw article text to one file per (date, language) under
    ../data/yle/raw_text2/.

    Each article is prefixed with a ``||Article_id:<id>||`` header and
    articles sharing a publication date are concatenated into one file.

    :param articles: dict mapping language code -> list of article dicts
        with 'article_id', 'date' and 'content' keys.
    """
    languages = list(articles.keys())
    text_data = {lang: {} for lang in languages}
    for lang in languages:
        art_count = len(articles[lang])
        for i in range(art_count):
            print("Art count -", lang, ":", i)
            article_id = articles[lang][i]['article_id']
            date = articles[lang][i]['date'].__str__()
            header = "||Article_id:" + article_id + "||"
            article_text_data = articles[lang][i]['content']
            if date in text_data[lang]:
                text_data[lang][date] += "\n" + header + article_text_data
            else:
                text_data[lang][date] = header + article_text_data
        # write articles to text files
        dates = text_data[lang].keys()
        parent_dir = "../data/yle/raw_text2/"
        for dat in dates:
            print("Date: ", dat)
            fname = parent_dir + dat + "_" + lang + ".txt"
            # Bug fix: dropped the redundant fp.close() -- the with
            # statement already closes the file.
            with open(fname, 'w') as fp:
                fp.write(text_data[lang][dat])
            print("Saved file as: ", fname)
    print("Done writing all articles as raw text!")
def write_aligned_articles_to_file(aligned_data, out_dir):
    """
    Write aligned Finnish articles and their related Swedish articles as
    raw text files under ../data/yle/<out_dir>/.

    Keys of aligned_data are Finnish article ids; each value is the article
    dict carrying a 'related_articles' list of Swedish article dicts.
    Output files are one per date key (with a numeric suffix occasionally
    appended to split large batches) and per language.
    """
    languages = ['fi', 'sv']
    # Separate running counters drive the batch suffixes for each language.
    art_count_fi = 0
    art_count_sv = 0
    text_data = {lang: {} for lang in languages}
    for art_id in aligned_data:
        #print("FI article id:", art_id)
        date = aligned_data[art_id]['date'].__str__()
        # NOTE(review): the suffix is only appended when the running count is
        # an exact multiple of 10, so e.g. articles 11..19 still land in the
        # unsuffixed date bucket -- confirm this batching is intended.
        if art_count_fi%10 == 0 and art_count_fi/10 > 0:
            date_count_fi = int(art_count_fi/10)
            date += "-" + str(date_count_fi)
        print("FI Date:", date)
        header = "||Article_id:" + str(art_id)+"||"
        article_text_data = aligned_data[art_id]['content']
        if date in text_data['fi'].keys():
            text_data['fi'][date] += "\n" + header + article_text_data
        else:
            text_data['fi'][date] = header + article_text_data
        art_count_fi += 1
        related_articles = aligned_data[art_id]['related_articles']
        for related_art in related_articles:
            art_id_rel = related_art['article_id']
            #print("SV article id:", art_id_rel)
            date_rel = related_art['date'].__str__()
            # Same batching rule as the Finnish branch above.
            if art_count_sv % 10 == 0 and int(art_count_sv/10) > 0:
                date_count_sv = int(art_count_sv/10)
                date_rel += "-" + str(date_count_sv)
            print("SV Date: ", date_rel)
            header_rel = "||Article_id:" + str(art_id_rel) + "||"
            article_text_data_rel = related_art['content']
            if date_rel in text_data['sv'].keys():
                text_data['sv'][date_rel] += "\n" + header_rel + article_text_data_rel
            else:
                text_data['sv'][date_rel] = header_rel + article_text_data_rel
            art_count_sv += 1
    # Dump each (date bucket, language) pair to its own text file.
    for lang in languages:
        print("Language: ", lang.upper())
        dates = text_data[lang].keys()
        parent_dir = "../data/yle/"+out_dir+"/"
        for dat in dates:
            print("Date: ", dat)
            fname = parent_dir+dat+"_"+lang+".txt"
            with open(fname, 'w') as fp:
                fp.write(text_data[lang][dat])
                fp.close()
            print("Saved file as: ", fname)
    print("Done writing all articles as raw text!")
def write_aligned_articles_to_file2(articles, valid_ids, out_dir):
    """
    Write the raw text of the aligned articles to ../data/yle/<out_dir>/.

    Output files are named ``<date>-<batch>_<lang>.txt`` where <batch> is
    the index of the first article in the current batch of 20; each article
    is prefixed with a ``||Article_id:<id>||`` header.

    :param articles: dict mapping 'fi'/'sv' to lists of article dicts.
    :param valid_ids: collection of article ids to include.
    :param out_dir: output sub-directory name under ../data/yle/.
    """
    languages = ['fi', 'sv']
    text_data = {lang: {} for lang in languages}
    for lang in languages:
        art_count = 0
        for art in articles[lang]:
            art_id = art['article_id']
            if art_id not in valid_ids:
                continue
            # The suffix groups articles that share a date into batches of
            # at most 20 per output key.
            date_str = art['date'].__str__() + "-" + str(art_count - (art_count % 20))
            print("Date -", lang, ":", date_str)
            header = "||Article_id:" + str(art_id) + "||"
            article_text_data = art['content']
            if date_str in text_data[lang]:
                text_data[lang][date_str] += "\n" + header + article_text_data
            else:
                text_data[lang][date_str] = header + article_text_data
            art_count += 1
    for lang in languages:
        print("Language: ", lang.upper())
        parent_dir = "../data/yle/" + out_dir + "/"
        for dat in text_data[lang]:
            print("Date: ", dat)
            fname = parent_dir + dat + "_" + lang + ".txt"
            # Bug fix: dropped the redundant fp.close() inside the with block.
            with open(fname, 'w') as fp:
                fp.write(text_data[lang][dat])
            print("Saved file as: ", fname)
    print("Done writing all articles as raw text!")
# --- Script entry: align 2013-2015 articles and save the result ---
path = "../data/yle"
start_year = 2013
end_year = 2015
articles = process_articles(path=path, start_year=start_year, end_year=end_year)
aa = align_articles_one_to_many_monthly(articles)
outfile_pkl = "aligned_articles_"+str(start_year)+"_"+str(end_year)+"_monthly_2.pkl"
# Bug fix: removed the redundant f.close() calls -- the with blocks
# already close their files.
with open(outfile_pkl, "wb") as f:
    pickle.dump(aa, f)
# JSON cannot serialize date objects, so stringify all dates in place first.
for key in aa:
    aa[key]['date'] = aa[key]['date'].__str__()
    for rel in aa[key]['related_articles']:
        rel['date'] = rel['date'].__str__()
outfile = "aligned_articles_"+str(start_year)+"_"+str(end_year)+"_monthly_sv.json"
with open(outfile, 'w') as f:
    json.dump(aa, f)
print("***** Saved aligned articles as", outfile,"*****")
| StarcoderdataPython |
1721750 | <reponame>brechmos-stsci/deleteme<filename>cubeviz/image_viewer.py
# This file contains a sub-class of the glue image viewer with further
# customizations.
import numpy as np
from astropy import units as u
from astropy.coordinates import SkyCoord
from qtpy.QtWidgets import QLabel
from glue.core.message import SettingsChangeMessage
from glue.viewers.image.qt import ImageViewer
from glue.viewers.image.layer_artist import ImageLayerArtist
from glue.viewers.image.state import ImageLayerState
from glue.viewers.image.qt.layer_style_editor import ImageLayerStyleEditor
__all__ = ['CubevizImageViewer']
class CubevizImageLayerState(ImageLayerState):
    """
    Image layer state that can apply an on-the-fly preview transform
    (e.g. smoothing) to the currently displayed slice.
    """

    # Callable applied to each displayed slice; None disables previewing.
    preview_function = None

    def get_sliced_data(self, view=None):
        preview = self.preview_function
        if preview is not None:
            # When previewing, fetch the full slice (no view) and run it
            # through the preview transform.
            return preview(super(CubevizImageLayerState, self).get_sliced_data())
        return super(CubevizImageLayerState, self).get_sliced_data(view=view)
class CubevizImageLayerArtist(ImageLayerArtist):
    # Use the smoothing-aware layer state so previews apply to this layer.
    _layer_state_cls = CubevizImageLayerState
class CubevizImageViewer(ImageViewer):
    """
    Glue ImageViewer customized for CubeViz: adds slice syncing across
    viewers, a live coordinate/value readout in the status bar, and
    on-the-fly smoothing previews.
    """

    tools = ['select:rectangle', 'select:xrange', 'select:yrange',
             'select:circle', 'select:polygon', 'image:contrast_bias']

    def __init__(self, *args, **kwargs):
        super(CubevizImageViewer, self).__init__(*args, **kwargs)
        self. _layer_style_widget_cls[CubevizImageLayerArtist] = ImageLayerStyleEditor
        self._synced_checkbox = None
        self._slice_index = None

        self.is_mouse_over = False  # If mouse cursor is over viewer
        self.hold_coords = False  # Switch to hold current displayed coords
        self._coords_in_degrees = True  # Switch display coords to True=deg or False=Deg/Hr:Min:Sec
        self._coords_format_function = self._format_to_degree_string  # Function to format ra and dec
        self.x_mouse = None  # x position of mouse in pix
        self.y_mouse = None  # y position of mouse in pix

        self.is_smoothing_preview_active = False  # Smoothing preview flag
        self.smoothing_preview_title = ""

        self.is_axes_hidden = False  # True if axes is hidden
        self.axes_title = ""  # Plot title

        self.coord_label = QLabel("")  # Coord display
        self.statusBar().addPermanentWidget(self.coord_label)

        # Connect matplotlib events to event handlers
        self.figure.canvas.mpl_connect('motion_notify_event', self.mouse_move)
        self.figure.canvas.mpl_connect('axes_leave_event', self.mouse_exited)

    def get_data_layer_artist(self, layer=None, layer_state=None):
        """Return a layer artist, using the smoothing-aware class for images."""
        if layer.ndim == 1:
            cls = self._scatter_artist
        else:
            cls = CubevizImageLayerArtist
        return self.get_layer_artist(cls, layer=layer, layer_state=layer_state)

    def update_axes_title(self, title=None):
        """
        Update plot title.
        :param title: str: Plot title
        """
        if title is not None:
            self.axes_title = title
        self.axes.set_title(self.axes_title, color="black")
        self.axes.figure.canvas.draw()
        # Disabled feature:
        #self.statusBar().showMessage(self.axes_title)

    def show_smoothing_title(self):
        """
        Override normal plot title to show smoothing preview title.
        """
        if self.is_axes_hidden:
            if self.is_smoothing_preview_active:
                # With hidden axes the title is drawn at the figure level,
                # highlighted with a red box.
                st = self.figure.suptitle(self.smoothing_preview_title, color="black")
                st.set_bbox(dict(facecolor='red', edgecolor='red'))
                self.axes.set_title("", color="black")
        else:
            if self.is_smoothing_preview_active:
                self.axes.set_title(self.smoothing_preview_title, color="r")

    def hide_smoothing_title(self):
        """Clear the figure-level smoothing preview title."""
        self.figure.suptitle("", color="black")

    def set_smoothing_preview(self, preview_function, preview_title=None):
        """
        Sets up on the fly smoothing and displays smoothing preview title.
        :param preview_function: function: Single-slice smoothing function
        :param preview_title: str: Title displayed when previewing
        """
        self.is_smoothing_preview_active = True
        if preview_title is None:
            self.smoothing_preview_title = "Smoothing Preview"
        else:
            self.smoothing_preview_title = preview_title
        self.show_smoothing_title()
        # Install the preview function on every smoothing-aware layer.
        for layer in self.layers:
            if isinstance(layer, CubevizImageLayerArtist):
                layer.state.preview_function = preview_function
        # Invalidate the composite image cache so the preview is rendered.
        self.axes._composite_image.invalidate_cache()
        self.axes.figure.canvas.draw()

    def end_smoothing_preview(self):
        """
        Ends on the fly smoothing.
        Warning: A change of combo index should always happen
        after calling this function!
        """
        self.is_smoothing_preview_active = False
        self.hide_smoothing_title()
        self.update_axes_title()
        self.smoothing_preview_title = "Smoothing Preview"
        for layer in self.layers:
            if isinstance(layer, CubevizImageLayerArtist):
                layer.state.preview_function = None
        self.axes._composite_image.invalidate_cache()
        self.axes.figure.canvas.draw()

    def toggle_hidden_axes(self, is_axes_hidden):
        """
        Operations to execute when axes is hidden/shown.
        :param is_axes_hidden: bool: True if axes is now hidden
        """
        self.is_axes_hidden = is_axes_hidden

        # Plot title operations: re-place the preview title for the new
        # axes visibility, or restore the regular title.
        if self.is_smoothing_preview_active:
            self.hide_smoothing_title()
            self.show_smoothing_title()
        else:
            self.update_axes_title()

    def _synced_checkbox_callback(self, event):
        # When sync is enabled, broadcast a settings change and jump this
        # viewer to the shared synced slice index.
        if self._synced_checkbox.isChecked():
            msg = SettingsChangeMessage(self, [self])
            self.parent().tab_widget.session.hub.broadcast(msg)
            self.update_slice_index(self.parent().tab_widget.synced_index)

    def assign_synced_checkbox(self, checkbox):
        """Attach the 'synced' checkbox widget and listen for its changes."""
        self._synced_checkbox = checkbox
        self._synced_checkbox.stateChanged.connect(self._synced_checkbox_callback)

    def update_slice_index(self, index):
        """Move this viewer to the given wavelength slice index."""
        self._slice_index = index
        z, y, x = self.state.slices
        self.state.slices = (self._slice_index, y, x)

    @property
    def synced(self):
        # Whether this viewer follows the shared slice index.
        return self._synced_checkbox.isChecked()

    @synced.setter
    def synced(self, value):
        self._synced_checkbox.setChecked(value)

    @property
    def slice_index(self):
        # Currently displayed wavelength slice index (may be None).
        return self._slice_index

    def get_coords(self):
        """
        Returns coord display string.
        """
        if not self.is_mouse_over:
            return None
        return self.coord_label.text()

    def toggle_hold_coords(self):
        """
        Switch hold_coords state
        """
        if self.hold_coords:
            self.hold_coords = False
        else:
            self.statusBar().showMessage("Frozen Coordinates")
            self.hold_coords = True

    def toggle_coords_in_degrees(self):
        """
        Switch coords_in_degrees state
        """
        if self._coords_in_degrees:
            self._coords_in_degrees = False
            self._coords_format_function = self._format_to_hex_string
        else:
            self._coords_in_degrees = True
            self._coords_format_function = self._format_to_degree_string

    def clear_coords(self):
        """
        Reset coord display and mouse tracking variables.
        If hold_coords is active (True), make changes
        only to indicate that the mouse is no longer over viewer.
        """
        self.is_mouse_over = False
        if self.hold_coords:
            return
        self.x_mouse = None
        self.y_mouse = None
        self.coord_label.setText('')

    def _format_to_degree_string(self, ra, dec):
        """
        Format RA and Dec in degree format. If wavelength
        is available add it to the output string.
        :return: string
        """
        coord_string = "({:0>8.4f}, {:0>8.4f}".format(ra, dec)

        # Check if wavelength is available
        if self.slice_index is not None and self.parent().tab_widget._wavelengths is not None:
            wave = self.parent().tab_widget._wavelengths[self.slice_index]
            coord_string += ", {:1.2e}m)".format(wave)
        else:
            coord_string += ")"

        return coord_string

    def _format_to_hex_string(self, ra, dec):
        """
        Format RA and Dec in D:M:S and H:M:S formats respectively. If wavelength
        is available add it to the output string.
        :return: string
        """
        c = SkyCoord(ra=ra * u.degree, dec=dec * u.degree)
        coord_string = "("
        coord_string += "{0:0>2.0f}h:{1:0>2.0f}m:{2:0>2.0f}s".format(*c.ra.hms)
        coord_string += " "
        coord_string += "{0:0>3.0f}d:{1:0>2.0f}m:{2:0>2.0f}s".format(*c.dec.dms)

        # Check if wavelength is available
        if self.slice_index is not None and self.parent().tab_widget._wavelengths is not None:
            wave = self.parent().tab_widget._wavelengths[self.slice_index]
            coord_string += " {:1.2e}m)".format(wave)
        else:
            coord_string += ")"

        return coord_string

    def mouse_move(self, event):
        """
        Event handler for matplotlib motion_notify_event.
        Updates coord display and vars.
        :param event: matplotlib event.
        """
        # Check if mouse is in widget but not on plot
        if not event.inaxes:
            self.clear_coords()
            return
        self.is_mouse_over = True

        # If hold_coords is active, return
        if self.hold_coords:
            return

        # Get x and y of the pixel under the mouse
        x, y = [int(event.xdata + 0.5), int(event.ydata + 0.5)]
        self.x_mouse, self.y_mouse = [x, y]

        # Create coord display string
        if self._slice_index is not None:
            string = "({:1.0f}, {:1.0f}, {:1.0f})".format(x, y, self._slice_index)
        else:
            string = "({:1.0f}, {:1.0f})".format(x, y)

        # If viewer has a layer.
        if len(self.state.layers) > 0:
            # Get array arr that contains the image values
            # Default layer is layer at index 0.
            arr = self.state.layers[0].get_sliced_data()
            if 0 <= y < arr.shape[0] and 0 <= x < arr.shape[1]:
                # if x and y are in bounds. Note: x and y are swapped in array.
                # get value and check if wcs is obtainable
                # WCS:
                if len(self.figure.axes) > 0:
                    wcs = self.figure.axes[0].wcs.celestial
                    if wcs is not None:
                        # Check the number of axes in the WCS and add to string
                        ra = dec = None
                        if wcs.naxis == 3 and self.slice_index is not None:
                            ra, dec, wave = wcs.wcs_pix2world([[x, y, self._slice_index]], 0)[0]
                        elif wcs.naxis == 2:
                            ra, dec = wcs.wcs_pix2world([[x, y]], 0)[0]
                        if ra is not None and dec is not None:
                            string = string + " " + self._coords_format_function(ra, dec)
                # Pixel Value:
                v = arr[y][x]
                string = "{:1.4f} ".format(v) + string
        # Add a gap to string and add to viewer.
        string += " "
        self.coord_label.setText(string)
        return

    def mouse_exited(self, event):
        """
        Event handler for matplotlib axes_leave_event.
        Clears coord display and vars.
        :param event: matplotlib event
        """
        self.clear_coords()
        return

    def leaveEvent(self, event):
        """
        Event handler for Qt widget leave events.
        Clears coord display and vars.
        Overrides default.
        :param event: QEvent
        """
        self.clear_coords()
        return
| StarcoderdataPython |
124186 | from pathlib import Path
import configparser
from logger import logger
def change_config(**options):
    """
    Take arbitrary keyword arguments and write their values into the
    config file (values are stored as strings).
    """
    # overwrite values
    for k, v in options.items():
        # Bug fix: RawConfigParser.set raises TypeError for non-string
        # values (this module calls change_config(raspberry_port=3546)),
        # so coerce to str first.
        config.set('root', k, str(v))
    # write back, but without the mandatory header
    config_string = '\n'.join(['{}={}'.format(k, v)
                               for (k, v) in config['root'].items()])
    with open(str(config_path), 'w') as f:
        f.write(config_string)
        f.write('\n')
def get_config(key):
    """Return the raw string value stored in the config for *key*."""
    return config['root'][key]
# load config file for both server.py and fader.py
DEFAULT_RASPBERRY_PORT = 3546
config = None
config_path = None
try:
    config_path = Path(Path(__file__).resolve().parent,
                       Path('../config')).resolve()
    with open(str(config_path), 'r') as f:
        # configparser requires a section header, so prepend a dummy one.
        config = configparser.RawConfigParser()
        config.read_string('[root]\n' + f.read())

    if 'raspberry_port' not in config['root']:
        # for the port I just went with some random unassigned port from this list:
        # https://www.iana.org/assignments/service-names-port-numbers/service-names-port-numbers.xhtml?search=Unassigned
        change_config(raspberry_port=DEFAULT_RASPBERRY_PORT)
    if 'raspberry_ip' not in config['root']:
        # 0.0.0.0 works if you send requests from another local machine to the raspberry
        # 'localhost' would only allow requests from within the raspberry
        change_config(raspberry_ip='0.0.0.0')
except FileNotFoundError:
    # Bug fix: the original referenced the undefined name raspberry_port
    # here, raising NameError instead of logging the intended warning.
    logger.warning(
        'config file could not be found! using port {}'.format(DEFAULT_RASPBERRY_PORT))
| StarcoderdataPython |
1701602 | <filename>space_invader.py
import pygame
import os
import time
import random
import ctypes
# Initialize pygame and its font subsystem before any rendering calls.
pygame.font.init()
pygame.init()
# Windows-only: opt out of DPI virtualization so the fullscreen surface
# matches the real display resolution. NOTE(review): ctypes.windll raises
# AttributeError on non-Windows platforms -- confirm Windows is the only target.
ctypes.windll.user32.SetProcessDPIAware()
WIDTH, HEIGHT = pygame.display.Info().current_w, pygame.display.Info().current_h
# Enemy sprites are scaled to this fraction of the screen dimensions.
ENEMY_SHIP_SIZE_TRANSFORM = 0.08
WIN = pygame.display.set_mode((WIDTH, HEIGHT), pygame.FULLSCREEN)
pygame.display.set_caption("Space Shooter")

# Load images (enemy ships are scaled relative to the screen size)
RED_SPACE_SHIP = pygame.transform.scale(pygame.image.load(os.path.join("assets", "pixel_ship_red_small.png")), (int(
    WIDTH * ENEMY_SHIP_SIZE_TRANSFORM), int(HEIGHT * ENEMY_SHIP_SIZE_TRANSFORM)))
GREEN_SPACE_SHIP = pygame.transform.scale(pygame.image.load(os.path.join("assets", "pixel_ship_green_small.png")), (int(
    WIDTH * ENEMY_SHIP_SIZE_TRANSFORM), int(HEIGHT * ENEMY_SHIP_SIZE_TRANSFORM)))
BLUE_SPACE_SHIP = pygame.transform.scale(pygame.image.load(os.path.join("assets", "pixel_ship_blue_small.png")), (int(
    WIDTH * ENEMY_SHIP_SIZE_TRANSFORM), int(HEIGHT * ENEMY_SHIP_SIZE_TRANSFORM)))

# Player ship
YELLOW_SPACE_SHIP = pygame.image.load(
    os.path.join("assets", "pixel_ship_yellow.png"))

# Lasers
RED_LASER = pygame.image.load(os.path.join("assets", "pixel_laser_red.png"))
GREEN_LASER = pygame.image.load(
    os.path.join("assets", "pixel_laser_green.png"))
BLUE_LASER = pygame.image.load(os.path.join("assets", "pixel_laser_blue.png"))
YELLOW_LASER = pygame.image.load(
    os.path.join("assets", "pixel_laser_yellow.png"))

# Background
BG = pygame.transform.scale(pygame.image.load(
    os.path.join("assets", "background-black.png")), (WIDTH, HEIGHT))
class Laser:
    """A single laser bolt travelling vertically across the screen."""

    def __init__(self, x, y, img):
        self.x = x
        self.y = y
        self.img = img
        self.mask = pygame.mask.from_surface(self.img)

    def draw(self, window):
        # Blit the laser sprite at its current position.
        window.blit(self.img, (self.x, self.y))

    def move(self, vel):
        # Positive velocity moves the laser down the screen.
        self.y += vel

    def off_screen(self, height):
        # True once the laser has left the [0, height] vertical band.
        return self.y > height or self.y < 0

    def collision(self, obj):
        # Delegate to the module-level pixel-mask collision helper.
        return collide(self, obj)
class Ship:
    """
    Base class for all ships: tracks position, health and fired lasers.

    COOLDOWN is the number of frames that must elapse between shots.
    """
    COOLDOWN = 15

    def __init__(self, x, y, health=100):
        self.x = x
        self.y = y
        self.health = health
        self.ship_img = None
        self.laser_img = None
        self.lasers = []
        self.cool_down_counter = 0

    def draw(self, window):
        """Draw the ship and all of its in-flight lasers."""
        window.blit(self.ship_img, (self.x, self.y))
        for laser in self.lasers:
            laser.draw(window)

    def move_lasers(self, vel, obj):
        """Advance all lasers, damaging *obj* on hit and pruning dead lasers."""
        self.cooldown()
        # Bug fix: iterate over a copy -- removing from the list being
        # iterated made the loop skip the laser following each removal.
        for laser in self.lasers[:]:
            laser.move(vel)
            if laser.off_screen(HEIGHT):
                self.lasers.remove(laser)
            elif laser.collision(obj):
                obj.health -= 10
                self.lasers.remove(laser)

    def cooldown(self):
        """Tick the shot cooldown timer; reset it once COOLDOWN is reached."""
        if self.cool_down_counter >= self.COOLDOWN:
            self.cool_down_counter = 0
        elif self.cool_down_counter > 0:
            self.cool_down_counter += 1

    def shoot(self):
        """Fire a laser from the ship's position if the cooldown allows it."""
        if self.cool_down_counter == 0:
            laser = Laser(self.x, self.y, self.laser_img)
            self.lasers.append(laser)
            self.cool_down_counter = 1

    def get_width(self):
        return self.ship_img.get_width()

    def get_height(self):
        return self.ship_img.get_height()
class Player(Ship):
    """The player's ship: yellow sprite, health bar, and laser scoring."""

    def __init__(self, x, y, health=100):
        super().__init__(x, y, health)
        self.ship_img = YELLOW_SPACE_SHIP
        self.laser_img = YELLOW_LASER
        self.mask = pygame.mask.from_surface(self.ship_img)
        self.max_health = health

    def move_lasers(self, vel, objs):
        """
        Advance the player's lasers, removing any enemy in *objs* they hit.

        Returns the number of enemies destroyed this frame.
        """
        self.cooldown()
        enem_killed = 0
        # Bug fix: iterate over copies of both lists -- the original removed
        # elements from the lists while iterating them, which skipped the
        # laser/enemy following each removal.
        for laser in self.lasers[:]:
            laser.move(vel)
            if laser.off_screen(HEIGHT):
                self.lasers.remove(laser)
            else:
                for obj in objs[:]:
                    if laser.collision(obj):
                        if obj in objs:
                            objs.remove(obj)
                            enem_killed += 1
                        if laser in self.lasers:
                            self.lasers.remove(laser)
        return enem_killed

    def healthbar(self, window):
        """Draw a red (max) bar overlaid by a green (current) health bar."""
        pygame.draw.rect(window, (255, 0, 0), (self.x, self.y +
                                               self.ship_img.get_height() + 10, self.ship_img.get_width(), 10))
        pygame.draw.rect(window, (0, 255, 0), (self.x, self.y + self.ship_img.get_height() +
                                               10, self.ship_img.get_width() * (self.health/self.max_health), 10))

    def draw(self, window):
        """Draw the ship, its lasers, and the health bar."""
        super().draw(window)
        self.healthbar(window)
class Enemy(Ship):
    """An enemy ship; *color* selects the sprite/laser pair from COLOR_MAP."""

    # Maps a color name to its (ship sprite, laser sprite) pair.
    COLOR_MAP = {
        "red": (RED_SPACE_SHIP, RED_LASER),
        "green": (GREEN_SPACE_SHIP, GREEN_LASER),
        "blue": (BLUE_SPACE_SHIP, BLUE_LASER)
    }

    def __init__(self, x, y, color, health=100):
        super().__init__(x, y, health)
        self.ship_img, self.laser_img = self.COLOR_MAP[color]
        self.mask = pygame.mask.from_surface(self.ship_img)

    def move(self, vel):
        # Enemies only ever move straight down the screen.
        self.y += vel

    def shoot(self):
        if self.cool_down_counter == 0:
            # +27 shifts the laser horizontally -- presumably tuned to center
            # it on the enemy sprite; TODO confirm against the sprite width.
            laser = Laser(self.x + 27, self.y, self.laser_img)
            self.lasers.append(laser)
            self.cool_down_counter = 1
def collide(obj1, obj2):
    """Pixel-perfect collision test between two sprites using their masks."""
    # Offset of obj2 relative to obj1, truncated to whole pixels.
    dx = int(obj2.x - obj1.x)
    dy = int(obj2.y - obj1.y)
    return obj1.mask.overlap(obj2.mask, (dx, dy)) is not None
def menu_display_text(text, y_offset):
    """Render *text* horizontally centered, *y_offset* px above screen center."""
    font = pygame.font.SysFont("comicsans", 70)
    label = font.render(text, 1, (255, 255, 255))
    position = (WIDTH/2 - label.get_width()/2,
                (HEIGHT/2 - label.get_height()/2) - y_offset)
    WIN.blit(label, position)
def main_menu():
    """Show the start menu until the player starts a game or quits."""
    running = True
    while running:
        WIN.blit(BG, (0, 0))
        menu_display_text("Press Enter to begin the game..", 150)
        menu_display_text("Or", 75)
        menu_display_text("Press Esc to Exit", 0)
        pygame.display.update()
        for event in pygame.event.get():
            is_keydown = event.type == pygame.KEYDOWN
            if event.type == pygame.QUIT or (is_keydown and event.key == pygame.K_ESCAPE):
                running = False
            elif is_keydown and event.key == pygame.K_RETURN:
                main()
    pygame.quit()
def main():
    """Run the main game loop until the player loses or quits."""
    run = True
    FPS = 60
    level = 0
    lives = 5  # NOTE(review): currently unused -- the related code below is commented out
    main_font = pygame.font.SysFont("comicsans", 40)
    lost_font = pygame.font.SysFont("comicsans", 75)
    level_update_font = pygame.font.SysFont("comicsans", 50)
    enemies = []
    wave_length = 5
    enemy_vel = 2
    enemies_to_kill = 0
    enemies_killed = 0
    player_vel = 20
    laser_vel = 15
    player = Player(WIDTH*9/20, HEIGHT-100)
    clock = pygame.time.Clock()
    lost = False
    killed_less = False
    lost_count = 0

    def level_update():
        """Show the inter-level splash screen for 3 seconds."""
        WIN.blit(BG, (0, 0))
        level_label = lost_font.render(f"Level: {level}", 1, (255, 69, 0))
        enemies_in_wave_label = level_update_font.render(
            f"Enemy ships in Level: {wave_length}", 1, (255, 255, 255))
        enemy_to_kill_label = level_update_font.render(
            f"Enemy ships to destroy: {enemies_to_kill}", 1, (255, 255, 255))
        WIN.blit(level_label, (WIDTH/2 - level_label.get_width()/2, HEIGHT/2 - 60))
        WIN.blit(enemies_in_wave_label, (WIDTH/2 -
                                         enemies_in_wave_label.get_width()/2, HEIGHT/2))
        WIN.blit(enemy_to_kill_label, (WIDTH/2 -
                                       enemy_to_kill_label.get_width()/2, HEIGHT/2 + 60))
        pygame.display.update()
        time.sleep(3)

    def redraw_window():
        """Redraw the full frame: HUD, enemies, player, and the loss screen."""
        WIN.blit(BG, (0, 0))
        # draw text
        lives_label = main_font.render(
            f"Enemy ships destroyed: {enemies_killed}", 1, (255, 255, 255))
        level_label = main_font.render(f"Level: {level}", 1, (255, 255, 255))
        WIN.blit(lives_label, (10, 10))
        WIN.blit(level_label, (WIDTH - level_label.get_width() - 10, 10))
        for enemy in enemies:
            enemy.draw(WIN)
        player.draw(WIN)
        if lost:
            lost_label = lost_font.render("You Lost!", 1, (255, 0, 0))
            needed_label = level_update_font.render(
                f"You needed to destroy {enemies_to_kill} Enemy ships", 1, (255, 255, 255))
            base_taken_label = level_update_font.render(
                "Enemy ships have taken the base!", 1, (255, 20, 20))
            WIN.blit(lost_label, (WIDTH/2 - lost_label.get_width() /
                                  2, HEIGHT/2 - lost_label.get_height() - 20))
            WIN.blit(needed_label, (WIDTH/2 -
                                    needed_label.get_width() / 2, HEIGHT/2))
            WIN.blit(base_taken_label, (WIDTH/2 - base_taken_label.get_width() /
                                        2, HEIGHT/2 + needed_label.get_height() + 20))
        pygame.display.update()

    while run:
        clock.tick(FPS)
        redraw_window()
        # Loss conditions: quota missed or health depleted.
        if killed_less or player.health <= 0:
            lost = True
            lost_count += 1
        if lost:
            # Keep the loss screen up for ~2 seconds before exiting.
            if lost_count > FPS * 2:
                run = False
            else:
                continue
        if len(enemies) == 0:
            # Wave cleared: check the kill quota, then scale up difficulty
            # and spawn the next wave above the visible screen.
            if enemies_killed < enemies_to_kill:
                killed_less = True
                continue
            level += 1
            enemy_vel += 1 if level < player_vel/2 else player_vel/2
            player_vel += 5 if level % 3 == 0 else 0
            Ship.COOLDOWN -= 2 if Ship.COOLDOWN > 2 else 2
            wave_length += (3 * level)
            enemies_to_kill = int(wave_length * 0.7)
            enemies_killed = 0
            level_update()
            for _ in range(wave_length):
                enemy = Enemy(random.randrange(
                    50, WIDTH-100), random.randrange(-1500, -100), random.choice(["red", "blue", "green"]))
                enemies.append(enemy)
        for event in pygame.event.get():
            if event.type == pygame.QUIT or (event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE):
                quit()
        # WASD movement, clamped to the screen bounds.
        keys = pygame.key.get_pressed()
        if keys[pygame.K_a] and (player.x - player_vel > 0):  # left
            player.x -= player_vel
        if keys[pygame.K_d] and (player.x + player_vel + player.get_width() < WIDTH):  # right
            player.x += player_vel
        if keys[pygame.K_w] and (player.y - player_vel > 0):  # up
            player.y -= player_vel
        if keys[pygame.K_s] and (player.y + player_vel + player.get_height() + 30 < HEIGHT):  # down
            player.y += player_vel
        if keys[pygame.K_SPACE]:
            player.shoot()
        for enemy in enemies[:]:
            enemy.move(enemy_vel)
            enemy.move_lasers(laser_vel, player)
            # Roughly one shot per enemy every two seconds on average.
            if random.randrange(0, 2*FPS) == 1:
                enemy.shoot()
            if collide(enemy, player):
                player.health -= 10
                enemies.remove(enemy)
            elif enemy.y + enemy.get_height() > HEIGHT:
                # lives -= 1
                enemies.remove(enemy)
        enemies_killed += player.move_lasers(-laser_vel, enemies)
# Guard the entry point so importing this module does not start the game.
if __name__ == "__main__":
    main_menu()
| StarcoderdataPython |
3308475 | import os
import torch
from pathlib import Path
from typing import Dict, List
from dotenv import load_dotenv
from transformers import AutoModelForCausalLM, AutoTokenizer
from rome import repr_tools
from .layer_stats import layer_stats
from .rome_hparams import ROMEHyperParams
# Cache variables
# Maps (model_name, layer_name) -> precomputed inverse covariance tensor.
inv_mom2_cache = {}

# Load directory configurations
load_dotenv()
# NOTE(review): os.getenv returns None when STATS_DIR is unset, which makes
# Path() raise TypeError at import time -- confirm the env var is always set.
STATS_DIR = Path(os.getenv("STATS_DIR"))
def get_inv_cov(
    model: AutoModelForCausalLM,
    tok: AutoTokenizer,
    layer_name: str,
    mom2_dataset: str,
    mom2_n_samples: str,
    mom2_dtype: str,
) -> torch.Tensor:
    """
    Retrieves covariance statistics, then computes the algebraic inverse.
    Caches result for future use.
    """

    global inv_mom2_cache

    # Cache key: model identifier (slashes flattened) plus the layer name.
    model_name = model.config._name_or_path.replace("/", "_")
    key = (model_name, layer_name)

    if key not in inv_mom2_cache:
        print(
            f"Retrieving inverse covariance statistics for {model_name} @ {layer_name}. "
            f"The result will be cached to avoid repetitive computation."
        )
        # Collect the second moment of the layer's inputs over the chosen
        # dataset; layer_stats handles its own on-disk caching under STATS_DIR.
        stat = layer_stats(
            model,
            tok,
            layer_name,
            STATS_DIR,
            mom2_dataset,
            to_collect=["mom2"],
            sample_size=mom2_n_samples,
            precision=mom2_dtype,
        )
        # Invert on GPU. NOTE(review): the "cuda" device is hard-coded here.
        inv_mom2_cache[key] = torch.inverse(
            stat.mom2.moment().to("cuda")
        ).float()  # Cast back to float32

    return inv_mom2_cache[key]
def compute_u(
    model: AutoModelForCausalLM,
    tok: AutoTokenizer,
    request: Dict,
    hparams: ROMEHyperParams,
    layer: int,
    context_templates: List[str],
) -> torch.Tensor:
    """
    Computes the right vector used in constructing the rank-1 update matrix.

    :param request: edit request with at least 'prompt' and 'subject' keys.
    :param layer: index of the transformer layer being rewritten.
    :param context_templates: prompt templates used to average the
        subject-token representation over several contexts.
    :return: unit-norm key vector u (1-D tensor).
    """

    print("Computing left vector (u)...")

    # Compute projection token
    word_repr_args = dict(
        model=model,
        tok=tok,
        layer=layer,
        module_template=hparams.rewrite_module_tmp,
        track="in",
    )
    if "subject_" in hparams.fact_token and hparams.fact_token.index("subject_") == 0:
        # fact_token like "subject_last": average the subject sub-token
        # representation across all context templates.
        word = request["subject"]
        print(f"Selected u projection object {word}")
        cur_repr = torch.stack(
            [  # TODO batch this to improve performance
                repr_tools.get_repr_at_word_token(
                    context_template=templ.format(request["prompt"]),
                    word=word,
                    subtoken=hparams.fact_token[len("subject_") :],
                    **word_repr_args,
                )
                for templ in context_templates
            ],
            dim=0,
        ).mean(0)
    elif hparams.fact_token == "last":
        # Heuristic to choose last word. Not a huge deal if there's a minor
        # edge case (e.g. multi-token word) because the function below will
        # take the last token.
        cur_repr = repr_tools.get_repr_at_idxs(
            context=request["prompt"].format(request["subject"]),
            idxs=[-1],
            **word_repr_args,
        )
        print("Selected u projection token with last token")
    else:
        raise ValueError(f"fact_token={hparams.fact_token} not recognized")

    # Apply inverse second moment adjustment
    u = cur_repr
    if hparams.mom2_adjustment:
        # Whiten the representation by the inverse input covariance of the
        # rewritten layer (see the ROME paper's key-vector derivation).
        u = get_inv_cov(
            model,
            tok,
            hparams.rewrite_module_tmp.format(layer),
            hparams.mom2_dataset,
            hparams.mom2_n_samples,
            hparams.mom2_dtype,
        ) @ u.float().unsqueeze(1)
        u = u.squeeze()

    # Normalize to unit length.
    return u / u.norm()
| StarcoderdataPython |
1649772 | <filename>test_autoarray/structures/test_kernel_2d.py
from os import path
import numpy as np
import pytest
from astropy import units
from astropy.modeling import functional_models
from astropy.coordinates import Angle
import autoarray as aa
from autoarray import exc
# Directory holding the FITS fixtures used by the from_fits tests.
test_data_dir = path.join("{}".format(path.dirname(path.realpath(__file__))), "files")
class TestAPI:
    """Construction-API tests for the Kernel2D factory methods."""

    # NOTE(review): named "manual" but this exercises Kernel2D.ones.
    def test__manual__input_kernel__all_attributes_correct_including_data_inheritance(
        self,
    ):
        kernel = aa.Kernel2D.ones(
            shape_native=(3, 3), pixel_scales=1.0, normalize=False
        )

        assert kernel.shape_native == (3, 3)
        assert (kernel.native == np.ones((3, 3))).all()
        assert kernel.pixel_scales == (1.0, 1.0)
        assert kernel.origin == (0.0, 0.0)

        kernel = aa.Kernel2D.ones(
            shape_native=(4, 3), pixel_scales=1.0, normalize=False
        )

        assert kernel.shape_native == (4, 3)
        assert (kernel.native == np.ones((4, 3))).all()
        assert kernel.pixel_scales == (1.0, 1.0)
        assert kernel.origin == (0.0, 0.0)

    def test__full_kernel_is_set_of_full_values(self):
        kernel = aa.Kernel2D.full(fill_value=3.0, shape_native=(3, 3), pixel_scales=1.0)

        assert kernel.shape_native == (3, 3)
        assert (kernel.native == 3.0 * np.ones((3, 3))).all()
        assert kernel.pixel_scales == (1.0, 1.0)
        assert kernel.origin == (0.0, 0.0)

    def test__ones_zeros__kernel_is_set_of_full_values(self):
        kernel = aa.Kernel2D.ones(shape_native=(3, 3), pixel_scales=1.0)

        assert kernel.shape_native == (3, 3)
        assert (kernel.native == np.ones((3, 3))).all()
        assert kernel.pixel_scales == (1.0, 1.0)
        assert kernel.origin == (0.0, 0.0)

        kernel = aa.Kernel2D.zeros(shape_native=(3, 3), pixel_scales=1.0)

        assert kernel.shape_native == (3, 3)
        assert (kernel.native == np.zeros((3, 3))).all()
        assert kernel.pixel_scales == (1.0, 1.0)
        assert kernel.origin == (0.0, 0.0)

    def test__from_fits__input_kernel_3x3__all_attributes_correct_including_data_inheritance(
        self,
    ):
        kernel = aa.Kernel2D.from_fits(
            file_path=path.join(test_data_dir, "3x2_ones.fits"), hdu=0, pixel_scales=1.0
        )

        assert (kernel.native == np.ones((3, 2))).all()

        kernel = aa.Kernel2D.from_fits(
            file_path=path.join(test_data_dir, "3x2_twos.fits"), hdu=0, pixel_scales=1.0
        )

        assert (kernel.native == 2.0 * np.ones((3, 2))).all()

    def test__no_blur__correct_kernel(self):
        # no_blur should be a 1x1 identity kernel at the given pixel scale.
        kernel = aa.Kernel2D.no_blur(pixel_scales=1.0)

        assert (kernel.native == np.array([[1.0]])).all()
        assert kernel.pixel_scales == (1.0, 1.0)

        kernel = aa.Kernel2D.no_blur(pixel_scales=2.0)

        assert (kernel.native == np.array([[1.0]])).all()
        assert kernel.pixel_scales == (2.0, 2.0)
class TestNormalize:
    """Tests for Kernel2D normalization (values summing to 1)."""

    def test__input_is_already_normalized__no_change(self):
        kernel_data = np.ones((3, 3)) / 9.0

        kernel = aa.Kernel2D.manual_native(
            array=kernel_data, pixel_scales=1.0, normalize=True
        )

        assert kernel.native == pytest.approx(kernel_data, 1e-3)

    def test__input_is_above_normalization_so_is_normalized(self):
        kernel_data = np.ones((3, 3))

        kernel = aa.Kernel2D.manual_native(
            array=kernel_data, pixel_scales=1.0, normalize=True
        )

        assert kernel.native == pytest.approx(np.ones((3, 3)) / 9.0, 1e-3)

        # The .normalized property should normalize after the fact too.
        kernel = aa.Kernel2D.manual_native(
            array=kernel_data, pixel_scales=1.0, normalize=False
        )

        kernel = kernel.normalized

        assert kernel.native == pytest.approx(np.ones((3, 3)) / 9.0, 1e-3)

    def test__same_as_above__renomalized_false_does_not_normalize(self):
        kernel_data = np.ones((3, 3))

        kernel = aa.Kernel2D.manual_native(
            array=kernel_data, pixel_scales=1.0, normalize=False
        )

        assert kernel.native == pytest.approx(np.ones((3, 3)), 1e-3)
class TestBinnedUp:
    """Tests Kernel2D.rescaled_with_odd_dimensions_from_rescale_factor, which
    rescales a kernel and pads even dimensions so the result is odd x odd
    (as required for convolution)."""

    def test__kernel_is_even_x_even__rescaled_to_odd_x_odd__no_use_of_dimension_trimming(
        self,
    ):
        # 6x6 halved -> 3x3; the pixel scale doubles so the kernel extent is conserved.
        array_2d = np.ones((6, 6))
        kernel = aa.Kernel2D.manual_native(
            array=array_2d, pixel_scales=1.0, normalize=False
        )
        kernel = kernel.rescaled_with_odd_dimensions_from_rescale_factor(
            rescale_factor=0.5, normalize=True
        )
        assert kernel.pixel_scales == (2.0, 2.0)
        assert (kernel.native == (1.0 / 9.0) * np.ones((3, 3))).all()

        array_2d = np.ones((9, 9))
        kernel = aa.Kernel2D.manual_native(
            array=array_2d, pixel_scales=1.0, normalize=False
        )
        kernel = kernel.rescaled_with_odd_dimensions_from_rescale_factor(
            rescale_factor=0.333333333333333, normalize=True
        )
        assert kernel.pixel_scales == (3.0, 3.0)
        assert (kernel.native == (1.0 / 9.0) * np.ones((3, 3))).all()

        array_2d = np.ones((18, 6))
        kernel = aa.Kernel2D.manual_native(
            array=array_2d, pixel_scales=1.0, normalize=False
        )
        kernel = kernel.rescaled_with_odd_dimensions_from_rescale_factor(
            rescale_factor=0.5, normalize=True
        )
        assert kernel.pixel_scales == (2.0, 2.0)
        assert (kernel.native == (1.0 / 27.0) * np.ones((9, 3))).all()

        array_2d = np.ones((6, 18))
        kernel = aa.Kernel2D.manual_native(
            array=array_2d, pixel_scales=1.0, normalize=False
        )
        kernel = kernel.rescaled_with_odd_dimensions_from_rescale_factor(
            rescale_factor=0.5, normalize=True
        )
        assert kernel.pixel_scales == (2.0, 2.0)
        assert (kernel.native == (1.0 / 27.0) * np.ones((3, 9))).all()

    def test__kernel_is_even_x_even_after_binning_up__resized_to_odd_x_odd_with_shape_plus_one(
        self,
    ):
        # Rescaling would produce an even dimension, so one pixel is added to it and
        # the pixel scale is adjusted accordingly (hence the non-trivial scales below).
        kernel = aa.Kernel2D.ones(
            shape_native=(2, 2), pixel_scales=1.0, normalize=False
        )
        kernel = kernel.rescaled_with_odd_dimensions_from_rescale_factor(
            rescale_factor=2.0, normalize=True
        )
        assert kernel.pixel_scales == (0.4, 0.4)
        assert (kernel.native == (1.0 / 25.0) * np.ones((5, 5))).all()

        kernel = aa.Kernel2D.ones(
            shape_native=(40, 40), pixel_scales=1.0, normalize=False
        )
        kernel = kernel.rescaled_with_odd_dimensions_from_rescale_factor(
            rescale_factor=0.1, normalize=True
        )
        assert kernel.pixel_scales == (8.0, 8.0)
        assert (kernel.native == (1.0 / 25.0) * np.ones((5, 5))).all()

        kernel = aa.Kernel2D.ones(
            shape_native=(2, 4), pixel_scales=1.0, normalize=False
        )
        kernel = kernel.rescaled_with_odd_dimensions_from_rescale_factor(
            rescale_factor=2.0, normalize=True
        )
        assert kernel.pixel_scales[0] == pytest.approx(0.4, 1.0e-4)
        assert kernel.pixel_scales[1] == pytest.approx(0.4444444, 1.0e-4)
        assert (kernel.native == (1.0 / 45.0) * np.ones((5, 9))).all()

        kernel = aa.Kernel2D.ones(
            shape_native=(4, 2), pixel_scales=1.0, normalize=False
        )
        kernel = kernel.rescaled_with_odd_dimensions_from_rescale_factor(
            rescale_factor=2.0, normalize=True
        )
        assert kernel.pixel_scales[0] == pytest.approx(0.4444444, 1.0e-4)
        assert kernel.pixel_scales[1] == pytest.approx(0.4, 1.0e-4)
        assert (kernel.native == (1.0 / 45.0) * np.ones((9, 5))).all()

    def test__kernel_is_odd_and_even_after_binning_up__resized_to_odd_and_odd_with_shape_plus_one(
        self,
    ):
        kernel = aa.Kernel2D.ones(
            shape_native=(6, 4), pixel_scales=1.0, normalize=False
        )
        kernel = kernel.rescaled_with_odd_dimensions_from_rescale_factor(
            rescale_factor=0.5, normalize=True
        )
        assert kernel.pixel_scales == pytest.approx((2.0, 1.3333333333), 1.0e-4)
        assert (kernel.native == (1.0 / 9.0) * np.ones((3, 3))).all()

        kernel = aa.Kernel2D.ones(
            shape_native=(9, 12), pixel_scales=1.0, normalize=False
        )
        kernel = kernel.rescaled_with_odd_dimensions_from_rescale_factor(
            rescale_factor=0.33333333333, normalize=True
        )
        assert kernel.pixel_scales == pytest.approx((3.0, 2.4), 1.0e-4)
        assert (kernel.native == (1.0 / 15.0) * np.ones((3, 5))).all()

        kernel = aa.Kernel2D.ones(
            shape_native=(4, 6), pixel_scales=1.0, normalize=False
        )
        kernel = kernel.rescaled_with_odd_dimensions_from_rescale_factor(
            rescale_factor=0.5, normalize=True
        )
        assert kernel.pixel_scales == pytest.approx((1.33333333333, 2.0), 1.0e-4)
        assert (kernel.native == (1.0 / 9.0) * np.ones((3, 3))).all()

        kernel = aa.Kernel2D.ones(
            shape_native=(12, 9), pixel_scales=1.0, normalize=False
        )
        kernel = kernel.rescaled_with_odd_dimensions_from_rescale_factor(
            rescale_factor=0.33333333333, normalize=True
        )
        assert kernel.pixel_scales == pytest.approx((2.4, 3.0), 1.0e-4)
        assert (kernel.native == (1.0 / 15.0) * np.ones((5, 3))).all()
class TestConvolve:
    """Tests Kernel2D.convolved_array_from_array (2D convolution of an Array2D).

    The six image/kernel combinations previously duplicated the same
    build-convolve-compare boilerplate; it is factored into
    `_assert_convolution` so each test states only its data.
    """

    def _assert_convolution(self, image_2d, kernel_2d, blurred_2d):
        """Convolve `image_2d` with `kernel_2d` (both at pixel scale 1.0) and
        require the blurred result to equal `blurred_2d` exactly."""
        image = aa.Array2D.manual_native(array=image_2d, pixel_scales=1.0)
        kernel = aa.Kernel2D.manual_native(array=kernel_2d, pixel_scales=1.0)
        blurred_image = kernel.convolved_array_from_array(image)
        assert (blurred_image.native == np.array(blurred_2d)).all()

    def test__kernel_is_not_odd_x_odd__raises_error(self):
        kernel = aa.Kernel2D.manual_native(
            array=[[0.0, 1.0], [1.0, 2.0]], pixel_scales=1.0
        )
        with pytest.raises(exc.KernelException):
            kernel.convolved_array_from_array(np.ones((5, 5)))

    def test__image_is_3x3_central_value_of_one__kernel_is_cross__blurred_image_becomes_cross(
        self,
    ):
        # A central delta blurred with the cross reproduces the cross itself.
        self._assert_convolution(
            image_2d=[[0.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 0.0]],
            kernel_2d=[[0.0, 1.0, 0.0], [1.0, 2.0, 1.0], [0.0, 1.0, 0.0]],
            blurred_2d=[[0.0, 1.0, 0.0], [1.0, 2.0, 1.0], [0.0, 1.0, 0.0]],
        )

    def test__image_is_4x4_central_value_of_one__kernel_is_cross__blurred_image_becomes_cross(
        self,
    ):
        self._assert_convolution(
            image_2d=[
                [0.0, 0.0, 0.0, 0.0],
                [0.0, 1.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0],
            ],
            kernel_2d=[[0.0, 1.0, 0.0], [1.0, 2.0, 1.0], [0.0, 1.0, 0.0]],
            blurred_2d=[
                [0.0, 1.0, 0.0, 0.0],
                [1.0, 2.0, 1.0, 0.0],
                [0.0, 1.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0],
            ],
        )

    def test__image_is_4x3_central_value_of_one__kernel_is_cross__blurred_image_becomes_cross(
        self,
    ):
        self._assert_convolution(
            image_2d=[[0.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 0.0], [0.0, 0.0, 0.0]],
            kernel_2d=[[0.0, 1.0, 0.0], [1.0, 2.0, 1.0], [0.0, 1.0, 0.0]],
            blurred_2d=[[0.0, 1.0, 0.0], [1.0, 2.0, 1.0], [0.0, 1.0, 0.0], [0.0, 0.0, 0.0]],
        )

    def test__image_is_3x4_central_value_of_one__kernel_is_cross__blurred_image_becomes_cross(
        self,
    ):
        self._assert_convolution(
            image_2d=[[0.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 0.0, 0.0]],
            kernel_2d=[[0.0, 1.0, 0.0], [1.0, 2.0, 1.0], [0.0, 1.0, 0.0]],
            blurred_2d=[[0.0, 1.0, 0.0, 0.0], [1.0, 2.0, 1.0, 0.0], [0.0, 1.0, 0.0, 0.0]],
        )

    def test__image_is_4x4_has_two_central_values__kernel_is_asymmetric__blurred_image_follows_convolution(
        self,
    ):
        self._assert_convolution(
            image_2d=[
                [0.0, 0.0, 0.0, 0.0],
                [0.0, 1.0, 0.0, 0.0],
                [0.0, 0.0, 1.0, 0.0],
                [0.0, 0.0, 0.0, 0.0],
            ],
            kernel_2d=[[1.0, 1.0, 1.0], [2.0, 2.0, 1.0], [1.0, 3.0, 3.0]],
            blurred_2d=[
                [1.0, 1.0, 1.0, 0.0],
                [2.0, 3.0, 2.0, 1.0],
                [1.0, 5.0, 5.0, 1.0],
                [0.0, 1.0, 3.0, 3.0],
            ],
        )

    def test__image_is_4x4_values_are_on_edge__kernel_is_asymmetric__blurring_does_not_account_for_edge_effects(
        self,
    ):
        # Kernel contributions falling outside the image are simply truncated.
        self._assert_convolution(
            image_2d=[
                [0.0, 0.0, 0.0, 0.0],
                [1.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 1.0],
                [0.0, 0.0, 0.0, 0.0],
            ],
            kernel_2d=[[1.0, 1.0, 1.0], [2.0, 2.0, 1.0], [1.0, 3.0, 3.0]],
            blurred_2d=[
                [1.0, 1.0, 0.0, 0.0],
                [2.0, 1.0, 1.0, 1.0],
                [3.0, 3.0, 2.0, 2.0],
                [0.0, 0.0, 1.0, 3.0],
            ],
        )

    def test__image_is_4x4_values_are_on_corner__kernel_is_asymmetric__blurring_does_not_account_for_edge_effects(
        self,
    ):
        self._assert_convolution(
            image_2d=[
                [1.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 0.0],
                [0.0, 0.0, 0.0, 1.0],
            ],
            kernel_2d=[[1.0, 1.0, 1.0], [2.0, 2.0, 1.0], [1.0, 3.0, 3.0]],
            blurred_2d=[
                [2.0, 1.0, 0.0, 0.0],
                [3.0, 3.0, 0.0, 0.0],
                [0.0, 0.0, 1.0, 1.0],
                [0.0, 0.0, 2.0, 2.0],
            ],
        )
class TestFromGaussian:
    """Tests Kernel2D.from_gaussian against reference values from the
    equivalent elliptical Gaussian light profile."""

    def test__identical_to_gaussian_light_profile(self):
        kernel = aa.Kernel2D.from_gaussian(
            shape_native=(3, 3),
            pixel_scales=1.0,
            centre=(0.1, 0.1),
            axis_ratio=0.9,
            angle=45.0,
            sigma=1.0,
            normalize=True,
        )

        assert kernel.native == pytest.approx(
            np.array(
                [
                    [0.06281, 0.13647, 0.0970],
                    [0.11173, 0.21589, 0.136477],
                    [0.065026, 0.11173, 0.06281],
                ]
            ),
            1.0e-3,
        )
class TestFromAlmaGaussian:
    """Tests Kernel2D.from_as_gaussian_via_alma_fits_header_parameters against
    an equivalent astropy `Gaussian2D` model evaluated on the same pixel grid.

    The five scenarios previously duplicated ~40 lines of astropy setup each;
    that setup is factored into `_assert_matches_astropy`.
    """

    def _assert_matches_astropy(self, pixel_scales, x_stddev, y_stddev, theta_deg, shape):
        """Build the astropy reference kernel for the given ALMA header
        parameters and require the aa kernel to match it to 1e-4.

        `x_stddev` / `y_stddev` are widths in degrees as stored in the FITS
        header; they are converted to per-pixel standard deviations via
        arcsec / pixel_scale / (2*sqrt(2*ln 2)) — presumably a FWHM-to-sigma
        conversion (the factor is the standard FWHM/sigma ratio).
        """
        fwhm_to_sigma = 2.0 * np.sqrt(2.0 * np.log(2.0))
        x_stddev_pix = x_stddev * (units.deg).to(units.arcsec) / pixel_scales / fwhm_to_sigma
        y_stddev_pix = y_stddev * (units.deg).to(units.arcsec) / pixel_scales / fwhm_to_sigma

        # Kernel centre in pixel coordinates (shapes used here are square).
        centre = (shape[0] - 1) / 2.0
        gaussian_astropy = functional_models.Gaussian2D(
            amplitude=1.0,
            x_mean=centre,
            y_mean=centre,
            x_stddev=x_stddev_pix,
            y_stddev=y_stddev_pix,
            theta=Angle(theta_deg, "deg").radian,
        )

        y, x = np.mgrid[0 : shape[1], 0 : shape[0]]
        kernel_astropy = gaussian_astropy(x, y)
        kernel_astropy /= np.sum(kernel_astropy)

        kernel = aa.Kernel2D.from_as_gaussian_via_alma_fits_header_parameters(
            shape_native=shape,
            pixel_scales=pixel_scales,
            y_stddev=y_stddev,
            x_stddev=x_stddev,
            theta=theta_deg,
            normalize=True,
        )

        assert kernel_astropy == pytest.approx(kernel.native, 1e-4)

    def test__identical_to_astropy_gaussian_model__circular_no_rotation(self):
        self._assert_matches_astropy(
            pixel_scales=0.1, x_stddev=2.0e-5, y_stddev=2.0e-5, theta_deg=0.0, shape=(5, 5)
        )

    def test__identical_to_astropy_gaussian_model__circular_no_rotation_different_pixel_scale(
        self,
    ):
        self._assert_matches_astropy(
            pixel_scales=0.02, x_stddev=2.0e-5, y_stddev=2.0e-5, theta_deg=0.0, shape=(5, 5)
        )

    def test__identical_to_astropy_gaussian_model__include_ellipticity_from_x_and_y_stddev(
        self,
    ):
        self._assert_matches_astropy(
            pixel_scales=0.1, x_stddev=1.0e-5, y_stddev=2.0e-5, theta_deg=0.0, shape=(5, 5)
        )

    def test__identical_to_astropy_gaussian_model__include_different_ellipticity_from_x_and_y_stddev(
        self,
    ):
        self._assert_matches_astropy(
            pixel_scales=0.1, x_stddev=3.0e-5, y_stddev=2.0e-5, theta_deg=0.0, shape=(5, 5)
        )

    def test__identical_to_astropy_gaussian_model__include_rotation_angle_30(self):
        self._assert_matches_astropy(
            pixel_scales=0.1, x_stddev=1.0e-5, y_stddev=2.0e-5, theta_deg=30.0, shape=(3, 3)
        )

    def test__identical_to_astropy_gaussian_model__include_rotation_angle_230(self):
        self._assert_matches_astropy(
            pixel_scales=0.1, x_stddev=1.0e-5, y_stddev=2.0e-5, theta_deg=230.0, shape=(3, 3)
        )
| StarcoderdataPython |
import pygame, math
from game import map, ui
# Window / UI bootstrap and global game state.
window = pygame.display.set_mode([800, 600])
ui.window = window
screen = "game"  # name of the currently active screen
s = {"fullscreen": False}  # settings
running = True
gamedata = {"level": 0, "coal": 0, "iron": 1, "copper": 0}
tiles = pygame.sprite.Group()
rails = pygame.sprite.Group()
carts = pygame.sprite.Group()
interactables = pygame.sprite.Group()
listmap = []  # 2D grid of tile codes, indexed [x][y]; filled by loadlevel()
clock = pygame.time.Clock()
# Cursor / selection artwork.
selected = pygame.image.load("./resources/images/selected.png")
selected2 = pygame.image.load("./resources/images/selected2.png")
box = pygame.image.load("./resources/images/box.png")
uibox = pygame.image.load("./resources/images/ui box.png")
class Mouse(pygame.sprite.Sprite):
    """1x1 sprite that tracks the cursor so pygame's sprite collision can be
    used for hit-testing carts / interactables under the mouse."""

    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        self.image = pygame.surface.Surface([1, 1])
        self.rect = self.image.get_rect()
        self.rect.topleft = [0, 0]
        self.clickedcart = None  # cart currently selected by the player
        self.hoveritem = None  # sprite currently under the cursor
        self.tl = self.rect.topleft
        self.mode = "select"  # "select" = pick a cart, "action" = pick a destination

    def pos(self, position):
        # Move the hit-test sprite to the cursor position.
        self.rect.topleft = position
        self.tl = self.rect.topleft
# Singleton cursor sprite used for all hit-testing.
m = Mouse()
def snaptogrid(pos):
    """Convert a pixel position to [column, row] indices on the 40px tile grid.

    Floor division replaces the original ``int(math.floor(p / 40))``; the two
    are equivalent for both ints and floats (including negatives, since ``//``
    rounds toward negative infinity).
    """
    return [int(pos[0] // 40), int(pos[1] // 40)]
def loadlevel(number):
    """Load level `number`: rebuild the map sprite groups and reset run state."""
    global tiles, rails, carts, gamedata, listmap, interactables
    level_data = map.loadmap(int(number))
    tiles, rails, interactables, listmap = level_data
    carts.empty()
    gamedata["level"] = number
    # Fresh resource counts for the new level.
    for resource, amount in (("coal", 0), ("iron", 1), ("copper", 0)):
        gamedata[resource] = amount
loadlevel(0)

# Main loop: process input events, then redraw the active screen each frame.
# NOTE(review): the indentation below was reconstructed from a
# whitespace-mangled source; confirm the nesting of the "action"-mode UI
# against the original file.
while running:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False
        elif event.type == pygame.MOUSEBUTTONDOWN:
            m.pos(pygame.mouse.get_pos())
            if screen == "game":
                # First click selects a cart; a second click on a rail tile
                # (listmap value > 0) sends it there.
                if pygame.sprite.spritecollide(m, carts, False) and m.mode == "select":
                    carts.update("select", m, listmap)
                    if m.clickedcart != None:
                        m.mode = "action"
                elif m.mode == "action" and m.clickedcart != None and listmap[snaptogrid(m.tl)[0]][snaptogrid(m.tl)[1]] > 0:
                    m.clickedcart.pathfind(listmap, snaptogrid(m.tl))
                    m.clickedcart = None
                    m.mode = "select"
        elif event.type == pygame.MOUSEMOTION:
            m.pos(pygame.mouse.get_pos())
            if screen == "game":
                # Track which sprite the cursor is over (carts take priority).
                m.hoveritem = None
                if len(pygame.sprite.spritecollide(m, carts, False)) > 0:
                    m.hoveritem = pygame.sprite.spritecollide(m, carts, False)[0]
                elif len(pygame.sprite.spritecollide(m, interactables, False)) > 0:
                    m.hoveritem = pygame.sprite.spritecollide(m, interactables, False)[0]
        elif event.type == pygame.KEYDOWN:
            if event.key == pygame.K_SPACE:
                # Debug shortcut: spawn a miner cart under the cursor.
                carts.add(map.Cart(snaptogrid(m.tl), "miner"))
    if screen == "game":
        window.fill([100, 100, 100])
        tiles.draw(window)
        carts.draw(window)
        carts.update("update", m, listmap)
        # Tooltip for the hovered sprite (suppressed while choosing a destination).
        if not m.hoveritem == None and not m.mode == "action":
            window.blit(box, [m.rect.left+10, m.rect.top+10])
            ui.Resize(30)
            ui.Text(m.hoveritem.type.upper(), [m.rect.left+27, m.rect.top+25])
            if m.hoveritem.type.startswith("mine") and m.hoveritem not in carts:
                ui.Resize(18)
                ui.Text("Carts Inside: " + str(m.hoveritem.data["carts"]), [m.rect.left+27, m.rect.top+47])
                ui.Text("Max Carts: " + str(m.hoveritem.data["max"]), [m.rect.left+27, m.rect.top+60])
        if not m.clickedcart == None:
            # Highlight the selected cart and, in action mode, the target tile.
            window.blit(selected2, [m.clickedcart.rect.left-2, m.clickedcart.rect.top-2])
            if m.mode == "action":
                window.blit(box, [m.rect.left+10, m.rect.top+10])
                ui.Resize(30)
                try:
                    ui.Text(m.hoveritem.type.upper(), [m.rect.left+27, m.rect.top+25])
                except:
                    ui.Text(m.clickedcart.type.upper(), [m.rect.left+27, m.rect.top+25])
                if listmap[snaptogrid(m.tl)[0]][snaptogrid(m.tl)[1]] > 0:
                    ui.Resize(22)
                    ui.Text("Click to move", [m.rect.left+27, m.rect.top+45])
                    ui.Text("Cart Here", [m.rect.left+27, m.rect.top+60])
                window.blit(selected, [snaptogrid(m.tl)[0]*40-2, snaptogrid(m.tl)[1]*40-2])
        window.blit(uibox, [555, 475])
    pygame.display.flip()
    clock.tick(60)
    fps = clock.get_fps()
pygame.quit()
| StarcoderdataPython |
def test_Dict():
    """Exercise dict literals, typed declarations and subscript reads.

    NOTE: this is an LPython/typed-Python compiler test — the local
    annotations use the compiler-provided `i32` integer type, and the code
    shape (declare-then-assign) is deliberate; do not restructure it.
    """
    x: dict[i32, i32]
    x = {1: 2, 3: 4}
    # x = {1: "2", "3": 4} -> semantic error
    y: dict[str, i32]
    y = {"a": -1, "b": -2}
    z: i32
    z = y["a"]
    z = y["b"]
    z = x[1]
def test_dict_insert():
    """Exercise inserting a new key into a typed dict (LPython compiler test)."""
    y: dict[str, i32]
    y = {"a": -1, "b": -2}
    y["c"] = -3
def test_dict_get():
    """Exercise dict.get with and without a default (LPython compiler test)."""
    y: dict[str, i32]
    y = {"a": -1, "b": -2}
    x: i32
    x = y.get("a")
    x = y.get("a", 0)
| StarcoderdataPython |
import logging
from logging.handlers import RotatingFileHandler
from flask.logging import default_handler
from app import app
if __name__ == '__main__':
    # Ensure the log directory exists before attaching the file handler —
    # RotatingFileHandler raises FileNotFoundError if './log' is missing.
    import os
    os.makedirs('./log', exist_ok=True)

    # File log: rotate at ~1 MiB, keep 3 backups, capture DEBUG and above.
    handler = RotatingFileHandler(filename='./log/app.log', maxBytes=1048576, backupCount=3)
    formatter = logging.Formatter(fmt='%(asctime)s - %(name)s[line:%(lineno)d] - %(levelname)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
    handler.setFormatter(formatter)
    handler.setLevel(logging.DEBUG)
    app.logger.addHandler(handler)

    # Reference: how flask builds its default logger (kept for context).
    """ # flask.logging.create_logger
    logger = logging.getLogger('flask.app')
    if app.debug and logger.level == logging.NOTSET:
        logger.setLevel(logging.DEBUG)
    if not has_level_handler(logger):
        logger.addHandler(default_handler)
    return logger
    """
    # Console log (flask's default stream handler): INFO and above.
    default_handler.setLevel(logging.INFO)
    app.logger.addHandler(default_handler)

    app.run(host='0.0.0.0', port=9000)
| StarcoderdataPython |
4810271 | <gh_stars>1-10
import numpy as np
import pylab as plt
from dopamine.stochastics import *
from ipdb import set_trace as debug
class DiscWorld(object):
    '''Simulation environment for testing reinforcement learning algorithms.

    A point mass moves on a 2D plane under a continuous two-dimensional
    thrust action; reward is the negative squared distance to a fixed
    target.  step() follows the gym convention and returns
    (state, reward, done, info).
    '''

    def __init__(self):
        self.inpt = 0
        # Random start position in [-5, 5]^2; zero initial velocity/thrust.
        self.x = 10 * (0.5 - np.random.rand())
        self.y = 10 * (0.5 - np.random.rand())
        self.vx = 0
        self.vy = 0
        self.ax = 0.0
        self.ay = 0.0
        self.t = 0
        self.dt = 0.1          # integration timestep
        self.tmx = 1
        self.tmy = 1
        self.max_time = 30     # episode length in simulated time units
        self.target_x = 5
        self.target_y = 5
        self.total_steps = 0
        self.delta_distance = 0
        self.ostate = self.state
        self.info = {}

    def step(self, action):
        '''Advance the simulation by one timestep.

        action: (1, 2) array-like ``[[ax, ay]]`` of thrust components.
        Returns (state, reward, done, info).
        '''
        # Keep a copy of the previous state (used by concat_state).
        self.ostate = list(np.copy(self.state))

        # Two-dimensional continuous action: horizontal/vertical thrust.
        self.ax = action[0][0]
        self.ay = action[0][1]

        # Euler integration; note position is updated with the *old* velocity.
        self.old_distance = 1.0 * self._distance_to_target
        self.t += self.dt
        self.x += self.vx * self.dt
        self.y += self.vy * self.dt
        self.vx += self.ax * self.dt
        self.vy += self.ay * self.dt
        self.delta_distance = self._distance_to_target - self.old_distance

        self.reward = self._reward
        self.total_steps += 1
        return self.state, self.reward, self.done, self.info

    @property
    def concat_state(self):
        '''Concats previous state with the current state.

        NOTE(review): ``self.ostate`` is a list and ``self.state`` an
        ndarray, so ``+`` performs elementwise numpy addition rather than
        concatenation — this looks unintended; confirm before relying on it.
        '''
        return self.ostate + self.state

    @property
    def _distance_to_target(self):
        '''Euclidean distance from the current position to the target.'''
        x, y = self.x, self.y
        tx, ty = self.target_x, self.target_y
        return np.sqrt((x - tx)**2 + (y - ty)**2)

    @property
    def nb_actions(self):
        '''Continuous actions corresponding to horizontal/vertical thrust.'''
        return 2

    @property
    def done(self):
        '''Are we done with the current episode?'''
        return (self.t > self.max_time)

    @property
    def _reward(self):
        '''Negative squared distance to the target.

        (A thresholded -1/0 reward variant that followed the return was
        unreachable dead code and has been removed.)
        '''
        return -self._distance_to_target**2

    @property
    def state(self):
        '''The currently observed state: position, velocity and target
        position as a (1, 6) array.'''
        return np.atleast_2d([self.x, self.y, self.vx, self.vy,
                              self.target_x, self.target_y])

    @property
    def D(self):
        '''Returns dimension of the state space.'''
        return self.state.shape[1]

    def render(self, paths, fignum=100):
        '''Illustrate x/y position trajectories for a list of paths.'''
        plt.figure(fignum)
        plt.clf()
        for path in paths:
            # X coordinate.
            plt.subplot(211)
            plt.plot(path['state_vectors'][:, 0], 'red')
            plt.ylim([-20, 20])
            plt.xlim([0, self.max_time / self.dt])
            plt.plot(plt.xlim(), [self.target_x, self.target_x])
            plt.grid(True)
            plt.ylabel('x position')

            # Y coordinate.
            plt.subplot(212)
            plt.plot(path['state_vectors'][:, 1], 'red')
            plt.xlim([0, self.max_time / self.dt])
            plt.ylim([-20, 20])
            plt.plot(plt.xlim(), [self.target_y, self.target_y])
            plt.grid(True)
            plt.ylabel('y position')
        plt.show()
        plt.pause(0.05)

    @property
    def steps_per_episode(self):
        '''Number of integration steps in one episode.'''
        return int(self.max_time / self.dt)

    def reset(self):
        '''Reset position, velocity and time to random initial conditions.'''
        self.inpt = 0
        self.x, self.vx, self.t = 20*(np.random.rand()-0.5), 2*np.random.randn(), 0
        self.y, self.vy, self.t = 20*(np.random.rand()-0.5), 2*np.random.randn(), 0
        return self.state

    def reset_target(self):
        '''Move the target to a random location in [-20, 20]^2.'''
        self.target_x = (40 * (np.random.rand() - 0.5))
        self.target_y = (40 * (np.random.rand() - 0.5))

    def simulate(self, agent):
        '''Run one episode driven by `agent`, which must expose a
        take_action(state) -> action method.  Returns (states, rewards)
        as stacked arrays.

        Bug fix: step() returns a 4-tuple (state, reward, done, info); the
        original unpacked only three values, raising ValueError at runtime.
        '''
        x = self.reset()
        done = False
        xs = []
        rs = []
        while not done:
            a = agent.take_action(x)
            x, reward, done, _ = self.step(a)
            xs.append(x)
            rs.append(reward)
        return np.vstack(xs), np.vstack(rs)
if __name__ == '__main__':

    # Make an environment and a 2D diagonal-Gaussian policy.
    env = DiscWorld()
    pdf = DiagGaussian(2)

    x = []
    rewards = []
    done = False

    param = pdf.parameter_vector
    sample = pdf.sample(param)

    with tf.Session() as sess:
        while not done:
            # Simple random gaussian policy centred at zero thrust.
            action = sess.run(sample, feed_dict={param: [[0, 0]]})
            print(action)
            # Bug fix: step() returns (state, reward, done, info); the
            # original unpacked only three values, raising ValueError.
            state, reward, done, _ = env.step(action)
            x.append([state[0][0], state[0][1]])
            rewards.append(reward)

    # Plot the resulting trajectory.
    x = np.vstack(x)
    from mathtools.vanity import *
    setup_plotting()
    import seaborn
    plot(x[:, 0], x[:, 1])
133758 | <gh_stars>0
import json
from app.service.http_client import http_client
# Logical name of the target microservice, passed to http_client for routing.
service_name = 'uaa'
def get_user(login, jwt):
    """Fetch the user record for `login` from the uaa service."""
    endpoint = '/api/users/{}'.format(login)
    return http_client('get', service_name, endpoint, jwt=jwt)
def send_message(message, jwt):
    """POST `message` (JSON-serialized) to the uaa messaging endpoint."""
    payload = json.dumps(message)
    return http_client('post', service_name, '/api/messages/send', body=payload, jwt=jwt)
| StarcoderdataPython |
58859 |
# -*- python -*-
import math
import numpy
import Shadow
from Shadow.ShadowPreprocessorsXraylib import prerefl, pre_mlayer, bragg
from srxraylib.sources import srfunc
from sirepo.template import transfer_mat_bl
from pykern.pkcollections import PKDict
from pykern import pkjson
# Source beam sizes and divergences (units per Shadow convention —
# presumably cm and rad; confirm against the generating template).
sigmax = 0.0045000000000000005
sigdix = 2.913e-05
sigmaz = 0.0045000000000000005
sigdiz = 2.913e-05

beam_stats = []  # per-element statistics appended by calculate_stats()
epsilon = 1e-06  # ray offset used to build the numerical transfer matrix
beam = transfer_mat_bl.create_mat_rays(epsilon)
# Second-moment (sigma) matrix of the source: diag(sx^2, sx'^2, sz^2, sz'^2).
sigma_mat = numpy.matrix([
    [sigmax ** 2, 0, 0, 0],
    [0, sigdix ** 2, 0, 0],
    [0, 0, sigmaz ** 2, 0],
    [0, 0, 0, sigdiz ** 2],
])
alpha = 0  # accumulated orientation of the reference frame, degrees mod 360
def calculate_stats(pos, oe):
    """Propagate the source sigma matrix through the current transfer matrix
    and append the beam statistics at the new position to `beam_stats`.

    pos: cumulative distance along the beamline so far.
    oe:  the optical element just traced, or None for the source position.
    Returns the updated cumulative position.
    """
    global alpha
    Tmat, x_prop_cen, xp_prop_cen, z_prop_cen, zp_prop_cen = transfer_mat_bl.tmat_calc(beam.duplicate(), epsilon)
    # Propagate second moments: sigma' = T * sigma * T^t.
    res = Tmat * sigma_mat * numpy.transpose(Tmat)
    pos += (oe.T_SOURCE if oe else 0)
    if oe:
        # oe.ALPHA is in radians after traceOE()
        alpha = int(alpha + 180 / math.pi * oe.ALPHA) % 360
    beam_stats.append(PKDict(
        isRotated=True if alpha == 90 or alpha == 270 else False,
        s=pos * 1e-2,  # assumes pos is in cm, reported in m — TODO confirm
        x=x_prop_cen,
        xp=xp_prop_cen,
        z=z_prop_cen,
        zp=zp_prop_cen,
        matrix=Tmat.tolist(),
        sigmax=math.sqrt(res[0, 0]) * 1e-2,
        sigdix=math.sqrt(res[1, 1]),
        sigmaz=math.sqrt(res[2, 2]) * 1e-2,
        sigdiz=math.sqrt(res[3, 3]),
    ))
    return pos
pos = calculate_stats(0, None)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 1)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 2)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 3)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 4)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 5)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 6)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 7)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 8)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 9)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 10)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 11)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 12)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 13)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 14)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 15)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 16)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 17)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 18)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 19)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 20)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 21)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 22)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 23)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 24)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 25)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 26)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 27)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 28)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 29)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 30)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 28.5
beam.traceOE(oe, 31)
pos = calculate_stats(pos, oe)
# Elements 32-99: identical pass-through ("empty") optical elements that
# propagate the beam 28.5 units downstream each, collecting beam statistics
# after every step.  The generated code repeated this 8-line unit verbatim
# for every index; a loop is equivalent because each iteration builds a
# fresh OE with the same settings and only the traceOE element index varies.
for _oe_index in range(32, 100):
    oe = Shadow.OE()
    oe.DUMMY = 1.0                  # unit-scale factor (1.0 = cm) -- TODO confirm against SHADOW docs
    oe.set_empty(ALPHA=0)           # empty (screen-like) element, no rotation about the beam axis
    oe.FWRITE = 3                   # presumably "write no intermediate files" -- verify in SHADOW docs
    oe.T_IMAGE = 0.0                # image plane coincides with the element
    oe.T_SOURCE = 28.5              # drift distance from the previous element
    beam.traceOE(oe, _oe_index)     # element numbering continues the surrounding sequence
    pos = calculate_stats(pos, oe)  # accumulate per-element beam statistics
# Element 100: the first non-empty optical element in this stretch -- a
# focusing cylindrical mirror.  Parameter meanings below follow SHADOW
# conventions; flagged assumptions should be confirmed against the docs.
oe = Shadow.OE()
oe.DUMMY = 1.0            # unit-scale factor (1.0 = cm) -- TODO confirm
oe.FMIRR = 2              # surface figure code 2 -- presumably ellipsoid; verify in SHADOW docs
oe.ALPHA = 0              # no rotation of the element about the beam axis
oe.FHIT_C = 0             # mirror treated as infinite (no finite-dimension clipping)
oe.F_EXT = 0              # surface parameters computed internally, not user-supplied
oe.F_DEFAULT = 0          # use the explicit SSOUR/SIMAG/THETA focusing values below
oe.SSOUR = 2900.0         # object (source) distance used for the focusing figure
oe.SIMAG = 1000.0         # image distance used for the focusing figure
oe.THETA = 2.0002         # design incidence angle for the figure -- units/reference TODO confirm
oe.F_CONVEX = 0           # concave surface
oe.FCYL = 1               # cylindrical figure (curved in one direction only)
oe.CIL_ANG = 90.0         # cylinder-axis orientation; 90 deg presumably sagittal focusing -- confirm
oe.T_INCIDENCE = 2.0      # actual incidence angle of the incoming beam
oe.T_REFLECTION = 2.0     # actual reflection angle (specular: equals incidence)
oe.FWRITE = 3             # presumably "write no intermediate files" -- verify in SHADOW docs
oe.T_IMAGE = 0.0          # image plane coincides with the element
oe.T_SOURCE = 28.5        # drift distance from the previous element
beam.traceOE(oe, 100)
pos = calculate_stats(pos, oe)
# Elements 101-199: identical pass-through ("empty") optical elements that
# propagate the beam 1.0 unit downstream each, collecting beam statistics
# after every step.  The generated code repeated this 8-line unit verbatim
# for every index; a loop is equivalent because each iteration builds a
# fresh OE with the same settings and only the traceOE element index varies.
for _oe_index in range(101, 200):
    oe = Shadow.OE()
    oe.DUMMY = 1.0                  # unit-scale factor (1.0 = cm) -- TODO confirm against SHADOW docs
    oe.set_empty(ALPHA=0)           # empty (screen-like) element, no rotation about the beam axis
    oe.FWRITE = 3                   # presumably "write no intermediate files" -- verify in SHADOW docs
    oe.T_IMAGE = 0.0                # image plane coincides with the element
    oe.T_SOURCE = 1.0               # drift distance from the previous element
    beam.traceOE(oe, _oe_index)     # element numbering continues the surrounding sequence
    pos = calculate_stats(pos, oe)  # accumulate per-element beam statistics
# Element 200: a second focusing cylindrical mirror, this time with finite
# dimensions and a deliberate transverse displacement.  Parameter meanings
# follow SHADOW conventions; flagged assumptions should be confirmed.
oe = Shadow.OE()
oe.DUMMY = 1.0            # unit-scale factor (1.0 = cm) -- TODO confirm
oe.FMIRR = 2              # surface figure code 2 -- presumably ellipsoid; verify in SHADOW docs
oe.ALPHA = 0              # no rotation of the element about the beam axis
oe.FHIT_C = 1             # finite mirror: apply the dimension limits below
oe.F_EXT = 0              # surface parameters computed internally, not user-supplied
oe.F_DEFAULT = 0          # use the explicit SSOUR/SIMAG/THETA focusing values below
oe.SSOUR = 3000.0         # object (source) distance used for the focusing figure
oe.SIMAG = 900.0          # image distance used for the focusing figure
oe.THETA = 2.0002         # design incidence angle for the figure -- units/reference TODO confirm
oe.F_CONVEX = 0           # concave surface
oe.FCYL = 1               # cylindrical figure (curved in one direction only)
oe.CIL_ANG = 0.0          # cylinder-axis orientation; 0 deg presumably meridional focusing -- confirm
oe.FSHAPE = 2             # mirror boundary shape code -- meaning TODO confirm in SHADOW docs
oe.RWIDX2 = 15.0          # mirror half-width limit -- presumably; confirm sign/side conventions
oe.RLEN2 = 25.0           # mirror half-length limit -- presumably; confirm sign/side conventions
oe.F_MOVE = 1             # apply the misalignment/offset parameters below
oe.OFFX = 1.0             # transverse offset of the mirror along X
oe.T_INCIDENCE = 2.0      # actual incidence angle of the incoming beam
oe.T_REFLECTION = 2.0     # actual reflection angle (specular: equals incidence)
oe.FWRITE = 3             # presumably "write no intermediate files" -- verify in SHADOW docs
oe.T_IMAGE = 0.0          # image plane coincides with the element
oe.T_SOURCE = 1.0         # drift distance from the previous element
beam.traceOE(oe, 200)
pos = calculate_stats(pos, oe)
# Elements 201-253: identical pass-through ("empty") optical elements that
# propagate the beam 9.05 units downstream each (the exact float literal
# from the generator is preserved), collecting beam statistics after every
# step.  The generated code repeated this 8-line unit verbatim for every
# index; a loop is equivalent because each iteration builds a fresh OE
# with the same settings and only the traceOE element index varies.
for _oe_index in range(201, 254):
    oe = Shadow.OE()
    oe.DUMMY = 1.0                  # unit-scale factor (1.0 = cm) -- TODO confirm against SHADOW docs
    oe.set_empty(ALPHA=0)           # empty (screen-like) element, no rotation about the beam axis
    oe.FWRITE = 3                   # presumably "write no intermediate files" -- verify in SHADOW docs
    oe.T_IMAGE = 0.0                # image plane coincides with the element
    oe.T_SOURCE = 9.050000000000182 # drift distance from the previous element (generator artifact float)
    beam.traceOE(oe, _oe_index)     # element numbering continues the surrounding sequence
    pos = calculate_stats(pos, oe)  # accumulate per-element beam statistics
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 254)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 255)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 256)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 257)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 258)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 259)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 260)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 261)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 262)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 263)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 264)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 265)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 266)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 267)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 268)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 269)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 270)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 271)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 272)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 273)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 274)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 275)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 276)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 277)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 278)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 279)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 280)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 281)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 282)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 283)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 284)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 285)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 286)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 287)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 288)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 289)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 290)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 291)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 292)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 293)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 294)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 295)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 296)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 297)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 298)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.050000000000182
beam.traceOE(oe, 299)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty()
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 9.049999999981537
beam.traceOE(oe, 300)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 301)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 302)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 303)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 304)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 305)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 306)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 307)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 308)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 309)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 310)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 311)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 312)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 313)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 314)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 315)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 316)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 317)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 318)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 319)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 320)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 321)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 322)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 323)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 324)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 325)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 326)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 327)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 328)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 329)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 330)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 331)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 332)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 333)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 334)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 335)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 336)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 337)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 338)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 339)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 340)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 341)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 342)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 343)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 344)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 345)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 346)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 347)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 348)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 349)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 350)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 351)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 352)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 353)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 354)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 355)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 356)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 357)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 358)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 359)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 360)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 361)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 362)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 363)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 364)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 365)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 366)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 367)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 368)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 369)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 370)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 371)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 372)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 373)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 374)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 375)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 376)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 377)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 378)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 379)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 380)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 381)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 382)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 383)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 384)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 385)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 386)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 387)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 388)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 389)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 390)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 391)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 392)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 393)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 394)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 395)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 396)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 397)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 398)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 399)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty(ALPHA=0)
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 0.9499999999998181
beam.traceOE(oe, 400)
pos = calculate_stats(pos, oe)
oe = Shadow.OE()
oe.DUMMY = 1.0
oe.set_empty()
oe.FWRITE = 3
oe.T_IMAGE = 0.0
oe.T_SOURCE = 1.864464138634503e-11
beam.traceOE(oe, 401)
pos = calculate_stats(pos, oe)
pkjson.dump_pretty(beam_stats, filename='beam_stats.json')
import Shadow.ShadowTools
Shadow.ShadowTools.plotxy(beam, 1, 3, nbins=100, nolost=1)
| StarcoderdataPython |
3373208 | <filename>delft3dfmpy/io/gridio.py
import netCDF4
import numpy as np
import sys
sys.path.append('D:/Documents/GitHub/delft3dfmpy')
from delft3dfmpy.datamodels.cstructures import meshgeom, meshgeomdim
import logging
logger = logging.getLogger(__name__)
def to_netcdf_old(meshgeom, path):
    """Write a mesh to disk in the 'old' (UGRID 0.9 / NETCDF3_CLASSIC) format.

    Only node coordinates and edge (net link) connectivity are written;
    node z-values are written as zeros.

    Parameters
    ----------
    meshgeom : meshgeom
        Mesh geometry to export (node coordinates and edge_nodes are read).
    path : str
        Destination path of the netCDF file.
    """
    outformat = "NETCDF3_CLASSIC"
    ncfile = netCDF4.Dataset(path, 'w', format=outformat)
    # Ensure the file handle is released even if writing fails halfway.
    try:
        ncfile.createDimension("nNetNode", meshgeom.meshgeomdim.numnode)
        ncfile.createDimension("nNetLink", meshgeom.meshgeomdim.numedge)
        ncfile.createDimension("nNetLinkPts", 2)

        # Mesh topology container variable (carries attributes only).
        mesh2d = ncfile.createVariable("Mesh2D", "i4", ())
        mesh2d.cf_role = 'mesh_topology'
        mesh2d.node_coordinates = 'NetNode_x NetNode_y'
        mesh2d.node_dimension = 'nNetNode'
        mesh2d.edge_node_connectivity = 'NetLink'
        mesh2d.edge_dimension = 'nNetLink'
        mesh2d.topology_dimension = 1

        # Node coordinates; z is not stored on the meshgeom, so write zeros.
        for dim in list('xyz'):
            ncvar = ncfile.createVariable(f"NetNode_{dim}", "f8", mesh2d.node_dimension)
            ncvar.units = 'm'
            if dim == 'z':
                ncvar.mesh = 'Mesh2D'
                ncvar.coordinates = 'NetNode_x NetNode_y'
                ncvar[:] = np.zeros(meshgeom.meshgeomdim.numnode)
            else:
                ncvar[:] = meshgeom.get_values(f'node{dim}')

        # Edge (link) connectivity as pairs of node numbers.
        ncvar = ncfile.createVariable("NetLink", "i4", (mesh2d.edge_dimension, "nNetLinkPts"))
        links = meshgeom.get_values('edge_nodes', as_array=True)
        ncvar[:] = links.tolist()

        # NetLinkType: mark every link as a 2D link (type 2).
        ncvar = ncfile.createVariable("NetLinkType", "i4", mesh2d.edge_dimension)
        ncvar[:] = np.ones(len(links), dtype=int) * 2
    finally:
        ncfile.close()
def from_netcdf_old(meshgeom, path, only2d=False):
    """Read a mesh from the 'old' (UGRID 0.9) netCDF format into *meshgeom*.

    Parameters
    ----------
    meshgeom : meshgeom
        Target geometry; node coordinates and edge_nodes are filled in
        place, and the dimension counters are updated.
    path : str
        Path to the netCDF file with the mesh.
    only2d : bool, optional
        If True, keep only the 2D links (NetLinkType == 2) and the nodes
        they reference; node and link numbering is compacted accordingly.
    """
    ds = netCDF4.Dataset(path, 'r')
    # Ensure the dataset is closed even if parsing raises.
    try:
        # Link types and (0-based) connectivity table.
        netlinktype = ds.variables['NetLinkType'][:].data
        links = ds.variables['NetLink'][:, :] - 1

        if only2d and (netlinktype != 2).any():
            # Keep only 2D links and the nodes referenced by them.
            linkidx = netlinktype == 2
            nodeidx = np.unique(links[linkidx, :])
            links = links[linkidx, :]
            meshgeom.meshgeomdim.numnode = len(nodeidx)
            meshgeom.meshgeomdim.numedge = sum(linkidx)
            # Renumber the surviving nodes consecutively so the link table
            # stays consistent after dropping the unused nodes.
            id_mapping = {old_id: new_id for new_id, old_id in enumerate(nodeidx)}
            links = np.reshape([id_mapping[old_id] for old_id in links.ravel()], links.shape)
        else:
            # Take all nodes and links as stored in the file.
            meshgeom.meshgeomdim.numnode = ds.dimensions['nNetNode'].size
            meshgeom.meshgeomdim.numedge = ds.dimensions['nNetLink'].size
            nodeidx = slice(None)

        # Node coordinates.
        for dim in list('xyz'):
            data = ds.variables[f'NetNode_{dim}'][nodeidx]
            meshgeom.allocate(f'node{dim}')
            meshgeom.set_values(f'node{dim}', data)

        # The link table is stored 1-based in meshgeom.
        meshgeom.allocate('edge_nodes')
        meshgeom.set_values('edge_nodes', links.ravel() + 1)
    finally:
        ds.close()
| StarcoderdataPython |
3217705 | import math
N = int(input())
X = list(map(int, input().split()))
m, y, c = 0, 0, 0
for x in X:
c = max(c, abs(x))
x = abs(x)
m += x
y += x**2
print(m)
print(math.sqrt(y))
print(c)
| StarcoderdataPython |
4814206 | from rest_framework import serializers
from .models import Client, Contact, Employee
class ClientSerializer(serializers.ModelSerializer):
class Meta:
fields = '__all__'
model = Client
class ContactSerializer(serializers.ModelSerializer):
def validate(self, data):
"""Проверка, что контакт уникален."""
date = data['date']
client = data['client']
employee = self.context['employee']
contact = Contact.objects.filter(
date=date, client=client, employee=employee).exists()
if contact:
raise serializers.ValidationError('Такой контакт уже существует')
return data
class Meta:
fields = '__all__'
read_only_fields = ('employee', )
model = Contact
class EmployeeSerializer(serializers.ModelSerializer):
    """Serializer for employees, hiding the contacts relation field."""

    class Meta:
        model = Employee
        exclude = ('contacts', )
class ContactCSVSerializer(serializers.ModelSerializer):
    """Read-only contact representation with nested employee and client."""

    employee = EmployeeSerializer(read_only=True)
    client = ClientSerializer(read_only=True)

    class Meta:
        model = Contact
        fields = ('id', 'employee', 'date', 'client', )
| StarcoderdataPython |
182442 | <reponame>kungfumas/bahasa-alami
# Latent Semantic Analysis using Python
# Importing the Libraries
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.decomposition import TruncatedSVD
# Toy corpus for demonstrating Latent Semantic Analysis.
dataset = ["The amount of polution is increasing day by day",
           "The concert was just great",
           "I love to see <NAME> cook",
           "Google is introducing a new technology",
           "AI Robots are examples of great technology present today",
           "All of us were singing in the concert",
           "We have launch campaigns to stop pollution and global warming"]
dataset = [line.lower() for line in dataset]

# Build the TF-IDF document-term matrix.
vectorizer = TfidfVectorizer()
X = vectorizer.fit_transform(dataset)

# Show the sparse TF-IDF representation of the first document.
print(X[0])

# Truncated SVD projects the documents onto 4 latent "concepts".
lsa = TruncatedSVD(n_components=4, n_iter=100)
lsa.fit(X)

# Term loadings of the 4th concept (row 3 of V^T), kept for inspection.
row1 = lsa.components_[3]

# Print the 10 highest-weighted terms for every concept.
terms = vectorizer.get_feature_names()
for concept_index, concept_weights in enumerate(lsa.components_):
    weighted_terms = zip(terms, concept_weights)
    top_terms = sorted(weighted_terms, key=lambda pair: pair[1], reverse=True)[:10]
    print("\nConcept", concept_index, ":")
    for term in top_terms:
        print(term)
94340 | <reponame>tschiex/toulbar2-diverse
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import os, sys
import matplotlib.pyplot as plt
from utils import dissim, read_cfn_gzip, read_sim_mat
# Machine-specific locations of the helper scripts and the toulbar2 binary.
python_path = "python3 /home/tschiex/toulbar2-diverse/python-scripts/"
tb2 = "/home/tschiex/toulbar2-diverse/build/bin/Linux/toulbar2"

# One-letter codes of the 20 standard amino acids (used for CPD problems).
AAs = "ARNDCQEGHILKMFPSTWYV"
n_aa = len(AAs)
# Three-letter to one-letter amino-acid code translation table.
AA3to1 = {'ALA': 'A', 'ARG': 'R', 'ASN': 'N', 'ASP': 'D', 'CYS': 'C', 'GLU': 'E', 'GLN': 'Q', 'GLY': 'G', 'HIS': 'H',
          'ILE': 'I', 'LEU': 'L', 'LYS': 'K', 'MET': 'M', 'PHE': 'F', 'PRO': 'P', 'SER': 'S', 'THR': 'T', 'TRP': 'W',
          'TYR': 'Y', 'VAL': 'V'}
# ---- Command-line interface -------------------------------------------------
parser = argparse.ArgumentParser()
parser.add_argument("--name", required=True,
                    help="Problem name")
parser.add_argument("--niter", default=20, type=int,
                    help="Number of lagrange iterations")
parser.add_argument("--divmin", default=1, type=int,
                    help="Minimum diversity between two solutions")
parser.add_argument("--cpd", action="store_true", default=False,
                    help="Computational Protein Design")
parser.add_argument("--msim", default=None,
                    help="Similarity matrix (cpd)")
args = parser.parse_args()
name = args.name
divmin = args.divmin
# Extra flag forwarded to the helper scripts in CPD mode.
if args.cpd:
    cpd_str = " --cpd "
else:
    cpd_str = ""
# Load the cost-function network for problem `name`.
cfn_filename = name + ".cfn.gz"
cfn = read_cfn_gzip(cfn_filename)
sols_mdd_filename = name + "_divmin" + str(divmin) + "_nsols.sols"
sol_filename = name + ".gmec"
# Run the MDD-based diverse-solutions script to produce the .sols file.
mult_div_cmd = python_path + "mult_div_regular.py -i " + cfn_filename + " -o " + sols_mdd_filename + \
               " --divmin " + str(divmin) + " --nsols 2 --type mdd" + cpd_str
os.system(mult_div_cmd)
# Optional similarity matrix used by the CPD dissimilarity measure.
if args.msim:
    msim = read_sim_mat(args.msim)
else:
    msim = None
# Recover cstar and gmec from sols_mdd_file
with open(sols_mdd_filename, 'r') as sols_mdd:
    lines = sols_mdd.readlines()
# Line 1 holds the first (GMEC) solution; persist it for the helper scripts.
sol_file = open(sol_filename, 'w')
sol = lines[1]
sol_file.write(sol)
sol = [int(i) for i in sol.split(" ")]
sol_file.close()
# The positions of cstar / xstar in the .sols file shift in CPD mode.
cstar_line = 5
xstar_line = 4
if (cpd_str != ""):
    cstar_line = 7
    xstar_line = 5
cstar = float(lines[cstar_line][:-1])
xstar = [int(xi) for xi in lines[xstar_line][:-1].split(' ')]
print("cstar " + str(cstar))
"""
# Compute qbest
ql_filename = name + "_ql.txt"
qplot_cmd = python_path + "qplot.py -i " + cfn_filename + " -o " + ql_filename + " -s " + \
            sol_filename + " --divmin 1" + cpd_str
os.system(qplot_cmd)
with open(ql_filename, 'r') as ql_file:
    lines = ql_file.readlines()
qbest = float(lines[-1].split(' ')[1])
print("qbest " + str(qbest))
"""
#######################################
############ Supergradient ############
#######################################
def read_ql_list(output_filename):
    """Parse a .lag output file and return (xbest, ql_list, lbest).

    Expected layout, counted from the end of the file:
      * last line      : "[q1, q2, ...]" -> dual value per iteration
      * third-to-last  : "[x1, x2, ...]" -> best assignment found
      * fourth-to-last : line whose 4th space-separated token is lbest
    """
    with open(output_filename, 'r') as handle:
        content = handle.readlines()
    best_line = content[-3][1:-2]          # strip '[' and ']\n'
    xbest = [int(token) for token in best_line.split(', ')]
    lbest = float(content[-4].split(" ")[3])
    dual_line = content[-1]
    ql_list = [float(token) for token in dual_line[1:-1].split(', ')]
    return (xbest, ql_list, lbest)
# Ordered list of variable names of the cost-function network.
vars = list(cfn['variables'].keys())


def step_plot(step, params, l, divmin, niter):
    """Run the Lagrangian solver for every step parameter and plot the duals.

    For each h in `params`, invokes divmin_lagrangian.py with step rule `step`,
    parses its .lag output, computes the diversity D between the GMEC and the
    best assignment, the corresponding energy E, and plots the dual values.
    The figure is saved as <name>_<step>_divmin<divmin>.png.
    Relies on the module-level globals set up above (name, cfn, sol, args, ...).
    """
    for h in params:
        output_filename = name + "_" + step + "_h" + str(h) + "2.lag"
        cmd = python_path + "divmin_lagrangian.py -i " + cfn_filename + " -o " + output_filename + \
              " -s " + sol_filename + " -l " + str(l) + " --divmins " + str(divmin) + \
              " --niter " + str(niter) + " --step " + step + " --stepparam " \
              + str(h) + cpd_str
        os.system(cmd)
        (xbest, ql_list, lbest) = read_ql_list(output_filename)
        # Compute diversity measure between the first solution and xbest
        div = 0
        for var_index, v in enumerate(vars):
            if args.cpd:
                # CPD mode compares amino-acid labels through the similarity matrix.
                div += dissim(cfn['variables'][v][sol[var_index]][0], cfn['variables'][v][xbest[var_index]][0], AAs,
                              msim)
            else:
                div += dissim(sol[var_index], xbest[var_index], None, None)
        # Lagrangian value at the last iterate: q + lambda * (D - divmin).
        E = ql_list[-1] + lbest * (div - divmin)
        plt.plot(ql_list, label=f'{step} {h}\n(D,E)= ({div}, {E:.4})')
    # Horizontal reference line at the optimum cstar.
    plt.plot([0, niter], [cstar, cstar], label="cstar " + str(cstar))
    plt.legend()
    plt.title(f'{name}, {step}, divmin {divmin}')
    plt.xlabel('Number of iterations t')
    plt.ylabel("Best dual value qbest_t")
    plt.savefig(name + "_" + step + "_divmin" + str(divmin))
    plt.close()
"""
step_plot("cst_stepsize", [0.05, 0.01, 0.005, 0.001], 0, divmin, args.niter)
step_plot("cst_steplength", [0.05, 0.01, 0.005, 0.001], 0, divmin, args.niter)
step_plot("squaresum_stepsize", [0.1, 1, 10], 0, divmin, args.niter)
step_plot("nonsum_stepsize", [0.1, 1, 10], 0, divmin, args.niter)
step_plot("nonsum_steplength", [0.1, 1, 10], 0, divmin, args.niter)
step_plot("polyak", [0.1, 1, 10], 0, divmin, args.niter)
"""
step_plot("squaresum_stepsize", [0.1], 0, divmin, args.niter) | StarcoderdataPython |
1639776 | from api.models import (
Account,
Asset,
CashAsset,
ModelSerializerFactory,
ModelViewSetFactory,
Position,
Share,
StockAsset,
)
from django.contrib.auth.models import User
from django.urls import include, path
from rest_framework import routers
# One CRUD ViewSet per model, generated through the factory helpers
# imported from api.models; the router derives the URL names automatically.
router = routers.DefaultRouter()
router.register(r"users", ModelViewSetFactory(User, ModelSerializerFactory(User)))
router.register(
    r"accounts", ModelViewSetFactory(Account, ModelSerializerFactory(Account))
)
router.register(
    r"positions", ModelViewSetFactory(Position, ModelSerializerFactory(Position))
)
router.register(r"assets", ModelViewSetFactory(Asset, ModelSerializerFactory(Asset)))
router.register(
    r"stock_assets", ModelViewSetFactory(StockAsset, ModelSerializerFactory(StockAsset))
)
router.register(
    r"cash_assets", ModelViewSetFactory(CashAsset, ModelSerializerFactory(CashAsset))
)
router.register(r"shares", ModelViewSetFactory(Share, ModelSerializerFactory(Share)))

# All API endpoints live at the site root, with routes generated by the router.
urlpatterns = [
    path("", include(router.urls)),
]
| StarcoderdataPython |
121485 | <filename>algorithms.py<gh_stars>1-10
from grid import *
from numpy.random import randint
def dijkstra(draw, grid, start, end):
    """Dijkstra's shortest-path search, animated via `draw`.

    Assumes a square grid of Spot nodes with unit edge weights (d is sized
    len(grid) x len(grid)).  Colours expanded nodes, then draws the path with
    reconstruct_path when `end` is reachable.  Returns True iff a path exists.
    """
    print("started")
    inf = 100000                      # effectively infinity for this grid size
    d = np.empty((len(grid), len(grid)))
    d.fill(inf)
    count = 0                         # tie-breaker so Spot objects are never compared
    q = PriorityQueue()
    q.put((0, count, start))
    d[start.get_pos()[0]][start.get_pos()[1]] = 0
    came_from = {}                    # child -> parent, for path reconstruction
    while not q.empty():
        cur_d, _, v = q.get()
        # Skip stale queue entries for nodes already settled with a smaller distance.
        if cur_d > d[v.get_pos()[0]][v.get_pos()[1]]:
            continue
        for to in v.neighbors:
            if d[v.get_pos()[0]][v.get_pos()[1]] + 1 < d[to.get_pos()[0]][to.get_pos()[1]]:
                d[to.get_pos()[0]][to.get_pos()[1]] = d[v.get_pos()[0]][v.get_pos()[1]] + 1
                came_from[to] = v
                count += 1
                # BUG FIX: push the *positive* distance.  The previous negative
                # priority turned the min-heap into worst-first order and made
                # the stale-entry check above never trigger.
                q.put((d[to.get_pos()[0]][to.get_pos()[1]], count, to))
        draw()
        if v != start:
            v.make_closed()
    # BUG FIX: reachability must be tested at `end`; d[start] is always 0, so
    # the old check called reconstruct_path even for unreachable targets.
    if d[end.get_pos()[0]][end.get_pos()[1]] == inf:
        return False
    reconstruct_path(came_from, end, draw)
    end.make_end()
    return True
def dfs(draw, grid, start, end):
    """Depth-first search from start to end, animated via `draw`.

    Returns True (and draws the path via reconstruct_path) when `end` is
    reached, False when the whole reachable component was explored.
    """
    used = np.zeros((len(grid), len(grid)))  # 1 = node already discovered
    came_from = {}                           # child -> parent, for path reconstruction
    stack = [start]
    used[start.get_pos()[0]][start.get_pos()[1]] = 1
    while stack:
        v = stack.pop()
        if v == end:
            reconstruct_path(came_from, end, draw)
            end.make_end()
            return True
        for to in v.neighbors:
            if not used[to.get_pos()[0]][to.get_pos()[1]]:
                # BUG FIX: mark the *neighbour* as discovered; the old code
                # marked the current node `v`, so nodes were pushed repeatedly
                # and came_from entries could be overwritten.
                used[to.get_pos()[0]][to.get_pos()[1]] = 1
                came_from[to] = v
                stack.append(to)
        draw()
        if v != start:
            v.make_closed()
    return False
def bfs(draw, grid, start, end):
    """Breadth-first search from start to end on a grid of Spot nodes.

    Calls `draw` after expanding each node so the search can be animated.
    Returns True (after drawing the path with reconstruct_path) when `end`
    is reached, False when the reachable component is exhausted.
    """
    #print(start)
    used = np.zeros((len(grid), len(grid)))   # 1 = node already enqueued
    d = np.empty((len(grid), len(grid)))      # BFS distance from start
    q = Queue()
    q.put(start)
    used[start.get_pos()[0]][start.get_pos()[1]] = 1
    d[start.get_pos()[0]][start.get_pos()[1]] = 0
    came_from = {}                            # child -> parent, for path reconstruction
    while not q.empty():
        # Keep the pygame window responsive while the animation runs.
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
        v = q.get()
        if v == end:
            reconstruct_path(came_from, end, draw)
            end.make_end()
            return True
        for to in v.neighbors:
            if not used[to.get_pos()[0]][to.get_pos()[1]]:
                used[to.get_pos()[0]][to.get_pos()[1]] = 1
                q.put(to)
                #print(to.get_pos()[0])
                d[to.get_pos()[0]][to.get_pos()[1]] = d[v.get_pos()[0]][v.get_pos()[1]] + 1
                came_from[to] = v
        draw()
        if v != start:
            v.make_closed()
    return False
def algorithm(draw, grid, start, end): # A* search
    """A* search from start to end using the Manhattan heuristic `h`.

    Returns True (and draws the path) if `end` is reachable, else False.
    """
    #print("pressed")
    count = 0                 # insertion counter: breaks f-score ties in the heap
    open_set = PriorityQueue()
    open_set.put((0, count, start))
    came_from = {}
    # g = cost from start, f = g + heuristic; every spot starts at infinity.
    g_score = {spot: float("inf") for row in grid for spot in row}
    g_score[start]=0
    f_score = {spot: float("inf") for row in grid for spot in row}
    f_score[start]=h(start.get_pos(), end.get_pos())
    open_set_hash = {start}   # mirrors the queue contents for O(1) membership tests
    while not open_set.empty():
        # Keep the pygame window responsive while the animation runs.
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
        current = open_set.get()[2]
        open_set_hash.remove(current)
        if current == end:
            reconstruct_path(came_from, end, draw)
            end.make_end()
            return True
        for neighbor in current.neighbors:
            temp_g_score = g_score[current]+1     # unit edge weights
            if temp_g_score < g_score[neighbor]:
                came_from[neighbor] = current
                g_score[neighbor] = temp_g_score
                f_score[neighbor] = temp_g_score + h(neighbor.get_pos(), end.get_pos())
                if neighbor not in open_set_hash:
                    count +=1
                    open_set.put((f_score[neighbor], count, neighbor))
                    open_set_hash.add(neighbor)
                    #neighbor.make_open()
        draw()
        if current != start:
            current.make_closed()
    return False
def h(p1, p2): # heuristic
    """Manhattan distance between two grid coordinates (A* heuristic)."""
    (r1, c1), (r2, c2) = p1, p2
    return abs(r1 - r2) + abs(c1 - c2)
def maze_gen(draw, grid, start, end): # maze generation loop
    """Carve a random maze into `grid`, redrawing after every step.

    Repeatedly delegates to generate_step until it reports completion,
    remembering each visited position so dead ends can backtrack.
    """
    cursor = (0, 0)
    visited = [cursor]
    backtrack = 0
    finished = False
    while not finished:
        draw()
        cursor, backtrack, finished = generate_step(grid, cursor, visited, backtrack)
        if cursor not in visited:
            visited.append(cursor)
def generate_step(grid, last_pos, pos_history, back_step):
    """Advance maze carving by one step from `last_pos`.

    Picks a random valid two-cell move (wall cell + corridor cell); on a dead
    end it backtracks through `pos_history`.  Returns a tuple
    (new_pos, back_step, done) where done is True once carving is finished.
    """
    (x, y) = last_pos
    grid[x][y].make_barrier()
    grid_dim = (len(grid), len(grid))
    possible_steps = possible_next_steps(grid_dim, last_pos)
    #print(f"Position: {last_pos}")
    #print(f"Possible steps: {possible_steps}")
    # Keep only moves whose both cells are still untouched (not barrier/start/end).
    valid_steps = []
    for step in possible_steps:
        (x1, y1) = step[0]
        (x2, y2) = step[1]
        not_barrier = (not grid[x1][y1].is_barrier()) & (not grid[x2][y2].is_barrier())
        not_start = (not grid[x1][y1].is_start()) & (not grid[x2][y2].is_start())
        not_end = (not grid[x1][y1].is_end()) & (not grid[x2][y2].is_end())
        if bool(not_barrier * not_start * not_end):
            valid_steps.append(step)
    #print(f"Valid steps: {valid_steps}")
    if len(valid_steps) == 0:  # if it is a dead end
        # Step back through the history; done once we are back at the origin.
        last_pos = pos_history[-2 - back_step]
        if last_pos == (0, 0):
            print("finished")
            return last_pos, back_step, True
        return last_pos, back_step + 1, False
    back_step = 0  # a fresh move resets the backtracking depth
    # With a single candidate skip the RNG call (preserves the random stream).
    index = 0 if len(valid_steps) == 1 else randint(0, len(valid_steps))
    (x1, y1), (x2, y2) = valid_steps[index]
    grid[x1][y1].make_barrier()
    # BUG FIX: the multi-candidate branch previously read `make_open` without
    # calling it, so the chosen corridor cell was never actually opened.
    grid[x2][y2].make_open()
    return (x2, y2), back_step, False
def possible_next_steps(grid_dim, last_pos):
    """Enumerate the candidate two-cell maze moves from `last_pos`.

    For each cardinal direction (right, left, down, up in grid coordinates),
    returns the pair [(one step away), (two steps away)] whenever both cells
    lie inside a grid of size `grid_dim`.
    """
    x_pos, y_pos = last_pos
    candidates = []
    for dx, dy in ((0, 1), (0, -1), (1, 0), (-1, 0)):
        near = (x_pos + dx, y_pos + dy)
        far = (x_pos + 2 * dx, y_pos + 2 * dy)
        if is_in_map(near, grid_dim) and is_in_map(far, grid_dim):
            candidates.append([near, far])
    return candidates


def is_in_map(pos, grid_dim):
    """Return True when grid coordinate `pos` lies inside a grid of size `grid_dim`."""
    (max_x, max_y) = grid_dim
    (x, y) = pos
    return 0 <= x < max_x and 0 <= y < max_y
5763 | <gh_stars>0
"""Functional authentication tests with fake MRP Apple TV."""
import inspect
from aiohttp.test_utils import AioHTTPTestCase, unittest_run_loop
import pyatv
from pyatv import exceptions
from pyatv.const import Protocol
from pyatv.conf import MrpService, AppleTV
from pyatv.mrp.server_auth import PIN_CODE, CLIENT_IDENTIFIER, CLIENT_CREDENTIALS
from tests.fake_device import FakeAppleTV
class MrpAuthFunctionalTest(AioHTTPTestCase):
    """Functional pairing/authentication tests against a fake MRP Apple TV."""

    def setUp(self):
        # Build an AppleTV config pointing at the fake device created in
        # get_application (port is assigned dynamically by the fake server).
        AioHTTPTestCase.setUp(self)
        self.service = MrpService(
            CLIENT_IDENTIFIER, self.fake_atv.get_port(Protocol.MRP)
        )
        self.conf = AppleTV("127.0.0.1", "Apple TV")
        self.conf.add_service(self.service)

    async def tearDownAsync(self):
        # `close` may be a plain method or a coroutine depending on the handle.
        if inspect.iscoroutinefunction(self.handle.close):
            await self.handle.close()
        else:
            self.handle.close()
        await super().tearDownAsync()

    async def get_application(self, loop=None):
        # Spin up the fake Apple TV aiohttp application exposing the MRP service.
        self.fake_atv = FakeAppleTV(self.loop)
        self.state, self.usecase = self.fake_atv.add_service(Protocol.MRP)
        return self.fake_atv.app

    @unittest_run_loop
    async def test_pairing_with_device(self):
        # Fresh pairing: entering the correct PIN must yield credentials.
        self.handle = await pyatv.pair(self.conf, Protocol.MRP, self.loop)
        self.assertIsNone(self.service.credentials)
        self.assertTrue(self.handle.device_provides_pin)
        await self.handle.begin()
        self.handle.pin(PIN_CODE)
        await self.handle.finish()
        self.assertTrue(self.handle.has_paired)
        self.assertTrue(self.state.has_paired)
        self.assertIsNotNone(self.service.credentials)

    @unittest_run_loop
    async def test_pairing_with_existing_credentials(self):
        # Re-pairing with stored credentials still walks the PIN flow.
        self.service.credentials = CLIENT_CREDENTIALS
        self.handle = await pyatv.pair(self.conf, Protocol.MRP, self.loop)
        self.assertFalse(self.handle.has_paired)
        self.assertIsNotNone(self.service.credentials)
        self.assertTrue(self.handle.device_provides_pin)
        await self.handle.begin()
        self.handle.pin(PIN_CODE)
        await self.handle.finish()
        self.assertTrue(self.handle.has_paired)
        self.assertTrue(self.state.has_paired)
        self.assertIsNotNone(self.service.credentials)

    @unittest_run_loop
    async def test_pairing_with_bad_pin(self):
        # A wrong PIN must raise PairingError and leave both sides unpaired.
        self.handle = await pyatv.pair(self.conf, Protocol.MRP, self.loop)
        self.assertIsNone(self.service.credentials)
        self.assertTrue(self.handle.device_provides_pin)
        await self.handle.begin()
        self.handle.pin(PIN_CODE + 1)
        with self.assertRaises(exceptions.PairingError):
            await self.handle.finish()
        self.assertFalse(self.handle.has_paired)
        self.assertFalse(self.state.has_paired)
        self.assertIsNone(self.service.credentials)

    @unittest_run_loop
    async def test_authentication(self):
        # Connecting with valid credentials authenticates against the fake device.
        self.service.credentials = CLIENT_CREDENTIALS
        self.handle = await pyatv.connect(self.conf, self.loop)
        self.assertTrue(self.state.has_authenticated)
| StarcoderdataPython |
3379896 | <filename>checksec.py
#!/usr/bin/env python3
import json
import sys
import asyncio
import os
from pathlib import Path
from urllib import request
# PE files
winchecksec_required_all = {
"aslr": "Present", # randomised virtual memory layouts
"dynamicBase": "Present", # enables the program to by loaded anywhere in memory (PIC)
"gs": "Present", # stack protection
"nx": "Present", # this binary can run with stack set to non executable etc. (DEP)
}
# Removed this flag since it is only valid for 64 bit binaries and it is on by default anyhow
# "highEntropyVA": "Present", # high entropy virtual address support , better ASLR
winchecksec_should_all = {
"cfg": "Present", # binary contain map on where it is allowed to jmp/ret, CFG
"seh": "Present", # structured exception handlers
}
# ELF files
checksec_required_all = {
"relro": "full", # Relocation Read-Only, makes some binary sections read-only (like the GOT)
"canary": "yes", # stack protections
"nx": "yes", # supports non executable mem segments
}
# ELF executables
checksec_required_exe = {
"pie": "yes" # code can be loaded randomly in memory: openbsd.org/papers/nycbsdcon08-pie
}
# ELF files in release mode
checksec_should_release = {
# only check if CMAKE_BUILD_TYPE=Release
"fortify_source": "yes" # fortify should be on but only works for release binaries
}
checksec_should_all = {
"rpath": "no", # rpath is dangerous but only a warning
"runpath": "no", # runpath is dangerous but only a warning
}
gh_token = os.getenv("GITHUB_TOKEN")
gh_comment_url = os.getenv("GITHUB_COMMENT_URL")
def post_pr_comment(msg):
    """Post `msg` as a GitHub PR comment; fall back to stdout when creds are missing."""
    credentials_missing = gh_token in (None, "") or gh_comment_url in (None, "")
    if credentials_missing:
        print("[x] no GITHUB_TOKEN or GITHUB_COMMENT_URL env, printing to log:")
        print(msg)
        return
    payload = bytes(json.dumps({"body": msg}), encoding="utf-8")
    req = request.Request(gh_comment_url, data=payload)
    req.add_header("Content-Type", "application/json")
    req.add_header("Authorization", f"token {gh_token}")
    request.urlopen(req)
def verify_pe(file, output, exe=True):
    """Check winchecksec JSON output against the PE mitigation policy.

    `output` is the raw JSON emitted by winchecksec for `file`.
    Returns (errors, warnings): lists of markdown-formatted messages for
    required mitigations that failed and recommended ones that failed.
    """
    o = json.loads(output)
    errors = []
    warnings = []
    try:
        for key in winchecksec_required_all:
            if o["mitigations"][key]["presence"] != winchecksec_required_all[key]:
                errors.append(
                    f":no_entry: failed {key} check ({o['mitigations'][key]['description']})"
                )
    # BUG FIX: the exception variable was named `e`, shadowing the error list;
    # `e.append` then raised AttributeError and `e` was unbound after the clause.
    except Exception as exc:
        errors.append(f"Failed checking for {key}: {str(exc)}")
    try:
        for key in winchecksec_should_all:
            if o["mitigations"][key]["presence"] != winchecksec_should_all[key]:
                warnings.append(
                    f":warning: failed {key} check ({o['mitigations'][key]['description']})"
                )
    except Exception as exc:
        warnings.append(f"Failed checking for {key}: {str(exc)}")
    return (errors, warnings)
def verify_elf(file, output, exe=True):
    """Check checksec.sh JSON output against the ELF policy.

    `output` is the raw JSON emitted by checksec.sh for `file`; `exe=True`
    additionally applies the executable-only checks (e.g. PIE).
    Returns (errors, warnings): lists of markdown-formatted messages.
    """
    o = list(json.loads(output).values())[0]
    errors = []
    warnings = []
    try:
        for key in checksec_required_all:
            if o[key] != checksec_required_all[key]:
                errors.append(f":no_entry: failed {key} check")
        if exe:
            for key in checksec_required_exe:
                if o[key] != checksec_required_exe[key]:
                    errors.append(f":no_entry: failed {key} check")
    # BUG FIX: the exception variable was named `e`, shadowing the error list;
    # `e.append` then raised AttributeError and `e` was unbound after the clause.
    except Exception as exc:
        errors.append(f"Failed checking for {key}: {str(exc)}")
    try:
        for key in checksec_should_all:
            if o[key] != checksec_should_all[key]:
                warnings.append(f":warning: failed {key} check")
    except Exception as exc:
        warnings.append(f"Failed checking for {key}: {str(exc)}")
    return (errors, warnings)
async def checksec(file_tuple_tuple):
    """Run the platform-appropriate checksec tool on one binary.

    `file_tuple_tuple` is ((orig_file, resolved_path), is_executable).
    Returns (rc, errors, warnings, orig_file, flag_table) where rc is 1 when
    the tool itself failed, and flag_table is a markdown table of all flags
    (only populated when CHECKSEC_VERBOSE=on).
    """
    # Tools are shipped next to this script (GitHub-action layout).
    action_home = Path(sys.argv[0]).resolve().parent
    (orig_file, file), exe = file_tuple_tuple
    if os.getenv("OS") == "Windows_NT":
        # Windows: run winchecksec from its bundled x64 directory.
        wincheckdir = action_home / "winchecksec" / "x64"
        os.chdir(wincheckdir)
        proc = await asyncio.create_subprocess_exec(
            "./winchecksec.exe",
            "-j",
            file,
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        stdout, stderr = await proc.communicate()
        print(f"[cmd exited with {proc.returncode}]")
        if proc.returncode != 0:
            m = f"**winchecksec failed for {orig_file}** :x:\n\n{stderr.decode('utf-8')}"
            return (1, [m], [], orig_file, [])
        else:
            e, w = verify_pe(file, stdout, exe)
    else:
        # Other platforms: run the bundled checksec.sh with JSON output.
        proc = await asyncio.create_subprocess_exec(
            action_home / "checksec.sh/checksec",
            "--output=json",
            f"--file={file}",
            stdout=asyncio.subprocess.PIPE,
            stderr=asyncio.subprocess.PIPE,
        )
        stdout, stderr = await proc.communicate()
        print(f"[cmd exited with {proc.returncode}]")
        if proc.returncode != 0:
            m = f"**checksec.sh failed for {orig_file}** :x:\n\n{stderr.decode('utf-8')}"
            return (1, [m], [], orig_file, [])
        else:
            e, w = verify_elf(file, stdout, exe)
    # Optional verbose markdown table listing every reported flag and its status.
    msg = []
    if os.getenv("CHECKSEC_VERBOSE") == "on":
        msg = ["| Flag | Status |", "| :------------- | -----------: |"]
        if os.getenv("OS") == "Windows_NT":
            o = json.loads(stdout)["mitigations"]
            msg += [f"|{k}|{o[k]['presence']}|" for k in o]
        else:
            o = list(json.loads(stdout).values())[0]
            msg += [f"|{k}|{o[k]}|" for k in o]
        msg += ["\n"]
    return (0, e, w, orig_file, msg)
async def main():
    """Run checksec over all listed binaries and post a summary PR comment.

    Reads target paths from `.executables` and `.libraries` (one per line;
    the boolean marks executables), checks each concurrently, and returns
    the number of failed tool invocations (0 on full success).
    """
    files = []
    paths = []
    cwd = Path(os.getcwd())
    if os.path.exists(".executables"):
        with open(".executables", "r") as r:
            paths += [(f, True) for f in r.read().splitlines()]
    if os.path.exists(".libraries"):
        with open(".libraries", "r") as r:
            paths += [(f, False) for f in r.read().splitlines()]
    print(paths)
    for (p, t) in paths:
        # Absolute paths are used as-is, relative ones resolved against cwd.
        if p.startswith("/"):
            file = Path(p)
        else:
            file = cwd / p
        if not file.exists():
            if os.getenv("CHECKSEC_VERBOSE") == "on":
                # BUG FIX: str.replace returns a new string -- the result was
                # previously discarded, so the message kept backslashes.
                p = p.replace("\\", "/")
                m = f"**There is no file called {str(p)}, ignoring it** :thinking:"
                post_pr_comment(m)
        else:
            files.append(((p, str(file)), t))
    print(f"files: {files}")
    exit_value = 0
    msg = []
    # Fan out the checks concurrently and assemble one combined comment.
    for (r, e, w, f, flag_table) in await asyncio.gather(*[checksec(x) for x in files]):
        if r != 0:
            exit_value += r
        if len(e) != 0 or len(w) != 0:
            msg += [f"**checksec issues with {f}**:"]
            if len(e):
                msg += ["**errors:**"]
                msg += [f"{m}" for m in e]
            if len(w):
                msg += ["**warnings:**"]
                msg += [f"{m}" for m in w]
        if len(flag_table):
            if len(e) == 0 and len(w) == 0:
                msg += [f"**checksec results for {f}** :heavy_check_mark:"]
            msg += flag_table
    if len(msg):
        post_pr_comment("\n".join(msg))
    return exit_value


# BUG FIX: propagate the failure count as the process exit code; previously
# the return value of main() was silently discarded by asyncio.run.
sys.exit(asyncio.run(main()))
| StarcoderdataPython |
119192 | <reponame>Stranger6667/Flask-Postmark<filename>test/conftest.py
import pytest
from flask import Flask, json, request
from flask_postmark import Postmark
@pytest.fixture
def app(server_token, postmark_request):
    """Flask app wired to Postmark, with routes that echo the mocked request payload."""
    app = Flask(__name__)
    app.config["POSTMARK_SERVER_TOKEN"] = server_token
    app.config["JSONIFY_PRETTYPRINT_REGULAR"] = False
    postmark = Postmark(app)

    def make_response():
        # Echo back the JSON body of the first call captured by the request mock.
        return json.dumps(postmark_request.mock_calls[0][2]["json"])

    @app.route("/token", methods=["GET"])
    def token():
        return postmark.client.server_token

    @app.route("/send", methods=["POST"])
    def send():
        data = request.get_json()
        postmark.send(**data)
        return make_response()

    @app.route("/is_same_client", methods=["POST"])
    def is_same_client():
        # Verifies the client property returns a stable (cached) instance.
        return json.dumps(postmark.client is postmark.client)

    @app.route("/send_batch", methods=["POST"])
    def send_batch():
        data = request.get_json()
        postmark.send_batch(*data)
        return make_response()

    return app
@pytest.fixture
def server_token():
    """Dummy Postmark server token (bytes) consumed by the app fixture."""
    return b"Foo"
@pytest.fixture
def test_client(app):
    """Flask test client bound to the configured app fixture."""
    return app.test_client()
@pytest.fixture
def post(test_client):
    """Helper fixture: POST `data` as JSON to `url` and return the decoded response."""
    def inner(url, data=None):
        response = test_client.post(url, data=json.dumps(data), content_type="application/json")
        return json.loads(response.data)
    return inner
| StarcoderdataPython |
3268932 | <reponame>KopelmanLab/au_nanosnake_dda
import os
import time
import subprocess
import argparse
import pickle
def main():
    """Run ddscat inside wavelengths_<dirno>, record timing, and move results up.

    Expects the directory wavelengths_<dirno> to already exist and contain a
    ddscat binary plus a wave.pickle with the wavelength range for this run.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('dirno', type=int)
    args = parser.parse_args()
    os.chdir('wavelengths_{}'.format(args.dirno))
    time_taken = None
    start_time = time.time()
    # Run the solver, capturing stdout/stderr into log.out.
    with open('log.out', 'w') as log:
        subprocess.call('./ddscat', shell=True, stdout=log, stderr=log)
    end_time = time.time()
    time_taken = end_time - start_time
    # wave.pickle holds the wavelength range this directory was prepared for.
    with open('wave.pickle', 'rb') as wave:
        ex = pickle.load(wave)
    s = {
        'start_wavelength': ex['start_wavelength'],
        'end_wavelength': ex['end_wavelength'],
        'start_time': start_time,
        'end_time': end_time,
        'time_taken': time_taken,
    }
    # Persist run metadata next to the solver output.
    with open('log.pickle', 'wb') as log:
        pickle.dump(s, log)
    # Move the ddscat result tables to the parent directory, tagged by dirno.
    subprocess.call('mv qtable ../qtable_{}'.format(args.dirno), shell=True)
    subprocess.call('mv qtable2 ../qtable2_{}'.format(args.dirno), shell=True)
    subprocess.call('mv mtable ../mtable_{}'.format(args.dirno), shell=True)


main()
| StarcoderdataPython |
3291928 | """Mock AiiDA database"""
import os
from datetime import datetime as timezone
from sqlalchemy import (
Column,
DateTime,
ForeignKey,
Integer,
String,
create_engine,
event,
)
from sqlalchemy.orm import declarative_base, relationship, sessionmaker
from sqlalchemy.orm.session import Session
from sqlalchemy.sql.expression import text
# Declarative base shared by all mock tables.
Base = declarative_base()

# Process states considered final; see Node.is_terminated.
TERMINATED_STATES = ("finished", "excepted", "killed")


class Node(Base):
    """Mock implementation of the node."""

    __tablename__ = "db_dbnode"

    id = Column(Integer, primary_key=True)
    # NOTE: `timezone` is an alias for datetime (see the import at the top),
    # so the default/onupdate callable is datetime.now.
    mtime = Column(DateTime(timezone=True), default=timezone.now, onupdate=timezone.now)
    status = Column(String(36), default="created", nullable=False)

    @property
    def is_terminated(self):
        # True once the process reached one of the final states.
        return self.status in TERMINATED_STATES
class ProcessSchedule(Base):
    """A new table, which stores information about running processes."""

    __tablename__ = "db_dbprocess"

    id = Column(Integer, primary_key=True)
    # Last-modified timestamp, refreshed automatically on every update.
    mtime = Column(DateTime(timezone=True), default=timezone.now, onupdate=timezone.now)

    # TODO currently the process record is deleted when the process node is deleted
    # but if the process is already being run by a worker,
    # it will continue until it excepts, because it cannot update the node
    # alternatively, we could maybe set `ondelete="SET NULL"`
    # and have the server handle killing the process, before removing the record
    dbnode_id = Column(
        Integer,
        ForeignKey("db_dbnode.id", ondelete="CASCADE"),
        nullable=False,
        unique=True,
    )
    node = relationship("Node")

    # an action that has been requested for this process: pause | play | kill
    action = Column(String(255), nullable=True)

    # the identifiers for the worker running the process (if assigned)
    # we use an additional uuid, generated by the worker, to guard against pid re-use
    worker_pid = Column(Integer, nullable=True)
    worker_uuid = Column(String(36), nullable=True)
def get_session(path: str) -> Session:
    """Return a new session to connect to the SQLite DB (created if missing).

    The engine is configured for WAL journaling and explicit BEGIN handling;
    tables are created on first use of a new database file.
    """
    create = not os.path.exists(path)
    engine = create_engine(f"sqlite:///{path}", future=True)

    # For the next two bindings, see background on
    # https://docs.sqlalchemy.org/en/13/dialects/sqlite.html#serializable-isolation-savepoints-transactional-ddl

    @event.listens_for(engine, "connect")
    def do_connect(dbapi_connection, _):
        """Hook function that is called upon connection.

        It modifies the default behavior of SQLite to use WAL and to
        go back to the 'default' isolation level mode.
        """
        # disable pysqlite's emitting of the BEGIN statement entirely.
        # also stops it from emitting COMMIT before any DDL.
        dbapi_connection.isolation_level = None
        # Open the file in WAL mode (see e.g. https://stackoverflow.com/questions/9671490)
        # This allows to have as many readers as one wants, and a concurrent writer (up to one)
        # Note that this writes on a journal, on a different packs.idx-wal,
        # and also creates a packs.idx-shm file.
        # Note also that when the session is created, you will keep reading from the same version,
        # so you need to close and reload the session to see the newly written data.
        # Docs on WAL: https://www.sqlite.org/wal.html
        cursor = dbapi_connection.cursor()
        cursor.execute("PRAGMA journal_mode=wal;")
        cursor.close()

    # For this binding, see background on
    # https://docs.sqlalchemy.org/en/13/dialects/sqlite.html#serializable-isolation-savepoints-transactional-ddl
    @event.listens_for(engine, "begin")
    def do_begin(conn):  # pylint: disable=unused-variable
        # emit our own BEGIN
        conn.execute(text("BEGIN"))

    if create:
        # Create all tables in the engine. This is equivalent to "Create Table"
        # statements in raw SQL.
        Base.metadata.create_all(engine)

    # Bind the engine to the metadata of the Base class so that the
    # declaratives can be accessed through a DBSession instance
    Base.metadata.bind = engine

    # We set autoflush = False to avoid to lock the DB if just doing queries/reads
    session = sessionmaker(
        bind=engine, autoflush=False, autocommit=False, future=True
    )()
    return session
| StarcoderdataPython |
125504 | <gh_stars>0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import torch
import torch.nn.functional
import numpy as np
def conv2d_size_out(size, kernel_size=4, stride=2):
    """Output length of one spatial dimension after a conv (no padding/dilation)."""
    return (size - kernel_size) // stride + 1
class QNetwork(torch.nn.Module):
    """Convolutional Q-network mapping an image observation to one Q-value per action."""

    def __init__(self, obs_shape, act_shape):
        # obs_shape is (height, width, channels); act_shape is the number of actions.
        super(QNetwork, self).__init__()
        h, w, c = obs_shape
        self.conv_0 = torch.nn.Conv2d(in_channels=c, out_channels=16, kernel_size=4, stride=2, padding=0)
        self.conv_1 = torch.nn.Conv2d(in_channels=16, out_channels=32, kernel_size=2, stride=2, padding=0)
        # conv_w = conv2d_size_out(conv2d_size_out(w))
        # conv_h = conv2d_size_out(conv2d_size_out(h))
        # input_shape = conv_w * conv_h * 32
        self.fc_0 = torch.nn.Linear(32, 64)
        # self.fc_0 = torch.nn.Linear(obs_shape, 64)
        # self.fc_1 = torch.nn.Linear(64, 64)
        # out_shape = conv_w * conv_h * 32
        self.out = torch.nn.Linear(64, act_shape)

    def forward(self, x):
        # x arrives channels-last: (batch, height, width, channels).
        # x = x.view((x.size(0), -1))
        b, h, w, c = x.shape
        # NOTE(review): `view` only reinterprets memory and does NOT transpose
        # axes -- `x.permute(0, 3, 1, 2)` is likely what was intended; confirm.
        x = x.view((b, c, h, w))
        x = torch.nn.functional.relu(self.conv_0(x))
        x = torch.nn.functional.relu(self.conv_1(x))
        # NOTE(review): this flattens spatial positions into the batch dimension
        # (shape (b*h'*w', 32)); it only matches fc_0's 32 inputs exactly when
        # the conv output is 1x1 spatially -- verify the expected input size.
        x = x.view((-1, x.shape[1]))
        x = torch.nn.functional.relu(self.fc_0(x))
        # x = torch.nn.functional.relu(self.fc_0(x))
        # x = torch.nn.functional.relu(self.fc_1(x))
        x = self.out(x)
        return x
| StarcoderdataPython |
3337539 | """
Creates a neat little batch file that can be used to download Advent of Code input files and also stores the day's part 1 website for offline use
Prep work:
- get the session cookie value (e.g. from Chrome's cookies) and update it below
- create the required directories (mkdir xx from a command prompt) - i should really add this to the script!
- run this python script
- run the batch file
Don't forget to check if any of the daily inputs is included in the website and not an input file.
To do:
- instead of creating a batch file, retrieve the input directly via Python, e.g. using Requests
- create a daily downloader in Python that takes a day / year combo and creates the dir, downloads the input and creates a stub Python file
- include creating a stub Python file e.g. `aoc2020_10.py` - maybe even using a template file
"""
# Update this before running
# Configuration: session cookie and the inclusive range of puzzle days to fetch.
session_cookie = "---"
year = 2020
day_from = 10
day_to = 23

# Build one page-download and one input-download command per day, then write
# them all to the batch file in a single pass.
commands = []
for day in range(day_from, day_to + 1):
    page_url = f'https://adventofcode.com/{year}/day/{day}'
    commands.append(f'curl {page_url} --cookie "session={session_cookie}" -o {day}\\{year}_{day}_1.html')
    commands.append(f'curl {page_url}/input --cookie "session={session_cookie}" -o {day}\\input.txt')

with open('dl_aoc_batch.bat', 'w', encoding='utf-8') as f:
    f.write('\n'.join(commands) + '\n')
| StarcoderdataPython |
3323840 | """
Find Angle MBC
https://www.hackerrank.com/challenges/find-angle/problem
"""
from math import atan2, pi
# Read the two legs of the right triangle; angle MBC = atan(AB / BC),
# printed rounded to the nearest degree with a degree sign.
AB = int(input())
BC = int(input())
angle = atan2(AB, BC) * 180 / pi
print("{}°".format(round(angle)))
| StarcoderdataPython |
1602132 | <reponame>UDICatNCHU/KCM-Data-Source-Extractor
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import logging
from itertools import takewhile, count
from six.moves import zip
from dcard import api
from dcard.utils import flatten_lists
logger = logging.getLogger(__name__)
class Forum:
    """Accessor for Dcard forums: board listing and post-metadata crawling."""

    metas_per_page = 30   # API page size for post metadata
    infinite_page = -1    # sentinel: crawl until the API returns an empty page

    def __init__(self, name=None, client=None):
        self.name = None
        self.posts_meta_url = None
        self.client = client
        self._initial_forum(name)

    def __call__(self, name):
        # Allow `forum('board')` to retarget the same instance fluently.
        self._initial_forum(name)
        return self

    def get(self, no_school=False):
        """Return all forums; with no_school=True, drop school-only boards."""
        forums = self.client.get_json(api.forums_url)
        if no_school:
            return [forum for forum in self._extract_general(forums)]
        return forums

    def get_metas(
            self, num=30, sort='new', before=None, timebound=None, callback=None):
        """Collect up to `num` post-metadata entries (num < 0 means unbounded).

        Pages through the board, flattens the result, and optionally passes
        the collected list through `callback` before returning.
        """
        logger.info('<%s> 開始取得看板內文章資訊', self.name)
        paged_metas = self.get_paged_metas(sort, num, before, timebound)
        buff = flatten_lists(metas for metas in paged_metas)
        results = callback(buff) if callback else buff
        logger.info('<%s> 資訊蒐集完成,共%d筆', self.name, len(buff))
        return results

    def get_paged_metas(self, sort, num, before, timebound=''):
        """Yield pages of post metadata until the quota or the board is exhausted."""
        params = {'popular': sort == 'popular', 'before': before}
        pages = -(-num // self.metas_per_page)   # ceil(num / page size)

        def filter_metas(metas):
            # Trim the final page to exactly `num` items and apply the time bound.
            if num >= 0 and page == pages:
                metas = metas[:num - (pages - 1) * self.metas_per_page]
            if timebound:
                metas = [m for m in metas if m['updatedAt'] > timebound]
            return metas

        def eager_for_metas(bundle):
            # Keep paging while pages are non-empty and the quota is not exceeded.
            page, metas = bundle
            if num >= 0 and page == pages + 1:
                return False
            if len(metas) == 0:
                logger.warning('[%s] 已到最末頁,第%d頁!', self.name, page)
            return len(metas) != 0

        def get_single_page_metas():
            # Infinite generator of raw API pages; `params` is mutated between pulls.
            while True:
                yield self.client.get_json(self.posts_meta_url, params=params)

        paged_metas = zip(count(start=1), get_single_page_metas())
        for page, metas in takewhile(eager_for_metas, paged_metas):
            params['before'] = metas[-1]['id']   # continue after the last seen post
            metas = filter_metas(metas)
            if len(metas) == 0:
                return
            yield metas

    def _initial_forum(self, name):
        # Bind the forum name and derive its posts-metadata endpoint.
        self.name = name
        self.posts_meta_url = api.posts_meta_url_pattern.format(forum=name)

    def _extract_general(self, forums):
        # Generator over forums that are not school-restricted.
        return (forum for forum in forums if not forum['isSchool'])
| StarcoderdataPython |
1656607 | import sys
FILE = sys.stdin
# FILE = open('sample.in')

# For each test case: the optimal walk visits the leftmost and rightmost
# store and returns, i.e. twice the span of the positions.
test_cases = range(int(FILE.readline()))
for tc in test_cases:
    # number of stores to visit (read to consume the line; value itself unused)
    n = int(FILE.readline().strip())
    # positions on Long street
    x = list(map(int, FILE.readline().strip().split()))
    print(2 * (max(x) - min(x)))
| StarcoderdataPython |
29525 |
class KeyBoardService():
    """Base/interface class for keyboard-input services.

    All methods are no-op stubs; concrete implementations are expected to
    override them.
    """

    def __init__(self):
        pass

    def is_key_pressed(self, *keys):
        """Report whether the given key(s) are pressed (stub; subclass defines semantics)."""
        pass

    def is_key_released(self, *key):
        """Report whether the given key(s) were released (stub; subclass defines semantics)."""
        pass
4816844 | from pylab import *
def marker(m,name):
    """Render marker style *m* on a small 256x16 strip plot and save it to
    ../figures/marker-<name>.png (Python 2 / pylab helper)."""
    size = 256,16
    dpi = 72.0
    figsize= size[0]/float(dpi),size[1]/float(dpi)
    fig = figure(figsize=figsize, dpi=dpi)
    fig.patch.set_alpha(0)  # transparent figure background
    axes([0,0,1,1],frameon=False)
    # 11 evenly spaced markers along a horizontal line
    X = np.arange(11)
    Y = np.ones(11)
    plot(X,Y,color='w', lw=1, marker=m, ms=10, mfc=(.75,.75,1,1), mec=(0,0,1,1))
    xlim(0,10)
    xticks([]), yticks([])
    print '../figures/marker-%s.png' % name
    savefig('../figures/marker-%s.png' % name, dpi=dpi)
#print"""
#.. list-table::
# :widths: 15 30 50
# :header-rows: 1
#
# * - Symbol
# - Description
# - Appearance
#"""
# Render every stock matplotlib marker style to ../figures/marker-*.png.
for m in [0,1,2,3,4,5,6,7,'o','h','_','1','2','3','4','8','p',
          '^','v','<','>','|','d',',','+','s','*','|','x']:
    if type(m) is int:
        # Integer markers (tick styles) get an 'i<N>' file name.
        marker(m, 'i%d' % m)
        #print " * - %d" % m
        #print " - "
        #print " - .. image:: figures/marker-i%d.png" % m
    else:
        marker(m,m)
        #print " * - ``%s``" % m
        #print " - "
        #print " - .. image:: figures/marker-%s.png" % m

# Symbols that would produce awkward file names get explicit aliases.
marker('D', 'dd')
marker('H', 'hh')
marker('.', 'dot')
marker(r"$\sqrt{2}$", "latex")
#print " * - ``r'$\sqrt{2}$'``"
#print " - "
#print " - .. image:: figures/marker-latex.png"
#print
| StarcoderdataPython |
1781972 | import argparse
import re
import os
import random
import json
from tqdm import tqdm
import math
import rdkit.Chem as Chem
from template.generate_retro_templates import process_an_example
from template.rdchiral.main import rdchiralRun, rdchiralReaction, rdchiralReactants
import pdb
def smi_tokenizer(smi):
    """
    Tokenize a SMILES molecule or reaction into space-separated tokens.

    Raises AssertionError when the tokens do not reassemble into the input.
    """
    token_pattern = re.compile(
        "(\[[^\]]+]|Br?|Cl?|N|O|S|P|F|I|b|c|n|o|s|p|\(|\)|\.|=|#|-|\+|\\\\|\/|:|~|@|\?|>|\*|\$|\%[0-9]{2}|[0-9])"
    )
    tokens = token_pattern.findall(smi)
    assert smi == ''.join(tokens)
    return ' '.join(tokens)
def unmapped_smiles(smiles):
    """Return the canonical SMILES of *smiles* with atom-map numbers removed.

    Returns '' when RDKit cannot parse the input.
    """
    molecule = Chem.MolFromSmiles(smiles)
    if molecule is None:
        return ''
    mapped_atoms = (a for a in molecule.GetAtoms() if a.HasProp('molAtomMapNumber'))
    for atom in mapped_atoms:
        atom.ClearProp('molAtomMapNumber')
    return Chem.MolToSmiles(molecule)
def write_rxn_smiles(src_file, tgt_file, rxn_smiles, rxn_class=None):
    """Write one tokenized (product -> reactant) training example.

    The product side becomes the source line, the reactant side the target
    line; atom maps are stripped and SMILES canonicalised before tokenizing.
    When *rxn_class* is given the source line is prefixed with a '<RX_n>' token.
    """
    reactant_part, product_part = rxn_smiles.split('>>')

    def to_unmapped_tokens(smiles):
        # Parse, drop atom-map numbers, canonicalise, then tokenize.
        mol = Chem.MolFromSmiles(smiles)
        for atom in mol.GetAtoms():
            if atom.HasProp('molAtomMapNumber'):
                atom.ClearProp('molAtomMapNumber')
        return smi_tokenizer(Chem.MolToSmiles(mol))

    tgt_tokens = to_unmapped_tokens(reactant_part)
    src_tokens = to_unmapped_tokens(product_part)
    if rxn_class is not None:
        src_tokens = ('<RX_%d> ' % rxn_class) + src_tokens

    src_file.write(src_tokens + '\n')
    tgt_file.write(tgt_tokens + '\n')
def match_smiles(source_smiles, target_smiles):
    """Return True when two '.'-separated SMILES strings describe the same
    set of component molecules (order- and duplicate-insensitive).

    Example: 'CC.O' matches 'O.CC'.
    """
    # The original length check plus subset scan is exactly set equality.
    return set(source_smiles.split('.')) == set(target_smiles.split('.'))
def main():
    """Split reaction data into train/val/test by retrosynthesis template.

    Templates extracted from the data are shuffled and split into train/test
    pools.  A reaction whose own template is a training template goes to the
    training pool; otherwise it goes to the test pool unless some training
    template already reproduces its target (such "leaky" examples are skipped).
    Tokenized SMILES pairs are written to src-*/tgt-*.txt under -output_dir.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-template_path', default='data/template_data_2/all_templates.json')
    parser.add_argument('-input_data', default='template/data_processed.csv')
    parser.add_argument('-output_dir', default='data/template_data_2')
    parser.add_argument('-template_frac', type=float, default=0.825)
    args = parser.parse_args()

    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    # Read the raw reaction SMILES (column 5 of the CSV; header skipped).
    with open(args.input_data, 'r+') as data_file:
        data = [line.strip().split(',')[4] for line in data_file.readlines()[1:]]
    print('Raw data read...')

    # Shuffle the pre-extracted templates and split into train/test pools.
    with open(args.template_path, 'r+') as template_file:
        template_list = json.load(template_file)
    random.shuffle(template_list)
    n_templates = len(template_list)
    n_train = math.ceil(n_templates * args.template_frac)
    train_templates = template_list[:n_train]
    test_templates = template_list[n_train:]
    print('N train templates: %d, N test templates: %d' % (
        len(train_templates), len(test_templates)))

    with open('%s/train_templates.json' % args.output_dir, 'w+') as train_template_file:
        json.dump(train_templates, train_template_file)
    with open('%s/test_templates.json' % args.output_dir, 'w+') as test_template_file:
        json.dump(test_templates, test_template_file)

    train_rxns, test_rxns = [], []
    n_skipped = 0
    for idx, rxn_smiles in enumerate(tqdm(data)):
        if (idx + 1) % 100 == 0:
            print('N train rxns: %d, N test rxns: %d' % (len(train_rxns), len(test_rxns)))

        tgt_smiles, src_smiles = rxn_smiles.split('>>')
        unmapped_src_smiles = unmapped_smiles(src_smiles)
        unmapped_tgt_smiles = unmapped_smiles(tgt_smiles)
        rd_rct = rdchiralReactants(unmapped_src_smiles)

        # Reactions whose own template is a training template go to train.
        template = process_an_example(rxn_smiles, super_general=True)
        if template is not None:
            template = '({})>>{}'.format(template.split('>>')[0], template.split('>>')[1])
            if template in train_templates:
                train_rxns.append((unmapped_src_smiles, unmapped_tgt_smiles))
                continue

        # Otherwise check whether any training template reproduces the target.
        # BUGFIX: 'matched' was previously only assigned inside the loop and
        # raised NameError whenever train_templates was empty.
        matched = False
        for template in train_templates:
            rd_rxn = rdchiralReaction(template)
            outcomes = rdchiralRun(rd_rxn, rd_rct, combine_enantiomers=False)
            for outcome_smiles in outcomes:
                if match_smiles(source_smiles=outcome_smiles, target_smiles=unmapped_tgt_smiles):
                    matched = True
                    break
            if matched:
                break

        if matched:
            # A training template explains this test candidate -> leaky, skip.
            n_skipped += 1
        else:
            test_rxns.append((unmapped_src_smiles, unmapped_tgt_smiles))

    # 90/10 train/val split of the training reactions.
    n_train = len(train_rxns)
    n_val = math.ceil(n_train * 0.1)
    random.shuffle(train_rxns)
    val_rxns = train_rxns[:n_val]
    train_rxns = train_rxns[n_val:]

    # One src/tgt file per split.  (Renamed loop var: 'type' shadowed a builtin.)
    outputs = {}
    for split_name in ['train', 'val', 'test']:
        for loc in ['src', 'tgt']:
            outputs['%s-%s' % (loc, split_name)] = open(
                '%s/%s-%s.txt' % (args.output_dir, loc, split_name), 'w+')

    for src_smiles, tgt_smiles in train_rxns:
        outputs['src-train'].write(smi_tokenizer(src_smiles) + '\n')
        outputs['tgt-train'].write(smi_tokenizer(tgt_smiles) + '\n')
    for src_smiles, tgt_smiles in val_rxns:
        outputs['src-val'].write(smi_tokenizer(src_smiles) + '\n')
        outputs['tgt-val'].write(smi_tokenizer(tgt_smiles) + '\n')
    for src_smiles, tgt_smiles in test_rxns:
        outputs['src-test'].write(smi_tokenizer(src_smiles) + '\n')
        outputs['tgt-test'].write(smi_tokenizer(tgt_smiles) + '\n')

    # Close the six output files (previously leaked; execution also stopped at
    # a leftover pdb.set_trace() debugging breakpoint, now removed).
    for handle in outputs.values():
        handle.close()


if __name__ == '__main__':
    main()
| StarcoderdataPython |
1660198 | <filename>config/urls.py
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from django.contrib.auth import views as auth_views
urlpatterns = [
url(r'^accounts/reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[<KEY>})/$',
auth_views.password_reset_confirm,
name='password_reset_confirm'),
url(r'^email_confirmed/', TemplateView.as_view(template_name='emails/email_confirmed.html')),
url(r'^reset/done/$', auth_views.password_reset_complete, name='password_reset_complete'),
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(settings.ADMIN_URL, include(admin.site.urls)),
url('^api/', include('config.api_urls', namespace='api')),
url(r'^accounts/', include('allauth.urls')),
url(r'^events/', include('good_spot.events.urls')),
]
if settings.USE_SILK:
urlpatterns += [
url(r'^silk/', include('silk.urls', namespace='silk'))
]
if settings.USE_DEBUG_TOOLBAR:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| StarcoderdataPython |
import smtplib
# BUGFIX: there is no 'MimeText' in the email.mime package -- the class is
# email.mime.text.MIMEText.  The old import raised ImportError at module load.
# Alias it so the existing MimeText(...) call sites keep working.
from email.mime.text import MIMEText as MimeText
class Notifiers:
    """Registry of notifier instances keyed by name, with an optional default."""

    def __init__(self):
        self._default = None
        self._notifiers = {}

    def add(self, name, kind, default=False, **kwargs):
        """Instantiate the notifier class registered under *kind* and store it.

        Raises TypeError if *default* is requested but one is already set.
        """
        instance = _known_notifiers[kind](**kwargs)
        self._notifiers[name] = instance
        if not default:
            return
        if self._default is not None:
            raise TypeError('default already set')
        self._default = instance

    @property
    def default(self):
        """The default notifier; raises TypeError when none was configured."""
        if self._default is None:
            raise TypeError('no default set')
        return self._default

    def __getitem__(self, key):
        return self._notifiers[key]
class ConsoleNotifier:
    """Notifier that reports added items on stdout."""

    def notify(self, eq):
        message = 'New stuff: {}'.format(eq.added)
        print(message)
class EmailNotifier:
    """Notifier that emails 'new stuff' reports through an SMTP server.

    *server* is a 'host:port' string; *from_addr* defaults to the login
    username when not given.
    """

    def __init__(self, server, username, password, to_addr, from_addr=None):
        server, port = server.split(':')
        self.server = server
        self.port = int(port)
        self.username = username
        self.password = password
        self.from_addr = from_addr or username
        self.to_addr = to_addr

    def notify(self, eq):
        """Send a single report of eq.added via authenticated STARTTLS SMTP."""
        # BUGFIX: the stdlib class is email.mime.text.MIMEText; the module-level
        # 'from email.mime import MimeText' is broken, so import the real class
        # locally to keep this block self-contained.
        from email.mime.text import MIMEText

        server = smtplib.SMTP(self.server, self.port)
        try:
            server.starttls()
            server.login(self.username, self.password)
            msg = MIMEText('New stuff: {}'.format(eq.added))
            msg['From'] = self.from_addr
            msg['To'] = self.to_addr
            server.send_message(msg)
        finally:
            # Always close the connection, even when auth/send fails
            # (the connection was previously leaked on error).
            server.quit()
# Maps the 'kind' string accepted by Notifiers.add() to its implementation class.
_known_notifiers = {
    'console': ConsoleNotifier,
    'email': EmailNotifier,
}
| StarcoderdataPython |
1664609 | import os
import sys
import cv2
import numpy as np
# Full-resolution drone image dimensions (height, width).
DRONE_IMAGE_SIZE = (5300, 7950)

# Per-case configuration: [image dir, drone file, roi offset, roi offset, roi h, roi w].
# NOTE(review): the two offsets feed lidar_roi_y and lidar_roi_x below, but the
# inline comment order is ambiguous -- confirm which index is x vs y.
CASE1_CONFIG = ["./images/CASE_1", "drone7.JPG", 800, 600, 4096, 4096]
CASE2_CONFIG = ["./images/CASE_2", "drone10.JPG", 409, 1200, 4096, 4096]
CASE3_CONFIG = ["./images/CASE_3", "drone8.JPG", 700, 200, 4096, 4096]
CASE4_CONFIG = ["./images/CASE_4", "drone1.JPG", 300, 700, 4096, 4096]
CONFIG = CASE2_CONFIG

# set lidar roi and make size (4096 x 4096)
lidar_roi_y = CONFIG[2]
lidar_roi_x = CONFIG[3]
lidar_roi_height = CONFIG[4]
lidar_roi_width = CONFIG[4]
# Current ROI center inside the cropped lidar image, updated by mouse clicks.
lidar_roi_center_x = lidar_roi_width / 2
lidar_roi_center_y = lidar_roi_height / 2
lidar_roi_center_x = int(lidar_roi_center_x)
lidar_roi_center_y = int(lidar_roi_center_y)
# Half-size of the lidar ROI rectangle in full-resolution pixels.
lidar_roi_width_height = 600

drone_image_height = 5300
drone_image_width = 7950
# Current ROI center inside the drone image, updated by mouse clicks.
drone_roi_center_x = drone_image_width / 2
drone_roi_center_y = drone_image_height / 2
drone_roi_center_x = int(drone_roi_center_x)
drone_roi_center_y = int(drone_roi_center_y)
# Half-size of the drone ROI rectangle; adjustable with 'o'/'p' keys in main().
drone_roi_width_height = 600

# Dirty flags: True forces main() to re-crop the corresponding ROI image.
lidar_pos_change = True
drone_pos_change = True
# mouse callback function
def lidar_move(event, x, y, flags, param):
    """OpenCV mouse callback: recenter the lidar ROI on a left click.

    *param* carries the display scale factor, so window coordinates are
    divided by it to map back to full-resolution image coordinates.
    """
    global lidar_roi_center_x
    global lidar_roi_center_y
    global lidar_pos_change

    if event != cv2.EVENT_LBUTTONDOWN:
        return
    lidar_roi_center_x = int(x / param)
    lidar_roi_center_y = int(y / param)
    lidar_pos_change = True
# mouse callback function
def drone_move(event, x, y, flags, param):
    """OpenCV mouse callback: recenter the drone ROI on a left click.

    *param* carries the display scale factor, so window coordinates are
    divided by it to map back to full-resolution image coordinates.
    """
    global drone_roi_center_x
    global drone_roi_center_y
    global drone_pos_change

    if event != cv2.EVENT_LBUTTONDOWN:
        return
    drone_roi_center_x = int(x / param)
    drone_roi_center_y = int(y / param)
    drone_pos_change = True
def main():
    """Interactive lidar/drone ROI picker.

    Shows downscaled lidar and drone images; clicking recenters the red ROI
    rectangle (via the mouse callbacks), 'o'/'p' grow/shrink the drone ROI,
    and 's' saves both ROIs as 1200x1200 BMP files.
    """
    global lidar_pos_change
    global drone_pos_change
    global drone_roi_width_height
    # Display scale factors (full-res -> window pixels); also passed to the
    # mouse callbacks so clicks can be mapped back to image coordinates.
    lidar_imshow_scale = 0.1
    drone_imshow_scale = 0.08
    cv2.namedWindow('lidar_image')
    cv2.namedWindow('drone_image')
    cv2.setMouseCallback('lidar_image', lidar_move, param=lidar_imshow_scale)
    cv2.setMouseCallback('drone_image', drone_move, param=drone_imshow_scale)

    DRONE_IMG_PATH = os.path.join(CONFIG[0], "drone", CONFIG[1])
    LIDAR_IMG_PATH = os.path.join(CONFIG[0], "lidar", "lidar.jpg")
    drone_image = cv2.imread(DRONE_IMG_PATH)
    lidar_image = cv2.imread(LIDAR_IMG_PATH)
    # Crop the lidar image to the configured 4096x4096 region.
    lidar_image = lidar_image[lidar_roi_y:lidar_roi_y +
                              lidar_roi_height, lidar_roi_x:lidar_roi_x + lidar_roi_width, :]
    drone_image = drone_image[0:drone_image_height, 0:drone_image_width, :]
    #resized_lidar = cv2.resize(lidar_image, dsize=None,fx=0.1, fy=0.1)
    #resized_drone = cv2.resize(drone_image, dsize=None,fx=0.1, fy=0.1)
    resized_lidar = cv2.resize(
        lidar_image, dsize=None, fx=lidar_imshow_scale, fy=lidar_imshow_scale)
    resized_drone = cv2.resize(
        drone_image, dsize=None, fx=drone_imshow_scale, fy=drone_imshow_scale)

    while(1):
        # Draw on copies so the base downscaled images stay clean.
        temp_lidar_image = resized_lidar.copy()
        temp_drone_image = resized_drone.copy()

        # Lidar ROI corners in full-resolution and window coordinates.
        lidar_x1 = (lidar_roi_center_x - lidar_roi_width_height)
        scaled_lidar_x1 = int(lidar_x1 * lidar_imshow_scale)
        lidar_y1 = (lidar_roi_center_y - lidar_roi_width_height)
        scaled_lidar_y1 = int(lidar_y1 * lidar_imshow_scale)
        lidar_x2 = (lidar_roi_center_x + lidar_roi_width_height)
        scaled_lidar_x2 = int(lidar_x2 * lidar_imshow_scale)
        lidar_y2 = (lidar_roi_center_y + lidar_roi_width_height)
        scaled_lidar_y2 = int(lidar_y2 * lidar_imshow_scale)
        cv2.rectangle(temp_lidar_image,
                      (scaled_lidar_x1, scaled_lidar_y1),
                      (scaled_lidar_x2, scaled_lidar_y2),
                      (0, 0, 255), 2)

        # Drone ROI corners in full-resolution and window coordinates.
        drone_x1 = (drone_roi_center_x - drone_roi_width_height)
        scaled_drone_x1 = int(drone_x1 * drone_imshow_scale)
        drone_y1 = (drone_roi_center_y - drone_roi_width_height)
        scaled_drone_y1 = int(drone_y1 * drone_imshow_scale)
        drone_x2 = (drone_roi_center_x + drone_roi_width_height)
        scaled_drone_x2 = int(drone_x2 * drone_imshow_scale)
        drone_y2 = (drone_roi_center_y + drone_roi_width_height)
        scaled_drone_y2 = int(drone_y2 * drone_imshow_scale)
        cv2.rectangle(temp_drone_image,
                      (scaled_drone_x1, scaled_drone_y1),
                      (scaled_drone_x2, scaled_drone_y2),
                      (0, 0, 255), 2)

        # Re-crop the ROI previews only when a click moved the center
        # (flags start True, so both crops exist before first use below).
        if lidar_pos_change == True:
            lidar_roi_image = lidar_image[lidar_y1:lidar_y1 +
                                          lidar_roi_width_height*2, lidar_x1:lidar_x1+lidar_roi_width_height*2:]
            print(lidar_roi_width_height)  # debug trace of the lidar ROI half-size
            lidar_pos_change = False
        if drone_pos_change == True:
            drone_roi_image = drone_image[drone_y1:drone_y1 +
                                          drone_roi_width_height*2, drone_x1:drone_x1+drone_roi_width_height*2:]
            drone_pos_change = False

        cv2.imshow('lidar_roi_image', cv2.resize(
            lidar_roi_image, dsize=(250, 250)))
        cv2.imshow('drone_roi_image', cv2.resize(
            drone_roi_image, dsize=(250, 250)))
        cv2.imshow('lidar_image', temp_lidar_image)
        cv2.imshow('drone_image', temp_drone_image)

        k = cv2.waitKey(40)
        if k == ord('o'):  # 'o' key: grow the drone ROI
            drone_roi_width_height += 10
        elif k == ord('p'):  # 'p' key: shrink the drone ROI
            drone_roi_width_height -= 10
        elif k == ord('s'):  # 's' key: save both ROI crops as 1200x1200 BMPs
            save_lidar_image = cv2.resize(lidar_roi_image, dsize=(1200, 1200))
            save_drone_image = cv2.resize(drone_roi_image, dsize=(1200, 1200))
            cv2.imwrite("lidar.bmp", save_lidar_image)
            cv2.imwrite("drone.bmp", save_drone_image)
# cv2.imshow('resized_drone',resized_drone)
# cv2.waitKey()
#img = np.zeros((512,512,3), np.uint8)
# def main():
# Create a black image, a window and bind the function to window
# cv2.namedWindow('image')
# cv2.setMouseCallback('image',draw_circle)
# while(1):
# cv2.imshow('image',img)
# if cv2.waitKey(20) & 0xFF == 27:
# break
# cv2.destroyAllWindows()
# Script entry point.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
3323800 | <filename>tests/test_rk4step.py<gh_stars>1-10
#
# test_varstep.py
#
from delsmm.systems.lag_doublepen import LagrangianDoublePendulum
import torch
import numpy as np
from tqdm import tqdm
import matplotlib.pyplot as plt
def test():
    """Smoke-test RK4 stepping of the Lagrangian double pendulum.

    Builds a small batch of perturbed initial states and rolls the system
    forward 200 steps; only checks that integration runs to completion.
    """
    torch.set_default_dtype(torch.float64)
    torch.manual_seed(1)

    system = LagrangianDoublePendulum(0.05, 1., 1., 1., 1., 10., method='rk4')

    # Two nearby angle samples; midpoint is the position, finite difference the velocity.
    angles_a = torch.rand(5, 1, 2) * 2 * np.pi - np.pi
    angles_b = angles_a.clone() + torch.randn_like(angles_a) * 0.01
    q = 0.5 * (angles_a + angles_b)
    qdot = (angles_b - angles_a) / 0.05

    system.compute_qddot(q, qdot)

    state = torch.cat([q, qdot], dim=-1)
    trajectory = [state]
    for _ in tqdm(range(200)):
        nxt = system.step(torch.zeros(5, 1).float(), trajectory[-1]).detach()
        trajectory.append(nxt)
    xs = torch.cat(trajectory, dim=1)
    qs = xs[..., :2]


if __name__ == '__main__':
    test()
4817181 | import numpy as np
# Advent of Code 2015 day 20 (Python 2: note xrange below).
BIG_NUM = 1000000 # try factors of 10 until solution found
goal = 33100000

# houses_a: part 1 -- every elf delivers 10*elf presents to all multiples of its number.
# houses_b: part 2 -- 11*elf presents, but each elf stops after its first 50 houses.
houses_a = np.zeros(BIG_NUM)
houses_b = np.zeros(BIG_NUM)
for elf in xrange(1, BIG_NUM):
    houses_a[elf::elf] += 10 * elf
    houses_b[elf:(elf+1)*50:elf] += 11 * elf

# First house index reaching the goal for each part.
print(np.nonzero(houses_a >= goal)[0][0])
print(np.nonzero(houses_b >= goal)[0][0])
| StarcoderdataPython |
1753552 | import logging
from django import forms
from django.contrib import admin, messages
from django.contrib.admin import ModelAdmin, SimpleListFilter, widgets
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import Group
from django.forms import Form
from django.template.response import TemplateResponse
from django.urls import reverse
from django.utils.translation import gettext_lazy as _
from admin_extra_urls.api import button
from admin_extra_urls.mixins import ExtraUrlMixin
from .config import UNICEF_EMAIL
from .graph import default_group, Synchronizer, SyncResult
from .models import BusinessArea, Region, User
from .sync import load_business_area, load_region
logger = logging.getLogger(__name__)
def admin_reverse(model, page="changelist"):
    """Reverse the admin URL name for *model* (default: its changelist page)."""
    meta = model._meta
    return reverse(f"admin:{meta.app_label}_{meta.model_name}_{page}")
@admin.register(Region)
class RegionAdmin(ExtraUrlMixin, ModelAdmin):
    """Region admin with an extra 'sync' button that reloads regions."""
    list_display = ['code', 'name']

    @button()
    def sync(self, request):
        # Pull the latest region list from the remote source.
        load_region()
@admin.register(BusinessArea)
class BusinessAreaAdmin(ExtraUrlMixin, ModelAdmin):
    """BusinessArea admin with an extra 'sync' button that reloads areas."""
    list_display = ['code', 'name', 'long_name', 'region', 'country']
    list_filter = ['region', 'country']
    search_fields = ('name',)

    @button()
    def sync(self, request):
        # Pull business areas from the remote source; surface failures to the
        # admin UI instead of raising.
        try:
            load_business_area()
        except Exception as e:
            logger.error(e)
            self.message_user(request, str(e), messages.ERROR)
class LoadUsersForm(forms.Form):
    """Admin form: whitespace-separated email addresses to bulk-load users."""
    emails = forms.CharField(widget=forms.Textarea)
class FF(Form):
    # Minimal single-field form; 'selection' appears intended to carry the
    # chosen entry id (cf. the 'selection' POST key in link_user_data).
    # NOTE(review): the class name 'FF' is kept for backward compatibility.
    selection = forms.CharField()
class UNICEFUserFilter(SimpleListFilter):
    """Admin list filter splitting users by UNICEF vs. external email domain."""
    title = 'UNICEF user filter'
    parameter_name = 'email'

    def lookups(self, request, model_admin):
        return [('unicef', 'UNICEF'), ('external', 'External')]

    def queryset(self, request, queryset):
        choice = self.value()
        if choice == 'unicef':
            return queryset.filter(email__endswith=UNICEF_EMAIL)
        if choice == 'external':
            return queryset.exclude(email__endswith=UNICEF_EMAIL)
        return queryset
@admin.register(User)
class UserAdmin2(ExtraUrlMixin, UserAdmin):
    """User admin extended with Azure AD sync / link / bulk-load actions.

    NOTE(review): the '<PASSWORD>' literals in add_fieldsets are dataset
    anonymisation artifacts (presumably the two password fields of Django's
    add-user form) -- restore the real field names before use.
    """
    list_display = ['username', 'display_name', 'email', 'is_staff',
                    'is_active', 'is_superuser', 'is_linked', 'last_login']
    list_filter = ['is_superuser', 'is_staff', 'is_active', UNICEFUserFilter]
    search_fields = ['username', 'display_name']
    fieldsets = (
        (None, {'fields': (('username', 'azure_id'), 'password')}),
        (_('Personal info'), {'fields': (('first_name', 'last_name',),
                                         ('email', 'display_name'),
                                         ('job_title',),
                                         )}),
        (_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',
                                       'groups', 'user_permissions')}),
        (_('Important dates'), {'fields': ('last_login', 'date_joined')}),
    )
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': ('username', '<PASSWORD>', '<PASSWORD>'),
        }),
    )
    # Azure-sourced fields are read-only in the admin form.
    readonly_fields = ('azure_id', 'job_title', 'display_name')

    def is_linked(self, obj):
        # Computed list_display column: "linked" == has an Azure object id.
        return bool(obj.azure_id)

    is_linked.boolean = True

    @button(label='Sync')
    def sync_user(self, request, pk):
        """Re-sync one user's profile from Azure AD (extra admin button)."""
        obj = self.get_object(request, pk)
        try:
            syncronizer = Synchronizer()
            syncronizer.sync_user(obj)
        except Exception as e:
            self.message_user(request, str(e), messages.ERROR)
        # NOTE(review): this success message is shown even when the sync above
        # failed -- consider moving it into an else branch.
        self.message_user(request, "User synchronized")

    @button(label='Link user')
    def link_user_data(self, request, pk):
        """Search Azure AD for candidate accounts and link the selected one."""
        opts = self.model._meta
        # Minimal admin-template context for link_user.html.
        ctx = {
            'opts': opts,
            'app_label': 'security',
            'change': True,
            'is_popup': False,
            'save_as': False,
            'has_delete_permission': False,
            'has_add_permission': False,
            'has_change_permission': True,
        }
        obj = self.get_object(request, pk)
        syncronizer = Synchronizer()
        try:
            if request.method == 'POST':
                if request.POST.get('selection'):
                    # An Azure object id was chosen: link it to this user.
                    data = syncronizer.get_user(request.POST.get('selection'))
                    syncronizer.sync_user(obj, data['id'])
                    self.message_user(request, "User linked")
                    return None
                else:
                    ctx['message'] = 'Select one entry to link'
            # GET (or POST without a selection): (re)display search results.
            data = syncronizer.search_users(obj)
            ctx['data'] = data
            return TemplateResponse(request, 'admin/link_user.html', ctx)
        except Exception as e:
            self.message_user(request, str(e), messages.ERROR)

    @button()
    def load(self, request):
        """Bulk-create/update users from a pasted list of email addresses."""
        opts = self.model._meta
        ctx = {
            'opts': opts,
            'app_label': 'security',
            'change': True,
            'is_popup': False,
            'save_as': False,
            'has_delete_permission': False,
            'has_add_permission': False,
            'has_change_permission': True,
        }
        if request.method == 'POST':
            form = LoadUsersForm(request.POST)
            if form.is_valid():
                synchronizer = Synchronizer()
                emails = form.cleaned_data['emails'].split()
                total_results = SyncResult()
                for email in emails:
                    # Azure Graph $filter query on the mail attribute.
                    result = synchronizer.fetch_users("startswith(mail,'%s')" % email,
                                                      callback=default_group)
                    total_results += result
                self.message_user(request,
                                  f"{len(total_results.created)} users have been created,"
                                  f"{len(total_results.updated)} updated."
                                  f"{len(total_results.skipped)} invalid entries found.")
        else:
            form = LoadUsersForm()
        ctx['form'] = form
        return TemplateResponse(request, 'admin/load_users.html', ctx)
class RoleForm(forms.Form):
    """Bulk-grant form: assign one user and group to many business areas."""
    # overwrite_existing = forms.BooleanField(help_text="Overwrite existing entries", required=False)
    business_areas = forms.ModelMultipleChoiceField(queryset=BusinessArea.objects.all(),
                                                    widget=widgets.FilteredSelectMultiple('Services', False)
                                                    )
    user = forms.ModelChoiceField(queryset=User.objects.all())
    group = forms.ModelChoiceField(queryset=Group.objects.all())
# @admin.register(Role)
# class RoleAdmin(ExtraUrlMixin, ModelAdmin):
# list_display = ['user', 'group', 'business_area']
# search_fields = ('user',)
# list_filter = ('group', ('business_area', RelatedFieldComboFilter))
#
# def has_add_permission(self, request):
# return False
#
# @button()
# def add_grants(self, request):
# opts = self.model._meta
# ctx = {
# 'opts': opts,
# 'add': False,
# 'has_view_permission': True,
# 'has_editable_inline_admin_formsets': True,
# 'app_label': opts.app_label,
# 'change': True,
# 'is_popup': False,
# 'save_as': False,
# 'media': self.media,
# 'has_delete_permission': False,
# 'has_add_permission': False,
# 'has_change_permission': True,
# }
# if request.method == 'POST':
# form = RoleForm(request.POST)
# if form.is_valid():
# user = form.cleaned_data.pop('user')
# business_areas = form.cleaned_data.pop('business_areas')
# group = form.cleaned_data.pop('group')
# # overwrite_existing = form.cleaned_data.pop('overwrite_existing')
#
# for business_area in business_areas:
# Role.objects.update_or_create(user=user,
# business_area=business_area,
# group=group)
# self.message_user(request, 'ACLs created')
# return HttpResponseRedirect(admin_reverse(Role))
# else:
# form = RoleForm(initial={})
# ctx['adminform'] = AdminForm(form,
# [(None, {'fields': ['user',
# 'group',
# 'business_areas']})],
# {})
# ctx['media'] = self.media + form.media
# return TemplateResponse(request, 'admin/unicef_security/add_grants.html', ctx)
| StarcoderdataPython |
27889 | <filename>EllipticCurves/Curve.py
import matplotlib.pyplot as plt
import numpy as np
def main():
    """Plot the elliptic curve y^2 = x^3 + a*x + b (a=-1, b=1) as an implicit contour."""
    a, b = -1, 1

    y, x = np.ogrid[-5:5:100j, -5:5:100j]
    curve = pow(y, 2) - pow(x, 3) - x * a - b
    plt.contour(x.ravel(), y.ravel(), curve, [0])

    plt.plot(1, 1, 'ro')  # mark the point (1, 1), which lies on this curve
    plt.grid()
    plt.show()


if __name__ == '__main__':
    main()
1774454 | <gh_stars>1-10
###########################################################################
### Calibration of the conceptual groundwater model using NSGA-II ###
### Author : <NAME> ###
### Last Edit: 07 Jul 2020 ###
###########################################################################
from platypus import NSGAII, Problem, Real
import pandas as pd
import os, sys

dirpath = os.getcwd()
sys.path.insert(0, r".\Functions")

# import the user defined functions
from modelcalb_nsga2 import sim_mod
from visualplot import paretoplot
from data_divide import data_sep
from Utils import sortinput
from mkdir import addpath

addpath()  # creates the new directory to store the results

############## inputs and data management #############################################
# BUGFIX: the directory literal must be a raw string -- '\U' inside a normal
# string starts a unicode escape and is a SyntaxError on Python 3 (on Python 2
# the raw string yields the identical value).
filedir = r'C:\Users\Laks\Desktop\REGSim-main\REGSim module\Data'
filename = 'sampledata.csv'
path = os.path.join(filedir, filename)

# read the csv file (input data)
## Column 1 -Time ## Column 2 groundwater head ## Column 3 rainfall
## Column 4 -Potential evapotranspiration ## Column 5&6 Lateral inflow and outflow
data = pd.read_csv(path, sep=',')

# sort the data for the model
input_data = sortinput(data)

# separate the data for calibration and validation period
# data_sep(dataset, totalmonth, calibrationmonth)
[input_calib, input_valid] = data_sep(input_data, 60, 48)
###########################################################################################
#Calibration of the model using NSGA2 optimization method
def nsga2(rech_case,pcase,indata,area,mv,M=3,V=None,sy=None,Qp=None,r1=None,r11=None,r12=None,r21=None,r22=None,r23=None):
    """Calibrate the groundwater model with NSGA-II and persist the Pareto front.

    Decision variables are specific yield (sy), max pumping (Qp) and the
    recharge factors for the chosen *rech_case* (1: one constant factor,
    2: monsoon/non-monsoon pair, 3: summer/winter/non-monsoon triple).
    Results are written to a tab-separated file and plotted via paretoplot().
    Returns the DataFrame of decision variables and objectives.
    """
    def gw_model(input_para):
        # Objective wrapper evaluated by platypus: returns the M objectives.
        return sim_mod(input_para,indata,area,rech_case,mv,pcase)

    # main program run - NSGA2, this code runs for unconstrained condition
    # define the no of decision variables (V) and objective functions (M)
    problem = Problem(V, M)
    # define the decision variables as bounded reals
    x1 = Real(sy[0],sy[1])
    x2 = Real(Qp[0],Qp[1])
    if rech_case==1:
        x3 = Real(r1[0],r1[1])
        input_para = [x1,x2,x3]
    if rech_case==2:
        x3 = Real(r11[0],r11[1])
        x4 = Real(r12[0],r12[1])
        input_para = [x1,x2,x3,x4]
    if rech_case==3:
        x3 = Real(r21[0],r21[1])
        x4 = Real(r22[0],r22[1])
        x5 = Real(r23[0],r23[1])
        input_para = [x1,x2,x3,x4,x5]
    # define the problem definition
    problem.types[:] = input_para
    problem.function = gw_model
    # instantiate the optimization algorithm
    algorithm = NSGAII(problem)#, population_size=500)
    # optimize the problem using function evaluations
    nsim = 10000#input('no. of iterations:')
    algorithm.run(nsim)
    # stores the results of NSGA2
    result = algorithm.result
    # store decision variables and objectives of every solution in a dataframe
    df_opt = pd.DataFrame()
    if rech_case==1:
        df_opt["Sy"] = [s.variables[0] for s in result]
        df_opt["Qp"] = [s.variables[1] for s in result]
        df_opt["r"] = [s.variables[2] for s in result]
    if rech_case==2:
        df_opt["Sy"] = [s.variables[0] for s in result]
        df_opt["Qp"] = [s.variables[1] for s in result]
        df_opt["r11"] = [s.variables[2] for s in result]
        df_opt["r12"] = [s.variables[3] for s in result]
    if rech_case==3:
        df_opt["Sy"] = [s.variables[0] for s in result]
        df_opt["Qp"] = [s.variables[1] for s in result]
        df_opt["r21"] = [s.variables[2] for s in result]
        df_opt["r22"] = [s.variables[3] for s in result]
        df_opt["r23"] = [s.variables[4] for s in result]
    df_opt["RMSE"] = [s.objectives[0] for s in result]
    df_opt["MAE"] = [s.objectives[1] for s in result]
    df_opt["NSE"] = [s.objectives[2] for s in result]
    # save the output into csv file
    # NOTE(review): the path below contains a backslash-space (".\ Results") --
    # confirm the intended directory name before relying on it.
    df_opt.to_csv(".\ Results\paretofront_case{}_modvar{}.txt".format(rech_case,mv),
                  sep='\t',index=False, header=True, encoding='utf-8')
    #
    # plotting pareto optimal front for each case
    par_plt = paretoplot(df_opt,rech_case,mv)
    print ('Optimization completed')
    return df_opt
# run the nsga 2 algorithm
#nsga2(rech_case,pcase,indata,area,mv,M=3,V=None,sy=None,Qp=None,r1=None,r11=None,r12=None,r21=None,r22=None,r23=None):
'''
rech_case - recharge factor case
pcase - pumping conditions
indata - total input dataset
area - area of the study [sq.m]
mv- model variant condition M- objective function, V- decision variable (3-for case-1,4 for case-2, 5 for case-3)
Qp- max pumping rate[MCM] sy- specfic yield [-], r1- constant recharge for case-1
r11 - recharge factor for nonmonsoon for case-2 r12- recharge factor for monsoon for case-2
r21- recharge factor for summer for case-3 r22- recharge factor for winter for case-3, r23- recharge factor for nonmonsoon for case-3
'''
# Calibrate: recharge case 1, pumping case 3, aquifer area 5,536,407,425 m^2,
# model variant 1, 3 objectives / 3 decision variables with the listed bounds.
df_opt = nsga2(1, 3, input_calib, 5536407425,mv=1,V=3,sy=[0.001,0.16],Qp=[50,100],r1=[0.0,0.6])
#############end of the script#################################
| StarcoderdataPython |
1643614 | <reponame>vmgabriel/tabu-base<filename>src/config/__init__.py<gh_stars>0
"""
Configuration of env
"""
# Modules
from src.config.tabu import configuration as config_tabu
# Aggregated application configuration; each key exposes one sub-module's config.
configuration = {
    'tabu': config_tabu,
}
| StarcoderdataPython |
153909 | <reponame>94JuHo/Algorithm_study
import sys
# Read one line from stdin, strip trailing whitespace/newline, and echo it back.
input_data = sys.stdin.readline().rstrip()
print(input_data)
188762 | import twitter
import ccxt
import logging
import wget
import pytesseract
from datetime import datetime, timedelta, timezone
from dynaconf import settings
from PIL import Image
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(name)-40s %(levelname)-8s %(message)s")
logger = logging.getLogger(__name__)

# Public Bittrex client, used only to validate that a ticker symbol exists.
bittrex = ccxt.bittrex()

# A tweet must contain all of these words to count as a coin-of-the-week post.
coin_keywords = ['coin', 'week']

# Twitter API client; credentials come from dynaconf settings.
api = twitter.Api(consumer_key=settings.TWITTER.CONSUMER_KEY,
                  consumer_secret=settings.TWITTER.CONSUMER_SECRET,
                  access_token_key=settings.TWITTER.ACCESS_TOKEN,
                  access_token_secret=settings.TWITTER.ACCESS_TOKEN_SECRET)
def extract_coin(text):
    """Try to find a Bittrex-listed coin symbol mentioned in *text*.

    Candidate symbols are the 3- or 4-character words in *text*; each one is
    validated by fetching its <COIN>/ETH ticker on Bittrex.  Returns the last
    candidate with a live ticker, or None when there are no candidates.
    """
    # Extract all possible coins (3- or 4-letter words).
    possible_coins = [word for word in text.split(" ") if len(word) in (3, 4)]

    if len(possible_coins) == 0:
        logger.info("Nop it was not a coin of the week: {}".format(text))
        return None

    # Try to find a real coin by probing the exchange for each candidate.
    found_coin = None
    for coin in possible_coins:
        symbol = "{}/ETH".format(coin)
        try:
            bittrex.fetch_ticker(symbol)
        except Exception:
            # CONSISTENCY FIX: use the module logger (was 'logging.debug'),
            # and narrow the previously bare except.
            logger.debug("{} is not a coin".format(coin))
            continue
        found_coin = coin
    return found_coin
def main():
    """Scan officialmcafee's recent timeline for a "coin of the week" tweet.

    Only tweets from the last hour containing every keyword are considered;
    the coin symbol is resolved from the tweet text first, then by OCR on an
    attached image if the text yields nothing.
    """
    statuses = api.GetUserTimeline(screen_name='officialmcafee')
    last_hour = datetime.now(timezone.utc) - timedelta(hours=1)
    for status in statuses:
        # Only fetch the last hour
        created = datetime.strptime(status.created_at, "%a %b %d %H:%M:%S %z %Y")
        if created < last_hour:
            continue
        print(status)
        # Check if relevant: every keyword must occur in the lowercased text.
        keywords = [x for x in coin_keywords if x in status.text.lower()]
        if len(keywords) != len(coin_keywords):
            continue
        logger.info("Seems like a coin of the week, trying to find it")
        # Extract all possible coins from text
        found_coin = extract_coin(status.text)
        if found_coin is not None:
            logger.info("Found the coin of the week: {}".format(found_coin))
            break
        logger.info("Did not find a coin the the text")
        # Extract all possible coins from image: download the first media
        # attachment and OCR it with tesseract.
        if len(status.media) == 0:
            logger.info("No media to OCR, no coin to be found")
            continue
        url = status.media[0].media_url
        filename = wget.download(url, "./data/")
        image_text = pytesseract.image_to_string(Image.open(filename))
        found_coin = extract_coin(image_text)
        if found_coin is not None:
            logger.info("Found the coin of the week: {}".format(found_coin))
            break
# Script entry point.
if __name__ == '__main__':
    main()
| StarcoderdataPython |
4800104 | import pandas as pd
# Build a pandas Series from a dict: keys become the index, values the data.
student={'A':39,'B':41,'C':42,'D':44}
s=pd.Series(student)
print(s)
108218 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from webbreaker.webbreakerlogger import Logger
# Module-level template for the WebInspect scan-settings payload.
# NOTE(review): formatted_settings_payload() mutates this shared global in
# place, so successive calls overwrite each other's overrides.
json_scan_settings = {
    "settingsName": "",
    "overrides": {
        "scanName": ""
    }
}
def formatted_settings_payload(settings, scan_name, runenv, scan_mode, scan_scope, login_macro, scan_policy,
scan_start, start_urls, workflow_macros, allowed_hosts):
global json_scan_settings
json_scan_settings['settingsName'] = settings
# scanName option
if runenv == "jenkins":
json_scan_settings['overrides']['scanName'] = os.getenv('BUILD_TAG')
else:
json_scan_settings['overrides']['scanName'] = scan_name
# crawlAuditMode option
if scan_mode:
json_scan_settings['overrides']['crawlAuditMode'] = ""
if scan_mode == "scan":
json_scan_settings['overrides']['crawlAuditMode'] = 'AuditOnly'
elif scan_mode == "crawl":
json_scan_settings['overrides']['crawlAuditMode'] = 'CrawlOnly'
else:
json_scan_settings['overrides']['crawlAuditMode'] = 'CrawlAndAudit'
if scan_scope:
json_scan_settings['overrides']['scanScope'] = ""
if scan_scope == "all":
json_scan_settings['overrides']['scanScope'] = 'Unrestricted'
elif scan_scope == "strict":
json_scan_settings['overrides']['scanScope'] = 'Self'
elif scan_scope == "children":
json_scan_settings['overrides']['scanScope'] = 'Children'
elif scan_scope == "ancestors":
json_scan_settings['overrides']['scanScope'] = 'Ancestors'
else:
#json_scan_settings['overrides']['scanScope'] = 'None'
Logger.app.error("Usage: all, strict, children, or ancestors are options! \n"
"The value {} for scan_scope is not available!".format(scan_scope))
if login_macro:
json_scan_settings['overrides']['loginMacro'] = login_macro
if scan_policy:
json_scan_settings['overrides']['policyId'] = scan_policy
if scan_start:
json_scan_settings['overrides']['startOption'] = ""
if scan_start == "url":
json_scan_settings['overrides']['startOption'] = "Url"
elif scan_start == "macro":
json_scan_settings['overrides']['startOption'] = "Macro"
else:
Logger.app.error("usage: url or macro are options NOT scan_start: {}!".format(scan_start))
if start_urls:
json_scan_settings['overrides']['startUrls'] = start_urls
if workflow_macros:
json_scan_settings['overrides']['workflowMacros'] = workflow_macros
if allowed_hosts:
json_scan_settings['overrides']['allowedHosts'] = allowed_hosts
return json_scan_settings
| StarcoderdataPython |
1724658 | <gh_stars>0
import time
import gzip
import csv
import logging
import sys
if sys.version_info.minor < 7:
import importlib_resources
else:
import importlib.resources as importlib_resources
import tqdm
import pandas as pd
import numpy as np
import scipy.sparse
from sklearn.metrics import pairwise_distances, pairwise_kernels
from retrieve.methods import pairwise_kernels_chunked
from retrieve.sparse_utils import top_k, substract_vector
logger = logging.getLogger(__name__)
SIM, DIST = 0, 1
def load_embeddings(path, vocab=None):
    """Read space-separated embeddings into a (dim x words) DataFrame.

    A leading word2vec-style header line ("<n_words> <dim>") is skipped
    automatically.  If *vocab* is given, columns for words outside the
    vocabulary are dropped.
    """
    # Peek at the first line: exactly two tokens means a word2vec header.
    with open(path) as handle:
        first_tokens = next(handle).strip().split()
    rows_to_skip = 1 if len(first_tokens) == 2 else 0
    frame = pd.read_csv(
        path, sep=" ", header=None,
        index_col=0, skiprows=rows_to_skip, quoting=csv.QUOTE_NONE)
    frame = frame.dropna(axis=1, how='all').T
    if vocab is not None:
        # Drop every word that is not part of the requested vocabulary.
        missing = frame.columns.difference(vocab)
        logger.info("Dropping {} words from vocabulary".format(len(missing)))
        frame.drop(missing, axis=1, inplace=True)
    return frame
def load_fasttext(path):
    """Load a binary fastText model, trying the `fastText` package first
    and falling back to `fasttext`; raise ValueError if neither exists."""
    try:
        import fastText
    except ModuleNotFoundError:
        pass
    else:
        return fastText.load(path)
    try:
        import fasttext
    except ModuleNotFoundError:
        raise ValueError("Couldn't import `fastText` or `fasttext` module")
    return fasttext.load_model(path)
def normalize_vectors(vectors):
    """Return *vectors* with every row scaled to unit L2 norm."""
    row_norms = np.linalg.norm(vectors, axis=1)
    return vectors / row_norms[:, None]
class Embeddings:
    # class constants
    SIM = SIM
    DIST = DIST
    """
    Convenience class to handle embeddings. This class is better initialized
    from the method `from_csv`
    Arguments
    =========
    keys : list of strings representing the words in the rows of vectors
    vectors : an np.array(n_words, dim_size)
    """
    # NOTE(review): the string above follows the class constants, so it is a
    # bare expression rather than the class __doc__; left in place to avoid
    # changing runtime attributes.
    def __init__(self, keys, vectors):
        if len(keys) != len(vectors):
            raise ValueError("Expected {} vectors".format(len(keys)))
        self.word2id = {}
        self.id2word = {}
        for idx, word in enumerate(keys):
            self.word2id[word] = idx
            self.id2word[idx] = word
        self.vectors = vectors
    def __len__(self):
        """Number of words in the vocabulary."""
        return len(self.word2id)
    def __getitem__(self, key):
        """Return the vector for word *key* (KeyError if missing)."""
        return self.vectors[self.word2id[key]]
    def __contains__(self, key):
        """True when *key* is part of the vocabulary."""
        return key in self.word2id
    def apply_projection(self, proj, batch_size=4096, renorm=False):
        """Apply a projection matrix to all vectors, in batches.

        *proj* may be a torch-loadable path or an already-loaded matrix.
        If *renorm*, vectors are L2-normalized before the projection.
        """
        import torch
        if isinstance(proj, str):
            # assume path
            proj = torch.load(proj)
        vectors = self.vectors
        if renorm:
            vectors = normalize_vectors(vectors)
        for i in tqdm.tqdm(range(0, len(self.vectors), batch_size)):
            start, stop = i, min(i + batch_size, len(vectors))
            vectors[start:stop, :] = (proj @ vectors[start:stop, :].T).T
        self.vectors = vectors
    def normalize_vectors(self):
        """L2-normalize all stored vectors in place."""
        self.vectors = normalize_vectors(self.vectors)
    @property
    def keys(self):
        """A copy of the word-to-index mapping."""
        return dict(self.word2id)
    def get_vectors(self, keys):
        """Return (found_words, stacked vectors) for the in-vocab subset of *keys*."""
        targets = [w for w in keys if w in self.word2id]
        return targets, np.array(list(map(self.__getitem__, targets)))
    def default_vector(self):
        """Mean vector over the vocabulary (a fallback for OOV words)."""
        return np.mean(self.vectors, 0)
    @classmethod
    def require_embeddings(cls, embs, msg='', **kwargs):
        """Coerce *embs* (a path or an Embeddings instance) into Embeddings."""
        if isinstance(embs, str):
            embs = cls.from_file(embs, **kwargs)
        if not isinstance(embs, cls):
            raise ValueError(msg)
        return embs
    @classmethod
    def from_fasttext(cls, path, vocab):
        """Build embeddings for *vocab* from a binary fastText model."""
        if vocab is None:
            raise ValueError("FastText model requires vocab")
        model = load_fasttext(path)
        vectors = []
        for word in vocab:
            vectors.append(model.get_word_vector(word))
        return cls(vocab, np.array(vectors))
    @classmethod
    def from_file(cls, path, vocab=None, skip_header=False):
        """Load embeddings from a text (optionally gzipped) file, or
        dispatch to from_fasttext() for *.bin model files."""
        # dispatch fastText
        if path.endswith('bin'):
            return cls.from_fasttext(path, vocab)
        if vocab is not None:
            vocab = set(vocab)
            logger.info("Loading {} word embeddings".format(len(vocab)))
        keys, vectors = [], []
        open_fn = gzip.open if path.endswith(".gz") else open
        with open_fn(path) as f:
            if skip_header:
                next(f)
            for line in f:
                if isinstance(line, bytes):
                    line = line.decode()
                word, *vec = line.strip().split()
                if vocab and word not in vocab:
                    continue
                keys.append(word)
                # NOTE(review): np.float was removed in NumPy>=1.24; on
                # modern NumPy this needs the builtin `float` — confirm
                # the targeted NumPy version.
                vectors.append(np.array(vec, dtype=np.float))
        # report missing
        if vocab is not None:
            logger.info("Loaded {}/{} words from vocabulary".format(
                len(keys), len(vocab)))
        return cls(keys, np.array(vectors))
    @classmethod
    def from_csv(cls, path, vocab=None):
        """
        Arguments
        =========
        path : str, path to file with embeddings in csv format
            (word is assumed to go in first column)
        vocab : optional, subset of words to load
        Output
        ======
        keys : dict, mapping words to the index in indices respecting the
            order in which the keys appear
        indices : list, mapping keys to the index in embedding matrix
        """
        df = load_embeddings(path, vocab=vocab)
        return cls(list(df.keys()), np.array(df).T)
    @classmethod
    def from_resource(cls, path, vocab=None):
        """Load embeddings shipped as a package resource in retrieve.resources.misc."""
        if not importlib_resources.is_resource('retrieve.resources.misc', path):
            raise ValueError("Unknown resource: {}".format(path))
        with importlib_resources.path('retrieve.resources.misc', path) as f:
            return cls.from_file(str(f), vocab=vocab)
    def to_csv(self, path):
        """Serialize embeddings as "word<TAB>v1 v2 ..." lines."""
        with open(path, 'w') as f:
            for idx, word in sorted(self.id2word.items()):
                vec = ["{:.6}".format(i) for i in self.vectors[idx].tolist()]
                f.write(word + '\t' + ' '.join(vec) + '\n')
    def get_indices(self, words):
        """Map *words* to ({found word -> position in *words*}, row indices
        into self.vectors), skipping out-of-vocabulary words."""
        keys, indices = {}, []
        for idx, w in enumerate(words):
            if w in self.word2id:
                keys[w] = idx
                indices.append(self.word2id[w])
        return keys, indices
    def get_S(self, vocab=None, fill_missing=False,
              metric='cosine', beta=1, apply_mod=True, cutoff=0.0, chunk_size=0):
        """
        Arguments
        =========
        vocab : list (optional), vocab in desired order. The output matrix will
            have word-similarities ordered according to the order in `vocab`.
            However, if `fill_missing` is False, while the order is mantained,
            there will be gaps.
        fill_missing : bool, whether to fill similarities with one-hot vectors
            for out-of-vocabulary words
        Output
        ======
        keys : list of words ordered as the output matrix
        S : np.array (or scipy.sparse.lil_matrix) (vocab x vocab), this will be
            a sparse array if a positive `cutoff` is passed
        >>> vectors = [[0.35, 0.75], [0.5, 0.5], [0.75, 0.35]]
        >>> embs = Embeddings(['a', 'c', 'e'], np.array(vectors))
        >>> vocab = ['c', 'd', 'a', 'f']
        >>> S = embs.get_S(vocab=vocab, fill_missing=True)
        >>> S.shape # asked for 4 words (fill_missing)
        (4, 4)
        >>> S[1, 3] == 0.0 # missing words evaluate to one-hot vectors
        True
        >>> w1, w2 = embs['a'], embs['c']
        >>> sim = np.dot(w1, w2)/(np.linalg.norm(w1) * np.linalg.norm(w2))
        >>> np.allclose(S[0, 2], sim)
        True
        >>> S[2, 0] == S[0, 2]
        True
        >>> keys, S = embs.get_S(vocab=vocab)
        >>> list(keys) == ['c', 'a'] # words only in keys in requested order
        True
        >>> S.shape # only words in space (fill_missing=False)
        (2, 2)
        >>> w1, w2 = embs['a'], embs['c']
        >>> sim = np.dot(w1, w2)/(np.linalg.norm(w1) * np.linalg.norm(w2))
        >>> np.allclose(S[0, 1], sim)
        True
        """
        if fill_missing and not vocab:
            raise ValueError("`fill_missing` requires `vocab`")
        if apply_mod and beta > 1 and cutoff is not None and cutoff < 0:
            raise ValueError("Negative cutoff with positive beta yields wrong results")
        keys, indices = self.get_indices(vocab or self.keys)
        if not keys:
            raise ValueError("Couldn't find any of the requested vocab")
        # (found words x found words)
        logger.info("Computing {} similarities".format(len(indices)))
        start = time.time()
        S = pairwise_kernels_chunked(
            self.vectors[indices], metric=metric, chunk_size=chunk_size,
            threshold=cutoff)
        logger.info("Got S in {:.2f} secs".format(time.time() - start))
        # apply modifications on S
        if apply_mod:
            S = (S.power(beta) if scipy.sparse.issparse(S) else np.power(S, beta))
        # add one-hot vectors for OOV and rearrange to match input vocabulary
        if fill_missing:
            # (requested words x requested words)
            S_ = scipy.sparse.lil_matrix((len(vocab), len(vocab)))
            # rearrange
            index = np.array([keys[w] for w in vocab if w in keys])
            index = np.tile(index, (len(index), 1))
            S_[index, index.T] = S
            S = S_
            # make sure diagonal is always 1
            S.setdiag(1)
            return S.tocsr()
        return keys, S
    def nearest_neighbours(self, words, n=10,
                           metric='cosine',
                           metric_type=SIM, csls_k=0):
        """
        If `metric_type` is Embeddings.SIM then `metric` must be one of
        sklearn.metrics.pairwise.PAIRWISE_KERNEL_FUNCTIONS:
            - ['cosine', 'sigmoid', 'linear', etc.]
        If `metric_type` is Embeddings.DIST then `metric` must be one of
        sklearn.metrics.pairwise.PAIRWISE_DISTANCE_FUNCTIONS:
            - ['cosine', 'euclidean', 'l1', 'l2', 'manhattan', 'cityblock']
        """
        if csls_k > 0 and metric_type != SIM:
            raise ValueError("CSLS is defined over similarities not distances")
        keys, index = self.get_indices(words)
        if metric_type == Embeddings.SIM:
            if csls_k > 0:
                # CSLS re-scales the full similarity matrix before slicing.
                S = csls(
                    pairwise_kernels(self.vectors, metric=metric, n_jobs=-1),
                    csls_k)
                S = S[index]
            else:
                S = pairwise_kernels(
                    self.vectors[index], self.vectors, metric=metric, n_jobs=-1)
            # get neighbours (skip column 0, the word itself)
            neighs = np.argsort(-S, axis=1)[:, 1: n+1]
        elif metric_type == Embeddings.DIST:
            S = pairwise_distances(
                self.vectors[index], self.vectors, metric=metric, n_jobs=-1)
            neighs = np.argsort(S, axis=1)[:, 1: n+1]
        else:
            raise ValueError("Unknown `metric_type`")
        S = S[np.arange(len(keys)).repeat(n), np.ravel(neighs)]
        S = S.reshape(len(keys), -1)
        # human form
        neighs = [{self.id2word[neighs[i, j]]: S[i, j] for j in range(n)}
                  for i in range(len(keys))]
        return keys, neighs
def train_gensim_embeddings(path, output_path=None, **kwargs):
    """Train word2vec embeddings with gensim on a line-per-sentence corpus.

    Extra **kwargs are forwarded to gensim's Word2Vec (size, window, ...).
    If *output_path* is given the vectors are saved in word2vec text format.
    Returns the trained Word2Vec model.
    """
    from gensim.models import Word2Vec
    from gensim.models.word2vec import LineSentence
    # NOTE(review): `enable_log_level` appears unused; possibly imported for
    # side effects — confirm before removing.
    from retrieve import enable_log_level
    m = Word2Vec(sentences=LineSentence(path), **kwargs)
    if output_path:
        m.wv.save_word2vec_format(output_path)
    return m
def export_fasttext_embeddings(path, vocab, output_path=None):
    """Materialize fastText vectors for *vocab* from a binary model.

    Returns ({word -> index}, list of vectors); if *output_path* is given,
    the vectors are also written as "word<TAB>v1 v2 ..." lines.
    """
    model = load_fasttext(path)
    keys, vectors = {}, []
    for idx, word in enumerate(vocab):
        keys[word] = idx
        vectors.append(model.get_word_vector(word))
    if output_path is not None:
        with open(output_path, 'w+') as f:
            for word in keys:
                vec = ["{:.6}".format(i) for i in vectors[keys[word]].tolist()]
                f.write(word + '\t' + ' '.join(vec) + '\n')
    return keys, vectors
def csls_dense(S, k=10):
    """CSLS (cross-domain similarity local scaling) for a dense matrix.

    Re-scales each similarity by the mean of the k nearest neighbours of
    both words; k+1 neighbours are fetched and the first (self-similarity)
    is dropped.
    """
    # NOTE(review): `indices` from top_k is unused here.
    indices, values = top_k(S, k + 1)
    mean_values = values[:, 1:].mean(1)
    return (2 * S) - mean_values[:, None] - mean_values[None, :]
def csls_sparse(S, k=10):
    """CSLS re-scaling for a sparse similarity matrix (see csls_dense)."""
    # NOTE(review): `indices` from top_k is unused here.
    indices, values = top_k(S, k + 1)
    mean_values = values[:, 1:].mean(1)
    # Subtract the neighbourhood means along rows and columns without
    # densifying the matrix.
    S = substract_vector(
        substract_vector(S * 2, mean_values, axis=1),
        mean_values, axis=0)
    return S
def csls(S, k=10):
    """Apply CSLS re-scaling, dispatching on the matrix representation."""
    # Dense matrices use plain broadcasting; sparse ones need the
    # sparse-friendly implementation.
    if not scipy.sparse.issparse(S):
        return csls_dense(S, k)
    return csls_sparse(S, k)
def csls_crosslingual(S, S1, S2, k=10):
    """CSLS for a cross-lingual similarity matrix *S*, scaling with the two
    monolingual similarity matrices *S1* (rows) and *S2* (columns).
    """
    # Mean over each word's k nearest monolingual neighbours, dropping the
    # self-similarity in column 0.
    S1_mean = top_k(S1, k + 1)[1][:, 1:].mean(1)
    S2_mean = top_k(S2, k + 1)[1][:, 1:].mean(1)
    return (2 * S) - S1_mean[:, None] - S2_mean[None, :]
if __name__ == '__main__':
    # CLI entry point: train word2vec embeddings over a text corpus.
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('input')
    parser.add_argument('--output')
    parser.add_argument('--size', type=int, default=100)
    parser.add_argument('--window', type=int, default=5)
    parser.add_argument('--min_count', type=int, default=1)
    parser.add_argument('--workers', type=int, default=4)
    args = parser.parse_args()
    m = train_gensim_embeddings(args.input, output_path=args.output,
                                size=args.size, window=args.window,
                                min_count=args.min_count, workers=args.workers)
3266917 | import unittest
from bingocardgenerator.square_getter import filter_candidates
class TestFilterCanidates(unittest.TestCase):
    """Tests for bingocardgenerator.square_getter.filter_candidates.

    Per these expectations, candidates fully contained in another (earlier
    or later) candidate are dropped; the containing candidate survives.
    """
    # NOTE(review): class name has a typo ("Canidates"); kept unchanged so
    # any by-name test selection keeps working.
    def test_filter_unique_candidates_returns_identity(self):
        # No containment relation: input passes through unchanged.
        self.assertEqual(filter_candidates(['a', 'b', 'c']), ['a', 'b', 'c'])
    def test_filter_redundant_candidates(self):
        # 'a', 'b' and 'c' are each contained in a multi-word candidate.
        self.assertEqual(filter_candidates(['a', 'a b', 'b', 'c', 'b c']), ['a b', 'b c'])
    def test_filter_later_candidates(self):
        # Later candidates contained in the earlier 'a b c' are dropped.
        self.assertEqual(filter_candidates(['a b c', 'a b', 'b c']), ['a b c'])
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
3284260 | import pandas as pd
from tqdm.notebook import tqdm as tqdm
import multiprocessing
from functools import reduce
import numpy as np
def get_sub_timeseries(df_x, index, window_start, window_end, identifier):
    """
    Extract the sub dataframe df_x.iloc[index - window_end : index - window_start]
    and tag it with a constant 'window_id' column so multiple sub windows can
    be distinguished after concatenation.
    Example:
    -----[|-----------|-----------|----------|]------------|-------------|---------------->
    index-window_end                index-window_start    index
    :param df_x: the pandas dataframe the sub dataframe should be extracted
    :param index: absolute index of the dataframe the window is extracted
    :param window_start: relative start of the subwindow
    :param window_end: relative end of the subwindow
    :param identifier: a unique constant identifier to distinguish later the sub dataframe
    :return: the extracted sub dataframe (a copy; df_x is left untouched)
    """
    # Copy explicitly: assigning a column on a raw iloc slice triggers
    # SettingWithCopyWarning and may write through to the caller's df_x.
    sub_df_x = df_x.iloc[index - window_end:index - window_start].copy()
    sub_df_x['window_id'] = identifier
    return sub_df_x
def get_rolling_timeseries(df_x, start_index, lag, window_start, window_end):
    """
    Extracts all possible sub windows of the given dataframe. It is assumed that the index of the dataframe is
    the default one (reset_index()).
    Example:
    -----[|[[...-------|-----------|----------|]]]...--------|-------------|-------------|------->
    :param df_x: pandas dataframe where the sub windows are extracted.
    :param start_index: the first index the sub windows should be extracted.
                        This is necessary because the method can be applied multiple times with different windows.
                        To merge the extracted features later on the window id must match.
    :param lag: the distance between the current row and the target row (y). necessary to limit the number of windows
                at the end of the dataframe where an extraction of sub windows would be possible but no target row
                is available.
    :param window_start: relative distance between the current row and the start of the sub windows
    :param window_end: relative distance between the current row and the end of the sub windows
    :return: a pandas dataframe containing all sub windows, each tagged with its own window id
    """
    print("Extracting sub windows", window_start, "-", window_end, ":")
    # extract every possible sub window
    # NOTE(review): identifiers start at -1 (i == start_index yields
    # i - start_index - 1 == -1) — confirm this offset is intended when
    # aligning window ids with the target series.
    sub_dfs = [get_sub_timeseries(
        df_x=df_x,
        index=i,
        window_start=window_start,
        window_end=window_end,
        identifier=i - start_index -1) for i in
        tqdm(range(start_index, len(df_x) - lag))]
    # Concatenate all sub windows into one long frame for feature extraction.
    sub_df_x_comp = pd.concat([df for df in tqdm(sub_dfs)], ignore_index=True)
    return sub_df_x_comp
def extract_sub_window(df_x, y, window, start_index, lag, fc_parameters="min", n_jobs=-1):
    """Extract tsfresh features for one (window_start, window_end) window.

    Builds the rolling sub windows, then runs tsfresh's relevant-feature
    extraction against the target series *y*.  Every feature column gets a
    "_<start>_<end>" suffix so several windows can be merged later.
    Returns a (features, filtered_y) tuple.
    """
    from tsfresh import extract_relevant_features
    from tsfresh.feature_extraction.settings import MinimalFCParameters
    if fc_parameters == "min":
        fc_parameters = MinimalFCParameters()
    window_start, window_end = window
    sub_df_x = get_rolling_timeseries(df_x, start_index, lag, window_start, window_end)
    if n_jobs == -1:
        n_jobs = multiprocessing.cpu_count()
    # Keep only the targets for which a complete sub window exists.
    y = y[y.index.isin(sub_df_x.window_id)]
    features = extract_relevant_features(sub_df_x, y, column_id="window_id", column_sort="timestamp", column_value=None,
                                         default_fc_parameters=fc_parameters, n_jobs=n_jobs)
    features = features.add_suffix(f"_{window_start}_{window_end}")
    return (features, y)
def extract_sub_windows(df_x, df_y, window_array, lag, fc_parameters, n_jobs=-1):
    """Extract and inner-merge tsfresh features for several windows.

    *window_array* holds "start-end" strings (e.g. "0-10").  Features of all
    windows are merged on the window id; the target *y* starts after the
    largest window end plus the forecasting *lag*.  Returns (features, y).
    """
    df_x = df_x.reset_index('timestamp')
    # Parse "start-end" window specs into integer pairs.
    split_func = lambda x: list(map(int, x.split("-")))
    windows = np.array(list(map(split_func, window_array)))
    max_end = max(windows[:, 1])
    # Targets only exist once the largest window plus the lag fits.
    y = df_y.iloc[max_end + lag:len(df_y)]
    y = y.reset_index(drop=True)
    y.index.name = 'window_id'
    # extract_sub_window returns a (features, y) tuple; keep only the
    # feature frames — the previous code passed the tuples straight into
    # pd.merge, which cannot merge tuples.
    features = [extract_sub_window(df_x, y, window, max_end, lag, fc_parameters, n_jobs)[0]
                for window in windows]
    features = reduce(lambda left, right: pd.merge(left, right, left_index=True, right_index=True, how='inner'),
                      features)
    return features, y
| StarcoderdataPython |
3325089 | <reponame>KonstantinPakulev/OSM-one-shot-multispeaker<filename>src/main.py<gh_stars>0
import argparse
import yaml
import os
from tts_modules.common.multispeaker import MultispeakerManager
if __name__ == "__main__":
    # CLI entry point: load the main config, make sure all referenced
    # directories exist, then run multispeaker inference.
    parser = argparse.ArgumentParser()
    parser.add_argument("main_config_path", type=str,
                        default='tts_modules/common/configs/main_config.yaml',
                        help='Path to main yaml configs')
    args = parser.parse_args()
    with open(args.main_config_path, "r") as ymlfile:
        # safe_load: yaml.load without an explicit Loader is deprecated and
        # can construct arbitrary Python objects from the file.
        main_configs = yaml.safe_load(ymlfile)
    # exist_ok avoids the check-then-create race of the previous code.
    for key in ("SPEAKER_SPEECH_PATH", "INPUT_TEXTS_PATH", "OUTPUT_AUDIO_DIR"):
        os.makedirs(main_configs[key], exist_ok=True)
    multispeaker_manager = MultispeakerManager(main_configs)
    multispeaker_manager.inference()
| StarcoderdataPython |
160625 | <reponame>niole/Parse-Graph
import collections
def parseGraphFromString(inputLines):
    """Parse lines of the form "<src> -> <dst>" into an adjacency mapping.

    Returns a collections.defaultdict(set) mapping each source node to the
    set of its direct successors.
    """
    graph = collections.defaultdict(set)
    for line in inputLines:
        # Split on the arrow instead of fixed character offsets (line[0] /
        # line[5]) so multi-character node names and extra whitespace work.
        source, _, target = line.partition("->")
        graph[source.strip()].add(target.strip())
    return graph
# Expected adjacency mapping for the sample input below (for eyeballing).
output = {
    'a' : {'b', 'c'},
    'b' : {'c', 'f'},
    'd' : {'a'}
}
# Sample edge list, one "src -> dst" edge per line.
inputLines = """a -> b
b -> c
a -> c
d -> a
b -> f""".split('\n')
def iterFindNextNode(g,target,start,currNode,i,visited):
    """
    Looks for next unvisited child node in g[currNode] and
    calls main function on it.
    If no unvisited nodes, function calls iterBackTrack,
    to find previous node with unvisited children.
    """
    # Recurse into the i-th child if it exists and is unvisited; otherwise
    # advance to the next child index.
    if len(g[currNode]) > 0 and i != len(g[currNode]):
        if g[currNode][i] not in visited:
            return iterBfsGraph(g,target,start,g[currNode][i],visited)
        else:
            return iterFindNextNode(g,target,start,currNode,i+1,visited)
    # NOTE(review): when g[currNode] is empty this returns None implicitly
    # without reaching the check below — confirm that is intended.
    if i == len(g[currNode]):
        return
def childrenVisited(g, start, parent, visited):
    """Return True when every child of *parent* in graph *g* is in *visited*.

    (*start* is unused; kept for signature compatibility with callers.)
    """
    # Equivalent to comparing the filtered-children length against the full
    # child list: the list is fully covered iff each entry is visited.
    return all(child in visited for child in g[parent])
def iterBackTrack(g,target,start,currNode,visited):
    """Walk back through visited nodes to find a parent of currNode and
    resume the search from it; report failure when the start node has no
    unexplored children left."""
    for parent in visited:
        # Back at the start with every child already seen: unreachable.
        if parent == start and childrenVisited(g,start,parent,visited):
            print "Can't reach target node."
            return
        if currNode in g[parent]:
            return iterBfsGraph(g,target,start,parent,visited)
def iterBfsGraph(g,target,start,currNode,visited):
    """
    Continue search until path matches one in visited.
    Continue until can't go forward and save visited nodes.
    Back track until find node with path not in visited.
    Assume all targets accessible and no loops exist.
    """
    # NOTE(review): despite the name this is a depth-first walk implemented
    # via mutual recursion with iterFindNextNode/iterBackTrack.
    while currNode != target:
        if currNode in visited and iterFindNextNode(g,target,start,currNode,1,visited) != False:
            #for backtracking: if current parent already in visited, but not all of it's children have
            #been searched
            return iterFindNextNode(g,target,start,currNode,0,visited)
        if currNode not in visited:
            #if current node not visited and therefore none of its children visited, first check if has children,
            #if has children call iterFindNextNode
            visited.add(currNode)
            if currNode in g:
                return iterFindNextNode(g,target,start,currNode,0,visited)
            else:
                return iterBackTrack(g,target,start,currNode,visited)
        else:
            #if current node and all of it's children in visited, backtrack to previous parent node
            return iterBackTrack(g,target,start,currNode,visited)
    # Target reached: print it (Python 2 print statement).
    print currNode
# --------------------------------------------#
# Build a plain dict from the parsed sample edges.
graph = dict(parseGraphFromString(inputLines))
def convertGraph(graph):
    """Convert every adjacency set in *graph* to a list, in place."""
    for e in graph:
        graph[e] = list(graph[e])
    return graph
#print convertGraph(graph)
# -------------------------------------------#
# Search for node 'f' starting from 'a' (Python 2 print statement).
print iterBfsGraph(convertGraph(graph),'f','a','a',set())
1752031 | <reponame>Amourspirit/ooo_uno_tmpl<gh_stars>0
# coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http: // www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from ....dyn.configuration.backend.authentication_failed_exception import AuthenticationFailedException as AuthenticationFailedException
from ....dyn.configuration.backend.backend import Backend as Backend
from ....dyn.configuration.backend.backend_access_exception import BackendAccessException as BackendAccessException
from ....dyn.configuration.backend.backend_adapter import BackendAdapter as BackendAdapter
from ....dyn.configuration.backend.backend_setup_exception import BackendSetupException as BackendSetupException
from ....dyn.configuration.backend.cannot_connect_exception import CannotConnectException as CannotConnectException
from ....dyn.configuration.backend.component_change_event import ComponentChangeEvent as ComponentChangeEvent
from ....dyn.configuration.backend.connection_lost_exception import ConnectionLostException as ConnectionLostException
from ....dyn.configuration.backend.copy_importer import CopyImporter as CopyImporter
from ....dyn.configuration.backend.data_importer import DataImporter as DataImporter
from ....dyn.configuration.backend.default_backend import DefaultBackend as DefaultBackend
from ....dyn.configuration.backend.hierarchy_browser import HierarchyBrowser as HierarchyBrowser
from ....dyn.configuration.backend.importer import Importer as Importer
from ....dyn.configuration.backend.insufficient_access_rights_exception import InsufficientAccessRightsException as InsufficientAccessRightsException
from ....dyn.configuration.backend.interaction_handler import InteractionHandler as InteractionHandler
from ....dyn.configuration.backend.invalid_authentication_mechanism_exception import InvalidAuthenticationMechanismException as InvalidAuthenticationMechanismException
from ....dyn.configuration.backend.layer import Layer as Layer
from ....dyn.configuration.backend.layer_describer import LayerDescriber as LayerDescriber
from ....dyn.configuration.backend.layer_filter import LayerFilter as LayerFilter
from ....dyn.configuration.backend.layer_update_merger import LayerUpdateMerger as LayerUpdateMerger
from ....dyn.configuration.backend.ldap_multi_layer_stratum import LdapMultiLayerStratum as LdapMultiLayerStratum
from ....dyn.configuration.backend.ldap_single_backend import LdapSingleBackend as LdapSingleBackend
from ....dyn.configuration.backend.ldap_single_stratum import LdapSingleStratum as LdapSingleStratum
from ....dyn.configuration.backend.local_data_importer import LocalDataImporter as LocalDataImporter
from ....dyn.configuration.backend.local_hierarchy_browser import LocalHierarchyBrowser as LocalHierarchyBrowser
from ....dyn.configuration.backend.local_schema_supplier import LocalSchemaSupplier as LocalSchemaSupplier
from ....dyn.configuration.backend.local_single_backend import LocalSingleBackend as LocalSingleBackend
from ....dyn.configuration.backend.local_single_stratum import LocalSingleStratum as LocalSingleStratum
from ....dyn.configuration.backend.malformed_data_exception import MalformedDataException as MalformedDataException
from ....dyn.configuration.backend.merge_importer import MergeImporter as MergeImporter
from ....dyn.configuration.backend.merge_recovery_request import MergeRecoveryRequest as MergeRecoveryRequest
from ....dyn.configuration.backend.multi_layer_stratum import MultiLayerStratum as MultiLayerStratum
from ....dyn.configuration.backend.multi_stratum_backend import MultiStratumBackend as MultiStratumBackend
from ....dyn.configuration.backend.node_attribute import NodeAttribute as NodeAttribute
from ....dyn.configuration.backend.node_attribute import NodeAttributeEnum as NodeAttributeEnum
from ....dyn.configuration.backend.offline_backend import OfflineBackend as OfflineBackend
from ....dyn.configuration.backend.online_backend import OnlineBackend as OnlineBackend
from ....dyn.configuration.backend.platform_backend import PlatformBackend as PlatformBackend
from ....dyn.configuration.backend.property_info import PropertyInfo as PropertyInfo
from ....dyn.configuration.backend.schema import Schema as Schema
from ....dyn.configuration.backend.schema_attribute import SchemaAttribute as SchemaAttribute
from ....dyn.configuration.backend.schema_attribute import SchemaAttributeEnum as SchemaAttributeEnum
from ....dyn.configuration.backend.schema_supplier import SchemaSupplier as SchemaSupplier
from ....dyn.configuration.backend.single_backend import SingleBackend as SingleBackend
from ....dyn.configuration.backend.single_backend_adapter import SingleBackendAdapter as SingleBackendAdapter
from ....dyn.configuration.backend.single_layer_stratum import SingleLayerStratum as SingleLayerStratum
from ....dyn.configuration.backend.stratum_creation_exception import StratumCreationException as StratumCreationException
from ....dyn.configuration.backend.system_integration import SystemIntegration as SystemIntegration
from ....dyn.configuration.backend.template_identifier import TemplateIdentifier as TemplateIdentifier
from ....dyn.configuration.backend.updatable_layer import UpdatableLayer as UpdatableLayer
from ....dyn.configuration.backend.x_backend import XBackend as XBackend
from ....dyn.configuration.backend.x_backend_changes_listener import XBackendChangesListener as XBackendChangesListener
from ....dyn.configuration.backend.x_backend_changes_notifier import XBackendChangesNotifier as XBackendChangesNotifier
from ....dyn.configuration.backend.x_backend_entities import XBackendEntities as XBackendEntities
from ....dyn.configuration.backend.x_composite_layer import XCompositeLayer as XCompositeLayer
from ....dyn.configuration.backend.x_layer import XLayer as XLayer
from ....dyn.configuration.backend.x_layer_content_describer import XLayerContentDescriber as XLayerContentDescriber
from ....dyn.configuration.backend.x_layer_handler import XLayerHandler as XLayerHandler
from ....dyn.configuration.backend.x_layer_importer import XLayerImporter as XLayerImporter
from ....dyn.configuration.backend.x_multi_layer_stratum import XMultiLayerStratum as XMultiLayerStratum
from ....dyn.configuration.backend.x_schema import XSchema as XSchema
from ....dyn.configuration.backend.x_schema_handler import XSchemaHandler as XSchemaHandler
from ....dyn.configuration.backend.x_schema_supplier import XSchemaSupplier as XSchemaSupplier
from ....dyn.configuration.backend.x_single_layer_stratum import XSingleLayerStratum as XSingleLayerStratum
from ....dyn.configuration.backend.x_updatable_layer import XUpdatableLayer as XUpdatableLayer
from ....dyn.configuration.backend.x_update_handler import XUpdateHandler as XUpdateHandler
from ....dyn.configuration.backend.x_versioned_schema_supplier import XVersionedSchemaSupplier as XVersionedSchemaSupplier
| StarcoderdataPython |
4809883 | from model import profile
class SessionHelper:
    """Selenium login/logout helpers for the ucbreport.ru account area."""
    def __init__(self, app):
        self.app = app
    def get_url(self):
        """Return the URL currently loaded in the browser."""
        return self.app.driver.current_url
    def login(self, user):
        """Log in using the user's mobile number and password."""
        driver = self.app.driver
        if not driver.current_url.endswith("/account"):
            driver.get("https://ucbreport.ru/account")
        driver.find_element_by_xpath("//app-button-esia/a/span").click()
        driver.find_element_by_id("mobileOrEmail").click()
        driver.find_element_by_xpath("//button[@id='loginByPwdButton']/span").click()
        driver.find_element_by_id("mobileOrEmail").clear()
        driver.find_element_by_id("mobileOrEmail").send_keys(user.mobile)
        driver.find_element_by_id("password").clear()
        # Fix: the password field must receive the user's password; the
        # previous code contained an invalid "<PASSWORD>" placeholder
        # (mirrors user.mobile above) — TODO confirm the model attribute name.
        driver.find_element_by_id("password").send_keys(user.password)
        driver.find_element_by_xpath("//button[@id='loginByPwdButton']/span").click()
    def ensure_login(self, user):
        """Ensure *user* is logged in, switching accounts if necessary."""
        if self.is_logged_in():
            if self.is_logged_in_as(user.surnameNS):
                return
            else:
                self.logout()
        self.login(user)
    def logout(self):
        """Log the current user out via the navbar menu."""
        driver = self.app.driver
        driver.find_element_by_xpath("//div[@id='navbarCollapse']/links/ul/li[5]/a/div/span").click()
        driver.find_element_by_link_text(u"Выйти").click()
    def ensure_logout(self):
        """Log out if a session is currently active."""
        if self.is_logged_in():
            self.logout()
    def is_logged_in(self):
        """Return True when the account page shows a logout link."""
        driver = self.app.driver
        driver.get("https://ucbreport.ru/account")
        return len(driver.find_elements_by_link_text("Выйти")) > 0
    def is_logged_in_as(self, surnameNS):
        """Return True when the navbar shows *surnameNS* as the current user."""
        driver = self.app.driver
        return driver.find_element_by_xpath("//*[@id='navbarCollapse']/links/ul/li[5]/a/div/span").text == surnameNS
| StarcoderdataPython |
1630926 | import collections, time, functools
from matplotlib.patches import FancyArrowPatch
from mpl_toolkits.mplot3d import proj3d
class Arrow3D(FancyArrowPatch):
    """
    Arrow used in the plotting of 3D vectors
    ex.
    a = Arrow3D([0, 1], [0, 1], [0, 1], mutation_scale=20,
    lw=1, arrowstyle="-|>", color="k")
    ax.add_artist(a)
    """
    def __init__(self, xs, ys, zs, *args, **kwargs):
        # Start with a dummy 2D arrow; the real endpoints are projected
        # from the stored 3D coordinates at draw time.
        FancyArrowPatch.__init__(self, (0, 0), (0, 0), *args, **kwargs)
        self._verts3d = xs, ys, zs
    def draw(self, renderer):
        # Project the 3D endpoints onto 2D display coordinates with the
        # current view transform, then let FancyArrowPatch render the arrow.
        xs3d, ys3d, zs3d = self._verts3d
        xs, ys, zs = proj3d.proj_transform(xs3d, ys3d, zs3d, renderer.M)
        self.set_positions((xs[0], ys[0]), (xs[1], ys[1]))
        FancyArrowPatch.draw(self, renderer)
# Note to avoid using external packages such as functools32 we use this code
# only using the standard library
def lru_cache(maxsize=255, timeout=None):
    """Least-recently-used cache decorator with optional expiry.

    Pure standard-library replacement for ``functools32.lru_cache`` that also
    works on instance methods (each instance gets its own cache) and supports
    an optional ``timeout`` in seconds after which a caller's cache is purged.

    Thanks to ilialuk @ https://stackoverflow.com/users/2121105/ilialuk for
    this code snippet. Modifications by <NAME>.

    Fix over the original snippet: a cache hit now refreshes the entry's
    recency (``OrderedDict.move_to_end``), so eviction is genuinely LRU
    instead of FIFO.
    """
    class LruCacheClass(object):
        def __init__(self, input_func, max_size, timeout):
            self._input_func = input_func
            self._max_size = max_size
            self._timeout = timeout
            # This will store the cache for this function,
            # format - {caller1 : [OrderedDict1, last_refresh_time1],
            #           caller2 : [OrderedDict2, last_refresh_time2]}.
            # In case of an instance method - the caller is the instance,
            # in case called from a regular function - the caller is None.
            self._caches_dict = {}
        def cache_clear(self, caller=None):
            # Reset the caller's cache to an empty, freshly-stamped one.
            self._caches_dict[caller] = [collections.OrderedDict(),
                                         time.time()]
        def __get__(self, obj, objtype):
            """ Called for instance methods """
            return_func = functools.partial(self._cache_wrapper, obj)
            return_func.cache_clear = functools.partial(self.cache_clear,
                                                        obj)
            # Return the wrapped function and wraps it to maintain the
            # docstring and the name of the original function:
            return functools.wraps(self._input_func)(return_func)
        def __call__(self, *args, **kwargs):
            """ Called for regular functions """
            return self._cache_wrapper(None, *args, **kwargs)
        # Set the cache_clear function in the __call__ operator:
        __call__.cache_clear = cache_clear
        def _cache_wrapper(self, caller, *args, **kwargs):
            # Create a unique key including the types (in order to
            # differentiate between 1 and '1'):
            kwargs_key = "".join(map(
                lambda x: str(x) + str(type(kwargs[x])) + str(kwargs[x]),
                sorted(kwargs)))
            key = "".join(
                map(lambda x: str(type(x)) + str(x), args)) + kwargs_key
            # Check if caller exists, if not create one:
            if caller not in self._caches_dict:
                self.cache_clear(caller)
            elif (self._timeout is not None
                    and time.time() - self._caches_dict[caller][1]
                    > self._timeout):
                # The refresh window has elapsed; drop the stale cache.
                self.cache_clear(caller)
            # Check if the key exists, if so - return it:
            cur_caller_cache_dict = self._caches_dict[caller][0]
            if key in cur_caller_cache_dict:
                # Cache hit: mark as most recently used so the eviction
                # below is true LRU (the original evicted in FIFO order).
                cur_caller_cache_dict.move_to_end(key)
                return cur_caller_cache_dict[key]
            # Validate we didn't exceed the max_size:
            if len(cur_caller_cache_dict) >= self._max_size:
                # Evict the least recently used entry:
                try:
                    cur_caller_cache_dict.popitem(last=False)
                except KeyError:
                    pass
            # Call the function and store the data in the cache (call it
            # with the caller in case it's an instance function)
            if caller is not None:
                args = (caller,) + args
            cur_caller_cache_dict[key] = self._input_func(*args, **kwargs)
            return cur_caller_cache_dict[key]
    # Return the decorator wrapping the class (also wraps the instance to
    # maintain the docstring and the name of the original function):
    return (lambda input_func: functools.wraps(input_func)(
        LruCacheClass(input_func, maxsize, timeout)))
| StarcoderdataPython |
3268620 | <gh_stars>1-10
#!/usr/bin/python3
# -*- coding: UTF-8 -*-
# @license : Copyright(C), Your Company
# @Author: <NAME>
# @Contact : <EMAIL>
# @Date: 2020-09-15 21:41
# @Description:
# https://www.jianshu.com/p/1e498888f505
# https://github.com/jllan/jannlp
# @Software : PyCharm
import re
import math
import jieba
text = """
自然语言处理是计算机科学领域与人工智能领域中的一个重要方向。
它研究能实现人与计算机之间用自然语言进行有效通信的各种理论和方法。
自然语言处理是一门融语言学、计算机科学、数学于一体的科学。
因此,这一领域的研究将涉及自然语言,即人们日常使用的语言,
所以它与语言学的研究有着密切的联系,但又有重要的区别。
自然语言处理并不是一般地研究自然语言,
而在于研制能有效地实现自然语言通信的计算机系统,
特别是其中的软件系统。因而它是计算机科学的一部分。
"""
def get_sentences(doc):
    """Split *doc* into sentence fragments.

    The document is first cut into physical lines, then each non-empty line
    is split on Chinese sentence delimiters; blank fragments are discarded.
    """
    line_break = re.compile('[\r\n]')
    delimiter = re.compile('[,。?!;]')
    sentences = []
    for line in line_break.split(doc):
        stripped_line = line.strip()
        if not stripped_line:
            continue
        sentences.extend(
            fragment.strip()
            for fragment in delimiter.split(stripped_line)
            if fragment.strip()
        )
    return sentences
# def filter_stop(words):
# return list(filter(lambda x: x not in stop, words))
class BM25(object):
    """Okapi BM25 ranking over a corpus of tokenised documents.

    Attributes:
        D: number of documents.
        avgdl: average document length in tokens.
        f: per-document term frequencies (one dict per document).
        df: number of documents containing each term.
        idf: inverse document frequency of each term.
    """

    def __init__(self, docs):
        self.D = len(docs)
        self.avgdl = sum(len(doc) + 0.0 for doc in docs) / self.D
        self.docs = docs
        self.f = []    # one {term: count} dict per document
        self.df = {}   # term -> number of documents containing it
        self.idf = {}  # term -> inverse document frequency
        self.k1 = 1.5  # term-frequency saturation parameter
        self.b = 0.75  # length-normalisation parameter
        self.init()

    def init(self):
        """Build the term-frequency, document-frequency and idf tables."""
        for doc in self.docs:
            counts = {}
            for term in doc:
                counts[term] = counts.get(term, 0) + 1
            self.f.append(counts)
            for term in counts:
                self.df[term] = self.df.get(term, 0) + 1
        for term, doc_freq in self.df.items():
            self.idf[term] = math.log(self.D - doc_freq + 0.5) - math.log(doc_freq + 0.5)

    def sim(self, doc, index):
        """BM25 similarity between query tokens *doc* and document *index*."""
        doc_len = len(self.docs[index])
        score = 0
        for term in doc:
            if term not in self.f[index]:
                continue
            term_freq = self.f[index][term]
            score += self.idf[term] * term_freq * (self.k1 + 1) / (term_freq + self.k1 * (1 - self.b + self.b * doc_len / self.avgdl))
        return score

    def simall(self, doc):
        """Return the BM25 score of *doc* against every document."""
        return [self.sim(doc, index) for index in range(self.D)]
if __name__ == '__main__':
    # Demo: split the sample text into sentences, tokenise each with jieba,
    # then score a query against every sentence with BM25.
    sents = get_sentences(text)
    doc = []
    for sent in sents:
        words = list(jieba.cut(sent))
        # words = filter_stop(words)
        doc.append(words)
    print(doc)
    m = BM25(doc)
    print(m.f)
    print(m.idf)
    # Query tokens; the repeated term contributes twice to each score.
    print(m.simall(['自然语言', '计算机科学', '领域', '人工智能', '领域']))
| StarcoderdataPython |
4839055 | '''
input: list of words or raw string
output: dictionary mapping words to frequencies
'''
def word_frequency(text):
    """Count occurrences of each word.

    Args:
        text: a list of words, or a raw string split on single spaces.

    Returns:
        A ('word_frequency', mapping) tuple where mapping maps each word to
        its number of occurrences.
    """
    if isinstance(text, str):
        text = text.split(' ')
    d = {}
    # Single counting pass; the original pre-seeded zeros from set(text)
    # and then counted in a second loop.
    for word in text:
        d[word] = d.get(word, 0) + 1
    return 'word_frequency', d
'''
input: list of words or raw string
output: dictionary mapping lengths of words to frequencies
'''
def word_length_frequency(text):
    """Count how many words have each length.

    Args:
        text: a list of words, or a raw string split on single spaces.

    Returns:
        A ('word_length_frequency', mapping) tuple where mapping maps word
        lengths to their frequencies.
    """
    if isinstance(text, str):
        text = text.split(' ')
    d = {}
    # Single counting pass; the original pre-seeded zeros from set(text)
    # and then counted in a second loop.
    for word in text:
        d[len(word)] = d.get(len(word), 0) + 1
    return 'word_length_frequency', d
def average_word_length(text):
    """Compute the mean word length.

    Args:
        text: a list of words, or a raw string split on single spaces.

    Returns:
        An ('average_word_length', mean) tuple.

    Raises:
        ZeroDivisionError: if *text* is an empty list (a raw empty string
        splits to [''] and yields 0.0 instead).
    """
    if isinstance(text, str):
        text = text.split(' ')
    return 'average_word_length', len(''.join(text)) / len(text)
3295439 | #!/usr/bin/env python3
import argparse
import socket
import time
import struct
from collections import OrderedDict
class Barrier:
    """TCP rendezvous point that blocks until ``waitFor`` processes connect.

    Each connecting process must send its logical PID as 8 bytes
    (big-endian unsigned 64-bit) immediately after connecting.  Once all
    expected processes have connected, every connection is closed
    (releasing the peers) and the release timestamp of each PID is stored.
    """
    def __init__(self, host, port, waitFor, printer=None):
        self.host = host
        self.port = port
        self.waitFor = waitFor        # number of processes to wait for
        self.printer = printer        # optional callable for progress output
        self.startTimes = dict()      # pid -> release time (ms since epoch)
    def listen(self):
        """Open the listening socket; must be called before wait()."""
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.sock.bind((self.host, self.port))
        self.sock.listen(128)
    def wait(self):
        """Block until all processes have connected, then release them."""
        connections = []
        addresses = []
        while True:
            conn, addr = self.sock.accept()
            # Read exactly 8 bytes: the peer's big-endian LogicalPID.
            idInBytes = []
            while len(idInBytes) < 8:
                data = conn.recv(8 - len(idInBytes))
                if not data:
                    raise Exception("Could not recv the LogicalPID")
                # list += bytes extends with the individual byte values.
                idInBytes += data
            pid = struct.unpack('!Q', bytes(idInBytes))[0]
            connections.append((pid, conn))
            addresses.append(addr)
            if self.printer:
                self.printer("Connection from {}, corresponds to PID {}".format(addr, pid))
            if len(connections) == self.waitFor:
                break
        # Closing the connections is the release signal for the peers.
        for pid, conn in connections:
            self.startTimes[pid] = int(time.time() * 1000)
            conn.close()
        return None
    def startTimesFuture(self):
        """Return the pid -> release-timestamp mapping recorded by wait()."""
        return self.startTimes
if __name__ == "__main__":
    # Stand-alone mode: parse CLI options, run the barrier once, and print
    # the release timestamp of every participating process.
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--host",
        default="0.0.0.0",
        dest="host",
        help="IP address where the barrier listens to (default: any)",
    )
    parser.add_argument(
        "--port",
        default=10000,
        type=int,
        dest="port",
        help="TCP port where the barrier listens to (default: 10000)",
    )
    parser.add_argument(
        "-p",
        "--processes",
        required=True,
        type=int,
        dest="processes",
        help="Number of processes the barrier waits for",
    )
    results = parser.parse_args()
    barrier = Barrier(results.host, results.port, results.processes, print)
    barrier.listen()
    print("Barrier listens on {}:{} and waits for {} processes".format(results.host, results.port, results.processes))
    barrier.wait()
    # Report, in PID order, when each process was released.
    for pid, startTs in OrderedDict(sorted(barrier.startTimesFuture().items())).items():
        print("Process {} started broadcasting at time {} ms from Unix epoch ".format(pid, startTs))
3330349 | <filename>coop2/bin/thread.py
#!/usr/bin/env python3
import RPi.GPIO as GPIO
from time import sleep
import threading
# Relay logic is inverted ("basackwards"): writing True de-energises the
# relay (STOP) and False energises it (RUN) -- presumably an active-low
# relay board; confirm against the wiring before changing.
RUN = False
STOP = True
# BCM pin numbers for outputs (relays) and inputs (switches/sensors).
DOOR_UP = 4
DOOR_DOWN = 5
DOOR_LOCK = 6
LIGHTS = 7
MAN_UP = 22
MAN_DOWN = 23
MAN_LIGHT = 24
UP_PROX = 26
DOWN_PROX = 27
# Configure all pins; every output starts in the STOP (inactive) state.
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(DOOR_UP, GPIO.OUT) # Motor FWD
GPIO.output(DOOR_UP, STOP)
GPIO.setup(DOOR_DOWN, GPIO.OUT) # Motor REV
GPIO.output(DOOR_DOWN, STOP)
GPIO.setup(DOOR_LOCK, GPIO.OUT) # Door Lock
GPIO.output(DOOR_LOCK, STOP)
GPIO.setup(LIGHTS, GPIO.OUT) # Lights
GPIO.output(LIGHTS, STOP)
GPIO.setup(MAN_UP, GPIO.IN,pull_up_down=GPIO.PUD_DOWN) # Manual Up Switch
GPIO.setup(MAN_DOWN, GPIO.IN,pull_up_down=GPIO.PUD_DOWN) # Manual Down Switch
GPIO.setup(MAN_LIGHT, GPIO.IN,pull_up_down=GPIO.PUD_DOWN) # Manual Light Switch
GPIO.setup(UP_PROX, GPIO.IN,pull_up_down=GPIO.PUD_DOWN) # Door Up Switch
GPIO.setup(DOWN_PROX, GPIO.IN,pull_up_down=GPIO.PUD_DOWN) # Door Down Switch
def open():
    """Poll the DOOR_UP pin and (re)arm a one-shot follow-up timer.

    NOTE(review): this function shadows the builtin ``open``; the name is
    kept because the rest of the script refers to it by this name.

    Bug fixes over the original: ``threading.Timer`` has ``cancel()``, not
    ``stop()`` (the original raised AttributeError on the first falling
    edge), and a Timer object cannot be ``start()``ed twice (the original
    raised RuntimeError on the second rising edge), so a fresh Timer is
    created for each activation.
    """
    print('open')
    try:
        timer = None
        while True:
            if GPIO.input(DOOR_UP):
                if timer is None:
                    # Schedule a follow-up invocation in one second.
                    timer = threading.Timer(1.0, open)
                    timer.start()
            elif timer is not None:
                # Input went low again: cancel the pending follow-up.
                timer.cancel()
                timer = None
            sleep(0.1)
    except KeyboardInterrupt:
        # here you put any code you want to run before the program
        # exits when you press CTRL+C
        print('\nKeyBoard Interrupt')
    except Exception as e:
        # this covers all other exceptions
        print(str(e))
    finally:
        GPIO.cleanup() # this ensures a clean exit
| StarcoderdataPython |
1614270 | <reponame>yaakov-github/notifiers
import pytest
import datetime
import time
from email import utils
from notifiers.exceptions import BadArguments
from notifiers.core import FAILURE_STATUS
provider = 'mailgun'
class TestMailgun:
    """Tests for the mailgun notifier provider.

    Tests marked ``@pytest.mark.online`` talk to the real Mailgun API and
    require valid credentials in the environment; the rest run offline.
    """
    def test_mailgun_metadata(self, provider):
        """Provider metadata exposes the Mailgun endpoint template and site."""
        assert provider.metadata == {
            'base_url': 'https://api.mailgun.net/v3/{domain}/messages',
            'name': 'mailgun',
            'site_url': 'https://documentation.mailgun.com/'
        }
    @pytest.mark.parametrize('data, message', [
        ({}, 'to'),
        ({'to': 'foo'}, 'domain'),
        ({'to': 'foo', 'domain': 'bla'}, 'api_key'),
        ({'to': 'foo', 'domain': 'bla', 'api_key': 'bla'}, 'from'),
        ({'to': 'foo', 'domain': 'bla', 'api_key': 'bla', 'from': 'bbb'}, 'message')
    ])
    def test_mailgun_missing_required(self, data, message, provider):
        """Each missing required property raises BadArguments naming it."""
        # env_prefix prevents required values from leaking in from real
        # environment variables during the test.
        data['env_prefix'] = 'test'
        with pytest.raises(BadArguments, match=f"'{message}' is a required property"):
            provider.notify(**data)
    @pytest.mark.online
    def test_mailgun_sanity(self, provider):
        """Minimal online smoke test: a bare message sends successfully."""
        provider.notify(message='foo', raise_on_errors=True)
    @pytest.mark.online
    def test_mailgun_all_options(self, provider, tmpdir):
        """Online test exercising every supported mailgun option at once."""
        dir_ = tmpdir.mkdir("sub")
        file_1 = dir_.join("hello.txt")
        file_1.write("content")
        file_2 = dir_.join("world.txt")
        file_2.write("content")
        # Schedule delivery slightly in the future, as an RFC 2822 date.
        now = datetime.datetime.now() + datetime.timedelta(minutes=3)
        rfc_2822 = utils.formatdate(time.mktime(now.timetuple()))
        data = {
            'message': 'foo',
            'html': f'<b>{now}</b>',
            'subject': f'{now}',
            'attachment': [
                file_1.strpath,
                file_2.strpath
            ],
            'inline': [
                file_1.strpath,
                file_2.strpath
            ],
            'tag': [
                'foo',
                'bar'
            ],
            'dkim': True,
            'deliverytime': rfc_2822,
            'testmode': False,
            'tracking': True,
            'tracking_clicks': 'htmlonly',
            'tracking_opens': True,
            'require_tls': False,
            'skip_verification': True,
            'headers': {
                'foo': 'bar'
            },
            'data': {
                'foo': {
                    'bar': 'bla'
                }
            }
        }
        provider.notify(**data, raise_on_errors=True)
    def test_mailgun_error_response(self, provider):
        """A bogus API key yields a failed response carrying the API error."""
        data = {
            'api_key': 'FOO',
            'message': 'bla',
            'to': '<EMAIL>',
            'domain': 'foo',
            'from': '<EMAIL>'
        }
        rsp = provider.notify(**data)
        assert rsp.status == FAILURE_STATUS
        assert 'Forbidden' in rsp.errors
| StarcoderdataPython |
98952 | # coding=utf-8
import time
from service.mahjong.models.hutype.basetype import BaseType
from service.mahjong.constants.carddefine import CardType
from service.mahjong.models.hutype.basetype import BaseType
from service.mahjong.constants.carddefine import CardType, CARD_SIZE
from service.mahjong.models.card.hand_card import HandCard
from service.mahjong.models.utils.cardanalyse import CardAnalyse
class SanFengKe(BaseType):
    """
    Three wind pungs (san feng ke): the winning hand contains three
    pungs/kongs of wind tiles.
    Not scored on top of this: dragon pungs.
    """
    def __init__(self):
        super(SanFengKe, self).__init__()
    def is_feng(self, i):
        # A set counts as a "wind" set when its first tile value is one of
        # the four wind tiles (65-68 in this tile encoding).
        if i:
            return i[0] in [65, 66, 67, 68]
        else:
            return False
    def is_this_type(self, hand_card, card_analyse):
        """Return True if the hand holds at least three wind pungs/kongs."""
        # Gather every melded set: pungs plus all three kinds of kongs.
        peng_cards = hand_card.peng_card_vals
        an_gang_card_vals = hand_card.an_gang_card_vals
        bu_gang_card_vals = hand_card.bu_gang_card_vals
        dian_gang_card_vals = hand_card.dian_gang_card_vals
        ke_lst = []
        ke_lst.extend(peng_cards)
        ke_lst.extend(an_gang_card_vals)
        ke_lst.extend(bu_gang_card_vals)
        ke_lst.extend(dian_gang_card_vals)
        feng_ke_count = 0
        # Each entry of ret is one alternative decomposition of the
        # concealed hand; its "k" member holds that decomposition's pungs.
        ret = card_analyse.get_jiang_ke_shun_plus(hand_card.hand_card_vals)
        for index in range(len(ret)):
            k = ret[index]["k"]
            ke_lst.extend(k)
            for i in k:
                if self.is_feng(i):
                    feng_ke_count += 1
            # NOTE(review): only concealed-hand pungs are inspected -- the
            # melded sets collected into ke_lst above are never checked --
            # and feng_ke_count accumulates across *different* alternative
            # decompositions instead of resetting per decomposition.  Both
            # look like bugs; confirm against the rule set before relying
            # on this check.
            if feng_ke_count >= 3:
                return True
        return False
if __name__ == "__main__":
    # Ad-hoc manual test: build a hand and check it for three wind pungs.
    pass
    card_analyse = CardAnalyse()
    hand_card = HandCard(0, None)
    # hand_card.hand_card_info = {
    #     1: [9, 1, 1, 1, 1, 1, 1, 1, 1, 1],  # characters (wan)
    #     2: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],  # bamboo (tiao)
    #     3: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],  # dots (bing)
    #     4: [2, 2, 0, 0, 0],  # winds (feng)
    #     5: [3, 3, 0, 0],  # dragons (jian)
    # }
    # Index 0 of each list is the suit total; the rest are per-tile counts.
    hand_card.hand_card_info = {
        1: [5, 0, 3, 0, 2, 0, 0, 0, 0, 0],  # characters (wan)
        2: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],  # bamboo (tiao)
        3: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],  # dots (bing)
        4: [9, 3, 3, 3, 0],  # winds (feng)
        5: [0, 0, 0, 0],  # dragons (jian)
    }
    hand_card.handle_hand_card_for_settle_show()
    hand_card.union_hand_card()
    print("hand_card =", hand_card.hand_card_vals)
    test_type = SanFengKe()
    start_time = time.time()
    print(test_type.is_this_type(hand_card, card_analyse))
    print("time = ", time.time() - start_time)
1778002 | <reponame>ftconan/python3
"""
@author: magician
@date: 2019/11/22
@file: namedtuple_demo.py
"""
import json
from collections import namedtuple
Car = namedtuple('Car', 'color mileage')
class MyCarWithMethods(Car):
    """Car namedtuple extended with a hex-colour helper method."""

    def hexcolor(self):
        """Map the colour name to a hex code ('red' -> red, else black)."""
        return '#ff0000' if self.color == 'red' else '#000000'
if __name__ == '__main__':
    # Tutorial walkthrough of tuples and namedtuples; each section below
    # demonstrates one feature and prints the result.
    tup = ('hello', object(), 42)
    print(tup)
    print(tup[2])
    # Tuples are immutable: item assignment raises TypeError.
    try:
        tup[2] = 23
    except Exception as e:
        print(e)
    # Namedtuples to the Rescue
    # Car = namedtuple('Car', 'color mileage')
    # print('color mileage'.split())
    Car = namedtuple('Car', ['color', 'mileage'])
    my_car = Car('red', 3812.4)
    print(my_car.color)
    print(my_car.mileage)
    print(my_car[0])
    print(tuple(my_car))
    color, mileage = my_car
    print(color, mileage)
    print(*my_car)
    print(my_car)
    # Namedtuples are immutable too: attribute assignment raises.
    try:
        my_car.color = 'blue'
    except Exception as e:
        print(e)
    # Subclassing Namedtuple
    c = MyCarWithMethods('red', 1234)
    print(c.hexcolor())
    # add properties: extend an existing namedtuple's fields
    Car = namedtuple('Car', 'color mileage')
    ElectricCar = namedtuple('ElectricCar', Car._fields + ('charge',))
    print(ElectricCar('red', 1234, 45.0))
    # Built-in Helper Method
    print(my_car._asdict())
    print(json.dumps(my_car._asdict()))
    print(my_car._replace(color='blue'))
    print(Car._make(['red', 999]))
    # When to Use Namedtuple
    pass
| StarcoderdataPython |
3281585 | import logging
from hetdesrun.component.load import base_module_path
from hetdesrun.runtime.exceptions import (
RuntimeExecutionError,
DAGProcessingError,
UncaughtComponentException,
MissingOutputDataError,
ComponentDataValidationError,
WorkflowOutputValidationError,
WorkflowInputDataValidationError,
)
runtime_component_logger = logging.getLogger(base_module_path)
| StarcoderdataPython |
45078 | #!/usr/bin/env python3
"""TPatrick | Alta3 Research
Creating a simple dice program utilizing classes."""
from random import randint
class Player:
    """A dice player holding the result of the most recent roll."""

    def __init__(self):
        # No dice rolled yet.
        self.dice = []

    def roll(self):
        """Roll three six-sided dice, replacing any previous result."""
        self.dice = [randint(1, 6) for _ in range(3)]

    def get_dice(self):
        """Return the dice values from the latest roll."""
        return self.dice
def main():
    """Called at run time: roll dice for two players and announce the winner."""
    player1, player2 = Player(), Player()
    player1.roll()
    player2.roll()
    print(f"Player 1 rolled {player1.get_dice()}")
    print(f"Player 2 rolled {player2.get_dice()}")
    total1 = sum(player1.get_dice())
    total2 = sum(player2.get_dice())
    # Higher total wins; equal totals are a draw.
    if total1 == total2:
        print("Draw!")
    elif total1 > total2:
        print("Player 1 wins!")
    else:
        print("Player 2 wins!")
if __name__ == "__main__":
main()
| StarcoderdataPython |
def solution(lottos, win_nums):
    """Return [best possible rank, guaranteed rank] for a lotto ticket.

    Zeros in *lottos* are unreadable numbers: in the best case every zero
    matches a winning number, in the worst case none do.  Ranks map from
    match counts via 6->1st ... 2->5th, and 0/1 matches both rank 6th.
    """
    rank_by_matches = {6: 1, 5: 2, 4: 3, 3: 4, 2: 5, 1: 6, 0: 6}
    erased = lottos.count(0)
    # 0 is never a winning number, so the intersection ignores erased slots.
    matched = len(set(lottos) & set(win_nums))
    return [rank_by_matches[matched + erased], rank_by_matches[matched]]
| StarcoderdataPython |
1799547 | <gh_stars>0
import json
import sys
from pathlib import Path
from django.conf import settings
from django.contrib import messages
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import F
from django.forms import formset_factory
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render, redirect
from django.urls import reverse_lazy
from django.views import View
from django.views.generic import DetailView, ListView, TemplateView, View
from django.views.generic.edit import CreateView, DeleteView, UpdateView, FormView
from experiments import forms as forms
from experiments import models as models
from experiments.utils.repo import find_new_experiments, get_latest_commit
sys.path.append(Path(settings.ROOT_DIR, "expfactory_deploy_local"))
from expfactory_deploy_local.utils import generate_experiment_context
# Repo Views
class RepoOriginListView(ListView):
    """List all tracked repository origins with their experiment repos."""
    model = models.RepoOrigin
    queryset = models.RepoOrigin.objects.prefetch_related("experimentrepo_set")
class RepoOriginCreate(CreateView):
    """Register a new repository origin and clone it locally on success."""
    model = models.RepoOrigin
    form_class = forms.RepoOriginForm
    success_url = reverse_lazy("experiments:experiment-repo-list")
    def form_valid(self, form):
        response = super().form_valid(form)
        # Clone the freshly registered repository right away so its
        # experiments can be discovered.
        self.object.clone()
        return response
# Experiment Views
def experiment_instances_from_latest(experiment_repos):
    """Ensure an ExperimentInstance exists for each repo's latest commit.

    Args:
        experiment_repos: iterable of ExperimentRepo model objects.
    """
    for experiment_repo in experiment_repos:
        latest = get_latest_commit(experiment_repo.location)
        # Bug fix: the original called ExperimentInstance.get_or_create,
        # but no such name is in scope (NameError) and get_or_create lives
        # on the model's manager -- matching the usage elsewhere in this
        # module (models.ExperimentInstance.objects.get_or_create).
        models.ExperimentInstance.objects.get_or_create(
            experiment_repo_id=experiment_repo.id, commit=latest
        )
class ExperimentRepoList(ListView):
    """List all experiment repositories with their origins prefetched."""
    model = models.ExperimentRepo
    queryset = models.ExperimentRepo.objects.prefetch_related("origin")
class ExperimentRepoDetail(DetailView):
    """Show one experiment repo plus, per battery using it, its results."""
    model = models.ExperimentRepo
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        batteries = models.Battery.objects.filter(batteryexperiments__experiment_instance__experiment_repo_id=self.get_object())
        results = models.Result.objects.filter(battery_experiment__experiment_instance__experiment_repo_id=self.get_object())
        # Pair each battery with the list of results produced under it.
        batt_results = [(batt, list(results.filter(battery_experiment__battery=batt))) for batt in batteries]
        context['batt_results'] = batt_results
        return context
def add_new_experiments(request):
    """Scan tracked repositories for new experiments and redirect to the list.

    Newly discovered repositories and experiments are reported to the user
    through the Django messages framework; scan errors surface as error
    messages.
    """
    new_repos, new_experiments, scan_errors = find_new_experiments()
    notifications = [
        f"Tracking previously unseen repository {repo.url}" for repo in new_repos
    ]
    notifications += [
        f"Added new experiment {experiment.name}" for experiment in new_experiments
    ]
    for note in notifications:
        messages.info(request, note)
    for error in scan_errors:
        messages.error(request, error)
    return redirect('/experiments')
class ExperimentRepoUpdate(UpdateView):
    """Edit the display name of an experiment repository."""
    model = models.ExperimentRepo
    fields = ["name"]
class ExperimentRepoDelete(DeleteView):
    """Delete an experiment repository after confirmation."""
    model = models.ExperimentRepo
    # NOTE(review): other URLs in this module use the "experiments:"
    # namespace ("experiments:experiment-repo-list"); confirm this
    # un-namespaced name resolves.
    success_url = reverse_lazy("experiment-repo-list")
# Battery Views
# Inject list of experiment repos into a context and the id attribute used by the form
def add_experiment_repos(context):
    """Add all experiment repos (origins prefetched) to *context* in place.

    Also sets the placeholder element id used by the repo-selection widget.
    """
    context[
        "experiment_repo_list"
    ] = models.ExperimentRepo.objects.all().prefetch_related("origin")
    context["exp_repo_select_id"] = "place_holder"
class BatteryList(ListView):
    """List template batteries together with their derived children."""
    model = models.Battery
    def get_queryset(self):
        # Only template batteries are listed; concrete deployments are
        # reachable through each template's children.
        return models.Battery.objects.filter(status='template').prefetch_related("children")
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        return context
class BatteryDetail(DetailView):
    """Show one battery with its assignments and experiment instances."""
    model = models.Battery
    queryset = models.Battery.objects.prefetch_related("assignment_set", "experiment_instances")
"""
View used for battery creation. Handles creating expdeirmentinstance
objects and order entries in the battery <-> experiment instance pivot table
as needed.
"""
class BatteryComplex(TemplateView):
    """Create or edit a battery and its ordered experiment instances.

    Handles creating ExperimentInstance objects and the order entries in
    the battery <-> experiment instance pivot table as needed.
    """
    template_name = "experiments/battery_form.html"
    battery = None
    battery_kwargs = {}
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        qs = models.ExperimentInstance.objects.none()
        ordering = None
        if self.battery:
            # Existing battery: load its instances in battery order.
            qs = models.ExperimentInstance.objects.filter(
                batteryexperiments__battery=self.battery
            ).order_by("batteryexperiments__order")
            ordering = qs.annotate(exp_order=F("batteryexperiments__order"))
        context["form"] = forms.BatteryForm(**self.battery_kwargs)
        add_experiment_repos(context)
        context["exp_instance_formset"] = forms.ExpInstanceFormset(
            queryset=qs, form_kwargs={"ordering": ordering}
        )
        return context
    def get_object(self):
        # Unlike SingleObjectMixin.get_object, this stores the battery on
        # self (or leaves self.battery as None for the create case).
        battery_id = self.kwargs.get("pk")
        if battery_id is not None:
            battery = get_object_or_404(models.Battery, pk=battery_id)
            self.battery = battery
            self.battery_kwargs = {"instance": battery}
    def get(self, request, *args, **kwargs):
        self.get_object()
        # NOTE(review): self.battery is None when no pk is supplied, which
        # makes the attribute access below raise AttributeError -- confirm
        # the URLconf always provides pk for GET, or guard the create case.
        if self.battery.status in ['published', 'inactive']:
            return redirect("experiments:battery-detail", pk=self.battery.id)
        return self.render_to_response(self.get_context_data())
    def post(self, request, *args, **kwargs):
        self.get_object()
        form = forms.BatteryForm(self.request.POST, **self.battery_kwargs)
        # NOTE(review): form.save() is called before form.is_valid(); an
        # invalid form makes save() raise ValueError.  Validate first.
        battery = form.save()
        # NOTE(review): 'ordering' below is computed but never used.
        ordering = models.ExperimentInstance.objects.filter(
            batteryexperiments__battery=self.battery
        ).order_by("batteryexperiments__order")
        exp_instance_formset = forms.ExpInstanceFormset(
            self.request.POST, form_kwargs={"battery_id": battery.id}
        )
        valid = exp_instance_formset.is_valid()
        if valid:
            ei = exp_instance_formset.save()
            # Drop pivot rows for instances removed from the formset.
            battery.batteryexperiments_set.exclude(experiment_instance__in=ei).delete()
        elif not valid:
            print(exp_instance_formset.errors)
        if form.is_valid():
            return HttpResponseRedirect("/battery/")
        else:
            print(form.errors)
        return HttpResponseRedirect(reverse_lazy("battery-list"))
class BatteryClone(View):
    """Placeholder for cloning a battery.

    NOTE(review): the battery is fetched (404 on a bad pk) but no clone is
    actually performed before redirecting -- implementation looks
    unfinished.
    """
    def get(self, request, *args, **kwargs):
        pk = self.kwargs.get("pk")
        batt = get_object_or_404(models.Battery, pk=pk)
        return redirect('experiments:battery-list')
"""
class BatteryDeploymentDelete(DeleteView):
model = models.Battery
success_url = reverse_lazy('battery-list')
"""
def jspsych_context(exp_instance):
    """Build the template context for deploying a jsPsych experiment.

    Deploys the experiment instance's static files, derives the matching URL
    paths from the filesystem paths, and delegates context construction to
    generate_experiment_context.
    """
    static_fs_root = exp_instance.deploy_static()
    static_url_root = static_fs_root.replace(
        settings.DEPLOYMENT_DIR, settings.STATIC_DEPLOYMENT_URL
    )
    repo_stem = Path(exp_instance.experiment_repo_id.location).stem
    experiment_fs_path = Path(static_fs_root, repo_stem)
    experiment_url_path = Path(static_url_root, repo_stem)
    # default js/css location for poldracklab style experiments
    shared_assets_url = Path(settings.STATIC_NON_REPO_URL, "default")
    return generate_experiment_context(
        experiment_fs_path, shared_assets_url, experiment_url_path
    )
class Preview(View):
    """Render a single experiment (at its latest commit) for previewing."""
    def get(self, request, *args, **kwargs):
        exp_id = self.kwargs.get("exp_id")
        experiment = get_object_or_404(models.ExperimentRepo, id=exp_id)
        commit = experiment.get_latest_commit()
        exp_instance, created = models.ExperimentInstance.objects.get_or_create(
            experiment_repo_id=experiment, commit=commit
        )
        # Could embed commit or instance id in kwargs, default to latest for now
        # default template for poldracklab style experiments
        template = "experiments/jspsych_deploy.html"
        context = jspsych_context(exp_instance)
        return render(request, template, context)
class Serve(TemplateView):
    """Serve the next experiment of a battery to a subject.

    NOTE(review): ``subjectg`` looks like a typo for ``subject`` -- it is
    never read; set_objects() assigns ``self.subject`` directly.
    """
    subjectg = None
    battery = None
    experiment = None
    assignment = None
    def get_template_names(self):
        """
        Return a list of template names to be used for the request. Must return
        a list. May not be called if render_to_response() is overridden.
        """
        return ["experiments/jspsych_deploy.html"]
    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        return context
    def set_objects(self):
        # Resolve subject, battery and their assignment from the URL kwargs.
        subject_id = self.kwargs.get("subject_id")
        battery_id = self.kwargs.get("battery_id")
        """ we might accept the uuid assocaited with the subject instead of its id """
        if subject_id is not None:
            self.subject = get_object_or_404(
                models.Subject, id=subject_id
            )
        else:
            self.subject = None
        self.battery = get_object_or_404(
            models.Battery, id=battery_id
        )
        try:
            self.assignment = models.Assignment.objects.get(
                subject=self.subject, battery=self.battery
            )
        except ObjectDoesNotExist:
            # make new assignment for testing, in future 404.
            assignment = models.Assignment(
                subject_id=subject_id, battery_id=battery_id,
            )
            assignment.save()
            self.assignment = assignment
    def get(self, request, *args, **kwargs):
        self.set_objects()
        if self.assignment.consent_accepted is not True:
            # display instructions and consent
            pass
        self.experiment = self.assignment.get_next_experiment()
        exp_context = jspsych_context(self.experiment)
        exp_context["post_url"] = reverse_lazy("experiments:push-results", args=[self.assignment.id, self.experiment.id])
        exp_context["next_page"] = reverse_lazy("serve-battery", args=[self.subject.id, self.battery.id])
        context = {**self.get_context_data(), **exp_context}
        return self.render_to_response(context)
    def post(self, request, *args, **kwargs):
        # NOTE(review): returns None, which Django rejects (a view must
        # return an HttpResponse) -- POST handling looks unimplemented.
        return
class Results(View):
    """Receive experiment result payloads posted by the jsPsych frontend."""
    # If more frameworks are added this would dispatch to their respective
    # versions of this function.
    # expfactory-docker purges keys and process survey data at this step
    def process_exp_data(self, post_data, assignment):
        """Parse the posted JSON and advance the assignment's status.

        Returns (parsed_data, finished) where finished is True when the
        payload's status field is "finished".
        """
        data = json.loads(post_data)
        finished = data.get("status") == "finished"
        if finished:
            assignment.status = "completed"
        elif assignment.status == "not-started":
            assignment.status = "started"
        return data, finished
    def post(self, request, *args, **kwargs):
        assignment_id = self.kwargs.get("assignment_id")
        experiment_id = self.kwargs.get("experiment_id")
        exp_instance = get_object_or_404(models.ExperimentInstance, id=experiment_id)
        assignment = get_object_or_404(models.Assignment, id=assignment_id)
        batt_exp = get_object_or_404(models.BatteryExperiments, battery=assignment.battery, experiment_instance=exp_instance)
        data, finished = self.process_exp_data(request.body, assignment)
        # Only completed runs are persisted as Result rows.
        if finished:
            models.Result(assignment=assignment, battery_experiment=batt_exp, subject=assignment.subject, data=data, status="completed").save()
        assignment.save()
        # NOTE(review): "recieved" is a typo, but it is a runtime response
        # body -- left untouched in case a client matches on it.
        return HttpResponse('recieved')
class SubjectList(ListView):
    """List all subjects with their assignments prefetched."""
    model = models.Subject
    queryset = models.Subject.objects.prefetch_related("assignment_set")
class CreateSubjects(FormView):
    """Bulk-create a user-specified number of anonymous subjects."""
    template_name = 'experiments/create_subjects.html'
    form_class = forms.SubjectCount
    success_url = reverse_lazy('experiments:subject-list')
    def form_valid(self, form):
        # One INSERT for the whole batch instead of per-subject saves.
        new_subjects = [models.Subject() for i in range(form.cleaned_data['count'])]
        models.Subject.objects.bulk_create(new_subjects)
        return super().form_valid(form)
| StarcoderdataPython |
1623353 | <reponame>lars-frogner/bifrost-rust
#!/usr/bin/env python
import os
import sys
import re
import pathlib
import logging
import shutil
import csv
import warnings
import numpy as np
from tqdm import tqdm
from ruamel.yaml import YAML
from joblib import Parallel, delayed
from matplotlib.offsetbox import AnchoredText
try:
import backstaff.units as units
import backstaff.running as running
import backstaff.fields as fields
import backstaff.helita_utils as helita_utils
except ModuleNotFoundError:
import units
import running
import fields
import helita_utils
def update_dict_nested(d, u):
    """Recursively merge mapping *u* into *d* in place and return *d*.

    Values from *u* override those in *d*, except that nested dicts are
    merged key-by-key instead of being replaced wholesale.
    """
    for key, value in u.items():
        d[key] = (
            update_dict_nested(d.get(key, {}), value)
            if isinstance(value, dict)
            else value
        )
    return d
def abort(logger, *args, **kwargs):
    # Log a critical message (args are forwarded to logger.critical) and
    # terminate the process with exit status 1.
    logger.critical(*args, **kwargs)
    sys.exit(1)
def bifrost_data_has_variable(bifrost_data, variable_name, try_fetch=False):
    """Return True if *bifrost_data* provides *variable_name*.

    A variable counts as present when it is an attribute of the data object
    or listed among its computed variables.  With ``try_fetch=True``, a
    ``get_var`` call is attempted as a last resort.
    """
    if hasattr(bifrost_data, variable_name) or variable_name in bifrost_data.compvars:
        return True
    if not try_fetch:
        return False
    try:
        bifrost_data.get_var(variable_name)
        return True
    # Bug fix: the original bare `except:` also swallowed SystemExit and
    # KeyboardInterrupt; only real fetch failures should mean "absent".
    except Exception:
        return False
def bifrost_data_has_variables(bifrost_data, *variable_names, **kwargs):
    """Return True only if every name in *variable_names* is available.

    Keyword arguments (e.g. ``try_fetch``) are forwarded to
    ``bifrost_data_has_variable``.  Unlike the original loop, this
    short-circuits on the first missing variable.
    """
    return all(
        bifrost_data_has_variable(bifrost_data, variable_name, **kwargs)
        for variable_name in variable_names
    )
class Quantity:
    """A named physical quantity with unit scale and plotting metadata."""
    @staticmethod
    def for_reduction(name, unit_scale=1):
        # Lightweight instance without description/colormap, for reductions.
        return Quantity(name, unit_scale, None, None)
    def __init__(self, name, unit_scale, description, cmap_name):
        self.name = name
        self.unit_scale = (-1 if (len(name) == 2 and name[-1] == "z") else 1) * float(
            unit_scale
        )  # Account for flipped z-axis
        self.description = description
        self.cmap_name = cmap_name
    def get_plot_kwargs(self):
        """Return keyword arguments (label, colormap) for plotting."""
        return dict(clabel=self.description, cmap_name=self.cmap_name)
    @property
    def tag(self):
        return self.name
    @property
    def dependency_name(self):
        return self.name
    @property
    def dependency_type(self):
        return "derived"
    def set_name(self, name):
        self.name = name
    def is_available(self, bifrost_data):
        """True if the quantity can be read or computed from the data."""
        return bifrost_data_has_variable(bifrost_data, self.name, try_fetch=True)
    @classmethod
    def parse_file(cls, file_path, logger=logging):
        """Parse a CSV file of quantities: name, unit, description[, cmap].

        '#' starts a comment.  The unit may be a number, the name of a
        constant in the units module, or empty (treated as 1.0); a leading
        '-' negates it.  Returns a dict mapping names to instances of *cls*.
        """
        def decomment(csvfile):
            # Strip '#' comments and skip lines that become empty.
            for row in csvfile:
                raw = row.split("#")[0].strip()
                if raw:
                    yield raw
        logger.debug(f"Parsing {file_path}")
        file_path = pathlib.Path(file_path)
        quantities = {}
        with open(file_path, newline="") as f:
            reader = csv.reader(decomment(f))
            for row in reader:
                # The colormap column is optional; default to empty.
                if len(row) == 3:
                    row.append("")
                if len(row) != 4:
                    abort(
                        logger,
                        f'Invalid number of entries in CSV line in {file_path}: {", ".join(row)}',
                    )
                name, unit, description, cmap_name = row
                name = name.strip()
                unit = unit.strip()
                if len(unit) > 0 and unit[0] == "-":
                    unit_sign = -1
                    unit = unit[1:].strip()
                else:
                    unit_sign = 1
                try:
                    unit = float(unit)
                except ValueError:
                    # Not numeric: try a named constant from the units module.
                    try:
                        unit = getattr(units, unit)
                    except AttributeError:
                        if len(unit) != 0:
                            logger.warning(
                                f"Unit {unit} for quantity {name} in {file_path} not recognized, using 1.0"
                            )
                        unit = 1.0
                unit *= unit_sign
                description = description.strip()
                cmap_name = cmap_name.strip()
                if len(cmap_name) == 0:
                    cmap_name = "viridis"
                logger.debug(
                    f"Using quantity info: {name}, {unit:g}, {description}, {cmap_name}"
                )
                quantities[name] = cls(name, unit, description, cmap_name)
        return quantities
class SynthesizedQuantity(Quantity):
    """Base class for spectral-line quantities synthesized from simulation data.

    Line names follow <element>_<ionization stage>_<central wavelength in Å>,
    e.g. ``si_4_1393.755``.
    """
    @staticmethod
    def from_quantity(quantity, line_name, axis_name, logger=logging):
        """Promote a plain Quantity to the matching spectral-line subtype."""
        try:
            SynthesizedQuantity.get_central_wavelength(line_name)
        # NOTE(review): bare except also swallows KeyboardInterrupt;
        # ValueError would suffice here.
        except:
            abort(
                logger,
                f"Invalid format for spectral line name {line_name} , must be <element>_<ionization stage>_<central wavelength in Å> (e.g. si_4_1393.755)",
            )
        # Pick the subtype that supports this quantity name.
        subtypes = [LineIntensityQuantity, LineShiftQuantity, LineVarianceQuantity]
        constructor = None
        for subtype in subtypes:
            if quantity.name in subtype.supported_quantity_names:
                constructor = subtype
                break
        if constructor is None:
            all_names = sum(
                [subtype.supported_quantity_names for subtype in subtypes], []
            )
            abort(
                logger,
                f'Invalid name {quantity.name} for spectral line quantity (valid quantities are {",".join(all_names)})',
            )
        return constructor(
            quantity.name,
            line_name,
            axis_name,
            quantity.unit_scale,
            quantity.description,
            quantity.cmap_name,
        )
    @staticmethod
    def is_synthesized_quantity(quantity):
        """True if *quantity*'s name belongs to any spectral-line subtype."""
        subtypes = [LineIntensityQuantity, LineShiftQuantity, LineVarianceQuantity]
        all_names = sum([subtype.supported_quantity_names for subtype in subtypes], [])
        return quantity.name in all_names
    @staticmethod
    def get_central_wavelength(line_name):
        # Last underscore-separated field is the wavelength in Å; convert
        # to cm.
        return float(line_name.split("_")[-1]) * 1e-8  # [cm]
    @staticmethod
    def get_command_args(quantities):
        """Build backstaff 'synthesize' CLI arguments for *quantities*."""
        if len(quantities) == 0:
            return []
        line_names = list(set((quantity.line_name for quantity in quantities)))
        quantity_names = list(set((quantity.quantity_name for quantity in quantities)))
        return [
            "synthesize",
            f'--spectral-lines={",".join(line_names)}',
            f'--quantities={",".join(quantity_names)}',
            "-v",
            "--ignore-warnings",
        ]
    def __init__(self, quantity_name, line_name, axis_name, *args, **kwargs):
        super().__init__(f"{quantity_name}_{line_name}", *args, **kwargs)
        self.quantity_name = quantity_name
        self.line_name = line_name
        self.axis_name = axis_name
        self.central_wavelength = self.get_central_wavelength(line_name)
        # Derived field names used to look up the synthesized variables.
        self.emis_name = f"emis_{self.line_name}"
        self.shift_name = f"shift{self.axis_name}_{self.line_name}"
        self.emis_shift_name = f"emis_{self.shift_name}"
        self.vartg_name = f"vartg_{self.line_name}"
        self.vartgshift2_name = f"vartgshift2{self.axis_name}_{self.shift_name}"
        self.emis_vartgshift2_name = f"emis_{self.vartgshift2_name}"
    @property
    def dependency_name(self):
        raise NotImplementedError()
    @property
    def dependency_type(self):
        return "synthesized"
    def is_available(self, bifrost_data):
        raise NotImplementedError()
    def process_base_quantity(self, field):
        # Apply the unit scale; skip the multiplication when it is a no-op.
        return field if self.unit_scale == 1 else field * self.unit_scale
class LineIntensityQuantity(SynthesizedQuantity):
    """Spectral line intensity, derived from the line's emissivity field."""
    base_quantity_name = "intens"
    supported_quantity_names = [base_quantity_name]
    @property
    def dependency_name(self):
        return self.emis_name
    def is_available(self, bifrost_data):
        return bifrost_data_has_variable(bifrost_data, self.emis_name)
    def process_base_quantity(self, field):
        # Intensity needs no extra transformation beyond the unit scale.
        return super().process_base_quantity(field)
class LineShiftQuantity(SynthesizedQuantity):
    """Synthesized intensity-weighted shift (or Doppler velocity) of a spectral line."""

    base_quantity_name = "profshift"
    supported_quantity_names = [base_quantity_name, "dopvel"]

    @property
    def dependency_name(self):
        return self.emis_shift_name

    def corresponding_intensity_quantity(self):
        """Create the intensity quantity needed as the weighting for this shift."""
        return LineIntensityQuantity(
            LineIntensityQuantity.base_quantity_name,
            self.line_name,
            self.axis_name,
            1,
            None,
            None,
        )

    def is_available(self, bifrost_data):
        """Check for the precomputed weighted shift or for its two factors."""
        if bifrost_data_has_variable(bifrost_data, self.emis_shift_name):
            return True
        return bifrost_data_has_variables(
            bifrost_data, self.emis_name, self.shift_name
        )

    def process_base_quantity(self, field, axis_name=None):
        if self.quantity_name == "dopvel":
            # Convert wavelength shift to Doppler velocity [cm/s]; the sign is
            # flipped for the z-axis to account for the flipped z-coordinate.
            sign = -1 if axis_name == "z" else 1
            field = field.with_values(
                sign * units.CLIGHT * field.get_values() / self.central_wavelength
            )
        return super().process_base_quantity(field)
class LineVarianceQuantity(SynthesizedQuantity):
    """Synthesized line-profile variance, width (FWHM), or width-velocity."""

    base_quantity_name = "profvar"
    supported_quantity_names = [base_quantity_name, "profwidth", "widthvel"]

    @property
    def dependency_name(self):
        return self.emis_vartgshift2_name

    def corresponding_intensity_quantity(self):
        """Create the intensity quantity needed as the weighting for this variance."""
        return LineIntensityQuantity(
            LineIntensityQuantity.base_quantity_name,
            self.line_name,
            self.axis_name,
            1,
            None,
            None,
        )

    def corresponding_shift_quantity(self):
        """Create the shift quantity needed to de-bias the second moment."""
        return LineShiftQuantity(
            LineShiftQuantity.base_quantity_name,
            self.line_name,
            self.axis_name,
            1,
            None,
            None,
        )

    def is_available(self, bifrost_data):
        """Check for the precomputed combination or any factorization of it."""
        if bifrost_data_has_variable(bifrost_data, self.emis_vartgshift2_name):
            return True
        if bifrost_data_has_variables(
            bifrost_data, self.emis_name, self.vartgshift2_name
        ):
            return True
        return bifrost_data_has_variables(
            bifrost_data, self.emis_name, self.vartg_name, self.shift_name
        )

    def process_base_quantity(self, field):
        if self.quantity_name in ("profwidth", "widthvel"):
            # Gaussian FWHM from variance: 2*sqrt(2*ln 2) * sigma [cm].
            fwhm_factor = 2 * np.sqrt(2 * np.log(2))
            field = field.with_values(fwhm_factor * np.sqrt(field.get_values()))
        if self.quantity_name == "widthvel":
            # Express the width as an equivalent velocity [cm/s].
            field = field.with_values(
                units.CLIGHT * field.get_values() / self.central_wavelength
            )
        return super().process_base_quantity(field)
class Reduction:
    """Base class for operations reducing a 3D simulation cube to 2D fields.

    A reduction operates along one coordinate axis (0=x, 1=y, 2=z) and
    produces either a single 2D field or, for scans, one field per slice.
    """

    AXIS_NAMES = ["x", "y", "z"]

    def __init__(self, axis):
        self.axis = int(axis)

    @property
    def axis_name(self):
        """Letter name of the reduction axis ("x", "y", or "z")."""
        return self.AXIS_NAMES[self.axis]

    @property
    def yields_multiple_fields(self):
        # Overridden by reductions (e.g. Scan) producing one field per slice.
        return False

    @staticmethod
    def parse(reduction_config, logger=logging):
        """Parse a reduction config entry (str or dict) into a list of reductions.

        A plain string is shorthand for a dict with empty options. The
        optional ``axes`` option (str or list, default "x") produces one
        reduction instance per listed axis.
        """
        if isinstance(reduction_config, str):
            reduction_config = {reduction_config: {}}
        if not isinstance(reduction_config, dict):
            abort(logger, f"Reduction entry must be dict, is {type(reduction_config)}")
        classes = dict(
            scan=Scan,
            sum=Sum,
            mean=Mean,
            slice=Slice,
            integral=Integral,
            synthesis=Synthesis,
        )
        reductions = None
        for name, cls in classes.items():
            if name in reduction_config:
                logger.debug(f"Found reduction {name}")
                reduction_config = dict(reduction_config[name])
                axes = reduction_config.pop("axes", "x")
                if not isinstance(axes, list):
                    axes = [axes]
                reductions = [
                    cls(axis=Reduction.AXIS_NAMES.index(axis_name), **reduction_config)
                    for axis_name in axes
                ]
                # BUGFIX: stop at the first recognized reduction name.
                # reduction_config has been rebound to the inner options dict
                # above, so continuing the loop would test the remaining
                # reduction names against the wrong dict.
                break
        if reductions is None:
            abort(logger, "Missing valid reduction entry")
        return reductions

    def get_plot_kwargs(self, field):
        """Return axis labels for the two axes remaining after the reduction."""
        plot_axis_names = list(self.AXIS_NAMES)
        plot_axis_names.pop(self.axis)
        return dict(
            xlabel=f"${plot_axis_names[0]}$ [Mm]", ylabel=f"${plot_axis_names[1]}$ [Mm]"
        )

    def _get_axis_size(self, bifrost_data):
        """Number of grid cells along the reduction axis in *bifrost_data*."""
        return getattr(bifrost_data, self.axis_name).size

    def _parse_slice_coord_or_idx_to_idx(self, bifrost_data, coord_or_idx):
        """Resolve a coordinate-or-index config value to a valid slice index,
        clamped to the axis bounds."""
        return max(
            0,
            min(
                self._get_axis_size(bifrost_data) - 1,
                self.__class__._parse_float_or_int_input_to_int(
                    coord_or_idx,
                    lambda coord: fields.ScalarField2.slice_coord_to_idx(
                        bifrost_data, self.axis, coord
                    ),
                ),
            ),
        )

    def _parse_distance_or_stride_to_stride(self, bifrost_data, distance_or_stride):
        """Resolve a distance-or-stride config value to an index stride >= 1."""
        return max(
            1,
            self.__class__._parse_float_or_int_input_to_int(
                distance_or_stride,
                lambda distance: int(
                    distance / getattr(bifrost_data, f"d{self.axis_name}")
                ),
            ),
        )

    def _parse_coord_or_idx_range_to_slice(self, bifrost_data, coord_or_idx_range):
        """Convert a (start, end) pair of coordinates and/or indices to a slice.

        Int endpoints are taken as direct indices (end inclusive); any other
        endpoint is interpreted as a coordinate along the reduction axis.
        """
        assert (
            isinstance(coord_or_idx_range, (tuple, list))
            and len(coord_or_idx_range) == 2
        )
        coord_or_idx_range_copy = list(coord_or_idx_range)
        for i in range(2):
            # Mask out int endpoints so the coordinate lookup ignores them.
            if isinstance(coord_or_idx_range[i], int):
                coord_or_idx_range_copy[i] = None
        if self.axis == 2:
            # The z-axis is stored inverted in the simulation data.
            coords = helita_utils.inverted_zdn(bifrost_data)
        else:
            coords = [bifrost_data.xdn, bifrost_data.ydn][self.axis]
        coord_slice = helita_utils.inclusive_coord_slice(
            coords, coord_or_idx_range_copy
        )
        coord_slice = [coord_slice.start, coord_slice.stop]
        # Direct int endpoints override the coordinate-derived bounds
        # (end index is inclusive, hence the +1).
        if isinstance(coord_or_idx_range[0], int):
            coord_slice[0] = coord_or_idx_range[0]
        if isinstance(coord_or_idx_range[1], int):
            coord_slice[1] = coord_or_idx_range[1] + 1
        return slice(*coord_slice)

    @staticmethod
    def _parse_float_or_int_input_to_int(float_or_int, converter):
        """Interpret a config value as either a coordinate or an index.

        Strings prefixed with "i" are indices; strings prefixed with "c",
        bare numeric strings, and non-int numbers are coordinates that are
        converted to an index via *converter*. Plain ints pass through
        unchanged.
        """
        is_float = False
        if isinstance(float_or_int, str) and len(float_or_int) > 0:
            if float_or_int[0] == "i":
                float_or_int = int(float_or_int[1:])
            elif float_or_int[0] == "c":
                float_or_int = float(float_or_int[1:])
                is_float = True
            else:
                float_or_int = float(float_or_int)
                is_float = True
        elif isinstance(float_or_int, int):
            pass  # Already a usable index
        else:
            float_or_int = float(float_or_int)
            is_float = True
        if is_float:
            result = converter(float_or_int)
        else:
            result = float_or_int
        return int(result)
class Scan(Reduction):
    """Reduction producing one 2D slice per position along the scan axis."""

    def __init__(self, axis=0, start=None, end=None, step="i1"):
        super().__init__(axis)
        self.start = start
        self.end = end
        self.step = step

    @property
    def tag(self):
        start_part = "" if self.start is None else self.start
        end_part = "" if self.end is None else self.end
        step_part = "" if self.step == "i1" else f":{self.step}"
        return f"scan_{self.axis_name}_{start_part}:{end_part}{step_part}"

    @property
    def yields_multiple_fields(self):
        return True

    def __call__(self, bifrost_data, quantity):
        """Return a ScanSlices object lazily producing the slices of *quantity*."""
        if self.start is None:
            start_idx = 0
        else:
            start_idx = self._parse_slice_coord_or_idx_to_idx(bifrost_data, self.start)
        if self.end is None:
            end_idx = self._get_axis_size(bifrost_data) - 1
        else:
            end_idx = self._parse_slice_coord_or_idx_to_idx(bifrost_data, self.end)
        stride = self._parse_distance_or_stride_to_stride(bifrost_data, self.step)
        return ScanSlices(self, bifrost_data, quantity, start_idx, end_idx, stride)

    def get_slice_label(self, bifrost_data, slice_idx):
        """Return a plot label giving the physical coordinate of *slice_idx* in Mm."""
        coords = getattr(bifrost_data, self.axis_name)
        if self.axis == 2:
            # The z-axis is stored inverted in the simulation data.
            coords = coords[::-1]
        return f"${self.axis_name} = {coords[slice_idx]:.2f}$ Mm"
class ScanSlices:
    """Lazy accessor for the individual 2D slices produced by a Scan reduction."""

    def __init__(self, scan, bifrost_data, quantity, start_idx, end_idx, stride):
        self._scan = scan
        self._bifrost_data = bifrost_data
        self._quantity = quantity
        self._start_idx = start_idx
        self._end_idx = end_idx
        self._stride = stride

    def get_ids(self):
        """Return the slice indices covered by the scan (end index inclusive)."""
        return list(range(self._start_idx, self._end_idx + 1, self._stride))

    def __call__(self, slice_idx):
        """Extract the 2D field at *slice_idx* paired with its plot label."""
        sliced_field = fields.ScalarField2.slice_from_bifrost_data(
            self._bifrost_data,
            self._quantity.name,
            slice_axis=self._scan.axis,
            slice_idx=slice_idx,
            scale=self._quantity.unit_scale,
        )
        label = self._scan.get_slice_label(self._bifrost_data, slice_idx)
        return ScanSlice(sliced_field, label)
class ScanSlice:
    """Read-only pairing of a sliced 2D field with its descriptive plot label."""

    def __init__(self, field, label):
        self._field = field
        self._label = label

    @property
    def field(self):
        """The sliced 2D field."""
        return self._field

    @property
    def label(self):
        """Human-readable label describing the slice position."""
        return self._label
class Sum(Reduction):
    """Reduction summing the data cube along one axis (NaN values ignored)."""

    @property
    def tag(self):
        return f"sum_{self.axis_name}"

    def __call__(self, bifrost_data, quantity):
        """Accumulate *quantity* over the reduction axis with a NaN-safe sum."""
        return fields.ScalarField2.accumulated_from_bifrost_data(
            bifrost_data,
            quantity.name,
            accum_axis=self.axis,
            accum_operator=np.nansum,
            scale=quantity.unit_scale,
        )
class Mean(Reduction):
    """Reduction averaging the data cube along one axis, optionally masking values."""

    def __init__(self, axis=0, ignore_val=None):
        super().__init__(axis)
        # Value (or list of values) to exclude from the average, if any.
        self.ignore_val = ignore_val

    @property
    def tag(self):
        return f"mean_{self.axis_name}"

    def __call__(self, bifrost_data, quantity):
        def mask_ignored(field):
            # Replace ignored values by NaN so nanmean excludes them.
            if self.ignore_val is None:
                return field
            field = field.copy()
            ignored = (
                self.ignore_val
                if isinstance(self.ignore_val, list)
                else [self.ignore_val]
            )
            for val in ignored:
                field[field == val] = np.nan
            return field

        def quiet_nanmean(*args, **kwargs):
            # Suppress the "mean of empty slice" RuntimeWarning from np.nanmean.
            with warnings.catch_warnings():
                warnings.simplefilter("ignore", category=RuntimeWarning)
                return np.nanmean(*args, **kwargs)

        return fields.ScalarField2.accumulated_from_bifrost_data(
            bifrost_data,
            quantity.name,
            accum_axis=self.axis,
            value_processor=mask_ignored,
            accum_operator=quiet_nanmean,
            scale=quantity.unit_scale,
        )
class Integral(Reduction):
    """Reduction integrating a quantity along one axis over a coordinate range."""
    def __init__(self, axis=0, start=None, end=None):
        # start/end use the coordinate/index notation parsed by Reduction;
        # None means the full axis extent.
        super().__init__(axis)
        self.start = start
        self.end = end
    @property
    def tag(self):
        return f'integral_{self.axis_name}{"" if self.start is None else self.start}:{"" if self.end is None else self.end}'
    def __call__(
        self, bifrost_data, quantity, *extra_quantity_names, combine_fields=lambda x: x
    ):
        """Integrate *quantity* (optionally combined with extra quantities).

        *combine_fields* receives the sliced arrays of all requested
        quantities and must return the integrand array.
        """
        coord_slice = self._parse_coord_or_idx_range_to_slice(
            bifrost_data, (self.start, self.end)
        )
        if self.axis == 0:
            def value_processor(*fields):
                return combine_fields(*[field[coord_slice, :, :] for field in fields])
            # Uniform x grid: integrate as sum times the constant cell extent.
            dx = bifrost_data.dx * units.U_L
            accum_operator = lambda field, axis=None: np.sum(field, axis=axis) * dx
        elif self.axis == 1:
            def value_processor(*fields):
                return combine_fields(*[field[:, coord_slice, :] for field in fields])
            # Uniform y grid: integrate as sum times the constant cell extent.
            dy = bifrost_data.dy * units.U_L
            accum_operator = lambda field, axis=None: np.sum(field, axis=axis) * dy
        elif self.axis == 2:
            # Non-uniform z grid: weight each cell by its own extent before summing.
            dz = fields.ScalarField1.dz_in_bifrost_data(
                bifrost_data, scale=units.U_L
            ).get_values()[np.newaxis, np.newaxis, coord_slice]
            def value_processor(*fields):
                return (
                    combine_fields(*[field[:, :, coord_slice] for field in fields]) * dz
                )
            accum_operator = np.sum
        return fields.ScalarField2.accumulated_from_bifrost_data(
            bifrost_data,
            [quantity.name, *extra_quantity_names],
            accum_axis=self.axis,
            value_processor=value_processor,
            accum_operator=accum_operator,
            scale=quantity.unit_scale,
        )
class Synthesis(Integral):
    """Integral reduction producing synthesized spectral-line quantities.

    Intermediate line integrals are cached per (simulation, snapshot,
    base quantity, line) so that shift and variance computations can reuse
    the intensity (and shift) they depend on.
    """
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Maps cache entries (see _create_cache_entry) to unprocessed fields.
        self.cache = {}
    @property
    def tag(self):
        return f'syn_{self.axis_name}{"" if self.start is None else self.start}:{"" if self.end is None else self.end}'
    def __call__(self, bifrost_data, quantity):
        """Dispatch to the computation matching the synthesized quantity type."""
        if isinstance(quantity, LineIntensityQuantity):
            return self._compute_intensity(bifrost_data, quantity)
        elif isinstance(quantity, LineShiftQuantity):
            return self._compute_shift(bifrost_data, quantity)
        elif isinstance(quantity, LineVarianceQuantity):
            return self._compute_variance(bifrost_data, quantity)
        else:
            raise ValueError(f"Invalid quantity {quantity.name} for synthesis")
    @staticmethod
    def _create_cache_entry(bifrost_data, quantity):
        # Key uniquely identifying one line integral for one snapshot.
        return (
            bifrost_data.root_name,
            bifrost_data.snap,
            quantity.base_quantity_name,
            quantity.line_name,
        )
    def _compute_intensity(self, bifrost_data, quantity, processed=True):
        """Integrate the line emissivity; cache it and optionally unit-process it."""
        cache_entry = self.__class__._create_cache_entry(bifrost_data, quantity)
        if cache_entry in self.cache:
            return (
                quantity.process_base_quantity(self.cache[cache_entry])
                if processed
                else self.cache[cache_entry]
            )
        if not bifrost_data_has_variable(bifrost_data, quantity.emis_name):
            raise IOError(
                f"Missing quantity {quantity.emis_name} for computing spectral line intensity"
            )
        intensity = super().__call__(
            bifrost_data, Quantity.for_reduction(quantity.emis_name)
        )
        self.cache[cache_entry] = intensity
        return quantity.process_base_quantity(intensity) if processed else intensity
    def _compute_shift(self, bifrost_data, quantity, processed=True):
        """Compute the intensity-weighted line shift (weighted integral / intensity)."""
        cache_entry = self.__class__._create_cache_entry(bifrost_data, quantity)
        if cache_entry in self.cache:
            return (
                quantity.process_base_quantity(
                    self.cache[cache_entry], axis_name=self.axis_name
                )
                if processed
                else self.cache[cache_entry]
            )
        intensity = self._compute_intensity(
            bifrost_data, quantity.corresponding_intensity_quantity(), processed=False
        )
        # Prefer the precomputed emissivity-weighted shift; otherwise combine
        # emissivity and shift on the fly.
        if bifrost_data_has_variable(bifrost_data, quantity.emis_shift_name):
            weighted_shift = super().__call__(
                bifrost_data, Quantity.for_reduction(quantity.emis_shift_name)
            )
        elif bifrost_data_has_variables(
            bifrost_data, quantity.emis_name, quantity.shift_name
        ):
            weighted_shift = super().__call__(
                bifrost_data,
                Quantity.for_reduction(quantity.shift_name),
                quantity.emis_name,
                combine_fields=lambda shift, emis: emis * shift,
            )
        else:
            raise IOError(
                f"Missing quantities {quantity.emis_shift_name} or {quantity.emis_name} and {quantity.shift_name} for computing spectral line shift"
            )
        shift = weighted_shift / intensity
        self.cache[cache_entry] = shift
        return (
            quantity.process_base_quantity(shift, axis_name=self.axis_name)
            if processed
            else shift
        )
    def _compute_variance(self, bifrost_data, quantity, processed=True):
        """Compute the line variance as E[x^2] - (E[x])^2 using cached moments."""
        cache_entry = self.__class__._create_cache_entry(bifrost_data, quantity)
        if cache_entry in self.cache:
            return (
                quantity.process_base_quantity(self.cache[cache_entry])
                if processed
                else self.cache[cache_entry]
            )
        intensity = self._compute_intensity(
            bifrost_data, quantity.corresponding_intensity_quantity(), processed=False
        )
        shift = self._compute_shift(
            bifrost_data, quantity.corresponding_shift_quantity(), processed=False
        )
        # Obtain the emissivity-weighted second moment from the most
        # precomputed variant available.
        if bifrost_data_has_variable(bifrost_data, quantity.emis_vartgshift2_name):
            weighted_variance = super().__call__(
                bifrost_data, Quantity.for_reduction(quantity.emis_vartgshift2_name)
            )
        elif bifrost_data_has_variables(
            bifrost_data, quantity.emis_name, quantity.vartgshift2_name
        ):
            weighted_variance = super().__call__(
                bifrost_data,
                Quantity.for_reduction(quantity.vartgshift2_name),
                quantity.emis_name,
                combine_fields=lambda vartgshift2, emis: emis * vartgshift2,
            )
        elif bifrost_data_has_variables(
            bifrost_data, quantity.vartg_name, quantity.emis_name, quantity.shift_name
        ):
            weighted_variance = super().__call__(
                bifrost_data,
                Quantity.for_reduction(quantity.vartg_name),
                quantity.emis_name,
                quantity.shift_name,
                combine_fields=lambda vartg, emis, shift: emis * (vartg + shift**2),
            )
        else:
            raise IOError(
                f"Missing quantities {quantity.emis_vartgshift2_name} or {quantity.emis_name} and {quantity.vartgshift2_name} or {quantity.emis_name}, {quantity.vartg_name} and {quantity.shift_name} for computing spectral line variance"
            )
        variance = weighted_variance / intensity - shift * shift
        self.cache[cache_entry] = variance
        return quantity.process_base_quantity(variance) if processed else variance
class Slice(Reduction):
    """Reduction extracting a single 2D slice at a fixed position on one axis."""

    def __init__(self, axis=0, pos="i0"):
        super().__init__(axis)
        # Position in coordinate ("c...") or index ("i...") notation.
        self.pos = pos

    @property
    def tag(self):
        return f"slice_{self.axis_name}_{self.pos}"

    def __call__(self, bifrost_data, quantity):
        """Extract the slice of *quantity* at the configured position."""
        idx = self._parse_slice_coord_or_idx_to_idx(bifrost_data, self.pos)
        return fields.ScalarField2.slice_from_bifrost_data(
            bifrost_data,
            quantity.name,
            slice_axis=self.axis,
            slice_idx=idx,
            scale=quantity.unit_scale,
        )
class Scaling:
    """Base class for color scalings applied when plotting a 2D field."""

    def __init__(self, vmin=None, vmax=None) -> None:
        self.vmin = vmin
        self.vmax = vmax

    @staticmethod
    def parse(scaling_config, logger=logging):
        """Parse a scaling config entry (str or dict) into a Scaling instance.

        A string selects a scaling type with default options; a dict maps
        the type name to its keyword options.
        """
        classes = dict(linear=LinearScaling, log=LogScaling, symlog=SymlogScaling)
        if isinstance(scaling_config, dict):
            scaling = None
            for name, cls in classes.items():
                if name in scaling_config:
                    logger.debug(f"Found scaling {name}")
                    scaling = cls(**scaling_config[name])
                    break  # Use the first recognized scaling type
            if scaling is None:
                # BUGFIX: previously reported "Missing reduction entry"
                # (copy-paste from Reduction.parse).
                abort(logger, "Missing valid scaling entry")
        elif isinstance(scaling_config, str):
            logger.debug(f"Found scaling type {scaling_config}")
            scaling = classes[scaling_config]()
        else:
            abort(
                logger, f"scaling entry must be dict or str, is {type(scaling_config)}"
            )
        return scaling
class SymlogScaling(Scaling):
    """Symmetric-log scaling; missing linthresh/vmax are inferred from the data."""

    def __init__(self, linthresh=None, vmax=None, linthresh_quantile=0.2) -> None:
        self.linthresh = linthresh
        self.vmax = vmax
        self.linthresh_quantile = float(linthresh_quantile)

    @property
    def tag(self):
        return "symlog"

    def get_plot_kwargs(self, field):
        """Return symlog plot kwargs, deriving missing limits from *field*."""
        linthresh = self.linthresh
        vmax = self.vmax
        if linthresh is None or vmax is None:
            magnitudes = np.abs(field.get_values())
            if linthresh is None:
                # Linear threshold taken as a quantile of the nonzero magnitudes.
                linthresh = np.quantile(
                    magnitudes[magnitudes > 0], self.linthresh_quantile
                )
            if vmax is None:
                vmax = magnitudes.max()
        return dict(symlog=True, vmin=-vmax, vmax=vmax, linthresh=linthresh)
class LinearScaling(Scaling):
    """Plain linear scaling with optional fixed limits."""

    @property
    def tag(self):
        return "linear"

    def get_plot_kwargs(self, *args):
        return dict(log=False, vmin=self.vmin, vmax=self.vmax)
class LogScaling(LinearScaling):
    """Logarithmic scaling; identical to linear except for the log flag."""

    @property
    def tag(self):
        return "log"

    def get_plot_kwargs(self, *args):
        kwargs = super().get_plot_kwargs(*args)
        kwargs["log"] = True
        return kwargs
class PlotDescription:
    """Combination of a quantity, reduction, and scaling defining a single plot."""
    def __init__(self, quantity, reduction, scaling, name=None, **extra_plot_kwargs):
        self.quantity = quantity
        self.reduction = reduction
        self.scaling = scaling
        self.name = name
        # Passed straight to the plotting call; overrides derived kwargs.
        self.extra_plot_kwargs = extra_plot_kwargs
    @classmethod
    def parse(cls, quantities, plot_config, allow_reference=True, logger=logging):
        """Parse one plots-list entry into a list of PlotDescription objects.

        A plain string entry is (when *allow_reference*) treated as a
        reference to a named plot and returned as a single-element list
        containing just the name.
        """
        try:
            plot_config = dict(plot_config)
        except ValueError:
            # dict() raises ValueError for sequences like str that are not
            # key-value pairs.
            # NOTE(review): non-str scalars (e.g. int) raise TypeError, which
            # is not caught here — confirm that is acceptable.
            if allow_reference and isinstance(plot_config, str):
                name = plot_config
                return [name]
            else:
                abort(
                    logger,
                    f'plots list entry must be dict{", or str referring to plot" if allow_reference else ""}, is {type(plot_config)}',
                )
        return cls._parse_dict(quantities, plot_config, logger=logger)
    @classmethod
    def _parse_dict(cls, quantities, plot_config, logger=logging):
        """Validate a dict plot entry and resolve its quantity, reduction, and scaling."""
        name = plot_config.pop("name", None)
        if "quantity" not in plot_config:
            abort(logger, f"Missing entry quantity")
        quantity = plot_config.pop("quantity")
        if not isinstance(quantity, str):
            abort(logger, f"quantity entry must be str, is {type(quantity)}")
        logger.debug(f"Found quantity {quantity}")
        if quantity not in quantities:
            abort(logger, f"Quantity {quantity} not present in quantity file")
        quantity = quantities[quantity]
        if "reduction" not in plot_config:
            abort(logger, f"Missing reduction entry")
        reductions = Reduction.parse(plot_config.pop("reduction"), logger=logger)
        reduction_is_synthesis = isinstance(reductions[0], Synthesis)
        spectral_line_name = plot_config.pop("spectral_line", None)
        quantity_is_synthesized = SynthesizedQuantity.is_synthesized_quantity(quantity)
        # Synthesized quantities require both a spectral line and the
        # synthesis reduction; all other quantities must not use synthesis.
        if quantity_is_synthesized:
            if spectral_line_name is None:
                abort(
                    logger, f"Quantity {quantity.name} requires a spectral_line entry"
                )
            if not reduction_is_synthesis:
                abort(
                    logger,
                    f"Quantity {quantity.name} requires reduction to be synthesis",
                )
        else:
            if reduction_is_synthesis:
                abort(
                    logger,
                    f"Quantity {quantity.name} not compatible with synthesis reduction",
                )
        if spectral_line_name is not None:
            if reduction_is_synthesis:
                quantity = SynthesizedQuantity.from_quantity(
                    quantity, spectral_line_name, reductions[0].axis_name, logger=logger
                )
            else:
                # Non-synthesized line quantities are stored with the line
                # name suffixed onto the quantity name.
                quantity.set_name(f"{quantity.name}_{spectral_line_name}")
        if "scaling" not in plot_config:
            abort(logger, "Missing scaling entry")
        scaling = Scaling.parse(plot_config.pop("scaling"), logger=logger)
        # One PlotDescription per reduction (one per requested axis); any
        # remaining config entries become extra plot kwargs.
        return [
            cls(quantity, reduction, scaling, name=name, **plot_config)
            for reduction in reductions
        ]
    @property
    def tag(self):
        return f"{self.scaling.tag}_{self.quantity.tag}_{self.reduction.tag}"
    @property
    def has_multiple_fields(self):
        return self.reduction.yields_multiple_fields
    def get_plot_kwargs(self, field):
        # Precedence (lowest to highest): quantity, reduction, scaling,
        # explicit extra kwargs.
        return update_dict_nested(
            update_dict_nested(
                update_dict_nested(
                    self.quantity.get_plot_kwargs(),
                    self.reduction.get_plot_kwargs(field),
                ),
                self.scaling.get_plot_kwargs(field),
            ),
            self.extra_plot_kwargs,
        )
    def get_field(self, bifrost_data):
        """Apply this description's reduction to *bifrost_data* for its quantity."""
        return self.reduction(bifrost_data, self.quantity)
class VideoDescription:
    """Frame-rate configuration for videos assembled from plot frames."""

    def __init__(self, fps=15):
        self.fps = fps

    @classmethod
    def parse(cls, video_config, logger=logging):
        """Parse a video config entry: dict -> options, truthy -> defaults, falsy -> None."""
        if isinstance(video_config, dict):
            return cls(**video_config)
        if video_config:
            return cls()
        return None

    @property
    def config(self):
        """Keyword arguments for video creation."""
        return dict(fps=self.fps)
class SimulationRun:
    """A single Bifrost simulation run.

    Locates snapshot files, prepares derived/synthesized data when needed,
    and hands out reader objects for individual snapshots.
    """

    def __init__(
        self,
        name,
        data_dir,
        start_snap_num=None,
        end_snap_num=None,
        video_description=None,
        logger=logging,
    ):
        self._name = name
        self._data_dir = data_dir
        self._start_snap_num = start_snap_num
        self._end_snap_num = end_snap_num
        self._video_description = video_description
        self._logger = logger
        self._snap_nums = self._find_snap_nums()

    @classmethod
    def parse(cls, simulation_run_config, logger=logging):
        """Build a SimulationRun from a config dict.

        Accepts short ("name"/"dir") or long ("simulation_name"/
        "simulation_dir") keys, an optional "snap_nums" entry (single number
        or "start:end" string with optionally empty endpoints), and an
        optional "video" entry.
        """
        simulation_name = simulation_run_config.get(
            "name", simulation_run_config.get("simulation_name", None)
        )
        if simulation_name is None:
            abort(logger, "Missing simulation_name entry")
        if not isinstance(simulation_name, str):
            abort(
                logger, f"simulation_name entry must be str, is {type(simulation_name)}"
            )
        simulation_name = simulation_name.strip()
        logger.debug(f"Using simulation_name {simulation_name}")
        simulation_dir = simulation_run_config.get(
            "dir", simulation_run_config.get("simulation_dir", None)
        )
        # BUGFIX: the None check was previously duplicated with two different
        # messages; a single check suffices.
        if simulation_dir is None:
            abort(logger, "Missing simulation_dir entry")
        if not isinstance(simulation_dir, str):
            abort(
                logger, f"simulation_dir entry must be str, is {type(simulation_dir)}"
            )
        simulation_dir = pathlib.Path(simulation_dir)
        if not simulation_dir.is_absolute():
            abort(
                logger,
                f"simulation_dir entry {simulation_dir} must be an absolute path",
            )
        if not simulation_dir.is_dir():
            abort(logger, f"Could not find simulation_dir directory {simulation_dir}")
        simulation_dir = simulation_dir.resolve()
        logger.debug(f"Using simulation_dir {simulation_dir}")
        start_snap_num = None
        end_snap_num = None
        snap_nums = simulation_run_config.get("snap_nums", None)
        if isinstance(snap_nums, str):
            parts = snap_nums.split(":")
            if len(parts) < 2:
                # A single number selects exactly that snapshot.
                start_snap_num = int(parts[0])
                end_snap_num = start_snap_num
            elif len(parts) == 2:
                # "start:end" with either side optionally empty (open-ended).
                start_snap_num = None if len(parts[0].strip()) == 0 else int(parts[0])
                end_snap_num = None if len(parts[1].strip()) == 0 else int(parts[1])
            else:
                abort(logger, f"Invalid format for snap_nums: {snap_nums}")
        elif snap_nums is not None:
            start_snap_num = int(snap_nums)
            end_snap_num = start_snap_num
        logger.debug(f"Using start_snap_num {start_snap_num}")
        logger.debug(f"Using end_snap_num {end_snap_num}")
        video_description = simulation_run_config.get("video", None)
        if video_description is not None:
            video_description = VideoDescription.parse(video_description, logger=logger)
        return cls(
            simulation_name,
            simulation_dir,
            start_snap_num=start_snap_num,
            end_snap_num=end_snap_num,
            video_description=video_description,
            logger=logger,
        )

    @property
    def logger(self):
        return self._logger

    @property
    def name(self):
        return self._name

    @property
    def data_dir(self):
        return self._data_dir

    @property
    def data_available(self):
        """Whether any snapshot in the configured range was found on disk."""
        return len(self._snap_nums) > 0

    @property
    def snap_nums(self):
        """Copy of the sorted list of available snapshot numbers."""
        return list(self._snap_nums)

    @property
    def video_config(self):
        return (
            None if self._video_description is None else self._video_description.config
        )

    def set_logger(self, logger):
        self._logger = logger

    def ensure_data_is_ready(self, prepared_data_dir, plot_descriptions):
        """Make sure all quantities needed by *plot_descriptions* can be read.

        Quantities missing from the raw data are derived/synthesized into
        *prepared_data_dir* (reusing existing prepared data when complete).
        Returns a dict mapping each usable data directory to the plot
        descriptions whose quantities can be read from it.
        """
        self.logger.info(f"Preparing data for {self.name}")
        assert self.data_available
        plot_data_locations = {}
        bifrost_data = self.get_bifrost_data(self._snap_nums[0])
        (
            _,
            unavailable_quantities,
            available_plots,
            unavailable_plots,
        ) = self._check_quantity_availability(bifrost_data, plot_descriptions)
        if len(available_plots) > 0:
            plot_data_locations[self.data_dir] = available_plots
        if len(unavailable_plots) == 0:
            # Everything can be read from the raw data directory.
            return plot_data_locations
        if prepared_data_dir.is_dir():
            # Try to reuse previously prepared data, filling in any snapshots
            # that have not been prepared yet.
            try:
                prepared_bifrost_data = self.get_bifrost_data(
                    self._snap_nums[0], other_data_dir=prepared_data_dir
                )
            except Exception:
                # BUGFIX: narrowed from a bare except; reading may fail for
                # many reasons, in which case the data is regenerated below.
                prepared_bifrost_data = None
            if prepared_bifrost_data is not None:
                (
                    available_quantities_prepared,
                    unavailable_quantities_prepared,
                    available_plots_prepared,
                    _,
                ) = self._check_quantity_availability(
                    prepared_bifrost_data, unavailable_plots
                )
                if len(unavailable_quantities_prepared) == 0:
                    if len(available_plots_prepared) > 0:
                        plot_data_locations[
                            prepared_data_dir
                        ] = available_plots_prepared
                        prepared_snap_nums = self._find_snap_nums(
                            other_data_dir=prepared_data_dir
                        )
                        missing_snap_nums = [
                            snap_num
                            for snap_num in self._snap_nums
                            if snap_num not in prepared_snap_nums
                        ]
                        if len(missing_snap_nums) > 0:
                            # Release the reader before regenerating data.
                            prepared_bifrost_data = None
                            self._prepare_derived_data(
                                prepared_data_dir,
                                available_quantities_prepared,
                                snap_nums=missing_snap_nums,
                            )
                    return plot_data_locations
        # Prepared data absent or incomplete: generate everything missing.
        prepared_bifrost_data = None
        self._prepare_derived_data(prepared_data_dir, unavailable_quantities)
        prepared_bifrost_data = self.get_bifrost_data(
            self._snap_nums[0], other_data_dir=prepared_data_dir
        )
        (
            _,
            unavailable_quantities_prepared,
            available_plots_prepared,
            _,
        ) = self._check_quantity_availability(prepared_bifrost_data, unavailable_plots)
        if len(available_plots_prepared) > 0:
            plot_data_locations[prepared_data_dir] = available_plots_prepared
        for quantity in unavailable_quantities_prepared:
            self.logger.error(
                f"Could not obtain quantity {quantity.name} for simulation {self.name}, skipping"
            )
        return plot_data_locations

    def get_bifrost_data(self, snap_num, other_data_dir=None):
        """Create a caching reader for snapshot *snap_num* (from *other_data_dir* if given)."""
        fdir = self.data_dir if other_data_dir is None else other_data_dir
        self.logger.debug(f"Reading snap {snap_num} of {self.name} in {fdir}")
        assert snap_num in self._snap_nums
        return helita_utils.CachingBifrostData(
            self.name, fdir=fdir, snap=snap_num, verbose=False
        )

    def _find_snap_nums(self, other_data_dir=None):
        """List snapshot numbers in the data directory, filtered to the configured range."""
        input_dir = self.data_dir if other_data_dir is None else other_data_dir
        snap_nums = self._find_all_snap_nums(input_dir)
        if self._start_snap_num is not None:
            snap_nums = [n for n in snap_nums if n >= self._start_snap_num]
        if self._end_snap_num is not None:
            snap_nums = [n for n in snap_nums if n <= self._end_snap_num]
        self.logger.debug(
            f'Found snaps {", ".join(map(str, snap_nums))} in {input_dir}'
        )
        return snap_nums

    def _find_all_snap_nums(self, input_dir):
        """Return sorted snapshot numbers parsed from <name>_NNN.idl files in *input_dir*."""
        # BUGFIX: raw string for the regex; "\d" in a plain string is an
        # invalid escape sequence (DeprecationWarning, future SyntaxError).
        p = re.compile(r"{}_(\d\d\d)\.idl$".format(self.name))
        snap_nums = []
        for name in os.listdir(input_dir):
            match = p.match(name)
            if match:
                snap_nums.append(int(match.group(1)))
        return sorted(snap_nums)

    def _check_quantity_availability(self, bifrost_data, plot_descriptions):
        """Partition plot descriptions and their quantities by availability in *bifrost_data*.

        Returns (available_quantities, unavailable_quantities,
        available_plots, unavailable_plots).
        """
        available_quantities = []
        unavailable_quantities = []
        available_plots = []
        unavailable_plots = []
        for plot_description in plot_descriptions:
            quantity = plot_description.quantity
            if quantity in available_quantities:
                available_plots.append(plot_description)
            elif quantity in unavailable_quantities:
                unavailable_plots.append(plot_description)
            else:
                if plot_description.quantity.is_available(bifrost_data):
                    self.logger.debug(
                        f"Quantity {quantity.name} available for {bifrost_data.file_root}"
                    )
                    available_quantities.append(quantity)
                    available_plots.append(plot_description)
                else:
                    self.logger.debug(
                        f"Quantity {quantity.name} not available for {bifrost_data.file_root}"
                    )
                    unavailable_quantities.append(quantity)
                    unavailable_plots.append(plot_description)
        return (
            available_quantities,
            unavailable_quantities,
            available_plots,
            unavailable_plots,
        )

    def _prepare_derived_data(self, prepared_data_dir, quantities, snap_nums=None):
        """Run backstaff to derive/synthesize *quantities* into *prepared_data_dir*.

        Snapshot files are symlinked next to the written parameter files so
        the prepared directory is self-contained.
        """
        if len(quantities) == 0:
            return
        os.makedirs(prepared_data_dir, exist_ok=True)
        if snap_nums is None:
            snap_nums = self._snap_nums
        param_file_name = f"{self.name}_{snap_nums[0]}.idl"
        snap_range_specification = (
            [f"--snap-range={snap_nums[0]},{snap_nums[-1]}"]
            if len(snap_nums) > 1
            else []
        )
        derived_dependency_names = []
        synthesized_quantities = []
        for quantity in quantities:
            if quantity.dependency_type == "derived":
                derived_dependency_names.append(quantity.dependency_name)
            elif quantity.dependency_type == "synthesized":
                synthesized_quantities.append(quantity)
            else:
                raise ValueError(f"Invalid dependency type {quantity.dependency_type}")
        synthesize_command_args = SynthesizedQuantity.get_command_args(
            synthesized_quantities
        )
        all_dependency_names = derived_dependency_names + [
            quantity.dependency_name for quantity in synthesized_quantities
        ]
        return_code = running.run_command(
            [
                "backstaff",
                "--protected-file-types=",
                "snapshot",
                "-v",
                *snap_range_specification,
                param_file_name,
                "derive",
                "-v",
                "--ignore-warnings",
                *synthesize_command_args,
                "write",
                "-v",
                "--ignore-warnings",
                "--overwrite",
                f'--included-quantities={",".join(all_dependency_names)}',
                str((prepared_data_dir / param_file_name).resolve()),
            ],
            cwd=self.data_dir,
            logger=self.logger.debug,
            error_logger=self.logger.error,
        )
        if return_code != 0:
            abort(self.logger, "Non-zero return code")
        # Symlink the raw .snap files next to the written .idl files.
        # (BUGFIX: removed a second, unreachable-as-different return-code
        # check that duplicated the abort above.)
        for snap_num in snap_nums:
            snap_name = f"{self.name}_{snap_num:03}.snap"
            snap_path = self.data_dir / snap_name
            linked_snap_path = prepared_data_dir / snap_name
            if (
                linked_snap_path.with_suffix(".idl").is_file()
                and not linked_snap_path.is_file()
            ):
                os.symlink(snap_path, linked_snap_path)
class Visualizer:
    """Renders plot frames and videos for a single simulation run."""
    def __init__(self, simulation_run, output_dir_name="autoviz"):
        self._simulation_run = simulation_run
        self._logger = simulation_run.logger
        # All output (frames, videos, prepared data) lives under the run's data dir.
        self._output_dir = self._simulation_run.data_dir / output_dir_name
        self._prepared_data_dir = self._output_dir / "data"
        self._logger.debug(f"Using output directory {self._output_dir}")
    @property
    def logger(self):
        return self._logger
    @property
    def simulation_name(self):
        return self._simulation_run.name
    def set_logger(self, logger):
        self._simulation_run.set_logger(logger)
        self._logger = logger
    def clean(self):
        """Interactively remove the whole output directory (frames, videos, prepared data)."""
        if not self._output_dir.is_dir():
            print(f"No data to clean for {self.simulation_name}")
            return
        print(f"The directory {self._output_dir} and all its content will be removed")
        while True:
            # Loop until the user answers y, n, or just presses enter (defaults to no).
            answer = input("Continue? [y/N] ").strip().lower()
            if answer in ("", "n"):
                print("Aborted")
                break
            if answer == "y":
                shutil.rmtree(self._output_dir)
                self.logger.debug(f"Removed {self._output_dir}")
                break
    def create_videos_only(self, *plot_descriptions):
        """Assemble videos from already rendered frames, without re-plotting."""
        video_config = self._simulation_run.video_config
        if video_config is not None:
            snap_nums = self._simulation_run.snap_nums
            if len(snap_nums) == 0:
                return
            for plot_description in plot_descriptions:
                frame_dir = self._output_dir / plot_description.tag
                if plot_description.has_multiple_fields:
                    # NOTE(review): the snapshot is never advanced with
                    # set_snap here, so get_field always evaluates on the
                    # first snapshot — presumably only the frame ids are
                    # needed for video assembly; confirm.
                    bifrost_data = self._simulation_run.get_bifrost_data(snap_nums[0])
                    for snap_num in snap_nums:
                        fields = plot_description.get_field(bifrost_data)
                        field_ids = fields.get_ids()
                        output_dir = frame_dir / f"{snap_num}"
                        self._create_video_from_frames(
                            output_dir,
                            field_ids,
                            frame_dir.with_name(f"{frame_dir.stem}_{snap_num}.mp4"),
                            **video_config,
                        )
                else:
                    self._create_video_from_frames(
                        frame_dir,
                        snap_nums,
                        frame_dir.with_suffix(".mp4"),
                        **video_config,
                    )
    def visualize(
        self,
        *plot_descriptions,
        overwrite=False,
        job_idx=0,
        show_progress=True,
        new_logger_builder=None,
    ):
        """Render all frames (and videos, if configured) for the plot descriptions.

        overwrite re-renders frames that already exist; job_idx positions
        this job's progress bar when several jobs run in parallel;
        new_logger_builder, if given, creates a fresh logger (useful in
        worker processes).
        """
        if new_logger_builder is not None:
            self.set_logger(new_logger_builder())
        if not self._simulation_run.data_available:
            self.logger.error(
                f"No data for simulation {self.simulation_name} in {self._simulation_run.data_dir}, aborting"
            )
            return
        def add_progress_bar(iterable, extra_desc=None):
            # Wrap an iterable in a tqdm bar labeled with the current plot.
            if not show_progress:
                return iterable
            return tqdm(
                iterable,
                desc=f"{self.simulation_name} {plot_description.tag}"
                + ("" if extra_desc is None else f" {extra_desc}"),
                position=job_idx,
                ascii=True,
            )
        plot_data_locations = self._simulation_run.ensure_data_is_ready(
            self._prepared_data_dir, plot_descriptions
        )
        # Invert the mapping so each plot description knows which data
        # directory its quantities can be read from.
        # NOTE(review): the loop below rebinds the name plot_descriptions
        # (the parameter) — harmless after this point, but confusing.
        plot_data_locations_inverted = {}
        for data_dir, plot_descriptions in plot_data_locations.items():
            for plot_description in plot_descriptions:
                plot_data_locations_inverted[plot_description] = data_dir
        snap_nums = self._simulation_run.snap_nums
        for plot_description, data_dir in plot_data_locations_inverted.items():
            frame_dir = self._output_dir / plot_description.tag
            os.makedirs(frame_dir, exist_ok=True)
            self.logger.info(
                f"Plotting frames for {plot_description.tag} in {self.simulation_name}"
            )
            bifrost_data = self._simulation_run.get_bifrost_data(snap_nums[0], data_dir)
            if plot_description.has_multiple_fields:
                # Scan-like reductions: one subdirectory of frames per snapshot.
                for snap_num in snap_nums:
                    output_dir = frame_dir / f"{snap_num}"
                    os.makedirs(output_dir, exist_ok=True)
                    bifrost_data.set_snap(snap_num)
                    fields = plot_description.get_field(bifrost_data)
                    field_ids = fields.get_ids()
                    for field_id in add_progress_bar(
                        field_ids, extra_desc=f"(snap {snap_num})"
                    ):
                        output_path = output_dir / f"{field_id}.png"
                        if output_path.exists() and not overwrite:
                            self.logger.debug(f"{output_path} already exists, skipping")
                            continue
                        field_wrapper = fields(field_id)
                        self._plot_frame(
                            bifrost_data,
                            plot_description,
                            field_wrapper.field,
                            output_path,
                            label=field_wrapper.label,
                        )
                    if self._simulation_run.video_config is not None:
                        self._create_video_from_frames(
                            output_dir,
                            field_ids,
                            frame_dir.with_name(f"{frame_dir.stem}_{snap_num}.mp4"),
                            **self._simulation_run.video_config,
                        )
            else:
                # Single-field reductions: one frame per snapshot.
                for snap_num in add_progress_bar(snap_nums):
                    output_path = frame_dir / f"{snap_num}.png"
                    if output_path.exists() and not overwrite:
                        self.logger.debug(f"{output_path} already exists, skipping")
                        continue
                    bifrost_data.set_snap(snap_num)
                    field = plot_description.get_field(bifrost_data)
                    self._plot_frame(bifrost_data, plot_description, field, output_path)
                if self._simulation_run.video_config is not None:
                    self._create_video_from_frames(
                        frame_dir,
                        snap_nums,
                        frame_dir.with_suffix(".mp4"),
                        **self._simulation_run.video_config,
                    )
    def _plot_frame(
        self, bifrost_data, plot_description, field, output_path, label=None
    ):
        """Plot one field to *output_path*, annotated with simulation time (and *label*)."""
        time = float(bifrost_data.params["t"]) * units.U_T
        text = f"{time:.1f} s"
        if label is not None:
            text = f"{text}\n{label}"
        field.plot(
            output_path=output_path,
            extra_artists=[AnchoredText(text, "upper left", frameon=False)],
            **plot_description.get_plot_kwargs(field),
        )
    def _create_video_from_frames(self, frame_dir, frame_indices, output_path, fps=15):
        """Combine the frames in *frame_dir* into an H.264 video via ffmpeg.

        Frames are symlinked into a temporary directory with consecutive
        numbering, as required by ffmpeg's image-sequence input.
        """
        self.logger.info(
            f"Creating video {output_path.name} from {self.simulation_name}"
        )
        tempdir = frame_dir / ".ffmpeg_tmp"
        if tempdir.exists():
            shutil.rmtree(tempdir)
        os.makedirs(tempdir)
        frame_num = 0
        for frame_idx in frame_indices:
            frame_path = frame_dir / f"{frame_idx:d}.png"
            linked_frame_path = tempdir / f"{frame_num:d}.png"
            if frame_path.is_file():
                os.symlink(frame_path, linked_frame_path)
            # NOTE(review): frame_num advances even when a frame file is
            # missing, leaving gaps in the numbering that make ffmpeg stop
            # at the first gap — confirm whether this is intended.
            frame_num += 1
        frame_path_template = tempdir / "%d.png"
        return_code = running.run_command(
            [
                "ffmpeg",
                "-loglevel",
                "error",
                "-y",
                "-r",
                "{:d}".format(fps),
                "-start_number",
                "0",
                "-i",
                str(frame_path_template),
                "-vf",
                "pad=width=ceil(iw/2)*2:height=ceil(ih/2)*2:color=white",
                "-vcodec",
                "libx264",
                "-pix_fmt",
                "yuv420p",
                str(output_path),
            ],
            logger=self.logger.debug,
            error_logger=self.logger.error,
        )
        shutil.rmtree(tempdir)
        if return_code != 0:
            self.logger.error("Could not create video, skipping")
class Visualization:
    """Couples a visualizer with the plot descriptions it should render."""

    def __init__(self, visualizer, *plot_descriptions):
        self._viz = visualizer
        self._plots = plot_descriptions

    @property
    def visualizer(self):
        """Return the underlying visualizer instance."""
        return self._viz

    def create_videos_only(self, **kwargs):
        """Create videos from frames that already exist on disk."""
        self._viz.create_videos_only(*self._plots, **kwargs)

    def visualize(self, **kwargs):
        """Render all frames (and videos, if configured) for the plots."""
        self._viz.visualize(*self._plots, **kwargs)
def parse_config_file(file_path, logger=logging):
    """Parse a YAML visualization config file into Visualization objects.

    The file may either be a bare list of simulation entries or a mapping
    with the keys ``simulations``, ``plots`` (global plot configs) and
    ``quantity_path``. Returns a list of Visualization instances, one per
    simulation entry. Aborts (via ``abort``) on missing files or YAML errors.
    """
    logger.debug(f"Parsing {file_path}")
    file_path = pathlib.Path(file_path)
    if not file_path.exists():
        abort(logger, f"Could not find config file {file_path}")
    yaml = YAML()
    with open(file_path, "r") as f:
        try:
            entries = yaml.load(f)
        # NOTE(review): `yaml` here is the YAML() instance created above,
        # which shadows any module-level `yaml` import. Verify the instance
        # actually exposes a `YAMLError` attribute (ruamel.yaml defines
        # YAMLError on the module, not on YAML instances); otherwise this
        # handler itself raises AttributeError when a parse error occurs.
        except yaml.YAMLError as e:
            abort(logger, e)
    # A bare top-level list is shorthand for {"simulations": [...]}.
    if isinstance(entries, list):
        entries = dict(simulations=entries)
    global_quantity_path = entries.get("quantity_path", None)
    if global_quantity_path is not None:
        global_quantity_path = pathlib.Path(global_quantity_path)
    simulations = entries.get("simulations", [])
    if not isinstance(simulations, list):
        simulations = [simulations]
    visualizations = []
    for simulation in simulations:
        simulation_run = SimulationRun.parse(simulation, logger=logger)
        # Resolve the quantity file: a per-simulation "quantity_file" wins,
        # then the global quantity_path (may be a file or a directory),
        # falling back to a default "quantities.csv".
        if "quantity_file" not in simulation:
            if global_quantity_path is None:
                quantity_file = pathlib.Path("quantities.csv")
            else:
                if global_quantity_path.is_dir():
                    quantity_file = global_quantity_path / "quantities.csv"
                else:
                    quantity_file = global_quantity_path
        else:
            quantity_file = pathlib.Path(simulation["quantity_file"])
            # Relative per-simulation paths are anchored in the simulation's
            # data dir unless a global quantity directory was given.
            if not quantity_file.is_absolute():
                if global_quantity_path is None or not global_quantity_path.is_dir():
                    quantity_file = simulation_run.data_dir / quantity_file
                else:
                    quantity_file = global_quantity_path / quantity_file
        quantity_file = quantity_file.resolve()
        if not quantity_file.exists():
            abort(logger, f"Could not find quantity_file {quantity_file}")
        quantities = Quantity.parse_file(quantity_file, logger=logger)
        global_plots = entries.get("plots", [])
        if not isinstance(global_plots, list):
            global_plots = [global_plots]
        local_plots = simulation.get("plots", [])
        if not isinstance(local_plots, list):
            local_plots = [local_plots]
        # Global plot configs may not be name references themselves.
        global_plot_descriptions = [
            plot_description
            for plot_config in global_plots
            for plot_description in PlotDescription.parse(
                quantities, plot_config, allow_reference=False, logger=logger
            )
        ]
        # Split local plot configs into name references and full descriptions
        # (PlotDescription.parse returns either kind when allow_reference=True).
        references, plot_descriptions = [], []
        for plot_config in local_plots:
            for p in PlotDescription.parse(
                quantities, plot_config, allow_reference=True, logger=logger
            ):
                (references, plot_descriptions)[isinstance(p, PlotDescription)].append(
                    p
                )
        # Unnamed global plots always apply to this simulation; named global
        # plots apply only when referenced from the simulation's plot list.
        global_plot_descriptions_with_name = []
        for plot_description in global_plot_descriptions:
            (global_plot_descriptions_with_name, plot_descriptions)[
                plot_description.name is None
            ].append(plot_description)
        for name in references:
            found_plot = False
            for plot_description in global_plot_descriptions_with_name:
                if name == plot_description.name:
                    plot_descriptions.append(plot_description)
                    found_plot = True
            if not found_plot:
                logger.warning(f"No plots found with name {name}, skipping")
        visualizer = Visualizer(simulation_run)
        visualizations.append(Visualization(visualizer, *plot_descriptions))
    return visualizations
class LoggerBuilder:
    """Callable factory returning a configured application logger.

    Calling an instance returns the logger named ``self.name``. On the first
    call a stream or file handler is attached (subsequent calls reuse it),
    and a ``sys.excepthook`` is installed that routes uncaught exceptions
    through the logger.
    """

    def __init__(self, name="autoviz", level=logging.INFO, log_file=None):
        self.name = name
        self.level = level
        self.log_file = log_file

    def __call__(self):
        logger = logging.getLogger(self.name)
        if not logger.handlers:
            logger.setLevel(self.level)
            logger.propagate = False
            if self.log_file is None:
                handler = logging.StreamHandler()
                fmt = "%(levelname)s: %(message)s"
            else:
                handler = logging.FileHandler(self.log_file, mode="a")
                fmt = "[%(asctime)s] %(levelname)s: %(message)s"
            handler.setFormatter(logging.Formatter(fmt))
            logger.addHandler(handler)

        def handle_exception(exc_type, exc_value, exc_traceback):
            # Let Ctrl-C terminate normally instead of being logged.
            if issubclass(exc_type, KeyboardInterrupt):
                sys.__excepthook__(exc_type, exc_value, exc_traceback)
                return
            logger.critical(
                "Uncaught exception", exc_info=(exc_type, exc_value, exc_traceback)
            )

        sys.excepthook = handle_exception
        return logger
if __name__ == "__main__":
    import argparse
    # Command-line entry point: parse options, build loggers, then clean,
    # video-only, or fully visualize the configured simulation runs.
    parser = argparse.ArgumentParser(description="Visualize Bifrost simulation runs.")
    parser.add_argument(
        "config_file", help="path to visualization config file (in YAML format)"
    )
    parser.add_argument(
        "-c", "--clean", action="store_true", help="clean visualization data"
    )
    parser.add_argument(
        "-d", "--debug", action="store_true", help="use debug log level"
    )
    parser.add_argument(
        "-v",
        "--video-only",
        action="store_true",
        help="only generate videos from existing frames",
    )
    parser.add_argument(
        "-o",
        "--overwrite",
        action="store_true",
        help="whether to overwrite existing video frames",
    )
    parser.add_argument(
        "--hide-progress", action="store_true", help="whether to hide progress bars"
    )
    parser.add_argument(
        "-l",
        "--log-file",
        metavar="PATH",
        help="where to write log (prints to terminal by default)",
    )
    parser.add_argument(
        "-s",
        "--simulations",
        metavar="NAMES",
        help="subset of simulations in config file to operate on (comma-separated)",
    )
    parser.add_argument(
        "-n",
        "--n-threads",
        type=int,
        default=1,
        metavar="NUM",
        help="max number of threads to use for visualization",
    )
    args = parser.parse_args()
    logger_builder = LoggerBuilder(
        level=(logging.DEBUG if args.debug else logging.INFO), log_file=args.log_file
    )
    logger = logger_builder()
    all_visualizations = parse_config_file(args.config_file, logger=logger)
    # Optionally restrict to a named subset of the configured simulations.
    if args.simulations is None:
        visualizations = all_visualizations
    else:
        simulation_names = args.simulations.split(",")
        visualizations = [
            v
            for v in all_visualizations
            if v.visualizer.simulation_name in simulation_names
        ]
    if len(visualizations) == 0:
        logger.info("Nothing to visualize")
        sys.exit()
    if args.clean:
        for visualization in visualizations:
            visualization.visualizer.clean()
    elif args.video_only:
        for visualization in visualizations:
            visualization.create_videos_only()
    else:
        # Fan the visualizations out over up to --n-threads parallel jobs.
        # When running with more than one job, each worker is handed the
        # logger builder so it can construct its own logger.
        n_jobs = min(args.n_threads, len(visualizations))
        Parallel(n_jobs=n_jobs)(
            delayed(
                lambda idx, v: v.visualize(
                    overwrite=args.overwrite,
                    job_idx=idx,
                    show_progress=(not args.hide_progress),
                    new_logger_builder=(None if n_jobs == 1 else logger_builder),
                )
            )(idx, v)
            for idx, v in enumerate(visualizations)
        )
| StarcoderdataPython |
3237292 | from datetime import datetime
from ..schema import BaseTransformer
class Transformer(BaseTransformer):
    """Transform South Carolina raw data for consolidation."""

    # Two-letter state postal abbreviation handled by this transformer.
    postal_code = "SC"
    # Mapping from consolidated schema field -> raw source column name.
    fields = dict(
        company="company",
        location="location",
        notice_date="date",
        jobs="jobs",
    )
    # strptime() pattern for dates in the raw data, e.g. "04/08/2020".
    date_format = "%m/%d/%Y"
    # Known-malformed raw date strings mapped to their corrected values.
    date_corrections = {
        "4/8/20/20": datetime(2020, 4, 8),
        "12/31//2015": datetime(2015, 12, 31),
    }
| StarcoderdataPython |
1609785 | """Density plot from a distribution of points in 3D"""
import numpy as np
from vedo import *
n = 3000
# Sample n points from an isotropic Gaussian centred at (7, 7, 7).
p = np.random.normal(7, 0.3, (n,3))
p[:int(n*1/3) ] += [1,0,0] # shift 1/3 of the points along x by 1
# Shift the last third by a different offset to create a third cluster.
p[ int(n*2/3):] += [1.7,0.4,0.2]
pts = Points(p, alpha=0.5)
vol = pts.density().c('Dark2').alpha([0.1,1]) # density() returns a Volume
r = precision(vol.info['radius'], 2) # retrieve automatic radius value
vol.addScalarBar3D(title='Density (counts in r_s ='+r+')', c='k', italic=1)
# Show the point cloud and the density volume side by side in two renderers.
show([(pts,__doc__), vol], N=2, axes=True).close()
| StarcoderdataPython |
1677051 | from __future__ import absolute_import
from django.utils.translation import ugettext_lazy as _
from sentry import http, options
from sentry.identity.pipeline import IdentityProviderPipeline
from sentry.identity.github import get_user_info
from sentry.integrations import IntegrationProvider, IntegrationMetadata
from sentry.pipeline import NestedPipelineView, PipelineView
from sentry.utils.http import absolute_uri
from .utils import get_jwt
# TODO(review): DESCRIPTION is still the scaffold placeholder ("Fill me out");
# write a real user-facing description before enabling this integration.
DESCRIPTION = """
Fill me out
"""
# Static metadata displayed in the Sentry integration directory.
metadata = IntegrationMetadata(
    description=DESCRIPTION.strip(),
    author='<NAME>',
    noun=_('Installation'),
    issue_url='https://github.com/getsentry/sentry/issues/new?title=GitHub%20Integration:%20&labels=Component%3A%20Integrations',
    source_url='https://github.com/getsentry/sentry/tree/master/src/sentry/integrations/github',
    aspects={}
)
class GitHubIntegrationProvider(IntegrationProvider):
    """Integration provider implementing the GitHub App installation flow."""

    key = 'github'
    name = 'GitHub'
    metadata = metadata
    # Dimensions of the popup dialog used for the installation flow.
    setup_dialog_config = {
        'width': 1030,
        'height': 1000,
    }
    def get_pipeline_views(self):
        """Return the setup steps: redirect to GitHub, then OAuth identity."""
        identity_pipeline_config = {
            'oauth_scopes': (),
            'redirect_url': absolute_uri('/extensions/github/setup/'),
        }
        identity_pipeline_view = NestedPipelineView(
            bind_key='identity',
            provider_key='github',
            pipeline_cls=IdentityProviderPipeline,
            config=identity_pipeline_config,
        )
        return [GitHubInstallationRedirect(), identity_pipeline_view]
    def get_installation_info(self, access_token, installation_id):
        """Fetch the GitHub App installation payload for *installation_id*.

        Returns the installation dict only if the user authenticated by
        *access_token* actually has access to that installation; otherwise
        returns None. Raises on HTTP errors from the GitHub API.
        """
        session = http.build_session()
        # App-level request, authenticated with a JWT signed by the app key.
        resp = session.get(
            'https://api.github.com/app/installations/%s' % installation_id,
            headers={
                'Authorization': 'Bearer %s' % get_jwt(),
                'Accept': 'application/vnd.github.machine-man-preview+json',
            }
        )
        resp.raise_for_status()
        installation_resp = resp.json()
        # User-level request listing the installations this user can access.
        resp = session.get(
            'https://api.github.com/user/installations',
            params={'access_token': access_token},
            headers={'Accept': 'application/vnd.github.machine-man-preview+json'}
        )
        resp.raise_for_status()
        user_installations_resp = resp.json()
        # verify that user actually has access to the installation
        for installation in user_installations_resp['installations']:
            if installation['id'] == installation_resp['id']:
                return installation_resp
        return None
    def build_integration(self, state):
        """Build the integration payload from the completed pipeline *state*."""
        identity = state['identity']['data']
        user = get_user_info(identity['access_token'])
        installation = self.get_installation_info(
            identity['access_token'], state['installation_id'])
        return {
            'name': installation['account']['login'],
            'external_id': installation['id'],
            'metadata': {
                # The access token will be populated upon API usage
                'access_token': None,
                'expires_at': None,
                'icon': installation['account']['avatar_url'],
                'domain_name': installation['account']['html_url'].replace('https://', ''),
            },
            'user_identity': {
                'type': 'github',
                'external_id': user['id'],
                'scopes': [],  # GitHub apps do not have user scopes
                'data': {'access_token': identity['access_token']},
            },
        }
class GitHubInstallationRedirect(PipelineView):
    """Pipeline step that sends the user to GitHub's app installation page."""

    def get_app_url(self):
        """Return the public GitHub URL of the configured app."""
        name = options.get('github-app.name')
        return 'https://github.com/apps/%s' % name
    def dispatch(self, request, pipeline):
        # Returning from GitHub: capture the installation id and continue.
        if 'installation_id' in request.GET:
            pipeline.bind_state('installation_id', request.GET['installation_id'])
            return pipeline.next_step()
        # First visit: redirect the user to GitHub to install the app.
        return self.redirect(self.get_app_url())
| StarcoderdataPython |
4834526 | import sys,os
curr_path = os.path.dirname(os.path.abspath(__file__)) # absolute path of this file's directory
parent_path = os.path.dirname(curr_path) # parent directory
sys.path.append(parent_path) # add the parent directory to the module search path
import gym
import torch
import numpy as np
import datetime
from common.utils import plot_rewards
from common.utils import save_results,make_dir
from PPO.ppo2 import PPO
curr_time = datetime.datetime.now().strftime("%Y%m%d-%H%M%S") # 获取当前时间
class Config:
    """Hyper-parameters and bookkeeping paths for PPO on CartPole-v0."""

    def __init__(self) -> None:
        ################################## environment settings ###################################
        # Bug fix: this file trains PPO, but the name was copy-pasted as "DQN",
        # which mislabelled all progress printouts.
        self.algo_name = "PPO"  # algorithm name
        self.env_name = 'CartPole-v0'  # gym environment name
        self.continuous = False  # whether the action space is continuous
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")  # use GPU when available
        self.seed = 10  # random seed; 0 disables seeding
        self.train_eps = 200  # number of training episodes
        self.test_eps = 20  # number of test episodes
        ################################################################################
        ################################## algorithm hyper-parameters ####################################
        self.batch_size = 5  # mini-batch size for SGD
        self.gamma = 0.95  # discount factor
        self.n_epochs = 4  # optimization epochs per policy update
        self.actor_lr = 0.0003  # actor learning rate
        self.critic_lr = 0.0003  # critic learning rate
        self.gae_lambda = 0.95  # GAE lambda
        self.policy_clip = 0.2  # PPO clipping parameter
        self.hidden_dim = 256  # hidden layer width
        self.update_fre = 20  # policy update frequency (environment steps)
        ################################################################################
        ################################# result-saving parameters ################################
        self.result_path = curr_path+"/outputs/" + self.env_name + \
            '/'+curr_time+'/results/'  # directory for saving results
        self.model_path = curr_path+"/outputs/" + self.env_name + \
            '/'+curr_time+'/models/'  # directory for saving models
        self.save = True  # whether to save reward plots
        ################################################################################
def env_agent_config(cfg):
    '''Create the environment and the agent.

    Seeding happens after creation so that ``env.seed`` acts on the
    instance returned by ``gym.make``.
    '''
    env = gym.make(cfg.env_name)  # create the gym environment
    state_dim = env.observation_space.shape[0]  # state dimension
    if cfg.continuous:
        action_dim = env.action_space.shape[0]  # action dimension (continuous)
    else:
        action_dim = env.action_space.n  # action dimension (discrete)
    agent = PPO(state_dim, action_dim, cfg)  # create the PPO agent
    if cfg.seed !=0:  # seed all RNGs for reproducibility (0 disables seeding)
        torch.manual_seed(cfg.seed)
        env.seed(cfg.seed)
        np.random.seed(cfg.seed)
    return env, agent
def train(cfg,env,agent):
    """Train *agent* in *env* for ``cfg.train_eps`` episodes.

    Returns the per-episode rewards and their exponential moving average.
    """
    print('开始训练!')
    print(f'环境:{cfg.env_name}, 算法:{cfg.algo_name}, 设备:{cfg.device}')
    rewards = []  # rewards of all episodes
    ma_rewards = []  # moving-average rewards of all episodes
    steps = 0
    for i_ep in range(cfg.train_eps):
        state = env.reset()
        done = False
        ep_reward = 0
        while not done:
            action, prob, val = agent.choose_action(state)
            state_, reward, done, _ = env.step(action)
            steps += 1
            ep_reward += reward
            agent.memory.push(state, action, prob, val, reward, done)
            # Update the policy every cfg.update_fre environment steps.
            if steps % cfg.update_fre == 0:
                agent.update()
            state = state_
        rewards.append(ep_reward)
        # Exponential moving average (0.9 old + 0.1 new) for smoother curves.
        if ma_rewards:
            ma_rewards.append(0.9*ma_rewards[-1]+0.1*ep_reward)
        else:
            ma_rewards.append(ep_reward)
        if (i_ep+1)%10 == 0:
            print(f"回合:{i_ep+1}/{cfg.train_eps},奖励:{ep_reward:.2f}")
    print('完成训练!')
    return rewards,ma_rewards
def test(cfg,env,agent):
    """Evaluate *agent* in *env* for ``cfg.test_eps`` episodes.

    Returns the per-episode rewards and their exponential moving average.
    """
    print('开始测试!')
    print(f'环境:{cfg.env_name}, 算法:{cfg.algo_name}, 设备:{cfg.device}')
    rewards = []  # rewards of all episodes
    ma_rewards = []  # moving-average rewards of all episodes
    for i_ep in range(cfg.test_eps):
        state = env.reset()
        done = False
        ep_reward = 0
        while not done:
            action, prob, val = agent.choose_action(state)
            state_, reward, done, _ = env.step(action)
            ep_reward += reward
            state = state_
        rewards.append(ep_reward)
        # Exponential moving average (0.9 old + 0.1 new) for smoother curves.
        if ma_rewards:
            ma_rewards.append(
                0.9*ma_rewards[-1]+0.1*ep_reward)
        else:
            ma_rewards.append(ep_reward)
        print('回合:{}/{}, 奖励:{}'.format(i_ep+1, cfg.test_eps, ep_reward))
    # Bug fix: the original printed '完成训练!' ("finished training") here,
    # copy-pasted from train(); this function finishes *testing*.
    print('完成测试!')
    return rewards,ma_rewards
if __name__ == "__main__":
    cfg = Config()
    # Training phase
    env,agent = env_agent_config(cfg)
    rewards, ma_rewards = train(cfg, env, agent)
    make_dir(cfg.result_path, cfg.model_path)  # create folders for results and models
    agent.save(path=cfg.model_path)
    save_results(rewards, ma_rewards, tag='train', path=cfg.result_path)
    plot_rewards(rewards, ma_rewards, cfg, tag="train")
    # Testing phase: rebuild env/agent and reload the trained weights
    env,agent = env_agent_config(cfg)
    agent.load(path=cfg.model_path)
    rewards,ma_rewards = test(cfg,env,agent)
    save_results(rewards,ma_rewards,tag='test',path=cfg.result_path)
plot_rewards(rewards,ma_rewards,cfg,tag="test") | StarcoderdataPython |
4823576 | <reponame>tylerclair/py3canvas<filename>py3canvas/apis/submissions.py
"""Submissions API Version 1.0.
This API client was generated using a template. Make sure this code is valid before using it.
"""
import logging
from datetime import date, datetime
from .base import BaseCanvasAPI
from .base import BaseModel
class SubmissionsAPI(BaseCanvasAPI):
"""Submissions API Version 1.0."""
    def __init__(self, *args, **kwargs):
        """Init method for SubmissionsAPI."""
        super(SubmissionsAPI, self).__init__(*args, **kwargs)
        # Dedicated logger so per-API request debugging can be filtered.
        self.logger = logging.getLogger("py3canvas.SubmissionsAPI")
    def submit_assignment_courses(self, assignment_id, course_id, submission_submission_type, comment_text_comment=None, submission_annotatable_attachment_id=None, submission_body=None, submission_file_ids=None, submission_media_comment_id=None, submission_media_comment_type=None, submission_submitted_at=None, submission_url=None, submission_user_id=None):
        """
        Submit an assignment.
        Make a submission for an assignment. You must be enrolled as a student in
        the course/section to do this.
        All online turn-in submission types are supported in this API. However,
        there are a few things that are not yet supported:
        * Files can be submitted based on a file ID of a user or group file or through the {api:SubmissionsApiController#create_file file upload API}. However, there is no API yet for listing the user and group files.
        * Media comments can be submitted, however, there is no API yet for creating a media comment to submit.
        * Integration with Google Docs is not yet supported.
        """
        # Generated client method: path/data/params are assembled below and
        # sent via BaseCanvasAPI.generic_request.
        path = {}
        data = {}
        params = {}
        # REQUIRED - PATH - course_id
        """
        ID
        """
        path["course_id"] = course_id
        # REQUIRED - PATH - assignment_id
        """
        ID
        """
        path["assignment_id"] = assignment_id
        # OPTIONAL - comment[text_comment]
        """
        Include a textual comment with the submission.
        """
        if comment_text_comment is not None:
            data["comment[text_comment]"] = comment_text_comment
        # REQUIRED - submission[submission_type]
        """
        The type of submission being made. The assignment submission_types must
        include this submission type as an allowed option, or the submission will be rejected with a 400 error.
        The submission_type given determines which of the following parameters is
        used. For instance, to submit a URL, submission [submission_type] must be
        set to "online_url", otherwise the submission [url] parameter will be
        ignored.
        """
        self._validate_enum(submission_submission_type, ["online_text_entry", "online_url", "online_upload", "media_recording", "basic_lti_launch", "student_annotation"])
        data["submission[submission_type]"] = submission_submission_type
        # OPTIONAL - submission[body]
        """
        Submit the assignment as an HTML document snippet. Note this HTML snippet
        will be sanitized using the same ruleset as a submission made from the
        Canvas web UI. The sanitized HTML will be returned in the response as the
        submission body. Requires a submission_type of "online_text_entry".
        """
        if submission_body is not None:
            data["submission[body]"] = submission_body
        # OPTIONAL - submission[url]
        """
        Submit the assignment as a URL. The URL scheme must be "http" or "https",
        no "ftp" or other URL schemes are allowed. If no scheme is given (e.g.
        "www.example.com") then "http" will be assumed. Requires a submission_type
        of "online_url" or "basic_lti_launch".
        """
        if submission_url is not None:
            data["submission[url]"] = submission_url
        # OPTIONAL - submission[file_ids]
        """
        Submit the assignment as a set of one or more previously uploaded files
        residing in the submitting user's files section (or the group's files
        section, for group assignments).
        To upload a new file to submit, see the submissions {api:SubmissionsApiController#create_file Upload a file API}.
        Requires a submission_type of "online_upload".
        """
        if submission_file_ids is not None:
            data["submission[file_ids]"] = submission_file_ids
        # OPTIONAL - submission[media_comment_id]
        """
        The media comment id to submit. Media comment ids can be submitted via
        this API, however, note that there is not yet an API to generate or list
        existing media comments, so this functionality is currently of limited use.
        Requires a submission_type of "media_recording".
        """
        if submission_media_comment_id is not None:
            data["submission[media_comment_id]"] = submission_media_comment_id
        # OPTIONAL - submission[media_comment_type]
        """
        The type of media comment being submitted.
        """
        if submission_media_comment_type is not None:
            self._validate_enum(submission_media_comment_type, ["audio", "video"])
            data["submission[media_comment_type]"] = submission_media_comment_type
        # OPTIONAL - submission[user_id]
        """
        Submit on behalf of the given user. Requires grading permission.
        """
        if submission_user_id is not None:
            data["submission[user_id]"] = submission_user_id
        # OPTIONAL - submission[annotatable_attachment_id]
        """
        The Attachment ID of the document being annotated. This should match
        the annotatable_attachment_id on the assignment.
        Requires a submission_type of "student_annotation".
        """
        if submission_annotatable_attachment_id is not None:
            data["submission[annotatable_attachment_id]"] = submission_annotatable_attachment_id
        # OPTIONAL - submission[submitted_at]
        """
        Choose the time the submission is listed as submitted at. Requires grading permission.
        """
        if submission_submitted_at is not None:
            if issubclass(submission_submitted_at.__class__, str):
                submission_submitted_at = self._validate_iso8601_string(submission_submitted_at)
            elif issubclass(submission_submitted_at.__class__, date) or issubclass(submission_submitted_at.__class__, datetime):
                # NOTE(review): the offset "+00:00" is hard-coded -- this
                # assumes the supplied datetime is in UTC; confirm callers
                # pass UTC (naive or aware) values.
                submission_submitted_at = submission_submitted_at.strftime('%Y-%m-%dT%H:%M:%S+00:00')
            data["submission[submitted_at]"] = submission_submitted_at
        self.logger.debug("POST /api/v1/courses/{course_id}/assignments/{assignment_id}/submissions with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("POST", "/api/v1/courses/{course_id}/assignments/{assignment_id}/submissions".format(**path), data=data, params=params, no_data=True)
    def submit_assignment_sections(self, assignment_id, section_id, submission_submission_type, comment_text_comment=None, submission_annotatable_attachment_id=None, submission_body=None, submission_file_ids=None, submission_media_comment_id=None, submission_media_comment_type=None, submission_submitted_at=None, submission_url=None, submission_user_id=None):
        """
        Submit an assignment.
        Make a submission for an assignment. You must be enrolled as a student in
        the course/section to do this.
        All online turn-in submission types are supported in this API. However,
        there are a few things that are not yet supported:
        * Files can be submitted based on a file ID of a user or group file or through the {api:SubmissionsApiController#create_file file upload API}. However, there is no API yet for listing the user and group files.
        * Media comments can be submitted, however, there is no API yet for creating a media comment to submit.
        * Integration with Google Docs is not yet supported.
        """
        # Generated client method: section-scoped variant of
        # submit_assignment_courses; path/data/params are assembled below and
        # sent via BaseCanvasAPI.generic_request.
        path = {}
        data = {}
        params = {}
        # REQUIRED - PATH - section_id
        """
        ID
        """
        path["section_id"] = section_id
        # REQUIRED - PATH - assignment_id
        """
        ID
        """
        path["assignment_id"] = assignment_id
        # OPTIONAL - comment[text_comment]
        """
        Include a textual comment with the submission.
        """
        if comment_text_comment is not None:
            data["comment[text_comment]"] = comment_text_comment
        # REQUIRED - submission[submission_type]
        """
        The type of submission being made. The assignment submission_types must
        include this submission type as an allowed option, or the submission will be rejected with a 400 error.
        The submission_type given determines which of the following parameters is
        used. For instance, to submit a URL, submission [submission_type] must be
        set to "online_url", otherwise the submission [url] parameter will be
        ignored.
        """
        self._validate_enum(submission_submission_type, ["online_text_entry", "online_url", "online_upload", "media_recording", "basic_lti_launch", "student_annotation"])
        data["submission[submission_type]"] = submission_submission_type
        # OPTIONAL - submission[body]
        """
        Submit the assignment as an HTML document snippet. Note this HTML snippet
        will be sanitized using the same ruleset as a submission made from the
        Canvas web UI. The sanitized HTML will be returned in the response as the
        submission body. Requires a submission_type of "online_text_entry".
        """
        if submission_body is not None:
            data["submission[body]"] = submission_body
        # OPTIONAL - submission[url]
        """
        Submit the assignment as a URL. The URL scheme must be "http" or "https",
        no "ftp" or other URL schemes are allowed. If no scheme is given (e.g.
        "www.example.com") then "http" will be assumed. Requires a submission_type
        of "online_url" or "basic_lti_launch".
        """
        if submission_url is not None:
            data["submission[url]"] = submission_url
        # OPTIONAL - submission[file_ids]
        """
        Submit the assignment as a set of one or more previously uploaded files
        residing in the submitting user's files section (or the group's files
        section, for group assignments).
        To upload a new file to submit, see the submissions {api:SubmissionsApiController#create_file Upload a file API}.
        Requires a submission_type of "online_upload".
        """
        if submission_file_ids is not None:
            data["submission[file_ids]"] = submission_file_ids
        # OPTIONAL - submission[media_comment_id]
        """
        The media comment id to submit. Media comment ids can be submitted via
        this API, however, note that there is not yet an API to generate or list
        existing media comments, so this functionality is currently of limited use.
        Requires a submission_type of "media_recording".
        """
        if submission_media_comment_id is not None:
            data["submission[media_comment_id]"] = submission_media_comment_id
        # OPTIONAL - submission[media_comment_type]
        """
        The type of media comment being submitted.
        """
        if submission_media_comment_type is not None:
            self._validate_enum(submission_media_comment_type, ["audio", "video"])
            data["submission[media_comment_type]"] = submission_media_comment_type
        # OPTIONAL - submission[user_id]
        """
        Submit on behalf of the given user. Requires grading permission.
        """
        if submission_user_id is not None:
            data["submission[user_id]"] = submission_user_id
        # OPTIONAL - submission[annotatable_attachment_id]
        """
        The Attachment ID of the document being annotated. This should match
        the annotatable_attachment_id on the assignment.
        Requires a submission_type of "student_annotation".
        """
        if submission_annotatable_attachment_id is not None:
            data["submission[annotatable_attachment_id]"] = submission_annotatable_attachment_id
        # OPTIONAL - submission[submitted_at]
        """
        Choose the time the submission is listed as submitted at. Requires grading permission.
        """
        if submission_submitted_at is not None:
            if issubclass(submission_submitted_at.__class__, str):
                submission_submitted_at = self._validate_iso8601_string(submission_submitted_at)
            elif issubclass(submission_submitted_at.__class__, date) or issubclass(submission_submitted_at.__class__, datetime):
                # NOTE(review): the offset "+00:00" is hard-coded -- this
                # assumes the supplied datetime is in UTC; confirm callers
                # pass UTC (naive or aware) values.
                submission_submitted_at = submission_submitted_at.strftime('%Y-%m-%dT%H:%M:%S+00:00')
            data["submission[submitted_at]"] = submission_submitted_at
        self.logger.debug("POST /api/v1/sections/{section_id}/assignments/{assignment_id}/submissions with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("POST", "/api/v1/sections/{section_id}/assignments/{assignment_id}/submissions".format(**path), data=data, params=params, no_data=True)
    def list_assignment_submissions_courses(self, assignment_id, course_id, grouped=None, include=None):
        """
        List assignment submissions.
        A paginated list of all existing submissions for an assignment.
        """
        # Generated client method: GET request; all pages are fetched.
        path = {}
        data = {}
        params = {}
        # REQUIRED - PATH - course_id
        """
        ID
        """
        path["course_id"] = course_id
        # REQUIRED - PATH - assignment_id
        """
        ID
        """
        path["assignment_id"] = assignment_id
        # OPTIONAL - include
        """
        Associations to include with the group. "group" will add group_id and group_name.
        """
        if include is not None:
            self._validate_enum(include, ["submission_history", "submission_comments", "rubric_assessment", "assignment", "visibility", "course", "user", "group", "read_status"])
            params["include"] = include
        # OPTIONAL - grouped
        """
        If this argument is true, the response will be grouped by student groups.
        """
        if grouped is not None:
            params["grouped"] = grouped
        self.logger.debug("GET /api/v1/courses/{course_id}/assignments/{assignment_id}/submissions with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/courses/{course_id}/assignments/{assignment_id}/submissions".format(**path), data=data, params=params, all_pages=True)
    def list_assignment_submissions_sections(self, assignment_id, section_id, grouped=None, include=None):
        """
        List assignment submissions.
        A paginated list of all existing submissions for an assignment.
        """
        # Generated client method: section-scoped variant of
        # list_assignment_submissions_courses; all pages are fetched.
        path = {}
        data = {}
        params = {}
        # REQUIRED - PATH - section_id
        """
        ID
        """
        path["section_id"] = section_id
        # REQUIRED - PATH - assignment_id
        """
        ID
        """
        path["assignment_id"] = assignment_id
        # OPTIONAL - include
        """
        Associations to include with the group. "group" will add group_id and group_name.
        """
        if include is not None:
            self._validate_enum(include, ["submission_history", "submission_comments", "rubric_assessment", "assignment", "visibility", "course", "user", "group", "read_status"])
            params["include"] = include
        # OPTIONAL - grouped
        """
        If this argument is true, the response will be grouped by student groups.
        """
        if grouped is not None:
            params["grouped"] = grouped
        self.logger.debug("GET /api/v1/sections/{section_id}/assignments/{assignment_id}/submissions with query params: {params} and form data: {data}".format(params=params, data=data, **path))
        return self.generic_request("GET", "/api/v1/sections/{section_id}/assignments/{assignment_id}/submissions".format(**path), data=data, params=params, all_pages=True)
def list_submissions_for_multiple_assignments_courses(self, course_id, assignment_ids=None, enrollment_state=None, graded_since=None, grading_period_id=None, grouped=None, include=None, order=None, order_direction=None, post_to_sis=None, state_based_on_date=None, student_ids=None, submitted_since=None, workflow_state=None):
    """List submissions for multiple assignments in a course.

    Returns a paginated list of all existing submissions for a given set
    of students and assignments.
    """
    path = {"course_id": course_id}
    data = {}
    params = {}
    # Students to return submissions for; defaults to the calling user.
    # The special id "all" covers every student in the course/section.
    if student_ids is not None:
        params["student_ids"] = student_ids
    # Restrict to these assignments; omitted means all assignments.
    if assignment_ids is not None:
        params["assignment_ids"] = assignment_ids
    # When present, group the response by student rather than a flat array.
    if grouped is not None:
        params["grouped"] = grouped
    # Restrict to post_to_sis assignments and SIS-added enrollments.
    if post_to_sis is not None:
        params["post_to_sis"] = post_to_sis
    # Only submissions submitted after this time (ISO 8601 string, or a
    # date/datetime which is serialized as UTC). Excludes unsubmitted rows.
    if submitted_since is not None:
        if isinstance(submitted_since, str):
            submitted_since = self._validate_iso8601_string(submitted_since)
        elif isinstance(submitted_since, (date, datetime)):
            submitted_since = submitted_since.strftime('%Y-%m-%dT%H:%M:%S+00:00')
        params["submitted_since"] = submitted_since
    # Only submissions graded after this time; excludes ungraded rows.
    if graded_since is not None:
        if isinstance(graded_since, str):
            graded_since = self._validate_iso8601_string(graded_since)
        elif isinstance(graded_since, (date, datetime)):
            graded_since = graded_since.strftime('%Y-%m-%dT%H:%M:%S+00:00')
        params["graded_since"] = graded_since
    # Grading period to query (requires grading periods on the account).
    if grading_period_id is not None:
        params["grading_period_id"] = grading_period_id
    # Current status of the submission.
    if workflow_state is not None:
        self._validate_enum(workflow_state, ["submitted", "unsubmitted", "graded", "pending_review"])
        params["workflow_state"] = workflow_state
    # Current state of the enrollments; omitted includes all non-deleted.
    if enrollment_state is not None:
        self._validate_enum(enrollment_state, ["active", "concluded"])
        params["enrollment_state"] = enrollment_state
    # False ignores the effective enrollment state in favor of
    # workflow_state; ignored unless enrollment_state is also passed.
    if state_based_on_date is not None:
        params["state_based_on_date"] = state_based_on_date
    # Result ordering; defaults to "id". No effect in "grouped" mode.
    if order is not None:
        self._validate_enum(order, ["id", "graded_at"])
        params["order"] = order
    # Ordering direction; defaults to "ascending". No effect in "grouped" mode.
    if order_direction is not None:
        self._validate_enum(order_direction, ["ascending", "descending"])
        params["order_direction"] = order_direction
    # Optional associations; "total_scores" requires the grouped argument.
    if include is not None:
        self._validate_enum(include, ["submission_history", "submission_comments", "rubric_assessment", "assignment", "total_scores", "visibility", "course", "user"])
        params["include"] = include
    self.logger.debug("GET /api/v1/courses/{course_id}/students/submissions with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("GET", "/api/v1/courses/{course_id}/students/submissions".format(**path), data=data, params=params, no_data=True)
def list_submissions_for_multiple_assignments_sections(self, section_id, assignment_ids=None, enrollment_state=None, graded_since=None, grading_period_id=None, grouped=None, include=None, order=None, order_direction=None, post_to_sis=None, state_based_on_date=None, student_ids=None, submitted_since=None, workflow_state=None):
    """List submissions for multiple assignments in a section.

    Returns a paginated list of all existing submissions for a given set
    of students and assignments.
    """
    path = {"section_id": section_id}
    data = {}
    params = {}
    # Students to return submissions for; defaults to the calling user.
    # The special id "all" covers every student in the course/section.
    if student_ids is not None:
        params["student_ids"] = student_ids
    # Restrict to these assignments; omitted means all assignments.
    if assignment_ids is not None:
        params["assignment_ids"] = assignment_ids
    # When present, group the response by student rather than a flat array.
    if grouped is not None:
        params["grouped"] = grouped
    # Restrict to post_to_sis assignments and SIS-added enrollments.
    if post_to_sis is not None:
        params["post_to_sis"] = post_to_sis
    # Only submissions submitted after this time (ISO 8601 string, or a
    # date/datetime which is serialized as UTC). Excludes unsubmitted rows.
    if submitted_since is not None:
        if isinstance(submitted_since, str):
            submitted_since = self._validate_iso8601_string(submitted_since)
        elif isinstance(submitted_since, (date, datetime)):
            submitted_since = submitted_since.strftime('%Y-%m-%dT%H:%M:%S+00:00')
        params["submitted_since"] = submitted_since
    # Only submissions graded after this time; excludes ungraded rows.
    if graded_since is not None:
        if isinstance(graded_since, str):
            graded_since = self._validate_iso8601_string(graded_since)
        elif isinstance(graded_since, (date, datetime)):
            graded_since = graded_since.strftime('%Y-%m-%dT%H:%M:%S+00:00')
        params["graded_since"] = graded_since
    # Grading period to query (requires grading periods on the account).
    if grading_period_id is not None:
        params["grading_period_id"] = grading_period_id
    # Current status of the submission.
    if workflow_state is not None:
        self._validate_enum(workflow_state, ["submitted", "unsubmitted", "graded", "pending_review"])
        params["workflow_state"] = workflow_state
    # Current state of the enrollments; omitted includes all non-deleted.
    if enrollment_state is not None:
        self._validate_enum(enrollment_state, ["active", "concluded"])
        params["enrollment_state"] = enrollment_state
    # False ignores the effective enrollment state in favor of
    # workflow_state; ignored unless enrollment_state is also passed.
    if state_based_on_date is not None:
        params["state_based_on_date"] = state_based_on_date
    # Result ordering; defaults to "id". No effect in "grouped" mode.
    if order is not None:
        self._validate_enum(order, ["id", "graded_at"])
        params["order"] = order
    # Ordering direction; defaults to "ascending". No effect in "grouped" mode.
    if order_direction is not None:
        self._validate_enum(order_direction, ["ascending", "descending"])
        params["order_direction"] = order_direction
    # Optional associations; "total_scores" requires the grouped argument.
    if include is not None:
        self._validate_enum(include, ["submission_history", "submission_comments", "rubric_assessment", "assignment", "total_scores", "visibility", "course", "user"])
        params["include"] = include
    self.logger.debug("GET /api/v1/sections/{section_id}/students/submissions with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("GET", "/api/v1/sections/{section_id}/students/submissions".format(**path), data=data, params=params, no_data=True)
def get_single_submission_courses(self, assignment_id, course_id, user_id, include=None):
    """Get a single submission for a course assignment, based on user id."""
    path = {"course_id": course_id, "assignment_id": assignment_id, "user_id": user_id}
    data = {}
    params = {}
    # Optional associations to include with the submission.
    if include is not None:
        self._validate_enum(include, ["submission_history", "submission_comments", "rubric_assessment", "full_rubric_assessment", "visibility", "course", "user", "read_status"])
        params["include"] = include
    self.logger.debug("GET /api/v1/courses/{course_id}/assignments/{assignment_id}/submissions/{user_id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("GET", "/api/v1/courses/{course_id}/assignments/{assignment_id}/submissions/{user_id}".format(**path), data=data, params=params, no_data=True)
def get_single_submission_sections(self, assignment_id, section_id, user_id, include=None):
    """Get a single submission for a section assignment, based on user id."""
    path = {"section_id": section_id, "assignment_id": assignment_id, "user_id": user_id}
    data = {}
    params = {}
    # Optional associations to include with the submission.
    if include is not None:
        self._validate_enum(include, ["submission_history", "submission_comments", "rubric_assessment", "full_rubric_assessment", "visibility", "course", "user", "read_status"])
        params["include"] = include
    self.logger.debug("GET /api/v1/sections/{section_id}/assignments/{assignment_id}/submissions/{user_id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("GET", "/api/v1/sections/{section_id}/assignments/{assignment_id}/submissions/{user_id}".format(**path), data=data, params=params, no_data=True)
def upload_file_courses(self, assignment_id, course_id, user_id):
    """Start uploading a file to a course assignment submission.

    First step of the file upload workflow; the final step returns the
    attachment data (including the new file id), which can then be POSTed
    to submit an +online_upload+ assignment.
    """
    path = {"course_id": course_id, "assignment_id": assignment_id, "user_id": user_id}
    data = {}
    params = {}
    self.logger.debug("POST /api/v1/courses/{course_id}/assignments/{assignment_id}/submissions/{user_id}/files with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("POST", "/api/v1/courses/{course_id}/assignments/{assignment_id}/submissions/{user_id}/files".format(**path), data=data, params=params, no_data=True)
def upload_file_sections(self, assignment_id, section_id, user_id):
    """Start uploading a file to a section assignment submission.

    First step of the file upload workflow; the final step returns the
    attachment data (including the new file id), which can then be POSTed
    to submit an +online_upload+ assignment.
    """
    path = {"section_id": section_id, "assignment_id": assignment_id, "user_id": user_id}
    data = {}
    params = {}
    self.logger.debug("POST /api/v1/sections/{section_id}/assignments/{assignment_id}/submissions/{user_id}/files with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("POST", "/api/v1/sections/{section_id}/assignments/{assignment_id}/submissions/{user_id}/files".format(**path), data=data, params=params, no_data=True)
def grade_or_comment_on_submission_courses(self, assignment_id, course_id, user_id, comment_file_ids=None, comment_group_comment=None, comment_media_comment_id=None, comment_media_comment_type=None, comment_text_comment=None, include_visibility=None, rubric_assessment=None, submission_excuse=None, submission_late_policy_status=None, submission_posted_grade=None, submission_seconds_late_override=None):
    """Grade or comment on a student's assignment submission (course scope).

    Any submission or rubric_assessment argument requires permission to
    manage grades in the appropriate context (course or section).
    """
    path = {"course_id": course_id, "assignment_id": assignment_id, "user_id": user_id}
    data = {}
    params = {}
    # Textual comment to add to the submission.
    if comment_text_comment is not None:
        data["comment[text_comment]"] = comment_text_comment
    # Send the comment to the entire group (group assignments with a
    # text_comment only); defaults to false server-side.
    if comment_group_comment is not None:
        data["comment[group_comment]"] = comment_group_comment
    # Id of an existing audio/video media comment to attach.
    if comment_media_comment_id is not None:
        data["comment[media_comment_id]"] = comment_media_comment_id
    # Type of the media comment being added.
    if comment_media_comment_type is not None:
        self._validate_enum(comment_media_comment_type, ["audio", "video"])
        data["comment[media_comment_type]"] = comment_media_comment_type
    # Files previously uploaded via the Submission Comment API's files action.
    if comment_file_ids is not None:
        data["comment[file_ids]"] = comment_file_ids
    # Whether this assignment is visible to the owner of the submission.
    if include_visibility is not None:
        data["include[visibility]"] = include_visibility
    # Score for the submission; accepts points ("13.5"), a percentage
    # ("40%"), a letter grade ("A-"), or "pass"/"complete"/"fail"/
    # "incomplete". pass_fail assignments only accept 0 or full points.
    if submission_posted_grade is not None:
        data["submission[posted_grade]"] = submission_posted_grade
    # "Excused" status of the assignment.
    if submission_excuse is not None:
        data["submission[excuse]"] = submission_excuse
    # Late policy status: "late", "missing", "none", or null.
    if submission_late_policy_status is not None:
        data["submission[late_policy_status]"] = submission_late_policy_status
    # Seconds late, honored when the late policy status is "late".
    if submission_seconds_late_override is not None:
        data["submission[seconds_late_override]"] = submission_seconds_late_override
    # Rubric assessment keyed per rubric row:
    # rubric_assessment[criterion_id][points|rating_id|comments].
    if rubric_assessment is not None:
        data["rubric_assessment"] = rubric_assessment
    self.logger.debug("PUT /api/v1/courses/{course_id}/assignments/{assignment_id}/submissions/{user_id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("PUT", "/api/v1/courses/{course_id}/assignments/{assignment_id}/submissions/{user_id}".format(**path), data=data, params=params, no_data=True)
def grade_or_comment_on_submission_sections(self, assignment_id, section_id, user_id, comment_file_ids=None, comment_group_comment=None, comment_media_comment_id=None, comment_media_comment_type=None, comment_text_comment=None, include_visibility=None, rubric_assessment=None, submission_excuse=None, submission_late_policy_status=None, submission_posted_grade=None, submission_seconds_late_override=None):
    """Grade or comment on a student's assignment submission (section scope).

    Any submission or rubric_assessment argument requires permission to
    manage grades in the appropriate context (course or section).
    """
    path = {"section_id": section_id, "assignment_id": assignment_id, "user_id": user_id}
    data = {}
    params = {}
    # Textual comment to add to the submission.
    if comment_text_comment is not None:
        data["comment[text_comment]"] = comment_text_comment
    # Send the comment to the entire group (group assignments with a
    # text_comment only); defaults to false server-side.
    if comment_group_comment is not None:
        data["comment[group_comment]"] = comment_group_comment
    # Id of an existing audio/video media comment to attach.
    if comment_media_comment_id is not None:
        data["comment[media_comment_id]"] = comment_media_comment_id
    # Type of the media comment being added.
    if comment_media_comment_type is not None:
        self._validate_enum(comment_media_comment_type, ["audio", "video"])
        data["comment[media_comment_type]"] = comment_media_comment_type
    # Files previously uploaded via the Submission Comment API's files action.
    if comment_file_ids is not None:
        data["comment[file_ids]"] = comment_file_ids
    # Whether this assignment is visible to the owner of the submission.
    if include_visibility is not None:
        data["include[visibility]"] = include_visibility
    # Score for the submission; accepts points ("13.5"), a percentage
    # ("40%"), a letter grade ("A-"), or "pass"/"complete"/"fail"/
    # "incomplete". pass_fail assignments only accept 0 or full points.
    if submission_posted_grade is not None:
        data["submission[posted_grade]"] = submission_posted_grade
    # "Excused" status of the assignment.
    if submission_excuse is not None:
        data["submission[excuse]"] = submission_excuse
    # Late policy status: "late", "missing", "none", or null.
    if submission_late_policy_status is not None:
        data["submission[late_policy_status]"] = submission_late_policy_status
    # Seconds late, honored when the late policy status is "late".
    if submission_seconds_late_override is not None:
        data["submission[seconds_late_override]"] = submission_seconds_late_override
    # Rubric assessment keyed per rubric row:
    # rubric_assessment[criterion_id][points|rating_id|comments].
    if rubric_assessment is not None:
        data["rubric_assessment"] = rubric_assessment
    self.logger.debug("PUT /api/v1/sections/{section_id}/assignments/{assignment_id}/submissions/{user_id} with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("PUT", "/api/v1/sections/{section_id}/assignments/{assignment_id}/submissions/{user_id}".format(**path), data=data, params=params, no_data=True)
def list_gradeable_students(self, assignment_id, course_id):
    """List students eligible to submit the assignment (paginated).

    The caller must have permission to view grades. With anonymous grading
    and the allow_new_anonymous_id parameter, the data identifies students
    only by an assignment-specific anonymous ID. Section-limited
    instructors only see students in their own sections.
    """
    path = {"course_id": course_id, "assignment_id": assignment_id}
    data = {}
    params = {}
    self.logger.debug("GET /api/v1/courses/{course_id}/assignments/{assignment_id}/gradeable_students with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("GET", "/api/v1/courses/{course_id}/assignments/{assignment_id}/gradeable_students".format(**path), data=data, params=params, all_pages=True)
def list_multiple_assignments_gradeable_students(self, course_id, assignment_ids=None):
    """List students eligible to submit a list of assignments (paginated).

    The caller must have permission to view grades for the requested
    course; section-limited instructors only see their own sections.
    """
    path = {"course_id": course_id}
    data = {}
    params = {}
    # Assignments being requested.
    if assignment_ids is not None:
        params["assignment_ids"] = assignment_ids
    self.logger.debug("GET /api/v1/courses/{course_id}/assignments/gradeable_students with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("GET", "/api/v1/courses/{course_id}/assignments/gradeable_students".format(**path), data=data, params=params, no_data=True)
def grade_or_comment_on_multiple_submissions_courses_submissions(self, course_id, grade_data_assignment_id_student_id=None, grade_data_student_id_excuse=None, grade_data_student_id_file_ids=None, grade_data_student_id_group_comment=None, grade_data_student_id_media_comment_id=None, grade_data_student_id_media_comment_type=None, grade_data_student_id_posted_grade=None, grade_data_student_id_rubric_assessment=None, grade_data_student_id_text_comment=None):
    """Grade or comment on multiple submissions.

    Updates the grading and comments on multiple students' assignment
    submissions in an asynchronous job. The user must have permission to
    manage grades in the appropriate context (course or section).

    BUGFIX(review): the generated parameter names contained literal ``<``
    and ``>`` (e.g. ``grade_data_<student_id>_posted_grade``), which are
    not valid Python identifiers and made this function a SyntaxError.
    The brackets are stripped from the parameter names; the form-data
    keys sent to the API are unchanged.
    """
    path = {}
    data = {}
    params = {}
    # REQUIRED - PATH - course_id
    path["course_id"] = course_id
    # See documentation for the posted_grade argument in the
    # Submissions Update endpoint documentation.
    if grade_data_student_id_posted_grade is not None:
        data["grade_data[<student_id>][posted_grade]"] = grade_data_student_id_posted_grade
    # See documentation for the excuse argument in the
    # Submissions Update endpoint documentation.
    if grade_data_student_id_excuse is not None:
        data["grade_data[<student_id>][excuse]"] = grade_data_student_id_excuse
    # See documentation for the rubric_assessment argument in the
    # Submissions Update endpoint documentation.
    if grade_data_student_id_rubric_assessment is not None:
        data["grade_data[<student_id>][rubric_assessment]"] = grade_data_student_id_rubric_assessment
    # Textual comment for the student's submission.
    if grade_data_student_id_text_comment is not None:
        data["grade_data[<student_id>][text_comment]"] = grade_data_student_id_text_comment
    # Whether the comment is sent to the whole group.
    if grade_data_student_id_group_comment is not None:
        data["grade_data[<student_id>][group_comment]"] = grade_data_student_id_group_comment
    # Id of an existing media comment to attach.
    if grade_data_student_id_media_comment_id is not None:
        data["grade_data[<student_id>][media_comment_id]"] = grade_data_student_id_media_comment_id
    # Type of the media comment being added.
    if grade_data_student_id_media_comment_type is not None:
        self._validate_enum(grade_data_student_id_media_comment_type, ["audio", "video"])
        data["grade_data[<student_id>][media_comment_type]"] = grade_data_student_id_media_comment_type
    # See documentation for the comment[] arguments in the
    # Submissions Update endpoint documentation.
    if grade_data_student_id_file_ids is not None:
        data["grade_data[<student_id>][file_ids]"] = grade_data_student_id_file_ids
    # Specifies which assignment to grade; not needed on the
    # assignment-specific endpoints.
    if grade_data_assignment_id_student_id is not None:
        data["grade_data[<assignment_id>][<student_id>]"] = grade_data_assignment_id_student_id
    self.logger.debug("POST /api/v1/courses/{course_id}/submissions/update_grades with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("POST", "/api/v1/courses/{course_id}/submissions/update_grades".format(**path), data=data, params=params, single_item=True)
def grade_or_comment_on_multiple_submissions_courses_assignments(self, assignment_id, course_id, grade_data_assignment_id_student_id=None, grade_data_student_id_excuse=None, grade_data_student_id_file_ids=None, grade_data_student_id_group_comment=None, grade_data_student_id_media_comment_id=None, grade_data_student_id_media_comment_type=None, grade_data_student_id_posted_grade=None, grade_data_student_id_rubric_assessment=None, grade_data_student_id_text_comment=None):
    """
    Grade or comment on multiple submissions.

    Update the grading and comments on multiple students' assignment
    submissions in an asynchronous job. The user must have permission to
    manage grades in the appropriate context (course or section).

    NOTE: the parameter names were sanitized — the generator had emitted
    names like ``grade_data_<student_id>_posted_grade``, and ``<``/``>`` are
    not valid characters in Python identifiers, so this method previously
    could not even be imported. The form-data keys sent to Canvas are
    unchanged.

    :param assignment_id: ID of the assignment (path parameter).
    :param course_id: ID of the course (path parameter).
    :param grade_data_assignment_id_student_id: specifies which assignment to
        grade; not necessary when using the assignment-specific endpoints.
    :param grade_data_student_id_excuse: see the excuse argument in the
        Submissions Update documentation.
    :param grade_data_student_id_file_ids: see the comment[] arguments in the
        Submissions Update documentation.
    :param grade_data_student_id_group_comment: no description in the API docs.
    :param grade_data_student_id_media_comment_id: no description in the API docs.
    :param grade_data_student_id_media_comment_type: one of "audio" or "video".
    :param grade_data_student_id_posted_grade: see the posted_grade argument in
        the Submissions Update documentation.
    :param grade_data_student_id_rubric_assessment: see the rubric_assessment
        argument in the Submissions Update documentation.
    :param grade_data_student_id_text_comment: no description in the API docs.
    :returns: result of the POST request (a Progress object per the Canvas docs).
    """
    # NOTE(review): the form keys below keep the literal "<student_id>"
    # placeholder from the API docs — presumably callers must substitute a
    # real student id for these to take effect; confirm against the Canvas
    # Submissions API before relying on them.
    path = {"course_id": course_id, "assignment_id": assignment_id}
    data = {}
    params = {}
    if grade_data_student_id_posted_grade is not None:
        data["grade_data[<student_id>][posted_grade]"] = grade_data_student_id_posted_grade
    if grade_data_student_id_excuse is not None:
        data["grade_data[<student_id>][excuse]"] = grade_data_student_id_excuse
    if grade_data_student_id_rubric_assessment is not None:
        data["grade_data[<student_id>][rubric_assessment]"] = grade_data_student_id_rubric_assessment
    if grade_data_student_id_text_comment is not None:
        data["grade_data[<student_id>][text_comment]"] = grade_data_student_id_text_comment
    if grade_data_student_id_group_comment is not None:
        data["grade_data[<student_id>][group_comment]"] = grade_data_student_id_group_comment
    if grade_data_student_id_media_comment_id is not None:
        data["grade_data[<student_id>][media_comment_id]"] = grade_data_student_id_media_comment_id
    if grade_data_student_id_media_comment_type is not None:
        # Canvas only accepts these two media comment types.
        self._validate_enum(grade_data_student_id_media_comment_type, ["audio", "video"])
        data["grade_data[<student_id>][media_comment_type]"] = grade_data_student_id_media_comment_type
    if grade_data_student_id_file_ids is not None:
        data["grade_data[<student_id>][file_ids]"] = grade_data_student_id_file_ids
    if grade_data_assignment_id_student_id is not None:
        data["grade_data[<assignment_id>][<student_id>]"] = grade_data_assignment_id_student_id
    self.logger.debug("POST /api/v1/courses/{course_id}/assignments/{assignment_id}/submissions/update_grades with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("POST", "/api/v1/courses/{course_id}/assignments/{assignment_id}/submissions/update_grades".format(**path), data=data, params=params, single_item=True)
def grade_or_comment_on_multiple_submissions_sections_submissions(self, section_id, grade_data_assignment_id_student_id=None, grade_data_student_id_excuse=None, grade_data_student_id_file_ids=None, grade_data_student_id_group_comment=None, grade_data_student_id_media_comment_id=None, grade_data_student_id_media_comment_type=None, grade_data_student_id_posted_grade=None, grade_data_student_id_rubric_assessment=None, grade_data_student_id_text_comment=None):
    """
    Grade or comment on multiple submissions (section scope).

    Update the grading and comments on multiple students' assignment
    submissions in an asynchronous job. The user must have permission to
    manage grades in the appropriate context (course or section).

    NOTE: the parameter names were sanitized — the generator had emitted
    names like ``grade_data_<student_id>_posted_grade``, and ``<``/``>`` are
    not valid characters in Python identifiers, so this method previously
    could not even be imported. The form-data keys sent to Canvas are
    unchanged.

    :param section_id: ID of the section (path parameter).
    :param grade_data_assignment_id_student_id: specifies which assignment to
        grade; not necessary when using the assignment-specific endpoints.
    :param grade_data_student_id_excuse: see the excuse argument in the
        Submissions Update documentation.
    :param grade_data_student_id_file_ids: see the comment[] arguments in the
        Submissions Update documentation.
    :param grade_data_student_id_group_comment: no description in the API docs.
    :param grade_data_student_id_media_comment_id: no description in the API docs.
    :param grade_data_student_id_media_comment_type: one of "audio" or "video".
    :param grade_data_student_id_posted_grade: see the posted_grade argument in
        the Submissions Update documentation.
    :param grade_data_student_id_rubric_assessment: see the rubric_assessment
        argument in the Submissions Update documentation.
    :param grade_data_student_id_text_comment: no description in the API docs.
    :returns: result of the POST request (a Progress object per the Canvas docs).
    """
    # NOTE(review): the form keys keep the literal "<student_id>" placeholder
    # from the API docs — presumably a real student id must be substituted;
    # confirm against the Canvas Submissions API.
    path = {"section_id": section_id}
    data = {}
    params = {}
    if grade_data_student_id_posted_grade is not None:
        data["grade_data[<student_id>][posted_grade]"] = grade_data_student_id_posted_grade
    if grade_data_student_id_excuse is not None:
        data["grade_data[<student_id>][excuse]"] = grade_data_student_id_excuse
    if grade_data_student_id_rubric_assessment is not None:
        data["grade_data[<student_id>][rubric_assessment]"] = grade_data_student_id_rubric_assessment
    if grade_data_student_id_text_comment is not None:
        data["grade_data[<student_id>][text_comment]"] = grade_data_student_id_text_comment
    if grade_data_student_id_group_comment is not None:
        data["grade_data[<student_id>][group_comment]"] = grade_data_student_id_group_comment
    if grade_data_student_id_media_comment_id is not None:
        data["grade_data[<student_id>][media_comment_id]"] = grade_data_student_id_media_comment_id
    if grade_data_student_id_media_comment_type is not None:
        # Canvas only accepts these two media comment types.
        self._validate_enum(grade_data_student_id_media_comment_type, ["audio", "video"])
        data["grade_data[<student_id>][media_comment_type]"] = grade_data_student_id_media_comment_type
    if grade_data_student_id_file_ids is not None:
        data["grade_data[<student_id>][file_ids]"] = grade_data_student_id_file_ids
    if grade_data_assignment_id_student_id is not None:
        data["grade_data[<assignment_id>][<student_id>]"] = grade_data_assignment_id_student_id
    self.logger.debug("POST /api/v1/sections/{section_id}/submissions/update_grades with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("POST", "/api/v1/sections/{section_id}/submissions/update_grades".format(**path), data=data, params=params, single_item=True)
def grade_or_comment_on_multiple_submissions_sections_assignments(self, assignment_id, section_id, grade_data_assignment_id_student_id=None, grade_data_student_id_excuse=None, grade_data_student_id_file_ids=None, grade_data_student_id_group_comment=None, grade_data_student_id_media_comment_id=None, grade_data_student_id_media_comment_type=None, grade_data_student_id_posted_grade=None, grade_data_student_id_rubric_assessment=None, grade_data_student_id_text_comment=None):
    """
    Grade or comment on multiple submissions (section + assignment scope).

    Update the grading and comments on multiple students' assignment
    submissions in an asynchronous job. The user must have permission to
    manage grades in the appropriate context (course or section).

    NOTE: the parameter names were sanitized — the generator had emitted
    names like ``grade_data_<student_id>_posted_grade``, and ``<``/``>`` are
    not valid characters in Python identifiers, so this method previously
    could not even be imported. The form-data keys sent to Canvas are
    unchanged.

    :param assignment_id: ID of the assignment (path parameter).
    :param section_id: ID of the section (path parameter).
    :param grade_data_assignment_id_student_id: specifies which assignment to
        grade; not necessary when using the assignment-specific endpoints.
    :param grade_data_student_id_excuse: see the excuse argument in the
        Submissions Update documentation.
    :param grade_data_student_id_file_ids: see the comment[] arguments in the
        Submissions Update documentation.
    :param grade_data_student_id_group_comment: no description in the API docs.
    :param grade_data_student_id_media_comment_id: no description in the API docs.
    :param grade_data_student_id_media_comment_type: one of "audio" or "video".
    :param grade_data_student_id_posted_grade: see the posted_grade argument in
        the Submissions Update documentation.
    :param grade_data_student_id_rubric_assessment: see the rubric_assessment
        argument in the Submissions Update documentation.
    :param grade_data_student_id_text_comment: no description in the API docs.
    :returns: result of the POST request (a Progress object per the Canvas docs).
    """
    # NOTE(review): the form keys keep the literal "<student_id>" placeholder
    # from the API docs — presumably a real student id must be substituted;
    # confirm against the Canvas Submissions API.
    path = {"section_id": section_id, "assignment_id": assignment_id}
    data = {}
    params = {}
    if grade_data_student_id_posted_grade is not None:
        data["grade_data[<student_id>][posted_grade]"] = grade_data_student_id_posted_grade
    if grade_data_student_id_excuse is not None:
        data["grade_data[<student_id>][excuse]"] = grade_data_student_id_excuse
    if grade_data_student_id_rubric_assessment is not None:
        data["grade_data[<student_id>][rubric_assessment]"] = grade_data_student_id_rubric_assessment
    if grade_data_student_id_text_comment is not None:
        data["grade_data[<student_id>][text_comment]"] = grade_data_student_id_text_comment
    if grade_data_student_id_group_comment is not None:
        data["grade_data[<student_id>][group_comment]"] = grade_data_student_id_group_comment
    if grade_data_student_id_media_comment_id is not None:
        data["grade_data[<student_id>][media_comment_id]"] = grade_data_student_id_media_comment_id
    if grade_data_student_id_media_comment_type is not None:
        # Canvas only accepts these two media comment types.
        self._validate_enum(grade_data_student_id_media_comment_type, ["audio", "video"])
        data["grade_data[<student_id>][media_comment_type]"] = grade_data_student_id_media_comment_type
    if grade_data_student_id_file_ids is not None:
        data["grade_data[<student_id>][file_ids]"] = grade_data_student_id_file_ids
    if grade_data_assignment_id_student_id is not None:
        data["grade_data[<assignment_id>][<student_id>]"] = grade_data_assignment_id_student_id
    self.logger.debug("POST /api/v1/sections/{section_id}/assignments/{assignment_id}/submissions/update_grades with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("POST", "/api/v1/sections/{section_id}/assignments/{assignment_id}/submissions/update_grades".format(**path), data=data, params=params, single_item=True)
def mark_submission_as_read_courses(self, assignment_id, course_id, user_id):
    """
    Mark a submission as read (course scope).

    No request fields are necessary; on success Canvas responds with
    204 No Content and an empty body.

    :param assignment_id: ID of the assignment (path parameter).
    :param course_id: ID of the course (path parameter).
    :param user_id: ID of the user whose submission is marked read.
    """
    # All three arguments are required path parameters.
    path = {
        "course_id": course_id,
        "assignment_id": assignment_id,
        "user_id": user_id,
    }
    data = {}
    params = {}
    self.logger.debug("PUT /api/v1/courses/{course_id}/assignments/{assignment_id}/submissions/{user_id}/read with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("PUT", "/api/v1/courses/{course_id}/assignments/{assignment_id}/submissions/{user_id}/read".format(**path), data=data, params=params, no_data=True)
def mark_submission_as_read_sections(self, assignment_id, section_id, user_id):
    """
    Mark a submission as read (section scope).

    No request fields are necessary; on success Canvas responds with
    204 No Content and an empty body.

    :param assignment_id: ID of the assignment (path parameter).
    :param section_id: ID of the section (path parameter).
    :param user_id: ID of the user whose submission is marked read.
    """
    # All three arguments are required path parameters.
    path = {
        "section_id": section_id,
        "assignment_id": assignment_id,
        "user_id": user_id,
    }
    data = {}
    params = {}
    self.logger.debug("PUT /api/v1/sections/{section_id}/assignments/{assignment_id}/submissions/{user_id}/read with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("PUT", "/api/v1/sections/{section_id}/assignments/{assignment_id}/submissions/{user_id}/read".format(**path), data=data, params=params, no_data=True)
def mark_submission_as_unread_courses(self, assignment_id, course_id, user_id):
    """
    Mark a submission as unread (course scope).

    No request fields are necessary; on success Canvas responds with
    204 No Content and an empty body.

    :param assignment_id: ID of the assignment (path parameter).
    :param course_id: ID of the course (path parameter).
    :param user_id: ID of the user whose submission is marked unread.
    """
    # All three arguments are required path parameters.
    path = {
        "course_id": course_id,
        "assignment_id": assignment_id,
        "user_id": user_id,
    }
    data = {}
    params = {}
    self.logger.debug("DELETE /api/v1/courses/{course_id}/assignments/{assignment_id}/submissions/{user_id}/read with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("DELETE", "/api/v1/courses/{course_id}/assignments/{assignment_id}/submissions/{user_id}/read".format(**path), data=data, params=params, no_data=True)
def mark_submission_as_unread_sections(self, assignment_id, section_id, user_id):
    """
    Mark a submission as unread (section scope).

    No request fields are necessary; on success Canvas responds with
    204 No Content and an empty body.

    :param assignment_id: ID of the assignment (path parameter).
    :param section_id: ID of the section (path parameter).
    :param user_id: ID of the user whose submission is marked unread.
    """
    # All three arguments are required path parameters.
    path = {
        "section_id": section_id,
        "assignment_id": assignment_id,
        "user_id": user_id,
    }
    data = {}
    params = {}
    self.logger.debug("DELETE /api/v1/sections/{section_id}/assignments/{assignment_id}/submissions/{user_id}/read with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("DELETE", "/api/v1/sections/{section_id}/assignments/{assignment_id}/submissions/{user_id}/read".format(**path), data=data, params=params, no_data=True)
def submission_summary_courses(self, assignment_id, course_id, grouped=None):
    """
    Submission Summary (course scope).

    Returns the number of submissions for the given assignment based on
    gradeable students, split into three categories: graded, ungraded,
    not submitted.

    :param assignment_id: ID of the assignment (path parameter).
    :param course_id: ID of the course (path parameter).
    :param grouped: optional; when true the response takes student groups
        into account.
    """
    path = {"course_id": course_id, "assignment_id": assignment_id}
    data = {}
    params = {}
    # "grouped" is the only query parameter, and only sent when provided.
    if grouped is not None:
        params["grouped"] = grouped
    self.logger.debug("GET /api/v1/courses/{course_id}/assignments/{assignment_id}/submission_summary with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("GET", "/api/v1/courses/{course_id}/assignments/{assignment_id}/submission_summary".format(**path), data=data, params=params, no_data=True)
def submission_summary_sections(self, assignment_id, section_id, grouped=None):
    """
    Submission Summary (section scope).

    Returns the number of submissions for the given assignment based on
    gradeable students, split into three categories: graded, ungraded,
    not submitted.

    :param assignment_id: ID of the assignment (path parameter).
    :param section_id: ID of the section (path parameter).
    :param grouped: optional; when true the response takes student groups
        into account.
    """
    path = {"section_id": section_id, "assignment_id": assignment_id}
    data = {}
    params = {}
    # "grouped" is the only query parameter, and only sent when provided.
    if grouped is not None:
        params["grouped"] = grouped
    self.logger.debug("GET /api/v1/sections/{section_id}/assignments/{assignment_id}/submission_summary with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("GET", "/api/v1/sections/{section_id}/assignments/{assignment_id}/submission_summary".format(**path), data=data, params=params, no_data=True)
class Mediacomment(BaseModel):
    """Mediacomment Model.

    Read-only snapshot of a Canvas MediaComment; setting any property only
    changes the local object, never the remote Canvas instance (each setter
    logs a warning to that effect).
    """

    def __init__(self, content_type=None, display_name=None, media_id=None, media_type=None, url=None):
        """Init method for Mediacomment class."""
        self._content_type = content_type
        self._display_name = display_name
        self._media_id = media_id
        self._media_type = media_type
        self._url = url
        self.logger = logging.getLogger('py3canvas.Mediacomment')

    @property
    def content_type(self):
        """content_type."""
        return self._content_type

    @content_type.setter
    def content_type(self, value):
        """Setter for content_type property."""
        # logger.warn is a deprecated alias (since Python 3.3); use warning().
        self.logger.warning("Setting values on content_type will NOT update the remote Canvas instance.")
        self._content_type = value

    @property
    def display_name(self):
        """display_name."""
        return self._display_name

    @display_name.setter
    def display_name(self, value):
        """Setter for display_name property."""
        self.logger.warning("Setting values on display_name will NOT update the remote Canvas instance.")
        self._display_name = value

    @property
    def media_id(self):
        """media_id."""
        return self._media_id

    @media_id.setter
    def media_id(self, value):
        """Setter for media_id property."""
        self.logger.warning("Setting values on media_id will NOT update the remote Canvas instance.")
        self._media_id = value

    @property
    def media_type(self):
        """media_type."""
        return self._media_type

    @media_type.setter
    def media_type(self, value):
        """Setter for media_type property."""
        self.logger.warning("Setting values on media_type will NOT update the remote Canvas instance.")
        self._media_type = value

    @property
    def url(self):
        """url."""
        return self._url

    @url.setter
    def url(self, value):
        """Setter for url property."""
        self.logger.warning("Setting values on url will NOT update the remote Canvas instance.")
        self._url = value
class Submissioncomment(BaseModel):
    """Submissioncomment Model.

    Read-only snapshot of a Canvas SubmissionComment; setting any property
    only changes the local object, never the remote Canvas instance (each
    setter logs a warning to that effect).
    """

    def __init__(self, id=None, author_id=None, author_name=None, author=None, comment=None, created_at=None, edited_at=None, media_comment=None):
        """Init method for Submissioncomment class."""
        self._id = id
        self._author_id = author_id
        self._author_name = author_name
        self._author = author
        self._comment = comment
        self._created_at = created_at
        self._edited_at = edited_at
        self._media_comment = media_comment
        self.logger = logging.getLogger('py3canvas.Submissioncomment')

    @property
    def id(self):
        """id."""
        return self._id

    @id.setter
    def id(self, value):
        """Setter for id property."""
        # logger.warn is a deprecated alias (since Python 3.3); use warning().
        self.logger.warning("Setting values on id will NOT update the remote Canvas instance.")
        self._id = value

    @property
    def author_id(self):
        """author_id."""
        return self._author_id

    @author_id.setter
    def author_id(self, value):
        """Setter for author_id property."""
        self.logger.warning("Setting values on author_id will NOT update the remote Canvas instance.")
        self._author_id = value

    @property
    def author_name(self):
        """author_name."""
        return self._author_name

    @author_name.setter
    def author_name(self, value):
        """Setter for author_name property."""
        self.logger.warning("Setting values on author_name will NOT update the remote Canvas instance.")
        self._author_name = value

    @property
    def author(self):
        """Abbreviated user object UserDisplay (see users API)."""
        return self._author

    @author.setter
    def author(self, value):
        """Setter for author property."""
        self.logger.warning("Setting values on author will NOT update the remote Canvas instance.")
        self._author = value

    @property
    def comment(self):
        """comment."""
        return self._comment

    @comment.setter
    def comment(self, value):
        """Setter for comment property."""
        self.logger.warning("Setting values on comment will NOT update the remote Canvas instance.")
        self._comment = value

    @property
    def created_at(self):
        """created_at."""
        return self._created_at

    @created_at.setter
    def created_at(self, value):
        """Setter for created_at property."""
        self.logger.warning("Setting values on created_at will NOT update the remote Canvas instance.")
        self._created_at = value

    @property
    def edited_at(self):
        """edited_at."""
        return self._edited_at

    @edited_at.setter
    def edited_at(self, value):
        """Setter for edited_at property."""
        self.logger.warning("Setting values on edited_at will NOT update the remote Canvas instance.")
        self._edited_at = value

    @property
    def media_comment(self):
        """media_comment."""
        return self._media_comment

    @media_comment.setter
    def media_comment(self, value):
        """Setter for media_comment property."""
        self.logger.warning("Setting values on media_comment will NOT update the remote Canvas instance.")
        self._media_comment = value
class Submission(BaseModel):
"""Submission Model."""
def __init__(self, assignment_id=None, assignment=None, course=None, attempt=None, body=None, grade=None, grade_matches_current_submission=None, html_url=None, preview_url=None, score=None, submission_comments=None, submission_type=None, submitted_at=None, url=None, user_id=None, grader_id=None, graded_at=None, user=None, late=None, assignment_visible=None, excused=None, missing=None, late_policy_status=None, points_deducted=None, seconds_late=None, workflow_state=None, extra_attempts=None, anonymous_id=None, posted_at=None, read_status=None):
"""Init method for Submission class."""
self._assignment_id = assignment_id
self._assignment = assignment
self._course = course
self._attempt = attempt
self._body = body
self._grade = grade
self._grade_matches_current_submission = grade_matches_current_submission
self._html_url = html_url
self._preview_url = preview_url
self._score = score
self._submission_comments = submission_comments
self._submission_type = submission_type
self._submitted_at = submitted_at
self._url = url
self._user_id = user_id
self._grader_id = grader_id
self._graded_at = graded_at
self._user = user
self._late = late
self._assignment_visible = assignment_visible
self._excused = excused
self._missing = missing
self._late_policy_status = late_policy_status
self._points_deducted = points_deducted
self._seconds_late = seconds_late
self._workflow_state = workflow_state
self._extra_attempts = extra_attempts
self._anonymous_id = anonymous_id
self._posted_at = posted_at
self._read_status = read_status
self.logger = logging.getLogger('py3canvas.Submission')
@property
def assignment_id(self):
"""The submission's assignment id."""
return self._assignment_id
@assignment_id.setter
def assignment_id(self, value):
"""Setter for assignment_id property."""
self.logger.warn("Setting values on assignment_id will NOT update the remote Canvas instance.")
self._assignment_id = value
@property
def assignment(self):
"""The submission's assignment (see the assignments API) (optional)."""
return self._assignment
@assignment.setter
def assignment(self, value):
"""Setter for assignment property."""
self.logger.warn("Setting values on assignment will NOT update the remote Canvas instance.")
self._assignment = value
@property
def course(self):
"""The submission's course (see the course API) (optional)."""
return self._course
@course.setter
def course(self, value):
"""Setter for course property."""
self.logger.warn("Setting values on course will NOT update the remote Canvas instance.")
self._course = value
@property
def attempt(self):
"""This is the submission attempt number."""
return self._attempt
@attempt.setter
def attempt(self, value):
"""Setter for attempt property."""
self.logger.warn("Setting values on attempt will NOT update the remote Canvas instance.")
self._attempt = value
@property
def body(self):
"""The content of the submission, if it was submitted directly in a text field."""
return self._body
@body.setter
def body(self, value):
"""Setter for body property."""
self.logger.warn("Setting values on body will NOT update the remote Canvas instance.")
self._body = value
@property
def grade(self):
"""The grade for the submission, translated into the assignment grading scheme (so a letter grade, for example)."""
return self._grade
@grade.setter
def grade(self, value):
"""Setter for grade property."""
self.logger.warn("Setting values on grade will NOT update the remote Canvas instance.")
self._grade = value
@property
def grade_matches_current_submission(self):
"""A boolean flag which is false if the student has re-submitted since the submission was last graded."""
return self._grade_matches_current_submission
@grade_matches_current_submission.setter
def grade_matches_current_submission(self, value):
"""Setter for grade_matches_current_submission property."""
self.logger.warn("Setting values on grade_matches_current_submission will NOT update the remote Canvas instance.")
self._grade_matches_current_submission = value
@property
def html_url(self):
"""URL to the submission. This will require the user to log in."""
return self._html_url
@html_url.setter
def html_url(self, value):
"""Setter for html_url property."""
self.logger.warn("Setting values on html_url will NOT update the remote Canvas instance.")
self._html_url = value
@property
def preview_url(self):
"""URL to the submission preview. This will require the user to log in."""
return self._preview_url
@preview_url.setter
def preview_url(self, value):
"""Setter for preview_url property."""
self.logger.warn("Setting values on preview_url will NOT update the remote Canvas instance.")
self._preview_url = value
@property
def score(self):
"""The raw score."""
return self._score
@score.setter
def score(self, value):
"""Setter for score property."""
self.logger.warn("Setting values on score will NOT update the remote Canvas instance.")
self._score = value
@property
def submission_comments(self):
"""Associated comments for a submission (optional)."""
return self._submission_comments
@submission_comments.setter
def submission_comments(self, value):
"""Setter for submission_comments property."""
self.logger.warn("Setting values on submission_comments will NOT update the remote Canvas instance.")
self._submission_comments = value
@property
def submission_type(self):
"""The types of submission ex: ('online_text_entry'|'online_url'|'online_upload'|'media_recording'|'student_annotation')."""
return self._submission_type
@submission_type.setter
def submission_type(self, value):
"""Setter for submission_type property."""
self.logger.warn("Setting values on submission_type will NOT update the remote Canvas instance.")
self._submission_type = value
@property
def submitted_at(self):
"""The timestamp when the assignment was submitted."""
return self._submitted_at
@submitted_at.setter
def submitted_at(self, value):
"""Setter for submitted_at property."""
self.logger.warn("Setting values on submitted_at will NOT update the remote Canvas instance.")
self._submitted_at = value
@property
def url(self):
"""The URL of the submission (for 'online_url' submissions)."""
return self._url
@url.setter
def url(self, value):
"""Setter for url property."""
self.logger.warn("Setting values on url will NOT update the remote Canvas instance.")
self._url = value
@property
def user_id(self):
"""The id of the user who created the submission."""
return self._user_id
@user_id.setter
def user_id(self, value):
"""Setter for user_id property."""
self.logger.warn("Setting values on user_id will NOT update the remote Canvas instance.")
self._user_id = value
@property
def grader_id(self):
"""The id of the user who graded the submission. This will be null for submissions that haven't been graded yet. It will be a positive number if a real user has graded the submission and a negative number if the submission was graded by a process (e.g. Quiz autograder and autograding LTI tools). Specifically autograded quizzes set grader_id to the negative of the quiz id. Submissions autograded by LTI tools set grader_id to the negative of the tool id."""
return self._grader_id
@grader_id.setter
def grader_id(self, value):
"""Setter for grader_id property."""
self.logger.warn("Setting values on grader_id will NOT update the remote Canvas instance.")
self._grader_id = value
@property
def graded_at(self):
"""graded_at."""
return self._graded_at
@graded_at.setter
def graded_at(self, value):
"""Setter for graded_at property."""
self.logger.warn("Setting values on graded_at will NOT update the remote Canvas instance.")
self._graded_at = value
@property
def user(self):
"""The submissions user (see user API) (optional)."""
return self._user
@user.setter
def user(self, value):
"""Setter for user property."""
self.logger.warn("Setting values on user will NOT update the remote Canvas instance.")
self._user = value
@property
def late(self):
"""Whether the submission was made after the applicable due date."""
return self._late
@late.setter
def late(self, value):
"""Setter for late property."""
self.logger.warn("Setting values on late will NOT update the remote Canvas instance.")
self._late = value
@property
def assignment_visible(self):
"""Whether the assignment is visible to the user who submitted the assignment. Submissions where `assignment_visible` is false no longer count towards the student's grade and the assignment can no longer be accessed by the student. `assignment_visible` becomes false for submissions that do not have a grade and whose assignment is no longer assigned to the student's section."""
return self._assignment_visible
@assignment_visible.setter
def assignment_visible(self, value):
"""Setter for assignment_visible property."""
self.logger.warn("Setting values on assignment_visible will NOT update the remote Canvas instance.")
self._assignment_visible = value
@property
def excused(self):
"""Whether the assignment is excused. Excused assignments have no impact on a user's grade."""
return self._excused
@excused.setter
def excused(self, value):
"""Setter for excused property."""
self.logger.warn("Setting values on excused will NOT update the remote Canvas instance.")
self._excused = value
@property
def missing(self):
"""Whether the assignment is missing."""
return self._missing
@missing.setter
def missing(self, value):
"""Setter for missing property."""
self.logger.warn("Setting values on missing will NOT update the remote Canvas instance.")
self._missing = value
@property
def late_policy_status(self):
"""The status of the submission in relation to the late policy. Can be late, missing, none, or null."""
return self._late_policy_status
@late_policy_status.setter
def late_policy_status(self, value):
"""Setter for late_policy_status property."""
self.logger.warn("Setting values on late_policy_status will NOT update the remote Canvas instance.")
self._late_policy_status = value
@property
def points_deducted(self):
"""The amount of points automatically deducted from the score by the missing/late policy for a late or missing assignment."""
return self._points_deducted
@points_deducted.setter
def points_deducted(self, value):
"""Setter for points_deducted property."""
self.logger.warn("Setting values on points_deducted will NOT update the remote Canvas instance.")
self._points_deducted = value
@property
def seconds_late(self):
"""The amount of time, in seconds, that an submission is late by."""
return self._seconds_late
@seconds_late.setter
def seconds_late(self, value):
"""Setter for seconds_late property."""
self.logger.warn("Setting values on seconds_late will NOT update the remote Canvas instance.")
self._seconds_late = value
@property
def workflow_state(self):
"""The current state of the submission."""
return self._workflow_state
@workflow_state.setter
def workflow_state(self, value):
"""Setter for workflow_state property."""
self.logger.warn("Setting values on workflow_state will NOT update the remote Canvas instance.")
self._workflow_state = value
@property
def extra_attempts(self):
"""Extra submission attempts allowed for the given user and assignment."""
return self._extra_attempts
@extra_attempts.setter
def extra_attempts(self, value):
"""Setter for extra_attempts property."""
self.logger.warn("Setting values on extra_attempts will NOT update the remote Canvas instance.")
self._extra_attempts = value
@property
def anonymous_id(self):
"""A unique short ID identifying this submission without reference to the owning user. Only included if the caller has administrator access for the current account."""
return self._anonymous_id
@anonymous_id.setter
def anonymous_id(self, value):
"""Setter for anonymous_id property."""
self.logger.warn("Setting values on anonymous_id will NOT update the remote Canvas instance.")
self._anonymous_id = value
@property
def posted_at(self):
"""The date this submission was posted to the student, or nil if it has not been posted."""
return self._posted_at
@posted_at.setter
def posted_at(self, value):
"""Setter for posted_at property."""
self.logger.warn("Setting values on posted_at will NOT update the remote Canvas instance.")
self._posted_at = value
@property
def read_status(self):
"""The read status of this submission for the given user (optional). Including read_status will mark submission(s) as read."""
return self._read_status
@read_status.setter
def read_status(self, value):
"""Setter for read_status property."""
self.logger.warn("Setting values on read_status will NOT update the remote Canvas instance.")
self._read_status = value
| StarcoderdataPython |
import numpy as np
import sympy as sp
from scipy.misc import derivative
from prettytable import PrettyTable
import math
from math import *
def nuevosValoresa(ecua, derivadas, Ecuaciones, variables, var):
    """Apply one Newton step x_i <- x_i - f_i(x)/f_i'(x) to each variable.

    The steps are Gauss-Seidel style: ``variables`` is updated in place as
    each equation is processed, so later equations see the new values.

    Args:
        ecua: list of expression strings f_i, each assumed equal to zero.
        derivadas: list of expression strings for df_i/dx_i.
        Ecuaciones: number of equations to process.
        variables: dict {name: value}; mutated in place with the new values.
        var: list of variable names, var[i] being the variable of equation i.

    Returns:
        (valor_ini, func_numerica): the updated values and the residuals
        f_i evaluated at the pre-step point, in equation order.

    Note:
        Uses ``eval`` on the expression strings — never call this with
        untrusted input.
    """
    valor_ini = []
    func_numerica = []
    funcs = vars(math)  # expose math functions (sin, exp, pi, ...) to eval
    for i in range(Ecuaciones):
        funcion_eval = eval(ecua[i], funcs, variables)       # f_i at current point
        derivada_eval = eval(derivadas[i], funcs, variables)  # f_i' at current point
        # Newton update; keys are already strings, no f-string wrapping needed.
        nuevo_valor = variables[var[i]] - funcion_eval / derivada_eval
        variables[var[i]] = nuevo_valor
        valor_ini.append(nuevo_valor)
        func_numerica.append(funcion_eval)
    return valor_ini, func_numerica
def cambiarValores(dic_var, nuevos_valores, num_ecuaciones, var):
    """Write ``nuevos_valores[i]`` into ``dic_var`` under key ``var[i]``, in place.

    Args:
        dic_var: dict {name: value} to mutate.
        nuevos_valores: new values, aligned with ``var``.
        num_ecuaciones: how many leading pairs to copy.
        var: list of variable-name keys (already strings).

    Returns:
        The same (mutated) dict, for call-chaining convenience.
    """
    for i in range(num_ecuaciones):
        dic_var[var[i]] = nuevos_valores[i]  # keys are str; f-string wrapping removed
    return dic_var
def derivadaSimple(ecuaciones, variables, num_ecuaciones):
    """Return str(d f_i / d x_i) for each equation/variable pair.

    Each equation string ecuaciones[i] is differentiated symbolically with
    respect to its own variable variables[i]; the evaluated derivative is
    returned as a string suitable for later eval().
    """
    return [
        str(sp.Derivative(ecuaciones[i], sp.Symbol(variables[i]), evaluate=True))
        for i in range(num_ecuaciones)
    ]
def efunciones(var, valor, Ecuaciones):
    """Build the initial {variable_name: value} mapping.

    Args:
        var: list of variable names.
        valor: list of initial values, aligned with ``var``.
        Ecuaciones: number of leading pairs to include.

    Returns:
        dict mapping var[i] -> valor[i] for i in range(Ecuaciones).
    """
    return {var[i]: valor[i] for i in range(Ecuaciones)}
def newtonModificado(ecua, var, valor, Ecuaciones, emax, N=50):
    """Solve a nonlinear system with the modified Newton-Raphson method.

    Equation ecua[i] is iterated on its own variable var[i] with the
    one-dimensional step x_i <- x_i - f_i/f_i'. Prints the symbolic partial
    derivatives, the per-iteration table and the final solution.

    Args:
        ecua: equation strings, each assumed equal to zero.
        var: variable names, one per equation.
        valor: initial guesses, aligned with ``var``.
        Ecuaciones: number of equations/variables.
        emax: stop when the largest residual |f_i| drops below this tolerance.
        N: iteration budget; at most N-1 sweeps are performed.
    """
    # Table header: Iteracion, then one (f_i=0, x_i) column pair per variable, then Error.
    encabezados = ["Iteracion"]
    # enumerate instead of var.index(i): index() returns the FIRST match and
    # mislabels columns when two variables share a name.
    for idx, nombre in enumerate(var):
        encabezados.append(f'f{idx}=0')
        encabezados.append(nombre)
    encabezados.append("Error")
    tabla = PrettyTable(encabezados)
    tabla.title = "METODO DE NEWTON RHAPSON MULTIVARIABLE MODIFICADO"

    dicc_valores = efunciones(var, valor, Ecuaciones)       # {name: current value}
    derv_parciales = derivadaSimple(ecua, var, Ecuaciones)  # df_i/dx_i as strings

    for k in range(1, N):  # NOTE: runs at most N-1 sweeps (kept for compatibility)
        variables = cambiarValores(dicc_valores, valor, Ecuaciones, var)
        nuevos_Valores, funcion_evaluada = nuevosValoresa(
            ecua, derv_parciales, Ecuaciones, variables, var)
        # Largest residual; equivalent to the original abs(max)/abs(min) comparison.
        error = max(abs(v) for v in funcion_evaluada)
        # Converged: stop before logging this sweep (original behavior).
        if error < emax or error == 0:
            break
        contenido = [k]
        for i in range(Ecuaciones):
            contenido.append("{0:.7f}".format(funcion_evaluada[i]))
            contenido.append("{0:.7f}".format(nuevos_Valores[i]))
        contenido.append("{0:.7f}".format(error))
        tabla.add_row(contenido)
        valor = nuevos_Valores

    # Table of the symbolic partial derivatives used by the iteration.
    # (The original wrapped the list in np.array(...).T; .T is a no-op on a
    # 1-D array, so the plain list is passed directly.)
    derivadas_p = PrettyTable()
    derivadas_p.title = "Derivadas parciales"
    derivadas_p.add_row(list(derv_parciales))
    derivadas_p.field_names = [f'df{j}/d{var[j]}' for j in range(Ecuaciones)]
    print(derivadas_p)
    print(tabla)
    print(f'Solucion del sistema: ')
    for i in range(0, Ecuaciones):
        print(f' {var[i]} = {"{0:.4f}".format(valor[i])}')
""" ecua = ['x**2-10*x+y**2+8', 'x*y**2+x-10*y+8']
var = ['x', 'y']
valori = [0.0, 0.0]
Ecuaciones = 2 """
""" ecua = ['x**2+x-y**2-1', 'y-sin(x**2)']
var = ['x', 'y']
valori = [0.0, 0.0]
Ecuaciones = 2 """
""" ecua = ['x**2+y**2+z**2-9', 'x*y*z-1', 'x+y-z**2']
var = ['x', 'y', 'z']
valori = [2.5, 0.2, 1.6]
Ecuaciones = 3 """
""" #no converge
ecua = ['x**2-625*y**2', '3*x-cos(y*z)-0.5', 'exp(-x*y)+20*z+(10*pi-3)/3']
var = ['x', 'y', 'z']
valori = [1, 1, 1]
Ecuaciones = 3
"""
""" ecua = ['3*x-cos(y*z)-0.5', 'x**2-625*y**2', 'exp(-x*y)+20*z+(10*pi-3)/3']
var = ['x', 'y', 'z']
valori = [1, 0.2, 1]
Ecuaciones = 3
newtonModificado(ecua, var, valori, Ecuaciones, 1e-3) """
| StarcoderdataPython |
# python3 -m annotator.panoptic_segmenter
from detectron2.data import MetadataCatalog
from detectron2.config import get_cfg
from detectron2.engine import DefaultPredictor
from detectron2 import model_zoo
class PanopticSegmenter(object):
    """Panoptic segmentation helper wrapping a detectron2 ``DefaultPredictor``.

    On construction it loads the COCO panoptic FPN R-101 3x model (CPU only)
    and keeps the predictor ready for repeated inference calls.
    """

    def __init__(self, *args):
        super(PanopticSegmenter, self).__init__(*args)
        self.configure()
        self.predictor = DefaultPredictor(self.cfg)

    def configure(self):
        """Prepare ``self.cfg`` and ``self.meta_data`` for the COCO panoptic model."""
        self.cfg = get_cfg()
        cfg = self.cfg  # local alias; all mutations below land on self.cfg
        cfg.MODEL.DEVICE = 'cpu'
        cfg.merge_from_file(model_zoo.get_config_file(
            "COCO-PanopticSegmentation/panoptic_fpn_R_101_3x.yaml"))
        cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(
            "COCO-PanopticSegmentation/panoptic_fpn_R_101_3x.yaml")
        self.meta_data = MetadataCatalog.get(cfg.DATASETS.TRAIN[0])

    def predict_segments(self, img):
        """Run inference on ``img`` and return the "panoptic_seg" output."""
        outputs = self.predictor(img)
        return outputs["panoptic_seg"]
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.