text string | size int64 | token_count int64 |
|---|---|---|
from django.contrib.auth.models import User
from rest_framework import generics
from .serializers import UserSerializer
class UserList(generics.ListCreateAPIView):
    """API endpoint: GET lists all users, POST creates a new one."""
    # Expose every Django auth user, serialized via UserSerializer.
    queryset = User.objects.all()
    serializer_class = UserSerializer
| 240 | 66 |
# Copyright 2017 Pavlo Penenko
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import metashade.rtsl.profile as rtsl
import metashade.clike.struct as struct
from . import data_types
from . import samplers
import sys, inspect
class UniformBuffer:
    """Context manager that emits an HLSL ``cbuffer`` block.

    Entering writes the ``cbuffer`` header plus opening brace and pushes one
    indentation level on the owning generator; exiting pops the indentation
    and writes the closing brace.
    """
    def __init__(self, sh, register : int, name : str = None):
        # `sh` is the owning shader generator; it supplies _emit() and the
        # indentation stack used below.
        self._sh = sh
        self._name = name
        self._register = register

    def __enter__(self):
        gen = self._sh
        gen._emit('cbuffer')
        if self._name is not None:
            gen._emit(' ')
            gen._emit(self._name)
        gen._emit(' : register(b{})\n{{\n'.format(self._register))
        gen._push_indent()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Close the cbuffer body regardless of exceptions in the `with` block.
        self._sh._pop_indent()
        self._sh._emit('};\n\n')
class Generator(rtsl.Generator):
    """HLSL generator profile: uniforms, constant buffers, and combined
    texture/sampler bindings on top of the generic RTSL generator."""
    # Overridden to True by pixel-shader specializations.
    _is_pixel_shader = False

    class _UsedRegisterSet(set):
        """Set of register slots already bound, used to detect collisions."""
        def __init__(self, category : str):
            # Category label ('Texture', 'Sampler', ...) used in error messages.
            self._category = category

        def check_candidate(self, register : int):
            """Raise if the register is negative or already taken.

            The caller is responsible for add()-ing the register once the
            binding actually succeeds.
            """
            if register < 0:
                raise RuntimeError('Invalid register value')
            if register in self:
                raise RuntimeError(self._category + ' register already in use')

    def __init__(self, file_):
        super(Generator, self).__init__(file_)
        # semantic string -> uniform value, meant to reject duplicate semantics.
        # NOTE(review): nothing visible in this block ever inserts into this
        # dict, so the duplicate-semantic check in uniform() can never fire —
        # confirm whether an insert is missing.
        self._uniforms_by_semantic = dict()
        self._used_uniform_buffer_registers = \
            self.__class__._UsedRegisterSet('Uniform buffer')
        self._used_texture_registers = \
            self.__class__._UsedRegisterSet('Texture')
        self._used_sampler_registers = \
            self.__class__._UsedRegisterSet('Sampler')

    def uniform_buffer(self, register : int, name : str = None):
        """Return a context manager emitting ``cbuffer ... : register(bN)``.

        NOTE(review): the register is validated here but never add()-ed to
        _used_uniform_buffer_registers, so duplicate cbuffer registers are
        not actually rejected — confirm whether that is intentional.
        """
        self._used_uniform_buffer_registers.check_candidate(register)
        return UniformBuffer(self, register = register, name = name)

    # TODO: registers, packoffset
    def uniform(
        self,
        name : str,
        dtype,
        semantic : str = None,
        annotations = None
    ):
        """Define a global uniform named `name` of type `dtype`, with an
        optional HLSL semantic and annotations; emits its declaration."""
        self._check_public_name(name)
        if not self._check_global_scope():
            raise RuntimeError(
                "Uniforms can only be defined at the global scope"
            )
        if semantic is not None:
            existing = self._uniforms_by_semantic.get(semantic)
            if existing is not None:
                raise RuntimeError(
                    "Can't define uniform '{name}' with semantic '{semantic}' "
                    "because uniform '{existing_name}' already uses that "
                    "semantic.".format(
                        name = name,
                        semantic = semantic,
                        existing_name = existing._name
                    )
                )
        value = dtype() #TODO: make it immutable
        self._set_global(name, value)
        self._emit_indent()
        value._define(self, name, semantic, annotations = annotations)
        self._emit(';\n')

    def combined_sampler_2d(
        self,
        texture_name : str, texture_register : int,
        sampler_name : str, sampler_register : int
    ):
        """Define a global Texture2D/SamplerState pair bound to the given
        t/s registers; both names become globals on the generator."""
        self._check_public_name(texture_name)
        self._check_public_name(sampler_name)
        if not self._check_global_scope():
            raise RuntimeError(
                "Uniform textures and samplers "
                "can only be defined at the global scope"
            )
        # Validate both registers before binding either, so a failure
        # leaves no partially-registered state.
        self._used_texture_registers.check_candidate(texture_register)
        self._used_sampler_registers.check_candidate(sampler_register)
        texture = samplers.Texture2d(self, texture_name, texture_register)
        self._set_global(texture_name, texture)
        self._used_texture_registers.add(texture_register)
        sampler = samplers.Sampler(
            self, sampler_name, sampler_register, texture
        )
        self._set_global(sampler_name, sampler)
        self._used_sampler_registers.add(sampler_register)

    def vs_input(self, name):
        # NOTE(review): `stage_interface` is not among this module's visible
        # imports — confirm it is imported elsewhere, otherwise these three
        # methods raise NameError at call time.
        return stage_interface.VsInputDef(self, name)

    def vs_output(self, name):
        return stage_interface.VsOutputDef(self, name)

    def ps_output(self, name):
        return stage_interface.PsOutputDef(self, name)
# Reference all the data types from the generator class: every public class
# defined in the data_types module becomes an attribute of Generator, so user
# code can reach the types directly through a generator instance.
for name, cls in inspect.getmembers(
    sys.modules[data_types.__name__],
    # Keep only classes defined in data_types itself (not re-exports) whose
    # names are public.
    lambda member: (inspect.isclass(member)
        and member.__module__ == data_types.__name__
        and not member.__name__.startswith('_')
    )):
    setattr(Generator, name, cls)
| 5,093 | 1,447 |
#!/usr/bin/python2.6
import os
import sys
import time
from optparse import OptionParser
from google.refine import refine
from clean_ads_affiliations import clean_ads_affs
# This script targets Python 2.6+ (it still uses print statements).
assert sys.hexversion >= 0x02060000
# Google Refine server endpoint used for project creation.
SERVER = 'http://adsx.cfa.harvard.edu:3333'
def create_refine_project(path, name, pretend=False, verbose=0):
"""
Creates a project in google Refine and loads the affiliations.
"""
input_file = os.path.abspath(path)
msg('Create a file that we can upload to Refine.', verbose)
new_input_file = clean_ads_affs(input_file, verbose)
msg('Upload to Refine.', verbose)
project_name = 'Astronomy affiliations (%s) (created %s)' % (os.path.basename(path).replace('.reversed', '.merged'), time.asctime())
print 'Creating project "%s".' % project_name
if not pretend:
r = refine.Refine(SERVER)
project = r.new_project(project_file=new_input_file,
project_name=project_name,
split_into_columns=True,
separator='\t',
ignore_initial_non_blank_lines=0,
header_lines=1,
skip_initial_data_rows=0,
limit=0,
guess_value_type=False,
ignore_quotes=False)
msg('Done with success.', verbose)
return project.project_id
def main():
    """Parse command-line options and kick off Refine project creation."""
    parser = OptionParser()
    option_specs = [
        (("-i", "--input"),
         dict(dest="input_file", help="create Refine project from FILE", metavar="FILE")),
        (("-t", "--title"),
         dict(dest="title", help="create Refine project with TITLE", metavar="TITLE")),
        (("--pretend",),
         dict(dest="pretend", action="store_true", default=False,
              help="do not upload affiliations")),
    ]
    for flags, kwargs in option_specs:
        parser.add_option(*flags, **kwargs)
    opts, _ = parser.parse_args()
    create_refine_project(opts.input_file, opts.title, opts.pretend, 1)
def msg(message, verbose):
    # Print a progress message only when verbose output was requested.
    if verbose:
        print message
if __name__ == '__main__':
    main()
| 1,981 | 623 |
from django.http import JsonResponse, Http404
from django.views.decorators.csrf import csrf_exempt
from exponent_server_sdk import PushClient, PushMessage, DeviceNotRegisteredError
from .models import Group, User, Event
import hashlib, uuid
def getParams(request, tags):
    """Extract the POSTed value for each key in *tags*, preserving order.

    Raises KeyError (-> 500) when a required key is missing.
    """
    print(request.POST)  # debug aid: log the incoming form payload
    values = []
    for tag in tags:
        values.append(request.POST[tag])
    return values
def getHash(name, pwd):
    """Return the raw SHA-256 digest of the concatenated name and password.

    NOTE(review): this is an unsalted hash of name+password; changing the
    scheme would invalidate stored hashes, so it is kept as-is.
    """
    combined = "{}{}".format(name, pwd)
    return hashlib.sha256(combined.encode()).digest()
# Create your views here.
@csrf_exempt
def getUid(request):#done, tested
    # Look up a user's uid by name.
    # NOTE(review): this filters on pk while the caller supplies a *name*;
    # other views use User.objects.get(name=...) — confirm the User model's
    # primary key is its name, otherwise this lookup is wrong.
    [name] = getParams(request, ['name'])
    q = User.objects.filter(pk=name)
    if len(q) > 0:
        return JsonResponse({'uid': q[0].uid})
    else:
        raise Http404("you done fked up")
@csrf_exempt
def joinOpenGroup(request):
    """Add the requesting user to a public or private group."""
    [uid, gid] = getParams(request, ['uid', 'gid'])
    group = Group.objects.get(pk=gid)
    user = User.objects.get(pk=uid)
    if group.groupType in ('public', 'private'):
        group.members.add(user)
        group.save()  # m2m add() already persists; save kept for parity
        return JsonResponse({'success': 'true'})
    else:
        raise Http404("Invalid group or invalid user!")
@csrf_exempt
def addEvent(request):#done, tested
    # Create an event owned by uid, attach it to group gid, and (for
    # public/private groups) push-notify every member via Expo.
    [uid, gid, name, desc, loc] = getParams(request, ['uid', 'gid', 'name', 'desc', 'loc'])
    newEvent = Event(name=name, eid=str(uuid.uuid4()), desc=desc, loc=loc, owner=User.objects.get(pk=uid))
    newEvent.save()
    q = Group.objects.get(pk=gid)
    q.events.add(newEvent)
    q.save()
    if q.groupType == 'private' or q.groupType == 'public':
        # ttl=3: Expo may drop the notification if undelivered after 3 seconds.
        responses = PushClient().publish_multiple([PushMessage(to=u.expoPushToken,
                                                               title='{} happening at {}!'.format(name, loc),
                                                               body=newEvent.desc,
                                                               ttl=3,
                                                               priority='high',
                                                               sound='default') for u in q.members.all()])
        # Clear tokens for devices Expo reports as no longer registered.
        # NOTE(review): pairing responses[i] with q.members.all()[i] assumes
        # both member queries return the same ordering — confirm the model
        # declares a stable ordering, otherwise the wrong token is cleared.
        for i in range(len(responses)):
            try:
                responses[i].validate_response()
            except DeviceNotRegisteredError:
                u = q.members.all()[i]
                u.expoPushToken = ''
                u.save()
    return JsonResponse({'eid': newEvent.eid})
@csrf_exempt
def deleteEvent(request):
    """Delete an event when the requester owns the event or its group.

    Expects POST params: uid, eid.
    """
    [uid, eid] = getParams(request, ['uid', 'eid'])
    q = Event.objects.get(pk=eid)
    # NOTE(review): assumes every event belongs to exactly one group; an
    # orphaned event would raise IndexError here — confirm the invariant.
    g = q.group_events.all()[0]
    if uid == q.owner.uid or uid == g.owner.uid:
        g.events.remove(q)
        q.delete()
        # BUG FIX (the original "#done, BUGGY"): it called q.save() right
        # after q.delete(); saving a deleted instance re-INSERTs the row in
        # Django, so events were never actually removed.
        return JsonResponse({'success': 'true'})
    else:
        raise Http404("Restricted access!")
@csrf_exempt
def getGroupList(request):
    """Return the gids of every group the user belongs to."""
    [uid] = getParams(request, ['uid'])
    member_of = User.objects.get(pk=uid).group_members.all()
    return JsonResponse({'groupList': [group.gid for group in member_of]})
@csrf_exempt
def getGroupInfo(request):
    """Return name, type, members, and owner for one group."""
    [gid] = getParams(request, ['gid'])
    group = Group.objects.get(pk=gid)
    info = {
        'gid': gid,
        'name': group.name,
        'type': group.groupType,
        'memberList': [member.uid for member in group.members.all()],
        'owner': group.owner.uid,
        'unconfirmed': 0,
    }
    return JsonResponse(info)
@csrf_exempt
def getEventList(request):
    """Return the eids of every event attached to a group."""
    [gid] = getParams(request, ['gid'])
    group = Group.objects.get(gid=gid)
    return JsonResponse({'eventList': [event.eid for event in group.events.all()]})
@csrf_exempt
def getEventInfo(request):
    """Return event details plus whether the requester may edit it."""
    [eid, uid] = getParams(request, ['eid', 'uid'])
    event = Event.objects.get(pk=eid)
    # Short-circuit: only query the owning group when uid isn't the event owner.
    is_owner = uid == event.owner.uid or uid == event.group_events.all()[0].owner.uid
    info = {
        'eid': eid,
        'name': event.name,
        'desc': event.desc,
        'loc': event.loc,
        'status': event.confirmed,
        'initTime': event.initTime.strftime('%b-%d %I:%M %p'),
        'owner': event.owner.uid,
        'isOwner': is_owner,
    }
    return JsonResponse(info)
@csrf_exempt
def register(request):
    """Create a new user; 404 if the name is taken. Returns the new uid."""
    [name, pwd] = getParams(request, ['name', 'pwd'])
    # IMPROVEMENT: exists() asks the database a boolean question instead of
    # fetching and counting rows (len(filter(...)) > 0).
    if User.objects.filter(name=name).exists():
        raise Http404("Try another name!")
    newUser = User(name=name, uid=str(uuid.uuid4()), pwdHash=getHash(name, pwd))
    newUser.save()
    return JsonResponse({'uid': newUser.uid})
@csrf_exempt
def login(request):
    """Authenticate by name/password; returns the uid on success.

    Also revokes this device's Expo push token from any *other* account, so
    notifications stop going to a previously logged-in user on this device.
    """
    [name, pwd] = getParams(request, ['name', 'pwd'])
    u = User.objects.get(name=name)
    if u.pwdHash == getHash(name, pwd):
        # Only scrub when there is a real token; '' would "match" every
        # token-less account to no effect.
        if u.expoPushToken:
            for otheruser in User.objects.all():
                if otheruser.uid != u.uid and otheruser.expoPushToken == u.expoPushToken:
                    otheruser.expoPushToken = ''
                    # BUG FIX: the original cleared the token in memory but
                    # never called save(), so the revocation was never
                    # persisted.
                    otheruser.save()
        return JsonResponse({'uid': u.uid})
    else:
        raise Http404("Restricted access!")
@csrf_exempt
def createGroup(request):
    """Create a new group and enrol its owner as the first member."""
    [uid, name, gtype] = getParams(request, ['uid', 'name', 'type'])
    creator = User.objects.get(uid=uid)
    group = Group(name=name, gid=str(uuid.uuid4()), owner=creator, groupType=gtype)
    group.save()
    group.members.add(creator)
    group.save()
    return JsonResponse({'gid': group.gid})
@csrf_exempt
def removeMember(request):
    """Remove a member from a group.

    Allowed for the group owner (m_uid == owner) or for self-removal
    (m_uid == uid).
    """
    [m_uid, uid, gid] = getParams(request, ['m_uid', 'uid', 'gid'])
    # IMPROVEMENT: the original fetched the same Group twice (once for the
    # permission check, once for the removal); fetch it once.
    group = Group.objects.get(pk=gid)
    if m_uid == group.owner.uid or m_uid == uid:
        group.members.remove(User.objects.get(pk=uid))
        group.save()
        return JsonResponse({'status': 'success'})
    else:
        raise Http404("Restricted access!")
@csrf_exempt
def addMember(request):
    """Add user uid to group gid; only the group owner (m_uid) may do so."""
    [m_uid, uid, gid] = getParams(request, ['m_uid', 'uid', 'gid'])
    # IMPROVEMENT: the original fetched the same Group twice; fetch it once.
    group = Group.objects.get(pk=gid)
    if m_uid == group.owner.uid:
        group.members.add(User.objects.get(pk=uid))
        group.save()
        return JsonResponse({'status': 'success'})
    else:
        raise Http404("Restricted access!")
@csrf_exempt
def deleteGroup(request):
    """Delete a group; only its owner may do so."""
    [gid, uid] = getParams(request, ['gid', 'uid'])
    group = Group.objects.get(pk=gid)
    if uid != group.owner.uid:
        raise Http404("Restricted access!")
    group.delete()
    return JsonResponse({'status': 'success'})
@csrf_exempt
def getUserInfo(request):
    """Return the display name for a user id."""
    [uid] = getParams(request, ['uid'])
    return JsonResponse({'name': User.objects.get(pk=uid).name})
@csrf_exempt
def confirmEvent(request):#done, tested
    # Record uid's confirmation of event eid; on the very first confirmation
    # of a public group's event, push-notify the whole group.
    [uid, eid] = getParams(request, ['uid', 'eid'])
    e = Event.objects.get(pk=eid)
    if len(e.confirmedMembers.filter(pk=uid)) == 0:
        e.confirmed += 1
        e.confirmedMembers.add(User.objects.get(pk=uid))
        e.save()
        if e.confirmed == 1:
            # First confirmer triggers the broadcast.
            g = e.group_events.all()[0]
            if g.groupType == 'public':
                responses = PushClient().publish_multiple([PushMessage(to=u.expoPushToken,
                                                                       title="You'll never believe what you're missing out on!",
                                                                       body="This is a test notification",
                                                                       ttl=30,
                                                                       priority='high',
                                                                       sound='default') for u in g.members.all()])
                # NOTE(review): pairing responses[i] with g.members.all()[i]
                # assumes the second members query returns the same ordering
                # as the comprehension above — confirm the model declares a
                # stable ordering, otherwise the wrong user's token is cleared.
                for i in range(len(responses)):
                    try:
                        responses[i].validate_response()
                    except DeviceNotRegisteredError:
                        u = g.members.all()[i]
                        u.expoPushToken = ''
                        u.save()
        return JsonResponse({'status': 'success'})
    else:
        # Each user may confirm an event at most once.
        raise Http404("Multiple confirmation")
@csrf_exempt
def search(request):
    """Substring search over public group names; returns matching gids."""
    [query] = getParams(request, ['q'])
    # IMPROVEMENT: filter the group type in the database instead of loading
    # every group. The (case-sensitive) substring match stays in Python to
    # preserve the original semantics exactly — name__contains
    # case-sensitivity varies by database backend.
    public_groups = Group.objects.filter(groupType='public')
    return JsonResponse({'list': [g.gid for g in public_groups if query in g.name]})
@csrf_exempt
def updateToken(request):
    """Store a new Expo push token for the given user."""
    [token, uid] = getParams(request, ['token', 'uid'])
    user = User.objects.get(uid=uid)
    # NOTE(review): assumes expoPushToken is never None (concatenation would
    # raise TypeError) — confirm the model default is ''.
    print("before: "+user.expoPushToken)
    user.expoPushToken = token
    user.save()
    print("after: "+user.expoPushToken)
    return JsonResponse({'status': 'success'})
"""Implement this function across different project.
----ZY.2020.Oct.
"""
import os
from easydict import EasyDict
from torchvision.utils import save_image
from logging import Logger
from subprocess import call
def create_save_folders(root_folder, folder_list: list):
    """Ensure each named sub-folder exists under *root_folder*.

    :param root_folder: The root folder.
    :param folder_list: Names (or relative paths) of sub-folders to create;
        already-existing folders are left untouched.
    """
    for sub_folder in folder_list:
        target = os.path.join(root_folder, sub_folder)
        os.makedirs(target, exist_ok=True)
def unet_vis(
    in_batch: dict, out_batch: tuple, training: bool, epoch: int, step: int, options: EasyDict, logger: Logger
):
    """The visualization function of UNet.

    Saves each sample's input image and loss value for the current step under
    ``<options.vis.dir>/{train,val}_vis/epoch-XXXX/`` and periodically shells
    out to an external script to build an HTML gallery.

    :param in_batch: The input batch; must contain "input_image".
    :param out_batch: The output batch; assumed to be a tuple whose first two
        elements are dicts (e.g. outputs and losses) — TODO confirm.
    :param training: Whether it is training stage.
    :param epoch: The epoch number, starting with 1.
    :param step: The step; assumed to start at 1 within the run — TODO confirm.
    :param options: The options for visualization.
    :param logger: The logger.
    """
    # Folders: separate sub-trees for training and validation visualization.
    if training:
        vis_dir = os.path.join(options.vis.dir, "train_vis")
    else:
        vis_dir = os.path.join(options.vis.dir, "val_vis")
    out_dir = os.path.join(vis_dir, "epoch-{:04d}".format(epoch))
    # Customize the list of folders.
    dir_list = ["input_image", "info"]
    # Create the list folders.
    create_save_folders(out_dir, dir_list)
    # The list of keys in the input/output batch; a *list* entry (["loss"])
    # is written as text into the matching "info" folder below, while a plain
    # string entry is saved as an image.
    key_list = ["input_image", ["loss"]]
    batch = {}
    batch.update(in_batch)
    batch.update(out_batch[0])
    batch.update(out_batch[1])
    # Get the batch size.
    if training:
        batch_size = options.train.batch_size
    else:
        batch_size = options.test.batch_size
    # Get number of steps each epoch (integer division drops any final
    # partial batch).
    if training:  # Update the number of training samples in options.
        num_step_each_epoch = options.dataset.len_train // (options.train.batch_size * options.num_gpus)
    else:  # Update the number of validation samples in options.
        num_step_each_epoch = options.dataset.len_test // (options.test.batch_size * options.num_gpus)
    # Save images and info, one file per sample in the batch.
    for i in range(batch_size):
        batch_id = step % num_step_each_epoch
        fn = "data-{:04d}.png".format(batch_id * batch_size + i)  # file name.
        for key, folder in zip(key_list, dir_list):
            if folder == "info":
                # Write each loss entry as a "name: value" line.
                with open(os.path.join(out_dir, folder, fn.replace('.png', '.txt')), 'w') as file:
                    for loss_item in key:
                        file.write("{}: {}\n".format(loss_item, batch[loss_item][i].item()))
            else:
                save_image(batch[key][i], os.path.join(out_dir, folder, fn))
    # Get the KC step interval.
    if training:
        kc_steps = options.train.kc_steps
    else:
        kc_steps = options.test.kc_steps
    # Generate the HTML file at epoch wrap-around or within kc_steps of it.
    mod_step = step % num_step_each_epoch  # step starts at 1.
    extra_step = (mod_step + kc_steps) / num_step_each_epoch
    if mod_step == 0 or extra_step > 1.0:
        # Visualize HTML.
        logger.info("Generating html visualization ...")
        sublist = ",".join(dir_list)
        script_path = os.path.join(os.path.abspath(os.getcwd()), "utils", "gen_html_hierarchy_local.py")
        if not os.path.exists(script_path):
            raise ValueError("{} this python script does not exist!".format(script_path))
        # NOTE(review): shell=True with interpolated paths is safe only while
        # out_dir/script_path contain no shell metacharacters — confirm.
        cmd = "cd {} && python {} . 10 htmls {} {} > /dev/null".format(
            out_dir, script_path, sublist, sublist
        )
        call(cmd, shell=True)
        logger.info("DONE")
| 3,572 | 1,156 |
from nose.tools import assert_equals, assert_true
from wikimetrics.metrics import metric_classes
from wikimetrics.models import (
MetricReport
)
from ..fixtures import DatabaseTest
class MetricReportTest(DatabaseTest):
    """Integration tests for MetricReport against the NamespaceEdits metric."""

    def setUp(self):
        DatabaseTest.setUp(self)
        self.common_cohort_1()

    def _namespace_edits_report(self, start_date, end_date):
        """Build a MetricReport over the first three editors of the cohort."""
        metric = metric_classes['NamespaceEdits'](
            name='NamespaceEdits',
            namespaces=[0, 1, 2],
            start_date=start_date,
            end_date=end_date,
        )
        user_ids = [self.editors[i].user_id for i in range(3)]
        return MetricReport(metric, self.cohort.id, user_ids, 'wiki')

    def test_basic_response(self):
        # The fixture gives editor 0 exactly two edits in this window.
        report = self._namespace_edits_report(
            '2013-01-01 00:00:00', '2013-01-02 00:00:00'
        )
        result = report.run()
        assert_equals(result[self.editor(0)]['edits'], 2)

    def test_repr(self):
        report = self._namespace_edits_report(
            '2013-05-01 00:00:00', '2013-09-01 00:00:00'
        )
        assert_true(str(report).find('MetricReport') >= 0)
| 1,465 | 501 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name="clut2dtstyle",
license="Unlicense",
version="0.1",
author="Manu Mannattil",
author_email="manu.mannattil@gmail.com",
description="Script to convert Hald CLUTs to darktable styles",
py_modules=["clut2dtstyle"],
install_requires=["numpy>=1.11.0"],
classifiers=[
"License :: Public Domain",
"Programming Language :: Python :: 3",
"Topic :: Multimedia :: Graphics"
],
entry_points="""
[console_scripts]
clut2dtstyle=clut2dtstyle:main
"""
)
| 615 | 211 |
import meshzoo
import numpy as np
import xarray as xr
import xugrid
def transform(vertices, minx, maxx, miny):
    """
    Linearly rescale 2-D vertices so x spans [minx, maxx].

    The x:y aspect ratio is preserved, and y is shifted to start at miny.
    Returns a new (N, 2) array; the input is not modified.
    """
    x, y = vertices.T
    xmin, xmax = x.min(), x.max()
    ymin = y.min()
    span_x = xmax - xmin
    span_y = y.max() - ymin
    scaled_dx = maxx - minx
    # Same x-scale applied to y keeps the aspect ratio.
    scaled_dy = span_y / span_x * scaled_dx
    new_x = (x - xmin) * scaled_dx / span_x + minx
    new_y = (y - ymin) * scaled_dy / span_y + miny
    return np.column_stack([new_x, new_y])
def disk():
    """Build an example unstructured disk mesh as a UgridDataset, with a
    smooth synthetic field sampled at the nodes, faces, and edges."""
    def function_z(x, y):
        """Synthetic smooth test field, rescaled to [0, 10].

        from https://matplotlib.org/stable/gallery/images_contours_and_fields/tricontour_smooth_user.html
        """
        r1 = np.sqrt((0.5 - x) ** 2 + (0.5 - y) ** 2)
        theta1 = np.arctan2(0.5 - x, 0.5 - y)
        r2 = np.sqrt((-x - 0.2) ** 2 + (-y - 0.2) ** 2)
        theta2 = np.arctan2(-x - 0.2, -y - 0.2)
        z = -(
            2 * (np.exp((r1 / 10) ** 2) - 1) * 30.0 * np.cos(7.0 * theta1)
            + (np.exp((r2 / 10) ** 2) - 1) * 30.0 * np.cos(11.0 * theta2)
            + 0.7 * (x ** 2 + y ** 2)
        )
        zmin = z.min()
        zmax = z.max()
        # Normalize so values span [0, 10].
        return (zmax - z) / (zmax - zmin) * 10.0

    # Triangulated disk, rescaled so x spans [0, 10] (aspect preserved).
    vertices, triangles = meshzoo.disk(6, 8)
    vertices = transform(vertices, 0.0, 10.0, 0.0)
    grid = xugrid.Ugrid2d(
        node_x=vertices[:, 0],
        node_y=vertices[:, 1],
        fill_value=-1,
        face_node_connectivity=triangles,
    )
    # Sample the field on every mesh element type.
    ds = xr.Dataset()
    ds["node_z"] = xr.DataArray(
        data=function_z(*grid.node_coordinates.T),
        dims=[grid.node_dimension],
    )
    ds["face_z"] = xr.DataArray(
        data=function_z(*grid.face_coordinates.T),
        dims=[grid.face_dimension],
    )
    ds["edge_z"] = xr.DataArray(
        data=function_z(*grid.edge_coordinates.T),
        dims=[grid.edge_dimension],
    )
    return xugrid.UgridDataset(ds, grid)
| 1,903 | 812 |
# Generated by Django 3.2.5 on 2021-07-19 20:22
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: adds the ResetDay model.

    A ResetDay pins a calendar date to a specific Day; the (calendar, date)
    pair is unique.
    """

    dependencies = [
        ('calendar_generator', '0005_alter_calendar_options'),
    ]

    operations = [
        migrations.CreateModel(
            name='ResetDay',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date', models.DateField()),
                # Deleting a calendar cascades to its reset days...
                ('calendar', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='calendar_generator.calendar')),
                # ...while the referenced Day is left untouched (DO_NOTHING).
                ('day', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='calendar_generator.day')),
            ],
            options={
                'unique_together': {('calendar', 'date')},
            },
        ),
    ]
| 898 | 272 |
import ckan.plugins as plugins
import ckan.model as model
import ckan.logic as logic
import ckan.plugins.toolkit as toolkit
import ckan.lib.plugins as plugs
from pylons import c
NotFound = logic.NotFound
get_action = logic.get_action
class GroupPlugin(plugins.SingletonPlugin, plugs.DefaultGroupForm):
    """CKAN group-form plugin that stores a 'private' flag as a group extra
    and annotates groups with their owner before rendering."""
    plugins.implements(plugins.IGroupForm, inherit=False)
    plugins.implements(plugins.interfaces.IGroupController, inherit=True)

    def before_view(self, group):
        # Attach the owning admin user (group pages only) and a display flag
        # that reflects whether a user is logged in.
        if c.controller == 'group':
            group['owner'] = group_owner(group)
        if c.userobj and c.userobj.id:
            group['display'] = True
        else:
            group['display'] = False
        return group

    def group_types(self):
        # This form handles plain groups (not organizations).
        return ['group']

    def is_fallback(self):
        # Act as the default group form when no more specific plugin matches.
        return True

    def form_to_db_schema(self):
        schema = super(GroupPlugin, self).form_to_db_schema()
        schema = self._modify_group_schema(schema)
        return schema

    def db_to_form_schema(self):
        # NOTE(review): this calls super().form_to_db_schema(), not
        # db_to_form_schema() — looks like a copy-paste slip; confirm intent
        # before changing, as CKAN falls back to default behaviour when
        # db_to_form_schema returns nothing.
        schema = super(GroupPlugin, self).form_to_db_schema()
        _convert_from_extras = toolkit.get_converter('convert_from_extras')
        _ignore_missing = toolkit.get_validator('ignore_missing')
        _boolean = toolkit.get_validator('boolean_validator')
        default_validators = [_convert_from_extras, _ignore_missing, _boolean]
        schema.update({
            'private': default_validators
        })
        return schema

    def _modify_group_schema(self, schema):
        # Import core converters and validators; 'private' round-trips through
        # the group's extras.
        _convert_to_extras = toolkit.get_converter('convert_to_extras')
        _ignore_missing = toolkit.get_validator('ignore_missing')
        _boolean = toolkit.get_validator('boolean_validator')
        default_validators = [_ignore_missing, _boolean, _convert_to_extras]
        schema.update({
            'private': default_validators
        })
        return schema
def group_owner(group):
    """Resolve a group's owning (admin) user for display purposes.

    Returns ``{'name': ..., 'link': ...}``; falls back to placeholder values
    when no admin can be resolved.
    """
    context = {
        'model': model,
        'session': model.Session,
        'user': c.user or c.author,
        'for_view': True,
    }
    admins = logic.get_action('member_list')(
        context,
        {'id': group.get('name'), 'object_type': 'user', 'capacity': 'admin'})
    if admins and isinstance(admins, list) and admins[0][0]:
        user = logic.get_action('user_show')(context, {'id': admins[0][0]})
        return {'name': user.get('display_name'), 'link': user.get('id')}
    return {'name': 'unknown', 'link': '--'}
| 2,492 | 744 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Discovardenovo(AutotoolsPackage):
    """DISCOVAR de novo is a large (and small) de novo genome assembler.
    It quickly generates highly accurate and complete assemblies using the
    same single library data as used by DISCOVAR. It currently doesn't
    support variant calling, for that, please use DISCOVAR instead."""

    homepage = "https://software.broadinstitute.org/software/discovar/blog/"
    url = "ftp://ftp.broadinstitute.org/pub/crd/DiscovarDeNovo/latest_source_code/discovardenovo-52488.tar.gz"

    # Upstream release archive; the second argument is its md5 checksum.
    version('52488', '2b08c77b1b998d85be8048e5efb10358')

    # lots of compiler errors with GCC7, works with 4.8.5
    # and devs claim it works with 4.7 so I'm assuming 4.7-4.8'll work
    conflicts('%gcc@5:')
    conflicts('%gcc@:4.7.0')

    # Build/run-time dependencies.
    depends_on('samtools')
    depends_on('jemalloc')
| 2,087 | 670 |
#Name: s2_convert.py
#Purpose: Convert PDFs to TXT format
#Invocation: python3 s2_convert.py <projName> <lng> <clss>
import codecs
import os
import re
import sys
#Name: valid_arguments
#Purpose: Check whether the command-line arguments are valid
#Parameters: sys.argv (globally defined list of command-line arguments)
#Returns: True (arguments are valid) or False (arguments are invalid)
def valid_arguments():
    """Validate the CLI: python3 s2_convert.py <projName> <lng> <clss>.

    Returns True when exactly three arguments are given, the project name is
    alphabetic (dashes/underscores allowed after the first letter), the
    language is supported, and the class is one of neg/pos/pred.
    """
    valid_languages = set(["danish", "dutch", "english", "finnish", "french", "german", "hungarian", "italian", "norwegian", "portuguese", "spanish", "swedish", "turkish"])
    valid_classes = set(["neg", "pos", "pred"])
    if len(sys.argv) != 4:
        return False
    proj_ok = re.search(r"^[a-zA-Z][a-zA-Z_-]*$", sys.argv[1]) is not None
    return bool(proj_ok and sys.argv[2] in valid_languages and sys.argv[3] in valid_classes)
#Name: match_page
#Purpose: Match line to an XML page tag
#Parameters: line (line of text from XML file)
#Returns: Regular expression match object
def match_page(line):
    """Return the regex match object for an XML <page> tag, or None."""
    page_pattern = r"<page id=\"(\d+)\""
    return re.search(page_pattern, line)
#Name: match_textbox
#Purpose: Match line to an XML textbox tag
#Parameters: line (line of text from XML file)
#Returns: Regular expression match object
def match_textbox(line):
    """Return the regex match object for an XML <textbox> tag, or None."""
    textbox_pattern = r"<textbox id=\"(\d+)\""
    return re.search(textbox_pattern, line)
#Name: match_textline
#Purpose: Match line to an XML textline tag
#Parameters: line (line of text from XML file)
#Returns: Regular expression match object
def match_textline(line):
    """Return the regex match object for an XML <textline> tag, or None."""
    textline_pattern = r"<textline"
    return re.search(textline_pattern, line)
#Name: match_text
#Purpose: Match line to an XML text tag
#Parameters: line (line of text from XML file)
#Returns: Regular expression match object
def match_text(line):
    """Return the regex match for an XML <text> character element, or None.

    Groups: 1=font, 2-5=bbox coordinates x1,y1,x2,y2, 6=size, 7=character.
    """
    text_pattern = (
        r"<text.*font=\"(.*)\".*"
        r"bbox=\"([0-9]+\.[0-9]+),([0-9]+\.[0-9]+),([0-9]+\.[0-9]+),([0-9]+\.[0-9]+)\".*"
        r"size=\"([0-9]+\.[0-9]+)\">(.*)</text>"
    )
    return re.search(text_pattern, line)
#Name: clean_char
#Purpose: Clean character to deal with punctuation, numbers, and foreign accent marks
#Parameters: old (character)
#Returns: Cleaned character
def clean_char(old):
    """Map one extracted character to its cleaned form.

    Punctuation, digits, control codes, and unknown high code points become a
    space; apostrophes vanish; common accented Latin letters fold to their
    ASCII base letter; everything else passes through unchanged.
    """
    if not old:
        return ""
    if len(old) >= 2:
        # Multi-character values (e.g. "(cid:NN)" artifacts) become a space.
        return " "
    # ord() gives the Unicode code point of the single character.
    ucp = ord(old)
    if ucp <= 31:
        return " "  # control codes
    if 32 <= ucp <= 38 or 40 <= ucp <= 47 or 58 <= ucp <= 64 \
            or 91 <= ucp <= 96 or 123 <= ucp <= 126 or ucp == 8221:
        return " "  # punctuation (incl. right double quote)
    if ucp in (39, 8217):
        return ""  # apostrophes are removed entirely
    if 48 <= ucp <= 57:
        return " "  # digits
    # Accented-letter folding.
    if 192 <= ucp <= 198 or 224 <= ucp <= 230:
        return "a"
    if ucp in (199, 231):
        return "c"
    if 200 <= ucp <= 203 or 232 <= ucp <= 235:
        return "e"
    if 204 <= ucp <= 207 or 236 <= ucp <= 239:
        return "i"
    if ucp in (209, 241):
        return "n"
    if 210 <= ucp <= 214 or ucp == 216 or 242 <= ucp <= 246 or ucp == 248:
        return "o"
    if ucp == 223:
        return "ss"  # German sharp s
    if 217 <= ucp <= 220 or 249 <= ucp <= 252:
        return "u"
    if ucp in (221, 253, 255):
        return "y"
    if ucp >= 128:
        return " "  # any other non-ASCII code point
    return old
#Name: get_chars
#Purpose: Extract the character values, coordinates, hierarchy, and font information from XML file
#Parameters: xmlFile (location of XML file)
#Returns: List of tuples (one for each character) containing character data
def get_chars(xmlFile):
    """Extract character values, coordinates, hierarchy, and font information.

    Parses pdf2txt.py XML output line by line and returns a list of tuples:
    (page, textbox, textline, x1, y1, x2, y2, size, font, value).
    """
    chars = []
    page = 0
    textbox = 0
    textline = 0
    # Open XML file and use regular expressions to parse contents.
    # BUG FIX: the "rU" mode string was deprecated since Python 3.4 and
    # removed in 3.11; plain "r" already yields universal newlines in py3.
    f = codecs.open(xmlFile, "r", encoding="utf8")
    for l in f:
        line = l.strip()
        pageMatch = match_page(line)
        textboxMatch = match_textbox(line)
        textlineMatch = match_textline(line)
        textMatch = match_text(line)
        if pageMatch:
            page = int(pageMatch.group(1))
        elif textboxMatch:
            # A new textbox restarts the per-textbox line counter.
            textline = 0
            textbox = int(textboxMatch.group(1))
        elif textlineMatch:
            textline += 1
        elif textMatch:
            font = textMatch.group(1)
            x1 = float(textMatch.group(2))
            y1 = float(textMatch.group(3))
            x2 = float(textMatch.group(4))
            y2 = float(textMatch.group(5))
            size = float(textMatch.group(6))
            value = clean_char(textMatch.group(7))
            chars.append((page, textbox, textline, x1, y1, x2, y2, size, font, value))
    f.close()
    return chars
#Name: clean_text
#Purpose: Clean string of text and check each word against a list of stop words
#Parameters: text (string of text)
#Returns: Cleaned text
def clean_text(text):
    """Lower-case *text*, collapse whitespace, and drop stop words.

    Relies on the module-level `stopWords` set populated by convert_files().
    """
    global stopWords
    normalized = re.sub(r"\s+", " ", text.lower())
    kept = [word.strip() for word in normalized.split(" ")
            if word.strip() not in stopWords]
    return " ".join(kept)
#Name: write_text
#Purpose: Construct words character by character
#Parameters: chars (list of tuples)
# txtFile (location of TXT file)
#Returns:
def write_text(chars, txtFile):
    """Reassemble extracted characters into text and write the cleaned
    result to txtFile.

    chars   -- list of tuples produced by get_chars()
    txtFile -- destination TXT file path

    NOTE(review): assumes chars is non-empty; create_output() only calls this
    after checking len(chars) > 0.
    """
    text = []
    # Sort characters according to page, textbox, textline, y1, and x1.
    # -z[4] sorts y1 descending so lines run top-to-bottom (PDF y grows
    # upward), while x1 ascending keeps left-to-right order within a line.
    chars = sorted(chars, key = lambda z: (z[0], z[1], z[2], -z[4], z[3]))
    pageCur = chars[0][0]
    textboxCur = chars[0][1]
    textlineCur = chars[0][2]
    for char in chars:
        # Insert one separating space whenever page, textbox, or textline
        # changes, so words on different lines don't fuse together.
        spaceFlag = 0
        pageNew = char[0]
        textboxNew = char[1]
        textlineNew = char[2]
        if pageNew != pageCur:
            pageCur = pageNew
            spaceFlag = 1
        if textboxNew != textboxCur:
            textboxCur = textboxNew
            spaceFlag = 1
        if textlineNew != textlineCur:
            textlineCur = textlineNew
            spaceFlag = 1
        if spaceFlag == 1:
            text.append(" ")
        # char[9] is the cleaned character value.
        text.append(char[9])
    text = "".join(text)
    f = codecs.open(txtFile, "w")
    f.write(clean_text(text))
    f.close()
    return
#Name: create_output
#Purpose: Convert a PDF document of a given class to TXT format
#Parameters: projName (project name)
# clss ("pos" or "neg")
# docName (document name)
#Returns:
def create_output(projName, clss, docName):
    """Convert a single PDF document of a given class to TXT format.

    projName -- project name
    clss     -- "pos", "neg", or "pred"
    docName  -- document name (without extension)

    Problem PDFs are moved to the <clss>_prob folder for later inspection.
    """
    # Create file locations
    pdfFile = "/" + projName + "/" + clss + "_pdf/" + docName + ".pdf"
    xmlFile = "/" + projName + "/" + clss + "_xml/" + docName + ".xml"
    txtFile = "/" + projName + "/" + clss + "_txt/" + docName + ".txt"
    probFile = "/" + projName + "/" + clss + "_prob/" + docName + ".pdf"
    # probFlag indicates whether there is a problem extracting text from the PDF
    probFlag = 0
    chars = []
    # If the TXT file does not already exist, then try creating it
    if not os.path.isfile(txtFile):
        # The pdf2txt.py program comes with the PDFMiner module.
        # BUG FIX: the original wrapped this call in
        # `except PDFTextExtractionNotAllowed`, but os.system() never raises
        # PDFMiner exceptions and that name is undefined in this module
        # (reaching the handler would itself raise NameError). Check the
        # shell exit status instead.
        status = os.system("pdf2txt.py -o " + xmlFile + " -t xml " + pdfFile)
        if status != 0:
            probFlag = 1
        if not os.path.isfile(xmlFile):
            probFlag = 1
        elif os.stat(xmlFile).st_size == 0:
            probFlag = 1
        if probFlag == 0:
            chars = get_chars(xmlFile)
            if len(chars) == 0:
                probFlag = 1
        # Check probFlag value and act accordingly
        if probFlag == 0:
            write_text(chars, txtFile)
            if os.path.isfile(xmlFile):
                # The intermediate XML file is deleted because it tends to be large
                os.remove(xmlFile)
            print(docName)
        elif probFlag == 1:
            if os.path.isfile(xmlFile):
                # The intermediate XML file is deleted because it tends to be large
                os.remove(xmlFile)
            if os.path.isfile(txtFile):
                # Any text that has been extracted from the problem PDF is deleted
                os.remove(txtFile)
            # Move the problem PDF aside so it can be inspected manually.
            os.system("mv " + pdfFile + " " + probFile)
            print("!!! PROBLEM: " + docName)
    return
#Name: convert_files
#Purpose: Convert PDFs to TXT format
#Parameters: projName (project name)
# lng (language)
# clss ("neg", "pos", or "pred")
#Returns:
def convert_files(projName, lng, clss):
    """Convert all PDFs of one class to TXT format.

    projName -- project name
    lng      -- language; selects the stop-word list stop_<lng>.txt
    clss     -- "neg", "pos", or "pred"
    """
    # Read in stop words.
    stopWordsList = []
    # BUG FIX: "rU" mode was removed in Python 3.11; "r" behaves the same here.
    f = codecs.open("stop_" + lng + ".txt", "r")
    for word in f:
        if word.strip() != "":
            stopWordsList.append(word.strip())
    f.close()
    # clean_text() reads this module-level set.
    global stopWords
    stopWords = set(stopWordsList)
    # Iterate through PDFs of a given class, extract text, and create output files
    print("\n***** " + clss + " *****\n")
    pdfs = sorted(os.listdir("/" + projName + "/" + clss + "_pdf/"))
    for pdf in pdfs:
        pdfMatch = re.search(r"^(\S+)\.([pP][dD][fF])$", pdf)
        if pdfMatch:
            docName = pdfMatch.group(1)
            # Normalize odd-cased extensions (.PDF, .Pdf, ...) to .pdf first.
            if pdfMatch.group(2) != "pdf":
                oldFile = "/" + projName + "/" + clss + "_pdf/" + docName + "." + pdfMatch.group(2)
                newFile = "/" + projName + "/" + clss + "_pdf/" + docName + ".pdf"
                os.system("mv " + oldFile + " " + newFile)
            create_output(projName, clss, docName)
    print("")
    return
def main():
    """Entry point: validate CLI arguments and run the conversion."""
    if not valid_arguments():
        print("\nInvalid arguments\n")
        return
    convert_files(sys.argv[1], sys.argv[2], sys.argv[3])
if __name__ == "__main__":
    main()
| 10,379 | 3,597 |
def test_greet_user(client, test_strings):
for s in test_strings:
_route = f"/api/v1/greeting/{s}"
r = client.get(_route)
assert r.status_code == 200
assert r.json == {"greeting": "Hello", "user": s}
def test_add_int_success(client, test_ints):
_data = {"add": test_ints}
r = client.post("/api/v1/math/sum", data=_data)
assert r.status_code == 200
assert r.json == {"sum": float(sum(test_ints))}
def test_add_float_success(client, test_floats):
_data = {"add": test_floats}
r = client.post("/api/v1/math/sum", data=_data)
assert r.status_code == 200
assert r.json == {"sum": sum(test_floats)}
def test_add_bad_key_failure(client, test_ints):
_data = {"junk": test_ints}
r = client.post("/api/v1/math/sum", data=_data)
assert r.status_code == 400
def test_add_bad_data_failure(client, test_strings):
_data = {"add": test_strings}
r = client.post("/api/v1/math/sum", data=_data)
assert r.status_code == 400 | 1,004 | 393 |
# -*-coding: utf-8-*-
from sdata.experiments import Test
class TensionTest(Test):
    """Tension test experiment.

    Thin subclass of sdata's Test; all behaviour is inherited and only the
    experiment kind is fixed by the class itself.
    """

    def __init__(self, name, **kwargs):
        # Cooperative super() call instead of the explicit Test.__init__ so the
        # class composes correctly under multiple inheritance.
        super().__init__(name=name, **kwargs)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""The code implementation of SharedGradNet.
main0.py is for neural networks without hidden layer.
Some part from: https://jhui.github.io/2018/02/09/PyTorch-Variables-functionals-and-Autograd/
2019/06/17: Update with hyper-parameter tuning script.
2019/06/25: Committed main0.py.
"""
__author__ = 'Ninnart Fuengfusin'
__version__ = '0.0.1'
__email__ = 'ninnart.fuengfusin@yahoo.com'
import os
import time
import logging
import argparse
import torch
import torch.nn as nn
import model
from weight_decay import *
from dataset import load_dataset
from utils import *
from recorder import Recorder
from updater import UpdateMomentum
from namer import namer
# Command-line interface.  Every hyper-parameter of a training run is exposed
# here so that the tuning script can sweep them.
parser = argparse.ArgumentParser(description='PyTorch implementation of SharedGradNet.')
parser.add_argument('--epoch', '-e', type=int, default=600, help='Number of training epoch.')
parser.add_argument('--learning_rate', '-lr', type=float, default=3e-1, help='A floating for initial learning rate.')
parser.add_argument('--train_batch', type=int, default=128, help='A integer for train batch amount.')
parser.add_argument('--test_batch', type=int, default=128, help='A integer for test batch amount')
parser.add_argument('--num_neuron', type=int, default=784,
                    help='Number of neurons in fully connected layer for produce codes')
parser.add_argument('--weight_decay', type=float, default=0, help='A floating for weight decay.')
# str2bool is not defined in this file; presumably it comes from the star
# import of utils — TODO confirm.
parser.add_argument('--load', type=str2bool, default=False,
                    help='A boolean for loading weights from load_location or not.')
parser.add_argument('--load_location', type=str, default='model1-baseline',
                    help='A string of location for loading weights.')
parser.add_argument('--seed', '-s', type=int, default=0,
                    help='An integer for initialization randomness.')
# NOTE: parsing happens at import time, so importing this module requires a
# valid argv (or no unknown flags).
args = parser.parse_args()
if __name__ == '__main__':
    # Derive the run/output directory name from the hyper-parameters.
    save_loc = namer(
        f'epoch{args.epoch}', f'lr{args.learning_rate}',
        f'decay{args.weight_decay}', f'seed{args.seed}')
    set_logger(os.path.join(os.getcwd(), save_loc), 'info.log')
    logging.info(__doc__)
    logging.info(args)
    set_printoptions()
    seed_everywhere_torch(args.seed)
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    # Recorder tracks named metrics; only 'test_acc' is actually recorded below.
    record = Recorder('test_acc', 'test_acc2', 'test_acc3', 'test_loss', 'test_loss2', 'test_loss3')
    train_loader, test_loader, img_size = load_dataset(
        num_train_batch=args.train_batch, num_test_batch=args.test_batch,
        num_extra_batch=0, num_worker=8, dataset='mnist')
    model1 = model.NetworkWithSub1()
    # Manual SGD-with-momentum updaters for the single weight/bias pair
    # (no torch.optim optimizer is used).
    updaterW1_1 = UpdateMomentum()
    updaterB1_1 = UpdateMomentum()
    model1.to(device)
    BETA = 0.9  # momentum coefficient for the manual updaters
    t1 = time.time()
    for i in range(args.epoch):
        # Accumulating variables.
        total_train_loss = 0
        train_correct = 0
        train_total = 0
        total_test_loss = 0
        test_correct = 0
        test_total = 0
        model1.train()
        # Step decay: divide the learning rate by 3 every 200 epochs
        # (mutates args.learning_rate in place).
        args.learning_rate = args.learning_rate/3 if i % 200 == 0 and i != 0 else args.learning_rate
        for train_data, train_label in train_loader:
            model1.zero_grad()
            train_data, train_label = train_data.to(device), train_label.to(device)
            train_output = model1.forward(train_data)
            train_loss = nn.CrossEntropyLoss()(
                train_output, train_label) #+ l2_weight_decay(args.weight_decay2, model2.w1)
            train_loss.backward()
            total_train_loss += train_loss.item()
            _, train_predicted = torch.max(train_output.data, 1)
            train_correct += (train_predicted == train_label).sum().item()
            train_total += train_label.data.size(0)
            # Parameters are updated manually via the momentum updaters.
            model1.w1.data = updaterW1_1.update(
                model1.w1.data, BETA, args.learning_rate, model1.w1.grad.data)
            model1.b1.data = updaterB1_1.update(
                model1.b1.data, BETA, args.learning_rate, model1.b1.grad.data)
        logging.info(f'Epoch: {i + 1}')
        logging.info(f'Train Accuracy: {train_correct/train_total}, \nLoss: {total_train_loss/train_total}')
        with torch.no_grad():
            model1.eval()
            for test_data, test_label in test_loader:
                test_data, test_label = test_data.to(device), test_label.to(device)
                test_output = model1.forward(test_data)
                test_loss = nn.CrossEntropyLoss()(test_output, test_label)
                total_test_loss += test_loss.item()
                _, test_predicted = torch.max(test_output.data, 1)
                test_correct += (test_predicted == test_label).sum().item()
                test_total += test_label.data.size(0)
            # Checkpoint whenever test accuracy beats the best seen so far.
            if record.more_than_highest('test_acc', test_correct/test_total):
                save_model(model1, os.path.join(os.getcwd(), save_loc, 'checkpoint.pth'))
                logging.info(f'Save model')
            t2 = time.time() - t1
            logging.info(f'Test Accuracy: {test_correct/test_total}, \nLoss: {total_test_loss/test_total}')
            record.record('test_acc', test_correct/test_total)
            logging.info(f'Learning rate {args.learning_rate}')
            logging.info(f'Timer: {to_hhmmss(t2)}')
            logging.info(f'=====================================================================================')
    record.save_all(os.path.join(os.getcwd(), save_loc))
    logging.info(f'best test_acc: {record.highest("test_acc")}')
    logging.info(f'model1:w1 = {model1.w1.data}')
    record.plot(
        'test_acc', save=True,
        save_loc=os.path.join(os.getcwd(), save_loc, 'test_acc.png'))
    # NOTE(review): `np` is never imported in this file's visible import block;
    # presumably one of the star imports (utils) provides it — confirm.
    # NOTE(review): np.savetxt is handed a scalar (record.highest(...)), but
    # savetxt expects an array-like — verify this final dump actually runs.
    np.savetxt(
        os.path.join(os.getcwd(), save_loc, f'{record.highest("test_acc")}.txt'),
        record.highest("test_acc"), delimiter=',')
| 5,818 | 1,947 |
# -*- coding: utf-8 -*-
###########################################################################
## Python code generated with wxFormBuilder (version Aug 8 2018)
## http://www.wxformbuilder.org/
##
## PLEASE DO *NOT* EDIT THIS FILE!
###########################################################################
import wx
import wx.xrc
###########################################################################
## Class efuseWin_BootCfg2
###########################################################################
class efuseWin_BootCfg2 ( wx.Frame ):
    """wxFormBuilder-generated frame for editing the BOOT_CFG2 efuse word at 0x470.

    The window shows one row per byte of the 32-bit word: static labels name
    each bit field and wx.Choice widgets select the fuse values.  Generated
    code — do not edit by hand; regenerate from the wxFormBuilder project.
    """

    def __init__( self, parent ):
        wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = wx.EmptyString, pos = wx.DefaultPosition, size = wx.Size( 860,370 ), style = wx.DEFAULT_FRAME_STYLE|wx.TAB_TRAVERSAL )

        self.SetSizeHints( wx.DefaultSize, wx.DefaultSize )
        self.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_WINDOW ) )

        wSizer_win = wx.WrapSizer( wx.HORIZONTAL, wx.WRAPSIZER_DEFAULT_FLAGS )

        # Left column: byte-address labels for the four bytes of word 0x470.
        bSizer_byteIdx = wx.BoxSizer( wx.VERTICAL )

        self.m_staticText_address = wx.StaticText( self, wx.ID_ANY, u"Address", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SUNKEN )
        self.m_staticText_address.Wrap( -1 )
        self.m_staticText_address.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOBK ) )
        bSizer_byteIdx.Add( self.m_staticText_address, 0, wx.ALL, 5 )

        self.m_staticText_byteIdx0 = wx.StaticText( self, wx.ID_ANY, u" 0x470[7:0]", wx.DefaultPosition, wx.Size( 80,51 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SUNKEN )
        self.m_staticText_byteIdx0.Wrap( -1 )
        self.m_staticText_byteIdx0.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOBK ) )
        bSizer_byteIdx.Add( self.m_staticText_byteIdx0, 0, wx.ALL, 5 )

        self.m_staticText_byteIdx1 = wx.StaticText( self, wx.ID_ANY, u" 0x470[15:8]", wx.DefaultPosition, wx.Size( 80,51 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SUNKEN )
        self.m_staticText_byteIdx1.Wrap( -1 )
        self.m_staticText_byteIdx1.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOBK ) )
        bSizer_byteIdx.Add( self.m_staticText_byteIdx1, 0, wx.ALL, 5 )

        self.m_staticText_byteIdx2 = wx.StaticText( self, wx.ID_ANY, u" 0x470[23:16]", wx.DefaultPosition, wx.Size( 80,51 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SUNKEN )
        self.m_staticText_byteIdx2.Wrap( -1 )
        self.m_staticText_byteIdx2.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOBK ) )
        bSizer_byteIdx.Add( self.m_staticText_byteIdx2, 0, wx.ALL, 5 )

        self.m_staticText_byteIdx3 = wx.StaticText( self, wx.ID_ANY, u" 0x470[31:24]", wx.DefaultPosition, wx.Size( 80,51 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SUNKEN )
        self.m_staticText_byteIdx3.Wrap( -1 )
        self.m_staticText_byteIdx3.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOBK ) )
        bSizer_byteIdx.Add( self.m_staticText_byteIdx3, 0, wx.ALL, 5 )

        wSizer_win.Add( bSizer_byteIdx, 1, wx.EXPAND, 5 )

        # Right side: bit-index header, per-bit field labels and choice
        # widgets, wrapped eight items per row.
        bSizer_bitIdx = wx.BoxSizer( wx.VERTICAL )

        wSizer_bitIdx = wx.WrapSizer( wx.HORIZONTAL, wx.WRAPSIZER_DEFAULT_FLAGS )

        # Header row: bit indices 7..0.
        self.m_staticText_bitIdx7 = wx.StaticText( self, wx.ID_ANY, u"7", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SUNKEN )
        self.m_staticText_bitIdx7.Wrap( -1 )
        self.m_staticText_bitIdx7.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOBK ) )
        wSizer_bitIdx.Add( self.m_staticText_bitIdx7, 0, wx.ALL, 5 )

        self.m_staticText_bitIdx6 = wx.StaticText( self, wx.ID_ANY, u"6", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SUNKEN )
        self.m_staticText_bitIdx6.Wrap( -1 )
        self.m_staticText_bitIdx6.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOBK ) )
        wSizer_bitIdx.Add( self.m_staticText_bitIdx6, 0, wx.ALL, 5 )

        self.m_staticText_bitIdx5 = wx.StaticText( self, wx.ID_ANY, u"5", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SUNKEN )
        self.m_staticText_bitIdx5.Wrap( -1 )
        self.m_staticText_bitIdx5.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOBK ) )
        wSizer_bitIdx.Add( self.m_staticText_bitIdx5, 0, wx.ALL, 5 )

        self.m_staticText_bitIdx4 = wx.StaticText( self, wx.ID_ANY, u"4", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SUNKEN )
        self.m_staticText_bitIdx4.Wrap( -1 )
        self.m_staticText_bitIdx4.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOBK ) )
        wSizer_bitIdx.Add( self.m_staticText_bitIdx4, 0, wx.ALL, 5 )

        self.m_staticText_bitIdx3 = wx.StaticText( self, wx.ID_ANY, u"3", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SUNKEN )
        self.m_staticText_bitIdx3.Wrap( -1 )
        self.m_staticText_bitIdx3.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOBK ) )
        wSizer_bitIdx.Add( self.m_staticText_bitIdx3, 0, wx.ALL, 5 )

        self.m_staticText_bitIdx2 = wx.StaticText( self, wx.ID_ANY, u"2", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SUNKEN )
        self.m_staticText_bitIdx2.Wrap( -1 )
        self.m_staticText_bitIdx2.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOBK ) )
        wSizer_bitIdx.Add( self.m_staticText_bitIdx2, 0, wx.ALL, 5 )

        self.m_staticText_bitIdx1 = wx.StaticText( self, wx.ID_ANY, u"1", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SUNKEN )
        self.m_staticText_bitIdx1.Wrap( -1 )
        self.m_staticText_bitIdx1.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOBK ) )
        wSizer_bitIdx.Add( self.m_staticText_bitIdx1, 0, wx.ALL, 5 )

        self.m_staticText_bitIdx0 = wx.StaticText( self, wx.ID_ANY, u"0", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SUNKEN )
        self.m_staticText_bitIdx0.Wrap( -1 )
        self.m_staticText_bitIdx0.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOBK ) )
        wSizer_bitIdx.Add( self.m_staticText_bitIdx0, 0, wx.ALL, 5 )

        # Byte 0 (0x470[7:0]): field-name labels for bits 7..0.
        self.m_staticText_bit7 = wx.StaticText( self, wx.ID_ANY, u"DLL Override", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit7.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit7, 0, wx.ALL, 5 )

        self.m_staticText_bit6 = wx.StaticText( self, wx.ID_ANY, u"SD1_RST_PO_SEL", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit6.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit6, 0, wx.ALL, 5 )

        self.m_staticText_bit5 = wx.StaticText( self, wx.ID_ANY, u"SD2 VOLTAGE", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit5.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit5, 0, wx.ALL, 5 )

        self.m_staticText_bit4 = wx.StaticText( self, wx.ID_ANY, u"UART_Ser-D_Dis", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit4.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit4, 0, wx.ALL, 5 )

        self.m_staticText_bit3 = wx.StaticText( self, wx.ID_ANY, u"Dis_SDMMC_manu", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit3.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit3, 0, wx.ALL, 5 )

        self.m_staticText_bit2 = wx.StaticText( self, wx.ID_ANY, u"L1 I-Cache Dis", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit2.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit2, 0, wx.ALL, 5 )

        self.m_staticText_bit1 = wx.StaticText( self, wx.ID_ANY, u"L1_D-Cache_DIS", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit1.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit1, 0, wx.ALL, 5 )

        self.m_staticText_bit0 = wx.StaticText( self, wx.ID_ANY, u"Override_Pad_Set", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit0.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit0, 0, wx.ALL, 5 )

        # Byte 0: value selectors for bits 7..0.
        m_choice_bit7Choices = [ u"0 -Slave Mode", u"1 -Override Mode" ]
        self.m_choice_bit7 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit7Choices, 0 )
        self.m_choice_bit7.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit7, 0, wx.ALL, 5 )

        m_choice_bit6Choices = [ u"0 -Reset Low", u"1 -Reset High" ]
        self.m_choice_bit6 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit6Choices, 0 )
        self.m_choice_bit6.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit6, 0, wx.ALL, 5 )

        m_choice_bit5Choices = [ u"0 -3.3V ", u"1 -1.8V" ]
        self.m_choice_bit5 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit5Choices, 0 )
        self.m_choice_bit5.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit5, 0, wx.ALL, 5 )

        m_choice_bit4Choices = [ u"0 -Not Disable ", u"1 -Disable" ]
        self.m_choice_bit4 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit4Choices, 0 )
        self.m_choice_bit4.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit4, 0, wx.ALL, 5 )

        m_choice_bit3Choices = [ u"0 -Enable ", u"1 -Disable" ]
        self.m_choice_bit3 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit3Choices, 0 )
        self.m_choice_bit3.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit3, 0, wx.ALL, 5 )

        m_choice_bit2Choices = [ u"0 ", u"1 " ]
        self.m_choice_bit2 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit2Choices, 0 )
        self.m_choice_bit2.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit2, 0, wx.ALL, 5 )

        m_choice_bit1Choices = [ u"0 -Enable ", u"1 -Disable" ]
        self.m_choice_bit1 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit1Choices, 0 )
        self.m_choice_bit1.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit1, 0, wx.ALL, 5 )

        m_choice_bit0Choices = [ u"0", u"1" ]
        self.m_choice_bit0 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit0Choices, 0 )
        self.m_choice_bit0.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit0, 0, wx.ALL, 5 )

        # Byte 1 (0x470[15:8]): field-name labels for bits 15..8.
        self.m_staticText_bit15 = wx.StaticText( self, wx.ID_ANY, u"SD2_RST_PO_SEL", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit15.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit15, 0, wx.ALL, 5 )

        self.m_staticText_bit14 = wx.StaticText( self, wx.ID_ANY, u"RE_TO_PRE-IDLE", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit14.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit14, 0, wx.ALL, 5 )

        self.m_staticText_bit13 = wx.StaticText( self, wx.ID_ANY, u"Override_HYS_bit", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit13.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit13, 0, wx.ALL, 5 )

        self.m_staticText_bit12 = wx.StaticText( self, wx.ID_ANY, u"USDHC_PAD_DOWN", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit12.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit12, 0, wx.ALL, 5 )

        self.m_staticText_bit11 = wx.StaticText( self, wx.ID_ANY, u"ENA_EMMC_22K_PULLUP", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit11.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit11, 0, wx.ALL, 5 )

        self.m_staticText_bit10 = wx.StaticText( self, wx.ID_ANY, u"BootFailIndiPinSelect[4]", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit10.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit10, 0, wx.ALL, 5 )

        self.m_staticText_bit9 = wx.StaticText( self, wx.ID_ANY, u"USDHC_IOMUX_SION_BIT_ENA", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit9.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit9, 0, wx.ALL, 5 )

        self.m_staticText_bit8 = wx.StaticText( self, wx.ID_ANY, u"USDHC_IOMUX_SRE_Ena", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit8.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit8, 0, wx.ALL, 5 )

        # Byte 1: value selectors for bits 15..8.
        m_choice_bit15Choices = [ u"0 -Reset Low", u"1 -Reset High" ]
        self.m_choice_bit15 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit15Choices, 0 )
        self.m_choice_bit15.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit15, 0, wx.ALL, 5 )

        m_choice_bit14Choices = [ u"0", u"1" ]
        self.m_choice_bit14 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit14Choices, 0 )
        self.m_choice_bit14.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit14, 0, wx.ALL, 5 )

        m_choice_bit13Choices = [ u"0", u"1" ]
        self.m_choice_bit13 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit13Choices, 0 )
        self.m_choice_bit13.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit13, 0, wx.ALL, 5 )

        m_choice_bit12Choices = [ u"0 -no action", u"1 -pull down" ]
        self.m_choice_bit12 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit12Choices, 0 )
        self.m_choice_bit12.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit12, 0, wx.ALL, 5 )

        m_choice_bit11Choices = [ u"0 -47K pullup", u"1 -22K pullup" ]
        self.m_choice_bit11 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit11Choices, 0 )
        self.m_choice_bit11.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit11, 0, wx.ALL, 5 )

        m_choice_bit10Choices = [ u"0", u"1" ]
        self.m_choice_bit10 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit10Choices, 0 )
        self.m_choice_bit10.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit10, 0, wx.ALL, 5 )

        m_choice_bit9Choices = [ u"0 -Disable", u"1 -Enable" ]
        self.m_choice_bit9 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit9Choices, 0 )
        self.m_choice_bit9.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit9, 0, wx.ALL, 5 )

        m_choice_bit8Choices = [ u"0 -Disable", u"1 -Enable" ]
        self.m_choice_bit8 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit8Choices, 0 )
        self.m_choice_bit8.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit8, 0, wx.ALL, 5 )

        # Byte 2 (0x470[23:16]): labels then selectors; some fields span
        # multiple bits, hence the wider widgets.
        self.m_staticText_bit23 = wx.StaticText( self, wx.ID_ANY, u"USDHC_CMD_OE_PRE_EN", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit23.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit23, 0, wx.ALL, 5 )

        self.m_staticText_bit22_21 = wx.StaticText( self, wx.ID_ANY, u"LPB_BOOT", wx.DefaultPosition, wx.Size( 170,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit22_21.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit22_21, 0, wx.ALL, 5 )

        self.m_staticText_bit20 = wx.StaticText( self, wx.ID_ANY, u"Reserved", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit20.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit20, 0, wx.ALL, 5 )

        self.m_staticText_bit19_16 = wx.StaticText( self, wx.ID_ANY, u"Boot Failure Indicator Pin Select[3:0]", wx.DefaultPosition, wx.Size( 350,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit19_16.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit19_16, 0, wx.ALL, 5 )

        m_choice_bit23Choices = [ u"0", u"1" ]
        self.m_choice_bit23 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit23Choices, 0 )
        self.m_choice_bit23.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit23, 0, wx.ALL, 5 )

        m_choice_bit22_21Choices = [ u"00 -Div by 1", u"01 -Div by 2", u"10 -Div by 3", u"11 -Div by 4" ]
        self.m_choice_bit22_21 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 170,-1 ), m_choice_bit22_21Choices, 0 )
        self.m_choice_bit22_21.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit22_21, 0, wx.ALL, 5 )

        m_choice_bit20Choices = [ u"x" ]
        self.m_choice_bit20 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit20Choices, 0 )
        self.m_choice_bit20.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit20, 0, wx.ALL, 5 )

        # Choices populated at runtime by the derived class.
        m_choice_bit19_16Choices = []
        self.m_choice_bit19_16 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 350,-1 ), m_choice_bit19_16Choices, 0 )
        self.m_choice_bit19_16.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit19_16, 0, wx.ALL, 5 )

        # Byte 3 (0x470[31:24]): labels then selectors.
        self.m_staticText_bit31 = wx.StaticText( self, wx.ID_ANY, u"OverNAND_PadSet", wx.DefaultPosition, wx.Size( 80,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit31.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit31, 0, wx.ALL, 5 )

        self.m_staticText_bit30_24 = wx.StaticText( self, wx.ID_ANY, u"MMC_DLL_DLY[6:0]", wx.DefaultPosition, wx.Size( 620,-1 ), wx.ALIGN_CENTER_HORIZONTAL|wx.BORDER_SIMPLE )
        self.m_staticText_bit30_24.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_bit30_24, 0, wx.ALL, 5 )

        m_choice_bit31Choices = [ u"0", u"1" ]
        self.m_choice_bit31 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 80,-1 ), m_choice_bit31Choices, 0 )
        self.m_choice_bit31.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit31, 0, wx.ALL, 5 )

        # Choices populated at runtime by the derived class.
        m_choice_bit30_24Choices = []
        self.m_choice_bit30_24 = wx.Choice( self, wx.ID_ANY, wx.DefaultPosition, wx.Size( 620,-1 ), m_choice_bit30_24Choices, 0 )
        self.m_choice_bit30_24.SetSelection( 0 )
        wSizer_bitIdx.Add( self.m_choice_bit30_24, 0, wx.ALL, 5 )

        # Invisible spacer labels keep the Ok/Cancel buttons on their own row.
        self.m_staticText_null0BitIdx = wx.StaticText( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 420,-1 ), 0 )
        self.m_staticText_null0BitIdx.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_null0BitIdx, 0, wx.ALL, 5 )

        self.m_button_ok = wx.Button( self, wx.ID_ANY, u"Ok", wx.DefaultPosition, wx.Size( 100,-1 ), 0 )
        wSizer_bitIdx.Add( self.m_button_ok, 0, wx.ALL, 5 )

        self.m_button_cancel = wx.Button( self, wx.ID_ANY, u"Cancel", wx.DefaultPosition, wx.Size( 100,-1 ), 0 )
        wSizer_bitIdx.Add( self.m_button_cancel, 0, wx.ALL, 5 )

        self.m_staticText_null1BitIdx = wx.StaticText( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.Size( 720,1 ), 0 )
        self.m_staticText_null1BitIdx.Wrap( -1 )
        wSizer_bitIdx.Add( self.m_staticText_null1BitIdx, 0, wx.ALL, 5 )

        bSizer_bitIdx.Add( wSizer_bitIdx, 1, wx.EXPAND, 5 )

        wSizer_win.Add( bSizer_bitIdx, 1, wx.EXPAND, 5 )

        self.SetSizer( wSizer_win )
        self.Layout()

        self.Centre( wx.BOTH )

        # Connect Events
        self.Bind( wx.EVT_CLOSE, self.callbackClose )
        self.m_button_ok.Bind( wx.EVT_BUTTON, self.callbackOk )
        self.m_button_cancel.Bind( wx.EVT_BUTTON, self.callbackCancel )

    def __del__( self ):
        pass

    # Virtual event handlers, override them in your derived class
    def callbackClose( self, event ):
        event.Skip()

    def callbackOk( self, event ):
        event.Skip()

    def callbackCancel( self, event ):
        event.Skip()
| 19,144 | 9,362 |
# Tiny demo script: greet, then show an addition both as a value and
# spelled out as an equation.
lhs, rhs = 5, 4
print("Hello World")
print(lhs + rhs)
print(lhs, "+", rhs, "=", lhs + rhs)
| 55 | 29 |
from django.core.management.base import BaseCommand, CommandError
import time
from hud_api_replace.models import CachedGeodata
class Command(BaseCommand):
    """Management command that purges expired cached geodata rows."""

    help = 'Deletes geodata records that are expired.'

    def handle(self, *args, **options):
        # Anything whose expiry timestamp is at or before "now" is stale.
        cutoff = time.time()
        CachedGeodata.objects.filter(expires__lte=cutoff).delete()
| 332 | 102 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import utils
class Protection(object):
    """Per-battery protection status flags.

    Drivers report each flag as 2 = Alarm, 1 = Warning, 0 = Normal;
    every flag starts out unknown (None) until the BMS supplies it.
    """

    def __init__(self):
        for flag_name in (
            "voltage_high",
            "voltage_low",
            "voltage_cell_low",
            "soc_low",
            "current_over",
            "current_under",
            "cell_imbalance",
            "internal_failure",
            "temp_high_charge",
            "temp_low_charge",
            "temp_high_discharge",
            "temp_low_discharge",
        ):
            setattr(self, flag_name, None)
class Cell:
    """State of a single battery cell: last measured voltage and whether
    the BMS is currently balancing it."""

    # Class-level defaults; overwritten per instance once data arrives.
    voltage = None
    balance = None

    def __init__(self, balance):
        self.balance = balance
class Battery(object):
    """Abstract base class for a serial-attached battery/BMS driver.

    Holds all the state that gets published to dbus (pack measurements, SOC,
    temperatures, per-cell data, protection flags) plus the charge/discharge
    control values derived from SOC.  Concrete drivers must override
    test_connection(), get_settings() and refresh_data().
    """

    def __init__(self, port, baud):
        # Serial link configuration.
        self.port = port
        self.baud_rate = baud
        self.role = 'battery'
        self.type = 'Generic'
        self.poll_interval = 1000
        self.hardware_version = None
        # Pack-level measurements, populated by refresh_data().
        self.voltage = None
        self.current = None
        self.capacity_remain = None
        self.capacity = None
        self.cycles = None
        self.total_ah_drawn = None
        self.production = None
        self.protection = Protection()
        self.version = None
        self.soc = None
        self.charge_fet = None
        self.discharge_fet = None
        self.cell_count = None
        self.temp_sensors = None
        self.temp1 = None
        self.temp2 = None
        self.cells = []
        # Charge-control outputs consumed by the dbus helper.
        self.control_charging = None
        self.control_voltage = None
        self.control_current = None
        self.control_previous_total = None
        self.control_previous_max = None
        self.control_discharge_current = None
        self.control_charge_current = None
        self.control_allow_charge = None
        # max battery charge/discharge current
        self.max_battery_current = None
        self.max_battery_discharge_current = None

    def test_connection(self):
        # Each driver must override this function to test if a connection can be made
        # return False when fail, True if successful
        # BUGFIX: was `return false` (lowercase), which raised NameError at runtime.
        return False

    def get_settings(self):
        # Each driver must override this function to read/set the battery settings
        # It is called once after a successful connection by DbusHelper.setup_vedbus()
        # Values: battery_type, version, hardware_version, min_battery_voltage, max_battery_voltage,
        # MAX_BATTERY_CURRENT, MAX_BATTERY_DISCHARGE_CURRENT, cell_count, capacity
        # return False when fail, True if successful
        # BUGFIX: was `return false` (lowercase), which raised NameError at runtime.
        return False

    def refresh_data(self):
        # Each driver must override this function to read battery data and populate this class
        # It is called each poll just before the data is published to vedbus
        # return False when fail, True if successful
        # BUGFIX: was `return false` (lowercase), which raised NameError at runtime.
        return False

    def to_temp(self, sensor, value):
        """Store a temperature reading, clamped to a sane range.

        Keep the temp value between -20 and 100 to handle sensor issues or no
        data; the BMS should already have protected before those limits.
        """
        if sensor == 1:
            self.temp1 = min(max(value, -20), 100)
        if sensor == 2:
            self.temp2 = min(max(value, -20), 100)

    def manage_charge_current(self):
        """Derive charge/discharge limits and the allow-charge flag from SOC."""
        # Stop charging when the pack is essentially full.
        if self.soc > 99:
            self.control_allow_charge = False
        else:
            self.control_allow_charge = True
        # Taper the charge current as SOC approaches 100 %.
        if 98 < self.soc <= 100:
            self.control_charge_current = 1
        elif 95 < self.soc <= 98:
            # BUGFIX: upper bound was 97, so SOC in (97, 98] fell through to
            # the full-current else branch below.
            self.control_charge_current = 4
        elif 91 < self.soc <= 95:
            self.control_charge_current = self.max_battery_current / 2
        else:
            self.control_charge_current = self.max_battery_current
        # Throttle the discharge current at low SOC.
        if self.soc <= 20:
            self.control_discharge_current = 5
        elif 20 < self.soc <= 30:
            self.control_discharge_current = self.max_battery_discharge_current / 4
        elif 30 < self.soc <= 35:
            self.control_discharge_current = self.max_battery_discharge_current / 2
        else:
            self.control_discharge_current = self.max_battery_discharge_current

    def get_min_cell(self):
        """Return the index of the lowest-voltage cell, or None if unknown."""
        min_voltage = 9999
        min_cell = None
        if len(self.cells) == 0 and hasattr(self, 'cell_min_no'):
            # Some BMSes report the cell number directly instead of per-cell data.
            return self.cell_min_no
        for c in range(min(len(self.cells), self.cell_count)):
            if self.cells[c].voltage is not None and min_voltage > self.cells[c].voltage:
                min_voltage = self.cells[c].voltage
                min_cell = c
        return min_cell

    def get_max_cell(self):
        """Return the index of the highest-voltage cell, or None if unknown."""
        max_voltage = 0
        max_cell = None
        if len(self.cells) == 0 and hasattr(self, 'cell_max_no'):
            return self.cell_max_no
        for c in range(min(len(self.cells), self.cell_count)):
            if self.cells[c].voltage is not None and max_voltage < self.cells[c].voltage:
                max_voltage = self.cells[c].voltage
                max_cell = c
        return max_cell

    def get_min_cell_desc(self):
        """Human-readable label ('C1', 'C2', ...) of the lowest cell, or None."""
        cell_no = self.get_min_cell()
        if cell_no is None:
            return None
        return 'C' + str(cell_no + 1)

    def get_max_cell_desc(self):
        """Human-readable label ('C1', 'C2', ...) of the highest cell, or None."""
        cell_no = self.get_max_cell()
        if cell_no is None:
            return None
        return 'C' + str(cell_no + 1)

    def get_min_cell_voltage(self):
        """Lowest cell voltage (sentinel 9999 when no cell data is present)."""
        min_voltage = 9999
        if len(self.cells) == 0 and hasattr(self, 'cell_min_voltage'):
            return self.cell_min_voltage
        for c in range(min(len(self.cells), self.cell_count)):
            if self.cells[c].voltage is not None and min_voltage > self.cells[c].voltage:
                min_voltage = self.cells[c].voltage
        return min_voltage

    def get_max_cell_voltage(self):
        """Highest cell voltage (sentinel 0 when no cell data is present)."""
        max_voltage = 0
        if len(self.cells) == 0 and hasattr(self, 'cell_max_voltage'):
            return self.cell_max_voltage
        for c in range(min(len(self.cells), self.cell_count)):
            if self.cells[c].voltage is not None and max_voltage < self.cells[c].voltage:
                max_voltage = self.cells[c].voltage
        return max_voltage

    def get_balancing(self):
        """Return 1 if any cell is currently balancing, else 0."""
        for c in range(min(len(self.cells), self.cell_count)):
            if self.cells[c].balance is not None and self.cells[c].balance:
                return 1
        return 0

    def get_temp(self):
        """Average of the available temperature sensors, or None if neither is set."""
        if self.temp1 is not None and self.temp2 is not None:
            return round((float(self.temp1) + float(self.temp2)) / 2, 2)
        if self.temp1 is not None and self.temp2 is None:
            return round(float(self.temp1), 2)
        if self.temp1 is None and self.temp2 is not None:
            return round(float(self.temp2), 2)
        else:
            return None

    def get_min_temp(self):
        """Lower of the two sensor readings, or the one available, or None."""
        if self.temp1 is not None and self.temp2 is not None:
            return min(self.temp1, self.temp2)
        if self.temp1 is not None and self.temp2 is None:
            return self.temp1
        if self.temp1 is None and self.temp2 is not None:
            return self.temp2
        else:
            return None

    def get_max_temp(self):
        """Higher of the two sensor readings, or the one available, or None."""
        if self.temp1 is not None and self.temp2 is not None:
            return max(self.temp1, self.temp2)
        if self.temp1 is not None and self.temp2 is None:
            return self.temp1
        if self.temp1 is None and self.temp2 is not None:
            return self.temp2
        else:
            return None
| 7,694 | 2,450 |
import numpy as np
from scipy.special import logsumexp
from scipy.optimize import minimize
from functools import partial
from dataclasses import dataclass, field
import matplotlib.pyplot as plt
@dataclass
class BindingDwelltimesBootstrap:
    """Bootstrap distributions for a binding dwelltime model.

    This class is stored in the `BindingDwelltime.bootstrap` attribute
    and should not be constructed manually.

    Attributes
    ----------
    _samples : np.ndarray
        array of optimized model parameters for each bootstrap sample pull; shape is
        [number of parameters, number of samples]
    """

    _samples: np.ndarray = field(default_factory=lambda: np.array([]), repr=False)

    def _sample_distributions(self, optimized, iterations):
        """Construct bootstrap distributions for parameters.

        For each iteration, a dataset is randomly selected (with replacement) with the same
        size as the data used to optimize the model. Model parameters are then optimized
        for this new sampled dataset.

        Parameters
        ----------
        optimized : BindingDwelltimes
            optimized model results
        iterations : int
            number of iterations (random samples) to use for the bootstrap
        """
        n_data = optimized.dwelltimes_sec.size
        self._samples = np.empty((optimized._parameters.size, iterations))
        for itr in range(iterations):
            sample = np.random.choice(optimized.dwelltimes_sec, size=n_data, replace=True)
            result = _kinetic_mle_optimize(
                optimized.n_components,
                sample,
                *optimized.observation_limits,
                initial_guess=optimized._parameters,
            )
            self._samples[:, itr] = result._parameters

    @property
    def n_samples(self):
        """Number of samples in the bootstrap."""
        return self._samples.shape[1]

    @property
    def n_components(self):
        """Number of components in the model."""
        # parameters are stacked as [amplitudes..., lifetimes...], so each
        # component contributes exactly two rows
        return int(self._samples.shape[0] / 2)

    @property
    def amplitude_distributions(self):
        """Array of sample optimized amplitude parameters; shape is
        [number of components, number of samples]"""
        return self._samples[: self.n_components]

    @property
    def lifetime_distributions(self):
        """Array of sample optimized lifetime parameters; shape is
        [number of components, number of samples]"""
        return self._samples[self.n_components :]

    def calculate_stats(self, key, component, alpha=0.05):
        """Calculate the mean and confidence intervals of the bootstrap distribution for a parameter.

        *NOTE*: the `100*(1-alpha)` % confidence intervals calculated here correspond to the
        `100*(alpha/2)` and `100*(1-(alpha/2))` quantiles of the distribution. For distributions
        which are not well approximated by a normal distribution these values are not reliable
        confidence intervals.

        Parameters
        ----------
        key : {'amplitude', 'lifetime'}
            name of the parameter to be analyzed
        component : int
            index of the component to be analyzed
        alpha : float
            confidence intervals are calculated as 100*(1-alpha)%

        Returns
        -------
        tuple
            (mean, (lower, upper)) of the bootstrap distribution

        Raises
        ------
        KeyError
            if `key` is not 'amplitude' or 'lifetime'
        """
        if key not in ("amplitude", "lifetime"):
            raise KeyError("key must be either 'amplitude' or 'lifetime'")
        data = getattr(self, f"{key}_distributions")[component]
        mean = np.mean(data)
        lower = np.quantile(data, alpha / 2)
        upper = np.quantile(data, 1 - (alpha / 2))
        return mean, (lower, upper)

    def plot(self, alpha=0.05, n_bins=25, hist_kwargs=None, span_kwargs=None, line_kwargs=None):
        """Plot the bootstrap distributions for the parameters of a model.

        Parameters
        ----------
        alpha : float
            confidence intervals are calculated as 100*(1-alpha)%
        n_bins : int
            number of bins in the histogram
        hist_kwargs : dict
            dictionary of plotting kwargs applied to histogram
        span_kwargs : dict
            dictionary of plotting kwargs applied to the patch indicating the area
            spanned by the confidence intervals
        line_kwargs : dict
            dictionary of plotting kwargs applied to the line indicating the
            distribution means
        """
        # None defaults avoid the shared-mutable-default-argument pitfall
        hist_kwargs = {"facecolor": "#c5c5c5", "edgecolor": "#888888", **(hist_kwargs or {})}
        span_kwargs = {"facecolor": "tab:red", "alpha": 0.3, **(span_kwargs or {})}
        line_kwargs = {"color": "k", **(line_kwargs or {})}

        def plot_axes(data, key, component, use_index):
            # histogram of the bootstrap samples with CI span and mean line
            plt.hist(data, bins=n_bins, **hist_kwargs)
            mean, (lower, upper) = self.calculate_stats(key, component, alpha)
            plt.axvspan(lower, upper, **span_kwargs)
            plt.axvline(mean, **line_kwargs)
            plt.xlabel(f"{key}" if key == "amplitude" else f"{key} (sec)")
            plt.ylabel("counts")
            label = "a" if key == "amplitude" else r"\tau"
            unit = "" if key == "amplitude" else "sec"
            prefix = fr"${label}_{component+1}$" if use_index else fr"${label}$"
            plt.title(f"{prefix} = {mean:0.2g} ({lower:0.2g}, {upper:0.2g}) {unit}")

        if self.n_components == 1:
            # a single-component fit has a fixed amplitude, so only the
            # lifetime distribution is informative
            data = self.lifetime_distributions.squeeze()
            plot_axes(data, "lifetime", 0, False)
        else:
            # BUG FIX: was `range(2)`, which silently dropped every component
            # beyond the second for models with n_components > 2
            for component in range(self.n_components):
                for column, key in enumerate(("amplitude", "lifetime")):
                    data = getattr(self, f"{key}_distributions")[component]
                    plt.subplot(self.n_components, 2, 2 * component + column + 1)
                    plot_axes(data, key, component, True)
        plt.tight_layout()
@dataclass(frozen=True)
class BindingDwelltimes:
    """Results of exponential mixture model optimization for binding dwelltimes.

    This class is returned from `_kinetic_mle_optimize()` and should not be
    constructed manually.

    Attributes
    ----------
    n_components : int
        number of components in the model.
    dwelltimes_sec : np.ndarray
        observations on which the model was trained.
    observation_limits : tuple
        tuple of (`min`, `max`) values of the experimental observation time.
    _parameters : np.ndarray
        optimized parameters in the order [amplitudes, lifetimes]
    log_likelihood : float
        log likelihood of the trained model
    bootstrap : BindingDwelltimesBootstrap
        object containing information about the bootstrapping analysis.
    """

    n_components: int
    dwelltimes_sec: np.ndarray = field(repr=False)
    observation_limits: list = field(repr=False)
    _parameters: np.ndarray = field(repr=False)
    log_likelihood: float
    # Although the dataclass is frozen, `bootstrap` points at a mutable
    # object, so calculate_bootstrap() can still populate it later.
    bootstrap: BindingDwelltimesBootstrap = field(
        default_factory=BindingDwelltimesBootstrap, init=False, repr=False
    )

    @property
    def amplitudes(self):
        """Fractional amplitude of each model component"""
        return self._parameters[: self.n_components]

    @property
    def lifetimes(self):
        """Lifetime parameter (in seconds) of each model component."""
        return self._parameters[self.n_components :]

    @property
    def aic(self):
        """Akaike Information Criterion."""
        # one amplitude is fixed by the unit-sum constraint used during
        # optimization, hence the -1
        k = (2 * self.n_components) - 1  # number of parameters
        return 2 * k - 2 * self.log_likelihood

    @property
    def bic(self):
        """Bayesian Information Criterion."""
        k = (2 * self.n_components) - 1  # number of parameters
        n = self.dwelltimes_sec.size  # number of observations
        return k * np.log(n) - 2 * self.log_likelihood

    def calculate_bootstrap(self, iterations=500):
        """Sample bootstrap distributions of the optimized model parameters.

        Parameters
        ----------
        iterations : int
            number of bootstrap samples to draw, by default 500
        """
        self.bootstrap._sample_distributions(self, iterations)

    def plot(
        self,
        n_bins=25,
        bin_spacing="linear",
        hist_kwargs={},
        component_kwargs={},
        fit_kwargs={},
        xscale=None,
        yscale=None,
    ):
        """Plot the dwelltime distribution histogram and overlayed model density.

        Parameters
        ----------
        n_bins : int
            number of bins in the histogram
        bin_spacing : {"log", "linear"}
            determines how bin edges are spaced apart
        hist_kwargs : dict
            dictionary of plotting kwargs applied to histogram
        component_kwargs : dict
            dictionary of plotting kwargs applied to the line plot for each component
        fit_kwargs : dict
            dictionary of plotting kwargs applied to line plot for the total fit
        xscale : {"log", "linear", None}
            scaling for the x-axis; when `None` default is "linear"
        yscale : {"log", "linear", None}
            scaling for the y-axis; when `None` default is same as `bin_spacing`
        """
        if bin_spacing == "log":
            scale = np.logspace
            limits = (np.log10(self.dwelltimes_sec.min()), np.log10(self.dwelltimes_sec.max()))
            xscale = "linear" if xscale is None else xscale
            yscale = "log" if yscale is None else yscale
        elif bin_spacing == "linear":
            scale = np.linspace
            limits = (self.dwelltimes_sec.min(), self.dwelltimes_sec.max())
            xscale = "linear" if xscale is None else xscale
            yscale = "linear" if yscale is None else yscale
        else:
            raise ValueError("spacing must be either 'log' or 'linear'")

        bins = scale(*limits, n_bins)
        centers = bins[:-1] + (bins[1:] - bins[:-1]) / 2

        # NOTE(review): the {} defaults are shared mutable objects; safe today
        # because they are only unpacked here, never mutated.
        hist_kwargs = {"facecolor": "#cdcdcd", "edgecolor": "#aaaaaa", **hist_kwargs}
        component_kwargs = {"marker": "o", "ms": 3, **component_kwargs}
        fit_kwargs = {"color": "k", **fit_kwargs}

        # model density at the bin centers, one row per mixture component
        components = np.exp(
            exponential_mixture_log_likelihood_components(
                self.amplitudes, self.lifetimes, centers, *self.observation_limits
            )
        )

        def label_maker(a, t, n):
            # legend label; the amplitude is omitted for single-component fits
            if self.n_components == 1:
                amplitude = ""
                lifetime_label = r"$\tau$"
            else:
                amplitude = f"($a_{n}$ = {a:0.2g}) "
                lifetime_label = fr"$\tau_{n}$"
            return f"{amplitude}{lifetime_label} = {t:0.2g} sec"

        # plot histogram
        density, _, _ = plt.hist(self.dwelltimes_sec, bins=bins, density=True, **hist_kwargs)

        # plot individual components
        for n in range(self.n_components):
            label = label_maker(self.amplitudes[n], self.lifetimes[n], n + 1)
            plt.plot(centers, components[n], label=label, **component_kwargs)

        # plot total fit
        label = r"$\ln \mathcal{L} $" + f"= {self.log_likelihood:0.3f}"
        plt.plot(centers, np.sum(components, axis=0), label=label, **fit_kwargs)

        # rearrange legend entries so that total fit is first
        legend_components = [[c[-1], *c[:-1]] for c in plt.gca().get_legend_handles_labels()]
        plt.legend(*legend_components, loc="upper right")

        # format axes
        plt.xscale(xscale)
        plt.yscale(yscale)
        if yscale == "log":
            # keep empty bins from collapsing the log-scaled y-axis
            ylim = (np.min(density[density != 0] * 0.5), np.max(density[density != 0] * 1.5))
            plt.ylim(ylim)
        plt.ylabel("density")
        plt.xlabel("dwelltime (sec)")
        plt.tight_layout()
def exponential_mixture_log_likelihood_components(
    amplitudes, lifetimes, t, min_observation_time, max_observation_time
):
    """Calculate each component of the log likelihood of an exponential mixture distribution.

    The full log likelihood for a single observation is given by:

        log(L) = log( sum_i( component_i ) )

    with the output of this function being log(component_i) defined as:

        log(component_i) = log(a_i) - log(N) + log(tau_i) - t/tau_i

    where a_i and tau_i are the amplitude and lifetime of component i and N is a
    normalization factor that takes into account the minimum and maximum
    observation times of the experiment:

        N = sum_i { a_i * [ exp(-t_min / tau_i) - exp(-t_max / tau_i) ] }

    The full log likelihood is therefore obtained from the returned array by
    applying logsumexp(output, axis=0), summing over the components.

    Parameters
    ----------
    amplitudes : array_like
        fractional amplitude parameters for each component
    lifetimes : array_like
        lifetime parameters for each component in seconds
    t : array_like
        dwelltime observations in seconds
    min_observation_time : float
        minimum observation time in seconds
    max_observation_time : float
        maximum observation time in seconds
    """
    # broadcast as (components, 1) against observations shaped (1, n_obs)
    a = np.reshape(amplitudes, (-1, 1))
    tau = np.reshape(lifetimes, (-1, 1))
    obs = np.reshape(t, (1, -1))

    # log of the truncation normalization N, accumulated in log space
    log_weights = np.log(a) + np.log(
        np.exp(-min_observation_time / tau) - np.exp(-max_observation_time / tau)
    )
    log_norm_factor = logsumexp(log_weights, axis=0)

    return np.log(a) - np.log(tau) - obs / tau - log_norm_factor
def exponential_mixture_log_likelihood(params, t, min_observation_time, max_observation_time):
    """Calculate the negative log likelihood of an exponential mixture distribution.

    The full log likelihood for a single observation is given by:

        log(L) = log( sum_i( exp( log(component_i) ) ) )

    where log(component_i) is output from `exponential_mixture_log_likelihood_components()`

    Parameters
    ----------
    params : array_like
        model parameters ordered as [amplitude1, ..., lifetime1, ...];
        reshaped internally to (2, n_components)
    t : array_like
        dwelltime observations in seconds
    min_observation_time : float
        minimum observation time in seconds
    max_observation_time : float
        maximum observation time in seconds

    Returns
    -------
    float
        the *negative* summed log likelihood, suitable for minimization
    """
    params = np.reshape(params, (2, -1))
    # row 0: amplitudes, row 1: lifetimes
    components = exponential_mixture_log_likelihood_components(
        params[0], params[1], t, min_observation_time, max_observation_time
    )
    # sum components in log space, then over observations; negated for scipy
    log_likelihood = logsumexp(components, axis=0)
    return -np.sum(log_likelihood)
def _kinetic_mle_optimize(
n_components, t, min_observation_time, max_observation_time, initial_guess=None
):
"""Calculate the maximum likelihood estimate of the model parameters given measured dwelltimes.
Parameters
----------
n_components : int
number of components in the mixture model
t : array_like
dwelltime observations in seconds
min_observation_time : float
minimum observation time in seconds
max_observation_time : float
maximum observation time in seconds
initial_guess : array_like
initial guess for the model parameters ordered as
[amplitude1, amplitude2, ..., lifetime1, lifetime2, ...]
"""
if np.any(np.logical_or(t < min_observation_time, t > max_observation_time)):
raise ValueError(
"some data is outside of the bounded region. Please choose"
"appropriate values for `min_observation_time` and/or `max_observation_time`."
)
cost_fun = partial(
exponential_mixture_log_likelihood,
t=t,
min_observation_time=min_observation_time,
max_observation_time=max_observation_time,
)
if initial_guess is None:
initial_guess_amplitudes = np.ones(n_components) / n_components
initial_guess_lifetimes = np.mean(t) * np.arange(1, n_components + 1)
initial_guess = np.hstack([initial_guess_amplitudes, initial_guess_lifetimes])
bounds = (
*[(np.finfo(float).eps, 1) for _ in range(n_components)],
*[(min_observation_time * 0.1, max_observation_time * 1.1) for _ in range(n_components)],
)
constraints = {"type": "eq", "fun": lambda x, n: 1 - sum(x[:n]), "args": [n_components]}
result = minimize(
cost_fun, initial_guess, method="SLSQP", bounds=bounds, constraints=constraints
)
return BindingDwelltimes(
n_components, t, (min_observation_time, max_observation_time), result.x, -result.fun
)
| 16,227 | 4,690 |
# Paint-coverage calculator (prompts and output are in Portuguese):
# reads the wall's width and length from stdin, then reports the wall
# area and the litres of paint needed, assuming one litre covers 2 m².
largura=float(input('digite qual a largura da sua parede:'))
comprimento=float(input('digite qual o comprimento da sua parede:'))
# litres required = area / 2 (coverage of 2 square metres per litre)
quantidade=((largura*comprimento)/2)
print('A area da sua parede é de',(largura*comprimento),'para pintar sua parede será necessario {} litros de tinta'.format(quantidade))
class Crab:
    """A single crab observation record."""

    # attribute order mirrors the constructor arguments; used by get_tuple()
    _FIELD_ORDER = (
        "id", "sex", "species", "color", "damage",
        "carapace", "mass", "epibiont", "molt",
    )

    def __init__(self, crab_id, sex, species, color, damage, carapace, mass, epibiont, molt):
        self.id = crab_id
        self.sex = sex
        self.species = species
        self.color = color
        self.damage = damage
        self.carapace = carapace
        self.mass = mass
        self.epibiont = epibiont
        self.molt = molt

    def get_tuple(self):
        """Return all attributes as a tuple, in constructor order."""
        return tuple(getattr(self, name) for name in self._FIELD_ORDER)
| 508 | 173 |
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import absolute_import, division, print_function, unicode_literals
import specs.fizz as fizz
import specs.fmt as fmt
import specs.folly as folly
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
"fbcode_builder steps to build & test LogDevice"
"""
Running this script from the command line on a dev-server:
1. Ensure you have the HTTP proxy configured in environment
2. These env items are not compatible with the scutil create call, so they
   must not be permanently exported.
git config --global http.proxy http://fwdproxy:8080
cd .../fbcode/logdevice/public_tld/build
HTTP_PROXY=http://fwdproxy:8080 HTTPS_PROXY=http://fwdproxy:8080 \
fbcode/opensource/fbcode_builder/facebook_make_legocastle_job.py \
| scutil create
Which outputs a legocastle job to stdout; to be fed into scutil create ...
"""
class FakeClangModule:
    """Force clang as the compiler for the whole build.

    fbcode_builder doesn't allow injecting build steps before dependencies
    are built, so this fake module runs before any real dependency and
    points the compiler environment variables at clang.
    """

    @staticmethod
    def fbcode_builder_spec(builder):
        compiler_env = (("CC", "clang-9"), ("CXX", "clang++-9"))
        return {
            "depends_on": [],
            "steps": [builder.set_env(var, value) for var, value in compiler_env],
        }
def fbcode_builder_spec(builder):
    """Return the fbcode_builder spec that builds & tests LogDevice.

    Pins mstch to its latest tag, enables a Python venv, disables LogDevice
    submodule builds, and configures folly as a shared library without tests
    or jemalloc.
    """
    # This API should change rarely, so build the latest tag instead of master.
    builder.add_option(
        "no1msd/mstch:git_hash", ShellQuoted("$(git describe --abbrev=0 --tags)")
    )
    builder.add_option("PYTHON_VENV", "ON")
    builder.add_option(
        "LogDevice/logdevice/_build:cmake_defines", {"BUILD_SUBMODULES": "OFF"}
    )
    builder.add_option(
        "facebook/folly:cmake_defines",
        {"BUILD_SHARED_LIBS": "ON", "BUILD_TESTS": "OFF", "FOLLY_USE_JEMALLOC": "OFF"},
    )
    return {
        "depends_on": [FakeClangModule, zstd, fmt, folly, fizz, wangle, sodium],
        "steps": [
            # This isn't a separate spec, since only fbthrift uses mstch.
            builder.github_project_workdir("no1msd/mstch", "build"),
            builder.cmake_install("no1msd/mstch"),
            builder.fb_github_cmake_install("fbthrift/thrift"),
            builder.fb_github_cmake_install(
                "LogDevice/logdevice/_build", github_org="facebookincubator"
            ),
        ],
    }


# Entry point consumed by fbcode_builder's job generators.
config = {
    "github_project": "facebookincubator/LogDevice",
    "fbcode_builder_spec": fbcode_builder_spec,
}
| 2,821 | 918 |
#!/usr/bin/env python
# coding: utf-8
"""
Em
--
Em is a terminal tool that prints FILE(s), or standard input to standard
output and highlights the expressions that are matched the PATTERN.
The expression will be highlighted iff the terminal is ANSI-compatible.
Em is Cool
``````````
.. code:: bash
$ tail -f /path/to/log | em "DEBUG|INFO" -f green | em "WARN"
Links
`````
* `documentation <http://em.readthedocs.org/>`_
* `source code <https://github.com/ikalnitsky/em>`_
"""
import os
import glob
import subprocess
from setuptools import setup, Command
install_requires = []

# argparse is in the stdlib from Python 2.7 / 3.2 onward; require the PyPI
# backport only when the import fails (i.e. on Python 2.6).
try:
    import argparse  # NOQA
except ImportError:
    install_requires.append('argparse')
class LocaleUpdate(Command):
    """`setup.py locale_update`: regenerate gettext translation templates.

    Extracts translatable strings from em/__init__.py into em/locale/em.pot
    via `xgettext`, then merges that template into every language's .po file
    via `msgmerge`. Requires the GNU gettext tools on PATH.
    """

    user_options = []

    def initialize_options(self):
        # required by the distutils/setuptools Command interface; no options
        pass

    def finalize_options(self):
        pass

    def run(self):
        root = os.path.dirname(__file__)
        src = os.path.join(root, 'em', '__init__.py')
        pot = os.path.join(root, 'em', 'locale', 'em.pot')
        pos = glob.glob(os.path.join(
            root, 'em', 'locale', '*', 'LC_MESSAGES', 'em.po'))

        # update .pot file
        subprocess.call(['xgettext', src, '--output', pot])

        # update .po files from .pot
        for po in pos:
            subprocess.call(['msgmerge', '--update', '--backup=off', po, pot])
class LocaleCompile(Command):
    """`setup.py locale_compile`: compile .po catalogs into binary .mo files.

    Runs `msgfmt` (GNU gettext) on every em/locale/*/LC_MESSAGES/em.po so the
    translations can be loaded at runtime.
    """

    user_options = []

    def initialize_options(self):
        # required by the distutils/setuptools Command interface; no options
        pass

    def finalize_options(self):
        pass

    def run(self):
        root = os.path.dirname(__file__)
        pos = glob.glob(os.path.join(
            root, 'em', 'locale', '*', 'LC_MESSAGES', 'em.po'))

        # compile .po files to .mo
        for po in pos:
            mo = '{0}.mo'.format(os.path.splitext(po)[0])
            subprocess.call(['msgfmt', po, '--output-file', mo])
# Package metadata and entry points for the `em` distribution.
setup(
    name='em',
    version='0.4.0',
    url='https://github.com/ikalnitsky/em',
    license='BSD',
    author='Igor Kalnitsky',
    author_email='igor@kalnitsky.org',
    description="Highlight some PATTERN in terminal's STDOUT",
    # reuse the module docstring at the top of this file as the long description
    long_description=__doc__,
    include_package_data=True,
    packages=[
        'em',
        'em.tests',
    ],
    install_requires=install_requires,
    test_suite='em.tests',
    # installs the `em` console command, dispatching to em.main()
    entry_points={
        'console_scripts': ['em = em:main'],
    },
    classifiers=[
        'Topic :: Utilities',
        'Environment :: Console',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'License :: OSI Approved :: BSD License',
    ],
    platforms=['Linux', 'MacOS', 'Unix'],
    # add custom commands to manage locale files
    cmdclass={
        'locale_update': LocaleUpdate,
        'locale_compile': LocaleCompile,
    },
)
| 3,040 | 998 |
import json
import os
import random
import string
import requests
from keep_alive import keep_alive
from nltk.sentiment.vader import SentimentIntensityAnalyzer
import discord
client = discord.Client()
# Canned encouragements; one is chosen at random when the sentiment
# analyzer scores a message as predominantly negative.
starter_motivator = [
    "Cheer Up!",
    "Always remember, I am here for you!",
    "You are a great person. Remember this!",
    "Think positive man! There is always a bright side!",
    "What about you watching a funny video to swing the mood?",
]
def get_quote():
    """Fetch a random quote from zenquotes.io, formatted as `text` -author."""
    response = requests.get("https://zenquotes.io/api/random")
    data = json.loads(response.text)
    return f"`{data[0]['q']}`" + " -" + data[0]["a"]
@client.event
async def on_ready():
    """Log a confirmation once the bot has connected to Discord."""
    print(f"Logged in as {client.user}")
@client.event
async def on_message(message):
    """Dispatch bot commands and react to negative message sentiment."""
    # never respond to our own messages
    if message.author == client.user:
        return

    msg = message.content.lower()

    if msg.startswith(("$hello", "$hi", "$hey")):
        await message.channel.send(
            "Hello there! Nice to see you !!\nHow are you feeling?"
        )

    if msg.startswith("$motivate"):
        await message.channel.send(get_quote())

    if msg.startswith("$help"):
        await message.channel.send(
            "This is bot help.\nCommands:\n*` $hey, $hello, $hi `:- Bot responds.\n*` $motivate `:- Generates motivating quotes.\n*` $help `:- Bot help."
        )

    # strip punctuation, then score the whole message's sentiment
    cleaned_text = msg.translate(str.maketrans("", "", string.punctuation))
    scores = SentimentIntensityAnalyzer().polarity_scores(cleaned_text)
    if scores["neg"] > scores["pos"]:
        await message.channel.send(
            "I am sensing `Negative Sentiment` from you.\n"
            + f"`{random.choice(starter_motivator)}`"
        )
# Start the keep-alive web server, then connect the bot with the token
# supplied via the TOKEN environment variable (blocks until shutdown).
keep_alive()
client.run(os.environ["TOKEN"])
| 1,883 | 627 |
import cv2, os
import numpy as np
import csv
import glob
# Extract contour-area features from each cell image into csv/dataset.csv.
# Each row: label, area_1, ..., area_5 (missing contours padded with "0").
# FIXES vs the original: the "Parasitized" rows previously ended with a
# trailing comma while "Uninfected" rows did not (inconsistent CSV); the
# output file was never closed; the bare `except` is now a targeted
# IndexError; contours were redrawn once per contour; and the per-image
# cv2.waitKey(19000) (pointless without an imshow window) was dropped.


def _append_features(label, csv_path="csv/dataset.csv", num_areas=5):
    """Append one feature row per image of `label` to the dataset CSV.

    Parameters
    ----------
    label : str
        sub-directory of cell_images/ to scan; also written as the row label
    csv_path : str
        CSV file the rows are appended to
    num_areas : int
        number of contour areas recorded per image (missing ones become "0")
    """
    with open(csv_path, "a") as out:
        for img_path in glob.glob("cell_images/" + label + "/*.png"):
            im = cv2.imread(img_path)
            im = cv2.GaussianBlur(im, (5, 5), 2)
            im_gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
            ret, thresh = cv2.threshold(im_gray, 127, 255, 0)
            contours, _ = cv2.findContours(thresh, 1, 2)
            # draw all contours in a single pass
            cv2.drawContours(im_gray, contours, -1, (0, 255, 0), 3)

            fields = [label]
            for i in range(num_areas):
                try:
                    fields.append(str(cv2.contourArea(contours[i])))
                except IndexError:
                    # fewer than num_areas contours in this image
                    fields.append("0")
            out.write(",".join(fields) + "\n")


_append_features("Parasitized")
_append_features("Uninfected")
import os
import numpy as np
def load_mnist(path='mnist'):
    """Load the raw MNIST idx files from ./data/<path>.

    The four idx files are read as binary, decoded to float64 arrays, and the
    training set is shuffled (images and labels reuse the same seed so the
    pairs stay aligned).

    Parameters
    ----------
    path : str
        sub-directory of ./data containing the idx files

    Returns
    -------
    tuple
        (trX, trY, teX, teY): training images (60000, 28, 28, 1), training
        labels (60000,), test images (10000, 28, 28, 1), test labels (10000,)
    """
    data_dir = os.path.join("./data", path)

    def _read_idx(name, header_bytes, shape):
        # idx files are binary: open with 'rb' (the original used the default
        # text mode) and close the handle deterministically; skip the fixed
        # header before reshaping.
        with open(os.path.join(data_dir, name), 'rb') as fd:
            raw = np.fromfile(file=fd, dtype=np.uint8)
        # np.float64 replaces the deprecated (and now removed) np.float alias
        return raw[header_bytes:].reshape(shape).astype(np.float64)

    trX = _read_idx('train-images-idx3-ubyte', 16, (60000, 28, 28, 1))
    trY = _read_idx('train-labels-idx1-ubyte', 8, (60000,))
    teX = _read_idx('t10k-images-idx3-ubyte', 16, (10000, 28, 28, 1))
    teY = _read_idx('t10k-labels-idx1-ubyte', 8, (10000,))

    # shuffle images and labels with the same seed so pairs stay aligned
    seed = 200
    np.random.seed(seed)
    np.random.shuffle(trX)
    np.random.seed(seed)
    np.random.shuffle(trY)
    return (trX, trY, teX, teY)
from __future__ import unicode_literals
from django.shortcuts import render
from django.http import HttpResponse
def iichome(request):
    """Render the IIC home page."""
    template_name = 'iic/home.html'
    return render(request, template_name)
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import seaborn as sn
from utils import convert, iou
def average_pairwise_IOU(IOU_mat):
    """Mean of the off-diagonal entries of a pairwise IoU matrix.

    Assumes the diagonal (self-IoU) entries are 1, so subtracting `n` from
    both the sum and the element count removes them from the average.
    """
    n = IOU_mat.shape[0]
    off_diagonal_total = np.sum(IOU_mat) - n
    off_diagonal_count = np.size(IOU_mat) - n
    return off_diagonal_total / off_diagonal_count
def group_IOU_matrices(paths):
    """Per-annotator agreement scores for a group of XML survey files.

    Each annotator's boxes are matched against the best-overlapping box from
    all *other* annotators combined.

    Parameters
    ----------
    paths : list of str
        paths to the per-annotator XML annotation files

    Returns
    -------
    binary_IOUs : list of float
        per annotator, the fraction of boxes whose best-match IoU is >= 0.5
    IOUs : list of float
        per annotator, the mean best-match IoU over their boxes
    """
    # (removed an unused `survey_names` computation from the original)
    surveys = [convert.xml2df(p) for p in paths]
    binary_IOUs = []
    IOUs = []
    for i, s_i in enumerate(surveys):
        # IoUs of annotator i's boxes against every other annotator's boxes
        iou_i = [iou.all_ious_np(s_i, s_j) for j, s_j in enumerate(surveys) if j != i]
        iou_i = np.concatenate(iou_i, axis=1)  # columns: all other annotators
        iou_max_i = np.max(iou_i, axis=1)  # best match per box
        binary_IOUs.append(np.mean(iou_max_i >= 0.5))
        IOUs.append(np.mean(iou_max_i))
    return binary_IOUs, IOUs
if __name__ == '__main__':
    import os
    import sys

    # Directory of per-annotator XML surveys, given on the command line.
    survey_dir = sys.argv[1]
    paths = [os.path.join(survey_dir, path) for path in os.listdir(survey_dir)]
    surveys = [convert.xml2df(p) for p in paths]

    print('\nANALYSIS OF {}'.format(os.path.basename(survey_dir)), '\n')

    # Per-annotator annotation counts plus the pooled total.
    print(' NO. OF ANNOTATIONS')
    print(' ------------------')
    for survey, path in zip(surveys, paths):
        print(' ', os.path.basename(path).replace('.xml', '') + ':', len(survey))
    total_survey = convert.dfs2df(surveys)
    print(' ____________')
    print(' TOTAL :', len(total_survey))
    print('\n')

    # Mean best-match IoU per annotator (against all other annotators).
    group_binary_IOUs, group_IOUs = group_IOU_matrices(paths)
    print(' MEAN IoU')
    print(' --------')
    for i, path in enumerate(paths):
        print(' ', os.path.basename(path).replace('.xml', '') + ':', np.round(group_IOUs[i], 4))
    print(' ____________')
    print(' MEAN :', np.round(np.mean(group_IOUs), 4))
    print('\n')

    # Same, but thresholded: an IoU >= 0.5 counts as full agreement.
    print('\n MEAN BINARY IoU (IoU treated as 1 if above 0.5)')
    print(' -----------------------------------------------')
    for i, path in enumerate(paths):
        print(' ', os.path.basename(path).replace('.xml', '') + ':', np.round(group_binary_IOUs[i], 4))
    print(' ____________')
    print(' MEAN :', np.round(np.mean(group_binary_IOUs), 4))
    print('\n')
| 2,210 | 847 |
from unittest import TestCase
class TestDebugger(TestCase):
def test_execute(self):
# self.fail()
pass
def test_parse_instr(self):
from zlua_prototype.debugger import _parse_instr
assert _parse_instr('f ')==('f','') | 257 | 81 |
"""
Compares spectrogram computations with TensorFlow and Vesper.
As of 2018-11-09, Vesper is a little more than three times faster than
TensorFlow at computing spectrograms with a DFT size of 128.
"""
import functools
import time
import numpy as np
import tensorflow as tf
import vesper.util.data_windows as data_windows
import vesper.util.time_frequency_analysis_utils as tfa_utils
# When True, run a tiny 8-sample case and print the grams instead of
# benchmarking a long signal.
SHOW_SPECTROGRAMS = False

SAMPLE_RATE = 24000  # Hertz
AMPLITUDE = 1
FREQUENCY = 3000  # Hertz
DURATION = 1000  # seconds
WINDOW_SIZE = .005  # seconds
HOP_SIZE = .5  # fraction of window size

# Tiny parameters so the printed spectrograms are human-readable.
if SHOW_SPECTROGRAMS:
    SAMPLE_RATE = 1
    FREQUENCY = .25
    DURATION = 8
    WINDOW_SIZE = 8
    HOP_SIZE = 1
def main():
    """Generate a test tone and time both spectrogram implementations."""
    waveform = create_waveform()

    # convert the window/hop settings from seconds to samples
    window_size = int(round(WINDOW_SIZE * SAMPLE_RATE))
    print('Window size is {} samples.'.format(window_size))
    hop_size = int(round(window_size * HOP_SIZE))
    print('Hop size is {} samples.'.format(hop_size))

    gram = compute_tensorflow_spectrogram(waveform, window_size, hop_size)
    if SHOW_SPECTROGRAMS:
        print(gram)

    gram = compute_vesper_spectrogram(waveform, window_size, hop_size)
    if SHOW_SPECTROGRAMS:
        print(gram)
def create_waveform():
    """Return a cosine test tone built from the module-level settings
    (AMPLITUDE, FREQUENCY, DURATION, SAMPLE_RATE)."""
    n_samples = int(round(DURATION * SAMPLE_RATE))
    print('Waveform length is {} samples.'.format(n_samples))
    angular_step = 2 * np.pi * FREQUENCY / SAMPLE_RATE
    return AMPLITUDE * np.cos(angular_step * np.arange(n_samples))
def compute_tensorflow_spectrogram(waveform, window_size, hop_size):
    """Compute a power spectrogram with TensorFlow and report timing.

    Uses the TF 1.x graph/session API (tf.placeholder, tf.Session), so it
    requires TensorFlow 1.x or v1 compatibility mode.
    """
    waveform_ = tf.placeholder(tf.float32)
    # periodic Hann window to match the analysis configuration
    window_fn = functools.partial(tf.signal.hann_window, periodic=True)
    stft = tf.signal.stft(
        waveform_, window_size, hop_size, window_fn=window_fn)
    # power spectrum: |STFT|^2 computed as stft * conj(stft)
    gram = tf.real(stft * tf.conj(stft))
    with tf.Session() as sess:
        print('Computing TensorFlow spectrogram...')
        start_time = time.time()
        g = sess.run(gram, feed_dict={waveform_: waveform})
        end_time = time.time()
        print('Done.')
    report_performance(g, start_time, end_time)
    return g
def report_performance(gram, start_time, end_time):
    """Print the spectra count, per-spectrum time, and real-time speedup."""
    num_spectra = len(gram)
    elapsed = end_time - start_time
    print('Computed {} spectra in {:.1f} seconds.'.format(num_spectra, elapsed))
    per_spectrum_micros = int(round(1000000 * elapsed / num_spectra))
    realtime_factor = DURATION / elapsed
    print((
        "That's {} microseconds per spectrum, or {} times faster than "
        "real time.").format(per_spectrum_micros, realtime_factor))
def compute_vesper_spectrogram(waveform, window_size, hop_size):
    """Compute a spectrogram with Vesper's utilities and report timing."""
    # same periodic Hann window family as the TensorFlow path
    window = data_windows.create_window('Hann', window_size).samples
    print('Computing Vesper spectrogram...')
    start_time = time.time()
    gram = tfa_utils.compute_spectrogram(waveform, window, hop_size)
    end_time = time.time()
    print('Done.')
    report_performance(gram, start_time, end_time)
    return gram
# Run the benchmark when executed as a script.
if __name__ == '__main__':
    main()
| 3,033 | 1,106 |
import time
def main(request, response):
    """web-platform-tests handler: wait 100 ms, then answer "FAIL" as text."""
    time.sleep(0.1)
    headers = [("Content-Type", "text/plain")]
    return headers, "FAIL"
import os
from django.conf import settings
import django
def configure_settings():
    """Configure a minimal Django environment for exercising the npm app."""
    npm_path = os.environ.get('NPM_EXECUTABLE_PATH', 'npm')
    dummy_cache = {'BACKEND': 'django.core.cache.backends.dummy.DummyCache'}
    settings.configure(
        DEBUG=True,
        INSTALLED_APPS=['npm'],
        NPM_EXECUTABLE_PATH=npm_path,
        CACHES={'default': dummy_cache},
    )
    django.setup()
from opentrons import protocol_api
import json
import os
import math
# metadata
# Opentrons protocol metadata (read by the app / robot server).
metadata = {
    'protocolName': 'V1 S14 Station A MagMax',
    'author': 'Nick <protocols@opentrons.com>',
    'source': 'Custom Protocol Request',
    'apiLevel': '2.4'
}

NUM_SAMPLES = 64  # total samples; laid out 8 per column on the 96-well plate
SAMPLE_VOLUME = 100  # µl transferred per source column (doubled for an unpaired last column)
TIP_TRACK = False  # if True, resume the tip count from /data/A/tip_log.json
def run(ctx: protocol_api.ProtocolContext):
    """Pool pairs of sample columns into the right half of a 96-deepwell plate.

    Loads the deepwell plate and one 200 µl filter tiprack, optionally resumes
    a persisted tip count, pools source columns two-at-a-time into destination
    columns (offset by 8), then persists the tip count for the next run.
    """
    # load labware
    dest_plate = ctx.load_labware(
        'nest_96_wellplate_2ml_deep', '2', '96-deepwell sample plate')
    tipracks300 = [ctx.load_labware('opentrons_96_filtertiprack_200ul', '1',
                                    '200µl filter tiprack')]

    # load pipette
    m300 = ctx.load_instrument(
        'p300_multi_gen2', 'right', tip_racks=tipracks300)

    # tip-usage bookkeeping, persisted to disk so an interrupted run can resume
    tip_log = {'count': {}}
    folder_path = '/data/A'
    tip_file_path = folder_path + '/tip_log.json'
    if TIP_TRACK and not ctx.is_simulating():
        # NOTE(review): if TIP_TRACK is True but the log file is missing,
        # tip_log['count'][m300] is never initialized — confirm intended.
        if os.path.isfile(tip_file_path):
            with open(tip_file_path) as json_file:
                data = json.load(json_file)
                if 'tips1000' in data:
                    tip_log['count'][m300] = data['tips1000']
                else:
                    tip_log['count'][m300] = 0
    else:
        tip_log['count'] = {m300: 0}

    # flat list of pickable tips (front row of each rack) and per-pipette caps
    tip_log['tips'] = {
        m300: [tip for rack in tipracks300 for tip in rack.rows()[0]]
    }
    tip_log['max'] = {
        pip: len(tip_log['tips'][pip])
        for pip in [m300]
    }

    def pick_up(pip):
        # pick the next tracked tip; pause for replacement when exhausted
        nonlocal tip_log
        if tip_log['count'][pip] == tip_log['max'][pip]:
            ctx.pause('Replace ' + str(pip.max_volume) + 'µl tipracks before \
resuming.')
            pip.reset_tipracks()
            tip_log['count'][pip] = 0
        pip.pick_up_tip(tip_log['tips'][pip][tip_log['count'][pip]])
        tip_log['count'][pip] += 1

    # pool samples
    num_cols = math.ceil(NUM_SAMPLES/8)
    for i in range(math.ceil(num_cols/2)):
        if num_cols % 2 != 0 and i == math.ceil(num_cols/2) - 1:
            # odd column count: the leftover source column is pooled alone
            # at double volume
            pool_source_set = [dest_plate.rows()[0][num_cols]]
            vol = SAMPLE_VOLUME*2
        else:
            pool_source_set = dest_plate.rows()[0][i*2:i*2+2]
            vol = SAMPLE_VOLUME
        for s in pool_source_set:
            pick_up(m300)
            # air gap after aspiration guards against dripping in transit
            m300.transfer(vol, s, dest_plate.rows()[0][i+8], air_gap=20,
                          new_tip='never')
            m300.air_gap(20)
            m300.drop_tip()

    ctx.comment('Move deepwell plate (slot 2) to Station B for RNA \
extraction.')

    # track final used tip
    if not ctx.is_simulating():
        if not os.path.isdir(folder_path):
            os.mkdir(folder_path)
        data = {'tips1000': tip_log['count'][m300]}
        with open(tip_file_path, 'w') as outfile:
            json.dump(data, outfile)
| 2,770 | 1,061 |
import logging
import numpy as np
from .efish_ephys_repro import EfishEphys
class Baseline(EfishEphys):
    """Represents the run of the Baseline repro of the efish plugin-set."""

    _repro_name = "BaselineActivity"

    def __init__(self, repro_run, traces, relacs_nix_version=1.1) -> None:
        super().__init__(repro_run, traces, relacs_nix_version)

    @property
    def baseline_rate(self):
        """Baseline spike rate.

        Returns
        -------
        float
            The average spike rate.
        """
        return len(self.spikes()) / self.duration

    @property
    def baseline_cv(self):
        """Coefficient of variation of the baseline interspike intervals.

        The CV is the standard deviation of the interspike intervals
        normalized to the mean interspike interval and describes the
        regularity of the spontaneous spiking: 0 indicates perfectly regular
        spiking while 1 is typical for random Poisson spiking. Depends on
        the spike times being stored in the file.

        Returns
        -------
        float
            The coefficient of variation, or 0.0 if there are no spikes.
        """
        spikes = self.spikes()
        if spikes is None or len(spikes) == 0:
            # logging.warn is a deprecated alias of logging.warning
            logging.warning("There are no baseline spikes")
            return 0.0
        isis = np.diff(spikes)
        return np.std(isis) / np.mean(isis)

    @property
    def eod_frequency(self):
        """The EOD frequency (in Hz) of the fish.

        Depends on the eod times event signal being present in the file.

        Returns
        -------
        float or None
            The eod frequency in Hz, None if the eod times are not stored
            in the file.
        """
        if "eod times" not in self._signal_trace_map:
            logging.warning("EOD times are not stored in the file. You need to detect the eod times manually... ")
            return None
        # NOTE(review): this divides by `self._duration` while baseline_rate
        # uses `self.duration` — confirm which the base class provides.
        return len(self.eod_times()) / self._duration

    def serial_correlation(self, max_lags=50):
        """Serial correlation of the baseline interspike intervals.

        Parameters
        ----------
        max_lags : int, optional
            The number of lags to be calculated, by default 50

        Returns
        -------
        np.ndarray or None
            The serial correlations from lag 0 to max_lags - 1, or None when
            there are fewer spikes than requested lags.
        """
        # fetch once instead of calling self.spikes() three times
        spikes = self.spikes()
        if spikes is None or len(spikes) < max_lags:
            return None
        isis = np.diff(spikes)
        unbiased = isis - np.mean(isis, 0)
        norm = sum(unbiased ** 2)
        # normalized autocorrelation; keep only non-negative lags
        a_corr = np.correlate(unbiased, unbiased, "same") / norm
        a_corr = a_corr[int(len(a_corr) / 2):]
        return a_corr[:max_lags]
# Generated by Django 3.2.3 on 2021-05-17 15:31
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration for the blogs app: creates the Blogs
    # and Comment tables.  Do not hand-edit an applied migration; add a new
    # migration instead.

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Blogs',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('author', models.CharField(blank=True, max_length=100)),
                ('pub_date', models.DateField()),
                ('text', models.TextField()),
                ('link1', models.URLField(blank=True)),
                ('link2', models.URLField(blank=True)),
            ],
        ),
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=150)),
                ('comment', models.TextField()),
                # Deleting a blog cascades to its comments.
                ('blog', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blogs.blogs')),
            ],
        ),
    ]
| 1,258 | 362 |
# Reuters-21578 dataset downloader and parser
#
# Author: Eustache Diemert <eustache@diemert.fr>
# http://scikit-learn.org/stable/auto_examples/applications/plot_out_of_core_classification.html
#
# Modified by @herrfz, get pandas DataFrame from the orig SGML
# License: BSD 3 clause
from __future__ import print_function
import re
import os.path
import fnmatch
import sgmllib
import urllib
import tarfile
import itertools
from pandas import DataFrame
###############################################################################
# Reuters Dataset related routines
###############################################################################
def _not_in_sphinx():
# Hack to detect whether we are running by the sphinx builder
return '__file__' in globals()
class ReutersParser(sgmllib.SGMLParser):
    """Utility class to parse a SGML file and yield documents one at a time.

    Feed file chunks via :meth:`parse`; each completed <REUTERS> document is
    yielded as a dict with 'title', 'body' and 'topics' keys.
    """

    def __init__(self, verbose=0):
        sgmllib.SGMLParser.__init__(self, verbose)
        self._reset()

    def _reset(self):
        # Flags tracking which element is currently open, plus accumulators
        # for the document being built.
        self.in_title = 0
        self.in_body = 0
        self.in_topics = 0
        self.in_topic_d = 0
        self.title = ""
        self.body = ""
        self.topics = []
        self.topic_d = ""

    def parse(self, fd):
        """Generator: feed *fd* chunk by chunk, yielding finished documents."""
        self.docs = []
        try:
            for chunk in fd:
                self.feed(chunk)
                for doc in self.docs:
                    yield doc
                self.docs = []
        except Exception:
            # Best-effort parsing: a malformed chunk aborts the remainder of
            # this file.  Narrowed from a bare ``except:`` so that
            # KeyboardInterrupt/SystemExit propagate instead of being
            # silently swallowed.
            pass
        self.close()

    def handle_data(self, data):
        # Route character data to whichever element is currently open.
        if self.in_body:
            self.body += data
        elif self.in_title:
            self.title += data
        elif self.in_topic_d:
            self.topic_d += data

    # --- element handlers invoked by SGMLParser ---

    def start_reuters(self, attributes):
        pass

    def end_reuters(self):
        # Collapse all whitespace runs, then emit the finished document.
        self.body = re.sub(r'\s+', r' ', self.body)
        self.docs.append({'title': self.title,
                          'body': self.body,
                          'topics': self.topics})
        self._reset()

    def start_title(self, attributes):
        self.in_title = 1

    def end_title(self):
        self.in_title = 0

    def start_body(self, attributes):
        self.in_body = 1

    def end_body(self):
        self.in_body = 0

    def start_topics(self, attributes):
        self.in_topics = 1

    def end_topics(self):
        self.in_topics = 0

    def start_d(self, attributes):
        self.in_topic_d = 1

    def end_d(self):
        self.in_topic_d = 0
        self.topics.append(self.topic_d)
        self.topic_d = ""
class ReutersStreamReader():
    """Iterate over documents of the Reuters dataset.

    The Reuters archive will automatically be downloaded and uncompressed if
    the `data_path` directory does not exist.

    Documents are represented as dictionaries with 'body' (str),
    'title' (str), 'topics' (list(str)) keys.

    NOTE(review): uses ``urllib.urlretrieve`` (and the ``sgmllib``-based
    parser), i.e. this class only runs under Python 2.
    """

    DOWNLOAD_URL = ('http://archive.ics.uci.edu/ml/machine-learning-databases/'
                    'reuters21578-mld/reuters21578.tar.gz')
    ARCHIVE_FILENAME = 'reuters21578.tar.gz'

    def __init__(self, data_path):
        self.data_path = data_path
        if not os.path.exists(self.data_path):
            self.download_dataset()

    def download_dataset(self):
        """Download the dataset."""
        print("downloading dataset (once and for all) into %s" %
              self.data_path)
        os.mkdir(self.data_path)

        def progress(blocknum, bs, size):
            # urlretrieve reporthook: print cumulative download progress.
            total_sz_mb = '%.2f MB' % (size / 1e6)
            current_sz_mb = '%.2f MB' % ((blocknum * bs) / 1e6)
            if _not_in_sphinx():
                print('\rdownloaded %s / %s' % (current_sz_mb, total_sz_mb),
                      end='')
        urllib.urlretrieve(self.DOWNLOAD_URL,
                           filename=os.path.join(self.data_path,
                                                 self.ARCHIVE_FILENAME),
                           reporthook=progress)
        if _not_in_sphinx():
            print('\r', end='')
        print("untaring data ...")
        tfile = tarfile.open(os.path.join(self.data_path,
                                          self.ARCHIVE_FILENAME),
                             'r:gz')
        tfile.extractall(self.data_path)
        print("done !")

    def iterdocs(self):
        """Iterate doc by doc, yield a dict."""
        for root, _dirnames, filenames in os.walk(self.data_path):
            for filename in fnmatch.filter(filenames, '*.sgm'):
                path = os.path.join(root, filename)
                parser = ReutersParser()
                for doc in parser.parse(open(path)):
                    yield doc
def get_minibatch(doc_iter, size):
    """Extract a minibatch of examples, return a DataFrame with columns
    'text' and 'tags'.

    Note: size is before excluding invalid docs with no topics assigned.
    """
    data = [('{title}\n\n{body}'.format(**doc), doc['topics'])
            for doc in itertools.islice(doc_iter, size)
            if doc['topics']]
    # Always build the frame with the same column schema -- previously the
    # empty case returned ``DataFrame([])`` without 'text'/'tags' columns,
    # which breaks downstream column access.
    return DataFrame(data, columns=['text', 'tags'])
| 5,101 | 1,599 |
import numpy as np
import pandas as pd
from scipy.stats import expon, uniform
import sys
sys.path.append('../../well_mixed')
from well_mixed_death_clock import (WellMixedSimulator,
WellMixedSimulationData, exponential_ccm, uniform_ccm,
base_rate_death_signal)
# Exponential cell cycle model
tG1 = 50  # G1 duration parameter (used as the exponential ccm scale below)
tG2 = 50  # G2 duration passed to the simulator
# Constant base rate death signal
f = base_rate_death_signal
base_rate = 1
# Death threshold as a function of the dimensionless parameter eta.
Tdeath_fun = lambda eta: eta * base_rate * tG1
# Simulation parameters
tstart = 0
tend = np.inf  # run until max_cell_count (or extinction) rather than a fixed time
max_cell_count = 1000
initial_cell_count = 64
num_eta = 10   # number of eta grid points in the parameter sweeps
num_iter = 100  # repeats per parameter combination
# Arguments to f and ccm
f_args = (base_rate,)
ccm_args = (tG1,)
# Helper function
def run_g1_truncation_exponential_simulation(eta, seed=None):
    """Run one well-mixed simulation with the exponential cell cycle model.

    Parameters
    ----------
    eta : float
        Dimensionless death-threshold parameter; Tdeath = eta * base_rate * tG1.
    seed : int, optional
        Simulation seed.  seed + 1 seeds a separate RandomState for the
        initial conditions to avoid correlations with the simulation itself.

    Returns
    -------
    WellMixedSimulationData
        The processed simulation data.
    """
    # Idiomatic identity test (was ``not seed is None``).
    if seed is not None:
        random_state = np.random.RandomState(seed + 1)
    else:
        random_state = None
    ccm = exponential_ccm
    Tdeath = Tdeath_fun(eta)
    # Initialise simulator
    simulator = WellMixedSimulator(f, ccm, Tdeath, tG2, tstart, tend,
            f_args, ccm_args, max_cell_count)
    # Generate initial conditions: all cells born at t = 0, exponentially
    # distributed G1 durations, one clone label per cell.
    tau_0 = np.zeros(initial_cell_count)
    tbirth_0 = np.zeros(initial_cell_count)
    tG1_0 = expon.rvs(scale=tG1, size=initial_cell_count, random_state=random_state)
    clone_0 = np.arange(initial_cell_count)
    # Run simulation
    data = simulator.run(tau_0, tbirth_0, tG1_0, clone_0, seed=seed)
    # Return processed data
    return WellMixedSimulationData(data)
if __name__ == '__main__':
    # Exponential ccm parameter sweep
    etas = np.arange(4 / num_eta, 4 + 4 / num_eta, 4 / num_eta)
    # Generate parameters: num_iter repeats of every eta value.
    eta_data = []
    for eta in etas:
        for i in range(num_iter):
            eta_data.append(eta)
    # If initial seed is given as command-line arguments, create seeds in
    # increments of 2 to avoid correlations between simulations because seed +
    # 1 is used for initial conditions.
    if len(sys.argv) == 2:
        initial_seed = int(sys.argv[1])
        seed_data = np.arange(initial_seed, initial_seed + 2 * len(eta_data), 2)
    else:
        seed_data = [None] * len(eta_data)
    # Run simulations and postprocess data
    status_data = []
    final_timestep_data = []
    final_cell_count_data = []
    num_divisions_data = []
    num_deaths_data = []
    average_time_in_G1_data = []
    effective_g1_sample_size_data = []
    for eta, seed in zip(eta_data, seed_data):
        sim_data = run_g1_truncation_exponential_simulation(eta, seed)
        status = sim_data.get_status()
        t_events = sim_data.get_t_events()
        cell_count = sim_data.get_cell_count()
        num_divisions = sim_data.get_num_divisions()
        num_deaths = sim_data.get_num_deaths()
        effective_time_in_G1 = sim_data.get_effective_time_in_G1()
        # NOTE(review): non-zero status appears to mark a truncated final
        # event, hence the second-to-last event time -- confirm against
        # WellMixedSimulationData.
        if status == 0:
            final_timestep = t_events[-1]
        else:
            final_timestep = t_events[-2]
        final_cell_count = cell_count[-1]
        average_time_in_G1 = np.mean(effective_time_in_G1)
        effective_g1_sample_size = len(effective_time_in_G1)
        status_data.append(status)
        final_timestep_data.append(final_timestep)
        final_cell_count_data.append(final_cell_count)
        num_divisions_data.append(num_divisions)
        num_deaths_data.append(num_deaths)
        average_time_in_G1_data.append(average_time_in_G1)
        effective_g1_sample_size_data.append(effective_g1_sample_size)
    # Create and write dataframe
    df = pd.DataFrame({
        'eta' : eta_data,
        'seed' : seed_data,
        'status' : status_data,
        'final_timestep' : final_timestep_data,
        'final_cell_count' : final_cell_count_data,
        'num_divisions' : num_divisions_data,
        'num_deaths' : num_deaths_data,
        'average_time_in_G1' : average_time_in_G1_data,
        'effective_g1_sample_size' : effective_g1_sample_size_data,
    })
    df.to_csv('exponential-effective-g1-duration-data.csv', index_label='simulation_id')
# Uniform ccm
r_fun = lambda alpha: 2 * alpha * tG1
# Helper function
def run_g1_truncation_uniform_simulation(alpha, eta, seed=None):
    """Run one well-mixed simulation with the uniform cell cycle model.

    Parameters
    ----------
    alpha : float
        Width parameter of the uniform G1 distribution; support width
        r = 2 * alpha * tG1.
    eta : float
        Dimensionless death-threshold parameter; Tdeath = eta * base_rate * tG1.
    seed : int, optional
        Simulation seed.  seed + 1 seeds a separate RandomState for the
        initial conditions to avoid correlations with the simulation itself.

    Returns
    -------
    WellMixedSimulationData
        The processed simulation data.
    """
    # Idiomatic identity test (was ``not seed is None``).
    if seed is not None:
        random_state = np.random.RandomState(seed + 1)
    else:
        random_state = None
    ccm = uniform_ccm
    r = r_fun(alpha)
    Tdeath = Tdeath_fun(eta)
    ccm_args = (tG1,r)
    # Initialise simulator
    simulator = WellMixedSimulator(f, ccm, Tdeath, tG2, tstart, tend,
            f_args, ccm_args, max_cell_count)
    # Generate initial conditions: G1 durations uniform on
    # [tG1 - r/2, tG1 + r/2], all cells born at t = 0.
    tau_0 = np.zeros(initial_cell_count)
    tbirth_0 = np.zeros(initial_cell_count)
    tG1_0 = uniform.rvs(loc=tG1 - 0.5 * r, scale=r, size=initial_cell_count,
            random_state=random_state)
    clone_0 = np.arange(initial_cell_count)
    # Run simulation
    data = simulator.run(tau_0, tbirth_0, tG1_0, clone_0, seed=seed)
    # Return processed data
    return WellMixedSimulationData(data)
if __name__ == '__main__':
    # Uniform ccm parameter sweep
    alphas = [0.3, 0.5, 0.7, 1.0]
    etas = np.arange(2 / num_eta, 2 + 2 / num_eta, 2 / num_eta)
    # Generate parameters: num_iter repeats of every (alpha, eta) pair.
    alpha_data = []
    eta_data = []
    for alpha in alphas:
        for eta in etas:
            for i in range(num_iter):
                alpha_data.append(alpha)
                eta_data.append(eta)
    # If initial seed is given as command-line arguments, create seeds in
    # increments of 2 to avoid correlations between simulations because seed +
    # 1 is used for initial conditions.
    if len(sys.argv) == 2:
        initial_seed = int(sys.argv[1])
        seed_data = np.arange(initial_seed, initial_seed + 2 * len(eta_data), 2)
    else:
        seed_data = [None] * len(eta_data)
    # Run simulations and postprocess data
    status_data = []
    final_timestep_data = []
    final_cell_count_data = []
    num_divisions_data = []
    num_deaths_data = []
    average_time_in_G1_data = []
    effective_g1_sample_size_data = []
    for alpha, eta, seed in zip(alpha_data, eta_data, seed_data):
        sim_data = run_g1_truncation_uniform_simulation(alpha, eta, seed)
        status = sim_data.get_status()
        t_events = sim_data.get_t_events()
        cell_count = sim_data.get_cell_count()
        num_divisions = sim_data.get_num_divisions()
        num_deaths = sim_data.get_num_deaths()
        effective_time_in_G1 = sim_data.get_effective_time_in_G1()
        # NOTE(review): same status convention as the exponential sweep above.
        if status == 0:
            final_timestep = t_events[-1]
        else:
            final_timestep = t_events[-2]
        final_cell_count = cell_count[-1]
        average_time_in_G1 = np.mean(effective_time_in_G1)
        effective_g1_sample_size = len(effective_time_in_G1)
        status_data.append(status)
        final_timestep_data.append(final_timestep)
        final_cell_count_data.append(final_cell_count)
        num_divisions_data.append(num_divisions)
        num_deaths_data.append(num_deaths)
        average_time_in_G1_data.append(average_time_in_G1)
        effective_g1_sample_size_data.append(effective_g1_sample_size)
    # Create and write dataframe
    df = pd.DataFrame({
        'alpha' : alpha_data,
        'eta' : eta_data,
        'seed' : seed_data,
        'status' : status_data,
        'final_timestep' : final_timestep_data,
        'final_cell_count' : final_cell_count_data,
        'num_divisions' : num_divisions_data,
        'num_deaths' : num_deaths_data,
        'average_time_in_G1' : average_time_in_G1_data,
        'effective_g1_sample_size' : effective_g1_sample_size_data,
    })
    df.to_csv('uniform-effective-g1-duration-data.csv', index_label='simulation_id')
| 7,908 | 2,873 |
class Professor:
    """A RateMyProfessors professor record.

    Parameters
    ----------
    ratemyprof_id : int
        The professor's RateMyProfessors identifier.
    first_name, last_name : str
        Name parts; ``name`` is the space-joined full name.
    num_of_ratings : int
        Number of ratings submitted for this professor.
    overall_rating :
        The average rating; coerced to float.  Ignored (set to 0.0) when
        there are no ratings.
    """

    def __init__(self, ratemyprof_id: int, first_name: str, last_name: str, num_of_ratings: int, overall_rating):
        self.ratemyprof_id = ratemyprof_id
        self.name = f"{first_name} {last_name}"
        self.first_name = first_name
        self.last_name = last_name
        self.num_of_ratings = num_of_ratings
        # Always store a float so the attribute type is consistent
        # (previously the no-ratings branch stored the int 0).
        if self.num_of_ratings < 1:
            self.overall_rating = 0.0
        else:
            self.overall_rating = float(overall_rating)
| 485 | 169 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# 导入SQLite驱动:
# Import the SQLite driver:
import sqlite3

# Connect to the SQLite database.  The database file is hello.db; if it does
# not exist it is created automatically in the current directory.
conn = sqlite3.connect('hello.db')
# Create a Cursor:
cursor = conn.cursor()
# Drop the table idempotently: a plain ``drop table user`` raises
# OperationalError on a fresh database where the table does not exist yet.
cursor.execute('drop table if exists user')
# Execute a SQL statement creating the user table:
cursor.execute('create table user (id varchar(20) primary key, name varchar(20))')
# Insert two records using parameterized SQL (no manual quote escaping):
cursor.execute('insert into user (id, name) values (?, ?)', ('1', 'Michael'))
cursor.execute('insert into user (id, name) values (?, ?)', ('2', 'Jackson'))
# rowcount reports the number of rows affected by the last statement:
print(cursor.rowcount)
# Query in three ways -- all rows, a limited batch, and a single row:
print(cursor.execute('select * from user').fetchall())
print(cursor.execute('select * from user').fetchmany(size=1))
print(cursor.execute('select * from user').fetchone())
# Close the Cursor:
cursor.close()
# Commit the transaction:
conn.commit()
# Close the Connection:
conn.close()
# imports
import requests, json
# beautifulsoup4
from bs4 import BeautifulSoup
def searchDisplay(username):
    """Fetch an Instagram profile page and print basic account details.

    Parameters
    ----------
    username : str
        The Instagram handle to look up.
    """
    # base url for the data
    url = 'https://www.instagram.com/{}/'.format(username)
    try:
        req = requests.get(url).content
        soup = BeautifulSoup(req, "html.parser")
        row = soup.find_all('script')
        # NOTE(review): relies on the JSON-LD blob being the 4th <script>
        # tag and on this exact prefix length -- brittle against any page
        # layout change.
        details = str(row[3]).strip("<script type=></")[22:].strip()
        account = json.loads(details)
        try:
            if len(account['description']) < 1:
                account['description'] = ""
        except (KeyError, TypeError):
            # Missing or unsized description: normalise to empty string.
            account['description'] = ""
        print("Name : ", account['name'], '\t', "Username : ", account['alternateName'],
              '\t', "Followers : ", account['mainEntityofPage']['interactionStatistic']['userInteractionCount'], '\n',
              "Bio : ", account['description'])
    except Exception:
        # Narrowed from a bare ``except:`` so Ctrl-C still interrupts.
        print('Not found or no internet connection')
def getDetails(username):
    """Fetch an Instagram profile page and return the parsed JSON-LD blob.

    Parameters
    ----------
    username : str
        The Instagram handle to look up.

    Returns
    -------
    dict
        The account metadata, or an empty dict on any failure.
    """
    url = 'https://www.instagram.com/{}/'.format(username)
    try:
        req = requests.get(url).content
        soup = BeautifulSoup(req, "html.parser")
        row = soup.find_all('script')
        # NOTE(review): assumes the JSON-LD payload is the 4th <script> tag.
        details = row[3].text
        account = json.loads(details)
        return account
    except Exception:
        # Narrowed from a bare ``except:`` so Ctrl-C still interrupts.
        print('Not found or no internet connection')
        return {}
| 1,324 | 399 |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for jax_ops.py."""
import functools
import itertools
import unittest
from absl.testing import absltest
from absl.testing import parameterized
import numpy as np
import jax.numpy as jnp
import jax
from jax.config import config
config.update("jax_enable_x64", True)
from fast_soft_sort import jax_ops
# Parameter grids swept by the parameterized tests below.
GAMMAS = (0.1, 1, 10.0)
DIRECTIONS = ("ASCENDING", "DESCENDING")
REGULARIZERS = ("l2", )
class JaxOpsTest(parameterized.TestCase):
    """Finite-difference checks of the autodiff gradients of soft rank/sort."""

    def _test(self, func, regularization_strength, direction, regularization):
        # Scalar loss: sum of squared soft values of the operator under test.
        def loss_func(values):
            soft_values = func(values,
                               regularization_strength=regularization_strength,
                               direction=direction,
                               regularization=regularization)
            return jnp.sum(soft_values ** 2)
        rng = np.random.RandomState(0)
        values = jnp.array(rng.randn(5, 10))
        # Random unit direction for the directional derivative.
        mat = jnp.array(rng.randn(5, 10))
        unitmat = mat / np.sqrt(np.vdot(mat, mat))
        eps = 1e-5
        # Central finite difference of the loss along unitmat ...
        numerical = (loss_func(values + 0.5 * eps * unitmat) -
                     loss_func(values - 0.5 * eps * unitmat)) / eps
        # ... must match the autodiff gradient projected onto unitmat.
        autodiff = jnp.vdot(jax.grad(loss_func)(values), unitmat)
        np.testing.assert_almost_equal(numerical, autodiff)

    @parameterized.parameters(itertools.product(GAMMAS, DIRECTIONS, REGULARIZERS))
    def test_soft_rank(self, regularization_strength, direction, regularization):
        self._test(jax_ops.soft_rank,
                   regularization_strength, direction, regularization)

    @parameterized.parameters(itertools.product(GAMMAS, DIRECTIONS, REGULARIZERS))
    def test_soft_sort(self, regularization_strength, direction, regularization):
        self._test(jax_ops.soft_sort,
                   regularization_strength, direction, regularization)
if __name__ == "__main__":
absltest.main()
| 2,369 | 785 |
# Generated by Django 2.2.5 on 2019-09-25 14:30
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds the primaryField choice field to
    # accounts.UserData.  Do not hand-edit an applied migration.

    dependencies = [
        ('accounts', '0003_auto_20190629_1623'),
    ]

    operations = [
        migrations.AddField(
            model_name='userdata',
            name='primaryField',
            field=models.CharField(choices=[('CATEGORIES', 'Categories'), ('LOCATIONS', 'Locations'), ('SOURCES', 'Sources')], default='CATEGORIES', max_length=10),
        ),
    ]
| 507 | 183 |
# Do not edit this file directly.
# It was auto-generated by: code/programs/reflexivity/reflexive_refresh
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def poppy():
    """Registers the pinned Poppy source archive as an external repository."""

    # Pinned revision; keep sha256 and strip_prefix in sync when bumping.
    commit = "9643a498f015a7ade7693ff4b9fb976588f93dc6"
    http_archive(
        name = "Poppy",
        build_file = "//bazel/deps/Poppy:build.BUILD",
        sha256 = "905921192994f6243efc8dc15691135c0486720b49011fc35473b6ea7635e9f3",
        strip_prefix = "Poppy-" + commit,
        urls = [
            "https://github.com/Unilang/Poppy/archive/" + commit + ".tar.gz",
        ],
    )
| 580 | 294 |
# Auto-generated IronPython stub for RhinoCommon's Rectangle3d struct.
# Method bodies are placeholders (``pass``); docstrings mirror the .NET API
# signatures.  Do not add logic here -- the real implementation lives in the
# RhinoCommon assembly.
class Rectangle3d(object,IEpsilonComparable[Rectangle3d]):
    """
    Rectangle3d(plane: Plane,width: float,height: float)
    Rectangle3d(plane: Plane,width: Interval,height: Interval)
    Rectangle3d(plane: Plane,cornerA: Point3d,cornerB: Point3d)
    """
    def ClosestPoint(self,point,includeInterior=None):
        """
        ClosestPoint(self: Rectangle3d,point: Point3d,includeInterior: bool) -> Point3d
        ClosestPoint(self: Rectangle3d,point: Point3d) -> Point3d
        """
        pass
    def Contains(self,*__args):
        """
        Contains(self: Rectangle3d,x: float,y: float) -> PointContainment
        Contains(self: Rectangle3d,pt: Point3d) -> PointContainment
        """
        pass
    def Corner(self,index):
        """ Corner(self: Rectangle3d,index: int) -> Point3d """
        pass
    @staticmethod
    def CreateFromPolyline(polyline,deviation=None,angleDeviation=None):
        """
        CreateFromPolyline(polyline: IEnumerable[Point3d]) -> (Rectangle3d,float,float)
        CreateFromPolyline(polyline: IEnumerable[Point3d]) -> Rectangle3d
        """
        pass
    def EpsilonEquals(self,other,epsilon):
        """ EpsilonEquals(self: Rectangle3d,other: Rectangle3d,epsilon: float) -> bool """
        pass
    def MakeIncreasing(self):
        """ MakeIncreasing(self: Rectangle3d) """
        pass
    def PointAt(self,*__args):
        """
        PointAt(self: Rectangle3d,t: float) -> Point3d
        PointAt(self: Rectangle3d,x: float,y: float) -> Point3d
        """
        pass
    def RecenterPlane(self,*__args):
        """ RecenterPlane(self: Rectangle3d,origin: Point3d)RecenterPlane(self: Rectangle3d,index: int) """
        pass
    def ToNurbsCurve(self):
        """ ToNurbsCurve(self: Rectangle3d) -> NurbsCurve """
        pass
    def ToPolyline(self):
        """ ToPolyline(self: Rectangle3d) -> Polyline """
        pass
    def Transform(self,xform):
        """ Transform(self: Rectangle3d,xform: Transform) -> bool """
        pass
    def __init__(self,*args):
        """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
        pass
    @staticmethod
    def __new__(self,plane,*__args):
        """
        __new__[Rectangle3d]() -> Rectangle3d
        __new__(cls: type,plane: Plane,width: float,height: float)
        __new__(cls: type,plane: Plane,width: Interval,height: Interval)
        __new__(cls: type,plane: Plane,cornerA: Point3d,cornerB: Point3d)
        """
        pass
    def __reduce_ex__(self,*args):
        pass
    def __repr__(self,*args):
        """ __repr__(self: object) -> str """
        pass
    def __str__(self,*args):
        pass
    # Property stubs: the lambda triplets are placeholder get/set/del slots.
    Area=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Area(self: Rectangle3d) -> float
"""
    BoundingBox=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: BoundingBox(self: Rectangle3d) -> BoundingBox
"""
    Center=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Center(self: Rectangle3d) -> Point3d
"""
    Circumference=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Circumference(self: Rectangle3d) -> float
"""
    Height=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Height(self: Rectangle3d) -> float
"""
    IsValid=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: IsValid(self: Rectangle3d) -> bool
"""
    Plane=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Plane(self: Rectangle3d) -> Plane
Set: Plane(self: Rectangle3d)=value
"""
    Width=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Width(self: Rectangle3d) -> float
"""
    X=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: X(self: Rectangle3d) -> Interval
Set: X(self: Rectangle3d)=value
"""
    Y=property(lambda self: object(),lambda self,v: None,lambda self: None)
    """Get: Y(self: Rectangle3d) -> Interval
Set: Y(self: Rectangle3d)=value
"""
    Unset=None
| 4,023 | 1,523 |
# Package initializer: explicitly import the submodules so they are
# available as attributes of the package.
from . import atommapping
from . import echo | 99 | 25 |
import os
import re
import pymongo
import pandas as pd
import numpy as np
import streamlit as st
from bokeh.plotting import figure
from bokeh.palettes import Set1_9, Set3_12, Inferno256
@st.cache(suppress_st_warning=True, allow_output_mutation=True)
def get_caudales():
    """Function to obtain the rivers basin flows from MongoDB Atlas.

    Returns:
        DataFrame: Pandas DataFrame with the query result, or None when the
        query fails (the error is shown in the Streamlit app instead).
    """
    client = pymongo.MongoClient(os.environ['MONGO'])
    try:
        # st.spinner is a context manager; calling it without ``with`` shows
        # nothing, so wrap the slow query in it.
        with st.spinner("Obteniendo los datos de caudales..."):
            collection_name = client['publicaciones-especiales']['cuencas-datos-hidraulicos']
            project={
                '_id': 0,
                'fecha': 1,
                'situacionCuencaComahue': {
                    'Caudal Collon Cura': 1,
                    'Caudal Neuquen': 1,
                    'Caudal Limay': 1,
                    'Caudal Río Negro': 1,
                    'Caudal Limay despues desembocadura de Collon Cura': 1
                },
                'situacionYacyretaSaltoGrande': {
                    'Caudal Río Uruguay': 1,
                    'Caudal Río Paraná': 1
                },
                'situacionCuencaPatagonica': {
                    'Caudal Río Chubut': 1,
                    'Caudal Río Futaleufu': 1
                },
                'situacionCuencaRioGrande': {
                    'Caudal Río Grande': 1
                },
                'situacionCuencaRioSanJuan': {
                    'Caudal Inicial Río San Juan': 1,
                    'Caudal Final Río San Juan': 1
                }
            }
            df = pd.DataFrame(collection_name.find(projection=project))
            return df
    except Exception as e:
        st.error(f'Opps, algo fallo\n{e}')
    finally:
        client.close()
@st.cache(suppress_st_warning=True, allow_output_mutation=True)
def get_cotas():
    """Function to obtain the rivers basin levels from MongoDB Atlas.

    Returns:
        DataFrame: Pandas DataFrame with the query result, or None when the
        query fails (the error is shown in the Streamlit app instead).
    """
    client = pymongo.MongoClient(os.environ['MONGO'])
    try:
        # st.spinner is a context manager; calling it without ``with`` shows
        # nothing, so wrap the slow query in it.
        with st.spinner("Obteniendo los datos de cotas..."):
            collection_name = client['publicaciones-especiales']['cuencas-datos-hidraulicos']
            project={
                '_id': 0,
                'fecha': 1,
                'situacionCuencaComahue': {
                    'Cota Hoy Alicura': 1,
                    'Cota Min Alicura': 1,
                    'Cota Max Alicura': 1,
                    'Cota Hoy Mari Menuco': 1,
                    'Cota Min Mari Menuco': 1,
                    'Cota Max Mari Menuco': 1,
                    'Cota Hoy Piedra del Aguila': 1,
                    'Cota Min Piedra del Aguila': 1,
                    'Cota Max Piedra del Aguila': 1,
                    'Cota Hoy Planicie Banderita Barreales': 1,
                    'Cota Min Planicie Banderita Barreales': 1,
                    'Cota Max Planicie Banderita Barreales': 1,
                    'Cota Hoy Arroyito': 1,
                    'Cota Min Arroyito': 1,
                    'Cota Max Arroyito': 1,
                    'Cota Hoy El Chocon': 1,
                    'Cota Min El Chocon': 1,
                    'Cota Max El Chocon': 1,
                    # NOTE(review): this nested spec targets the dotted field
                    # 'Cota Hoy P.P.Leufu' expected by cotas() below --
                    # confirm it actually projects that field.
                    'Cota Hoy P': {
                        'P': {
                            'Leufu': 1
                        }
                    }
                },
                'situacionYacyretaSaltoGrande': {
                    'Cota Hoy Yacyreta': 1,
                    'Cota Min Yacyreta': 1,
                    'Cota Max Yacyreta': 1,
                    'Cota Hoy Salto Grande': 1,
                    'Cota Min Salto Grande': 1,
                    'Cota Max Salto Grande': 1
                },
                'situacionCuencaPatagonica': {
                    'Cota Hoy Futaleufu': 1,
                    'Cota Min Futaleufu': 1,
                    'Cota Max Futaleufu': 1,
                    'Cota Hoy Ameghino': 1,
                    'Cota Min Ameghino': 1,
                    'Cota Max Ameghino': 1
                },
                'situacionCuencaRioGrande': {
                    'Cota Hoy Río Grande': 1,
                    'Cota Min Río Grande': 1,
                    'Cota Max Río Grande': 1
                },
                'situacionCuencaRioSanJuan': {
                    'Cota Hoy Quebrada de Ullum': 1,
                    'Cota Min Quebrada de Ullum': 1,
                    'Cota Max Quebrada de Ullum': 1,
                    # NOTE(review): 'Cota Hoy Los Caracole' looks like a typo
                    # for 'Cota Hoy Los Caracoles' -- verify the field name
                    # in the collection before changing it.
                    'Cota Hoy Los Caracole': 1,
                    'Cota Min Los Caracoles': 1,
                    'Cota Max Los Caracoles': 1,
                    'Cota Hoy Punta Negra': 1,
                    'Cota Min Punta Negra': 1,
                    'Cota Max Punta Negra': 1
                }
            }
            df = pd.DataFrame(collection_name.find(projection=project))
            return df
    except Exception as e:
        st.error(f'Opps, algo fallo\n{e}')
    finally:
        client.close()
@st.cache(suppress_st_warning=True, allow_output_mutation=True)
def get_turbinado():
    """Function to obtain the rivers basin turbinate from MongoDB Atlas.

    Returns:
        DataFrame: Pandas DataFrame with the query result, or None when the
        query fails (the error is shown in the Streamlit app instead).
    """
    client = pymongo.MongoClient(os.environ['MONGO'])
    try:
        # st.spinner is a context manager; calling it without ``with`` shows
        # nothing, so wrap the slow query in it.
        with st.spinner("Obteniendo los datos de turbinado..."):
            collection_name = client['publicaciones-especiales']['cuencas-datos-hidraulicos']
            project={
                '_id': 0,
                'fecha': 1,
                'situacionCuencaComahue': {
                    'Turbinado Alicura': 1,
                    'Turbinado Piedra del Aguila': 1,
                    'Turbinado Arroyito': 1,
                    'Turbinado El Chocon': 1,
                    'Turbinado Mari Menuco': 1,
                    # NOTE(review): nested spec for the dotted field
                    # 'Turbinado P.P.Leufu' expected by turbinado() below --
                    # confirm it projects correctly.
                    'Turbinado P': {
                        'P': {
                            'Leufu': 1
                        }
                    }
                },
                'situacionYacyretaSaltoGrande': {
                    'Turbinado Salto Grande': 1,
                    'Turbinado Yacyreta': 1
                },
                'situacionCuencaPatagonica': {
                    'Turbinado Futaleufu': 1,
                    'Turbinado Ameghino': 1
                },
                'situacionCuencaRioGrande': {
                    'Turbinado Río Grande': 1
                },
                'situacionCuencaRioSanJuan': {
                    'Turbinado Punta Negra': 1,
                    'Turbinado Ullum': 1,
                    'Turbinado Los Caracoles': 1,
                    'Turbinado Quebrada de Ullum': 1
                }
            }
            df = pd.DataFrame(collection_name.find(projection=project))
            return df
    except Exception as e:
        st.error(f'Opps, algo fallo\n{e}')
    finally:
        client.close()
@st.cache(suppress_st_warning=True, allow_output_mutation=True)
def get_vertido():
    """Function to obtain the rivers basin discharge from MongoDB Atlas.

    Returns:
        DataFrame: Pandas DataFrame with the query result, or None when the
        query fails (the error is shown in the Streamlit app instead).
    """
    client = pymongo.MongoClient(os.environ['MONGO'])
    try:
        # st.spinner is a context manager; calling it without ``with`` shows
        # nothing, so wrap the slow query in it.  Message fixed: it said
        # "turbinado" (copy-paste from get_turbinado) for the vertido query.
        with st.spinner("Obteniendo los datos de vertido..."):
            collection_name = client['publicaciones-especiales']['cuencas-datos-hidraulicos']
            project={
                '_id': 0,
                'fecha': 1,
                'situacionCuencaComahue': {
                    'Vertido El Chañar': 1,
                    'Vertido Arroyito': 1,
                    'Vertido Piedra del Aguila': 1,
                    # NOTE(review): nested spec presumably targets the dotted
                    # field 'Vertido P.P.Leufu' -- confirm.
                    'Vertido P': {
                        'P': {
                            'Leufu': 1
                        }
                    }
                },
                'situacionYacyretaSaltoGrande': {
                    'Vertido Salto Grande': 1,
                    'Vertido Yacyreta': 1
                },
                'situacionCuencaPatagonica': {
                    'Vertido Futaleufu': 1,
                    'Vertido Ameghino': 1
                },
                'situacionCuencaRioGrande': {
                    'Bombeo Río Grande': 1
                },
                'situacionCuencaRioSanJuan': {
                    'Vertido Punta Negra': 1,
                    'Vertido Los Caracoles': 1,
                    'Vertido Quebrada de Ullum': 1
                }
            }
            df = pd.DataFrame(collection_name.find(projection=project))
            return df
    except Exception as e:
        st.error(f'Opps, algo fallo\n{e}')
    finally:
        client.close()
def caudales():
    """Get the rivers basin flows and process this data.

    Returns:
        Figure: Bokeh plotting figure.
        DataFrame: Pandas DataFrame with the query result.
    """
    df = get_caudales()
    # Flatten the per-basin sub-documents into one wide frame keyed on fecha.
    df = pd.concat([
        df['fecha'],
        pd.json_normalize(df['situacionCuencaComahue']),
        pd.json_normalize(df['situacionYacyretaSaltoGrande']),
        pd.json_normalize(df['situacionCuencaPatagonica']),
        pd.json_normalize(df['situacionCuencaRioGrande']),
        pd.json_normalize(df['situacionCuencaRioSanJuan'])
    ], axis=1, join="inner")
    # Prefix each column with its basin for display.
    df.rename(columns={
        "fecha": "Fecha",
        "Caudal Collon Cura": "Cuenca Comahue - Caudal Collon Cura",
        "Caudal Neuquen": "Cuenca Comahue - Caudal Neuquen",
        "Caudal Limay": "Cuenca Comahue - Caudal Limay",
        "Caudal Río Negro": "Cuenca Comahue - Caudal Río Negro",
        "Caudal Limay despues desembocadura de Collon Cura": "Cuenca Comahue - Caudal Limay despues desembocadura de Collon Cura",
        "Caudal Río Uruguay": "Yacyreta Salto Grande - Caudal Río Uruguay",
        "Caudal Río Paraná": "Yacyreta Salto Grande - Caudal Río Paraná",
        "Caudal Río Chubut": "Cuenca Patagónica - Caudal Río Chubut",
        "Caudal Río Futaleufu": "Cuenca Patagónica - Caudal Río Futaleufu",
        "Caudal Río Grande": "Cuenca Río Grande - Caudal Río Grande",
        "Caudal Inicial Río San Juan": "Cuenca Río San Juan - Caudal Inicial Río San Juan",
        "Caudal Final Río San Juan": "Cuenca Río San Juan - Caudal Final Río San Juan"
    }, inplace=True)
    df['Fecha'] = pd.to_datetime(df['Fecha'], format='%Y/%m/%d').dt.date
    df = df.drop_duplicates().sort_values('Fecha', ascending=False).reset_index(drop=True)
    # Zeros are treated as missing readings.
    df = df.replace(0, np.nan)
    p = figure(x_axis_type="datetime", title="Caudales cuencas", sizing_mode="stretch_both")
    p.grid.grid_line_alpha=0.3
    p.xaxis.axis_label = 'Fecha'
    p.yaxis.axis_label = 'Caudal [m\u00b3/s]'
    # NOTE(review): no glyphs are added here, so setting legend.location on
    # an empty figure has no visible effect -- confirm the caller adds lines.
    p.legend.location = "top_left"
    return p, df
def cotas():
    """Get the rivers basin levels and process this data.

    Returns:
        Figure: Bokeh plotting figure.
        DataFrame: Pandas DataFrame with the query result.
    """
    df = get_cotas()
    # Flatten the per-basin sub-documents into one wide frame keyed on fecha.
    df = pd.concat([
        df['fecha'],
        pd.json_normalize(df['situacionCuencaComahue']),
        pd.json_normalize(df['situacionYacyretaSaltoGrande']),
        pd.json_normalize(df['situacionCuencaPatagonica']),
        pd.json_normalize(df['situacionCuencaRioGrande']),
        pd.json_normalize(df['situacionCuencaRioSanJuan'])
    ], axis=1, join="inner")
    # Prefix each column with its basin for display.
    # NOTE(review): 'Cuenca Comahue - Piedra del Aguil' below looks like a
    # truncated label ("Aguila") -- display-only, but confirm.
    df.rename(columns={
        'fecha': 'Fecha',
        'Cota Hoy Alicura': 'Cuenca Comahue - Alicura',
        'Cota Min Alicura': 'Cuenca Comahue - Min Alicura',
        'Cota Max Alicura': 'Cuenca Comahue - Max Alicura',
        'Cota Hoy Piedra del Aguila': 'Cuenca Comahue - Piedra del Aguil',
        'Cota Min Piedra del Aguila': 'Cuenca Comahue - Min Piedra del Aguila',
        'Cota Max Piedra del Aguila': 'Cuenca Comahue - Max Piedra del Aguila',
        'Cota Hoy Arroyito': 'Cuenca Comahue - Arroyito',
        'Cota Min Arroyito': 'Cuenca Comahue - Min Arroyito',
        'Cota Max Arroyito': 'Cuenca Comahue - Max Arroyito',
        'Cota Hoy Mari Menuco': 'Cuenca Comahue - Mari Menuco',
        'Cota Min Mari Menuco': 'Cuenca Comahue - Min Mari Menuco',
        'Cota Max Mari Menuco': 'Cuenca Comahue - Max Mari Menuco',
        'Cota Hoy Planicie Banderita Barreales': 'Cuenca Comahue - Planicie Banderita Barreales',
        'Cota Min Planicie Banderita Barreales': 'Cuenca Comahue - Min Planicie Banderita Barreales',
        'Cota Max Planicie Banderita Barreales': 'Cuenca Comahue - Max Planicie Banderita Barreales',
        'Cota Hoy El Chocon': 'Cuenca Comahue - El Chocon',
        'Cota Min El Chocon': 'Cuenca Comahue - Min El Chocon',
        'Cota Max El Chocon': 'Cuenca Comahue - Max El Chocon',
        'Cota Hoy P.P.Leufu': 'Cuenca Comahue - Leufu',
        'Cota Hoy Yacyreta': 'Cuenca Yacyreta - Yacyreta',
        'Cota Min Yacyreta': 'Cuenca Yacyreta - Min Yacyreta',
        'Cota Max Yacyreta': 'Cuenca Yacyreta - Max Yacyreta',
        'Cota Hoy Salto Grande': 'Cuenca Yacyreta - Salto Grande',
        'Cota Min Salto Grande': 'Cuenca Yacyreta - Min Salto Grande',
        'Cota Max Salto Grande': 'Cuenca Yacyreta - Max Salto Grande',
        'Cota Hoy Futaleufu': 'Cuenca Patagónica - Futaleufu',
        'Cota Min Futaleufu': 'Cuenca Patagónica - Min Futaleufu',
        'Cota Max Futaleufu': 'Cuenca Patagónica - Max Futaleufu',
        'Cota Hoy Ameghino': 'Cuenca Patagónica - Ameghino',
        'Cota Min Ameghino': 'Cuenca Patagónica - Min Ameghino',
        'Cota Max Ameghino': 'Cuenca Patagónica - Max Ameghino',
        'Cota Hoy Río Grande': 'Cuenca Río Grande - Río Grande',
        'Cota Min Río Grande': 'Cuenca Río Grande - Min Río Grande',
        'Cota Max Río Grande': 'Cuenca Río Grande - Max Río Grande',
        'Cota Hoy Quebrada de Ullum': 'Cuenca Río San Juan - Quebrada de Ullum',
        'Cota Min Quebrada de Ullum': 'Cuenca Río San Juan - Min Quebrada de Ullum',
        'Cota Max Quebrada de Ullum': 'Cuenca Río San Juan - Max Quebrada de Ullum',
        'Cota Hoy Punta Negra': 'Cuenca Río San Juan - Punta Negra',
        'Cota Min Punta Negra': 'Cuenca Río San Juan - Min Punta Negra',
        'Cota Max Punta Negra': 'Cuenca Río San Juan - Max Punta Negra'
    }, inplace=True)
    df['Fecha'] = pd.to_datetime(df['Fecha'], format='%Y/%m/%d').dt.date
    df = df.drop_duplicates().sort_values('Fecha', ascending=False).reset_index(drop=True)
    # Zeros are treated as missing readings.
    df = df.replace(0, np.nan)
    p = figure(x_axis_type="datetime", title="Cotas cuencas", sizing_mode="stretch_both")
    p.grid.grid_line_alpha=0.3
    p.xaxis.axis_label = 'Fecha'
    p.yaxis.axis_label = 'Cota [cm]'
    # NOTE(review): no glyphs are added before setting legend.location --
    # confirm the caller adds the line renderers.
    p.legend.location = "top_left"
    return p, df
def turbinado():
    """Get the rivers basin turbine discharge and process this data.

    Returns:
        Figure: Bokeh plotting figure.
        DataFrame: Pandas DataFrame with the query result.
    """
    raw = get_turbinado()
    # One JSON column per basin; flatten each and glue them next to the date.
    sections = (
        'situacionCuencaComahue',
        'situacionYacyretaSaltoGrande',
        'situacionCuencaPatagonica',
        'situacionCuencaRioGrande',
        'situacionCuencaRioSanJuan',
    )
    df = pd.concat(
        [raw['fecha']] + [pd.json_normalize(raw[key]) for key in sections],
        axis=1,
        join="inner",
    )
    # Prefix every measurement column with its basin name.
    df = df.rename(columns={
        'fecha': 'Fecha',
        'Turbinado Alicura': 'Cuenca Comahue - Alicura',
        'Turbinado Piedra del Aguila': 'Cuenca Comahue - Piedra del Aguila',
        'Turbinado Arroyito': 'Cuenca Comahue - Arroyito',
        'Turbinado El Chocon': 'Cuenca Comahue - El Chocon',
        'Turbinado Mari Menuco': 'Cuenca Comahue - Mari Menuco',
        'Turbinado P.P.Leufu': 'Cuenca Comahue - Leufu',
        'Turbinado Salto Grande': 'Cuenca Yacyreta - Salto Grande',
        'Turbinado Yacyreta': 'Cuenca Yacyreta - Yacyreta',
        'Turbinado Futaleufu': 'Cuenca Patagónica - Futaleufu',
        'Turbinado Ameghino': 'Cuenca Patagónica - Ameghino',
        'Turbinado Río Grande': 'Cuenca Río Grande - Río Grande',
        'Turbinado Punta Negra': 'Cuenca Río San Juan - Punta Negra',
        'Turbinado Ullum': 'Cuenca Río San Juan - Ullum',
        'Turbinado Los Caracoles': 'Cuenca Río San Juan - Los Caracoles',
        'Turbinado Quebrada de Ullum': 'Cuenca Río San Juan - Quebrada de Ullum'
    })
    # Normalize dates, drop exact duplicates, newest first.
    df['Fecha'] = pd.to_datetime(df['Fecha'], format='%Y/%m/%d').dt.date
    df = df.drop_duplicates().sort_values('Fecha', ascending=False).reset_index(drop=True)
    # Empty figure; the caller (write) adds one line glyph per selected column.
    p = figure(x_axis_type="datetime", title="Turbinado", sizing_mode="stretch_both")
    p.grid.grid_line_alpha = 0.3
    p.xaxis.axis_label = 'Fecha'
    p.yaxis.axis_label = 'Turbinado'
    p.legend.location = "top_left"
    return p, df
def vertido():
    """Get the rivers basin spill discharge and process this data.

    Returns:
        Figure: Bokeh plotting figure.
        DataFrame: Pandas DataFrame with the query result.
    """
    raw = get_vertido()
    # One JSON column per basin; flatten each and glue them next to the date.
    sections = (
        'situacionCuencaComahue',
        'situacionYacyretaSaltoGrande',
        'situacionCuencaPatagonica',
        'situacionCuencaRioGrande',
        'situacionCuencaRioSanJuan',
    )
    df = pd.concat(
        [raw['fecha']] + [pd.json_normalize(raw[key]) for key in sections],
        axis=1,
        join="inner",
    )
    # Prefix every measurement column with its basin name.
    df = df.rename(columns={
        'fecha': 'Fecha',
        'Vertido El Chañar': 'Cuenca Comahue - El Chañar',
        'Vertido Arroyito': 'Cuenca Comahue - Arroyito',
        'Vertido Piedra del Aguila': 'Cuenca Comahue - Piedra del Aguila',
        'Vertido P.P.Leufu': 'Cuenca Comahue - Leufu',
        'Vertido Salto Grande': 'Cuenca Yacyreta - Salto Grande',
        'Vertido Yacyreta': 'Cuenca Yacyreta - Yacyreta',
        'Vertido Futaleufu': 'Cuenca Patagónica - Futaleufu',
        'Vertido Ameghino': 'Cuenca Patagónica - Ameghino',
        'Bombeo Río Grande': 'Cuenca Río Grande - Bombeo Río Grande',
        'Vertido Punta Negra': 'Cuenca Río San Juan - Punta Negra',
        'Vertido Los Caracoles': 'Cuenca Río San Juan - Los Caracoles',
        'Vertido Quebrada de Ullum': 'Cuenca Río San Juan - Quebrada de Ullum'
    })
    # Normalize dates, drop exact duplicates, newest first.
    df['Fecha'] = pd.to_datetime(df['Fecha'], format='%Y/%m/%d').dt.date
    df = df.drop_duplicates().sort_values('Fecha', ascending=False).reset_index(drop=True)
    # Empty figure; the caller (write) adds one line glyph per selected column.
    p = figure(x_axis_type="datetime", title="Vertido", sizing_mode="stretch_both")
    p.grid.grid_line_alpha = 0.3
    p.xaxis.axis_label = 'Fecha'
    p.yaxis.axis_label = 'Vertido'
    p.legend.location = "top_left"
    return p, df
def write():
    """Render the Streamlit page "Publicaciones especiales - Cuencas".

    Builds the four figure/DataFrame pairs (caudales, cotas, turbinado,
    vertido) and lays out one container per dataset with a multiselect, a
    Bokeh chart (one line per selected column), an expandable data table and
    a CSV download button.

    Fix: the cotas multiselect previously offered
    'Cuenca Comahue - Piedra del Aguil' (missing the final 'a'); that column
    does not exist in df_cotas, so selecting it raised a KeyError.
    """
    p_caudales, df_caudales = caudales()
    p_cotas, df_cotas = cotas()
    p_turbinado, df_turbinado = turbinado()
    p_vertido, df_vertido = vertido()
    st.header("Publicaciones especiales - Cuencas/Datos Hidráulicos 🌊", anchor=None)
    with st.container():
        st.subheader("Análisis de caudales", anchor=None)
        options = st.multiselect(
            "Seleccionar datos a graficar.",
            options=[
                "Cuenca Comahue - Caudal Collon Cura",
                "Cuenca Comahue - Caudal Neuquen",
                "Cuenca Comahue - Caudal Limay",
                "Cuenca Comahue - Caudal Río Negro",
                "Cuenca Comahue - Caudal Limay despues desembocadura de Collon Cura",
                "Yacyreta Salto Grande - Caudal Río Uruguay",
                "Yacyreta Salto Grande - Caudal Río Paraná",
                "Cuenca Patagónica - Caudal Río Chubut",
                "Cuenca Patagónica - Caudal Río Futaleufu",
                "Cuenca Río Grande - Caudal Río Grande",
                "Cuenca Río San Juan - Caudal Inicial Río San Juan",
                "Cuenca Río San Juan - Caudal Final Río San Juan"
            ],
            default=[
                "Yacyreta Salto Grande - Caudal Río Paraná",
                "Yacyreta Salto Grande - Caudal Río Uruguay"
            ]
        )
        # Set1 only has 9 colours; fall back to the 12-colour Set3 palette
        # when more series are selected (this list has at most 12 options).
        if len(options) > 9:
            col = Set3_12
        else:
            col = Set1_9
        for index, value in enumerate(options):
            p_caudales.line(
                df_caudales['Fecha'],
                df_caudales[value],
                color=col[index],
                # Legend shows only the station name (text after "Cuenca X - ").
                legend_label=re.split(r" - ", value)[1].strip()
            )
        st.bokeh_chart(p_caudales)
        with st.expander("Ver datos"):
            st.write("Datos de los caudales de las cuencas en [m\u00b3/s].")
            st.dataframe(df_caudales)
            st.download_button(
                label="Descargar dataset como .CSV",
                data=df_caudales.to_csv(index=False).encode('utf-8'),
                file_name='Caudales.csv',
                mime='text/csv',
            )
    with st.container():
        st.subheader("Análisis de cotas", anchor=None)
        options_cotas = st.multiselect(
            "Seleccionar datos a graficar.",
            options=[
                'Cuenca Comahue - Alicura',
                'Cuenca Comahue - Min Alicura',
                'Cuenca Comahue - Max Alicura',
                # Fixed typo: was 'Piedra del Aguil', which is not a column.
                'Cuenca Comahue - Piedra del Aguila',
                'Cuenca Comahue - Min Piedra del Aguila',
                'Cuenca Comahue - Max Piedra del Aguila',
                'Cuenca Comahue - Arroyito',
                'Cuenca Comahue - Min Arroyito',
                'Cuenca Comahue - Max Arroyito',
                'Cuenca Comahue - Mari Menuco',
                'Cuenca Comahue - Min Mari Menuco',
                'Cuenca Comahue - Max Mari Menuco',
                'Cuenca Comahue - Planicie Banderita Barreales',
                'Cuenca Comahue - Min Planicie Banderita Barreales',
                'Cuenca Comahue - Max Planicie Banderita Barreales',
                'Cuenca Comahue - El Chocon',
                'Cuenca Comahue - Min El Chocon',
                'Cuenca Comahue - Max El Chocon',
                'Cuenca Comahue - Leufu',
                'Cuenca Yacyreta - Yacyreta',
                'Cuenca Yacyreta - Min Yacyreta',
                'Cuenca Yacyreta - Max Yacyreta',
                'Cuenca Yacyreta - Salto Grande',
                'Cuenca Yacyreta - Min Salto Grande',
                'Cuenca Yacyreta - Max Salto Grande',
                'Cuenca Patagónica - Futaleufu',
                'Cuenca Patagónica - Min Futaleufu',
                'Cuenca Patagónica - Max Futaleufu',
                'Cuenca Patagónica - Ameghino',
                'Cuenca Patagónica - Min Ameghino',
                'Cuenca Patagónica - Max Ameghino',
                'Cuenca Río Grande - Río Grande',
                'Cuenca Río Grande - Min Río Grande',
                'Cuenca Río Grande - Max Río Grande',
                'Cuenca Río San Juan - Quebrada de Ullum',
                'Cuenca Río San Juan - Min Quebrada de Ullum',
                'Cuenca Río San Juan - Max Quebrada de Ullum',
                'Cuenca Río San Juan - Punta Negra',
                'Cuenca Río San Juan - Min Punta Negra',
                'Cuenca Río San Juan - Max Punta Negra'
            ],
            default=[
                'Cuenca Yacyreta - Salto Grande',
                'Cuenca Yacyreta - Min Salto Grande',
                'Cuenca Yacyreta - Max Salto Grande'
            ]
        )
        # Three-tier palette choice: up to 40 options can be selected here.
        if len(options_cotas) <= 9:
            col = Set1_9
        elif len(options_cotas) <= 12:
            col = Set3_12
        else:
            col = Inferno256
        for index, value in enumerate(options_cotas):
            p_cotas.line(
                df_cotas['Fecha'],
                df_cotas[value],
                color=col[index],
                legend_label=re.split(r" - ", value)[1].strip()
            )
        st.bokeh_chart(p_cotas)
        with st.expander("Ver datos"):
            st.write("Datos de los Cotas de las cuencas en [cm].")
            st.dataframe(df_cotas)
            st.download_button(
                label="Descargar dataset como .CSV",
                data=df_cotas.to_csv(index=False).encode('utf-8'),
                file_name='Cotas.csv',
                mime='text/csv',
            )
    with st.container():
        st.subheader("Análisis del turbinado", anchor=None)
        options_turbinado = st.multiselect(
            "Seleccionar datos a graficar.",
            options=[
                'Cuenca Comahue - Alicura',
                'Cuenca Comahue - Piedra del Aguila',
                'Cuenca Comahue - Arroyito',
                'Cuenca Comahue - El Chocon',
                'Cuenca Comahue - Mari Menuco',
                'Cuenca Comahue - Leufu',
                'Cuenca Yacyreta - Salto Grande',
                'Cuenca Yacyreta - Yacyreta',
                'Cuenca Patagónica - Futaleufu',
                'Cuenca Patagónica - Ameghino',
                'Cuenca Río Grande - Río Grande',
                'Cuenca Río San Juan - Punta Negra',
                'Cuenca Río San Juan - Ullum',
                'Cuenca Río San Juan - Los Caracoles',
                'Cuenca Río San Juan - Quebrada de Ullum'
            ],
            default=[
                'Cuenca Yacyreta - Yacyreta',
                'Cuenca Yacyreta - Salto Grande'
            ]
        )
        # Three-tier palette choice: up to 15 options can be selected here.
        if len(options_turbinado) <= 9:
            col = Set1_9
        elif len(options_turbinado) <= 12:
            col = Set3_12
        else:
            col = Inferno256
        for index, value in enumerate(options_turbinado):
            p_turbinado.line(
                df_turbinado['Fecha'],
                df_turbinado[value],
                color=col[index],
                legend_label=re.split(r" - ", value)[1].strip()
            )
        st.bokeh_chart(p_turbinado)
        with st.expander("Ver datos"):
            st.write("Datos del turbinado.")
            st.dataframe(df_turbinado)
            st.download_button(
                label="Descargar dataset como .CSV",
                data=df_turbinado.to_csv(index=False).encode('utf-8'),
                file_name='Turbinado.csv',
                mime='text/csv',
            )
    with st.container():
        st.subheader("Análisis del vertido", anchor=None)
        options_vertido = st.multiselect(
            "Seleccionar datos a graficar.",
            options=[
                'Cuenca Comahue - El Chañar',
                'Cuenca Comahue - Arroyito',
                'Cuenca Comahue - Piedra del Aguila',
                'Cuenca Comahue - Leufu',
                'Cuenca Yacyreta - Salto Grande',
                'Cuenca Yacyreta - Yacyreta',
                'Cuenca Patagónica - Futaleufu',
                'Cuenca Patagónica - Ameghino',
                'Cuenca Río Grande - Bombeo Río Grande',
                'Cuenca Río San Juan - Punta Negra',
                'Cuenca Río San Juan - Los Caracoles',
                'Cuenca Río San Juan - Quebrada de Ullum'
            ],
            default=[
                'Cuenca Yacyreta - Yacyreta',
                'Cuenca Yacyreta - Salto Grande'
            ]
        )
        # This list has at most 12 options, so Set3_12 always suffices.
        if len(options_vertido) > 9:
            col = Set3_12
        else:
            col = Set1_9
        for index, value in enumerate(options_vertido):
            p_vertido.line(
                df_vertido['Fecha'],
                df_vertido[value],
                color=col[index],
                legend_label=re.split(r" - ", value)[1].strip()
            )
        st.bokeh_chart(p_vertido)
        with st.expander("Ver datos"):
            st.write("Datos del vertido.")
            st.dataframe(df_vertido)
            st.download_button(
                label="Descargar dataset como .CSV",
                data=df_vertido.to_csv(index=False).encode('utf-8'),
                file_name='Vertido.csv',
                mime='text/csv',
            )
| 27,163 | 9,112 |
#!/usr/bin/env python3
"""Generator of the function to prohibit certain vowel sequences.
It creates ``_hb_preprocess_text_vowel_constraints``, which inserts dotted
circles into sequences prohibited by the USE script development spec.
This function should be used as the ``preprocess_text`` of an
``hb_ot_complex_shaper_t``.
usage: ./gen-vowel-constraints.py ms-use/IndicShapingInvalidCluster.txt
"""
import collections
import youseedee
def write (s):
    """Flush the text layer of stdout, then emit *s* as raw UTF-8 bytes.

    Flushing first keeps the byte output ordered correctly relative to any
    still-buffered print() output.
    """
    sys.stdout.flush()
    stream = sys.stdout.buffer
    stream.write(s.encode('utf-8'))
import sys

# Exactly one argument is expected: the path to the constraints data file
# (ms-use/IndicShapingInvalidCluster.txt). Exit with the usage docstring
# otherwise.
if len (sys.argv) != 2:
    sys.exit (__doc__)

# Map each code point to its Unicode script, and each script to the first
# code point of its first range (used later to order the generated output).
script_order = {}
scripts = {}
for start, end,script in youseedee.parse_file_ranges("Scripts.txt"):
    for u in range (start, end + 1):
        scripts[u] = script
    if script not in script_order:
        script_order[script] = start
class ConstraintSet (object):
    """A set of prohibited code point sequences.

    Args:
        constraint (List[int]): A prohibited code point sequence.
    """

    def __init__ (self, constraint):
        # Either a list or a dictionary. As a list of code points, it
        # represents a prohibited code point sequence. As a dictionary,
        # it represents a set of prohibited sequences, where each item
        # represents the set of prohibited sequences starting with the
        # key (a code point) concatenated with any of the values
        # (ConstraintSets).
        self._c = constraint

    def add (self, constraint):
        """Add a constraint to this set."""
        if not constraint:
            return
        first = constraint[0]
        rest = constraint[1:]
        if isinstance (self._c, list):
            if constraint == self._c[:len (constraint)]:
                # The new constraint is a prefix of the stored one: the
                # shorter sequence subsumes the longer one.
                self._c = constraint
            elif self._c != constraint[:len (self._c)]:
                # Diverging sequences: promote the list form to the dict
                # form, keyed by the first code point.
                self._c = {self._c[0]: ConstraintSet (self._c[1:])}
        # Deliberately `if`, not `elif`: after a promotion above, the new
        # constraint still has to be merged into the dict.
        if isinstance (self._c, dict):
            if first in self._c:
                self._c[first].add (rest)
            else:
                self._c[first] = ConstraintSet (rest)

    @staticmethod
    def _indent (depth):
        # NOTE(review): one space per depth unit is unusual for generated
        # Python; whitespace may have been collapsed in this copy of the
        # file — verify against the upstream generator.
        return (' ' * depth)

    @staticmethod
    def _cp_accessor(index):
        # Python expression reading the code point `index` positions ahead
        # of the cursor `i` in the generated shaper function.
        if index:
            return "buffer.items[i+{}].codepoint".format(index)
        return "buffer.items[i].codepoint"

    def __str__ (self, index=0, depth=2):
        """Render this constraint set as Python source for the body of the
        generated `preprocess_text_vowel_constraints` function."""
        s = []
        indent = self._indent (depth)
        if isinstance (self._c, list):
            if len (self._c) == 0:
                # Empty sequence: everything up to here already matched.
                assert index == 2, 'Cannot use `matched` for this constraint; the general case has not been implemented'
                s.append ('{}matched = True\n'.format (indent))
            elif len (self._c) == 1:
                assert index == 1, 'Cannot use `matched` for this constraint; the general case has not been implemented'
                s.append ('{}matched = 0x{:04X} == {}\n'.format (indent, next (iter (self._c)), self._cp_accessor(index)))
            else:
                s.append ('{}if (0x{:04X} == {} and\n'.format (indent, self._c[0], self._cp_accessor(index)))
                if index:
                    # Bounds check guarding the lookahead reads below.
                    s.append ('{}i + {} < len(buffer.items)-1 and\n'.format (self._indent (depth + 2), index + 1))
                for i, cp in enumerate (self._c[1:], start=1):
                    # NOTE(review): for non-final terms the separator renders
                    # as 'and' with no leading space (yielding
                    # "...codepointand"); upstream presumably uses ' and' —
                    # whitespace may have been lost in this copy. Verify
                    # before regenerating.
                    s.append ('{}0x{:04X} == {}{}\n'.format (
                        self._indent (depth + 2), cp, self._cp_accessor(index + i), '):' if i == len (self._c) - 1 else 'and')
                    )
                s.append ('{}matched = True\n'.format (self._indent (depth + 1)))
        else:
            # Dict form: group children whose rendered bodies are identical
            # so several first-code-points can share one `in [...]` test.
            cases = collections.defaultdict (set)
            for first, rest in sorted (self._c.items ()):
                cases[rest.__str__ (index + 1, depth + 2)].add (first)
            # Deterministic order: sort case groups by their smallest label.
            for body, labels in sorted (cases.items (), key=lambda b_ls: sorted (b_ls[1])[0]):
                if len(labels) == 1:
                    s.append (self._indent (depth + 1) + "if {} == 0x{:04X}:\n".format(self._cp_accessor(index), list(labels)[0]))
                else:
                    points = ", ".join(['0x{:04X}'.format(cp) for cp in sorted(labels)])
                    s.append (self._indent (depth + 1) + "if {} in [{}]:\n".format(self._cp_accessor(index), points))
                s.append (body)
        return ''.join (s)
# Parse the constraints file: skip the free-form header (terminated by a
# line that is exactly "#"), then read one prohibited hex code point
# sequence per line, grouping them into one ConstraintSet per script.
constraints = {}
with open (sys.argv[1], encoding='utf-8') as f:
    constraints_header = []
    while True:
        line = f.readline ().strip ()
        if line == '#':
            break
        constraints_header.append(line)
    for line in f:
        # Strip trailing comments; the data is the part before ';'.
        j = line.find ('#')
        if j >= 0:
            line = line[:j]
        constraint = [int (cp, 16) for cp in line.split (';')[0].split ()]
        if not constraint: continue
        assert 2 <= len (constraint), 'Prohibited sequence is too short: {}'.format (constraint)
        # The script of the first code point decides the bucket.
        script = scripts[constraint[0]]
        if script in constraints:
            constraints[script].add (constraint)
        else:
            constraints[script] = ConstraintSet (constraint)
assert constraints, 'No constraints found'

# Emit the generated module on stdout.
print ('# The following functions are generated by running:')
print ('# %s ms-use/IndicShapingInvalidCluster.txt' % sys.argv[0])
# NOTE(review): the leading whitespace inside the generated-code string
# literals below appears to have been lost/collapsed in this copy of the
# file (the emitted helper body and loop statements need deeper indents to
# be valid Python). Verify against the upstream generator before running.
print("""
from fontFeatures.shaperLib.Buffer import BufferItem
DOTTED_CIRCLE = 0x25CC
def _insert_dotted_circle(buf, index):
dotted_circle = BufferItem.new_unicode(DOTTED_CIRCLE)
buf.items.insert(index, dotted_circle)
""")
print ('def preprocess_text_vowel_constraints(buffer):')
for script, constraints in sorted (constraints.items (), key=lambda s_c: script_order[s_c[0]]):
    # `constraints` is rebound by the loop variable, shadowing the dict being
    # iterated — safe only because sorted() materialized the items first.
    print(f'  if buffer.script == "{script}":')
    print (' i = 0')
    print (' while i < len(buffer.items)-1:')
    print (' matched = False')
    write (str (constraints))
    print (' i = i + 1')
    print (' if matched: _insert_dotted_circle(buffer, i)')
| 5,930 | 1,811 |
from datetime import datetime, timedelta, timezone

import pytz
# Maximum age (in seconds) a state record may remain in a given state before
# it is considered slow; states not listed fall back to "default".
ALLOWED_AGE_PER_STATUS = {
    "default": 5 * 60,
    # LMS instruments can take a very long time to generate when the SPS is not cached
    "started": 35 * 60,
    "nifi_notified": 30 * 60,
}
# Terminal states: records in these states never trigger an alert.
COMPLETE_STATES = ["inactive", "in_arc"]
def slow_process(state_record, slow_seconds):
    """Return True if the record was last updated more than *slow_seconds* ago.

    Args:
        state_record: Mapping with an ISO-8601 ``updated_at`` timestamp
            (assumed timezone-aware — TODO confirm with the producers).
        slow_seconds: Age threshold in seconds.
    """
    # timezone.utc (stdlib) replaces pytz.UTC: both yield identical aware UTC
    # datetimes, and pytz is deprecated for new code.
    oldest_time = datetime.now(timezone.utc) - timedelta(seconds=slow_seconds)
    return datetime.fromisoformat(state_record["updated_at"]) < oldest_time
def slow_process_error(state_record):
    """Build the alert message for a record that exceeded its allowed age.

    Args:
        state_record: Mapping with ``state`` and an ISO-8601 (assumed
            timezone-aware) ``updated_at`` timestamp.

    Returns:
        str: Human-readable message including the elapsed time and the
        configured threshold from slow_seconds().
    """
    # timezone.utc (stdlib) replaces pytz.UTC — identical aware UTC datetime.
    total_seconds = int(
        (
            datetime.now(timezone.utc) - datetime.fromisoformat(state_record["updated_at"])
        ).total_seconds()
    )
    return (
        f"Instrument has been in state '{state_record['state']}' for {total_seconds} "
        + f"seconds slow error configuration is {slow_seconds(state_record)} seconds"
    )
def running_slow(state_record):
    """Return True when the record has exceeded the allowed age for its state."""
    threshold = slow_seconds(state_record)
    return slow_process(state_record, threshold)
def slow_seconds(state_record):
    """Return the allowed age (seconds) for the record's current state.

    Falls back to the "default" entry for states without an explicit limit.
    """
    return ALLOWED_AGE_PER_STATUS.get(
        state_record["state"], ALLOWED_AGE_PER_STATUS["default"]
    )
def state_error(state_record):
    """Return the record's error detail, or a generic fallback message.

    Fix: the fallback previously read "occured" (misspelled); now "occurred".
    """
    if "error_info" in state_record:
        return state_record["error_info"]
    return "An unknown error occurred"
def should_alert(state_record):
    """Decide whether an alert should be raised for this state record.

    Returns:
        tuple: ``(True, message)`` when an alert is warranted, otherwise
        ``(False, None)``. Already-alerted and completed records never alert.
    """
    state = state_record["state"]
    if state_record["alerted"] or state in COMPLETE_STATES:
        return False, None
    if state == "errored":
        return True, state_error(state_record)
    if running_slow(state_record):
        return True, slow_process_error(state_record)
    return False, None
| 1,719 | 550 |
import socket
import select
# Module state for the poll()-based server.
fd_to_socket = {}  # file descriptor -> socket (listening socket + clients)
# Event masks for poller registration.
READ_ONLY = ( select.POLLIN | select.POLLPRI | select.POLLHUP | select.POLLERR)
READ_WRITE = (READ_ONLY|select.POLLOUT)
poller = select.poll()
server = None  # listening socket, set by initServer()
IP = '127.0.0.1'
Port = 7002
def getAddress ():
    """Return the (ip, port) pair the server is currently configured with."""
    address = (IP, Port)
    return address
def initServer(ip=IP, port=Port):
    """Create the non-blocking listening socket and register it with the poller.

    Updates the module-level IP/Port/server/fd_to_socket state.

    Returns:
        bool: True on success, False when any socket call failed (the error
        is printed, matching the module's best-effort style).
    """
    global server, IP, Port, fd_to_socket
    IP, Port = ip, port
    try:
        server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        server.setblocking(False)
        server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        server.bind((ip, port))
        server.listen(1)
        poller.register(server, READ_ONLY)
        # Reset the descriptor map so only the fresh listener is tracked.
        fd_to_socket = {server.fileno(): server}
    except Exception as e:
        print(e)
        return False
    return True
def isTimeout(events):
    """Return True when poll() produced no events (i.e. the wait timed out).

    Accepts None or an empty event list as "timed out".
    """
    # `is None` instead of `== None`: identity check is the Python idiom and
    # avoids invoking __eq__ on arbitrary objects.
    return events is None or len(events) == 0
def runServer(timeout, timeoutFn, recivedFn):
    """Run the accept/receive loop forever.

    Polls every 500 ms. When roughly `timeout` ms elapse with no events
    (and also immediately on the very first idle poll), `timeoutFn()` is
    invoked. Each client connection is read once (up to 1024 bytes), closed,
    and the data passed to `recivedFn(result)`.
    """
    global fd_to_socket
    isFirstTime = True
    sumTimeoutTimes = 0
    while True:
        # 500 ms poll slice; the timeout budget is counted in these slices.
        events = poller.poll(500)
        if (isTimeout(events)):
            sumTimeoutTimes += 1
            if (isFirstTime or sumTimeoutTimes * 500 >= timeout):
                # Fires on the first idle slice as well — presumably an
                # intentional "run callback immediately on startup idle";
                # confirm with callers.
                isFirstTime = False
                sumTimeoutTimes = 0
                timeoutFn()
                pass
            continue # if (isTimeout(events)):
        for fd, flag in events:
            # Only readable events are handled; POLLHUP/POLLERR-only events
            # fall through untouched here.
            if flag & (select.POLLIN | select.POLLPRI) :
                s = fd_to_socket[fd]
                if s is server :
                    # New client: accept, make non-blocking, track and poll it.
                    connection , client_address = s.accept()
                    connection.setblocking(False)
                    fd_to_socket[connection.fileno()] = connection
                    poller.register(connection,READ_ONLY)
                elif(s):
                    # One-shot read: receive a single chunk, then tear the
                    # connection down before invoking the callback.
                    result = s.recv(1024)
                    poller.unregister(s)
                    fd_to_socket.pop(fd)
                    s.close()
                    recivedFn(result)
                else:
                    fd_to_socket.pop(fd)
                    pass
                pass # if flag & (select.POLLIN | select.POLLPRI)
            pass # while
#
# PySNMP MIB module A3COM-HUAWEI-SNA-DLSW-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/A3COM-HUAWEI-SNA-DLSW-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 16:52:16 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
hwproducts, = mibBuilder.importSymbols("A3COM-HUAWEI-OID-MIB", "hwproducts")
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint, ConstraintsUnion, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "ConstraintsUnion", "SingleValueConstraint")
ifIndex, = mibBuilder.importSymbols("IF-MIB", "ifIndex")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
MibIdentifier, ObjectIdentity, TimeTicks, Counter64, ModuleIdentity, MibScalar, MibTable, MibTableRow, MibTableColumn, iso, Counter32, Bits, Gauge32, NotificationType, IpAddress, Integer32, Unsigned32 = mibBuilder.importSymbols("SNMPv2-SMI", "MibIdentifier", "ObjectIdentity", "TimeTicks", "Counter64", "ModuleIdentity", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "iso", "Counter32", "Bits", "Gauge32", "NotificationType", "IpAddress", "Integer32", "Unsigned32")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
# pysmi-generated code: prefer regenerating from the ASN.1 source over
# hand-editing. Module identity of the Huawei-3Com DLSw MIB.
dlsw = ModuleIdentity((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34))
if mibBuilder.loadTexts: dlsw.setLastUpdated('200410301551Z')
if mibBuilder.loadTexts: dlsw.setOrganization('Huawei-3com Technologies co.,Ltd.')
class MacAddressNC(TextualConvention, OctetString):
    # Textual convention: MAC address displayed as colon-separated hex
    # ("1x:"); value is either empty (0 octets) or exactly 6 octets.
    status = 'current'
    displayHint = '1x:'
    subtypeSpec = OctetString.subtypeSpec + ConstraintsUnion(ValueSizeConstraint(0, 0), ValueSizeConstraint(6, 6), )
class EndStationLocation(TextualConvention, Integer32):
    # Enumeration: where an end station sits relative to this DLSw node.
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))
    namedValues = NamedValues(("other", 1), ("internal", 2), ("remote", 3), ("local", 4))
class DlcType(TextualConvention, Integer32):
    # Enumeration: data-link control type carried over DLSw.
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))
    namedValues = NamedValues(("other", 1), ("na", 2), ("llc", 3), ("sdlc", 4), ("qllc", 5))
class LFSize(TextualConvention, Integer32):
    # Enumeration: largest-frame sizes defined by the DLSw standard;
    # 65535 means "unknown".
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(516, 1470, 1500, 2052, 4472, 8144, 11407, 11454, 17800, 65535))
    namedValues = NamedValues(("lfs516", 516), ("lfs1470", 1470), ("lfs1500", 1500), ("lfs2052", 2052), ("lfs4472", 4472), ("lfs8144", 8144), ("lfs11407", 11407), ("lfs11454", 11454), ("lfs17800", 17800), ("unknown", 65535))
class CreateLineFlag(TextualConvention, Integer32):
    # Enumeration: row-creation action flag (create vs. delete a line).
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2))
    namedValues = NamedValues(("createLine", 1), ("deleteLine", 2))
class EntryStatus(TextualConvention, Integer32):
    """Row status convention (valid/createRequest/underCreation/invalid)."""
    namedValues = NamedValues(
        ("valid", 1),
        ("createRequest", 2),
        ("underCreation", 3),
        ("invalid", 4),
    )
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(
        SingleValueConstraint(1, 2, 3, 4)
    )
    status = 'current'
# --- Subtree layout under 1.3.6.1.4.1.43.45.1.2.34 ---------------------------
# .1 node, .2 transport connections, .3 bridge group, .4 local directory,
# .5 circuits, .6 SDLC, .7 LLC2.
dlswNode = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1))
dlswTConn = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2))
dlswBridgeGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 3))
dlswLocDirectory = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 4))
dlswCircuit = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5))
dlswSdlc = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6))
dlswLlc2 = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 7))
# --- Node-level scalars (subtree .1) -----------------------------------------
# Fixed-length 2-octet version identifier, read-only.
dlswNodeVersion = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswNodeVersion.setStatus('current')
# Fixed-length 3-octet vendor identifier, read-only.
dlswNodeVendorID = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(3, 3)).setFixedLength(3)).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswNodeVendorID.setStatus('current')
dlswNodeVersionString = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswNodeVersionString.setStatus('current')
dlswNodeStdPacingSupport = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 65535))).clone(namedValues=NamedValues(("none", 1), ("adaptiveRcvWindow", 2), ("fixedRcvWindow", 3), ("unknown", 65535)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswNodeStdPacingSupport.setStatus('current')
dlswNodeStatus = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("active", 1), ("inactive", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswNodeStatus.setStatus('current')
# Marked obsolete in the MIB; kept so the OID remains registered.
dlswNodeUpTime = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 6), Integer32()).setUnits('second').setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswNodeUpTime.setStatus('obsolete')
dlswNodeVirtualSegmentLFSize = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 7), LFSize().clone('lfs1500')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswNodeVirtualSegmentLFSize.setStatus('current')
dlswNodeLocalAddr = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 8), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswNodeLocalAddr.setStatus('current')
# Scalars below use .clone(<value>) to set a DEFVAL-style default.
dlswNodePriority = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(1, 5), ValueRangeConstraint(65535, 65535), )).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswNodePriority.setStatus('current')
dlswNodeInitWindow = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(1, 2000), ValueRangeConstraint(65535, 65535), )).clone(40)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswNodeInitWindow.setStatus('current')
dlswNodeKeepAlive = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(1, 2000), ValueRangeConstraint(65535, 65535), )).clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswNodeKeepAlive.setStatus('current')
dlswNodeMaxWindow = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(1, 2000), ValueRangeConstraint(65535, 65535), )).clone(255)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswNodeMaxWindow.setStatus('current')
dlswNodePermitDynamic = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 65535))).clone(namedValues=NamedValues(("permitDynamic", 1), ("forbidDynamic", 2), ("unknown", 65535))).clone('forbidDynamic')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswNodePermitDynamic.setStatus('current')
dlswNodeConnTimeout = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(300)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswNodeConnTimeout.setStatus('current')
dlswNodeLocalPendTimeout = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 15), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswNodeLocalPendTimeout.setStatus('current')
dlswNodeRemotePendTimeout = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(30)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswNodeRemotePendTimeout.setStatus('current')
dlswNodeSnaCacheTimeout = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(120)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswNodeSnaCacheTimeout.setStatus('current')
# --- Remote peer table (.2.1), indexed by the peer's IP address --------------
dlswRemotePeerTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1), )
if mibBuilder.loadTexts: dlswRemotePeerTable.setStatus('current')
dlswRemotePeerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1), ).setIndexNames((0, "A3COM-HUAWEI-SNA-DLSW-MIB", "dlswRemotePeerAddr"))
if mibBuilder.loadTexts: dlswRemotePeerEntry.setStatus('current')
# Index column: only reachable through notifications, not walks.
dlswRemotePeerAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 1), IpAddress()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: dlswRemotePeerAddr.setStatus('current')
dlswRemotePeerVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswRemotePeerVersion.setStatus('current')
dlswRemotePeerVendorID = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(3, 3)).setFixedLength(3)).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswRemotePeerVendorID.setStatus('current')
dlswRemotePeerPaceWindInit = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswRemotePeerPaceWindInit.setStatus('current')
# Obsolete column, OID retained.
dlswRemotePeerNumOfTcpSessions = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 16))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswRemotePeerNumOfTcpSessions.setStatus('obsolete')
dlswRemotePeerVersionString = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 6), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswRemotePeerVersionString.setStatus('current')
dlswRemotePeerIsConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("yes", 1), ("no", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswRemotePeerIsConfig.setStatus('current')
# Obsolete column, OID retained.
dlswRemotePeerSetStateToConfig = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("yes", 1), ("no", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswRemotePeerSetStateToConfig.setStatus('obsolete')
dlswRemotePeerCost = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 5))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswRemotePeerCost.setStatus('current')
dlswRemotePeerKeepAlive = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswRemotePeerKeepAlive.setStatus('current')
dlswRemotePeerLf = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 11), LFSize()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswRemotePeerLf.setStatus('current')
dlswRemotePeerTcpQueneMax = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(50, 2000))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswRemotePeerTcpQueneMax.setStatus('current')
dlswRemotePeerHaveBackup = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("yes", 1), ("no", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswRemotePeerHaveBackup.setStatus('current')
dlswRemotePeerIsBackup = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("yes", 1), ("no", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswRemotePeerIsBackup.setStatus('current')
dlswRemotePeerBackupAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 15), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswRemotePeerBackupAddr.setStatus('current')
dlswRemotePeerLinger = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 16), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1440)).clone(5)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswRemotePeerLinger.setStatus('current')
dlswRemotePeerLinkState = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("connecting", 1), ("initCapExchange", 2), ("connected", 3), ("quiescing", 4), ("disconnecting", 5), ("disconnected", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswRemotePeerLinkState.setStatus('current')
dlswRemotePeerRecvPacks = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswRemotePeerRecvPacks.setStatus('current')
dlswRemotePeerSendPacks = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswRemotePeerSendPacks.setStatus('current')
dlswRemotePeerDrops = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswRemotePeerDrops.setStatus('current')
dlswRemotePeerUptime = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswRemotePeerUptime.setStatus('current')
dlswRemotePeerEntryStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 2, 1, 1, 22), EntryStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswRemotePeerEntryStatus.setStatus('current')
# --- Bridge group table (.3.1), indexed by bridge number ---------------------
dlswBridgeTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 3, 1), )
if mibBuilder.loadTexts: dlswBridgeTable.setStatus('current')
dlswBridgeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 3, 1, 1), ).setIndexNames((0, "A3COM-HUAWEI-SNA-DLSW-MIB", "dlswBridgeNum"))
if mibBuilder.loadTexts: dlswBridgeEntry.setStatus('current')
dlswBridgeNum = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 63)))
if mibBuilder.loadTexts: dlswBridgeNum.setStatus('current')
dlswBridgeStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 3, 1, 1, 2), CreateLineFlag()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswBridgeStatus.setStatus('current')
# --- Bridge interface table (.3.2), indexed by the standard ifIndex ----------
dlswBridgeIfTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 3, 2), )
if mibBuilder.loadTexts: dlswBridgeIfTable.setStatus('current')
dlswBridgeIfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 3, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dlswBridgeIfEntry.setStatus('current')
dlswBridgeIfBriGru = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 3, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 63))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswBridgeIfBriGru.setStatus('current')
dlswBridgeIfName = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 3, 2, 1, 2), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswBridgeIfName.setStatus('current')
dlswBridgeIfStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 3, 2, 1, 3), EntryStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswBridgeIfStatus.setStatus('current')
# --- Local MAC directory table (.4.1), indexed by hash index + sequence ------
dlswLocMacTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 4, 1), )
if mibBuilder.loadTexts: dlswLocMacTable.setStatus('current')
dlswLocMacEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 4, 1, 1), ).setIndexNames((0, "A3COM-HUAWEI-SNA-DLSW-MIB", "dlswLocMacHashIndex"), (0, "A3COM-HUAWEI-SNA-DLSW-MIB", "dlswLocMacHashIndexSeqNum"))
if mibBuilder.loadTexts: dlswLocMacEntry.setStatus('current')
dlswLocMacHashIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 4, 1, 1, 1), Integer32())
if mibBuilder.loadTexts: dlswLocMacHashIndex.setStatus('current')
dlswLocMacHashIndexSeqNum = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 4, 1, 1, 2), Integer32())
if mibBuilder.loadTexts: dlswLocMacHashIndexSeqNum.setStatus('current')
dlswLocMacMac = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 4, 1, 1, 3), MacAddressNC()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswLocMacMac.setStatus('current')
dlswLocMacLocalInterfaceName = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 4, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswLocMacLocalInterfaceName.setStatus('current')
# --- Circuit table (.5.1), indexed by the S1-side circuit id -----------------
dlswCircuitTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1), )
if mibBuilder.loadTexts: dlswCircuitTable.setStatus('current')
dlswCircuitEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1), ).setIndexNames((0, "A3COM-HUAWEI-SNA-DLSW-MIB", "dlswCircuitS1CircuitId"))
if mibBuilder.loadTexts: dlswCircuitEntry.setStatus('current')
# Index column: only reachable through notifications, not walks.
dlswCircuitS1CircuitId = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 1), Integer32()).setMaxAccess("accessiblefornotify")
if mibBuilder.loadTexts: dlswCircuitS1CircuitId.setStatus('current')
dlswCircuitS1Mac = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 2), MacAddressNC()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitS1Mac.setStatus('current')
dlswCircuitS1Sap = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitS1Sap.setStatus('current')
dlswCircuitS2Mac = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 4), MacAddressNC()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitS2Mac.setStatus('current')
dlswCircuitS2Sap = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(1, 1)).setFixedLength(1)).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitS2Sap.setStatus('current')
dlswCircuitS1IfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitS1IfIndex.setStatus('current')
dlswCircuitS1Ifname = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitS1Ifname.setStatus('current')
dlswCircuitS1DlcType = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 8), DlcType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitS1DlcType.setStatus('current')
# Obsolete column, OID retained.
dlswCircuitS2Location = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 9), EndStationLocation()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitS2Location.setStatus('obsolete')
dlswCircuitS2TAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 10), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitS2TAddress.setStatus('current')
dlswCircuitS2CircuitId = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitS2CircuitId.setStatus('current')
dlswCircuitOrigin = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("s1", 1), ("s2", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitOrigin.setStatus('current')
dlswCircuitEntryTime = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 13), TimeTicks()).setUnits('hundredths of a second').setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitEntryTime.setStatus('current')
dlswCircuitStateTime = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 14), TimeTicks()).setUnits('hundredths of a second').setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitStateTime.setStatus('current')
# 13-state circuit state machine enumeration.
dlswCircuitState = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("disconnected", 1), ("circuitStart", 2), ("resolvePending", 3), ("circuitPending", 4), ("circuitEstablished", 5), ("connectPending", 6), ("contactPending", 7), ("connected", 8), ("disconnectPending", 9), ("haltPending", 10), ("haltPendingNoack", 11), ("circuitRestart", 12), ("restartPending", 13)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitState.setStatus('current')
# Obsolete column, OID retained.
dlswCircuitPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("unsupported", 1), ("low", 2), ("medium", 3), ("high", 4), ("highest", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitPriority.setStatus('obsolete')
dlswCircuitFCSendGrantedUnits = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 17), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitFCSendGrantedUnits.setStatus('current')
dlswCircuitFCSendCurrentWndw = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitFCSendCurrentWndw.setStatus('current')
dlswCircuitFCRecvGrantedUnits = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 19), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitFCRecvGrantedUnits.setStatus('current')
dlswCircuitFCRecvCurrentWndw = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitFCRecvCurrentWndw.setStatus('current')
# Obsolete columns, OIDs retained.
dlswCircuitFCLargestRecvGranted = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 21), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitFCLargestRecvGranted.setStatus('obsolete')
dlswCircuitFCLargestSendGranted = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 5, 1, 1, 22), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswCircuitFCLargestSendGranted.setStatus('obsolete')
# --- SDLC port table (.6.1), indexed by the standard ifIndex -----------------
dlswSdlcPortTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1), )
if mibBuilder.loadTexts: dlswSdlcPortTable.setStatus('current')
dlswSdlcPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"))
if mibBuilder.loadTexts: dlswSdlcPortEntry.setStatus('current')
dlswSdlcPortSerialName = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswSdlcPortSerialName.setStatus('current')
dlswSdlcPortEncap = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("sdlc", 1), ("ppp", 2), ("other", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dlswSdlcPortEncap.setStatus('current')
# NOTE: "seconday" spelling is part of the published MIB enumeration.
dlswSdlcPortRole = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("primary", 1), ("seconday", 2), ("norole", 3))).clone('norole')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcPortRole.setStatus('current')
dlswSdlcPortVmac = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1, 4), MacAddressNC()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcPortVmac.setStatus('current')
dlswSdlcPortHoldq = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(20, 255)).clone(50)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcPortHoldq.setStatus('current')
dlswSdlcPortK = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 7)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcPortK.setStatus('current')
dlswSdlcPortModule = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(8, 128))).clone(namedValues=NamedValues(("m8", 8), ("m128", 128))).clone('m8')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcPortModule.setStatus('current')
dlswSdlcPortN1 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 17680)).clone(265)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcPortN1.setStatus('current')
dlswSdlcPortN2 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(20)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcPortN2.setStatus('current')
dlswSdlcPortPollPauseTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10000)).clone(100)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcPortPollPauseTimer.setStatus('current')
dlswSdlcPortSimultaneousEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disenable", 2))).clone(1)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcPortSimultaneousEnable.setStatus('current')
dlswSdlcPortT1 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1, 12), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60000)).clone(3000)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcPortT1.setStatus('current')
dlswSdlcPortT2 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1, 13), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60000)).clone(500)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcPortT2.setStatus('current')
# Obsolete columns, OIDs retained.
dlswSdlcPortNrziEncoding = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disenable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcPortNrziEncoding.setStatus('obsolete')
dlswSdlcPortIdleMarkEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disenable", 2))).clone('enable')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcPortIdleMarkEnable.setStatus('obsolete')
# --- SDLC link-station table (.6.2), indexed by ifIndex + station address ----
dlswSdlcLsTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 2), )
if mibBuilder.loadTexts: dlswSdlcLsTable.setStatus('current')
dlswSdlcLsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 2, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "A3COM-HUAWEI-SNA-DLSW-MIB", "dlswSdlcLsAddress"))
if mibBuilder.loadTexts: dlswSdlcLsEntry.setStatus('current')
dlswSdlcLsAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 2, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 254)))
if mibBuilder.loadTexts: dlswSdlcLsAddress.setStatus('current')
dlswSdlcLsLocalId = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 2147483647))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcLsLocalId.setStatus('current')
dlswSdlcLsRemoteMac = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 2, 1, 3), MacAddressNC()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcLsRemoteMac.setStatus('current')
dlswSdlcLsSsap = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 2, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 254))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcLsSsap.setStatus('current')
dlswSdlcLsDsap = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 2, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 254))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcLsDsap.setStatus('current')
dlswSdlcLsStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 6, 2, 1, 6), EntryStatus()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswSdlcLsStatus.setStatus('current')
# --- LLC2 port table (.7.1), indexed by ifIndex + bridge group number --------
dlswLlc2PortTable = MibTable((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 7, 1), )
if mibBuilder.loadTexts: dlswLlc2PortTable.setStatus('current')
dlswLlc2PortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 7, 1, 1), ).setIndexNames((0, "IF-MIB", "ifIndex"), (0, "A3COM-HUAWEI-SNA-DLSW-MIB", "dlswBridgeIfBriGru"))
if mibBuilder.loadTexts: dlswLlc2PortEntry.setStatus('current')
dlswLLC2PortAckDelayTime = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 7, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60000)).clone(100)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswLLC2PortAckDelayTime.setStatus('current')
dlswLLC2PortAckMax = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 7, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 127)).clone(3)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswLLC2PortAckMax.setStatus('current')
dlswLLC2PortLocalWnd = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 7, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 127)).clone(7)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswLLC2PortLocalWnd.setStatus('current')
dlswLLC2PortModulus = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 7, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(8, 128))).clone(namedValues=NamedValues(("m8", 8), ("m128", 128))).clone('m128')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswLLC2PortModulus.setStatus('current')
dlswLLC2PortN2 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 7, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255)).clone(20)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswLLC2PortN2.setStatus('current')
dlswLLC2PortT1 = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 7, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60000)).clone(200)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswLLC2PortT1.setStatus('current')
dlswLLC2PortTbusyTime = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 7, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60000)).clone(300)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswLLC2PortTbusyTime.setStatus('current')
dlswLLC2PortTpfTime = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 7, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60000)).clone(500)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswLLC2PortTpfTime.setStatus('current')
dlswLLC2PortTrejTime = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 7, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 60000)).clone(500)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswLLC2PortTrejTime.setStatus('current')
dlswLLC2PortTxqMax = MibTableColumn((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 7, 1, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(20, 200)).clone(50)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswLLC2PortTxqMax.setStatus('current')
# --- Trap control (.1.20) and notifications (.8.0) ---------------------------
dlswTrapControl = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 20))
# Global enable/disable switch for trap emission.
dlswTrapCntlState = MibScalar((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 1, 20, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enabled", 1), ("disabled", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dlswTrapCntlState.setStatus('current')
dlswTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 8))
dlswTrapsV2 = MibIdentifier((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 8, 0))
# Each notification binds the varbinds listed in setObjects().
dlswTrapTConnPartnerReject = NotificationType((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 8, 0, 1)).setObjects(("A3COM-HUAWEI-SNA-DLSW-MIB", "dlswRemotePeerAddr"))
if mibBuilder.loadTexts: dlswTrapTConnPartnerReject.setStatus('current')
dlswTrapTConnChangeState = NotificationType((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 8, 0, 2)).setObjects(("A3COM-HUAWEI-SNA-DLSW-MIB", "dlswRemotePeerAddr"), ("A3COM-HUAWEI-SNA-DLSW-MIB", "dlswRemotePeerLinkState"))
if mibBuilder.loadTexts: dlswTrapTConnChangeState.setStatus('current')
dlswTrapCircuitChangeState = NotificationType((1, 3, 6, 1, 4, 1, 43, 45, 1, 2, 34, 8, 0, 3)).setObjects(("A3COM-HUAWEI-SNA-DLSW-MIB", "dlswCircuitS1CircuitId"), ("A3COM-HUAWEI-SNA-DLSW-MIB", "dlswCircuitState"), ("A3COM-HUAWEI-SNA-DLSW-MIB", "dlswCircuitS1Mac"), ("A3COM-HUAWEI-SNA-DLSW-MIB", "dlswCircuitS1Sap"), ("A3COM-HUAWEI-SNA-DLSW-MIB", "dlswCircuitS2Mac"), ("A3COM-HUAWEI-SNA-DLSW-MIB", "dlswCircuitS2Sap"))
if mibBuilder.loadTexts: dlswTrapCircuitChangeState.setStatus('current')
# Register every defined object (plus the module identity via
# PYSNMP_MODULE_ID) under this MIB's name so other modules can import them.
mibBuilder.exportSymbols("A3COM-HUAWEI-SNA-DLSW-MIB", dlswLLC2PortModulus=dlswLLC2PortModulus, dlswLLC2PortTxqMax=dlswLLC2PortTxqMax, dlswBridgeNum=dlswBridgeNum, dlswCircuitS2TAddress=dlswCircuitS2TAddress, dlswRemotePeerRecvPacks=dlswRemotePeerRecvPacks, dlswTrapTConnChangeState=dlswTrapTConnChangeState, dlswNodeMaxWindow=dlswNodeMaxWindow, dlswNode=dlswNode, dlswLlc2=dlswLlc2, dlswRemotePeerLinger=dlswRemotePeerLinger, dlswSdlc=dlswSdlc, dlswCircuitS2Location=dlswCircuitS2Location, dlswBridgeTable=dlswBridgeTable, dlswBridgeStatus=dlswBridgeStatus, LFSize=LFSize, dlswSdlcPortSerialName=dlswSdlcPortSerialName, dlswRemotePeerBackupAddr=dlswRemotePeerBackupAddr, dlswSdlcPortTable=dlswSdlcPortTable, dlswNodeVirtualSegmentLFSize=dlswNodeVirtualSegmentLFSize, dlswRemotePeerAddr=dlswRemotePeerAddr, dlswCircuitS1Sap=dlswCircuitS1Sap, dlswRemotePeerNumOfTcpSessions=dlswRemotePeerNumOfTcpSessions, dlswLocMacTable=dlswLocMacTable, dlswCircuitS1IfIndex=dlswCircuitS1IfIndex, dlswRemotePeerLinkState=dlswRemotePeerLinkState, dlswSdlcPortVmac=dlswSdlcPortVmac, dlswNodeStdPacingSupport=dlswNodeStdPacingSupport, PYSNMP_MODULE_ID=dlsw, dlswCircuitS1Ifname=dlswCircuitS1Ifname, dlswRemotePeerSetStateToConfig=dlswRemotePeerSetStateToConfig, dlswCircuitFCRecvCurrentWndw=dlswCircuitFCRecvCurrentWndw, dlswRemotePeerIsConfig=dlswRemotePeerIsConfig, dlswSdlcPortRole=dlswSdlcPortRole, dlswCircuitS1CircuitId=dlswCircuitS1CircuitId, DlcType=DlcType, dlswRemotePeerKeepAlive=dlswRemotePeerKeepAlive, dlswLLC2PortLocalWnd=dlswLLC2PortLocalWnd, dlswSdlcLsAddress=dlswSdlcLsAddress, dlswNodeRemotePendTimeout=dlswNodeRemotePendTimeout, dlswRemotePeerHaveBackup=dlswRemotePeerHaveBackup, dlswLLC2PortTrejTime=dlswLLC2PortTrejTime, dlswCircuitFCLargestSendGranted=dlswCircuitFCLargestSendGranted, dlswNodeSnaCacheTimeout=dlswNodeSnaCacheTimeout, dlswNodeLocalPendTimeout=dlswNodeLocalPendTimeout, dlswNodeStatus=dlswNodeStatus, dlswCircuitStateTime=dlswCircuitStateTime, dlswNodeKeepAlive=dlswNodeKeepAlive, 
dlswSdlcPortIdleMarkEnable=dlswSdlcPortIdleMarkEnable, dlswRemotePeerVersion=dlswRemotePeerVersion, CreateLineFlag=CreateLineFlag, dlswRemotePeerEntry=dlswRemotePeerEntry, dlswRemotePeerSendPacks=dlswRemotePeerSendPacks, dlswNodeVersionString=dlswNodeVersionString, EntryStatus=EntryStatus, dlswCircuitFCLargestRecvGranted=dlswCircuitFCLargestRecvGranted, dlswNodeUpTime=dlswNodeUpTime, dlswNodeVendorID=dlswNodeVendorID, dlswCircuitPriority=dlswCircuitPriority, dlswNodeVersion=dlswNodeVersion, dlswSdlcPortEncap=dlswSdlcPortEncap, dlswLlc2PortTable=dlswLlc2PortTable, dlswLLC2PortAckDelayTime=dlswLLC2PortAckDelayTime, dlswLLC2PortN2=dlswLLC2PortN2, dlswBridgeIfTable=dlswBridgeIfTable, dlswRemotePeerTable=dlswRemotePeerTable, dlswCircuitEntry=dlswCircuitEntry, dlswCircuitFCSendGrantedUnits=dlswCircuitFCSendGrantedUnits, dlswLLC2PortTpfTime=dlswLLC2PortTpfTime, dlswRemotePeerPaceWindInit=dlswRemotePeerPaceWindInit, dlswRemotePeerIsBackup=dlswRemotePeerIsBackup, dlswCircuitS2Sap=dlswCircuitS2Sap, dlswLLC2PortAckMax=dlswLLC2PortAckMax, dlswLocMacHashIndex=dlswLocMacHashIndex, dlswTrapCircuitChangeState=dlswTrapCircuitChangeState, dlswTConn=dlswTConn, dlswCircuitOrigin=dlswCircuitOrigin, dlswLlc2PortEntry=dlswLlc2PortEntry, dlswCircuitState=dlswCircuitState, dlswCircuitS1Mac=dlswCircuitS1Mac, dlswSdlcLsEntry=dlswSdlcLsEntry, dlswCircuitEntryTime=dlswCircuitEntryTime, dlswSdlcLsStatus=dlswSdlcLsStatus, dlswCircuitS2CircuitId=dlswCircuitS2CircuitId, dlswLLC2PortTbusyTime=dlswLLC2PortTbusyTime, dlswRemotePeerTcpQueneMax=dlswRemotePeerTcpQueneMax, dlswCircuit=dlswCircuit, dlswBridgeEntry=dlswBridgeEntry, dlswSdlcPortEntry=dlswSdlcPortEntry, dlswRemotePeerDrops=dlswRemotePeerDrops, dlswCircuitTable=dlswCircuitTable, dlswNodePermitDynamic=dlswNodePermitDynamic, dlswRemotePeerVendorID=dlswRemotePeerVendorID, dlswSdlcPortModule=dlswSdlcPortModule, dlsw=dlsw, dlswSdlcLsSsap=dlswSdlcLsSsap, dlswCircuitFCRecvGrantedUnits=dlswCircuitFCRecvGrantedUnits, 
dlswSdlcPortSimultaneousEnable=dlswSdlcPortSimultaneousEnable, dlswSdlcLsTable=dlswSdlcLsTable, dlswTrapControl=dlswTrapControl, dlswSdlcLsLocalId=dlswSdlcLsLocalId, dlswBridgeIfBriGru=dlswBridgeIfBriGru, dlswRemotePeerUptime=dlswRemotePeerUptime, dlswTraps=dlswTraps, dlswNodeConnTimeout=dlswNodeConnTimeout, dlswTrapCntlState=dlswTrapCntlState, dlswTrapsV2=dlswTrapsV2, MacAddressNC=MacAddressNC, dlswSdlcPortN2=dlswSdlcPortN2, dlswLocMacLocalInterfaceName=dlswLocMacLocalInterfaceName, dlswNodeInitWindow=dlswNodeInitWindow, dlswTrapTConnPartnerReject=dlswTrapTConnPartnerReject, dlswSdlcPortN1=dlswSdlcPortN1, dlswRemotePeerCost=dlswRemotePeerCost, dlswSdlcPortPollPauseTimer=dlswSdlcPortPollPauseTimer, dlswSdlcPortK=dlswSdlcPortK, EndStationLocation=EndStationLocation, dlswRemotePeerLf=dlswRemotePeerLf, dlswBridgeIfEntry=dlswBridgeIfEntry, dlswSdlcLsRemoteMac=dlswSdlcLsRemoteMac, dlswSdlcPortHoldq=dlswSdlcPortHoldq, dlswLLC2PortT1=dlswLLC2PortT1, dlswLocMacHashIndexSeqNum=dlswLocMacHashIndexSeqNum, dlswSdlcPortT1=dlswSdlcPortT1, dlswSdlcPortT2=dlswSdlcPortT2, dlswLocMacMac=dlswLocMacMac, dlswRemotePeerEntryStatus=dlswRemotePeerEntryStatus, dlswBridgeGroup=dlswBridgeGroup, dlswNodePriority=dlswNodePriority, dlswSdlcPortNrziEncoding=dlswSdlcPortNrziEncoding, dlswLocMacEntry=dlswLocMacEntry, dlswBridgeIfStatus=dlswBridgeIfStatus, dlswCircuitS2Mac=dlswCircuitS2Mac, dlswBridgeIfName=dlswBridgeIfName, dlswSdlcLsDsap=dlswSdlcLsDsap, dlswCircuitS1DlcType=dlswCircuitS1DlcType, dlswNodeLocalAddr=dlswNodeLocalAddr, dlswCircuitFCSendCurrentWndw=dlswCircuitFCSendCurrentWndw, dlswLocDirectory=dlswLocDirectory, dlswRemotePeerVersionString=dlswRemotePeerVersionString)
| 39,506 | 18,651 |
# Leetcode 98. Validate Binary Search Tree
#
# Link: https://leetcode.com/problems/validate-binary-search-tree/
# Difficulty: Medium
# Complexity:
# O(N) time | where N represent the number of elements in the input tree
# O(N) space | where N represent the number of elements in the input tree
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def isValidBST(self, root: "Optional[TreeNode]") -> bool:
        """Return True if the tree rooted at `root` is a valid binary
        search tree (strictly increasing in-order sequence; duplicates
        are invalid).

        Time:  O(N) - each node is visited once.
        Space: O(N) - the explicit stack holds at most one root-to-leaf
               path (O(H); O(N) for a degenerate tree).

        Note: the annotation is a string literal so the class can be
        defined even though TreeNode/Optional are only shown commented
        out above (the original bare annotation raised NameError).
        """
        # Iterative in-order traversal; iteration avoids Python's
        # recursion limit on very deep (degenerate) trees.
        stack = []
        previous = None  # last node emitted by the in-order traversal
        node = root
        while node or stack:
            # Descend as far left as possible.
            while node:
                stack.append(node)
                node = node.left
            node = stack.pop()
            # In-order values must be strictly increasing.
            if previous is not None and node.val <= previous.val:
                return False
            previous = node
            node = node.right
        return True
| 1,263 | 387 |
from setuptools import setup

# Read the version string without importing the package; the file holds a
# quoted version like '"1.2.3"'. Use a context manager so the handle is
# closed promptly (the original left the file object open).
with open("ccb/__version__.py") as version_file:
    version = version_file.read().strip('"\n')

setup_args = {
    "name": "ccb",
    "version": version,
    "url": "https://github.com/earth-chris/ccb",
    "license": "MIT",
    "author": "Christopher Anderson",
    "author_email": "cbanders@stanford.edu",
    "description": "Species distribution modeling support tools",
    # BUGFIX: "ecologyy" -> "ecology" (typo in package keyword metadata).
    "keywords": ["maxent", "biogeography", "SDM", "species distribution modeling", "ecology", "conservation"],
    "packages": ["ccb"],
    "include_package_data": True,
    "platforms": "any",
    "scripts": ["bin/gbif-to-vector.py", "bin/vector-to-maxent.py"],
    "data_files": [("maxent", ["ccb/maxent/maxent.jar", "ccb/maxent/README.txt", "ccb/maxent/LICENSE.txt"])],
}
setup(**setup_args)
| 763 | 277 |
# import time
#
# def reader():
# """A generator that fakes a read from a file, socket, etc."""
# for i in range(101):
# yield '<< %s' % i
#
# def consumer():
# r = ''
# while True:
# #但是Python的yield不但可以返回一个值,它还可以接收调用者发出的参数。
# #此处的n是接受参数
# n = yield from reader()
# print("===",n)
# if not n:
# return
# print('[CONSUMER] Consuming %s...' % n)
# r = '200 OK'
#
# def produce(c):
# c.send(None)
# n = 0
# while n < 100:
# n = n + 1
# print('[PRODUCER] Producing %s...' % n)
# r = c.send(n)
# print('[PRODUCER] Consumer return: %s' % r)
# c.close()
#
# c = consumer()
# produce(c)
# def getIN():
# for x in range(1000):
# n = yield x
# print(n,"--rer",x)
#
# ge =getIN()
#
# #开始
# ge.send(None)
# ge.send("11")
# ge.send("222")
def accumulate():
    """Sub-generator: sum every non-None value sent in.

    Sending ``None`` ends the accumulation; the running total is
    delivered via ``StopIteration.value`` (i.e. as the result of
    ``yield from``).
    """
    tally = 0
    while True:
        # BUGFIX/idiom: renamed the received variable from `next`, which
        # shadowed the builtin of the same name.
        value = yield
        if value is None:
            # Sentinel: hand the total back to the delegating generator.
            return tally
        tally += value
def gather_tallies(tallies):
    """Delegating generator: run accumulate() repeatedly, appending each
    finished total to `tallies`."""
    while True:
        tallies.append((yield from accumulate()))
# Drive the delegating generator through two accumulation rounds.
tallies = []
acc = gather_tallies(tallies)
next(acc)  # prime the generator so it is ready to receive sent values
for i in range(4):
    acc.send(i)
acc.send(None)  # end the first accumulation (0+1+2+3 == 6)
for i in range(5):
    acc.send(i)
acc.send(None)  # end the second accumulation (0+1+2+3+4 == 10)
print(tallies)  # -> [6, 10]
def get():
    """Generator that yields a bare ``None`` nine times.

    Equivalent to the original counting loop: the counter ran from 2
    through 10 inclusive, yielding once per value before stopping.
    """
    for _ in range(2, 11):
        yield
# Consume the generator: prints "None" nine times (get() yields no value).
for x in get():
    print(x)
| 1,576 | 719 |
import maya.cmds as mc
import maya.OpenMaya as OpenMaya
import glTools.utils.base
def transferComponentSelection(sourceSelection,targetMesh,threshold=0.0001):
    '''
    Transfer a component (vertex) selection onto the closest vertices of
    another mesh. Each target vertex is claimed at most once.

    @param sourceSelection: Source component selection to transfer.
    @param targetMesh: Mesh to transfer the selection onto.
    @param threshold: Distance below which a candidate vertex is accepted
                      immediately (early-out of the closest-point search).
    @return: List of target mesh vertex components (e.g. "mesh.vtx[3]").
    '''
    # Check selection target mesh
    if not mc.objExists(targetMesh):
        raise Exception('Target mesh "'+targetMesh+'" does not exist!')

    # Flatten selection
    sourceSelection = mc.ls(sourceSelection,fl=True)

    # Get mesh points
    tPtArray = glTools.utils.base.getMPointArray(targetMesh)
    tPtLen = tPtArray.length()

    # Track which target vertices have already been claimed.
    tPtBool = [False for i in range(tPtLen)]

    # Initialize selection list
    tSel = []

    # Transfer selection
    for sel in sourceSelection:
        # Get world-space position of the source component
        pt = mc.pointPosition(sel)
        pt = OpenMaya.MPoint(pt[0],pt[1],pt[2],1.0)

        # Linear search for the closest unclaimed target vertex
        cDist = float('inf')  # was a 99999 magic number
        cIndex = -1
        for i in range(tPtLen):
            # Skip vertices already claimed by an earlier component
            if tPtBool[i]: continue
            # Check distance to current point
            dist = (pt-tPtArray[i]).length()
            if dist < cDist:
                cDist = dist
                cIndex = i
            # Close enough - stop searching early
            if dist < threshold: break

        # Append selection
        tSel.append(targetMesh+'.vtx['+str(cIndex)+']')
        # BUGFIX: mark the *chosen* vertex as used. The original set
        # tPtBool[i] = True, i.e. whatever index the scan happened to
        # stop on, so claimed vertices could be matched again while an
        # unrelated vertex was wrongly excluded.
        tPtBool[cIndex] = True

    # Return result
    return tSel
| 1,288 | 503 |
"""
A Python package for recipe parsing and management.
"""
import yaml
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
from .exceptions import LoadError, ParseError, PyprikaError, FieldError # noqa
from .ingredient import Ingredient # noqa
from .quantity import Quantity # noqa
from .recipe import Recipe
from .version import __author__, __version__ # noqa
def load(fp, loader=None, **kw):
    """ Load ``fp``, a file-like object

    The file is assumed to be a pyprika-compliant YAML document. If the
    document contains a sequence, a list of ``Recipe`` objects will be
    returned. Otherwise, a single ``Recipe`` object should be returned.

    Note that this function wraps the underlying exceptions thrown by
    :meth:`Recipe.from_dict` under the assumption it is due to a malformed
    document, but the original traceback is preserved.

    :param file-like fp: the file-like object containing the document to load
    :param callable loader: takes one positional argument and optional
                            arguments and returns a dict (defaults to
                            ``yaml.safe_load``)
    :param kw: passed through to loader
    :raises LoadError: if there was an error in the loading of the document,
                       usually indicative of a syntax error
    :returns: the recipe data contained in the stream
    :rtype: :class:`Recipe` or list of :class:`Recipe`
    """
    # Default to safe_load: bare yaml.load with no explicit Loader is unsafe
    # on untrusted documents and raises TypeError under PyYAML >= 6. Callers
    # needing custom tags can still supply their own loader.
    loader = loader or yaml.safe_load
    try:
        # BUGFIX: forward **kw to the loader as documented (they were
        # previously accepted but silently dropped).
        d = loader(fp, **kw)
        if isinstance(d, (tuple, list)):
            return [Recipe.from_dict(x) for x in d]
        elif isinstance(d, dict):
            return Recipe.from_dict(d)
        else:
            raise LoadError('Recipe did not decode as expected (got %s)' %
                            type(d).__name__)
    except PyprikaError as e:
        # Re-wrap parsing/field errors as a load failure, keeping the cause.
        raise LoadError(*e.args, cause=e)
def loads(data, loader=None, **kw):
    """ Load recipe from string data.

    Convenience wrapper: puts ``data`` into an in-memory stream and
    delegates to :func:`load` (see it for parameter details).

    :param str data: recipe document data
    :returns: the recipe data contained in ``data``
    :rtype: :class:`Recipe` or list of :class:`Recipe`
    """
    stream = StringIO(data)
    return load(stream, loader=loader, **kw)
def dump(recipe, fp, dumper=None, **kw):
    """ Dump recipe to a file-like object

    :param Recipe recipe: the recipe to dump
    :param file-like fp: the file stream to dump to
    :param callable dumper: a callable which takes two positional arguments,
                            the first a dict and the second a file stream,
                            and optional keyword arguments, encoding the
                            recipe to the stream (defaults to yaml.dump)
    :param kw: passed through to dumper
    """
    dumper = dumper or yaml.dump
    payload = recipe.to_dict(serialize=True)
    dumper(payload, fp, **kw)
def dumps(recipe, dumper=None, **kw):
    """ Dump recipe object as a string.

    Convenience wrapper around :func:`dump` using an in-memory stream;
    see :func:`dump` for parameter details.

    :returns: recipe encoded as a string
    :rtype: str
    """
    buffer = StringIO()
    dump(recipe, buffer, dumper=dumper, **kw)
    return buffer.getvalue()
| 3,289 | 969 |
def test_get_bond_infos(case_data):
    """
    Test :meth:`.ConstructedMolecule.get_bond_infos`.

    Parameters
    ----------
    case_data : :class:`.CaseData`
        A test case. Holds the constructed molecule to test and the
        correct number of new bonds.

    Returns
    -------
    None : :class:`NoneType`

    """
    molecule = case_data.constructed_molecule
    expected_new_bonds = case_data.num_new_bonds
    _test_get_bond_infos(
        constructed_molecule=molecule,
        num_new_bonds=expected_new_bonds,
    )
def _test_get_bond_infos(constructed_molecule, num_new_bonds):
"""
Test :meth:`.ConstructedMolecule.get_bond_infos`.
Parameters
----------
constructed_molecule : :class:`.ConstructedMolecule`
The constructed molecule to test.
num_new_bonds : :class:`int`
The correct number of new bonds added.
Returns
-------
None : :class:`NoneType`
"""
new_bonds = filter(
lambda bond_info: bond_info.get_building_block() is None,
constructed_molecule.get_bond_infos(),
)
assert sum(1 for _ in new_bonds) == num_new_bonds
assert (
constructed_molecule.get_num_bonds()
== sum(1 for _ in constructed_molecule.get_bond_infos())
)
| 1,202 | 438 |
from django.apps import AppConfig
class BaseViewsConfig(AppConfig):
    """Django application configuration for the ``base_views`` app."""

    name = 'base_views'
| 94 | 29 |
# Walk the 15 currently "rising" submissions across all of Reddit using the
# credentials stored under the "dwight-schrute-bot" praw.ini site name.
import json  # NOTE(review): unused in this snippet; looks like a leftover
import praw

reddit = praw.Reddit("dwight-schrute-bot")
for submission in reddit.subreddit('all').rising(limit=15):
    # Resolve all "MoreComments" placeholders so comments.list() is complete.
    submission.comments.replace_more(limit=None)
    print(submission.subreddit.display_name)
    if not submission.over_18:  # skip NSFW submissions
        for comment in submission.comments.list():
            # NOTE(review): prints only a blank line per comment - the comment
            # body is never used; this looks unfinished.
            print()
from pymavlink import mavutil
import time

# Create the connection (listen for MAVLink over UDP on port 14550).
master = mavutil.mavlink_connection('udpin:0.0.0.0:14550')

# Wait a heartbeat before sending commands so target_system is known.
master.wait_heartbeat()

# Send a positive x value, negative y, negative z,
# positive rotation and no button.
# http://mavlink.org/messages/common#MANUAL_CONTROL
# Warning: Because of some legacy workaround, z will work between [0-1000]
# where 0 is full reverse, 500 is no output and 1000 is full throttle.
# x,y and r will be between [-1000 and 1000].
master.mav.manual_control_send(
    master.target_system,
    500,
    -500,
    250,
    500,
    0)

# To activate button 0 (first button), 3 (fourth button) and 7 (eighth button).
# It's possible to check and configure these buttons in the Joystick menu of QGC.
# BUGFIX: the original `1 + 1 << 3 + 1 << 7` parsed as
# ((1 + 1) << (3 + 1)) << 7 == 4096 because `+` binds tighter than `<<`.
# Parenthesized it yields the intended bitmask 0b10001001 == 137.
buttons = 1 + (1 << 3) + (1 << 7)
master.mav.manual_control_send(
    master.target_system,
    0,
    0,
    0,
    0,
    buttons)

# Request all parameters
master.mav.param_request_list_send(
    master.target_system, master.target_component
)
while True:
    time.sleep(0.01)
    try:
        message = master.recv_match(type='PARAM_VALUE', blocking=True).to_dict()
        print('name: %s\tvalue: %d' % (message['param_id'].decode("utf-8"), message['param_value']))
    except Exception as e:
        print(e)
        exit(0)
| 1,311 | 474 |
# -*- coding: utf-8 -*-
#############################################################
#                         IMPORTS                           #
#############################################################
import os

# BUGFIX: ImageFile was referenced below but never imported (NameError).
from PIL import Image, ImageFile
import pyheif

#############################################################
#                           PATH                            #
#############################################################
PATH = os.path.dirname(os.path.abspath(__file__))
os.chdir(PATH)

#############################################################
#                         CONTENT                           #
#############################################################
# Tolerate partially-written/corrupt source images instead of raising.
ImageFile.LOAD_TRUNCATED_IMAGES = True

EXTENSION = (".heic", ".HEIC")
FOLDER = [file for file in sorted(os.listdir()) if file.endswith(EXTENSION)]
TOTAL = len(FOLDER)

# Resolution metadata written into the converted JPEGs.
# BUGFIX: DPI was used in image.save() but never defined (NameError);
# 300 is assumed here - adjust if a different density is required.
DPI = 300

#############################################################
#                           MAIN                            #
#############################################################
for i, file in enumerate(FOLDER):
    filename, file_extension = os.path.splitext(file)
    try:
        heif_file = pyheif.read(file)
    except Exception as error:
        # BUGFIX: print the raised exception instance, not the Exception class.
        print(error)
    else:
        if not os.path.exists(PATH + "/JPG"):
            os.makedirs(PATH + "/JPG")
        image = Image.frombytes(
            heif_file.mode,
            heif_file.size,
            heif_file.data,
            "raw",
            heif_file.mode,
            heif_file.stride,
        )
        image = image.convert("RGB")
        # BUGFIX: write each image under its own base name; the output path
        # previously did not use the source file name at all.
        image.save(f"{PATH}/JPG/{filename}.jpg", dpi=(DPI, DPI),
                   format='JPEG', subsampling=0, quality=100)
import functools
import os
import ssl
import uuid
from typing import Any, Dict, List, Literal, Optional, Tuple
import jinja2
import pydantic
import yaml
from .exceptions import TplBuildException, TplBuildTemplateException
# Profile-data keys injected by tplbuild itself; user-defined profiles may
# not set them (enforced by TplConfig.profile_reserved_keys below).
RESERVED_PROFILE_KEYS = {
    "begin_stage",
    "platform",
}
def _normalize_rel_path(path: str) -> str:
"""Normalize and coerce a path into a relative path."""
return f".{os.path.sep}{os.path.normpath(os.path.join(os.path.sep, path))[1:]}"
class TplContextConfig(pydantic.BaseModel):
    """
    Config model representing a build context.
    """

    #: The base directory (relative to the config base directory) of
    #: the build context. This must be a relative path and cannot point
    #: above the config base directory.
    base_dir: str = "."

    #: The umask as a three digit octal string. This may also be set to
    #: None if the context permissions should be passed through directly.
    umask: Optional[str] = "022"

    #: The ignore_file to load patterns from. If this and :attr:`ignore`
    #: are both None then this will attempt to load ".dockerignore", using
    #: an empty list of patterns if that cannot be loaded.
    ignore_file: Optional[str] = None

    #: Ignore file string. If present this will be used over :attr:`ignore_file`.
    ignore: Optional[str] = None

    @pydantic.validator("umask")
    def umask_valid_octal(cls, v):
        """Ensure that umask is a three-digit octal sequence"""
        if v is None:
            return v
        # BUGFIX: the range test was inverted - it previously raised
        # "umask out of range" for every *valid* umask and silently
        # accepted out-of-range values.
        if not 0 <= int(v, 8) <= 0o777:
            raise ValueError("umask out of range")
        return v

    @pydantic.validator("base_dir")
    def normalize_base_dir(cls, v):
        """Normalize the base directory"""
        return _normalize_rel_path(v)
class ClientCommand(pydantic.BaseModel):
    """
    Configuration to invoke an external build command.

    Typically both :attr:`args` and the values of :attr:`environment` will be
    subject to keyword substitutions. For instance build commands will substitute
    any instance of the string "{image}" with the desired image tag. This is to
    be similar to the typical Python format implementation (although does not
    use `str.format` for security reasons).
    """

    #: A jinja template used to construct invoke arguments and environment
    #: variables based on the template arguments passed. Depending on the
    #: build command different template arguments may be passed. All templates
    #: are passed an `args` list and an `environment` dict that they should
    #: populate with the command arguments and environment variables used
    #: to invoke the build command. The output of the template will be ignored.
    template: str

    def render(
        self,
        jinja_env: jinja2.Environment,
        params: Dict[str, str],
    ) -> Tuple[List[str], Dict[str, str]]:
        """Return the list of arguments after being rendered with the given params"""
        args: List[str] = []
        environment: Dict[str, str] = {}
        try:
            # The template is executed purely for its side effects on the
            # `args`/`environment` objects in its namespace; the rendered
            # text itself is intentionally discarded.
            for _ in jinja_env.from_string(self.template).generate(
                **params,
                args=args,
                environment=environment,
            ):
                pass
        except jinja2.TemplateError as exc:
            raise TplBuildTemplateException(
                "Failed to render command template"
            ) from exc
        if not args:
            # BUGFIX: removed a stray debug print of the raw template that
            # leaked to stdout before this error was raised.
            raise TplBuildException("command template rendered no command arguments")
        return args, environment
class ClientConfig(pydantic.BaseModel):
    """
    Configuration of commands to perform various container operations. This is
    meant to be a generic interface that could plug into a variety of container
    build systems. Typically you can just set :attr:`UserConfig.client_type` to
    select from preconfigured client configurations.

    Each field is a :class:`ClientCommand` whose template receives the
    arguments documented on that field.
    """

    #: Build command config template. This should render an appropriate command
    #: to build an image using a dockerfile named "Dockerfile" and build
    #: context provided by stdin. The output should be tagged as the passed
    #: argument `image`.
    #:
    #: Arguments:
    #:   image: str - The image name to tag the output
    #:   platform: str? - The build platform to use if known.
    build: ClientCommand

    #: Tag command config template. This should tag an existing image with
    #: a new image name.
    #:
    #: Arguments:
    #:   source_image: str - The source image name
    #:   dest_image: str - The new name to tag `source_image` as
    tag: ClientCommand

    #: Pull command config template. This should pull the named image from
    #: the remote registry into local storage. Optional: may be None when
    #: the client cannot pull.
    #:
    #: Arguments:
    #:   image: str - The name of the image to pull
    pull: Optional[ClientCommand] = None

    #: Push command config template. This should push the named image to
    #: the remote registry from local storage.
    #:
    #: Arguments:
    #:   image: str - The name of the image to push
    push: ClientCommand

    #: Un-tag command config template. This should untag the named image
    #: allowing data referenced by the image to be reclaimed.
    #:
    #: Arguments:
    #:   image: str - The name of the image to untag
    untag: ClientCommand

    #: Command that should print out the default build platform for the client.
    #: This template is passed no additional arguments. If this command is not
    #: available the default build platform will be calculated using the local
    #: client platform instead. The output will be normalized to convert
    #: e.g. "linux/x64_64" to "linux/amd64". This will only be used for
    #: platform aware build configurations.
    platform: Optional[ClientCommand] = None
# Sentinel ClientConfig with all-empty command templates. UserConfig's
# `default_replace_client` validator detects it (via the empty
# `build.template`) and substitutes the builtin config for the selected
# `client_type`.
UNSET_CLIENT_CONFIG = ClientConfig(
    build=ClientCommand(template=""),
    tag=ClientCommand(template=""),
    push=ClientCommand(template=""),
    untag=ClientCommand(template=""),
)
@functools.lru_cache
def get_builtin_configs() -> Dict[str, ClientConfig]:
    """
    Return a cached mapping of preconfigured clients.

    The builtin client definitions ship alongside this module in
    "builtin_clients.yml".
    """
    config_path = os.path.join(os.path.dirname(__file__), "builtin_clients.yml")
    with open(config_path, "r", encoding="utf-8") as stream:
        raw_configs = yaml.safe_load(stream)
    result: Dict[str, ClientConfig] = {}
    for name, data in raw_configs.items():
        result[name] = ClientConfig(**data)
    return result
class UserSSLContext(pydantic.BaseModel):
    """Custom SSL context used to contact registries"""

    #: Disable SSL/TLS verification entirely.
    insecure: bool = False

    #: File path to load CA certificates to trust.
    cafile: Optional[str] = None

    #: Folder containing CA certificate files to trust.
    capath: Optional[str] = None

    #: Raw certificate data to trust.
    cadata: Optional[str] = None

    #: If True default system certs will be loaded in addition to any certs
    #: implied by `cafile`, `capath`, or `cadata`. Normally these will only be
    #: loaded if those are all unset.
    load_default_certs: bool = False

    def create_context(self) -> ssl.SSLContext:
        """Build an :class:`ssl.SSLContext` from this configuration."""
        context = ssl.create_default_context(
            cafile=self.cafile,
            capath=self.capath,
            cadata=self.cadata,
        )
        if self.insecure:
            # Hostname checking must be disabled before clearing verify_mode.
            context.check_hostname = False
            context.verify_mode = ssl.CERT_NONE
        if self.load_default_certs:
            context.load_default_certs()
        return context
class StageConfig(pydantic.BaseModel):
    """Configuration data for a named build stage"""

    #: Is the stage a base stage
    base: bool = False

    #: All image names to assign to the built image. Must be empty for base images.
    image_names: List[str] = []

    #: All image names to assign and then push to remote registries.
    #: Must be empty for base images.
    push_names: List[str] = []

    @pydantic.validator("image_names")
    def image_names_empty_for_base(cls, v, values):
        """Ensure base images have no image_names"""
        # Check `v` first: if the `base` field failed validation it will be
        # absent from `values`, and an empty list needs no check anyway.
        if not v:
            return v
        if values["base"]:
            raise ValueError("image_names must be empty for base images")
        return v

    @pydantic.validator("push_names")
    def push_names_empty_for_base(cls, v, values):
        """Ensure base images have no push_names"""
        if not v:
            return v
        if values["base"]:
            raise ValueError("push_names must be empty for base images")
        return v
class UserConfig(pydantic.BaseModel):
    """User settings controlling tplbuild behavior"""

    #: Must be "1.0"
    version: Literal["1.0"] = "1.0"

    #: If :attr:`client` is None this field will be used to set the client
    #: configuration. Supported values are currently "docker" and "podman".
    #: If :attr:`client` is not None this field is ignored.
    client_type: str = "docker"

    #: Client commands to use to perform different container actions. If unset
    #: a default configuration will be provided based on the value of
    #: :attr:`client_type`. If you wish to use a different builder or supply
    #: additional arguments to the build this would be the place to do it.
    client: ClientConfig = UNSET_CLIENT_CONFIG

    #: Maximum number of concurrent build jobs. If set to 0 this will be set to
    #: `os.cpu_count()`.
    build_jobs: int = 0

    #: Maximum number of concurrent push or pull jobs.
    push_jobs: int = 4

    #: Maximum number of concurrent tag jobs.
    tag_jobs: int = 8

    #: Maximum number of times a build will be retried before failing a build.
    build_retry: int = 0

    #: Maximum number of times a push or pull will be retried before failing a build.
    push_retry: int = 0

    #: Configure the SSL context used to contact registries. This only controls
    #: accesses made by tplbuild itself. The client builder will need to be
    #: configured separately.
    ssl_context: UserSSLContext = UserSSLContext()

    #: The path to the container auth configuration file to use when contacting
    #: registries. By default this will check the default search paths and conform
    #: to the syntax described in
    #: https://github.com/containers/image/blob/main/docs/containers-auth.json.5.md.
    auth_file: Optional[str] = None

    @pydantic.validator("build_jobs", always=True)
    def build_jobs_valid(cls, v):
        """ensure build_jobs is non-negative (0 means auto-detect)"""
        if v == 0:
            # 0 means "auto": one job per CPU, falling back to 4 when
            # os.cpu_count() cannot determine the count.
            return os.cpu_count() or 4
        if v < 0:
            raise ValueError("build_jobs must be non-negative")
        return v

    @pydantic.validator("push_jobs")
    def push_jobs_valid(cls, v):
        """ensure push_jobs is positive"""
        if v <= 0:
            raise ValueError("push_jobs must be positive")
        return v

    @pydantic.validator("tag_jobs")
    def tag_jobs_valid(cls, v):
        """ensure tag_jobs is positive"""
        if v <= 0:
            # BUGFIX: error message previously said "push_jobs must be
            # positive" (copy-paste from the validator above).
            raise ValueError("tag_jobs must be positive")
        return v

    @pydantic.validator("client", always=True)
    def default_replace_client(cls, v, values):
        """replace client with client_type if unset"""
        # UNSET_CLIENT_CONFIG is detected by its empty build template; any
        # explicitly configured client has a non-empty one.
        if v.build.template:
            return v
        client_type = values["client_type"]
        v = get_builtin_configs().get(client_type)
        if v is None:
            raise ValueError(f"no builtin client type named {repr(client_type)}")
        return v
class TplConfig(pydantic.BaseModel):
    """Configuration settings for a single tplbuild project"""

    #: Must be "1.0"
    version: Literal["1.0"] = "1.0"

    #: Jinja template that renders to the image name where a base image
    #: will be stored. This should *not* include a tag as tplbuild uses
    #: the tag itself to identify the content-addressed build. This
    #: template is passed "stage_name", "profile", and "platform"
    #: corresponding to the name of the stage, the name of the profile
    #: that rendered the image, and the name of the build platform respectively.
    base_image_name: Optional[str] = None

    #: A Jinja template that renders to the default image name for a
    #: given stage_name. Like :attr:`base_image_name` the template is passed
    #: "stage_name", "profile", and "platform" parameters.
    stage_image_name: str = "{{ stage_name}}"

    #: A Jinja template that renders to the default push name for a
    #: given stage_name. Like :attr:`base_image_name` the template is passed
    #: "stage_name", "profile", and "platform" parameters.
    stage_push_name: str = "{{ stage_name}}"

    #: The dockerfile "syntax" to use as the build frontend when running against
    #: builders that understand the "syntax" directive. For some build clients
    #: specifying a syntax may be required (e.g. the buildx client requires
    #: docker/dockerfile:1.4 or later).
    dockerfile_syntax: pydantic.constr(regex=r"^[^\s]*$") = ""  # type: ignore

    #: List of platforms to build images for. This defaults to linux/amd64
    #: but should be explicitly configured. Base images will be built
    #: for each platform listed here allowing for top-level images to
    #: be built in any of those platforms or as manifest lists when
    #: pushed.
    platforms: List[str] = ["linux/amd64"]

    #: A mapping of profile names to string-key template arguments to pass
    #: to any documents rendered through Jinja for each profile. Defaults
    #: to a single empty profile named 'default'.
    profiles: Dict[str, Dict[str, Any]] = {"default": {}}

    #: The name of the default profile to use. If this is empty
    #: the first profile name from :attr:`profiles` will be used instead.
    default_profile: str = ""

    #: A set of named build context configurations. These contexts may
    #: be referred to by name in the build file and should be unique
    #: among all other stages.
    contexts: Dict[str, TplContextConfig] = {"default": TplContextConfig()}

    #: A mapping of stage names to stage configs. This can be used to override
    #: the default behavior of tplbuild or apply different or more than just a
    #: single image name to a given stage. See
    #: :meth:`Tplbuild.default_stage_config` for information about default stage
    #: configuration.
    stages: Dict[str, StageConfig] = {}

    #: Search directories for included templates. Paths must be relative to the
    #: project base directory.
    template_paths: List[str] = ["."]

    #: Template entrypoint to render to generate all build stages. Path should
    #: be relative to one of the `template_paths`.
    template_entrypoint: str = "Dockerfile"

    @pydantic.validator("platforms")
    def platform_nonempty(cls, v):
        """Ensure that platforms is non empty"""
        if not v:
            raise ValueError("platforms cannot be empty")
        return v

    @pydantic.validator("profiles")
    def profile_not_empty(cls, v):
        """Make sure there is at least one profile"""
        if not v:
            raise ValueError("profiles cannot be empty")
        return v

    @pydantic.validator("profiles")
    def profile_name_nonempty(cls, v):
        """Make sure all profile names are non-empty"""
        if any(profile_name == "" for profile_name in v):
            raise ValueError("profile name cannot be empty")
        return v

    @pydantic.validator("profiles")
    def profile_reserved_keys(cls, v):
        """Make sure profile data does not use reserved keys"""
        for profile, profile_data in v.items():
            for reserved_key in RESERVED_PROFILE_KEYS:
                if reserved_key in profile_data:
                    raise ValueError(
                        f"Profile {repr(profile)} cannot have reserved key {repr(reserved_key)}"
                    )
        return v

    @pydantic.validator("default_profile")
    def default_profile_exists(cls, v, values):
        """Make sure default profile name exists if non-empty"""
        if v and v not in values["profiles"]:
            raise ValueError("default_profile must be a valid profile name")
        return v

    @pydantic.validator("template_paths")
    def normalize_template_paths(cls, v):
        """Normalize the template search paths"""
        return [_normalize_rel_path(path) for path in v]

    @pydantic.validator("template_entrypoint")
    def normalize_template_entrypoint(cls, v):
        """Normalize the template entrypoint path"""
        # NOTE(review): normalization is deliberately disabled here (the
        # commented-out call below); the entrypoint is passed through as-is.
        return v  # _normalize_rel_path(v)
class BaseImageBuildData(pydantic.BaseModel):
    """
    Stores content addressed keys used to store and retrieve base images
    from the remote registry.
    """

    #: The hash of all inputs that go into defining the image build definition.
    #: Combined with BuildData.hash_salt to form the content-address tag.
    build_hash: str

    #: The image digest stored in the registry.
    image_digest: str
class BuildData(pydantic.BaseModel):
    """
    Any build data that is managed by tplbuild itself rather than being
    configuration data provided by the user. Right now this includes a
    mapping of source images and base images to their content address
    sources.
    """

    #: Mapping of repo -> tag -> platform -> source image manifest digest.
    source: Dict[str, Dict[str, Dict[str, str]]] = {}

    #: Mapping of profile -> stage_name -> platform -> cached base image
    #: build data.
    base: Dict[str, Dict[str, Dict[str, BaseImageBuildData]]] = {}

    #: A string combined with the base image definition hashes to produce
    #: the final hash for base images. This ensures that different projects
    #: use disjoint hash spaces, that the base image keys bear no information
    #: by themselves, and to force rebuilds by changing the salt.
    hash_salt: str = ""

    @pydantic.validator("hash_salt", always=True)
    def default_hash_salt(cls, v):
        """Generate a random hash salt if one was not provided.

        (Renamed from ``default_profile_exists``, a copy-paste name from
        another model that described a completely different check.)
        """
        if not v:
            return str(uuid.uuid4())
        return v
| 17,863 | 5,070 |
#!/usr/bin/env python
from setuptools import setup
from distutils.command.build import build
from setuptools.command.develop import develop
class build_with_spurious(build):
    """Build command that additionally compiles the spuriousSSM C helper."""

    def run(self):
        """Compile peppercompiler/_spuriousSSM with $CC (or cc), then build."""
        import os
        # Idiom: os.environ.get replaces the explicit membership check.
        cc = os.environ.get("CC", "cc")
        # NOTE(review): os.system interpolates $CC unquoted into a shell
        # command; acceptable for a developer-controlled build environment.
        os.system(
            "{} -Wall -O3 peppercompiler/SpuriousDesign/spuriousSSM.c -o peppercompiler/_spuriousSSM -lm".
            format(cc))
        build.run(self)
class develop_with_spurious(develop):
    """Develop command that additionally compiles the spuriousSSM C helper."""

    def run(self):
        """Compile peppercompiler/_spuriousSSM, honoring $CC, then develop."""
        import os
        # Consistency fix: honor the CC environment variable like the build
        # command does (this path previously hard-coded "cc").
        cc = os.environ.get("CC", "cc")
        os.system(
            "{} -Wall -O3 peppercompiler/SpuriousDesign/spuriousSSM.c -o peppercompiler/_spuriousSSM -lm".
            format(cc))
        develop.run(self)
# Package metadata. The custom cmdclass hooks above ensure the spuriousSSM
# C binary is compiled whenever the package is built or installed in
# develop mode.
setup(
    name="peppercompiler",
    version="0.1.3",
    packages=['peppercompiler', 'peppercompiler.design'],
    install_requires=["pyparsing", "six"],
    include_package_data=True,
    package_data={
        'peppercompiler': ['_spuriousSSM', 'SpuriousDesign/spuriousSSM.c']
    },
    test_suite='peppercompiler.tests',
    cmdclass={'build': build_with_spurious,
              'develop': develop_with_spurious},
    entry_points={
        'console_scripts': [
            'pepper-compiler = peppercompiler.compiler:main',
            'pepper-design-spurious = peppercompiler.design.spurious_design:main',
            'pepper-finish = peppercompiler.finish:main',
            'spuriousSSM = peppercompiler._spuriousSSM_wrapper:main'
        ]
    },
    author="Constantine Evans et al (this version)",
    author_email="cge@dna.caltech.edu",
    description="PepperCompiler in a pythonic form")
| 1,634 | 532 |
# Legacy Python 2 / GStreamer 0.10 setup: the framework's site-packages dir
# is added manually so gobject/pygst resolve on macOS.
import sys
sys.path.append("/Library/Frameworks/GStreamer.framework/Versions/0.10/lib/python2.7/site-packages/")
import gobject
gobject.threads_init()  # must happen before any pipeline work
import pygst
pygst.require("0.10")
import gst
class Client(object):
    # NOTE(review): despite the name, this acts as a server - it streams a
    # theora/ogg-encoded test pattern over TCP (tcpserversink on 0.0.0.0:8080).
    def __init__(self):
        # Pipeline: videotestsrc -> theoraenc -> oggmux -> tcpserversink
        self.pipeline = gst.Pipeline('client')
        self.videotestsrc = self.create_element('videotestsrc', 'video')
        self.theoraenc = self.create_element('theoraenc', 'encoder')
        self.oggmux = self.create_element('oggmux', 'muxer')
        self.tcpserversink = self.create_element('tcpserversink', 'serversink')
        self.tcpserversink.set_property('host', '0.0.0.0')
        self.tcpserversink.set_property('port', 8080)
        self.pipeline.add(self.videotestsrc, self.theoraenc, self.oggmux, self.tcpserversink)
        gst.element_link_many(self.videotestsrc, self.theoraenc, self.oggmux, self.tcpserversink)

    def create_element(self, element, name):
        # Thin wrapper over the GStreamer element factory.
        return gst.element_factory_make(element, name)

    def start(self):
        # Switch the whole pipeline to PLAYING (starts streaming).
        self.pipeline.set_state(gst.STATE_PLAYING)
# Start streaming and spin the GLib main loop forever.
client = Client()
client.start()

loop = gobject.MainLoop()
loop.run()
| 1,126 | 414 |
from django.contrib.auth.models import User
from django.core import mail
from django.shortcuts import resolve_url
from django.test import TestCase
from InternetSemLimites.core.forms import ProviderForm
from InternetSemLimites.core.models import Provider, State
class TestPostValid(TestCase):
    """Edits an existing provider through a valid POST and checks the outcome."""

    def setUp(self):
        # A superuser must exist so the edit-notification email can be sent.
        User.objects.create_superuser(username='two', password='', email='42@xp.to')
        santa_catarina, *_ = State.objects.get_or_create(abbr='SC', name='Santa Catarina')
        goias, *_ = State.objects.get_or_create(abbr='GO', name='Goiás')
        self.provider = Provider.objects.create(
            name='Xpto',
            url='http://xp.to',
            source='http://twitter.com/xpto',
            category=Provider.SHAME,
            other='Lorem ipsum'
        )
        for state in (santa_catarina, goias):
            self.provider.coverage.add(state)
        self.data = {
            'name': 'XptoEdited',
            'url': 'http://xpedited.to',
            'source': 'http://twitter.com/xptoedited',
            'coverage': [santa_catarina.pk],
            'category': Provider.FAME,
            'other': 'Lorem ipsum dolor'
        }
        endpoint = resolve_url('api:provider', self.provider.pk)
        self.resp = self.client.post(endpoint, self.data)
        self.edited_provider = Provider.objects.last()

    def test_not_allowed_methods(self):
        """DELETE and PATCH must be rejected with 405."""
        endpoint = resolve_url('api:provider', self.provider.pk)
        for response in (self.client.delete(endpoint), self.client.patch(endpoint, self.data)):
            with self.subTest():
                self.assertEqual(405, response.status_code)

    def test_post(self):
        """A valid POST redirects to the edited provider's endpoint."""
        self.assertRedirects(self.resp, resolve_url('api:provider', self.edited_provider.pk))

    def test_send_email(self):
        """Editing a provider triggers exactly one notification email."""
        self.assertEqual(1, len(mail.outbox))

    def test_edit(self):
        """All posted fields, including coverage, are persisted."""
        coverage_ids = [state.id for state in self.edited_provider.coverage.all()]
        for field in ('name', 'url', 'source', 'category', 'other'):
            self.assertEqual(getattr(self.edited_provider, field), self.data[field])
        self.assertEqual(coverage_ids, self.data['coverage'])
class TestPostInvalid(TestCase):
    """POSTs invalid payloads and checks the validation error reporting."""

    def setUp(self):
        User.objects.create_superuser(username='two', password='', email='42@xp.to')
        santa_catarina, *_ = State.objects.get_or_create(abbr='SC', name='Santa Catarina')
        goias, *_ = State.objects.get_or_create(abbr='GO', name='Goiás')
        self.provider = Provider.objects.create(
            name='Xpto',
            url='http://xp.to',
            source='http://twitter.com/xpto',
            category=Provider.SHAME,
            other='Lorem ipsum'
        )
        for state in (santa_catarina, goias):
            self.provider.coverage.add(state)
        self.resp = self.client.post(resolve_url('api:provider', self.provider.pk), dict())

    def test_post(self):
        """An empty payload is rejected with 422."""
        self.assertEqual(422, self.resp.status_code)

    def test_has_errors_on_empty_form(self):
        """The JSON body lists validation errors for the empty form."""
        self.assertTrue(self.resp.json()['errors'])

    def test_has_errors_on_non_empty_form(self):
        """A partially filled form reports per-field errors."""
        invalid_data = {'name': 'Xpto', 'coverage': ['xp', 'to'], 'url': ''}
        response = self.client.post(resolve_url('api:provider', self.provider.pk), invalid_data)
        errors = response.json()['errors']
        required = 'Este campo é obrigatório.'
        with self.subTest():
            self.assertEqual(required, errors['category'][0])
            self.assertEqual(required, errors['source'][0])
            self.assertEqual(required, errors['url'][0])
            self.assertIn('não é um valor válido', errors['coverage'][0])
| 3,852 | 1,217 |
from z3 import *
CONNECTIVE_OPS = [Z3_OP_NOT,Z3_OP_AND,Z3_OP_OR,Z3_OP_IMPLIES,Z3_OP_IFF,Z3_OP_ITE]
REL_OPS = [Z3_OP_EQ,Z3_OP_LE,Z3_OP_LT,Z3_OP_GE,Z3_OP_GT]
OPERATORS = CONNECTIVE_OPS + REL_OPS
# var is tuple (type, name)
def createVar(var):
    """Create a Z3 variable from a (type, name) pair.

    Supported types are "int", "bool" and "real"; anything else raises
    NotImplementedError. Bug fix: the original raised
    ``NotImplementedError(varType)`` where ``varType`` was an undefined name,
    so unsupported types crashed with a NameError instead.
    """
    t, n = var
    if t == "int":
        return Int('%s' % n)
    elif t == "bool":
        return Bool('%s' % n)
    elif t == "real":
        return Real('%s' % n)
    raise NotImplementedError(t)
def createExpression(v, expr):
    # SECURITY: eval() executes arbitrary code -- only call this with trusted,
    # locally authored expression strings. The string may reference the
    # variable dict through the parameter name ``v`` (see the module bottom).
    return eval(expr)
# creates a constraint which allow only key variable to change
def createCounterConstraint(key, variables, model):
    """Pin every variable except *key* to its value in *model*.

    The variable named *key* is constrained to take a DIFFERENT value, so a
    satisfying assignment is a counterexample differing only in that variable.
    """
    constraints = []
    for name, var in variables.iteritems():
        if name == key:
            # The chosen variable must change.
            constraints.append(Not(var == model[var]))
        else:
            # Everything else stays fixed at its model value.
            constraints.append(var == model[var])
    return constraints
def modelToDict(model):
    """Map each declaration name in a Z3 *model* to its assigned value."""
    return dict((decl.name(), model[decl]) for decl in model)
def pPrintDict(d):
    """Render *d* as ``k : v`` pairs, sorted by key, separated by ', '.

    Bug fix: the original accumulated a trailing ', ' and stripped only the
    last character, leaving a dangling comma (e.g. ``'a : 1,'``). Joining the
    formatted pairs produces a clean separator-free tail.
    """
    return ", ".join("{} : {}".format(k, v) for k, v in sorted(d.items()))
def block_model(s):
    # Exclude the solver's current model from future solutions: assert that at
    # least one zero-arity (constant) declaration takes a different value.
    m = s.model()
    s.add(Or([ f() != m[f] for f in m.decls() if f.arity() == 0]))
def isOperand(self):
    # NOTE(review): written as a method (takes ``self`` and reads
    # ``self.value``) but defined at module level and never attached to a
    # class in this chunk -- presumably meant for Node; confirm how it is
    # bound before relying on it.
    # True when the wrapped boolean expression is a leaf (variable or an
    # application whose kind is not a connective/relational operator).
    node = self.value
    t = node.kind()
    return is_bool(node) and (is_var(node) or (is_app(node) and t not in OPERATORS))
def isOperator(self):
    # NOTE(review): like isOperand above, this is a detached method -- calling
    # it as a free function would fail unless ``self`` provides isOperand().
    return not self.isOperand()
class Node:
    """Binary tree node wrapping a Z3 expression (Python 2 code)."""
    def __init__(self, value):
        self.value = value
        self.left = None
        self.right = None
    def __str__(self):
        # Prefer the declaration (operator/function symbol) when available.
        if self.value.decl() is not None:
            return str(self.value.decl())
        return str(self.value)
    def insert(self, node):
        # NOTE(review): fills self.left first and recursively inserts all of
        # the child's own children under it, then self.right, then delegates
        # any further nodes back into the left subtree. This ordering looks
        # deliberate but unusual -- confirm it matches the intended tree shape.
        if self.left is None:
            self.left = Node(node)
            for child in self.left.value.children():
                self.left.insert(child)
        elif self.right is None:
            self.right = Node(node)
            for child in self.right.value.children():
                self.right.insert(child)
        else:
            self.left.insert(node)
    def printTree(self, level=0, indent=0):
        # Prints the tree sideways: right subtree above, left subtree below.
        if level==0:
            line = str(self)
            indent = len(line)
        else:
            space = (level+indent) * ' '
            dotted = level * '-'
            line = "{}|{}{}".format(space, dotted, str(self))
            #line = "{}|{}".format(space, str(self))
            indent += len(str(self))
        if self.right:
            self.right.printTree(level+1, indent)
        print line
        if self.left:
            self.left.printTree(level+1, indent)
# Returns root of constructed tree
def constructTree(variables, expr):
root = Node(expr)
print "constructing tree from ", expr
for child in expr.children():
root.insert(child)
s = Solver()
return root
# Demo: build three boolean Z3 variables keyed by name, evaluate a formula
# over them, and print its expression tree.
variables = {x[1]:createVar(x) for x in [['bool','A'], ['bool','B'], ['bool','C']]}
expr = createExpression(variables, "And(Or(v['A'],v['B']), v['C'])")
r = constructTree(variables, expr)
r.printTree()
| 2,780 | 1,043 |
import random

# 50 distinct integers drawn without replacement from 0..99.
data_list = random.sample(range(100), 50)
def selectionSort(arr):
    """Sort *arr* in place with selection sort and return it.

    Bug fix: the original wrote ``arr[index1] = arr[lowestIndex]`` without the
    reciprocal assignment, overwriting elements instead of swapping them and
    thereby corrupting the list.
    """
    for index1 in range(len(arr) - 1):
        lowestIndex = index1
        # Find the index of the smallest remaining element.
        for index2 in range(index1 + 1, len(arr)):
            if arr[index2] < arr[lowestIndex]:
                lowestIndex = index2
        # Swap it into its final position.
        arr[index1], arr[lowestIndex] = arr[lowestIndex], arr[index1]
    return arr
print(selectionSort(data_list))
| 351 | 129 |
import logging
from typing import Any, Mapping
import redis
import conf
import ocular
import workers
from constants.experiment_jobs import get_experiment_job_uuid
from db.redis.containers import RedisJobContainers
from db.redis.statuses import RedisStatuses
from lifecycles.jobs import JobLifeCycle
from options.registry.container_names import (
CONTAINER_NAME_BUILD_JOBS,
CONTAINER_NAME_EXPERIMENT_JOBS,
CONTAINER_NAME_JOBS,
CONTAINER_NAME_PLUGIN_JOBS,
CONTAINER_NAME_PYTORCH_JOBS,
CONTAINER_NAME_TF_JOBS
)
from options.registry.spawner import (
APP_LABELS_DOCKERIZER,
APP_LABELS_EXPERIMENT,
APP_LABELS_JOB,
APP_LABELS_NOTEBOOK,
APP_LABELS_TENSORBOARD,
ROLE_LABELS_DASHBOARD,
ROLE_LABELS_WORKER,
TYPE_LABELS_RUNNER
)
from options.registry.ttl import TTL_WATCH_STATUSES
from polyaxon.settings import K8SEventsCeleryTasks
logger = logging.getLogger('polyaxon.monitors.statuses')
def update_job_containers(event: Mapping,
                          status: str,
                          job_container_name: str) -> None:
    """Sync the Redis container->job mapping from a pod status event.

    Drops the whole job from monitoring once its lifecycle is done, then, for
    the container matching *job_container_name*, starts or stops monitoring of
    its container id depending on whether it is running.
    """
    job_containers = RedisJobContainers()
    if JobLifeCycle.is_done(status):
        # Remove the job monitoring
        job_uuid = event['metadata']['labels']['job_uuid']
        logger.info('Stop monitoring job_uuid: %s', job_uuid)
        job_containers.remove_job(job_uuid)

    # No per-container information in this event.
    if event['status']['container_statuses'] is None:
        return

    def get_container_id(container_id):
        # Strip the runtime scheme prefix (e.g. 'docker://<id>') if present.
        if not container_id:
            return None
        if container_id.startswith('docker://'):
            return container_id[len('docker://'):]
        return container_id

    for container_status in event['status']['container_statuses']:
        if container_status['name'] != job_container_name:
            continue

        container_id = get_container_id(container_status['container_id'])
        if container_id:
            job_uuid = event['metadata']['labels']['job_uuid']
            if container_status['state']['running'] is not None:
                logger.info('Monitoring (container_id, job_uuid): (%s, %s)',
                            container_id, job_uuid)
                job_containers.monitor(container_id=container_id, job_uuid=job_uuid)
            else:
                job_containers.remove_container(container_id=container_id)
def get_restart_count(event: Mapping, job_container_name: str) -> int:
    """Return the restart count of the named container, or 0 when absent."""
    container_statuses = event['status']['container_statuses']
    if container_statuses is None:
        return 0
    # First matching container wins; a missing/None count maps to 0.
    for container_status in container_statuses:
        if container_status['name'] == job_container_name:
            return container_status['restart_count'] or 0
    return 0
def get_label_selector() -> str:
    """Build the k8s label selector matching worker/dashboard runner pods."""
    roles = '{},{}'.format(conf.get(ROLE_LABELS_WORKER), conf.get(ROLE_LABELS_DASHBOARD))
    return 'role in ({}),type={}'.format(roles, conf.get(TYPE_LABELS_RUNNER))
def should_handle_job_status(pod_state: Any, status: str) -> bool:
    """Decide whether a pod status event should be dispatched to handlers.

    Returns True when no previous status is known (missing or evicted from
    Redis), when the lifecycle transition from the stored status is valid, or
    when Redis is unreachable (fail open).

    Fix: reuse the status already fetched instead of querying Redis a second
    time inside the transition check.
    """
    job_uuid = pod_state['details']['labels']['job_uuid']
    current_status = RedisStatuses.get_status(job=job_uuid)
    if not current_status:  # If the status does not exist or is evicted
        return True
    try:
        return JobLifeCycle.can_transition(status_from=current_status,
                                           status_to=status)
    except redis.connection.ConnectionError:
        return True
def handle_job_condition(event_object,
                         pod_state,
                         status,
                         labels,
                         container_name,
                         task_name,
                         update_containers):
    """Optionally refresh container tracking, then dispatch the status event.

    When *update_containers* is set, container bookkeeping is attempted first;
    a Redis outage there is swallowed so it never blocks status handling. The
    event is then forwarded to the celery task *task_name* if the lifecycle
    transition is valid.
    """
    if update_containers:
        try:
            update_job_containers(event_object, status, container_name)
        except redis.connection.ConnectionError:
            # Best effort only: bookkeeping must not block status dispatch.
            pass
    # Handle experiment job statuses
    if should_handle_job_status(pod_state=pod_state, status=status):
        logger.debug("Sending state to handler %s, %s", status, labels)
        restart_count = get_restart_count(event_object, container_name)
        pod_state['restart_count'] = restart_count or 0
        workers.send(task_name, kwargs={'payload': pod_state}, countdown=None)
def run(k8s_manager: 'K8SManager') -> None:
    """Watch pod status events and route each one to the matching job handler.

    Classifies every pod event by its container names and app/role labels into
    experiment (tf / pytorch / mpi / generic), job, plugin (tensorboard or
    notebook) or dockerizer categories and forwards it via
    handle_job_condition. Unclassifiable events are only logged.
    """
    # pylint:disable=too-many-branches
    # Local cache
    label_selector = get_label_selector()

    container_name_experiment_job = conf.get(CONTAINER_NAME_EXPERIMENT_JOBS)
    container_name_tf_job = conf.get(CONTAINER_NAME_TF_JOBS)
    container_name_pytorch_job = conf.get(CONTAINER_NAME_PYTORCH_JOBS)
    container_name_plugin_job = conf.get(CONTAINER_NAME_PLUGIN_JOBS)
    container_name_job = conf.get(CONTAINER_NAME_JOBS)
    container_name_build_job = conf.get(CONTAINER_NAME_BUILD_JOBS)
    watch_ttl = conf.get(TTL_WATCH_STATUSES)

    app_labels_experiment = conf.get(APP_LABELS_EXPERIMENT)
    app_labels_job = conf.get(APP_LABELS_JOB)
    app_labels_build_job = conf.get(APP_LABELS_DOCKERIZER)
    app_labels_tensorboard = conf.get(APP_LABELS_TENSORBOARD)
    app_labels_notebook = conf.get(APP_LABELS_NOTEBOOK)

    for (event_object, pod_state) in ocular.monitor(k8s_manager.k8s_api,
                                                    namespace=k8s_manager.namespace,
                                                    container_names=(
                                                        container_name_experiment_job,
                                                        container_name_tf_job,
                                                        container_name_pytorch_job,
                                                        container_name_plugin_job,
                                                        container_name_job,
                                                        container_name_build_job),
                                                    label_selector=label_selector,
                                                    return_event=True,
                                                    watch_ttl=watch_ttl):
        logger.debug('-------------------------------------------\n%s\n', pod_state)
        if not pod_state:
            continue

        status = pod_state['status']
        labels = None
        if pod_state['details'] and pod_state['details']['labels']:
            labels = pod_state['details']['labels']
        logger.info("Updating job container %s, %s", status, labels)

        experiment_condition = status and labels['app'] == app_labels_experiment
        experiment_job_condition = (
            container_name_experiment_job in pod_state['details']['container_statuses']
            or 'job_uuid' in labels
        )
        tf_job_condition = (
            container_name_tf_job in pod_state['details']['container_statuses']
            or 'tf-replica-index' in labels
        )
        mpi_job_condition = 'mpi_job_name' in labels
        # NOTE(review): 'pytroch-replica-index' looks misspelled -- the pytorch
        # branch below reads 'pytorch-replica-index'; confirm against the
        # labels actually set by the spawner.
        pytorch_job_condition = (
            container_name_pytorch_job in pod_state['details']['container_statuses']
            or 'pytroch-replica-index' in labels
        )
        job_condition = (
            container_name_job in pod_state['details']['container_statuses'] or
            (status and labels['app'] == app_labels_job)
        )
        plugin_job_condition = (
            container_name_plugin_job in pod_state['details']['container_statuses'] or
            (status and
             labels['app'] in (app_labels_tensorboard, app_labels_notebook))
        )
        dockerizer_job_condition = (
            container_name_build_job in pod_state['details']['container_statuses']
            or (status and labels['app'] == app_labels_build_job)
        )

        if experiment_condition:
            if tf_job_condition:
                # We augment the payload with standard Polyaxon requirement
                pod_state['details']['labels']['job_uuid'] = get_experiment_job_uuid(
                    experiment_uuid=labels['experiment_uuid'],
                    task_type=labels['task_type'],
                    task_index=labels['tf-replica-index']
                )
                handle_job_condition(
                    event_object=event_object,
                    pod_state=pod_state,
                    status=status,
                    labels=labels,
                    container_name=container_name_tf_job,
                    task_name=K8SEventsCeleryTasks.K8S_EVENTS_HANDLE_EXPERIMENT_JOB_STATUSES,
                    update_containers=False
                )
            elif pytorch_job_condition:
                # We augment the payload with standard Polyaxon requirement
                pod_state['details']['labels']['job_uuid'] = get_experiment_job_uuid(
                    experiment_uuid=labels['experiment_uuid'],
                    task_type=labels['task_type'],
                    task_index=labels['pytorch-replica-index']
                )
                handle_job_condition(
                    event_object=event_object,
                    pod_state=pod_state,
                    status=status,
                    labels=labels,
                    container_name=container_name_pytorch_job,
                    task_name=K8SEventsCeleryTasks.K8S_EVENTS_HANDLE_EXPERIMENT_JOB_STATUSES,
                    update_containers=False
                )
            elif mpi_job_condition:
                # The task index is encoded as the last dash-separated part of
                # the pod name; skip pods whose names do not match that shape.
                job_name = pod_state['details']['pod_name']
                parts = job_name.split('-')
                if len(parts) != 4:
                    continue
                # We augment the payload with standard Polyaxon requirement
                pod_state['details']['labels']['job_uuid'] = get_experiment_job_uuid(
                    experiment_uuid=labels['experiment_uuid'],
                    task_type=labels['task_type'],
                    task_index=parts[-1]
                )
                handle_job_condition(
                    event_object=event_object,
                    pod_state=pod_state,
                    status=status,
                    labels=labels,
                    container_name=container_name_experiment_job,
                    task_name=K8SEventsCeleryTasks.K8S_EVENTS_HANDLE_EXPERIMENT_JOB_STATUSES,
                    update_containers=False
                )
            elif experiment_job_condition:
                handle_job_condition(
                    event_object=event_object,
                    pod_state=pod_state,
                    status=status,
                    labels=labels,
                    container_name=container_name_experiment_job,
                    task_name=K8SEventsCeleryTasks.K8S_EVENTS_HANDLE_EXPERIMENT_JOB_STATUSES,
                    update_containers=False
                )
        elif job_condition:
            handle_job_condition(
                event_object=event_object,
                pod_state=pod_state,
                status=status,
                labels=labels,
                container_name=container_name_job,
                task_name=K8SEventsCeleryTasks.K8S_EVENTS_HANDLE_JOB_STATUSES,
                update_containers=False
            )
        elif plugin_job_condition:
            handle_job_condition(
                event_object=event_object,
                pod_state=pod_state,
                status=status,
                labels=labels,
                container_name=container_name_plugin_job,
                task_name=K8SEventsCeleryTasks.K8S_EVENTS_HANDLE_PLUGIN_JOB_STATUSES,
                update_containers=False
            )
        elif dockerizer_job_condition:
            handle_job_condition(
                event_object=event_object,
                pod_state=pod_state,
                status=status,
                labels=labels,
                container_name=container_name_build_job,
                task_name=K8SEventsCeleryTasks.K8S_EVENTS_HANDLE_BUILD_JOB_STATUSES,
                update_containers=False
            )
        else:
            logger.info("Lost state %s, %s", status, pod_state)
| 12,045 | 3,464 |
import sys
import json
import numpy as np
import matplotlib.pyplot as plt
class Wing:
    """A class for modeling a finite wing using the sine-series solution to Prandtl's lifting-line equation.

    Parameters
    ----------
    planform : str
        May be "elliptic" or "tapered".
    b : float
        Wingspan.
    AR : float
        Aspect ratio.
    RT : float
        Taper ratio. Only required for "tapered" planform.
    CL_a_section : float, optional
        Section lift slope. Defaults to 2 pi.
    washout : str
        May be "none", "linear", or "optimum".
    washout_mag : float
        Magnitude of the washout at the tip in degrees.
    washout_CLd : float
        Design lift coefficient for washout. Only required if "washout"
        is "optimum".
    aileron_lims : list, optional
        Aileron limits as a fraction of the span. Defaults to entire span.
    aileron_cf: list, optional
        Aileron chord fractions at the root and tip of the ailerons. Defaults to 0.0.
    aileron_hinge_eff : float, optional
        Aileron hinge efficiency. Defaults to 1.0.
    """

    def __init__(self, **kwargs):

        # Get planform parameters
        self._planform_type = kwargs["planform"]
        self._AR = kwargs["AR"]
        self._b = kwargs["b"]
        if self._planform_type == "tapered":
            self._RT = kwargs["RT"]
        self._CL_a_s = kwargs.get("CL_a_section", 2.0*np.pi)

        # Get washout parameters
        self._washout_type = kwargs.get("washout", "none")
        if self._washout_type != "none":
            self._W = np.radians(kwargs.get("washout_mag", 0.0))
        else:
            self._W = 0.0
        if self._washout_type == "optimum":
            self._CL_d = kwargs["washout_CLd"]

        # Get aileron parameters
        self._aln_lims = kwargs.get("aileron_lims", [0.0, 0.5])
        self._aln_cf = kwargs.get("aileron_cf", [0.0, 0.0])
        self._aln_e_hinge = kwargs.get("aileron_hinge_eff", 1.0)

    def set_grid(self, N):
        """Sets the spanwise grid for the wing. Uses cosine clustering

        Parameters
        ----------
        N : int
            Number of nodes per semispan to specify. Note that one node will
            be placed at the root, making the total number of nodes 2N-1.
        """
        # NOTE(review): mutates numpy's global print options as a side effect.
        np.set_printoptions(linewidth=np.inf, precision=12)

        # Create theta and z distributions
        self._N = 2*N-1
        self._theta = np.linspace(0, np.pi, self._N)
        self._z = -0.5*self._b*np.cos(self._theta)

        # Calculate control point trig values
        self._N_range = np.arange(1, self._N+1)
        self._S_theta = np.sin(self._theta)

        # Calculate chord distribution
        if self._planform_type == "elliptic":
            self._c_b = 4.0*self._S_theta/(np.pi*self._AR)
        else:
            self._c_b = 2.0*(1.0-(1.0-self._RT)*np.abs(np.cos(self._theta)))/(self._AR*(1.0+self._RT))
        # Guard the wing tips against a zero chord (division below).
        self._c_b = np.where(self._c_b==0.0, 1e-6, self._c_b)

        # Calculate washout distribution
        if self._washout_type == "none":
            self._w = np.zeros(self._N)
        elif self._washout_type == "linear":
            self._w = np.abs(np.cos(self._theta))
        elif self._washout_type == "optimum":
            self._w = 1.0-self._S_theta*self._c_b[self._N//2]/self._c_b
            self._W = 4.0*self._CL_d/(np.pi*self._AR*self._CL_a_s*self._c_b[self._N//2])

        # Determine aileron chord fractions
        self._cf = np.zeros(self._N)
        z_in_aileron = ((self._z>self._aln_lims[0]) & (self._z<self._aln_lims[1])) | ((self._z>-self._aln_lims[1]) & (self._z<-self._aln_lims[0]))
        if self._planform_type == "elliptic":
            self._c_aln_tip = (4.0/(np.pi*self._AR)*np.sqrt(1.0-(2.0*self._aln_lims[1])**2))
            self._c_aln_root = (4.0/(np.pi*self._AR)*np.sqrt(1.0-(2.0*self._aln_lims[0])**2))
        else:
            self._c_aln_tip = (2.0/(self._AR*(1.0+self._RT))*(1.0-(1.0-self._RT)*2.0*self._aln_lims[1]))
            self._c_aln_root = (2.0/(self._AR*(1.0+self._RT))*(1.0-(1.0-self._RT)*2.0*self._aln_lims[0]))
        self._x_h_tip = -(1.0-self._aln_cf[1]-0.25)*self._c_aln_tip
        self._x_h_root = -(1.0-self._aln_cf[0]-0.25)*self._c_aln_root
        aln_b = (self._x_h_tip-self._x_h_root)/(self._aln_lims[1]-self._aln_lims[0])
        # Hinge line location, mirrored from the right semispan to the left.
        x_h = z_in_aileron[self._N//2:]*(self._x_h_root+(self._z[self._N//2:]-self._aln_lims[0])*aln_b)
        self._cf[self._N//2:] = 1.0-(-x_h/self._c_b[self._N//2:]+0.25)
        self._cf[self._N//2::-1] = 1.0-(-x_h/self._c_b[self._N//2:]+0.25)
        self._cf *= z_in_aileron

        # Determine flap efficiency
        theta_f = np.arccos(2.0*self._cf-1.0)
        self._e_f = (1.0-(theta_f-np.sin(theta_f))/np.pi)*self._aln_e_hinge
        # Opposite sign on each semispan: ailerons deflect antisymmetrically.
        self._e_f[self._N//2:] *= -1.0

        # Get C matrix
        self._C = np.zeros((self._N, self._N))
        self._C[0,:] = self._N_range**2
        self._C[1:-1,:] = (4.0/(self._CL_a_s*self._c_b[1:-1,np.newaxis])+self._N_range[np.newaxis,:]/self._S_theta[1:-1,np.newaxis])*np.sin(self._N_range[np.newaxis,:]*self._theta[1:-1,np.newaxis])
        self._C[-1,:] = (-1.0)**(self._N_range+1)*self._N_range**2

        # Get C inverse (why on earth, I have no idea...)
        self._C_inv = np.linalg.inv(self._C)

        # Determine the Fourier coefficients
        self._a_n = np.linalg.solve(self._C, np.ones(self._N))
        self._b_n = np.linalg.solve(self._C, self._w)
        self._c_n = np.linalg.solve(self._C, self._e_f)
        self._d_n = np.linalg.solve(self._C, np.cos(self._theta))

        # Determine coefficient slopes
        self.CL_a = np.pi*self._AR*self._a_n[0]

        # Determine the kappa factors due to planform
        self.K_D = np.sum(np.arange(2, self._N+1)*self._a_n[1:]**2/self._a_n[0]**2)
        A = (1+np.pi*self._AR/self._CL_a_s)*self._a_n[0]
        self.K_L = (1.0-A)/A

        # Determine span efficiency factor
        self.e_s = 1.0/(1.0+self.K_D)

        # Determine kappa factors due to washout
        if self._washout_type != "none":
            self.e_omega = self._b_n[0]/self._a_n[0]
            self.K_DL = 2.0*self._b_n[0]/self._a_n[0]*np.sum(self._N_range[1:]*self._a_n[1:]/self._a_n[0]*(self._b_n[1:]/self._b_n[0]-self._a_n[1:]/self._a_n[0]))
            self.K_Domega = (self._b_n[0]/self._a_n[0])**2*np.sum(self._N_range[1:]*(self._b_n[1:]/self._b_n[0]-self._a_n[1:]/self._a_n[0])**2)
            self.K_Do = self.K_D-0.25*self.K_DL**2/self.K_Domega
        else:
            self.e_omega = 0.0
            self.K_DL = 0.0
            self.K_Domega = 0.0
            self.K_Do = 0.0

        # Determine aileron and roll derivatives
        self.Cl_da = -0.25*np.pi*self._AR*self._c_n[1]
        self.Cl_p = -0.25*np.pi*self._AR*self._d_n[1]

    def set_condition(self, **kwargs):
        """Sets atmospheric condition for the wing.

        Parameters
        ----------
        alpha : float
            Angle of attack in degrees.
        V_inf : float
            Freestream velocity.
        da : float, optional
            Aileron deflection in degrees. Defaults to 0.0.
        p_bar : float or string, optional
            Nondimensional rolling rate. May be "steady" to imply the steady roll rate should be solved for. Defaults to 0.0.
        """
        # Store condition
        self._alpha = np.radians(kwargs["alpha"])
        self._V_inf = kwargs["V_inf"]
        self._da = np.radians(kwargs.get("da", 0.0))
        self._p_bar = kwargs.get("p_bar", 0.0)

    def solve(self):
        """Solves for the aerodynamic coefficients at the current condition."""
        # Determine rolling moment/rate
        if self._p_bar == "steady":
            # Steady roll: moment is zero; solve for the roll rate instead.
            self.Cl = 0.0
            self.p_bar = -self.Cl_da*self._da/self.Cl_p
        else:
            self.p_bar = self._p_bar
            self.Cl = self.Cl_da*self._da+self.Cl_p*self.p_bar

        # Determine Fourier coefficients dependent on condition
        self._A_n = self._a_n*(self._alpha)-self._b_n*self._W+self._c_n*self._da+self._d_n*self.p_bar

        # Determine lift coefficient
        self.CL = np.pi*self._AR*self._A_n[0]

        # Calculate gamma distribution
        An_sin_n0 = self._A_n[np.newaxis,:]*np.sin(self._N_range[np.newaxis,:]*self._theta[:,np.newaxis])
        self.gamma = 2.0*self._b*self._V_inf*np.sum(An_sin_n0, axis=1).flatten()

        # Determine drag coefficient with and without rolling and aileron effects
        self.CD_i = np.pi*self._AR*np.sum(self._N_range*self._A_n**2) # With
        self.CD_i_simp = (self.CL**2*(1.0+self.K_D)-self.K_DL*self.CL*self.CL_a*self._W+self.K_Domega*(self.CL_a*self._W)**2)/(np.pi*self._AR)

        # Determine yawing moment
        C = 0.0
        for i in range(3, self._N):
            C += (2.0*i+1)*self._A_n[i-1]*self._A_n[i]
        self.Cn = 0.125*self.CL*(6.0*self._A_n[1]-self.p_bar)+0.125*np.pi*self._AR*(10.0*self._A_n[1]-self.p_bar)*self._A_n[2]+0.25*np.pi*self._AR*C

    def plot_planform(self):
        """Shows a plot of the planform."""
        # Get leading and trailing edge points
        x_le = np.zeros(self._N+2)
        x_te = np.zeros(self._N+2)
        x_le[1:-1] = 0.25*self._c_b
        x_te[1:-1] = -0.75*self._c_b
        z = np.zeros(self._N+2)
        z[0] = self._z[0]
        z[1:-1] = self._z
        z[-1] = self._z[-1]

        # Plot outline and LQC
        plt.figure()
        plt.plot(z, x_le, 'k-')
        plt.plot(z, x_te, 'k-')
        plt.plot(z, np.zeros(self._N+2), 'b-', label='c/4')

        # Plot spanwise stations
        for i in range(self._N):
            plt.plot([z[i+1], z[i+1]], [x_le[i+1], x_te[i+1]], 'b--')

        # Plot ailerons
        plt.plot([self._aln_lims[0], self._aln_lims[0], self._aln_lims[1], self._aln_lims[1]],
                 [-0.75*self._c_aln_root, self._x_h_root, self._x_h_tip, -0.75*self._c_aln_tip],
                 'k-')
        plt.plot([-self._aln_lims[0], -self._aln_lims[0], -self._aln_lims[1], -self._aln_lims[1]],
                 [-0.75*self._c_aln_root, self._x_h_root, self._x_h_tip, -0.75*self._c_aln_tip],
                 'k-')

        # Plot labels
        plt.xlabel('z/b')
        plt.ylabel('x/b')
        plt.title('Planform')
        plt.gca().set_aspect('equal', adjustable='box')
        plt.legend(loc='upper right')
        plt.show()

    def plot_washout(self):
        """Plots the washout distribution on the wing."""
        plt.figure()
        plt.plot(self._z, self._w, 'k-')
        plt.xlabel("z/b")
        plt.ylabel("Washout [deg]")
        plt.title("Washout Distribution")
        plt.show()

    def plot_aileron(self):
        """Plots the aileron deflection distribution on the wing."""
        plt.figure()
        plt.plot(self._z, self._e_f, 'k-')
        plt.xlabel("z/b")
        plt.ylabel("Aileron Effectiveness")
        plt.title("Aileron Effectiveness")
        plt.show()
if __name__=="__main__":

    # Read in input (JSON configuration file path is the last CLI argument)
    input_file = sys.argv[-1]
    with open(input_file, 'r') as input_handle:
        input_dict = json.load(input_handle)

    # Initialize wing
    wing_dict = input_dict["wing"]
    washout_dict = input_dict["wing"]["washout"]
    aileron_dict = input_dict["wing"]["aileron"]
    wing = Wing(planform=wing_dict["planform"]["type"],
                AR=wing_dict["planform"]["aspect_ratio"],
                RT=wing_dict["planform"].get("taper_ratio"),
                CL_a_section=wing_dict["airfoil_lift_slope"],
                washout=washout_dict["distribution"],
                washout_mag=washout_dict["magnitude[deg]"],
                washout_CLd=washout_dict["CL_design"],
                aileron_lims=[aileron_dict["begin[z/b]"], aileron_dict["end[z/b]"]],
                aileron_cf=[aileron_dict["begin[cf/c]"], aileron_dict["end[cf/c]"]],
                aileron_hinge_eff=aileron_dict["hinge_efficiency"])

    # Set up grid
    wing.set_grid(wing_dict["nodes_per_semispan"])

    # Set condition
    # NOTE(review): set_condition reads kwargs["V_inf"], which is not passed
    # here -- confirm whether the input schema requires it.
    cond_dict = input_dict["condition"]
    wing.set_condition(alpha=cond_dict["alpha_root[deg]"],
                       da=cond_dict["aileron_deflection[deg]"],
                       p_bar=cond_dict["pbar"])

    # Solve
    wing.solve()
    print()
    print("Wing")
    print("    Type: {0}".format(wing._planform_type))
    print("    Aspect Ratio: {0}".format(wing._AR))
    # Elliptic wings never set _RT; fall through silently in that case.
    try:
        print("    Taper Ratio: {0}".format(wing._RT))
    except AttributeError:
        pass
    print("    Nodes: {0}".format(wing._N))
    print()
    print("Condition")
    print("    Alpha: {0} deg".format(np.degrees(wing._alpha)))
    print("    p_bar: {0}".format(wing.p_bar))
    print()
    print("Aerodynamic Coefficients")
    print("    CL: {0}".format(wing.CL))
    print("    CD_i (without roll and aileron effects): {0}".format(wing.CD_i_simp))
    print("    CD_i (with roll and airleron effects): {0}".format(wing.CD_i))
    print("    Cl: {0}".format(wing.Cl))
    print("    Cn: {0}".format(wing.Cn))
    print()
    print("Planform Effects")
    print("    CL,a: {0}".format(wing.CL_a))
    print("    K_L: {0}".format(wing.K_L))
    print("    K_D: {0}".format(wing.K_D))
    print("    Span efficiency: {0}".format(wing.e_s))
    print()
    print("Washout Effects")
    print("    Washout effectiveness: {0}".format(wing.e_omega))
    print("    K_DL: {0}".format(wing.K_DL))
    print("    Washout contribution to induced drag: {0}".format(wing.K_Domega))
    print("    K_Do: {0}".format(wing.K_Do))
    print()
    print("Aileron Effects")
    print("    Cl,da: {0}".format(wing.Cl_da))
    print()
    print("Roll Effects")
    print("    Cl,p: {0}".format(wing.Cl_p))

    # Check for plot requests
    if input_dict["view"]["planform"]:
        wing.plot_planform()
    if input_dict["view"]["washout_distribution"]:
        wing.plot_washout()
    if input_dict["view"]["aileron_distribution"]:
        wing.plot_aileron()

    # Write solution (dump the linear system and Fourier coefficients)
    with open("Solution.txt", 'w') as f:
        C_str = np.array2string(wing._C)
        C_inv_str = np.array2string(wing._C_inv)
        a_n_str = np.array2string(wing._a_n)
        b_n_str = np.array2string(wing._b_n)
        c_n_str = np.array2string(wing._c_n)
        d_n_str = np.array2string(wing._d_n)
        print("C array", file=f)
        print(C_str, file=f)
        print("C_inv array", file=f)
        print(C_inv_str, file=f)
        print("a_n", file=f)
        print(a_n_str, file=f)
        print("b_n", file=f)
        print(b_n_str, file=f)
        print("c_n", file=f)
        print(c_n_str, file=f)
        print("d_n", file=f)
        print(d_n_str, file=f)
from django.db import models
class Task(models.Model):
    """A simple to-do item with creation and modification timestamps."""
    # Short human-readable task name.
    title = models.CharField(max_length=40)
    # Longer free-form details about the task.
    description = models.TextField(max_length=500)
    # Set once when the row is first saved.
    date_created = models.DateTimeField(auto_now_add=True)
    # Refreshed on every save.
    date_last_modified = models.DateTimeField(auto_now=True)
| 271 | 87 |
def elevate():
    """Re-launch this script with admin rights via Windows UAC, mirroring output.

    In the elevated child, stdout/stderr are redirected into a temp file; the
    non-elevated parent tails that file, waits for the child, then exits with
    the child's exit code. (Python 2 code: uses unbuffered text-mode files.)
    """
    import ctypes, win32com, win32event, win32process, os, sys
    outpath = r'%s\%s.out' % (os.environ["TEMP"], os.path.basename(__file__))
    if ctypes.windll.shell32.IsUserAnAdmin():
        # Elevated child: write output to the file the parent is tailing.
        if os.path.isfile(outpath):
            sys.stderr = sys.stdout = open(outpath, 'w', 0)
        return
    with open(outpath, 'w+', 0) as outfile:
        # NOTE(review): uses win32com.shell.shell but only imports win32com;
        # this normally needs ``from win32com.shell import shell`` -- confirm.
        # fMask=64 requests a process handle; nShow=0 hides the new window.
        hProc = win32com.shell.shell.ShellExecuteEx(lpFile=sys.executable, \
            lpVerb='runas', lpParameters=' '.join(sys.argv), fMask=64, nShow=0)['hProcess']
        while True:
            hr = win32event.WaitForSingleObject(hProc, 40)
            # Mirror any new output the child has written so far.
            while True:
                line = outfile.readline()
                if not line: break
                sys.stdout.write(line)
            if hr != 0x102: break  # 0x102 == WAIT_TIMEOUT; anything else means the child exited
        os.remove(outpath)
        sys.stderr = ''
        sys.exit(win32process.GetExitCodeProcess(hProc))
if __name__ == '__main__':
    elevate()
    # NOTE(review): main() is not defined in this chunk -- presumably defined
    # elsewhere in the file; confirm before running standalone.
    main()
from django.urls import path
from django.conf import settings
from django.conf.urls.static import static
from . import views
# URL namespace used by reverse()/{% url %} lookups.
app_name = 'etikihead'

urlpatterns = [
    path('', views.entry_mask, name='entrymask'),
    path('contact/', views.contact, name='contact'),
    path('privacy/', views.privacy, name='privacy'),
    path('terms/', views.legal, name='legal'),
    path('impressum/', views.impressum, name='impressum'),
    path('about/', views.about, name='about'),
    path('faq/', views.faq, name='faq'),
    path('todo/', views.todo, name='todo'),
    path('startinfo/', views.startinfo, name='startinfo'),
]
| 629 | 213 |
# TC002 test case - Login in with new user data - exit
import data.data_tcA002 as da02
import func.func_01 as fu01
from selenium import webdriver
from selenium.webdriver.common.by import By
import time
from selenium.webdriver.chrome.options import Options
from webdriver_manager.chrome import ChromeDriverManager
options = Options()
# Run Chrome without a visible window.
options.headless = True
driver = webdriver.Chrome(executable_path=ChromeDriverManager().install(), options=options)
driver.get("http://localhost:1667")

# Wait for loading
fu01.wait(driver, By.ID, "app", 2)
# *** TC-A002 **************************************
def test_A002():
    """Accept cookies, sign in, capture the shown username, close the driver."""
    fu01.cookie_ok(driver)
    fu01.sign_in(driver, da02.mail, da02.passw)
    shown_name = fu01.login_check(driver)
    fu01.out_close_driver(driver)
    return shown_name
# Run the scenario at import time and capture the displayed username.
username_text = test_A002()
# ***************************************************
# Normal run
if __name__ == "__main__":
    print(username_text)
    try:
        assert da02.name == username_text
    except AssertionError:
        # Fix: catch only the assertion failure -- the original bare ``except``
        # also swallowed unrelated errors (NameError, KeyboardInterrupt, ...).
        print("Hiba, az ellenőrző feltételnél nincs egyezés.")
| 1,070 | 362 |
"""Tests for the data utilities from lisc."""
from lisc.data.utils import *
###################################################################################################
###################################################################################################
def test_count_elements():
    """count_elements tallies items and excludes None entries."""
    items = ['a', 'b', 'a', None]
    counts = count_elements(items)
    assert counts['a'] == 2
    assert counts['b'] == 1
    assert None not in counts
def test_combine_lists():
    """combine_lists flattens sub-lists and skips None entries."""
    nested = [['a', 'b'], None, ['c', 'd']]
    flattened = combine_lists(nested)
    assert flattened == ['a', 'b', 'c', 'd']
def test_convert_string():
    """convert_string lowercases, strips punctuation and drops stopwords."""
    sentence = 'The Last wOrd, in the bRain!'
    tokens = convert_string(sentence)
    assert tokens == ['last', 'word', 'brain']
def test_lower_list():
    """lower_list lowercases every word in the list."""
    mixed_case = ['The', 'Cool', 'Project']
    lowered = lower_list(mixed_case)
    assert lowered == ['the', 'cool', 'project']
| 966 | 298 |
from leapp import reporting
from leapp.actors import Actor
from leapp.models import FirewalldGlobalConfig, FirewallsFacts
from leapp.reporting import create_report, Report
from leapp.tags import ChecksPhaseTag, IPUWorkflowTag
class FirewalldCheckAllowZoneDrifting(Actor):
    """
    This actor will check if AllowZoneDrifting=yes in firewalld.conf. This
    option has been removed in RHEL-9 and behavior is as if
    AllowZoneDrifting=no.

    Fix: the option name was misspelled "AllowZoneDrifiting" in the docstring
    and in the user-facing report summary (the remediation below already used
    the correct spelling).
    """

    name = 'firewalld_check_allow_zone_drifting'
    consumes = (FirewallsFacts, FirewalldGlobalConfig)
    produces = (Report,)
    tags = (ChecksPhaseTag, IPUWorkflowTag)

    def process(self):
        # If firewalld is not enabled then don't bother the user about its
        # configuration. This Report keys off a _default_ value and as such
        # will trigger for all users that have not done one of the following:
        #   - disabled firewalld
        #   - manually set AllowZoneDrifting=no (as firewalld logs suggests)
        #
        for facts in self.consume(FirewallsFacts):
            if not facts.firewalld.enabled:
                return
        for facts in self.consume(FirewalldGlobalConfig):
            if not facts.allowzonedrifting:
                return

        create_report([
            reporting.Title('Firewalld Configuration AllowZoneDrifting Is Unsupported'),
            reporting.Summary('Firewalld has enabled configuration option '
                              '"{conf_key}" which has been removed in RHEL-9. '
                              'New behavior is as if "{conf_key}" was set to "no".'.format(
                                  conf_key='AllowZoneDrifting')),
            reporting.Severity(reporting.Severity.HIGH),
            reporting.Tags([reporting.Tags.SANITY, reporting.Tags.FIREWALL]),
            reporting.Flags([reporting.Flags.INHIBITOR]),
            reporting.ExternalLink(
                url='https://access.redhat.com/articles/4855631',
                title='Changes in firewalld related to Zone Drifting'),
            reporting.Remediation(
                hint='Set AllowZoneDrifting=no in /etc/firewalld/firewalld.conf',
                commands=[['sed -i "s/^AllowZoneDrifting=.*/AllowZoneDrifting=no/" '
                           '/etc/firewalld/firewalld.conf']]),
        ])
| 2,316 | 644 |
import os
def run(path: str, return_full_path: bool = False):
    """Gets all files and folders from a path and stores them into $file_list

    Arguments:
        path {str} -- The path to get files and folders from

    Keyword Arguments:
        return_full_path {bool} -- True to return the full path of the file
            instead of just the file name (default: {False})

    Returns:
        file_list {dict} -- {'file_list': list of files and folders}
    """
    entries = os.listdir(path)
    if return_full_path:
        # Idiom fix: build the joined paths with a comprehension instead of
        # mutating the list in place via enumerate/index assignment.
        entries = [os.path.join(path, entry) for entry in entries]
    return {'file_list': entries}


# Action registry consumed by the plugin loader.
actions = {"list dir": run}
| 665 | 202 |
"""
Test for workspaces API
"""
# TODO: tests for authorization codes
import random
from collections.abc import Generator
from concurrent.futures import ThreadPoolExecutor
from contextlib import contextmanager
from wxc_sdk.rest import RestError
from wxc_sdk.all_types import *
from .base import TestCaseWithLog
# Display-name prefix identifying workspaces created (and deleted) by these tests.
TEST_WORKSPACES_PREFIX = 'workspace test '
class TestList(TestCaseWithLog):
    def test_001_list(self):
        """List all workspaces and dump their JSON representation."""
        all_workspaces = list(self.api.workspaces.list())
        print(f'got {len(all_workspaces)} workspaces')
        print('\n'.join(workspace.json() for workspace in all_workspaces))
class TestDetails(TestCaseWithLog):
    def test_001_all(self):
        """
        details for all workspaces
        """
        api = self.api.workspaces
        workspaces = api.list()
        # fetch details concurrently to keep the test fast
        with ThreadPoolExecutor() as pool:
            details = list(pool.map(
                lambda workspace: api.details(workspace_id=workspace.workspace_id),
                workspaces))
        print(f'got details for {len(details)} workspaces')
class TestOutgoingPermissionsAutoTransferNumbers(TestCaseWithLog):
    """Tests for the outgoing-permission "auto transfer numbers" workspace settings."""

    def test_001_get_all(self):
        """
        get outgoing permissions auto transfer numbers for all workspaces
        """
        wsa = self.api.workspaces
        tna = self.api.workspace_settings.permissions_out.transfer_numbers
        # only Webex-Calling enabled workspaces have these settings
        targets = [ws for ws in wsa.list()
                   if ws.calling == CallingType.webex]
        if not targets:
            self.skipTest('Need some WxC enabled workspaces to run this test')
        with ThreadPoolExecutor() as pool:
            _ = list(pool.map(lambda ws: tna.read(person_id=ws.workspace_id),
                              targets))
        print(f'outgoing permissions auto transfer numbers for {len(targets)} workspaces')

    @contextmanager
    def target_ws_context(self, use_custom_enabled: bool = True) -> Generator[Workspace, None, None]:
        """
        pick a random workspace and make sure that the outgoing permission settings are restored
        :return:
        """
        po = self.api.workspace_settings.permissions_out
        targets = [ws for ws in self.api.workspaces.list()
                   if ws.calling == CallingType.webex]
        if not targets:
            self.skipTest('Need some WxC enabled workspaces to run this test')
        random.shuffle(targets)
        # if enable == False then we need a workspace where custom_enabled is not set. Else setting it to False
        # will clear all existing customer settings and we want to avoid that side effect of the test
        po_settings = None
        # walrus in the generator caches the settings read for the chosen workspace
        target_ws = next((ws for ws in targets
                          if use_custom_enabled or
                          not (po_settings := po.read(person_id=ws.workspace_id)).use_custom_enabled),
                         None)
        if target_ws is None:
            self.skipTest('No WxC enabled workspace with use_custom_enabled==False')
        if po_settings is None:
            po_settings = po.read(person_id=target_ws.workspace_id)
        try:
            if use_custom_enabled:
                # enable custom settings: else auto transfer numbers can't be set
                po.configure(person_id=target_ws.workspace_id,
                             settings=OutgoingPermissions(use_custom_enabled=use_custom_enabled))
            yield target_ws
        finally:
            # restore old settings
            if use_custom_enabled:
                po.configure(person_id=target_ws.workspace_id, settings=po_settings)
            po_restored = po.read(person_id=target_ws.workspace_id)
            self.assertEqual(po_settings, po_restored)

    def test_002_update_wo_custom_enabled(self):
        """
        updating auto transfer numbers requires use_custom_enabled to be set
        :return:
        """
        tna = self.api.workspace_settings.permissions_out.transfer_numbers
        with self.target_ws_context(use_custom_enabled=False) as target_ws:
            target_ws: Workspace
            numbers = tna.read(person_id=target_ws.workspace_id)
            try:
                # change auto transfer number 1
                update = numbers.copy(deep=True)
                transfer = f'+4961007739{random.randint(0, 999):03}'
                update.auto_transfer_number1 = transfer
                tna.configure(person_id=target_ws.workspace_id, settings=update)
                # verify update
                updated = tna.read(person_id=target_ws.workspace_id)
                # update should not work with use_custom_enabled == False
                self.assertEqual(numbers, updated)
            finally:
                # restore old settings
                tna.configure(person_id=target_ws.workspace_id, settings=numbers.configure_unset_numbers)
                restored = tna.read(person_id=target_ws.workspace_id)
                self.assertEqual(numbers, restored)
            # try
        # with

    def test_003_update_one_number(self):
        """
        try to update auto transfer numbers for a workspace
        """
        tna = self.api.workspace_settings.permissions_out.transfer_numbers
        with self.target_ws_context() as target_ws:
            target_ws: Workspace
            numbers = tna.read(person_id=target_ws.workspace_id)
            try:
                # change auto transfer number 1
                update = numbers.copy(deep=True)
                transfer = f'+496100773{random.randint(0, 9999):03}'
                update.auto_transfer_number1 = transfer
                tna.configure(person_id=target_ws.workspace_id, settings=update)
                # verify update
                updated = tna.read(person_id=target_ws.workspace_id)
                # number should be equal; ignore hyphens in number returned by API
                self.assertEqual(transfer, updated.auto_transfer_number1.replace('-', ''))
                # other than that the updated numbers should be identical to the numbers before
                updated.auto_transfer_number1 = numbers.auto_transfer_number1
                self.assertEqual(numbers, updated)
            finally:
                # restore old settings
                tna.configure(person_id=target_ws.workspace_id, settings=numbers.configure_unset_numbers)
                restored = tna.read(person_id=target_ws.workspace_id)
                self.assertEqual(numbers, restored)
            # try
        # with

    # NOTE(review): the "002" prefix is duplicated (see test_002_update_wo_custom_enabled);
    # the method names differ so both still run, but the numbering is misleading.
    def test_002_update_one_number_no_effect_on_other_numbers(self):
        """
        try to update auto transfer numbers for a workspace. Verify that updating a single number doesn't affect the
        other numbers
        """
        tna = self.api.workspace_settings.permissions_out.transfer_numbers
        with self.target_ws_context() as target_ws:
            target_ws: Workspace
            numbers = tna.read(person_id=target_ws.workspace_id)
            try:
                all_numbers_set = AutoTransferNumbers(auto_transfer_number1='+4961007738001',
                                                      auto_transfer_number2='+4961007738002',
                                                      auto_transfer_number3='+4961007738003')
                tna.configure(person_id=target_ws.workspace_id, settings=all_numbers_set)
                all_numbers_set = tna.read(person_id=target_ws.workspace_id)
                # change auto transfer number 1
                transfer = f'+496100773{random.randint(0, 9999):03}'
                update = AutoTransferNumbers(auto_transfer_number1=transfer)
                tna.configure(person_id=target_ws.workspace_id, settings=update)
                # verify update
                updated = tna.read(person_id=target_ws.workspace_id)
                # number should be equal; ignore hyphens in number returned by API
                self.assertEqual(transfer, updated.auto_transfer_number1.replace('-', ''))
                # other than that the updated numbers should be identical to the numbers before
                updated.auto_transfer_number1 = all_numbers_set.auto_transfer_number1
                self.assertEqual(all_numbers_set, updated)
            finally:
                # restore old settings
                tna.configure(person_id=target_ws.workspace_id, settings=numbers.configure_unset_numbers)
                restored = tna.read(person_id=target_ws.workspace_id)
                self.assertEqual(numbers, restored)
            # try
        # with
class TestCreateUpdate(TestCaseWithLog):
    """Tests for creating workspaces and updating their settings."""

    def new_names(self) -> Generator[str, None, None]:
        # generate workspace display names with the test prefix that are not taken yet
        ws_list = list(self.api.workspaces.list())
        ws_names = set(w.display_name for w in ws_list)
        new_gen = (name for i in range(1000)
                   if (name := f'{TEST_WORKSPACES_PREFIX}{i:03}') not in ws_names)
        return new_gen

    @contextmanager
    def target(self, no_edge: bool = False) -> Generator[Workspace, None, None]:
        # pick a random workspace (optionally excluding edge-for-devices ones)
        # and restore its full settings when the context exits
        # NOTE(review): "targat_ws" is a typo for "target_ws" but is internal only
        ws = self.api.workspaces
        ws_list = list(ws.list())
        if no_edge:
            ws_list = [ws for ws in ws_list
                       if ws.calling != CallingType.edge_for_devices]
        targat_ws = random.choice(ws_list)
        targat_ws = ws.details(workspace_id=targat_ws.workspace_id)
        try:
            yield targat_ws
        finally:
            # restore the full pre-test settings and verify the round trip
            ws.update(workspace_id=targat_ws.workspace_id, settings=targat_ws)
            restored = ws.details(workspace_id=targat_ws.workspace_id)
            self.assertEqual(targat_ws, restored)

    def test_001_trivial(self):
        """
        create workspace with minimal settings
        """
        ws = self.api.workspaces
        name = next(self.new_names())
        settings = Workspace.create(display_name=name)
        workspace = ws.create(settings=settings)
        print(f'new worksspace: {workspace.json()}')
        self.assertEqual(name, workspace.display_name)

    def test_002_edge_for_devices(self):
        """
        create workspace with edge_for_devices
        """
        ws = self.api.workspaces
        name = next(self.new_names())
        settings = Workspace(display_name=name, calling=CallingType.edge_for_devices)
        workspace = ws.create(settings=settings)
        print(f'new worksspace: {workspace.json()}')
        self.assertEqual(name, workspace.display_name)

    def test_003_change_name_full(self):
        """
        change name of a workspace, full settings
        """
        ws = self.api.workspaces
        with self.target(no_edge=True) as target_ws:
            target_ws: Workspace
            settings: Workspace = target_ws.copy(deep=True)
            new_name = next(self.new_names())
            settings.display_name = new_name
            after = ws.update(workspace_id=target_ws.workspace_id,
                              settings=settings)
        self.assertEqual(new_name, after.display_name)

    def test_004_change_name_name_only(self):
        """
        change name of a workspace, only name update
        """
        ws = self.api.workspaces
        with self.target(no_edge=True) as target_ws:
            target_ws: Workspace
            new_name = next(self.new_names())
            settings = Workspace(display_name=new_name)
            after = ws.update(workspace_id=target_ws.workspace_id,
                              settings=settings)
        self.assertEqual(new_name, after.display_name)
class TestDelete(TestCaseWithLog):
    def test_001_delete_one(self):
        """
        delete a random workspace
        """
        api = self.api.workspaces
        candidates = list(api.list(display_name=TEST_WORKSPACES_PREFIX))
        if not candidates:
            self.skipTest('No test workspace to delete')
        victim = random.choice(candidates)
        api.delete_workspace(workspace_id=victim.workspace_id)
        # reading details of a deleted workspace has to fail with a 404
        with self.assertRaises(RestError) as exc:
            api.details(workspace_id=victim.workspace_id)
        rest_error: RestError = exc.exception
        self.assertEqual(404, rest_error.response.status_code)
| 11,938 | 3,413 |
import tensorflow as tf
import glob
from configuration import OpenPoseCfg as cfg
from openpose.data.augmentation import Transformer
def get_tfrecord_filenames(path):
    """Return the sorted TFRecord file names matching `path*`, printing each one."""
    print("从"+path+"中提取TFRecords文件:")
    filenames = sorted(glob.glob(path + "*"))
    if not filenames:
        raise ValueError("未找到TFRecords文件!")
    for name in filenames:
        print(name)
    return filenames
def place_label_func(label):
    """Map a parsed label dict onto (image, per-stage targets) for the six outputs."""
    image = label["image"]
    pafs = label["pafs"]
    kpts = label["kpts"]
    # four PAF stages followed by two keypoint stages share the same targets
    return image, (pafs, pafs, pafs, pafs, kpts, kpts)
class TFRecordDataset:
    """Builds a batched tf.data pipeline from TFRecord files for OpenPose training."""

    def __init__(self, tfrecord_filenames, label_placement_func):
        # AUTOTUNE lets tf.data pick the parallelism for the map stages below.
        self.AUTOTUNE = tf.data.AUTOTUNE
        self.label_place = label_placement_func
        self.tfrecords = tfrecord_filenames
        self.transformer = Transformer()
        # cfg.image_aug_on toggles the optional augmentation stage in generate()
        self.img_aug = cfg.image_aug_on
        self.batch_size = cfg.batch_size

    def generate(self):
        """Assemble the dataset: parse -> decode -> label tensors -> batch -> (aug) -> mask -> outputs."""
        dataset = tf.data.TFRecordDataset(filenames=self.tfrecords)
        dataset = dataset.map(self.transformer.read_tfrecord, num_parallel_calls=self.AUTOTUNE)
        dataset = dataset.map(self.transformer.read_image, num_parallel_calls=self.AUTOTUNE)
        dataset = dataset.map(self.transformer.convert_label_to_tensors, num_parallel_calls=self.AUTOTUNE)
        # batch first: the remaining transforms operate on whole batches
        dataset = dataset.batch(self.batch_size)
        if self.img_aug:
            dataset = dataset.map(self.transformer.image_aug, num_parallel_calls=self.AUTOTUNE)
        dataset = dataset.map(self.transformer.apply_mask, num_parallel_calls=self.AUTOTUNE)
        dataset = dataset.map(self.label_place, num_parallel_calls=self.AUTOTUNE)
        # dataset = dataset.repeat()
        return dataset
def get_dataset():
    """Build the training dataset from the configured TFRecord directory."""
    filenames = get_tfrecord_filenames(cfg.train_tfrecords)
    return TFRecordDataset(filenames, place_label_func).generate()
# Car rental cost calculator: 60 R$ per rented day plus 0.15 R$ per km driven.
dia = int(input('Quantos dias alugados? '))
km = float(input('Quantos KM rodados? '))
print('Como você ficou {} dias com ele e rodou {:.2f}KM, sua conta ficou em {:.2f}R$'.format(dia, km, (60*dia)+(0.15*km)))
| 209 | 93 |
""" Ip Info """
class Ip(object):
    """Holds per-client IP info: speeds, online/active flags and the address."""

    def __init__(self, downspeed, online, upspeed, active, ip):
        self._downspeed = downspeed
        self._online = online
        self._upspeed = upspeed
        self._active = active
        self._ip = ip

    def __repr__(self):
        # Added for debuggability; backward-compatible (no existing behavior changed).
        return ('{}(downspeed={!r}, online={!r}, upspeed={!r}, active={!r}, ip={!r})'
                .format(type(self).__name__, self._downspeed, self._online,
                        self._upspeed, self._active, self._ip))

    def get_downspeed(self):
        """Return the download speed."""
        return self._downspeed

    def get_online(self):
        """Return the online state."""
        return self._online

    def get_upspeed(self):
        """Return the upload speed."""
        return self._upspeed

    def get_active(self):
        """Return the active state."""
        return self._active

    def get_ip(self):
        """Return the IP address."""
        return self._ip
def create_ip_from_json(json_entry):
    """Build an Ip instance from an already-decoded JSON dict."""
    # callers pass the result of json.loads(...), not the raw string
    return Ip(
        json_entry['downspeed'],
        json_entry['online'],
        json_entry['upspeed'],
        json_entry['active'],
        json_entry['ip'],
    )
| 775 | 248 |
from django.shortcuts import render, redirect, HttpResponseRedirect
from django.contrib import messages
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import UserCreationForm
from django.urls import reverse
def user_login(request):
    """Handle the login form: authenticate on POST, show the form on GET."""
    context = {}
    if request.method == "POST":
        username = request.POST['username']
        password = request.POST['password']
        remember_me = request.POST.get('remember_me', None)
        print(remember_me)
        if not remember_me:
            print("remember me not checked")
            # expire the session as soon as the browser closes
            request.session.set_expiry(0)
        user = authenticate(request, username=username,
                            password=password)
        if user:
            login(request, user)
            return HttpResponseRedirect(reverse('index'))
        # authentication failed: re-show the form with an error
        context["error"] = "User Does Not Exist"
        return render(request, "users/login.html", context=context)
    # GET: already logged-in users go straight to the index page
    if request.user.is_authenticated:
        return render(request, "lifecare/index.html")
    return render(request, "users/login.html", context=context)
@login_required()
def user_logout(request):
    """Log the current user out and redirect back to the index page."""
    logout(request)
    return HttpResponseRedirect(reverse('index'))
# Create your views here.
# def login(request):
# if request.method == 'POST':
# print("wow!!!!!!!!!!!!!!!!!!!!!")
# messages.success(request, f'Welcome Back username!')
# return render(request, 'lifecare/result.html')
# else:
# print("no!!!!!")
| 1,608 | 437 |
from gbdxtools.images.worldview import WorldViewImage
from gbdxtools.images.geoeye01 import GeoEyeDriver
from gbdxtools.images.util import vector_services_query
# Map catalog band-type labels to the colorInterpretation values used in the
# IDAHO vector-services query below.
band_types = {
    'MS': 'BGRN',
    'Panchromatic': 'PAN',
    'Pan': 'PAN',
    'pan': 'PAN'
}
class QB02Driver(GeoEyeDriver):
    # QuickBird-2 reuses the GeoEye-1 driver behavior unchanged.
    pass
class QB02(WorldViewImage):
    """QuickBird-2 image type; shares the WorldView image interface."""
    __Driver__ = QB02Driver

    @property
    def _rgb_bands(self):
        # MS band order is BGRN (see band_types), so RGB is bands 2, 1, 0.
        return [2,1,0]

    @staticmethod
    def _find_parts(cat_id, band_type):
        # Query vector services for IDAHO image parts of this catalog ID with
        # the requested color interpretation (band_type is a band_types key).
        query = "item_type:IDAHOImage AND attributes.catalogID:{} " \
                "AND attributes.colorInterpretation:{}".format(cat_id, band_types[band_type])
        return vector_services_query(query)
| 689 | 250 |
from meld import logger
def main():
    # Entry point: currently only logs the start/finish of the move operation.
    logger.info('Starting move operation')
    logger.info('Finished')
import tensorflow as tf
def bpnet_model(seqlen=1000, numchars=4, num_dilated_convs=9, num_tasks=1,
                name='bpnet_model'):
    """Build a BPNet-style Keras model: dilated residual conv body plus
    per-task profile-shape and total-counts heads.

    Args:
        seqlen: input sequence length.
        numchars: alphabet size. NOTE: unused — the Input layer hard-codes
            4 channels below.
        num_dilated_convs: number of residual dilated conv layers.
        num_tasks: number of (profile, counts) head pairs.
        name: NOTE: unused — never passed to tf.keras.Model.

    Returns:
        tf.keras.Model with 2 * num_tasks outputs, ordered as
        [task0 profile (flattened), task0 counts (scalar), task1 profile, ...].
    """
    # original as per https://www.biorxiv.org/content/10.1101/737981v1.full.pdf
    inp = tf.keras.layers.Input(shape=(seqlen, 4))
    x = tf.keras.layers.Conv1D(
        64, kernel_size=25, padding='same', activation='relu')(inp)
    # residual dilated convolutions with exponentially growing dilation rate
    for i in range(num_dilated_convs):
        conv_x = tf.keras.layers.Conv1D(
            64, kernel_size=3, padding='same', activation='relu', dilation_rate=2**i)(x)
        x = tf.keras.layers.Add()([conv_x, x])
    bottleneck = x
    # heads
    outputs = []
    for _ in range(num_tasks):
        # profile shape head: transposed conv over the bottleneck, then flatten
        px = tf.keras.layers.Reshape((-1, 1, 64))(bottleneck)
        px = tf.keras.layers.Conv2DTranspose(
            1, kernel_size=(25, 1), padding='same')(px)
        outputs.append(tf.keras.layers.Flatten()(px))
        # total counts head: global average pool followed by a scalar Dense
        cx = tf.keras.layers.GlobalAvgPool1D()(bottleneck)
        outputs.append(tf.keras.layers.Dense(1)(cx))
    model = tf.keras.Model(inputs=inp, outputs=outputs)
    return model
| 1,137 | 454 |
#!/usr/bin/env python3
#-------------------------------------------------------------------------------
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def sortedArrayToBST(self, nums):
        """
        :type nums: List[int]
        :rtype: TreeNode
        """
        # Recursively pick the middle element as the subtree root so the
        # resulting tree stays height-balanced.
        def build(lo, hi):
            if lo > hi:
                return None
            mid = (lo + hi) // 2
            node = TreeNode(nums[mid])
            node.left = build(lo, mid - 1)
            node.right = build(mid + 1, hi)
            return node

        return build(0, len(nums) - 1)
#-------------------------------------------------------------------------------
# Testing
| 801 | 216 |
# Generated by Django 3.2.6 on 2021-08-14 19:48
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: makes CourseInfo.name a unique choice field over the
    # ten racecourse names.

    dependencies = [
        ('collect', '0005_alter_courseinfo_name'),
    ]

    operations = [
        migrations.AlterField(
            model_name='courseinfo',
            name='name',
            field=models.CharField(choices=[('Sapporo', '札幌'), ('Hakodate', '函館'), ('Fukushima', '福島'), ('Nigata', '新潟'), ('Tokyo', '東京'), ('Nakayama', '中山'), ('Chukyo', '中京'), ('Kyoto', '京都'), ('Hanshin', '阪神'), ('Ogura', '小倉')], max_length=32, unique=True),
        ),
    ]
| 600 | 244 |
import numpy as np
import xobjects as xo
import xpart as xp
def test_basics():
    """Filtering particles by a mask keeps context, capacity and data intact."""
    for context in xo.context.get_test_contexts():
        print(f"Test {context.__class__}")
        particles = xp.Particles(x=[1, 2, 3], px=[10, 20, 30],
                                 mass0=xp.ELECTRON_MASS_EV,
                                 _context=context)
        selected = particles.filter(particles.x > 1)
        assert selected._buffer.context == context
        assert selected._capacity == 2
        dct = selected.to_dict()
        assert dct['mass0'] == xp.ELECTRON_MASS_EV
        assert np.all(dct['px'] == np.array([20., 30.]))
| 596 | 226 |
from utils.loader import Loader
from utils.model import DeepSNN
import torch
import os
def _train_layer(model, trainset, name, layer, epochs):
    """Train one SNN layer with STDP, caching the weights as <name>_Layer<n>.net.

    If a cached checkpoint already exists it is loaded instead of retraining.
    """
    ordinal = "first" if layer == 1 else "second"
    print("-------Training the {} layer-------".format(ordinal))
    checkpoint = "{}_Layer{}.net".format(name, layer)
    if os.path.isfile(checkpoint):
        model.load_state_dict(torch.load(checkpoint))
        print("Loaded from disk!")  # fixed typo: was "Loaded from disck!"
        return
    for epoch in range(epochs):
        print("Epoch:", epoch)
        for data, _ in trainset:
            model.train_model(data, layer)
    print("\nDone!")
    torch.save(model.state_dict(), checkpoint)


def feature_extraction(prop):
    """Train the two-layer SNN described by `prop` and extract features.

    Returns:
        (train_X, train_y, test_X, test_y, (conv1_weights, conv2_weights))
    """
    name = prop["name"]
    trainset, testset = Loader(name)
    model = DeepSNN(prop)
    # The two training phases were duplicated code; factored into _train_layer.
    _train_layer(model, trainset, name, 1, prop["epochs_l1"])
    _train_layer(model, trainset, name, 2, prop["epochs_l2"])
    # Classification on trainset and testset
    # NOTE(review): each iteration overwrites train_X/train_y, so only the last
    # batch is kept — assumes the loaders yield a single full-dataset batch; confirm.
    for data, target in trainset:
        train_X, train_y = model.test(data, target, 2)
    for data, target in testset:
        test_X, test_y = model.test(data, target, 2)
    return train_X, train_y, test_X, test_y, (model.conv1.weight, model.conv2.weight)
def Classification(train_X, train_y, test_X, test_y, C=2.4):
    """Fit a linear SVM on the train features and predict both splits."""
    # SVM
    from sklearn.svm import LinearSVC

    classifier = LinearSVC(C=C)
    classifier.fit(train_X, train_y)
    return classifier.predict(train_X), classifier.predict(test_X)
def performance(x, y, predict):
    """Return (correct, incorrect, silence) rates for the predictions.

    A sample whose feature vector sums to zero counts as "silent" and is
    excluded from the correct/incorrect tally.
    """
    correct = 0
    silence = 0
    for i in range(len(predict)):
        if x[i].sum() == 0:
            silence += 1
        elif predict[i] == y[i]:
            correct += 1
    total = len(x)
    incorrect = total - (correct + silence)
    return (correct / total, incorrect / total, silence / total)
def confussion_matrix(test_y, predicted_test, labels):
    """Plot the confusion matrix of predictions vs. ground truth.

    NOTE: the function name is misspelled ("confussion") but is kept
    unchanged for backward compatibility with existing callers.
    """
    import matplotlib.pyplot as plt
    from sklearn.metrics import confusion_matrix, ConfusionMatrixDisplay
    cm = confusion_matrix(test_y, predicted_test)
    cmd_obj = ConfusionMatrixDisplay(cm, display_labels=labels)
    # print(cm)
    cmd_obj.plot()
    plt.show()
# %%
# Caltech experiment configuration (4 classes, see `labels` below).
Caltech = { "name" : "Caltech",
            "epochs_l1" : 20,
            "epochs_l2" : 100,
            "weight_mean" : 0.8,
            "weight_std" : 0.05,
            "lr" : (0.005, -0.0025),
            "in_channel1" : 4,
            "in_channel2" : 40,
            "out_channel" : 150,
            "k1" : 10,
            "k2" : 25,
            "r1" : 0,
            "r2" : 2,}
# Extract SNN features, classify with the linear SVM and print the
# (correct, incorrect, silence) rates for train and test.
train_X, train_y, test_X, test_y, weights = feature_extraction(Caltech)
predicted_train, predicted_test = Classification(train_X, train_y, test_X, test_y)
n = performance(train_X, train_y, predicted_train)
m = performance(test_X, test_y, predicted_test)
print(n)
print(m)
labels = ['Airplane', 'Car_side', 'Faces_easy', 'Motorbikes']
confussion_matrix(test_y, predicted_test, labels)
# %%
# MNIST experiment configuration (10 digit classes).
MNIST = {"name" : "MNIST",
         "epochs_l1":2,
         "epochs_l2":20,
         "weight_mean" : 0.8,
         "weight_std" : 0.05,
         "lr" : (0.004, -0.003),
         "in_channel1" : 2,
         "in_channel2" : 32,
         "out_channel" : 150,
         "k1" : 5,
         "k2" : 8,
         "r1" : 2,
         "r2" : 1,}
# Same pipeline as the Caltech cell: features -> SVM -> performance rates.
train_X, train_y, test_X, test_y, weights = feature_extraction(MNIST)
predicted_train, predicted_test = Classification(train_X, train_y, test_X, test_y)
n = performance(train_X, train_y, predicted_train)
m = performance(test_X, test_y, predicted_test)
print(n)
print(m)
labels = ['0','1','2','3','4','5','6','7','8','9']
confussion_matrix(test_y, predicted_test, labels)
# %%
# import cv2
# import numpy as np
# w1, w2 = weights
# w1 = torch.reshape(w1, (160, 5, 5))
# # w2 = torch.reshape(w2, (6000, 2, 2))
# def features_pic(w, i):
# # w = torch.squeeze(w)
# w -= w.min()
# w = (w/w.max()) * 255
# pic = cv2.resize(np.array(w), (100, 100))
# cv2.imwrite("features/feature" + str(i) + ".jpg", pic)
# for i in range(len(w1)):
# features_pic(w1[i], i) | 4,550 | 1,859 |
"""集合基礎
集合内包表記の使い方
[説明ページ]
https://tech.nkhn37.net/python-set-comprehension/#i
[内包表記まとめページ]
https://tech.nkhn37.net/python-comprehension/
"""
data = [10, 15, 20, 25, 30, 35, 40, 45, 50, 10, 15, 20, 25, 30]
# 集合内包表記
data_set = {dt for dt in data if dt % 2 == 0}
print(f'data_set : {data_set}')
| 296 | 195 |
class Solution:
    def halvesAreAlike(self, s: str) -> bool:
        """Return True if both halves of s contain the same number of vowels."""
        vowels = {'a', 'e', 'i', 'o', 'u', 'A', 'E', 'I', 'O', 'U'}
        half = len(s) // 2
        # The original comprehensions shadowed the parameter `s` with the loop
        # variable and returned True/False from an explicit if/else; count with
        # generator expressions and return the comparison directly.
        first_count = sum(1 for ch in s[:half] if ch in vowels)
        second_count = sum(1 for ch in s[half:] if ch in vowels)
        return first_count == second_count
#!/usr/bin/env python
# * coding: utf8 *
'''
worker_bee.py
A module that contains logic for building traditional image-based caches.
'''
import os
import socket
import time
from os.path import join, dirname, realpath
from shutil import rmtree
import pygsheets
from datetime import date
import arcpy
from . import config, settings, update_data
from .messaging import send_email
spot_cache_name = 'spot cache'
error_001470_message = 'ERROR 001470: Failed to retrieve the job status from server. The Job is running on the server, please use the above URL to check the job status.\nFailed to execute (ManageMapServerCacheTiles).\n' # noqa
def parse_levels(levels_txt):
    """Parse a "min-max" levels string into the matching slice of settings.SCALES.

    Both bounds are inclusive indices into settings.SCALES.
    """
    # Renamed from min/max: the original shadowed the built-ins.
    low, high = map(int, levels_txt.split('-'))
    return settings.SCALES[low:high + 1]
def intersect_scales(scales, restrict_scales):
    """Return the scales present in both inputs (set intersection, order unspecified)."""
    return list(set(scales).intersection(restrict_scales))
class WorkerBee(object):
    """Drives a full (or spot) image-cache build for a single ArcGIS map service."""

    def __init__(self, s_name, missing_only=False, skip_update=False, skip_test=False, spot_path=False, levels=False):
        """Set up caching state and immediately run the requested cache build.

        :param s_name: map service name to cache
        :param missing_only: only recreate empty tiles
        :param skip_update: skip the data update step
        :param skip_test: skip the test-extent cache run
        :param spot_path: polygon feature class for a spot cache (False for full build)
        :param levels: "min-max" level range restriction string (False for all levels)
        """
        print('caching {}'.format(s_name))
        self.errors = []
        self.start_time = time.time()
        self.service_name = s_name
        if not levels:
            self.restrict_scales = settings.SCALES
        else:
            self.restrict_scales = parse_levels(levels)
        try:
            print('deleting previous *_GCS folder, if any')
            rmtree(os.path.join(settings.CACHE_DIR, s_name + '_GCS'))
        except Exception:
            # best effort: the folder may not exist
            pass
        if config.is_dev():
            self.complete_num_bundles = 19
        else:
            self.complete_num_bundles = settings.COMPLETE_NUM_BUNDLES_LU[self.service_name]
        ip = socket.gethostbyname(socket.gethostname())
        self.preview_url = settings.PREVIEW_URL.format(ip, self.service_name)
        self.service = os.path.join(config.get_ags_connection(), '{}.MapServer'.format(self.service_name))
        self.email_subject = 'Cache Update ({})'.format(self.service_name)
        if skip_update:
            print('skipping data update...')
        else:
            update_data.main()
            send_email(self.email_subject, 'Data update complete. Proceeding with caching...')
        if skip_test:
            print('skipping test cache...')
        else:
            self.cache_test_extent()
        if missing_only:
            print('caching empty tiles only...')
        self.missing_only = missing_only
        self.start_bundles = self.get_bundles_count()
        if self.missing_only:
            self.update_mode = 'RECREATE_EMPTY_TILES'
            print('Caching empty tiles only')
        else:
            self.update_mode = 'RECREATE_ALL_TILES'
            print('Caching all tiles')
        if not spot_path:
            self.cache(not levels)
        else:
            #: levels 0-17 include the entire state
            print('spot caching levels 0-17...')
            self.cache_extent(settings.SCALES[:18], spot_path, spot_cache_name)
            #: levels 18-19 intersect with cache extent
            print('intersecting spot cache polygon with level 18-19 cache extent...')
            intersect = arcpy.analysis.Intersect([spot_path, join(settings.EXTENTSFGDB, settings.EXTENT_18_19)],
                                                 'in_memory/spot_cache_intersect',
                                                 join_attributes='ONLY_FID')
            print('spot caching levels 18-19...')
            self.cache_extent(settings.SCALES[18:], intersect, spot_cache_name)

    def cache_extent(self, scales, aoi, name):
        """Cache one area-of-interest at the requested scales, recording failures."""
        cache_scales = intersect_scales(scales, self.restrict_scales)
        if len(cache_scales) == 0:
            return
        print('caching {} at {}'.format(name, cache_scales))
        if config.is_dev() and name != spot_cache_name:
            aoi = settings.TEST_EXTENT
        try:
            arcpy.server.ManageMapServerCacheTiles(self.service, cache_scales, self.update_mode, settings.NUM_INSTANCES, aoi)
        except arcpy.ExecuteError as e:
            if e.message == error_001470_message:
                # known, non-fatal server status error; the job keeps running server-side
                msg = 'ERROR 001470 thrown. Moving on and hoping the job completes successfully.'
                print(msg)
                # NOTE(review): 'e.message' below is a literal string, not the
                # exception text — probably meant to be formatted in.
                send_email('Cache Warning (ERROR 001470)', 'e.message\n\narcpy.GetMessages:\n{}'.format(arcpy.GetMessages().encode('utf-8')))
            else:
                # remember the failed extent so cache() can retry it later
                self.errors.append([cache_scales, aoi, name])
                print(arcpy.GetMessages().encode('utf-8'))
                send_email('Cache Update ({}) - arcpy.ExecuteError'.format(self.service_name), arcpy.GetMessages().encode('utf-8'))

    def get_progress(self):
        """Return (and print) a human-readable progress/ETA message."""
        total_bundles = self.get_bundles_count()
        bundles_per_hour = (total_bundles - self.start_bundles) / ((time.time() - self.start_time) / 60 / 60)
        if bundles_per_hour != 0 and total_bundles > self.start_bundles:
            hours_remaining = (self.complete_num_bundles - total_bundles) / bundles_per_hour
        else:
            # no measurable progress yet; restart the clock
            self.start_time = time.time()
            hours_remaining = '??'
        percent = int(round(float(total_bundles) / self.complete_num_bundles * 100.00))
        msg = '{} of {} ({}%) bundle files created.\nEstimated hours remaining: {}'.format(
            total_bundles, self.complete_num_bundles, percent, hours_remaining)
        print(msg)
        return msg

    def get_bundles_count(self):
        """Count the bundle files created so far in the cache directory."""
        totalfiles = 0
        basefolder = os.path.join(settings.CACHE_DIR, self.service_name.replace('/', '_'), 'Layers', '_alllayers')
        for d in os.listdir(basefolder):
            if d != 'missing.jpg':
                totalfiles += len(os.listdir(os.path.join(basefolder, d)))
        return totalfiles

    def cache_test_extent(self):
        """Cache the small test extent first and email a preview link; raise on failure."""
        print('caching test extent')
        cache_scales = intersect_scales(settings.SCALES, self.restrict_scales)
        try:
            arcpy.server.ManageMapServerCacheTiles(self.service, cache_scales, 'RECREATE_ALL_TILES', settings.NUM_INSTANCES, settings.TEST_EXTENT)
            send_email('Cache Test Extent Complete ({})'.format(self.service_name), self.preview_url)
            # if raw_input('Recache test extent (T) or continue with full cache (F): ') == 'T':
            #     self.cache_test_extent()
        except arcpy.ExecuteError:
            print(arcpy.GetMessages().encode('utf-8'))
            send_email('Cache Test Extent Error ({}) - arcpy.ExecuteError'.format(self.service_name), arcpy.GetMessages().encode('utf-8'))
            raise arcpy.ExecuteError

    def cache(self, run_all_levels):
        """Run the full cache build: coarse extents, per-grid levels, retries, bookkeeping."""
        arcpy.env.workspace = settings.EXTENTSFGDB
        # coarse levels (0-9) cached per configured extent feature class
        for fc_name, scales in settings.CACHE_EXTENTS:
            self.cache_extent(scales, fc_name, fc_name)
        send_email(self.email_subject,
                   'Levels 0-9 completed.\n{}\n{}'.format(self.get_progress(), self.preview_url))
        if config.is_dev():
            settings.GRIDS = settings.GRIDS[:-4]
        # finer levels cached one grid polygon at a time
        for grid in settings.GRIDS:
            total_grids = int(arcpy.management.GetCount(grid[0])[0])
            grid_count = 0
            progress = ''
            with arcpy.da.SearchCursor(grid[0], ['SHAPE@', 'OID@']) as cur:
                for row in cur:
                    grid_count += 1
                    grid_percent = int(round((float(grid_count) / total_grids) * 100))
                    self.cache_extent([grid[1]], row[0], '{}: OBJECTID: {}'.format(grid[0], row[1]))
                    grit_percent_msg = 'Grids for this level completed: {}%'.format(grid_percent)
                    print(grit_percent_msg)
                    progress = self.get_progress()
            send_email(self.email_subject, 'Level {} completed.\n{}\n{}\nNumber of errors: {}'.format(grid[0], progress, self.preview_url, len(self.errors)))
        # retry every extent that failed during the main pass
        while (len(self.errors) > 0):
            msg = 'Recaching errors. Errors left: {}'.format(len(self.errors))
            print(msg)
            send_email(self.email_subject, msg)
            self.cache_extent(*self.errors.pop())
        bundles = self.get_bundles_count()
        if bundles < self.complete_num_bundles and run_all_levels:
            # not every expected bundle exists yet; run the whole build again
            msg = 'Only {} out of {} bundles completed. Recaching...'.format(bundles, self.complete_num_bundles)
            print(msg)
            send_email(self.email_subject, msg)
            self.cache(True)
        send_email(self.email_subject + ' Finished', 'Caching complete!\n\n{}'.format(self.preview_url))
        print('updating google spreadsheets')
        client = pygsheets.authorize(service_file=join(dirname(realpath(__file__)), 'service_account.json'))
        sgid_sheet = client.open_by_key('11ASS7LnxgpnD0jN4utzklREgMf1pcvYjcXcIcESHweQ')
        sgid_worksheet = sgid_sheet[0]
        base_maps_sheet = client.open_by_key('1XnncmhWrIjntlaMfQnMrlcCTyl9e2i-ztbvqryQYXDc')
        base_maps_worksheet = base_maps_sheet[0]
        #: update sgid changelog
        today = date.today().strftime(r'%m/%d/%Y')
        matrix = sgid_worksheet.get_all_values(include_tailing_empty_rows=False, include_tailing_empty=False)
        row = [today, 'Complete', self.service_name, 'Recache', 'Statewide cache rebuild and upload to GCP', 'stdavis', 'no', 'no', 'no', 'no', 'no', 'no', 'yes']
        sgid_worksheet.insert_rows(len(matrix), values=row, inherit=True)
        #: update base maps spreadsheet embedded in gis.utah.gov page
        this_month = date.today().strftime(r'%b %Y')
        results = base_maps_worksheet.find(self.service_name, matchEntireCell=True)
        cell = results[0]
        base_maps_worksheet.update_value((cell.row + 1, cell.col), this_month)
| 9,789 | 3,097 |
import taichi as ti
# Low-precision pi used in the kernel normalization constants below.
PI = 3.1415926
# http://www.glowinggoo.com/sph/bin/kelager.06.pdf
@ti.data_oriented
class W_poly6:
    """Poly6 SPH smoothing kernel with gradient and Laplacian (Kelager 2006)."""

    @staticmethod
    @ti.func
    def W(r_vec, h):
        # squared distance, clamped away from zero to avoid singularities
        r2 = r_vec.dot(r_vec)
        r2 = ti.max(r2, 1e-10)
        k = 0.0
        # kernel is zero outside the support radius h
        if r2 <= h ** 2:
            k = ((h ** 2) - r2) ** 3
        return 315 / (64 * PI * (h ** 9)) * k

    @staticmethod
    @ti.func
    def W_grad(r_vec, h):
        # gradient w.r.t. r_vec; zero vector outside the support radius
        r2 = r_vec.dot(r_vec)
        r2 = ti.max(r2, 1e-10)
        k = ti.Vector([0.0 for i in range(r_vec.n)])
        if r2 <= h ** 2:
            k = ((h ** 2) - r2) ** 2 * r_vec
        return -945 / (32 * PI * (h ** 9)) * k

    @staticmethod
    @ti.func
    def W_grad2(r_vec, h):
        # Laplacian of the poly6 kernel
        r2 = r_vec.dot(r_vec)
        r2 = ti.max(r2, 1e-10)
        k = 0.0
        if r2 <= h ** 2:
            k = ((h ** 2) - r2) * (3 * h ** 2 - 7 * r2)
        return -945 / (32 * PI * (h ** 9)) * k
@ti.data_oriented
class W_spiky:
    # pressure
    """Spiky SPH kernel; its non-vanishing gradient near r=0 suits pressure forces."""

    @staticmethod
    @ti.func
    def W(r_vec, h):
        # distance, clamped away from zero to avoid division issues
        r = ti.sqrt(r_vec.dot(r_vec))
        r = ti.max(r, 1e-5)
        k = 0.0
        if r <= h:
            k = (h - r) ** 3
        return 15 / (PI * (h ** 6)) * k

    @staticmethod
    @ti.func
    def W_grad(r_vec, h):
        # gradient w.r.t. r_vec; zero vector outside the support radius
        r = ti.sqrt(r_vec.dot(r_vec))
        r = ti.max(r, 1e-5)
        k = ti.Vector([0.0 for i in range(r_vec.n)])
        if r <= h:
            k = (h - r) ** 2 / r * r_vec
        return -45 / (PI * (h ** 6)) * k

    @staticmethod
    @ti.func
    def W_grad2(r_vec, h):
        # Laplacian of the spiky kernel
        r = ti.sqrt(r_vec.dot(r_vec))
        r = ti.max(r, 1e-5)
        k = 0.0
        if r <= h:
            k = (h - r) * (h - 2 * r) / r
        return -90 / (PI * (h ** 6)) * k
@ti.data_oriented
class W_viscosity:
    """Viscosity smoothing kernel (the original marks it for the viscosity term)."""

    @staticmethod
    @ti.func
    def W(r_vec, h):
        """Kernel value for offset r_vec and support radius h."""
        # Distance, clamped away from zero to avoid the 1/r singularity.
        dist = ti.sqrt(r_vec.dot(r_vec))
        dist = ti.max(dist, 1e-5)
        val = 0.0
        if dist <= h:
            val = -(dist ** 3) / (2 * (h ** 3)) + (dist / h) ** 2 + (h / 2 / dist) - 1
        return 15 / (2 * PI * (h ** 3)) * val

    @staticmethod
    @ti.func
    def W_grad(r_vec, h):
        """Kernel gradient (vector of the same dimension as r_vec)."""
        dist = ti.sqrt(r_vec.dot(r_vec))
        dist = ti.max(dist, 1e-5)
        grad = ti.Vector([0.0 for _ in range(r_vec.n)])
        if dist <= h:
            grad = r_vec * (-3 * dist / (2 * (h**3)) + 2/h/h - (h / (2 * dist**3)))
        return 15 / (2 * PI * (h ** 3)) * grad

    @staticmethod
    @ti.func
    def W_grad2(r_vec, h):
        """Kernel Laplacian (scalar); this is the classic 45/(pi h^6)(h - r) form."""
        dist = ti.sqrt(r_vec.dot(r_vec))
        dist = ti.max(dist, 1e-5)
        lap = 0.0
        if dist <= h:
            lap = h - dist
        return 45 / (PI * (h ** 6)) * lap
if __name__ == '__main__':
    ti.init(arch=ti.cpu, debug=True)

    @ti.kernel
    def test():
        # Probe points: near the singularity, off-axis, mid-support, and at
        # the edge of the support radius.
        p_near = ti.Vector([1e-2, 0])
        p_diag = ti.Vector([0.2, 0.2])
        p_mid = ti.Vector([0.5, 0])
        p_edge = ti.Vector([1.0, 0])
        h = 1
        # One line per kernel quantity: value, gradient, Laplacian.
        print(W_poly6.W(p_near, h), W_poly6.W(p_diag, h), W_poly6.W(p_mid, h), W_poly6.W(p_edge, h))
        print(W_poly6.W_grad(p_near, h), W_poly6.W_grad(p_diag, h), W_poly6.W_grad(p_mid, h), W_poly6.W_grad(p_edge, h))
        print(W_poly6.W_grad2(p_near, h), W_poly6.W_grad2(p_diag, h), W_poly6.W_grad2(p_mid, h), W_poly6.W_grad2(p_edge, h))
        print(W_spiky.W(p_near, h), W_spiky.W(p_diag, h), W_spiky.W(p_mid, h), W_spiky.W(p_edge, h))
        print(W_spiky.W_grad(p_near, h), W_spiky.W_grad(p_diag, h), W_spiky.W_grad(p_mid, h), W_spiky.W_grad(p_edge, h))
        print(W_spiky.W_grad2(p_near, h), W_spiky.W_grad2(p_diag, h), W_spiky.W_grad2(p_mid, h), W_spiky.W_grad2(p_edge, h))
        print(W_viscosity.W(p_near, h), W_viscosity.W(p_diag, h), W_viscosity.W(p_mid, h), W_viscosity.W(p_edge, h))
        print(W_viscosity.W_grad(p_near, h), W_viscosity.W_grad(p_diag, h), W_viscosity.W_grad(p_mid, h), W_viscosity.W_grad(p_edge, h))
        print(W_viscosity.W_grad2(p_near, h), W_viscosity.W_grad2(p_diag, h), W_viscosity.W_grad2(p_mid, h), W_viscosity.W_grad2(p_edge, h))

    test()
@ti.data_oriented
class W_cubic:
    """Cubic spline kernel with support radius 2h.

    Unlike the kernels above, the caller supplies the distance r explicitly
    and W_grad returns the scalar dW/dr rather than a vector.
    NOTE(review): the 10/(7*pi*h^n) normalization matches the 2-D cubic
    spline when r_vec.n == 2 — confirm before using in 3-D.
    """

    @staticmethod
    @ti.func
    def W(r_vec, r, h):
        """Kernel value; r_vec is used only for its dimension r_vec.n."""
        norm = 10. / (7. * PI * h**r_vec.n)
        q = r / h
        val = 0.0
        if q <= 1.0:
            val = norm * (1 - 1.5 * q**2 + 0.75 * q**3)
        elif q < 2.0:
            val = norm * 0.25 * (2 - q)**3
        return val

    @staticmethod
    @ti.func
    def W_grad(r_vec, r, h):
        """Scalar radial derivative dW/dr."""
        norm = 10. / (7. * PI * h**r_vec.n)
        q = r / h
        val = 0.0
        if q < 1.0:
            val = (norm / h) * (-3 * q + 2.25 * q**2)
        elif q < 2.0:
            val = -0.75 * (norm / h) * (2 - q)**2
        return val
| 4,418 | 2,184 |
import requests
import json
# Look up today's prayer times for a user-supplied city/district via the
# ezanvakti API. Fix: the original never broke out of the lookup loops and
# crashed with a NameError (unbound ID/PD) when the name did not match;
# it also bound a response to the name `re`, shadowing the regex module name.
url_sehir = "https://ezanvakti.herokuapp.com/sehirler/2"
sehir_resp = requests.get(url_sehir)
sehirler = sehir_resp.json()

sehir_adi = input("Şehir:")
ilce_adi = input("İlçe:")

# Resolve the city name to its API id; stop at the first match.
sehir_id = None
for sehir in sehirler:
    if sehir_adi == sehir["SehirAdi"]:
        sehir_id = sehir["SehirID"]
        break

if sehir_id is None:
    print("Şehir bulunamadı:", sehir_adi)
else:
    print(sehir_id)
    url_ilce = "https://ezanvakti.herokuapp.com/ilceler/{}".format(sehir_id)
    ilce_resp = requests.get(url_ilce)
    ilceler = ilce_resp.json()

    # Resolve the district name to its API id.
    ilce_id = None
    for ilce in ilceler:
        if ilce_adi == ilce["IlceAdi"]:
            ilce_id = ilce["IlceID"]
            break

    if ilce_id is None:
        print("İlçe bulunamadı:", ilce_adi)
    else:
        print(ilce_id)
        url_vakit = "https://ezanvakti.herokuapp.com/vakitler/{}".format(ilce_id)
        vakit_resp = requests.get(url_vakit)
        vakitler = vakit_resp.json()
        # First entry holds today's times; print each name/time pair.
        muzo = vakitler[0]
        for vakit in muzo:
            print(vakit,":",muzo[vakit])

input("Çıkmak için herhangi bir tuşa bas")
#!/usr/bin/python3
"""
RobotArm API service config file
"""
import pathlib
from robotarm.armservice.views import api_views
from flask import (
Flask,
make_response,
jsonify
)
from robotarm.armservice import getenv
# Initialize the Flask application for the RobotArm API service.
app = Flask(__name__)
# Mount the API blueprint so its routes are served by this app.
app.register_blueprint(api_views)
# Allow a missing trailing slash: /route and /route/ both match.
app.url_map.strict_slashes = False
@app.errorhandler(404)
def not_found(error):
    """Return a JSON 404 payload instead of Flask's default HTML page.

    Args:
        error: the exception Flask hands to error handlers (unused).

    Returns:
        JSON: json object with status code 404
    """
    body = {"error": "Not Found"}
    return make_response(jsonify(body), 404)
if __name__ == '__main__':
    # Bind address/port come from the environment with sane defaults.
    host = getenv("ARM_API_HOST", "0.0.0.0")
    port = getenv("ARM_API_PORT", "5555")
    # getenv returns a string; Werkzeug's dev server expects an int port,
    # so convert explicitly instead of passing "5555" through.
    app.run(host=host, port=int(port))
| 799 | 288 |
from xbrr.base.reader.base_element_schema import BaseElementSchema
from bs4.element import NavigableString, Tag
import bs4
class ElementSchema(BaseElementSchema):
    """Schema record for a single XBRL element (name, type, labels, ...).

    Instances are built from <xsd:element> definitions (``read_schema``) or
    resolved from a reference URI (``create_from_reference``); human-readable
    labels are attached afterwards by ``read_label_taxonomy``.
    """

    def __init__(self,
                 name="", reference="", label="", alias="",
                 abstract="", data_type="",
                 period_type="", balance=""):
        super().__init__()
        self.name = name
        self.reference = reference
        self.label = label
        self.alias = alias
        self.abstract = abstract
        self.period_type = period_type
        self.balance = balance
        self.verbose_label = ""
        # data types:
        # domain, textBlock, percent, perShare, boolean, date, decimal,
        # monetary, nonNegativeInteger, shares, string
        # Strip any namespace prefix and the "ItemType" suffix,
        # e.g. "xbrli:monetaryItemType" -> "monetary".
        self.data_type = data_type
        if data_type is not None and ':' in data_type:
            self.data_type = data_type.split(':')[-1].replace('ItemType','')

    def set_alias(self, alias):
        """Set the element alias and return self for fluent chaining."""
        self.alias = alias
        return self

    @classmethod
    def create_from_reference(cls, reader, reference):
        """Resolve *reference* (xsd URI + '#' + element id) to an ElementSchema.

        When the document has no schema (test-only path), a bare instance is
        created whose name is the URI fragment.
        """
        if not reader.xbrl_doc.has_schema: # for test purpose only
            name = reference.split("#")[-1]
            instance = cls(name=name, reference=reference)
            return instance
        instance = reader.get_schema_by_link(reference)
        instance.reference = reference
        return instance

    @classmethod
    def read_schema(cls, reader, xsduri):
        """Parse the xsd at *xsduri*; return {element id: ElementSchema}.

        NOTE(review): element["xbrli:periodType"] is read unconditionally and
        would raise KeyError for elements lacking it — presumably every
        element in these filings carries it; confirm against real documents.
        """
        xsd_dic = {}
        xml = reader.read_uri(xsduri)
        for element in xml.find_all("element"):
            # <xsd:element id="jpcrp030000-asr_E00436-000_Subsidy" xbrli:balance="credit" xbrli:periodType="duration" abstract="false" name="Subsidy" nillable="true" substitutionGroup="xbrli:item" type="xbrli:monetaryItemType" />
            instance = cls(name=element["id"], alias=element["name"],
                           data_type=element["type"],
                           period_type=element["xbrli:periodType"],
                           abstract=element["abstract"] if element.get("abstract") else "",
                           balance=element.get("xbrli:balance") if element.get("xbrli:balance") else "")
            xsd_dic[element["id"]] = instance
        return xsd_dic

    @classmethod
    def read_label_taxonomy(cls, reader, xsduri, xsd_dic):
        """Attach labels from the label linkbase of *xsduri* to *xsd_dic*.

        Walks loc / label / labelArc entries: ``loc`` maps linkbase labels to
        element ids, ``label`` collects label/verboseLabel resources, and
        ``labelArc`` wires a resource to the ElementSchema it describes.
        """
        label_xml = reader.read_label_of_xsd(xsduri)
        loc_dic = {}       # xlink:label -> element id (URI fragment)
        resource_dic = {}  # xlink:label -> {'role', 'text'} of a label resource

        def read_label(elem: bs4.element.Tag):
            """Dispatch one linkbase child into loc_dic/resource_dic/xsd_dic."""
            if elem.name == "loc":
                attrs = elem.attrs
                assert 'xlink:href' in attrs and 'xlink:label' in attrs
                # href = jpcrp040300-q1r-001_E04251-000_2016-06-30_01_2016-08-12.xsd#jpcrp040300-q1r_E04251-000_ProvisionForLossOnCancellationOfContractEL
                # label = ProvisionForLossOnCancellationOfContractEL
                v = elem['xlink:href'].split('#')
                assert len(v) == 2
                loc_dic[elem['xlink:label']] = v[1]
            elif elem.name == "label":
                attrs = elem.attrs
                if 'xlink:label' in attrs and 'xlink:role' in attrs:
                    # Only the standard and verbose label roles are kept.
                    label_role = "http://www.xbrl.org/2003/role/label"
                    verboseLabel_role = "http://www.xbrl.org/2003/role/verboseLabel"
                    if elem['xlink:role'] in [label_role, verboseLabel_role]:
                        resource_dic[elem['xlink:label']] = {'role': elem['xlink:role'], 'text': elem.text}
            elif elem.name == "labelArc":
                attrs = elem.attrs
                if 'xlink:from' in attrs and 'xlink:to' in attrs and elem['xlink:to'] in resource_dic:
                    if elem['xlink:from'] in loc_dic and loc_dic[elem['xlink:from']] in xsd_dic:
                        ele = xsd_dic[loc_dic[elem['xlink:from']]]
                        res = resource_dic[elem['xlink:to']]
                        ele.set_label(**res) # Label(res['role'], res['text'])

        for elem in label_xml.find_all('labelLink'): # "link:labelLink"
            for child in elem.children:
                if isinstance(child, Tag):
                    read_label(child)

    def set_label(self, role, text):
        """Store *text* as label or verbose_label depending on *role*."""
        if role.endswith('label'):
            self.label = text
        elif role.endswith('verboseLabel'):
            self.verbose_label = text

    def to_dict(self):
        """Serialize the schema fields to a plain dict (alias/verbose_label omitted)."""
        return {
            "name": self.name,
            "reference": self.reference,
            "label": self.label,
            "abstract": self.abstract,
            "data_type": self.data_type,
            "period_type": self.period_type,
            "balance": self.balance
        }
| 4,685 | 1,438 |
from datetime import datetime
class Sources:
    """
    Plain data holder for a news source record.
    """

    def __init__(self, id, name, description, url, category, country):
        """Store all source fields verbatim as instance attributes."""
        (self.id, self.name, self.description,
         self.url, self.category, self.country) = (
            id, name, description, url, category, country)
class Articles:
    """
    Plain data holder for a news article record.

    `published_at` is expected as an ISO-8601 UTC string such as
    "2020-10-18T11:51:00Z" and is stored reformatted as "Month DD, YYYY".
    """

    def __init__(self, author, title, description, url, url_to_Image, published_at, content):
        """Store article fields; reformat the publication timestamp."""
        self.author, self.title, self.description = author, title, description
        self.url, self.url_to_Image, self.content = url, url_to_Image, content
        published = datetime.strptime(published_at, "%Y-%m-%dT%H:%M:%SZ")
        self.published_at = published.strftime("%B %d, %Y")
# Generated by Django 3.1 on 2020-10-18 11:51
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the Categories/Posts models to singular Category/Post and
    set a human-friendly plural name for Post."""

    dependencies = [
        # The Posts model references the user model, so its migration state
        # must be loaded before renaming.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('post', '0002_auto_20200920_1217'),
    ]

    operations = [
        # Switch to conventional singular model names.
        migrations.RenameModel(
            old_name='Categories',
            new_name='Category',
        ),
        migrations.RenameModel(
            old_name='Posts',
            new_name='Post',
        ),
        # Keep the plural display name lowercase "posts".
        migrations.AlterModelOptions(
            name='post',
            options={'verbose_name_plural': 'posts'},
        ),
    ]
| 661 | 217 |
from .transcript_day import *
def test():
    """Smoke-test TranscriptDay against the embedded fixture page."""
    day = TranscriptDay(EXAMPLE_DATA)
    print(day)
    # Room metadata.
    assert day.room_id == 11540
    assert day.room_name == "Charcoal HQ"
    # Day navigation links parsed from the page header.
    assert day.first_day == datetime.date(2013, 11, 16)
    assert day.previous_day == datetime.date(2017, 11, 16)
    assert day.next_day == datetime.date(2017, 11, 18)
    assert day.last_day == datetime.date(2017, 11, 22)
    # First message of the transcript.
    first = day.messages[0]
    assert first.id == 41197805
    assert first.parent_message_id is None
    assert first.owner_user_id == 205533
    assert first.owner_user_name == "Videonauth"
    assert len(day.messages) == 61
EXAMPLE_DATA = r'''
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" >
<head>
<title>Charcoal HQ - 2017-11-17 (page 2 of 4)</title>
<link rel="shortcut icon" href="//cdn.sstatic.net/stackexchange/img/favicon.ico?v=da"><link rel="apple-touch-icon" href="//cdn.sstatic.net/stackexchange/img/apple-touch-icon.png?v=da"><link rel="search" type="application/opensearchdescription+xml" title="Chat for chat.stackexchange.com" href="/opensearch.xml">
<link rel="canonical" href="/transcript/11540/2017/11/17/1-2" />
<script type="text/javascript" src="//ajax.googleapis.com/ajax/libs/jquery/1.12.4/jquery.min.js"></script>
<script type="text/javascript" src="//cdn-chat.sstatic.net/chat/Js/master-chat.js?v=f1e5ed9ea207"></script>
<link rel="stylesheet" href="//cdn-chat.sstatic.net/chat/css/chat.stackexchange.com.css?v=7d154b0411cf">
<script type="text/javascript">
function IMAGE(f) { return ("//cdn-chat.sstatic.net/chat/img/" + f); }
</script>
<script type="text/javascript">
$(function() {
initTranscript(true,
1251, true,
true, 11540,
true);
popupDismisser();
});
</script>
</head>
<body id="transcript-body">
<div id="container">
<div id="main">
<a href="/transcript/11540/2013/11/16" class="button noprint" title="2013-11-16">« first day (1461 days earlier)</a>
<a href="/transcript/11540/2017/11/16" class="button noprint" rel="prev" title="2017-11-16">← previous day</a>
<link rel="prev" title="2017-11-16" href="/transcript/11540/2017/11/16" />
<a href="/transcript/11540/2017/11/18" class="button noprint" rel="next" title="2017-11-18">next day →</a>
<link rel="next" title="2017-11-18" href="/transcript/11540/2017/11/18" />
<a href="/transcript/11540/2017/11/22" class="button noprint" title="2017-11-22"> last day (5 days later) »</a>
<div class="clear-both"></div>
<div class="clear-both"></div><div class="pager"><a href="/transcript/11540/2017/11/17/0-1"><span class="page-numbers">00:00 - 01:00</span></a><span class="page-numbers current">01:00 - 02:00</span><a href="/transcript/11540/2017/11/17/2-13"><span class="page-numbers">02:00 - 13:00</span></a><a href="/transcript/11540/2017/11/17/13-24"><span class="page-numbers">13:00 - 00:00</span></a></div><div class="clear-both"></div>
<br/>
<div id="transcript">
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="timestamp">1:00 AM</div>
<div class="message" id="message-41197805">
<a name="41197805" href="/transcript/11540?m=41197805#41197805"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
<div class="onebox ob-image"><a rel="nofollow noopener noreferrer" href="//i.stack.imgur.com/mdGKA.jpg"><img src="//i.stack.imgur.com/mdGKA.jpg" class="user-image" alt="user image" /></a></div>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41197807">
<a name="41197807" href="/transcript/11540?m=41197807#41197807"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
and this :)
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41197831">
<a name="41197831" href="/transcript/11540?m=41197831#41197831"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
Mhm
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-120914">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/WyV1l.png?s=16&g=1" alt="SmokeDetector" />
</div>
<div class="username"><a href="/users/120914/smokedetector" title="SmokeDetector">SmokeDetector</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41197902">
<a name="41197902" href="/transcript/11540?m=41197902#41197902"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
Merged SmokeDetector <a href="https://github.com/Charcoal-SE/SmokeDetector/pull/1236" rel="nofollow noopener noreferrer">#1236</a>.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-167070">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/AYXNm.png?s=16&g=1" alt="quartata" />
</div>
<div class="username"><a href="/users/167070/quartata" title="quartata">quartata</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41197912">
<a name="41197912" href="/transcript/11540?m=41197912#41197912"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
sorry for the delay
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-120914">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/WyV1l.png?s=16&g=1" alt="SmokeDetector" />
</div>
<div class="username"><a href="/users/120914/smokedetector" title="SmokeDetector">SmokeDetector</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41197915">
<a name="41197915" href="/transcript/11540?m=41197915#41197915"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
<a href="https://codecov.io/gh/Charcoal-SE/SmokeDetector/compare/8c1cd7633587085fff94743dcc9096c646c7344a...afbbeba682ba4094d33c8d9dd2a522b2d713b665" rel="nofollow noopener noreferrer">CI</a> on <a href="https://github.com/Charcoal-SE/SmokeDetector/commit/afbbeba" rel="nofollow noopener noreferrer"><code>afbbeba</code></a> succeeded. Message contains 'autopull', pulling...
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41197923">
<a name="41197923" href="/transcript/11540?m=41197923#41197923"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//github.com/Charcoal-SE/SmokeDetector" rel="nofollow noopener noreferrer">SmokeDetector</a> ] SmokeDetector started at <a href="//github.com/Charcoal-SE/SmokeDetector/commit/6ad928a" rel="nofollow noopener noreferrer">rev 6ad928a (metasmoke: <i>Merge pull request #1236 from Charcoal-SE/auto-blacklist-1510879822.8478458</i>)</a> (running on Henders/EC2)
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41197925">
<a name="41197925" href="/transcript/11540?m=41197925#41197925"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
Restart: API quota is 18014.
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41197984">
<a name="41197984" href="/transcript/11540?m=41197984#41197984"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//askubuntu.com/a/977247" rel="nofollow noopener noreferrer">MS</a> ] Potentially bad keyword in answer, blacklisted user: <a href="//askubuntu.com/a/977247">viewer for X.509 certificate</a> by <a href="//askubuntu.com/users/760491">vite11</a> on <code>askubuntu.com</code>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198028">
<a name="41198028" href="/transcript/11540?m=41198028#41198028"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/questions/118122" rel="nofollow noopener noreferrer">MS</a> ] Mostly dots in body: <a href="//es.stackoverflow.com/questions/118122">Por qué el organo varonil se llama pene?</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="timestamp">1:15 AM</div>
<div class="message" id="message-41198159">
<a name="41198159" href="/transcript/11540?m=41198159#41198159"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
sd k
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198165">
<a name="41198165" href="/transcript/11540?m=41198165#41198165"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
sd - k
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-120914">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/WyV1l.png?s=16&g=1" alt="SmokeDetector" />
</div>
<div class="username"><a href="/users/120914/smokedetector" title="SmokeDetector">SmokeDetector</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198166">
<a name="41198166" href="/transcript/11540?m=41198166#41198166"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
Conflicting feedback across revisions: <a href="//metasmoke.erwaysoftware.com/post/93853" rel="nofollow noopener noreferrer">current</a>, <a href="//metasmoke.erwaysoftware.com/post/93852" rel="nofollow noopener noreferrer">#1</a>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198171">
<a name="41198171" href="/transcript/11540?m=41198171#41198171"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
tpu by QPaysTaxes on <a href="//askubuntu.com/a/977247">viewer for X.509 certificate</a> [<a href="http://metasmoke.erwaysoftware.com/post/93852" rel="nofollow noopener noreferrer">MS</a>]
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198235">
<a name="41198235" href="/transcript/11540?m=41198235#41198235"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/a/118124" rel="nofollow noopener noreferrer">MS</a> ] Blacklisted user: <a href="//es.stackoverflow.com/a/118124">Como mandar un registro de una celda DataGridView a un textbox de otro formulario?</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198237">
<a name="41198237" href="/transcript/11540?m=41198237#41198237"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/a/118127" rel="nofollow noopener noreferrer">MS</a> ] Blacklisted user: <a href="//es.stackoverflow.com/a/118127">Como mandar un registro de una celda DataGridView a un textbox de otro formulario?</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198240">
<a name="41198240" href="/transcript/11540?m=41198240#41198240"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/questions/118125" rel="nofollow noopener noreferrer">MS</a> ] Blacklisted user: <a href="//es.stackoverflow.com/questions/118125">¿Cómo puedo hacer este código funcional para mi website-blog de tecnología?</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198242">
<a name="41198242" href="/transcript/11540?m=41198242#41198242"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/questions/118122" rel="nofollow noopener noreferrer">MS</a> ] Blacklisted user: <a href="//es.stackoverflow.com/questions/118122">Por que el organo varonil se le denomina pene</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198272">
<a name="41198272" href="/transcript/11540?m=41198272#41198272"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198242#41198242"> </a>
<div class="content">
@SmokeDetector tpu-
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198287">
<a name="41198287" href="/transcript/11540?m=41198287#41198287"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198240#41198240"> </a>
<div class="content">
@SmokeDetector tpu-
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198300">
<a name="41198300" href="/transcript/11540?m=41198300#41198300"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198237#41198237"> </a>
<div class="content">
@SmokeDetector tpu-
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198304">
<a name="41198304" href="/transcript/11540?m=41198304#41198304"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
question: can anyone in here do: <code>!!/repor t <link></code> when a case linke above happens?
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198305">
<a name="41198305" href="/transcript/11540?m=41198305#41198305"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198235#41198235"> </a>
<div class="content">
@SmokeDetector tpu-
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198345">
<a name="41198345" href="/transcript/11540?m=41198345#41198345"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198304#41198304"> </a>
<div class="content">
@Videonauth You need to be a <a href="https://charcoal-se.org/smokey/Commands#privileged-commands" rel="nofollow noopener noreferrer">privileged user</a>.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-120914">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/WyV1l.png?s=16&g=1" alt="SmokeDetector" />
</div>
<div class="username"><a href="/users/120914/smokedetector" title="SmokeDetector">SmokeDetector</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198349">
<a name="41198349" href="/transcript/11540?m=41198349#41198349"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
Conflicting feedback across revisions: <a href="//metasmoke.erwaysoftware.com/post/93848" rel="nofollow noopener noreferrer">current</a>, <a href="//metasmoke.erwaysoftware.com/post/93847" rel="nofollow noopener noreferrer">#1</a>
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="timestamp">1:28 AM</div>
<div class="message" id="message-41198350">
<a name="41198350" href="/transcript/11540?m=41198350#41198350"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
ah ok :)
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198423">
<a name="41198423" href="/transcript/11540?m=41198423#41198423"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198350#41198350"> </a>
<div class="content">
@Videonauth There's a <a href="https://charcoal-se.org/pings/mods" rel="nofollow noopener noreferrer">list of mods to ping</a> when things get out of hand.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198457">
<a name="41198457" href="/transcript/11540?m=41198457#41198457"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
oh yes i know a few i would ping then otherwise i drop you guys here a line if i stumble on a missed one
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198542">
<a name="41198542" href="/transcript/11540?m=41198542#41198542"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198457#41198457"> </a>
<div class="content">
@Videonauth Do you understand Spanish?
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198550">
<a name="41198550" href="/transcript/11540?m=41198550#41198550"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
nope only english and german (native)
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198594">
<a name="41198594" href="/transcript/11540?m=41198594#41198594"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
I have no idea what <a href="https://es.stackoverflow.com/a/118124/">this</a> means. It should probably be reported, but I don't know. It sort of looks like Italian though.
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198607">
<a name="41198607" href="/transcript/11540?m=41198607#41198607"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
No wait, it's already caught above.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="timestamp">1:45 AM</div>
<div class="message" id="message-41198629">
<a name="41198629" href="/transcript/11540?m=41198629#41198629"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
<span class="deleted">(removed)</span>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198635">
<a name="41198635" href="/transcript/11540?m=41198635#41198635"><span style="display:inline-block;" class="action-link edits"><span class="img"> </span></span></a>
<div class="content">
use google translate, cant let this stand here :) at least not without getting a time out for naughtyness
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198641">
<a name="41198641" href="/transcript/11540?m=41198641#41198641"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198594#41198594"> </a>
<div class="content">
@NisseEngström It's spam.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198709">
<a name="41198709" href="/transcript/11540?m=41198709#41198709"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198635#41198635"> </a>
<div class="content">
@Videonauth Google Translate didn't work on that one.
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198714">
<a name="41198714" href="/transcript/11540?m=41198714#41198714"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198641#41198641"> </a>
<div class="content">
@QPaysTaxes Thanks.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198715">
<a name="41198715" href="/transcript/11540?m=41198715#41198715"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
it did
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198719">
<a name="41198719" href="/transcript/11540?m=41198719#41198719"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
dont know if you can see deleted messages
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198731">
<a name="41198731" href="/transcript/11540?m=41198731#41198731"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198719#41198719"> </a>
<div class="content">
@Videonauth Nope.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198769">
<a name="41198769" href="/transcript/11540?m=41198769#41198769"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
its spanish, autodetection did work to english: will post it in a few seconds againhere for short please dont flagbann me
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198776">
<a name="41198776" href="/transcript/11540?m=41198776#41198776"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
<span class="deleted">(removed)</span>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198787">
<a name="41198787" href="/transcript/11540?m=41198787#41198787"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
seen?
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="timestamp">1:54 AM</div>
<div class="message" id="message-41198799">
<a name="41198799" href="/transcript/11540?m=41198799#41198799"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198787#41198787"> </a>
<div class="content">
@Videonauth Yes, but that's not the one I linked to.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198835">
<a name="41198835" href="/transcript/11540?m=41198835#41198835"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
ah the one above mhmmm yes doesnt work
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198853">
<a name="41198853" href="/transcript/11540?m=41198853#41198853"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198769#41198769"> </a>
<div class="content">
@Videonauth No one here flag-bans people.
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198854">
<a name="41198854" href="/transcript/11540?m=41198854#41198854"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
You get flag banned if you flag too many things that get declined by mods.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198858">
<a name="41198858" href="/transcript/11540?m=41198858#41198858"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
its gibberish tho talking about a garden party at an uncles place
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198860">
<a name="41198860" href="/transcript/11540?m=41198860#41198860"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
Gibberish is a perfectly good reason to red-flag.
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198864">
<a name="41198864" href="/transcript/11540?m=41198864#41198864"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
Whether you flag as spam or r/a doesn't really matter.
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198869">
<a name="41198869" href="/transcript/11540?m=41198869#41198869"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
got it translated via leo.org
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-155243">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/fY1pd.jpg?s=16&g=1" alt="Nisse Engström" />
</div>
<div class="username"><a href="/users/155243/nisse-engstrom" title="Nisse Engström">Nisse Engström</a></div>
</div></div>
<div class="messages">
<div class="timestamp">1:58 AM</div>
<div class="message" id="message-41198872">
<a name="41198872" href="/transcript/11540?m=41198872#41198872"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
!!/report <a href="https://es.stackoverflow.com/a/118116/" rel="nofollow noopener noreferrer">es.stackoverflow.com/a/118116</a> <a href="https://es.stackoverflow.com/a/118114/" rel="nofollow noopener noreferrer">es.stackoverflow.com/a/118114</a> <a href="https://es.stackoverflow.com/a/118120/" rel="nofollow noopener noreferrer">es.stackoverflow.com/a/118120</a> <a href="https://es.stackoverflow.com/a/118119/" rel="nofollow noopener noreferrer">es.stackoverflow.com/a/118119</a>
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-120914">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/WyV1l.png?s=16&g=1" alt="SmokeDetector" />
</div>
<div class="username"><a href="/users/120914/smokedetector" title="SmokeDetector">SmokeDetector</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198873">
<a name="41198873" href="/transcript/11540?m=41198873#41198873"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/a/118116" rel="nofollow noopener noreferrer">MS</a> ] Manually reported answer (batch report: post 1 out of 4): <a href="//es.stackoverflow.com/a/118116">No logro entender porque me da este error ArrayIndexOutOfBoundsException: 6</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198876">
<a name="41198876" href="/transcript/11540?m=41198876#41198876"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/a/118114" rel="nofollow noopener noreferrer">MS</a> ] Manually reported answer (batch report: post 2 out of 4): <a href="//es.stackoverflow.com/a/118114">Asignar valores a un combobox html con JSOUP</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198880">
<a name="41198880" href="/transcript/11540?m=41198880#41198880"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198873#41198873"> </a>
<div class="content">
@SmokeDetector k
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-120914">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/WyV1l.png?s=16&g=1" alt="SmokeDetector" />
</div>
<div class="username"><a href="/users/120914/smokedetector" title="SmokeDetector">SmokeDetector</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198879">
<a name="41198879" href="/transcript/11540?m=41198879#41198879"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/a/118120" rel="nofollow noopener noreferrer">MS</a> ] Manually reported answer (batch report: post 3 out of 4): <a href="//es.stackoverflow.com/a/118120">Crystal Reports Arroja "E_NOINTERFACE" cuando reporte.SetDataSource(ds);</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198882">
<a name="41198882" href="/transcript/11540?m=41198882#41198882"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<div class="content">
[ <a href="//goo.gl/eLDYqh" rel="nofollow noopener noreferrer">SmokeDetector</a> | <a href="//m.erwaysoftware.com/posts/by-url?url=//es.stackoverflow.com/a/118119" rel="nofollow noopener noreferrer">MS</a> ] Manually reported answer (batch report: post 4 out of 4): <a href="//es.stackoverflow.com/a/118119">Descargar archivos desde la terminal de Mac</a> by <a href="//es.stackoverflow.com/users/66574">Escroto Y Pene Gratis</a> on <code>es.stackoverflow.com</code>
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198884">
<a name="41198884" href="/transcript/11540?m=41198884#41198884"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198876#41198876"> </a>
<div class="content">
@SmokeDetector k
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-205533">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/idNpq.jpg?s=16&g=1" alt="Videonauth" />
</div>
<div class="username"><a href="/users/205533/videonauth" title="Videonauth">Videonauth</a></div>
</div></div>
<div class="messages">
<div class="message" id="message-41198888">
<a name="41198888" href="/transcript/11540?m=41198888#41198888"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198853#41198853"> </a>
<div class="content">
@QPaysTaxes well i posted the translation of that other spanish post which was not really PG friendly <i>coughs coughs</i>
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
<div class="monologue user-137388">
<div class="signature"><div class="tiny-signature">
<div class="avatar avatar-16">
<img width="16" height="16" src="https://i.stack.imgur.com/Ma6sp.jpg?s=16&g=1" alt="QPaysTaxes" />
</div>
<div class="username"><a href="/users/137388/qpaystaxes" title="QPaysTaxes">QPaysTaxes</a></div>
</div></div>
<div class="messages">
<div class="timestamp">1:59 AM</div>
<div class="message" id="message-41198889">
<a name="41198889" href="/transcript/11540?m=41198889#41198889"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198879#41198879"> </a>
<div class="content">
@SmokeDetector k
</div>
<span class="flash">
</span>
</div>
<div class="message" id="message-41198893">
<a name="41198893" href="/transcript/11540?m=41198893#41198893"><span style="display:inline-block;" class="action-link"><span class="img"> </span></span></a>
<a class="reply-info" href="/transcript/11540?m=41198882#41198882"> </a>
<div class="content">
@SmokeDetector k
</div>
<span class="flash">
</span>
</div>
</div>
<div class="clear-both" style="height:0"> </div>
</div>
</div>
<div class="clear-both"></div><div class="pager"><a href="/transcript/11540/2017/11/17/0-1"><span class="page-numbers">00:00 - 01:00</span></a><span class="page-numbers current">01:00 - 02:00</span><a href="/transcript/11540/2017/11/17/2-13"><span class="page-numbers">02:00 - 13:00</span></a><a href="/transcript/11540/2017/11/17/13-24"><span class="page-numbers">13:00 - 00:00</span></a></div><div class="clear-both"></div>
<br/>
<a href="/transcript/11540/2013/11/16" class="button noprint" title="2013-11-16">« first day (1461 days earlier)</a>
<a href="/transcript/11540/2017/11/16" class="button noprint" rel="prev" title="2017-11-16">← previous day</a>
<link rel="prev" title="2017-11-16" href="/transcript/11540/2017/11/16" />
<a href="/transcript/11540/2017/11/18" class="button noprint" rel="next" title="2017-11-18">next day →</a>
<link rel="next" title="2017-11-18" href="/transcript/11540/2017/11/18" />
<a href="/transcript/11540/2017/11/22" class="button noprint" title="2017-11-22"> last day (5 days later) »</a>
<div class="clear-both"></div>
<div id="sidebar">
<div id="sidebar-content">
<div id="info">
<form method="get" action="/search">
<input type="text" id="searchbox" name="q"/>
<input type="hidden" name="room" value="11540" />
</form>
<div style="padding-top:3px;"><a href="/" class="button">all rooms</a></div>
<br clear=left />
<h2>Transcript for</h2>
<a class="calendar-small-link" href="/transcript/11540/2017/11/16">
<div class="icon" title="2017-11-16"><div class="calendar-small"><span class="weekday-small">Nov</span>16</div></div>
</a>
<div class="icon" title="2017-11-17"><div class="calendar"><span class="weekday">Nov</span>17</div></div>
<a class="calendar-small-link" href="/transcript/11540/2017/11/18">
<div class="icon" title="2017-11-18"><div class="calendar-small"><span class="weekday-small">Nov</span>18</div></div>
</a>
<br clear=left />
<div class="room-mini"><div class="room-mini-header"><h3><span class="room-name"><a rel="noreferrer noopener" href="/rooms/11540/charcoal-hq">Charcoal HQ</a></span></h3><div title="Where diamonds are made, smoke is detected, and we break things by developing on production. 76,000 true positives and counting. [Recursive] oneboxes are awesome. Handy links: http://charcoal-se.org, https://github.com/Charcoal-SE, http://charcoal-se.org/blaze/" class="room-mini-description">Where diamonds are made, smoke is detected, and we break thing...<a href="http://charcoal-se.org" rel="nofollow noopener noreferrer"></a><a href="https://github.com/Charcoal-SE" rel="nofollow noopener noreferrer"></a><a href="http://charcoal-se.org/blaze/" rel="nofollow noopener noreferrer"></a></div></div><div class="room-current-user-count" title="current users"><a rel="noopener noreferrer" href="/rooms/info/11540/charcoal-hq">33</a></div><div class="room-message-count" title="messages in the last 2h"><a rel="noopener noreferrer" href="/transcript/11540">75</a></div><div class="mspark" style="height:25px;width:205px">
<div class="mspbar" style="width:8px;height:6px;left:0px;"></div><div class="mspbar" style="width:8px;height:7px;left:8px;"></div><div class="mspbar" style="width:8px;height:9px;left:16px;"></div><div class="mspbar" style="width:8px;height:9px;left:24px;"></div><div class="mspbar" style="width:8px;height:16px;left:32px;"></div><div class="mspbar" style="width:8px;height:20px;left:40px;"></div><div class="mspbar" style="width:8px;height:21px;left:48px;"></div><div class="mspbar" style="width:8px;height:25px;left:56px;"></div><div class="mspbar" style="width:8px;height:21px;left:64px;"></div><div class="mspbar" style="width:8px;height:25px;left:72px;"></div><div class="mspbar" style="width:8px;height:20px;left:80px;"></div><div class="mspbar" style="width:8px;height:20px;left:88px;"></div><div class="mspbar" style="width:8px;height:17px;left:96px;"></div><div class="mspbar" style="width:8px;height:13px;left:104px;"></div><div class="mspbar" style="width:8px;height:10px;left:112px;"></div><div class="mspbar" style="width:8px;height:9px;left:120px;"></div><div class="mspbar" style="width:8px;height:13px;left:128px;"></div><div class="mspbar" style="width:8px;height:12px;left:136px;"></div><div class="mspbar" style="width:8px;height:17px;left:144px;"></div><div class="mspbar" style="width:8px;height:15px;left:152px;"></div><div class="mspbar" style="width:8px;height:12px;left:160px;"></div><div class="mspbar" style="width:8px;height:9px;left:168px;"></div><div class="mspbar" style="width:8px;height:7px;left:176px;"></div><div class="mspbar" style="width:8px;height:4px;left:184px;"></div><div class="mspbar now" style="height:25px;left:166px;"></div></div>
<div class="clear-both"></div></div>
<div><a rel="noopener noreferrer" class="tag" href="http://stackexchange.com/tags/best-bad-practices/info">best-bad-practices</a> <a rel="noopener noreferrer" class="tag" href="http://stackexchange.com/tags/dev-on-prod/info">dev-on-prod</a> <a rel="noopener noreferrer" class="tag" href="http://stackexchange.com/tags/panic-driven-development/info">panic-driven-development</a> <a rel="noopener noreferrer" class="tag" href="http://stackexchange.com/tags/plastic-knives/info">plastic-knives</a></div>
<br class="clear-both" />
<div class="noprint">
<div id="transcript-links">
<a id="join-room" href="/rooms/11540/charcoal-hq" class="button">join this room</a><br />
<a href="/rooms/info/11540/charcoal-hq" class="button">about this room</a><br />
<a class="button" href="#" id="bookmark-button">bookmark a conversation</a><br />
</div>
<br />
<div class="mspark" style="height:300px;width:200px">
<div class="mspbar" style="height:12px;width:57px;top:0px;"></div><div class="msplab" style="top:0px;">00:00</div><div class="mspbar" style="height:12px;width:182px;top:12px;"></div><div class="mspbar" style="height:12px;width:54px;top:24px;"></div><div class="mspbar" style="height:12px;width:12px;top:36px;"></div><div class="mspbar" style="height:12px;width:131px;top:48px;"></div><div class="mspbar" style="height:12px;width:110px;top:60px;"></div><div class="mspbar" style="height:12px;width:161px;top:72px;"></div><div class="msplab" style="top:72px;">06:00</div><div class="mspbar" style="height:12px;width:99px;top:84px;"></div><div class="mspbar" style="height:12px;width:113px;top:96px;"></div><div class="mspbar" style="height:12px;width:200px;top:108px;"></div><div class="mspbar" style="height:12px;width:99px;top:120px;"></div><div class="mspbar" style="height:12px;width:110px;top:132px;"></div><div class="mspbar" style="height:12px;width:90px;top:144px;"></div><div class="msplab" style="top:144px;">12:00</div><div class="mspbar" style="height:12px;width:191px;top:156px;"></div><div class="mspbar" style="height:12px;width:18px;top:168px;"></div><div class="mspbar" style="height:12px;width:15px;top:180px;"></div><div class="mspbar" style="height:12px;width:6px;top:192px;"></div><div class="mspbar" style="height:12px;width:75px;top:204px;"></div><div class="mspbar" style="height:12px;width:90px;top:216px;"></div><div class="msplab" style="top:216px;">18:00</div><div class="mspbar" style="height:12px;width:45px;top:228px;"></div><div class="mspbar" style="height:12px;width:57px;top:240px;"></div><div class="mspbar" style="height:12px;width:36px;top:252px;"></div><div class="mspbar" style="height:12px;width:54px;top:264px;"></div><div class="mspbar" style="height:12px;width:27px;top:276px;"></div><a href="/transcript/11540/2017/11/17/0-1"><div class="msparea" style="top:0px;width:200px;height:12px" title="19 messages"></div></a><a 
href="/transcript/11540/2017/11/17/1-2"><div class="msparea now" style="top:12px;width:200px;height:12px" title="61 messages"></div></a><a href="/transcript/11540/2017/11/17/2-13"><div class="msparea" style="top:24px;width:200px;height:132px" title="395 messages"></div></a><a href="/transcript/11540/2017/11/17/13-24"><div class="msparea" style="top:156px;width:200px;height:132px" title="205 messages"></div></a></div>
<div class="msg-small">
all times are UTC
</div>
<br />
</div>
<br /><br /><div id="transcript-logo"><a rel="noreferrer noopener" href="http://stackexchange.com" title="The Stack Exchange Network"><img style="max-width:150px" src="//cdn-chat.sstatic.net/chat/img/se-logo-white.png?v=da" alt="The Stack Exchange Network"/></a>
</div>
<br class="clear-both" /><br />
<div id="copyright">
site design / logo © 2017 Stack Exchange Inc; <a rel="noopener noreferrer" href="http://stackexchange.com/legal">legal</a>
<br /><br />
<a href="#" class="mobile-on">mobile</a>
</div>
</div>
</div>
</div>
</div>
</div> <input id="fkey" name="fkey" type="hidden" value="64f0ae1fdde80a7b92d9281473795fde" />
</body>
</html>'''
| 70,946 | 25,445 |
"""
A set of functions required to pre-process TRNSYS simulation input files
"""
# Common imports
import os
import sys
import shutil
import re
# Custom imports
from pybps import util
# Handle Python 2/3 compatibility
from six.moves import configparser
import six
if six.PY2:
    # Py2: SafeConfigParser performs the value interpolation that plain
    # Py3 ConfigParser provides natively, so alias both to one name.
    ConfigParser = configparser.SafeConfigParser
else:
    ConfigParser = configparser.ConfigParser
# NOTE(review): this shim only defines the name `ConfigParser`, yet
# gen_type56() below instantiates `SafeConfigParser()` directly, which is
# never defined at module level — that call looks like a NameError waiting
# to happen on Py3; confirm and switch it to `ConfigParser()`.
def parse_deck_const(deck_abspath):
    """Parse constants in control cards and equations from a TRNSYS deck file.

    Finds all constants in a TRNSYS deck file and stores constant name and
    value in a dict. The deck is split on `*----` separator lines; a block
    starting with 'V' (presumably the VERSION header of the control cards —
    confirm against real decks) is grouped under "Control Cards", and a
    block containing `* EQUATIONS "name"` is grouped under that name.

    Args:
        deck_abspath: absolute path to TRNSYS deck file

    Returns:
        A dict mapping group name -> dict of {constant name: value string}.
        Values are kept as the raw matched strings, not converted to float.
    """
    const_dict = {}
    split_blocks_pat = re.compile(r'[*][-]+')
    equa_pat = re.compile(r'[*]\sEQUATIONS\s"(.+?)"')
    const_pat = re.compile(r'\b(\w+)\b\s=\s(\d+\.*\d*)\s')
    with open(deck_abspath, 'r') as f:
        data = f.read()
    for block in split_blocks_pat.split(data):
        # startswith() is safely False for an empty block; the previous
        # `block[0] == 'V'` raised IndexError when the deck ended with a
        # `*----` separator (the final split element is then '').
        if block.startswith('V'):
            match_par = const_pat.findall(block)
            if match_par:
                # findall yields (name, value) pairs; dict() keeps them all.
                const_dict["Control Cards"] = dict(match_par)
        else:
            match_eq = equa_pat.findall(block)
            if match_eq:
                match_par = const_pat.findall(block)
                if match_par:
                    # Group constants under the first EQUATIONS block name.
                    const_dict[match_eq[0]] = dict(match_par)
    return const_dict
def prepare_deck_template(deck_abspath, param_list):
    """Prepare a template TRNSYS deck file for parametric analysis.

    Copies the deck to ``<name>_Template.dck`` and replaces the value of
    each listed constant with a parameter search string (the constant name
    surrounded by '%' signs), e.g. ``STOP = 8760`` becomes ``STOP = %STOP%``.

    Args:
        deck_abspath: absolute path to TRNSYS deck file
        param_list: list of parameter (constant) names to templatize

    Returns:
        None. Writes a valid template file for parametric analysis.
    """
    templ_deck_abspath = os.path.splitext(deck_abspath)[0] + "_Template.dck"
    shutil.copyfile(deck_abspath, templ_deck_abspath)
    with open(templ_deck_abspath, 'r+') as f:
        data = f.read()
        for par in param_list:
            # re.escape guards against parameter names that contain regex
            # metacharacters; plain names are matched unchanged.
            data = re.sub(r'(' + re.escape(par) + r')\s=\s(\d+\.*\d*)',
                          r'\g<1> = %\g<1>%', data)
        f.seek(0)
        f.write(data)
        f.truncate()
def gen_type56(model_abspath, select='all'):
    """Generate Type56 matrices and idf files.

    Calls TRNBUILD.exe with flags to generate matrices and IDF files.

    Args:
        model_abspath: absolute path to Type56 model file
        select: selects which files should be generated by TRNBUILD.
            'masks' generates the insolation matrix, 'vfm' generates the
            view factor matrix, 'matrices' generates both,
            'idf' generates the IDF file (similar to TRNBUILD 'export'
            function), 'all' generates everything.

    Returns:
        Generated files (written next to the b17 file); nothing is returned.
    """
    # Get executable paths from the config file.
    # BUGFIX: was `SafeConfigParser()`, a name never defined in this module;
    # the py2/py3 alias defined at import time is `ConfigParser`.
    conf = ConfigParser()
    # Build the path portably instead of hard-coding a Windows backslash.
    conf_file = os.path.join(os.path.abspath(os.path.dirname(__file__)),
                             '..', 'config.ini')
    conf.read(conf_file)
    trnbuild_path = os.path.abspath(conf.get('TRNSYS', 'TRNBuild_Path'))
    trnsidf_path = os.path.abspath(conf.get('TRNSYS', 'trnsIDF_Path'))
    # Get b17 file path from deck file. Mode 'rU' was removed in
    # Python 3.11; universal newlines are the default with 'r'.
    pattern = re.compile(r'ASSIGN "(.*b17)"')
    with open(model_abspath, 'r') as m_f:
        match = pattern.search(m_f.read())
    # TRNBUILD is only called if Type56 is found in deck file.
    if match:
        b17_relpath = match.group(1)
        b17_abspath = os.path.join(os.path.dirname(model_abspath), b17_relpath)
        # Generate shading/insolation matrix
        if select in ('all', 'matrices', 'masks'):
            util.run_cmd([trnbuild_path, b17_abspath, '/N', '/masks'])
        # Generate view factor matrix
        if select in ('all', 'matrices', 'vfm'):
            util.run_cmd([trnbuild_path, b17_abspath, '/N', '/vfm'])
        # Generate trnsys3D idf file, to view geometry in Sketchup
        if select in ('all', 'idf'):
            util.run_cmd([trnsidf_path, b17_abspath])
| 4,709 | 1,501 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Game of Life console.
A small playground for learning Python, or just having fun.
To use the console with an empty grid:
python -i -mgof.console
or
bpython -i -mgof.console
To use the console with a pseudo animation:
python -i -mgof.demo
Available variables:
* grid your playground a matrix of cellular automata
* patterns : pixel, still, oscillator, glider
they are singular patterns to play with in game of life.
* all_pattern : a list of all patterns (except pixel)
* matrix : the class name of grid is imported for educational purpose
* DEAD, ALIVE
Available functions:
* intro() : a short summary of all available functions
* bleach(...) a function to init the grid
* at(...) a function to draw a pattern on the grid
* rand_pattern() : a function to add random pattern in your grid
* evolve(...) make the game evolve for some time. If your terminal and/or
interactive python supports it, it will make a continuous animation
* use help(function_name) to know more yes it is a builtin ^_^
"""
from .matrix import matrix
from time import sleep
from .gof import glider, oscillator, still, pixel, all_pattern
from .gof import evolve, bleach,dirty, DEAD, ALIVE, at
#### Constants and globals
# Names re-exported by `from gof.console import *`.
__all__ = [
    "matrix", "at",
    "grid", "intro",
    "glider", "oscillator", "still", "pixel", "all_pattern",
    "evolve", "bleach", "dirty", "DEAD", "ALIVE"]
# Default playground dimensions used to build `grid` below.
x = 10
y = 30
def intro():
    """Print the module help text and the size of the default grid."""
    print(__doc__)
    print("""
you are left with an empty grid of %dx%d to play with, have fun""" % (x, y))
# The shared playground: an x-by-y matrix with every cell initially DEAD.
grid = matrix(x, y, x*y*[DEAD])

# When run as a script (python -mgof.console), just show the help text.
if '__main__' == __name__:
    print(__doc__)
| 1,650 | 536 |