id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
3559574 | import voluptuous as vol
from esphomeyaml.automation import ACTION_REGISTRY
import esphomeyaml.config_validation as cv
from esphomeyaml.const import CONF_DATA, CONF_DATA_TEMPLATE, CONF_ID, CONF_PASSWORD, CONF_PORT, \
CONF_SERVICE, CONF_VARIABLES, CONF_REBOOT_TIMEOUT
from esphomeyaml.core import CORE
from esphomeyaml.cpp_generator import Pvariable, add, get_variable, process_lambda
from esphomeyaml.cpp_helpers import setup_component
from esphomeyaml.cpp_types import Action, App, Component, StoringController, esphomelib_ns
# C++ namespace and class handles used by the code generator for the
# esphomelib native-API component.
api_ns = esphomelib_ns.namespace('api')
APIServer = api_ns.class_('APIServer', Component, StoringController)
HomeAssistantServiceCallAction = api_ns.class_('HomeAssistantServiceCallAction', Action)
KeyValuePair = api_ns.class_('KeyValuePair')
TemplatableKeyValuePair = api_ns.class_('TemplatableKeyValuePair')

# Validation schema for the `api:` YAML block.
CONFIG_SCHEMA = vol.Schema({
    cv.GenerateID(): cv.declare_variable_id(APIServer),
    # 6053 is the native-API default port; to_code() only emits set_port()
    # when the user overrides this default.
    vol.Optional(CONF_PORT, default=6053): cv.port,
    vol.Optional(CONF_PASSWORD, default=''): cv.string_strict,
    vol.Optional(CONF_REBOOT_TIMEOUT): cv.positive_time_period_milliseconds,
}).extend(cv.COMPONENT_SCHEMA.schema)
def to_code(config):
    """Emit the C++ setup calls for the API server from a validated config.

    Only non-default options produce set_* calls, keeping the generated
    code minimal.
    """
    server = Pvariable(config[CONF_ID], App.init_api_server())
    if config[CONF_PORT] != 6053:
        add(server.set_port(config[CONF_PORT]))
    if config.get(CONF_PASSWORD):
        add(server.set_password(config[CONF_PASSWORD]))
    if CONF_REBOOT_TIMEOUT in config:
        add(server.set_reboot_timeout(config[CONF_REBOOT_TIMEOUT]))
    setup_component(server, config)
BUILD_FLAGS = '-DUSE_API'


def lib_deps(config):
    """Return the platform-specific async-TCP PlatformIO dependency.

    The two platform flags are mutually exclusive, so the check order
    does not matter.
    """
    if CORE.is_esp8266:
        return 'ESPAsyncTCP@1.1.3'
    if CORE.is_esp32:
        return 'AsyncTCP@1.0.3'
    raise NotImplementedError
# Registry key under which the homeassistant.service action is registered.
CONF_HOMEASSISTANT_SERVICE = 'homeassistant.service'
# Validation schema for the homeassistant.service action.
# NOTE(review): "SERVIC" is missing an 'E'; the name is kept as-is because
# the @ACTION_REGISTRY.register call below references it.
HOMEASSISTANT_SERVIC_ACTION_SCHEMA = vol.Schema({
    cv.GenerateID(): cv.use_variable_id(APIServer),
    vol.Required(CONF_SERVICE): cv.string,
    # Static key/value payload sent with the service call.
    vol.Optional(CONF_DATA): vol.Schema({
        cv.string: cv.string,
    }),
    # Values rendered as templates on the Home Assistant side.
    vol.Optional(CONF_DATA_TEMPLATE): vol.Schema({
        cv.string: cv.string,
    }),
    # Values computed on the device via lambdas.
    vol.Optional(CONF_VARIABLES): vol.Schema({
        cv.string: cv.lambda_,
    }),
})
@ACTION_REGISTRY.register(CONF_HOMEASSISTANT_SERVICE, HOMEASSISTANT_SERVIC_ACTION_SCHEMA)
def homeassistant_service_to_code(config, action_id, arg_type, template_arg):
    """Generate the C++ action that calls a Home Assistant service.

    Implemented as an esphomeyaml code-generation coroutine: each
    ``yield None`` suspends until the awaited variable or lambda has been
    resolved by the generator framework, and the final ``yield act``
    returns the generated action.
    """
    for var in get_variable(config[CONF_ID]):
        yield None
    rhs = var.make_home_assistant_service_call_action(template_arg)
    # Renamed from `type` to avoid shadowing the builtin.
    type_ = HomeAssistantServiceCallAction.template(arg_type)
    act = Pvariable(action_id, rhs, type=type_)
    add(act.set_service(config[CONF_SERVICE]))
    if CONF_DATA in config:
        datas = [KeyValuePair(k, v) for k, v in config[CONF_DATA].items()]
        add(act.set_data(datas))
    if CONF_DATA_TEMPLATE in config:
        datas = [KeyValuePair(k, v) for k, v in config[CONF_DATA_TEMPLATE].items()]
        add(act.set_data_template(datas))
    if CONF_VARIABLES in config:
        datas = []
        for key, value in config[CONF_VARIABLES].items():
            # Each lambda must itself be resolved before use.
            for value_ in process_lambda(value, []):
                yield None
            datas.append(TemplatableKeyValuePair(key, value_))
        add(act.set_variables(datas))
    yield act
| StarcoderdataPython |
3331643 | """Common settings and globals."""
from os.path import abspath, basename, dirname, join, normpath
from os import environ
from sys import path
# ######### PATH CONFIGURATION
# Absolute filesystem path to the Django project directory:
DJANGO_ROOT = dirname(dirname(abspath(__file__)))
# Absolute filesystem path to the top-level project folder:
SITE_ROOT = dirname(DJANGO_ROOT)
PROJECT_ROOT = abspath(join(dirname(__file__), '../'))
# remove apps prefix
APPS_ROOT = join(PROJECT_ROOT, "apps")
# Put the apps directory on sys.path so apps can be imported without the
# "apps." prefix (guarded to avoid duplicate entries on re-import).
if APPS_ROOT not in path:
    path.insert(0, APPS_ROOT)
# Site name:
SITE_NAME = basename(DJANGO_ROOT)
GRAPPELLI_ADMIN_TITLE = "app"
# Add our project to our pythonpath, this way we don't need to type our project
# name in our dotted import paths:
path.append(DJANGO_ROOT)
# ######### END PATH CONFIGURATION
# ######### DEBUG CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#debug
DEBUG = False
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug
TEMPLATE_DEBUG = DEBUG
# ######### END DEBUG CONFIGURATION
# ######### MANAGER CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#admins
ADMINS = (
    ('app administrator',
     '<EMAIL>'),
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#managers
MANAGERS = ADMINS
# ######### END MANAGER CONFIGURATION
# ######### DATABASE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
# Placeholder values: the concrete engine/credentials are expected to be
# filled in by an environment-specific settings module that imports this one.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.',
        'NAME': '',
        'USER': '',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
    }
}
# NOTE(review): presumably consumed by a connection-pooling database backend
# (SQLAlchemy-style arguments) -- the consumer is not visible in this file.
DATABASE_POOL_ARGS = {
    'max_overflow': 7,
    'pool_size': 7,
    'recycle': 300,  # recycle connections after 300 seconds
}
# ######### END DATABASE CONFIGURATION
# ######### CELERY CONFIGURATION
#: Only add pickle to this list if your broker is secured
#: from unwanted access (see userguide/security.html)
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
# ######### END CELERY CONFIGURATION
# ######### REDIS CONFIGURATION
REDIS_HOST = 'redis://localhost:6379'
# ######### END REDIS CONFIGURATION
# ######### CACHE CONFIGURATION
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
        'LOCATION': '127.0.0.1:11211',
    }
}
# Default cache TTL: one hour (in seconds).
CACHE_TIMEOUT = 60 * 60
# ######### END CACHE CONFIGURATION
# ######### GENERAL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone
# None -> Django uses the server's local time zone.
TIME_ZONE = None
# See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code
LANGUAGE_CODE = 'en-us'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id
SITE_ID = 1
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n
USE_I18N = False
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n
USE_L10N = False
# See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz
USE_TZ = True
# Testing
TEST_RUNNER = 'testing.TestRunner'
# ######### END GENERAL CONFIGURATION
# ######### MEDIA CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = normpath(join(SITE_ROOT, 'media'))
# See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = '/media/'
# ######### END MEDIA CONFIGURATION
# ######### STATIC FILE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = join(PROJECT_ROOT, 'collected-static')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = '/static/'
# NOTE(review): custom flag, consumer not visible in this file -- presumably
# controls whether Django itself serves static files.  Confirm before removal.
SERVE_STATIC = False
# See: https://docs.djangoproject.com/en/dev/ref/\
# contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
    normpath(join(PROJECT_ROOT, 'static')),
)
# See: https://docs.djangoproject.com/en/dev/ref/\
# contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# ######### END STATIC FILE CONFIGURATION
# ######### SECRET CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
SECRET_KEY = r"h3zx^t61%h40@&+=wa&=ecf#3s)vo2v#suv9cem^0en1)2e%s2"
# ######### END SECRET CONFIGURATION
# ######### SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ('localhost', '127.0.0.1', )
# ######### END SITE CONFIGURATION
# ######### FIXTURE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/\
# settings/#std:setting-FIXTURE_DIRS
FIXTURE_DIRS = (
    normpath(join(PROJECT_ROOT, 'fixtures')),
)
# ######### END FIXTURE CONFIGURATION
# ######### TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/\
# settings/#template-context-processors
# NOTE(review): the `django.core.context_processors.*` paths are the
# pre-Django-1.8 locations, consistent with the TEMPLATE_* style settings
# used throughout this file.
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.debug',
    'django.core.context_processors.i18n',
    'django.core.context_processors.media',
    'django.core.context_processors.static',
    'django.core.context_processors.tz',
    'django.contrib.messages.context_processors.messages',
    'django.core.context_processors.request',
    'utils.context_processors.global_variables',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_DIRS = (
    normpath(join(PROJECT_ROOT, 'templates')),
)
# ######### END TEMPLATE CONFIGURATION
# ######### MIDDLEWARE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#middleware-classes
MIDDLEWARE_CLASSES = (
    # Default Django middleware.
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.middleware.gzip.GZipMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# ######### END MIDDLEWARE CONFIGURATION
# ######### URL CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf
ROOT_URLCONF = '%s.urls' % SITE_NAME
# ######### END URL CONFIGURATION
# ######### APP CONFIGURATION
DJANGO_APPS = (
    'utils',
    'grappelli',
    # Default Django apps:
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Useful template tags:
    # 'django.contrib.humanize',
    # Admin panel and documentation:
    'django.contrib.admin',
    # 'django.contrib.admindocs',
)
THIRD_PARTY_APPS = (
    'debug_toolbar',
    'django_nose',
    'chatterbox',
    'require',
    # Database migration helpers:
)
# Apps specific for this project go here.
LOCAL_APPS = (
)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# ######### END APP CONFIGURATION
# ######### LOGGING CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOG_FILENAME = '/var/log/django.log'
SEND_BROKEN_LINK_EMAILS = False
# Opt-in switch: set ENABLE_EMAIL_LOGGING=YES in the environment to also
# stream errors to stderr alongside the admin emails.
ENABLE_EMAIL_LOGGING = environ.get('ENABLE_EMAIL_LOGGING', 'NO') == 'YES'
# Handler lists shared by the loggers below.  The original used the legacy
# `cond and a or b` idiom; replaced with conditional expressions (identical
# results here, but safe even if the truthy branch were ever falsy).
_HANDLERS_MAIL = ['stream', 'mail_admins'] if ENABLE_EMAIL_LOGGING else ['mail_admins']
_HANDLERS_STREAM = ['stream', 'mail_admins'] if ENABLE_EMAIL_LOGGING else ['stream']
LOGGING = {
    'version': 1,
    'disable_existing_loggers': True,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        },
        # Rate-limits admin error emails to avoid mail storms.
        'ratelimit': {
            '()': 'utils.error_ratelimit_filter.RateLimitFilter',
        }
    },
    'formatters': {
        'verbose': {
            'format': '%(name)s %(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
        },
        'simple': {
            'format': '[%(levelname)s] %(asctime)s %(name)s.%(funcName)s %(message)s'
        },
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false', 'ratelimit'],
            'class': 'django.utils.log.AdminEmailHandler'
        },
        'stream': {
            'level': 'ERROR',
            'class': 'logging.StreamHandler',
            'formatter': 'simple',
        },
        'file': {
            'level': 'DEBUG',
            'class': 'logging.FileHandler',
            # `x and x or y` simplified to `x or y` (same semantics).
            'filename': LOG_FILENAME or '/dev/null',
            'formatter': 'verbose'
        }
    },
    'loggers': {
        # Root logger: catches everything not matched below.
        '': {
            'handlers': _HANDLERS_MAIL,
            'level': 'DEBUG',
            'propagate': False,
        },
        'requests': {
            'handlers': _HANDLERS_MAIL,
            'level': 'DEBUG',
            'propagate': False,
        },
        'oauthlib': {
            'handlers': _HANDLERS_MAIL,
            'level': 'DEBUG',
            'propagate': False,
        },
        # chatterbox always streams, even without email logging enabled.
        'chatterbox': {
            'handlers': _HANDLERS_STREAM,
            'level': 'DEBUG',
            'propagate': False,
        },
        'requests_oauthlib': {
            'handlers': _HANDLERS_MAIL,
            'level': 'DEBUG',
            'propagate': False,
        },
        'django.db': {
            'handlers': _HANDLERS_MAIL,
            'level': 'WARNING',
            'propagate': False,
        },
        'z.pool': {
            'handlers': _HANDLERS_MAIL,
            'level': 'WARNING',
            'propagate': False,
        },
    }
}
# ######### END LOGGING CONFIGURATION
# ######### WSGI CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application
WSGI_APPLICATION = 'wsgi.application'
# ######### END WSGI CONFIGURATION
# ######### SECURITY CONFIGURATION
# NOTE(review): custom flag, consumer not visible here -- presumably makes
# asset URLs use https in production settings.  Confirm before changing.
USE_HTTPS_FOR_ASSETS = False
# ######### END SECURITY CONFIGURATION
# ######### REQUIRE CONFIGURATION
# django-require / r.js optimizer settings.
# The baseUrl to pass to the r.js optimizer.
REQUIRE_BASE_URL = "js"
# The name of a build profile to use for your project, relative to REQUIRE_BASE_URL.
# A sensible value would be 'app.build.js'. Leave blank to use the built-in default build profile.
REQUIRE_BUILD_PROFILE = "app.build.js"
# The name of the require.js script used by your project, relative to REQUIRE_BASE_URL.
REQUIRE_JS = "vendor/require.js"
# A dictionary of standalone modules to build with almond.js.
# See the section on Standalone Modules, below.
REQUIRE_STANDALONE_MODULES = {}
# Whether to run django-require in debug mode.
REQUIRE_DEBUG = False
# A tuple of files to exclude from the compilation result of r.js.
REQUIRE_EXCLUDE = ("build.txt",)
# The execution environment in which to run r.js: node or rhino.
REQUIRE_ENVIRONMENT = "node"
# ######### END REQUIRE CONFIGURATION
# Gzip files uploaded via django-storages/S3.  NOTE(review): inferred from
# the setting name; S3 storage config is not visible in this file.
AWS_IS_GZIPPED = True
| StarcoderdataPython |
4874561 | __author__ = 'RAEON'
from time import time
class Cell(object):
    """A game cell: position, motion state, appearance and watcher list.

    Positions are dead-reckoned between server updates via
    update_interpolation() using the stored velocity/acceleration.
    """

    def __init__(self, id, x, y, size, color, virus, agitated, name):
        self.id = id
        self.x = x
        self.y = y
        # Extrapolated position, refreshed by update_interpolation().
        self.interpolated_x = x
        self.interpolated_y = y
        self.last_update = time()
        # Velocity and acceleration components (units per second).
        self.vx = 0.0
        self.vy = 0.0
        self.ax = 0.0
        self.ay = 0.0
        self.size = size
        self.color = color
        self.virus = virus
        self.agitated = agitated
        self.name = name
        # Objects observing this cell; see add/remove/has_watcher below.
        self.watchers = []
        self.owner = None
        self.timestamp = None

    def update_timestamp(self, timestamp):
        """Accept a strictly newer timestamp; return True if it advanced.

        Bug fix: self.timestamp starts as None and ``None < x`` raises
        TypeError on Python 3, so the first timestamp is accepted explicitly.
        """
        if self.timestamp is None or self.timestamp < timestamp:
            self.timestamp = timestamp
            return True
        return False

    def update_interpolation(self, current_time):
        """Extrapolate the position at current_time assuming constant
        acceleration since the last update (x + v*t + 0.5*a*t^2)."""
        t = current_time - self.last_update
        self.interpolated_x = self.x + self.vx*t + 0.5*self.ax*t*t
        self.interpolated_y = self.y + self.vy*t + 0.5*self.ay*t*t

    def add_watcher(self, watcher):
        """Register a watcher; return True if it was newly added."""
        if watcher not in self.watchers:
            self.watchers.append(watcher)
            return True
        return False

    def remove_watcher(self, watcher):
        """Unregister a watcher; return True if it was present."""
        if watcher in self.watchers:
            self.watchers.remove(watcher)
            return True
        return False

    def has_watcher(self, watcher):
        """Return True if `watcher` is currently registered."""
        return watcher in self.watchers

    def has_watchers(self):
        """Return True if at least one watcher is registered."""
        return len(self.watchers) > 0
| StarcoderdataPython |
1781463 | # -*- coding: utf-8 -*-
# Copyright (c) 2020-2021 <NAME>.
# All rights reserved.
# Licensed under BSD-3-Clause-Clear. See LICENSE file for details.
from django import forms
class OTPControleForm(forms.Form):
    """Form used to receive the one-time-password (OTP) code from the user.

    (User-facing messages are intentionally kept in Dutch.)
    """
    otp_code = forms.CharField(
        label='Code',
        min_length=6,
        max_length=6,
        required=True,
        widget=forms.TextInput(attrs={'autofocus': True, 'autocomplete': 'off'}))

    def is_valid(self):
        """Run the standard validation and additionally require the code
        to be numeric; returns True only if both checks pass.

        Bug fix: the original called ``super(forms.Form, self).is_valid()``,
        which skips forms.Form in the MRO; use the standard ``super()``.
        """
        valid = super().is_valid()
        if valid:
            otp_code = self.cleaned_data.get('otp_code')
            try:
                int(otp_code)
            except ValueError:
                # "Enter the required code"
                self.add_error(None, 'Voer de vereiste code in')
                valid = False
        else:
            # "The data is not accepted"
            self.add_error(None, 'De gegevens worden niet geaccepteerd')
        return valid
# end of file
| StarcoderdataPython |
import matplotlib.pyplot as plt
import numpy as np
# Reference data for a different filament colour, kept for comparison:
#x_length = [70, 90, 93.1, 115, 120, 125, 130, 135, 144] #black bars
#y_frequency = [1501, 936, 869, 587, 538, 488, 460, 429, 379] #measured frequency from black bars
#Fill in aquired lengths and frequency for your 13 bars.
x_length = [74.5, 79.2, 84.1, 86.8, 92.1, 97.8, 103.9, 107.1, 113.7, 120.9, 125, 133, 140] #white bars
y_frequency = [1371, 1219, 1072, 1020, 920, 820, 724, 671, 601, 545, 510, 451, 398] #measured frequency from white bars
# Dense x grid over the measured length range for plotting the fitted curve.
# Bug fix: was the redundant double assignment `xn = xn = np.linspace(...)`.
xn = np.linspace(70, 145, 750)
# Fit a 4th-degree polynomial frequency(length) to the measurements.
popt = np.polyfit(x_length, y_frequency, 4)
print(popt)
#yn_black = np.polyval(popt, xn)
#yn_white =0.9867*yn_black #factor for filament conversion, will differ between different filaments.
yn = np.polyval(popt, xn)
plt.plot(x_length, y_frequency, 'or')  # measurements as red dots
plt.plot(xn, yn)                       # fitted polynomial curve
plt.show()
| StarcoderdataPython |
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import gentools
# Allow running both as part of the `wasp` package and standalone from the
# source tree: fall back to top-level module names if the package import fails.
try:
    import wasp.settings as settings
    import wasp.xmlobject as xmlobject
except ImportError:
    import settings
    import xmlobject
import optparse
import re
import os.path
import sys
class _Writer(object):
def __init__(self, generated_from):
self.generated_from = generated_from
def preamble(self, outfile):
pass
def body(self, outfile):
pass
def postamble(self, outfile):
pass
class SettingsWriter(settings.SettingsFile, _Writer):
    """Writes the parsed settings file as a generated C header.

    (Python 2 code: uses `print >> file` statements.)
    """

    # Include-guard macro name for the generated header.
    H = "SETTINGS_GENERATED_H"

    def __init__(self, *args, **kwargs):
        settings.SettingsFile.__init__(self, *args, **kwargs)
        _Writer.__init__(self, kwargs["generated_from"])

    def _print_typedefs(self, outfile):
        # Function-pointer typedefs for runtime setting get/set callbacks.
        print >> outfile, "typedef bool_t (*SettingSetterCallback_t)(uint8_t chan, void *data);"
        print >> outfile, "typedef bool_t (*SettingGetterCallback_t)(uint8_t chan, void *data);"
        print >> outfile

    def _print_dynamic_settings(self, outfile):
        # Emit an ID #define (and optional type #define) for every
        # runtime-adjustable setting, tracking the min/max IDs seen.
        min_ = -1  # sentinel: no dynamic setting seen yet
        max_ = 0
        for sect in self.all_sections:
            for s in sect.settings:
                if s.dynamic:
                    print >> outfile, "#define %s %d" % (s.id_str, s.id)
                    if s.type:
                        print >> outfile, "#define SETTING_TYPE_%s %s" % (s.name, s.type_enum)
                    if min_ == -1:
                        min_ = s.id
                    max_ = max(s.id, max_)
        print >> outfile
        print >> outfile, "#define SETTING_ID_MIN %s" % min_
        print >> outfile, "#define SETTING_ID_MAX %d" % max_
        print >> outfile

    def _print_static_settings(self, outfile):
        # Emit a #define per compile-time setting; optionally also a
        # rational (NUM/DEN) approximation for fixed-point arithmetic.
        for sect in self.all_sections:
            for s in sect.settings:
                print >> outfile, "#define %s %s" % (s.name, s.value)
                if s.rational_approximation:
                    val,num,den,err,approx = s.get_rational_approximation()
                    print >> outfile, "/* Fraction %d bit approximation, %0.3f%% error " % (s.rational_approximation, err*100.0)
                    print >> outfile, " * %.10f ~= %d / %d " % (val, num, den)
                    print >> outfile, " * %.10f ~= %.10f */" % (val, approx)
                    print >> outfile, "#define %s_NUM %d" % (s.name, num)
                    print >> outfile, "#define %s_DEN %d" % (s.name, den)
                print >> outfile
        print >> outfile

    def preamble(self, outfile):
        gentools.print_header(self.H, generatedfrom=self.generated_from, outfile=outfile)
        print >> outfile, '#include "std.h"\n'

    def body(self, outfile):
        self._print_typedefs(outfile)
        self._print_dynamic_settings(outfile)
        self._print_static_settings(outfile)

    def postamble(self, outfile):
        gentools.print_footer(self.H, outfile=outfile)
class RSTWriter(settings.SettingsFile, _Writer, gentools.RSTHelper):
    """Writes the parsed settings file as reStructuredText documentation."""

    # NOTE(review): unlike SettingsWriter, _Writer.__init__ is never called
    # here, so self.generated_from is never set -- confirm it is unused for
    # RST output before relying on it.
    def __init__(self, *args, **kwargs):
        settings.SettingsFile.__init__(self, *args, **kwargs)

    def preamble(self, outfile):
        self.rst_write_header("Settings", outfile, level=0)
        print >> outfile
        self.rst_write_comment(outfile, "begin-body")
        print >> outfile

    def body(self, outfile):
        # Table of runtime-adjustable (dynamic) settings first...
        self.rst_write_header("Run-time Adjustable Settings", outfile, level=2)
        print >> outfile
        self.rst_write_table(outfile,
                "",
                ("Name","Type"),
                [(s.name, s.type) for s in self.all_settings if s.dynamic])
        print >> outfile
        # ...then one section per settings group, listing every setting
        # with its documentation, value and optional min/max range.
        self.rst_write_header("Compile-time Adjustable Settings", outfile, level=2)
        print >> outfile
        for sect in self.all_sections:
            self.rst_write_header(sect.name, outfile, level=3)
            print >> outfile
            for s in sect.settings:
                self.rst_write_list(outfile, s.name)
                print >> outfile
                if s.doc:
                    self.rst_write_list(outfile, "*%s*" % s.doc, 2)
                if s.type != None:
                    val_s = "Value: %s (%s)" % (s.value,s.type)
                else:
                    val_s = "Value: %s" % s.value
                self.rst_write_list(outfile, val_s, 2)
                if s.set:
                    self.rst_write_list(outfile, "Min: %s" % s.min, 2)
                    self.rst_write_list(outfile, "Max: %s" % s.max, 2)
        print >> outfile

    def postamble(self, outfile):
        self.rst_write_comment(outfile, "end-body")
        print >> outfile
if __name__ == "__main__":
OUTPUT_MODES = {
"header" : SettingsWriter,
"rst" : RSTWriter,
}
OUTPUT_MODES_DEFAULT = "header"
OUTPUT_MODES_LIST = ", ".join(OUTPUT_MODES)
parser = optparse.OptionParser()
parser.add_option("-s", "--settings",
default="settings.xml",
help="settings xml file", metavar="FILE")
parser.add_option("-f", "--format",
default=OUTPUT_MODES_DEFAULT,
help="output format: %s [default: %s]" % (OUTPUT_MODES_LIST, OUTPUT_MODES_DEFAULT))
options, args = parser.parse_args()
if not os.path.exists(options.settings):
parser.error("could not find settings.xml")
try:
klass = OUTPUT_MODES[options.format]
except KeyError:
parser.error("output mode must be one of %s" % OUTPUT_MODES_LIST)
try:
settings_path = os.path.abspath(options.settings)
except:
import traceback
parser.error("invalid xml\n%s" % traceback.format_exc())
f = sys.stdout
writer = klass(path=settings_path, generated_from=settings_path)
writer.preamble(outfile=f)
writer.body(outfile=f)
writer.postamble(outfile=f)
| StarcoderdataPython |
11202307 | import numpy as np
import pytest
import astropy.units as u
from ...constants import c, h
from ..quantum import (deBroglie_wavelength,
thermal_deBroglie_wavelength,
Fermi_energy,
Thomas_Fermi_length)
def test_deBroglie_wavelength():
    r"""Test the deBroglie_wavelength function in quantum.py."""
    # Known value for an electron at 2e7 cm/s.
    dbwavelength1 = deBroglie_wavelength(2e7*u.cm/u.s, 'e')
    assert np.isclose(dbwavelength1.value, 3.628845222852886e-11)
    assert dbwavelength1.unit == u.m
    # Zero velocity -> infinite wavelength.
    dbwavelength2 = deBroglie_wavelength(0*u.m/u.s, 'e')
    assert dbwavelength2 == np.inf*u.m
    # Array input: element-wise results, including the infinite case.
    V_array = np.array([2e5, 0])*u.m/u.s
    dbwavelength_arr = deBroglie_wavelength(V_array, 'e')
    assert np.isclose(dbwavelength_arr.value[0], 3.628845222852886e-11)
    assert dbwavelength_arr.value[1] == np.inf
    assert dbwavelength_arr.unit == u.m
    V_array = np.array([2e5, 2e5])*u.m/u.s
    dbwavelength_arr = deBroglie_wavelength(V_array, 'e')
    assert np.isclose(dbwavelength_arr.value[0], 3.628845222852886e-11)
    assert np.isclose(dbwavelength_arr.value[1], 3.628845222852886e-11)
    assert dbwavelength_arr.unit == u.m
    # Wavelength depends only on |v| and on mass (not sign or particle charge).
    assert deBroglie_wavelength(-5e5*u.m/u.s, 'p') == \
        deBroglie_wavelength(5e5*u.m/u.s, 'p')
    assert deBroglie_wavelength(-5e5*u.m/u.s, 'e+') == \
        deBroglie_wavelength(5e5*u.m/u.s, 'e')
    assert deBroglie_wavelength(1*u.m/u.s, 5*u.kg) == \
        deBroglie_wavelength(100*u.cm/u.s, 5000*u.g)
    # Superluminal speed must be rejected.
    with pytest.raises(ValueError):
        deBroglie_wavelength(c*1.000000001, 'e')
    # NOTE(review): pytest.raises(UserWarning) only passes if the function
    # actually *raises* the warning as an exception; warnings emitted via
    # warnings.warn would need pytest.warns -- confirm intended behaviour.
    with pytest.raises(UserWarning):
        deBroglie_wavelength(0.79450719277, 'Be-7 1+')
    with pytest.raises(u.UnitConversionError):
        deBroglie_wavelength(8*u.m/u.s, 5*u.m)
    with pytest.raises(ValueError):
        deBroglie_wavelength(8*u.m/u.s, 'sddsf')
# defining some plasma parameters for tests
T_e = 1 * u.eV          # electron temperature
n_e = 1e23 * u.cm**-3   # electron number density
# should probably change this to use unittest module
# add tests for numpy arrays as inputs
# add tests for different astropy units (random fuzzing method?)
def test_thermal_deBroglie_wavelength():
    r"""Test the thermal_deBroglie_wavelength function in quantum.py."""
    lambda_dbTh = thermal_deBroglie_wavelength(T_e)
    # true value at 1 eV
    lambda_dbTh_true = 6.919367518364532e-10
    # test a simple case for expected value
    expectStr = ("Thermal deBroglie wavelength at 1 eV should be "
                 f"{lambda_dbTh_true} and not {lambda_dbTh}")
    assert np.isclose(lambda_dbTh.value,
                      lambda_dbTh_true,
                      rtol=1e-15,
                      atol=0.0), expectStr
    # testing returned units
    assert lambda_dbTh.unit == u.m
    # testing exceptions
    with pytest.raises(TypeError):
        thermal_deBroglie_wavelength("Bad Input")
    # negative temperature is unphysical
    with pytest.raises(ValueError):
        thermal_deBroglie_wavelength(T_e=-1*u.eV)
def test_Fermi_energy():
    r"""Test the Fermi_energy function in quantum.py."""
    energy_F = Fermi_energy(n_e)
    # true value at 1e23 cm-3
    energy_F_true = 1.2586761116196002e-18
    # test a simple case for expected value
    expectStr = ("Fermi energy at 1e23 cm^-3 should be "
                 f"{energy_F_true} and not {energy_F}.")
    assert np.isclose(energy_F.value,
                      energy_F_true,
                      rtol=1e-15,
                      atol=0.0), expectStr
    # testing returned units
    assert energy_F.unit == u.J
    # testing exceptions
    with pytest.raises(TypeError):
        Fermi_energy("Bad Input")
    # negative density is unphysical
    with pytest.raises(ValueError):
        Fermi_energy(n_e=-1*u.m**-3)
def test_Thomas_Fermi_length():
    r"""Test the Thomas_Fermi_length function in quantum.py."""
    lambda_TF = Thomas_Fermi_length(n_e)
    # true value at 1e23 cm-3
    lambda_TF_true = 5.379914085596706e-11
    # test a simple case for expected value
    expectStr = ("Thomas-Fermi length at 1e23 cm^-3 should be "
                 f"{lambda_TF_true} and not {lambda_TF}.")
    assert np.isclose(lambda_TF.value,
                      lambda_TF_true,
                      rtol=1e-15,
                      atol=0.0), expectStr
    # testing returned units
    assert lambda_TF.unit == u.m
    # testing exceptions
    with pytest.raises(TypeError):
        Thomas_Fermi_length("Bad Input")
    # negative density is unphysical
    with pytest.raises(ValueError):
        Thomas_Fermi_length(n_e=-1*u.m**-3)
| StarcoderdataPython |
8110323 | <reponame>drabusov/py-orbit
## \namespace orbit::utils::fitting
##
## Classes:
## - PolynomialFit - fitting a Function or SplineCH instances with the plynomial
from orbit.utils.fitting.PolynomialFit import PolynomialFit
from orbit.utils.fitting.general_minimization.SimplexSearch import SimplexSearchAlgorithm
from orbit.utils.fitting.general_minimization.GoldenSectionSearch1D import GoldenSectionSearchAlgorithm
from orbit.utils.fitting.general_minimization.BisectionSearch1D import BisectionSearchAlgorithm
from orbit.utils.fitting.general_minimization.Solver import Solver
from orbit.utils.fitting.general_minimization.Solver import TrialPoint
from orbit.utils.fitting.general_minimization.Solver import SolveStopper
from orbit.utils.fitting.general_minimization.Solver import SolveStopperFactory
from orbit.utils.fitting.general_minimization.Solver import ScoreboardActionListener
from orbit.utils.fitting.general_minimization.Solver import VariableProxy
from orbit.utils.fitting.general_minimization.Solver import Scorer
from orbit.utils.fitting.general_minimization.Solver import SearchAgorithm
# Public API of the fitting package (re-exported names above).
# NOTE: "SearchAgorithm" matches the (misspelled) imported class name.
__all__ = [
    "PolynomialFit",
    "SimplexSearchAlgorithm",
    "GoldenSectionSearchAlgorithm",
    "BisectionSearchAlgorithm",
    "Solver",
    "TrialPoint",
    "SolveStopper",
    "SolveStopperFactory",
    "ScoreboardActionListener",
    "VariableProxy",
    "Scorer",
    "SearchAgorithm",
]
| StarcoderdataPython |
6704051 | import os
import torch
from ._trainer import Trainer
r"""
Base class for adversarial trainers.
Functions:
self.record_rob : function for recording standard accuracy and robust accuracy against FGSM, PGD, and GN.
"""
class AdvTrainer(Trainer):
    """Base class for adversarial trainers.

    Adds optional per-epoch recording of clean accuracy and robust accuracy
    against FGSM, PGD and Gaussian-noise (GN) attacks via record_rob().
    """

    def __init__(self, name, model):
        super(AdvTrainer, self).__init__(name, model)
        # Set to True by record_rob(); controls what _update_record logs.
        self._flag_record_rob = False

    def record_rob(self, train_loader, val_loader, eps, alpha, steps, std=0.1, n_limit=1000):
        # Extend the record header with the robustness columns.  The order
        # here must match the order values are appended in _update_record.
        self.record_keys += ['Clean(Tr)', 'FGSM(Tr)', 'PGD(Tr)', 'GN(Tr)',
                             'Clean(Val)', 'FGSM(Val)', 'PGD(Val)', 'GN(Val)',]
        self._flag_record_rob = True
        # Evaluate on at most n_limit training samples to keep logging cheap.
        self._train_loader_rob = self.get_sample_loader(train_loader, n_limit)
        self._val_loader_rob = val_loader
        # Attack hyper-parameters: eps (perturbation budget), alpha (PGD
        # step size), steps (PGD iterations), std (Gaussian-noise sigma).
        self._eps_rob = eps
        self._alpha_rob = alpha
        self._steps_rob = steps
        self._std_rob = std

    # Update Records
    def _update_record(self, records):
        if self._flag_record_rob:
            rob_records = []
            # For train loader then validation loader: clean, FGSM, PGD, GN
            # accuracy -- same order as the keys added in record_rob().
            for loader in [self._train_loader_rob, self._val_loader_rob]:
                rob_records.append(self.model.eval_accuracy(loader))
                rob_records.append(self.model.eval_rob_accuracy_fgsm(loader,
                                                                     eps=self._eps_rob, verbose=False))
                rob_records.append(self.model.eval_rob_accuracy_pgd(loader,
                                                                    eps=self._eps_rob,
                                                                    alpha=self._alpha_rob,
                                                                    steps=self._steps_rob,
                                                                    verbose=False))
                rob_records.append(self.model.eval_rob_accuracy_gn(loader,
                                                                   std=self._std_rob,
                                                                   verbose=False))
            # Column order must match self.record_keys; the learning rate
            # is always appended last.
            self.rm.add([*records,
                         *rob_records,
                         self.optimizer.param_groups[0]['lr']])
        else:
            self.rm.add([*records,
                         self.optimizer.param_groups[0]['lr']])

    def get_sample_loader(self, given_loader, n_limit):
        """Return a list of the first batches of `given_loader` containing
        at least n_limit samples (the last batch may overshoot the limit)."""
        final_loader = []
        num = 0
        for item in given_loader:
            final_loader.append(item)
            # Batches may be (inputs, targets) tuples/lists or bare tensors.
            if isinstance(item, tuple) or isinstance(item, list):
                batch_size = len(item[0])
            else:
                batch_size = len(item)
            num += batch_size
            if num >= n_limit:
                break
        return final_loader
# Python 2 script ("hangman" game): uses `print` statements and raw_input.
print '***JUEGO DEL AHORCADO***'
from random import*
from turtle import*
# 640x480 turtle window placed 640px from the left edge of the screen.
setup(640,480,640,0)
title('EL JUEGO DEL AHORCADO')
def dibujar_ahorcado(errores):
    """Draw one additional piece of the hangman for the given error count.

    Called once per wrong guess with the cumulative number of errors (1-9);
    error 9 also writes the losing message.
    """
    pensize(4)
    if errores==1:
        # base line
        goto(100,0)
    elif errores==2:
        # gallows post
        penup()
        goto(50,0)
        pendown()
        goto(50,150)
    elif errores==3:
        # crossbeam
        goto(120,150)
    elif errores==4:
        # rope
        penup()
        goto(100,150)
        pendown()
        goto(100,110)
    elif errores==5:
        # head
        penup()
        goto(100,90)
        pendown()
        circle(10)
    elif errores==6:
        # body
        goto(100,50)
    elif errores==7:
        # first leg
        goto(90,20)
    elif errores==8:
        # second leg
        penup()
        goto(100,50)
        pendown()
        goto(110,20)
    elif errores==9:
        # arms, then the losing message ("Perdiste" = "You lost")
        penup()
        goto(80,75)
        pendown()
        goto(120,75)
        penup()
        goto(50,-120)
        write('Perdiste!!!!!',False,'right',('arial',20,'bold italic'))
# Word bank (Spanish); a random word is chosen each game.
lista_palabras = ['universidad' ,'Python','oso','gaviota','iracundo','salsa','Peru','Paris','Deadpool','Avengers','meme','cultura','naturaleza','etopeya']
errores = 0
intentos = 9
palabra = choice(lista_palabras)
# One '_' placeholder per letter of the secret word.
espacio = ['_']*(len(palabra))
print ' '.join(espacio)
while intentos > 0:
    letra = raw_input('=>')
    acierto = False
    # Reveal every position where the guessed letter occurs.
    for i,l in enumerate(palabra):
        if l == letra:
            espacio[i] = l
            acierto = 1
    if acierto:
        print
        print '<NAME>!'
    else:
        # Wrong guess: spend an attempt and draw the next hangman piece.
        print
        print '<NAME>! Sigue intentando'
        intentos -= 1
        errores += 1
        dibujar_ahorcado(errores)
        print 'Te quedan ',intentos,' intentos ahora.'
    print ' '.join(espacio)
    # All letters revealed -> win.
    if '_' not in espacio:
        print 'Lo lograste!!'
        penup()
        goto(50,-120)
        write('Ganaste!!!!!',False,'right',('arial',20,'bold italic'))
        break
if intentos == 0:
    print
    print 'Lo sentimos se le acabaron los intentos'
    print 'La palabra era ' + palabra
| StarcoderdataPython |
79080 | <reponame>Triple-Z/Python-Crash-Course
# 8-3
def make_shirt(size, string):
    """Print a description of a shirt with the given size and printed text."""
    print('Size: {0}, String: {1}'.format(size, string))


make_shirt('M', 'Hello, World')
make_shirt(size='M', string='Hello, World again')
# 8-4
def make_shirt(size='L', string='I love Python'):
    """Print a shirt description, defaulting to a large 'I love Python' shirt."""
    print('Size: {0}, String: {1}'.format(size, string))


make_shirt()
make_shirt(size='M')
make_shirt(string='Hello, World')
# 8-5
def describe_city(name='guangzhou', country='china'):
    """Print which country a city is in; defaults to Guangzhou, China."""
    print('{0} is in {1}'.format(name.title(), country.title()))


describe_city('Nanjing')
describe_city(country='united states')
describe_city('palo alto', 'united states')
| StarcoderdataPython |
3220777 | '''
'''
from common.base import Base
class Cancel(Base):
    """Page action wrapper: clicks the "cancel order" element."""
    # Locator tuple (strategy, value) consumed by Base.click().
    # NOTE(review): the locator text is mojibake from a mis-decoded Chinese
    # string (presumably the "cancel order" link text); kept byte-for-byte
    # because it is runtime data -- verify against the actual page.
    cancel_loc = ('link text','ๅๆถ่ฎขๅ')

    def cancel(self):
        # Delegate to the Base helper with the stored locator.
        self.click(self.cancel_loc)
| StarcoderdataPython |
6476968 | #!/usr/bin/env python3
import functools
import sys
import qtutil
from PyQt5.QtGui import *
from .util.plugin import PluginDefault
from .util.plugin import WidgetDefault
from .util import project_functions as pfs
class Widget(QWidget, WidgetDefault):
    class Labels(WidgetDefault.Labels):
        # Key under which the example spinbox value is persisted in the
        # plugin parameter file (see setup_params / update_plugin_params).
        example_sb_label = "Example Spinbox"
        # todo: Define labels used as a key to save paramaters to file here
    class Defaults(WidgetDefault.Defaults):
        # Initial value for the example spinbox on first run (or reset).
        example_sb_default = 10
        # todo: Define default values for this plugin and its UI components here
    def __init__(self, project, plugin_position, parent=None):
        super(Widget, self).__init__(parent=parent)
        # Bail out early when constructed without a valid project/position
        # (e.g. when the plugin loader probes the class); skips all UI setup.
        if not project or not isinstance(plugin_position, int):
            return
        # todo: Define global attributes and UI components here
        # example: here a button and spin box are defined
        self.main_button = QPushButton('Custom Analysis')
        self.example_sb = QSpinBox()
        # note the call to WidgetDefault AFTER defining attributes
        WidgetDefault.__init__(self, project=project, plugin_position=plugin_position)
    def setup_ui(self):
        super().setup_ui()
        # todo: setup UI component layout and properties here
        # example: here the button is placed before the spinbox and the spinbox
        # is given a input max value of 1000
        # initially, before the spinbox a label is inserted which gets its text from Label class
        # NOTE(review): self.vbox is presumably the main layout created by
        # WidgetDefault.setup_ui() -- confirm in the base class.
        self.example_sb.setMaximum(1000)
        self.vbox.addWidget(QLabel(self.Labels.example_sb_label))
        self.vbox.addWidget(self.example_sb)
        self.vbox.addStretch()
        self.vbox.addWidget(self.main_button)
def setup_signals(self):
super().setup_signals()
# todo: Setup signals (i.e. what ui components do) here
# example: main button that activates execute_primary_function when clicked
self.main_button.clicked.connect(self.execute_primary_function)
def setup_params(self, reset=False):
super().setup_params(reset)
if len(self.params) == 1 or reset:
# todo: setup plugin paramaters (e.g. UI component starting values) initial values here
# in this example the default value for the spinbox is associated with the label and saved to file
self.update_plugin_params(self.Labels.example_sb_label, self.Defaults.example_sb_default)
# todo: setup where plugin paramaters get their values from
# in this example the example spinbox gets its value from the param dictionary
# which is used to access plugin paramaters saved to file
self.example_sb.setValue(self.params[self.Labels.example_sb_label])
def setup_param_signals(self):
super().setup_param_signals()
# todo: setup how paramaters (e.g. UI component values) are stored
# e.g. saving the value after a user changes the spinbox value, keeping it from resetting
self.example_sb.valueChanged[int].connect(functools.partial(self.update_plugin_params,
self.Labels.example_sb_label))
def execute_primary_function(self, input_paths=None):
'''Primary function of plugin'''
if not input_paths:
if not self.selected_videos:
return
else:
selected_videos = self.selected_videos
else:
selected_videos = input_paths
# use selected_videos which are the paths to stacks the user has selected or have been input from automation
qtutil.info('This is only a template. Use it to code from. \n'
'Value of spinbox is: ' + str(self.example_sb.value()))
# todo: insert functionality here
# refresh_list can be used to refresh an input list that will have particular indices selected,
# specified content type shown as well as only showing content after a particular plugin manipulation
pfs.refresh_list(self.project, self.video_list,
self.params[self.Labels.video_list_indices_label],
self.Defaults.list_display_type,
self.params[self.Labels.last_manips_to_display_label])
# return the output path(s) of this function for automation
# return output_paths
def setup_whats_this(self):
'''Setup custom help messages'''
# todo: setup custom help messages to aid the user, each tied to one of your UI components.
# See overridden method for an example
super().setup_whats_this()
class MyPlugin(PluginDefault):
    """Plugin wrapper that registers the template Widget under a display name."""
    def __init__(self, project, plugin_position):
        self.name = 'Empty Plugin'  # Define plugin name here
        self.widget = Widget(project, plugin_position)
        super().__init__(self.widget, self.widget.Labels, self.name)
    # todo: over-ride PluginDefault functions here to define custom behaviour
    # (required for automation)
if __name__ == '__main__':
    # Standalone entry point: show the plugin widget in a bare window for
    # manual testing (Widget(None, None) skips project-dependent setup).
    app = QApplication(sys.argv)
    app.aboutToQuit.connect(app.deleteLater)
    w = QMainWindow()
    w.setCentralWidget(Widget(None, None))
    w.show()
    # Propagate Qt's event-loop exit status; previously the return value of
    # exec_() was discarded and the process always exited with status 0.
    sys.exit(app.exec_())
| StarcoderdataPython |
8153496 | from models import (
familia,
denuncia,
caso,
crianca
) | StarcoderdataPython |
11212560 | <reponame>victorleal/Covid19-Twitter-Tracker
import os
import sys
sys.path.append("../..") # Adds higher directory to python modules path.
from functools import partial
from multiprocessing import cpu_count
from concurrent.futures import ProcessPoolExecutor
import time
from decouple import config
from ...database.conn import db
from ..processing.utils import divide_chunks
from .utils import run_hashtag, run_save_hashtag
if __name__ == "__main__":
    # Fan out Twitter query scraping across all CPU cores, then persist each
    # chunk's results inside a single DB transaction.
    n_posts_part = int(config("N_POSTS_PART", default=0))
    # Quoted phrases are exact-match queries; bare terms match in any order.
    # NOTE(review): strings such as "cabeรงa"/"diarrรฉia" look mojibake-damaged
    # (likely "cabeça"/"diarréia") -- confirm the file's original encoding
    # before changing, since these are live query strings.
    hashtags = [
        '"peguei covid"',
        '"peguei covid19"',
        '"peguei corona"',
        '"estou com covid"',
        '"estou com covid19"',
        '"estou com corona"',
        '"estou doente" covid',
        '"estou doente" covid19',
        '"estou doente" corona',
        '"dor de cabeรงa" febre',
        '"dor de cabeรงa" corona',
        '"dor de cabeรงa" covid',
        '"dor de cabeรงa" covid19',
        '"falta de ar" corona',
        '"falta de ar" covid',
        '"falta de ar" covid19',
        '"falta de ar"',
        '"dor de garganta" corona',
        '"dor de garganta" covid',
        '"dor de garganta" covid19',
        '"dor de garganta"',
        '"tosse, febre e coriza"',
        '"testei positivo" covid',
        '"testei positivo" corona',
        '"testei negativo" covid',
        '"testei negativo" corona',
    ]
    if n_posts_part == 1:
        # Extended, mostly unquoted query set used only for partition 1.
        hashtags += [
            "peguei covid",
            "estou com covid",
            "dor de cabeรงa febre",
            "dor de cabeรงa corona",
            "dor de cabeรงa covid",
            "dor de cabeรงa covid19",
            "diarrรฉia corona",
            "diarrรฉia covid",
            "diarrรฉia covid19",
            "febre corona",
            "febre covid",
            "febre covid19",
            "falta de ar corona",
            "falta de ar covid",
            "falta de ar covid19",
            "tosse corona",
            "tosse covid",
            "tosse covid19",
            "coriza corona",
            "coriza covid",
            "coriza covid19",
            "dor de garganta corona",
            "dor de garganta covid",
            "dor de garganta covid19",
            "febre",
            "falta de ar",
            "tosse",
            "coriza",
            "dor de garganta",
            "tosse febre coriza",
        ]
    # One worker per CPU core; queries are processed in chunks of that size.
    qtd = cpu_count()
    n_posts_2_extract = int(config("N_POSTS_TO_EXTRACT", default=1))
    with ProcessPoolExecutor(max_workers=qtd) as executor:
        for hashtags_ in divide_chunks(hashtags, qtd):
            start_time = time.time()
            contents = list(
                executor.map(
                    partial(run_hashtag, n_posts_2_extract), hashtags_, chunksize=1
                )
            )
            # os.system("pkill chrome")
            # os.system("pkill chromedriver")
            print(f"--- Load tweets took {round(time.time() - start_time, 2)}s ---")
            start_time = time.time()
            # Persist all of this chunk's tweets atomically.
            with db.atomic() as txn:
                salvos = list(executor.map(run_save_hashtag, contents, chunksize=25))
                txn.commit()
            print(f"--- Save tweets took {round(time.time() - start_time, 2)}s ---")
            for i, item in enumerate(salvos):
                print(
                    f"--- # of tweets for : {item['hashtag']} => {len(contents[i]['comments'])}/{item['salvos']}"
                )
| StarcoderdataPython |
5020737 | import logging
import boto3
from contextlib import closing
from django.conf import settings
from django.core.files.temp import gettempdir, NamedTemporaryFile
# Module-level AWS Polly client and logger shared by the helpers below.
client = boto3.client('polly')
logger = logging.getLogger(__name__)
def synthesize_speech_from_page(request, page):
    """
    Synthesize speech from a specific Wagtail page.

    Pages opt in by implementing ``get_speech_text``; other pages are
    ignored (the function then implicitly returns None). Returns False
    when the page yields no speech text.
    """
    if hasattr(page, 'get_speech_text'):
        logger.info("Synthesize speech for %s" % page)
        text = page.get_speech_text(request)
        if not text:
            logger.warning("No speech text found for %s" % page)
            return False
        # A leading <speak> tag marks the text as SSML rather than plain text.
        text_type = 'ssml' if text.startswith('<speak>') else 'text'
        output_format = getattr(
            settings, 'WAGTAIL_SPEECH_OUTPUT_FORMAT', 'mp3')
        response = client.synthesize_speech(
            OutputFormat=output_format,
            SampleRate=getattr(settings, 'WAGTAIL_SPEECH_SAMPLE_RATE', '8000'),
            Text=text,
            TextType=text_type,
            VoiceId=getattr(settings, 'WAGTAIL_SPEECH_VOICE_ID', 'Joey'),
        )
        # Store the synthesized audio on the page only if it has an
        # audio_stream field to receive it.
        if 'AudioStream' in response and hasattr(page, 'audio_stream'):
            with closing(response['AudioStream']) as stream:
                try:
                    temp_dir = gettempdir()
                    temp_file = NamedTemporaryFile(dir=temp_dir)
                    temp_file.write(stream.read())
                    # output_format[:3] derives the file extension --
                    # NOTE(review): maps 'ogg_vorbis' to 'ogg' and 'pcm' to
                    # 'pcm'; confirm this is the intended extension scheme.
                    page.audio_stream.save("%s.%s" % (
                        page.slug, output_format[:3]), temp_file)
                except IOError as error:
                    logger.error("An error occured: %s" % error)
def get_speech_text_from_stream_field(request, field, context=None):
    """Collect speech text from a StreamField.

    Walks the stream field's child blocks, asks each block implementing
    ``get_speech_text`` for its text, and joins the pieces with an SSML
    break tag inside a single ``<speak>`` envelope.
    """
    parts = [
        child.block.get_speech_text(child.value)
        for child in field
        if hasattr(child.block, 'get_speech_text')
    ]
    return "<speak>%s</speak>" % '<break strength=\"x-strong\"/>'.join(parts)
| StarcoderdataPython |
3571954 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# =============================================================================
"""infer_attributes_gat - Infer attributes of an architecture graph using
a graph attention network"""
# =============================================================================
# Imports
# =============================================================================
import os
import datetime
import numpy as np
import pandas as pd
import networkx as nx
import stellargraph as sg
import matplotlib.pyplot as plt
from sklearn.utils import shuffle
from sklearn.manifold import TSNE
from itertools import combinations
from stellargraph.layer import GAT
from sklearn.decomposition import PCA
from stellargraph.mapper import FullBatchNodeGenerator
from sklearn import feature_extraction, model_selection
from keras.callbacks import EarlyStopping, ModelCheckpoint
from keras import layers, optimizers, losses, metrics, Model
def infer_attributes_gat(Gnx, savepred=True, plot=False):
    """Infer the ``data`` (node type) attribute of an architecture graph
    using a graph attention network (GAT).

    Parameters
    ----------
    Gnx : networkx.DiGraph
        Architecture graph; every node must carry a ``data`` attribute
        holding its type label.
    savepred : bool, optional
        If True, write the prediction table to ``.temp/output`` as Excel.
    plot : bool, optional
        If True, plot training history and a 2-D projection of the node
        embeddings.

    Returns
    -------
    pandas.DataFrame
        One row per node with ``Predicted`` and ``True`` labels.
    """
    # Define node data
    feature_names = [
        "in_degree",
        "out_degree",
        # "in_degree_centrality",
        # "out_degree_centrality",
        # "closeness_centrality",
        # "betweenness_centrality",
        "clustering_coefficient",
        # "square_clustering",
        "core_number",
        # "pagerank",
        # "constraint",
        # "effective_size"
    ]
    node_type = [v for k, v in nx.get_node_attributes(Gnx, 'data').items()]
    d = {"node_type": node_type}
    # Each enabled metric is normalised by its maximum and binarised at 0.5
    # before being used as a node feature.
    if "in_degree" in feature_names:
        indeg = [v for k, v in Gnx.in_degree]
        indeg = np.divide(indeg, max(indeg))
        indeg[indeg>=0.5] = 1
        indeg[indeg<0.5] = 0
        d["in_degree"] = indeg
    if "out_degree" in feature_names:
        outdeg = [v for k, v in Gnx.out_degree]
        outdeg = np.divide(outdeg, max(outdeg))
        outdeg[outdeg>=0.5] = 1
        outdeg[outdeg<0.5] = 0
        d["out_degree"] = outdeg
    if "in_degree_centrality" in feature_names:
        indeg_cent = [v for k, v in nx.algorithms.in_degree_centrality(Gnx).items()]
        indeg_cent = np.divide(indeg_cent, max(indeg_cent))
        indeg_cent[indeg_cent>=0.5] = 1
        indeg_cent[indeg_cent<0.5] = 0
        d["in_degree_centrality"] = indeg_cent
    if "out_degree_centrality" in feature_names:
        outdeg_cent = [v for k, v in nx.algorithms.out_degree_centrality(Gnx).items()]
        outdeg_cent = np.divide(outdeg_cent, max(outdeg_cent))
        outdeg_cent[outdeg_cent>=0.5] = 1
        outdeg_cent[outdeg_cent<0.5] = 0
        d["out_degree_centrality"] = outdeg_cent
    if "closeness_centrality" in feature_names:
        close_cent = [v for k, v in nx.algorithms.closeness_centrality(Gnx).items()]
        close_cent = np.divide(close_cent, max(close_cent))
        close_cent[close_cent>=0.5] = 1
        close_cent[close_cent<0.5] = 0
        d["closeness_centrality"] = close_cent
    if "betweenness_centrality" in feature_names:
        between_cent = [v for k, v in nx.algorithms.betweenness_centrality(Gnx).items()]
        between_cent = np.divide(between_cent, max(between_cent))
        between_cent[between_cent>=0.5] = 1
        between_cent[between_cent<0.5] = 0
        d["betweenness_centrality"] = between_cent
    if "clustering_coefficient" in feature_names:
        clustering_co = [v for k, v in nx.algorithms.clustering(Gnx).items()]
        clustering_co = np.divide(clustering_co, max(clustering_co))
        clustering_co[clustering_co>=0.5] = 1
        clustering_co[clustering_co<0.5] = 0
        d["clustering_coefficient"] = clustering_co
    if "square_clustering" in feature_names:
        sq_clustering = [v for k, v in nx.algorithms.square_clustering(Gnx).items()]
        sq_clustering = np.divide(sq_clustering, max(sq_clustering))
        sq_clustering[sq_clustering>=0.5] = 1
        sq_clustering[sq_clustering<0.5] = 0
        d["square_clustering"] = sq_clustering
    if "core_number" in feature_names:
        core_number = [v for k, v in nx.algorithms.core_number(Gnx).items()]
        core_number = np.divide(core_number, max(core_number))
        core_number[core_number>=0.5] = 1
        core_number[core_number<0.5] = 0
        d["core_number"] = core_number
    if "pagerank" in feature_names:
        pagerank = [v for k, v in nx.algorithms.pagerank(Gnx).items()]
        pagerank = np.divide(pagerank, max(pagerank))
        pagerank[pagerank>=0.5] = 1
        pagerank[pagerank<0.5] = 0
        d["pagerank"] = pagerank
    if "constraint" in feature_names:
        constraint = [v for k, v in nx.algorithms.constraint(Gnx).items()]
        constraint = np.divide(constraint, max(constraint))
        constraint[np.isnan(constraint)] = 0
        constraint[constraint>=0.5] = 1
        constraint[constraint<0.5] = 0
        d["constraint"] = constraint
    if "effective_size" in feature_names:
        effective_size = [v for k, v in nx.algorithms.effective_size(Gnx).items()]
        effective_size = np.divide(effective_size, max(effective_size))
        effective_size[np.isnan(effective_size)] = 0
        effective_size[effective_size>=0.5] = 1
        effective_size[effective_size<0.5] = 0
        d["effective_size"] = effective_size
    # Bug fix: index by this graph's own node list; the original referenced
    # the module-level global ``nodes`` defined only under ``__main__``.
    node_data = pd.DataFrame(data=d, index=list(Gnx.nodes))
    node_data = shuffle(node_data)
    # Split the data 80/15/5 into train/validation/test sets.
    train_data, test_data = model_selection.train_test_split(node_data, train_size=int(0.80*len(Gnx)))
    val_data, test_data = model_selection.train_test_split(test_data, train_size=int(0.15*len(Gnx)))
    # Convert to numeric arrays (one-hot encode the node-type targets)
    target_encoding = feature_extraction.DictVectorizer(sparse=False)
    train_targets = target_encoding.fit_transform(train_data[["node_type"]].to_dict('records'))
    val_targets = target_encoding.transform(val_data[["node_type"]].to_dict('records'))
    test_targets = target_encoding.transform(test_data[["node_type"]].to_dict('records'))
    node_features = node_data[feature_names]
    # Create the GAT model in Keras
    G = sg.StellarDiGraph(Gnx, node_features=node_features)
    print(G.info())
    generator = FullBatchNodeGenerator(G)
    train_gen = generator.flow(train_data.index, train_targets)
    gat = GAT(
        layer_sizes=[8, train_targets.shape[1]],
        attn_heads=8,
        generator=generator,
        bias=True,
        in_dropout=0.5,
        attn_dropout=0.5,
        activations=["elu","softmax"],
        normalize=None,
    )
    # Expose the input and output tensors of the GAT model for node prediction, via GAT.node_model() method:
    x_inp, predictions = gat.node_model()
    # Train the model
    model = Model(inputs=x_inp, outputs=predictions)
    model.compile(
        optimizer=optimizers.Adam(lr=0.005),
        loss=losses.categorical_crossentropy,
        weighted_metrics=["acc"],
    )
    val_gen = generator.flow(val_data.index, val_targets)
    if not os.path.isdir(".temp/logs"):
        os.makedirs(".temp/logs")
    if not os.path.isdir(".temp/output"):
        os.makedirs(".temp/output")
    es_callback = EarlyStopping(
        monitor="val_weighted_acc",
        patience=100  # patience is the number of epochs to wait before early stopping in case of no further improvement
    )
    mc_callback = ModelCheckpoint(
        ".temp/logs/best_model.h5",
        monitor="val_weighted_acc",
        save_best_only=True,
        save_weights_only=True,
    )
    history = model.fit_generator(
        train_gen,
        epochs=2000,
        validation_data=val_gen,
        verbose=2,
        shuffle=False,  # this should be False, since shuffling data means shuffling the whole graph
        callbacks=[es_callback, mc_callback],
    )
    # Reload the saved weights
    model.load_weights(".temp/logs/best_model.h5")
    # Evaluate the best model in the test set
    test_gen = generator.flow(test_data.index, test_targets)
    test_metrics = model.evaluate_generator(test_gen)
    print("\nTest Set Metrics:")
    for name, val in zip(model.metrics_names, test_metrics):
        print("\t{}: {:0.4f}".format(name, val))
    # Make predictions with the model
    all_nodes = node_data.index
    all_gen = generator.flow(all_nodes)
    all_predictions = model.predict_generator(all_gen)
    node_predictions = target_encoding.inverse_transform(all_predictions)
    results = pd.DataFrame(node_predictions, index=G.nodes()).idxmax(axis=1)
    df = pd.DataFrame({"Predicted": results, "True": node_data['node_type']})
    # Bug fix: call head() -- printing the bound method displayed its repr,
    # not the data.
    print(df.head())
    if savepred:
        df.to_excel(".temp/output/output" + str(datetime.datetime.now()).replace(':','-') + ".xlsx")
    if plot:
        # Node embeddings
        emb_layer = model.layers[3]
        print("Embedding layer: {}, output shape {}".format(emb_layer.name, emb_layer.output_shape))
        embedding_model = Model(inputs=x_inp, outputs=emb_layer.output)
        emb = embedding_model.predict_generator(all_gen)
        X = emb
        y = np.argmax(target_encoding.transform(node_data.reindex(G.nodes())[["node_type"]].to_dict('records')), axis=1)
        # Bug fix: bind ``transform`` on both branches -- it was previously
        # unbound (NameError in plot_emb) when the embedding was already 2-D.
        transform = TSNE  # PCA
        if X.shape[1] > 2:
            trans = transform(n_components=2)
            emb_transformed = pd.DataFrame(trans.fit_transform(X), index=list(G.nodes()))
            emb_transformed['label'] = y
        else:
            emb_transformed = pd.DataFrame(X, index=list(G.nodes()))
            emb_transformed = emb_transformed.rename(columns = {'0':0, '1':1})
            # Bug fix: plot_emb() below reads the 'label' column on this
            # branch as well.
            emb_transformed['label'] = y
        def plot_emb(transform, emb_transformed):
            # Scatter the 2-D embedding coloured by true node type.
            fig, ax = plt.subplots(figsize=(7,7))
            ax.scatter(emb_transformed[0], emb_transformed[1], c=emb_transformed['label'].astype("category"),
                       cmap="jet", alpha=0.7)
            ax.set(aspect="equal", xlabel="$X_1$", ylabel="$X_2$")
            plt.title('{} visualization of GAT embeddings for the fighter graph'.format(transform.__name__))
        # Plot the training history
        def remove_prefix(text, prefix):
            return text[text.startswith(prefix) and len(prefix):]
        def plot_history(history):
            metrics = sorted(set([remove_prefix(m, "val_") for m in list(history.history.keys())]))
            for m in metrics:
                # summarize history for metric m
                plt.figure()
                plt.plot(history.history[m])
                plt.plot(history.history['val_' + m])
                plt.title(m)
                plt.ylabel(m)
                plt.xlabel('epoch')
                plt.legend(['train', 'validation'], loc='best')
        plot_history(history)
        plot_emb(transform, emb_transformed)
        plt.show()
    return df
if __name__ == '__main__':
    # Load the architecture graph
    graphname = "arch_1"
    Gnx = nx.read_graphml("dataset/" + graphname + ".graphml")
    nodes = list(Gnx.nodes)
    # Force the first and last nodes to the 'Package' type.
    # NOTE(review): Graph.node was removed in networkx >= 2.4; this requires
    # an older networkx (newer releases use Gnx.nodes[...]).
    Gnx.node[nodes[0]]['data'] = 'Package'
    Gnx.node[nodes[-1]]['data'] = 'Package'
    # Increase number of input data points
    # Gnx = nx.disjoint_union_all([Gnx, Gnx, Gnx, Gnx, Gnx, Gnx, Gnx, Gnx, Gnx, Gnx])
    # Call the inference function
df = infer_attributes_gat(Gnx, savepred=True, plot=True) | StarcoderdataPython |
4838367 | import numpy as np
from ...core.utils import as_id_array
from ...graph.structured_quad.structured_quad import StructuredQuadGraphTopology
from . import _neighbors_at_link
def neighbors_at_link(shape, links):
    """Get neighbor links.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    links : array of int
        Link IDs for which neighbors are sought.

    Returns
    -------
    (len(links), 4) ndarray of int
        IDs of the neighboring links; -1 where a link has no neighbor on
        that side.

    Examples
    --------
    >>> import numpy as np
    >>> from landlab.components.overland_flow._links import neighbors_at_link

    >>> neighbors_at_link((3, 2), np.arange(7)) # doctest: +NORMALIZE_WHITESPACE
    array([[-1, 3, -1, -1],
           [ 2, 4, -1, -1], [-1, 5, 1, -1],
           [-1, 6, -1, 0],
           [ 5, 7, -1, 1], [-1, -1, 4, 2],
           [-1, -1, -1, 3]])
    """
    links = np.asarray(links, dtype=int)
    # Pre-fill with -1; the cython kernel writes neighbor IDs in place.
    out = np.full((links.size, 4), -1, dtype=int)
    _neighbors_at_link.neighbors_at_link(links, shape, out)
    return out
def vertical_link_ids(shape):
    """Vertical links in a structured quad grid.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.

    Returns
    -------
    (M, N) ndarray :
        Array of link IDs, one row per gap between node rows.

    Examples
    --------
    >>> from landlab.components.overland_flow._links import vertical_link_ids

    >>> vertical_link_ids((3, 4))
    array([[ 3,  4,  5,  6],
           [10, 11, 12, 13]])
    """
    n_rows, n_cols = shape
    topology = StructuredQuadGraphTopology(shape)
    return topology.vertical_links.reshape((n_rows - 1, n_cols))
def horizontal_link_ids(shape):
    """Horizontal links in a structured quad grid.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.

    Returns
    -------
    (M, N) ndarray :
        Array of link IDs, one row per node row.

    Examples
    --------
    >>> from landlab.components.overland_flow._links import horizontal_link_ids

    >>> horizontal_link_ids((3, 4))
    array([[ 0,  1,  2],
           [ 7,  8,  9],
           [14, 15, 16]])
    """
    n_rows, n_cols = shape
    topology = StructuredQuadGraphTopology(shape)
    return topology.horizontal_links.reshape((n_rows, n_cols - 1))
def vertical_south_link_neighbor(shape, vertical_ids, bad_index_value=-1):
    """Link IDs of south, vertical link neighbor.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    vertical_ids : array of int
        Array of all vertical link ids - MUST BE ARRAY OF LEN(VERTICAL_LINKS)
    bad_index_value: int, optional
        Value assigned to inactive indicies in the array.

    Returns
    -------
    ndarray :
        Link IDs of *south* vertical neighbor links. Length of
        number_of_vertical_links.

    Examples
    --------
    The following example uses this grid::

        *       *       *       *       *
        ^       ^       ^       ^       ^
       22      23      24      25      26
        |       |       |       |       |
        *       *       *       *       *
        ^       ^       ^       ^       ^
       13      14      15      16      17
        |       |       |       |       |
        *       *       *       *       *
        ^       ^       ^       ^       ^
        4       5       6       7       8
        |       |       |       |       |
        *       *       *       *       *

    .. note::

        Only vertical links are shown. When no neighbor is found,
        bad_index_value is returned.

        ``*`` indicates nodes

        Numeric values correspond to the vertical IDs.

    >>> from landlab import RasterModelGrid
    >>> from landlab.components.overland_flow._links import (
    ...     vertical_link_ids, vertical_south_link_neighbor
    ... )

    >>> rmg = RasterModelGrid((4, 5))
    >>> vertical_links = vertical_link_ids(rmg.shape)
    >>> vertical_south_link_neighbor(rmg.shape, vertical_links)
    ... # doctest: +NORMALIZE_WHITESPACE
    array([-1, -1, -1, -1, -1,
            4,  5,  6,  7,  8,
           13, 14, 15, 16, 17])
    """
    # NOTE(review): ``vertical_ids`` is not consulted; the ids are recomputed
    # from the grid topology -- confirm that is intended.
    vertical_links = StructuredQuadGraphTopology(shape).vertical_links
    # Shift everything one node-row up: a link's south neighbor sits
    # shape[1] positions earlier in the flat ordering.
    vertical_links[shape[1] :] = vertical_links[: -shape[1]]
    # Links in the bottom row have no southern neighbor.
    vertical_links[: shape[1]] = bad_index_value
    return vertical_links
def vertical_west_link_neighbor(shape, vertical_ids, bad_index_value=-1):
    """Link IDs of west, vertical link neighbor.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    vertical_ids : array of int
        Array of all vertical link ids- MUST BE ARRAY OF LEN(VERTICAL_LINKS)
    bad_index_value: int, optional
        Value assigned to inactive indicies in the array.

    Returns
    -------
    ndarray :
        Link IDs of *west* vertical neighbor links. Length of
        number_of_vertical_links.

    Examples
    --------
    The following example uses this grid::

        *       *       *       *       *
        ^       ^       ^       ^       ^
       22      23      24      25      26
        |       |       |       |       |
        *       *       *       *       *
        ^       ^       ^       ^       ^
       13      14      15      16      17
        |       |       |       |       |
        *       *       *       *       *
        ^       ^       ^       ^       ^
        4       5       6       7       8
        |       |       |       |       |
        *       *       *       *       *

    .. note::

        Only vertical links are shown. When no neighbor is found,
        bad_index_value is returned.

        ``*`` indicates nodes

        Numeric values correspond to the vertical IDs.

    >>> from landlab import RasterModelGrid
    >>> from landlab.components.overland_flow._links import vertical_link_ids, vertical_west_link_neighbor

    >>> rmg = RasterModelGrid((4, 5))
    >>> vertical_links = vertical_link_ids(rmg.shape)
    >>> vertical_west_link_neighbor(rmg.shape, vertical_links)
    ... # doctest: +NORMALIZE_WHITESPACE
    array([-1,  4,  5,  6,  7,
           -1, 13, 14, 15, 16,
           -1, 22, 23, 24, 25])
    """
    # Work row-by-row: reshape to (node rows - 1, node columns).
    vertical_links = StructuredQuadGraphTopology(shape).vertical_links.reshape(
        (shape[0] - 1, shape[1])
    )
    # Shift each row one column right; the west neighbor is the previous id.
    vertical_links[:, 1:] = vertical_links[:, :-1]
    # The leftmost column has no western neighbor.
    vertical_links[:, 0] = bad_index_value
    return vertical_links.reshape(-1)
def vertical_north_link_neighbor(shape, vertical_ids, bad_index_value=-1):
    """Link IDs of north, vertical link neighbor.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    vertical_ids : array of int
        Array of all vertical link ids- MUST BE ARRAY OF LEN(VERTICAL_LINKS)
    bad_index_value: int, optional
        Value assigned to inactive indicies in the array.

    Returns
    -------
    ndarray :
        Link IDs of *north* vertical neighbor links. Length of
        number_of_vertical_links.

    Examples
    --------
    The following example uses this grid::

        *       *       *       *       *
        ^       ^       ^       ^       ^
       22      23      24      25      26
        |       |       |       |       |
        *       *       *       *       *
        ^       ^       ^       ^       ^
       13      14      15      16      17
        |       |       |       |       |
        *       *       *       *       *
        ^       ^       ^       ^       ^
        4       5       6       7       8
        |       |       |       |       |
        *       *       *       *       *

    .. note::

        Only vertical links are shown. When no neighbor is found,
        bad_index_value is returned.

        ``*`` indicates nodes

        Numeric values correspond to the vertical IDs.

    >>> from landlab import RasterModelGrid
    >>> from landlab.components.overland_flow._links import vertical_link_ids, vertical_north_link_neighbor

    >>> rmg = RasterModelGrid((4, 5))
    >>> vertical_ids = vertical_link_ids(rmg.shape)
    >>> vertical_north_link_neighbor(rmg.shape, vertical_ids)
    ... # doctest: +NORMALIZE_WHITESPACE
    array([13, 14, 15, 16, 17,
           22, 23, 24, 25, 26,
           -1, -1, -1, -1, -1])
    """
    vertical_links = StructuredQuadGraphTopology(shape).vertical_links
    # Shift everything one node-row down: a link's north neighbor sits
    # shape[1] positions later in the flat ordering.
    vertical_links[: -shape[1]] = vertical_links[shape[1] :]
    # Links in the top row have no northern neighbor.
    vertical_links[-shape[1] :] = bad_index_value
    return vertical_links
def vertical_east_link_neighbor(shape, vertical_ids, bad_index_value=-1):
    """Link IDs of east, vertical link neighbor.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    vertical_ids : array of int
        Array of all vertical link ids - MUST BE ARRAY OF LEN(VERTICAL_LINKS)
    bad_index_value: int, optional
        Value assigned to inactive indicies in the array.

    Returns
    -------
    ndarray :
        Link IDs of *east* vertical neighbor links. Length of
        number_of_vertical_links.

    Examples
    --------
    The following example uses this grid::

        *       *       *       *       *
        ^       ^       ^       ^       ^
       22      23      24      25      26
        |       |       |       |       |
        *       *       *       *       *
        ^       ^       ^       ^       ^
       13      14      15      16      17
        |       |       |       |       |
        *       *       *       *       *
        ^       ^       ^       ^       ^
        4       5       6       7       8
        |       |       |       |       |
        *       *       *       *       *

    .. note::

        Only vertical links are shown. When no neighbor is found,
        bad_index_value is returned.

        ``*`` indicates nodes

        Numeric values correspond to the vertical IDs.

    >>> from landlab import RasterModelGrid
    >>> from landlab.components.overland_flow._links import vertical_link_ids, vertical_east_link_neighbor

    >>> rmg = RasterModelGrid((4, 5))
    >>> vertical_links = vertical_link_ids(rmg.shape)
    >>> vertical_east_link_neighbor(rmg.shape, vertical_links)
    ... # doctest: +NORMALIZE_WHITESPACE
    array([ 5,  6,  7,  8, -1,
           14, 15, 16, 17, -1,
           23, 24, 25, 26, -1])
    """
    # Work row-by-row: reshape to (node rows - 1, node columns).
    vertical_links = StructuredQuadGraphTopology(shape).vertical_links.reshape(
        (shape[0] - 1, shape[1])
    )
    # Shift each row one column left; the east neighbor is the next id.
    vertical_links[:, :-1] = vertical_links[:, 1:]
    # The rightmost column has no eastern neighbor.
    vertical_links[:, -1] = bad_index_value
    # Fix: flatten explicitly rather than returning ``.base`` (the view's
    # backing array), which is fragile and inconsistent with
    # vertical_west_link_neighbor; the returned values are identical.
    return vertical_links.reshape(-1)
def active_link_ids(shape, node_status):
    """Get active links.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    node_status : array_link
        Status of nodes in grid.

    Returns
    -------
    ndarray :
        Links IDs at the active links.

    Examples
    --------
    >>> from landlab.grid import RasterModelGrid
    >>> from landlab.components.overland_flow._links import active_link_ids

    >>> rmg = RasterModelGrid((3, 4))
    >>> rmg.set_closed_boundaries_at_grid_edges(True, True, True, True)

    >>> status = rmg.status_at_node
    >>> status # doctest: +NORMALIZE_WHITESPACE
    array([4, 4, 4, 4,
           4, 0, 0, 4,
           4, 4, 4, 4], dtype=uint8)

    >>> active_link_ids((3, 4), status)
    array([8])
    """
    # Boolean mask over every link, then keep the indices that are True.
    active_mask = is_active_link(shape, node_status)
    (active_ids,) = np.where(active_mask)
    return as_id_array(active_ids)
def is_active_link(shape, node_status):
    """Link IDs of active links.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    node_status : array_link
        Status of nodes in grid.

    Returns
    -------
    ndarray :
        Links IDs at the active links.

    Examples
    --------
    >>> from landlab.components.overland_flow._links import is_active_link
    >>> from landlab.grid.nodestatus import NodeStatus

    >>> status = [
    ...     [NodeStatus.CLOSED, NodeStatus.CLOSED, NodeStatus.CLOSED],
    ...     [NodeStatus.CLOSED, NodeStatus.CORE, NodeStatus.CLOSED],
    ...     [NodeStatus.CLOSED, NodeStatus.CORE, NodeStatus.CLOSED],
    ...     [NodeStatus.CLOSED, NodeStatus.CLOSED, NodeStatus.CLOSED],
    ... ]
    >>> is_active_link((4, 3), status) # doctest: +NORMALIZE_WHITESPACE
    array([False, False,
           False, False, False,
           False, False,
           False,  True, False,
           False, False,
           False, False, False,
           False, False], dtype=bool)
    """
    # Local import deliberately shadows this function's name: the per-link
    # status logic is delegated to the grid-level helper.
    from ...grid.linkstatus import is_active_link
    node_status = np.asarray(node_status).reshape(-1)
    if np.prod(shape) != node_status.size:
        raise ValueError(
            "node status array does not match size of grid "
            "(%d != %d)" % (np.prod(shape), len(node_status))
        )
    # status_at_link_start = node_status.flat[node_id_at_link_start(shape)]
    # status_at_link_end = node_status.flat[node_id_at_link_end(shape)]
    # status_at_link = node_status[StructuredQuadGraphTopology(shape).nodes_at_link]
    # Gather the (tail, head) node status for every link, then classify.
    return is_active_link(node_status[StructuredQuadGraphTopology(shape).nodes_at_link])
def vertical_active_link_ids(shape, active_ids, bad_index_value=-1):
    """ID of vertical active links.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    active_ids : array of int
        Array of all active link ids
    bad_index_value: int, optional
        Value assigned to inactive indicies in the array.

    Returns
    -------
    ndarray :
        Link IDs at the VERTICAL active links. Length of
        number_of_vertical_links.

    Examples
    --------
    The following example uses this grid::

        *---I-->*---I-->*---I-->*---I-->*
        ^       ^       ^       ^       ^
        I       I       I       I       I
        |       |       |       |       |
        *---I-->o---H-->o---H-->o---I-->*
        ^       ^       ^       ^       ^
        I       6       7       8       I
        |       |       |       |       |
        *---I-->o---H-->o---H-->o---I-->*
        ^       ^       ^       ^       ^
        I       I       I       I       I
        |       |       |       |       |
        *---I-->*---I-->*---I-->*---I-->*

    .. note::

        ``*`` indicates the nodes that are set to `NodeStatus.CLOSED`

        ``o`` indicates the nodes that are set to `NodeStatus.CORE`

        ``I`` indicates the links that are set to `LinkStatus.INACTIVE`

        ``H`` indicates horizontal active ids, which are ignored by this
        function

        Numeric values correspond to the vertical `LinkStatus.ACTIVE`  IDs.

    >>> from landlab import RasterModelGrid
    >>> from landlab.components.overland_flow._links import (active_link_ids,
    ...     vertical_active_link_ids)

    >>> rmg = RasterModelGrid((4, 5))
    >>> active_ids = active_link_ids((4, 5), rmg.status_at_node)
    >>> active_ids # doctest: +NORMALIZE_WHITESPACE
    array([ 5,  6,  7,
            9, 10, 11, 12,
           14, 15, 16,
           18, 19, 20, 21,
           23, 24, 25])

    >>> vertical_active_link_ids((4, 5), active_ids)
    ... # doctest: +NORMALIZE_WHITESPACE
    array([-1,  5,  6,  7, -1,
           -1, 14, 15, 16, -1,
           -1, 23, 24, 25, -1])

    >>> rmg.set_closed_boundaries_at_grid_edges(True, True, True, True)
    >>> status = rmg.status_at_node
    >>> active_ids = active_link_ids((4, 5), status)

    >>> vertical_active_link_ids((4, 5), active_ids)
    ... # doctest: +NORMALIZE_WHITESPACE
    array([-1, -1, -1, -1, -1,
           -1, 14, 15, 16, -1,
           -1, -1, -1, -1, -1])
    """
    number_of_vertical_links = (shape[0] - 1) * shape[1]
    # Start from all "bad" and fill in only the active vertical links.
    out = np.full(number_of_vertical_links, bad_index_value, dtype=int)
    vertical_ids = active_ids[np.where(is_vertical_link(shape, active_ids))]
    # Place each active link id at its position within the vertical ordering.
    out[nth_vertical_link(shape, vertical_ids)] = vertical_ids
    return out
def _number_of_links(shape):
"""Number of links in a structured quad grid.
Parameters
----------
shape : tuple of int
Shape of grid of nodes.
Returns
-------
int :
Number of links in grid.
Examples
--------
>>> from landlab.components.overland_flow._links import _number_of_links
>>> _number_of_links((3, 4))
17
"""
return (shape[0] - 1) * shape[1] + shape[0] * (shape[1] - 1)
# return number_of_vertical_links(shape) + number_of_horizontal_links(shape)
def number_of_vertical_links(shape):
    """Number of vertical links in a structured quad grid.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.

    Returns
    -------
    int :
        Number of vertical links in grid.

    Examples
    --------
    >>> from landlab.components.overland_flow._links import number_of_vertical_links
    >>> number_of_vertical_links((3, 4))
    8
    """
    n_rows, n_cols = shape
    # One vertical link per node column in each gap between node rows.
    return (n_rows - 1) * n_cols
def number_of_horizontal_links(shape):
    """Number of horizontal links in a structured quad grid.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.

    Returns
    -------
    int :
        Number of horizontal links in grid.

    Examples
    --------
    >>> from landlab.components.overland_flow._links import number_of_horizontal_links
    >>> number_of_horizontal_links((3, 4))
    9
    """
    n_rows, n_cols = shape
    # One horizontal link per gap between node columns in every node row.
    return n_rows * (n_cols - 1)
def is_vertical_link(shape, links):
    """Test if links are vertical.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    links : array of int
        Array of link ids to test.

    Returns
    -------
    ndarray of bool
        `True` for links that are vertical.

    Examples
    --------
    >>> from landlab.components.overland_flow._links import (is_vertical_link,
    ...     _number_of_links)
    >>> import numpy as np
    >>> shape = (3, 4)
    >>> links = np.arange(_number_of_links(shape))
    >>> is_vertical_link(shape, links) # doctest: +NORMALIZE_WHITESPACE
    array([False, False, False,  True,  True,  True,  True,
           False, False, False,  True,  True,  True,  True,
           False, False, False], dtype=bool)
    """
    # Links repeat in rows of (horizontal run + vertical run); within each
    # row the vertical links occupy positions >= shape[1] - 1.
    links_per_row = 2 * shape[1] - 1
    in_range = links < _number_of_links(shape)
    return in_range & ((links % links_per_row) >= shape[1] - 1)
def nth_vertical_link(shape, links):
    """Convert link ID to vertical link ID.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    links : array of int
        Array of link ids to test.

    Returns
    -------
    ndarray of int
        The link ID as the nth vertical links.

    Examples
    --------
    >>> from landlab.components.overland_flow._links import nth_vertical_link
    >>> shape = (3, 4)
    >>> nth_vertical_link(shape, 4)
    1
    >>> nth_vertical_link(shape, (3, 4, 11))
    array([0, 1, 5])
    """
    links = np.asarray(links, dtype=int)
    links_per_row = 2 * shape[1] - 1
    # Row index within the vertical ordering, then the column offset inside
    # that row (vertical links start at position shape[1] - 1 of each row).
    row = links // links_per_row
    offset = links % links_per_row - (shape[1] - 1)
    return as_id_array(row * shape[1] + offset)
def horizontal_active_link_ids(shape, active_ids, bad_index_value=-1):
    """ID of horizontal active links.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    active_ids : array of int
        Array of all active link ids
    bad_index_value: int, optional
        Value assigned to inactive indicies in the array.

    Returns
    -------
    ndarray :
        Link IDs at the HORIZONTAL active links. Length of
        number_of_horizontal_links.

    Examples
    --------
    The following example uses this grid::

        *---I-->*---I-->*---I-->*---I-->*
        ^       ^       ^       ^       ^
        I       I       I       I       I
        |       |       |       |       |
        *---I-->o--24-->o--25-->o---I-->*
        ^       ^       ^       ^       ^
        I       V       V       V       I
        |       |       |       |       |
        *---I-->o--20-->o--21-->o---I-->*
        ^       ^       ^       ^       ^
        I       I       I       I       I
        |       |       |       |       |
        *---I-->*---I-->*---I-->*---I-->*

    .. note::

        ``*`` indicates the nodes that are set to `NodeStatus.CLOSED`

        ``o`` indicates the nodes that are set to `NodeStatus.CORE`

        ``I`` indicates the links that are set to `LinkStatus.INACTIVE`

        ``V`` indicates vertical active ids, which are ignored by this
        function.

        Numeric values correspond to the horizontal `LinkStatus.ACTIVE` ID.

    >>> from landlab import RasterModelGrid
    >>> from landlab.components.overland_flow._links import (active_link_ids,
    ...     horizontal_active_link_ids)

    >>> rmg = RasterModelGrid((4, 5))
    >>> rmg.set_closed_boundaries_at_grid_edges(True, True, True, True)

    >>> status = rmg.status_at_node
    >>> status # doctest: +NORMALIZE_WHITESPACE
    array([4, 4, 4, 4, 4,
           4, 0, 0, 0, 4,
           4, 0, 0, 0, 4,
           4, 4, 4, 4, 4], dtype=uint8)
    >>> active_ids = active_link_ids((4,5), status)

    >>> horizontal_active_link_ids((4,5), active_ids)
    ... # doctest: +NORMALIZE_WHITESPACE
    array([-1, -1, -1, -1,
           -1, 10, 11, -1,
           -1, 19, 20, -1,
           -1, -1, -1, -1])
    """
    number_of_horizontal_links = shape[0] * (shape[1] - 1)
    # Start from all "bad" and fill in only the active horizontal links.
    out = np.full(number_of_horizontal_links, bad_index_value, dtype=int)
    horizontal_ids = active_ids[np.where(~is_vertical_link(shape, active_ids))]
    # Place each active link id at its position within the horizontal ordering.
    out[nth_horizontal_link(shape, horizontal_ids)] = horizontal_ids
    return out
def nth_horizontal_link(shape, links):
    """Convert link ID to horizontal link ID.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    links : array of int
        Array of link ids to test.

    Returns
    -------
    ndarray of int
        The link ID as the nth horizontal links.

    Examples
    --------
    >>> from landlab.components.overland_flow._links import nth_horizontal_link
    >>> shape = (3, 4)
    >>> nth_horizontal_link(shape, 16)
    8
    >>> nth_horizontal_link(shape, (1, 7, 8))
    array([1, 3, 4])
    """
    links = np.asarray(links, dtype=int)
    # Each node row contributes (cols - 1) horizontal plus cols vertical links.
    links_per_row = 2 * shape[1] - 1
    row = links // links_per_row
    offset = links % links_per_row
    return as_id_array(row * (shape[1] - 1) + offset)
def is_horizontal_link(shape, links):
    """Test if a link is horizontal.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    links : array of int
        Array of link ids to test.

    Returns
    -------
    ndarray of bool
        `True` for links that are horizontal.

    Examples
    --------
    >>> from landlab.components.overland_flow._links import (is_horizontal_link,
    ...     _number_of_links)
    >>> import numpy as np
    >>> shape = (3, 4)
    >>> links = np.arange(_number_of_links(shape))
    >>> is_horizontal_link(shape, links) # doctest: +NORMALIZE_WHITESPACE
    array([ True,  True,  True, False, False, False, False,
            True,  True,  True, False, False, False, False,
            True,  True,  True], dtype=bool)
    """
    # A link is horizontal when it is in range and not vertical.
    in_range = links < _number_of_links(shape)
    return np.logical_and(np.logical_not(is_vertical_link(shape, links)), in_range)
def horizontal_west_link_neighbor(shape, horizontal_ids, bad_index_value=-1):
    """ID of west, horizontal link neighbor.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    horizontal_ids : array of int
        Array of all horizontal link ids - *must be of len(horizontal_links)*.
    bad_index_value : int, optional
        Value assigned to links that have no west neighbor.

    Returns
    -------
    ndarray :
        Link IDs of west horizontal neighbor links. Length of
        number_of_horizontal_links.

    Examples
    --------
    The following example uses this grid::

        *--27-->*--28-->*--29-->*--30-->*
        *--18-->*--19-->*--20-->*--21-->*
        *---9-->*--10-->*--11-->*--12-->*
        *---0-->*---1-->*---2-->*---3-->*

    .. note::
        Only horizontal links are shown. When no neighbor is found,
        bad_index_value is returned.  ``*`` indicates nodes.

    >>> from landlab import RasterModelGrid
    >>> from landlab.components.overland_flow._links import (horizontal_link_ids,
    ...     horizontal_west_link_neighbor)
    >>> rmg = RasterModelGrid((4, 5))
    >>> horizontal_links = horizontal_link_ids(rmg.shape).flatten()
    >>> horizontal_west_link_neighbor(rmg.shape, horizontal_links)
    array([-1,  0,  1,  2, -1,  9, 10, 11, -1, 18, 19, 20, -1, 27, 28, 29])
    """
    grid = horizontal_ids.reshape((shape[0], shape[1] - 1))
    neighbors = np.empty_like(grid)
    # Shift each row one column east: slot i then holds the id west of i.
    neighbors[:, 1:] = grid[:, :-1]
    # The west-most column has no west neighbor.
    neighbors[:, 0] = bad_index_value
    return neighbors.reshape(-1)
def horizontal_east_link_neighbor(shape, horizontal_ids, bad_index_value=-1):
    """IDs of east, horizontal link neighbor.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    horizontal_ids : array of int
        Array of all horizontal link ids - *must be of len(horizontal_links)*.
    bad_index_value : int, optional
        Value assigned to links that have no east neighbor.

    Returns
    -------
    ndarray :
        Link IDs of east horizontal neighbor links. Length of
        number_of_horizontal_links.

    Examples
    --------
    The following example uses this grid::

        *--27-->*--28-->*--29-->*--30-->*
        *--18-->*--19-->*--20-->*--21-->*
        *---9-->*--10-->*--11-->*--12-->*
        *---0-->*---1-->*---2-->*---3-->*

    .. note::
        Only horizontal links are shown. When no neighbor is found,
        bad_index_value is returned.  ``*`` indicates nodes.

    >>> from landlab import RasterModelGrid
    >>> from landlab.components.overland_flow._links import (horizontal_link_ids,
    ...     horizontal_east_link_neighbor)
    >>> rmg = RasterModelGrid((4, 5))
    >>> horizontal_links = horizontal_link_ids(rmg.shape).flatten()
    >>> horizontal_east_link_neighbor(rmg.shape, horizontal_links)
    array([ 1,  2,  3, -1, 10, 11, 12, -1, 19, 20, 21, -1, 28, 29, 30, -1])
    """
    links = np.roll(horizontal_ids.reshape((shape[0], shape[1] - 1)), -1, axis=1)
    # Bug fix: the east-most column previously received a hard-coded -1,
    # silently ignoring a caller-supplied bad_index_value (all sibling
    # *_link_neighbor functions honor it).
    links[:, -1] = bad_index_value
    return links.reshape(-1)
def horizontal_north_link_neighbor(shape, horizontal_ids, bad_index_value=-1):
    """ID of north, horizontal link neighbor.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    horizontal_ids : array of int
        Array of all horizontal link ids - *must be of len(horizontal_links)*.
    bad_index_value : int, optional
        Value assigned to links that have no north neighbor.

    Returns
    -------
    ndarray :
        Link IDs of north horizontal neighbor links. Length of
        number_of_horizontal_links.

    Examples
    --------
    The following example uses this grid::

        *--27-->*--28-->*--29-->*--30-->*
        *--18-->*--19-->*--20-->*--21-->*
        *---9-->*--10-->*--11-->*--12-->*
        *---0-->*---1-->*---2-->*---3-->*

    .. note::
        Only horizontal links are shown. When no neighbor is found,
        bad_index_value is returned.  ``*`` indicates nodes.

    >>> from landlab import RasterModelGrid
    >>> from landlab.components.overland_flow._links import (horizontal_link_ids,
    ...     horizontal_north_link_neighbor)
    >>> rmg = RasterModelGrid((4, 5))
    >>> horizontal_links = horizontal_link_ids(rmg.shape).flatten()
    >>> horizontal_north_link_neighbor(rmg.shape, horizontal_links)
    array([ 9, 10, 11, 12, 18, 19, 20, 21, 27, 28, 29, 30, -1, -1, -1, -1])
    """
    grid = horizontal_ids.reshape((shape[0], shape[1] - 1))
    neighbors = np.empty_like(grid)
    # Shift rows down by one: slot r then holds the id from row r+1 (its north).
    neighbors[:-1, :] = grid[1:, :]
    # The top row has no north neighbor.
    neighbors[-1, :] = bad_index_value
    return neighbors.reshape(-1)
def horizontal_south_link_neighbor(shape, horizontal_ids, bad_index_value=-1):
    """ID of south horizontal link neighbor.

    Parameters
    ----------
    shape : tuple of int
        Shape of grid of nodes.
    horizontal_ids : array of int
        Array of all horizontal link ids *must be of len(horizontal_links)*.
    bad_index_value : int, optional
        Value assigned to links that have no south neighbor.

    Returns
    -------
    ndarray :
        Link IDs of south horizontal neighbor links. Length of
        number_of_horizontal_links.

    Examples
    --------
    The following example uses this grid::

        *--27-->*--28-->*--29-->*--30-->*
        *--18-->*--19-->*--20-->*--21-->*
        *---9-->*--10-->*--11-->*--12-->*
        *---0-->*---1-->*---2-->*---3-->*

    .. note::
        Only horizontal links are shown. When no neighbor is found,
        bad_index_value is returned.  ``*`` indicates nodes.

    >>> from landlab import RasterModelGrid
    >>> from landlab.components.overland_flow._links import (horizontal_link_ids,
    ...     horizontal_south_link_neighbor)
    >>> rmg = RasterModelGrid((4, 5))
    >>> horizontal_links = horizontal_link_ids(rmg.shape).flatten()
    >>> horizontal_south_link_neighbor(rmg.shape, horizontal_links)
    array([-1, -1, -1, -1,  0,  1,  2,  3,  9, 10, 11, 12, 18, 19, 20, 21])
    """
    grid = horizontal_ids.reshape((shape[0], shape[1] - 1))
    neighbors = np.empty_like(grid)
    # Shift rows up by one: slot r then holds the id from row r-1 (its south).
    neighbors[1:, :] = grid[:-1, :]
    # The bottom row has no south neighbor.
    neighbors[0, :] = bad_index_value
    return neighbors.reshape(-1)
| StarcoderdataPython |
167605 | <gh_stars>10-100
# Copyright (c) 2012 <NAME> <<EMAIL>>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of mosquitto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
This is an MQTT v3.1 client module. MQTT is a lightweight pub/sub messaging
protocol that is easy to implement and suitable for low powered devices.
"""
import errno
import random
import select
import socket
import ssl
import struct
import sys
import threading
import time
# The MQTT v3.1 protocol name sent in the CONNECT packet.  Python 3 requires
# bytes here; Python 2 used a plain str.
if sys.version_info[0] < 3:
    PROTOCOL_NAME = "MQIsdp"
else:
    PROTOCOL_NAME = b"MQIsdp"
PROTOCOL_VERSION = 3
# Message types (high nibble of the fixed-header command byte)
CONNECT = 0x10
CONNACK = 0x20
PUBLISH = 0x30
PUBACK = 0x40
PUBREC = 0x50
PUBREL = 0x60
PUBCOMP = 0x70
SUBSCRIBE = 0x80
SUBACK = 0x90
UNSUBSCRIBE = 0xA0
UNSUBACK = 0xB0
PINGREQ = 0xC0
PINGRESP = 0xD0
DISCONNECT = 0xE0
# Log levels (bit flags passed to the on_log callback)
MOSQ_LOG_INFO = 0x01
MOSQ_LOG_NOTICE = 0x02
MOSQ_LOG_WARNING = 0x04
MOSQ_LOG_ERR = 0x08
MOSQ_LOG_DEBUG = 0x10
# CONNACK result codes returned by the broker
CONNACK_ACCEPTED = 0
CONNACK_REFUSED_PROTOCOL_VERSION = 1
CONNACK_REFUSED_IDENTIFIER_REJECTED = 2
CONNACK_REFUSED_SERVER_UNAVAILABLE = 3
CONNACK_REFUSED_BAD_USERNAME_PASSWORD = 4
CONNACK_REFUSED_NOT_AUTHORIZED = 5
# Connection state
mosq_cs_new = 0
mosq_cs_connected = 1
mosq_cs_disconnecting = 2
mosq_cs_connect_async = 3
# Message direction
mosq_md_invalid = 0
mosq_md_in = 1
mosq_md_out = 2
# Message state (QoS handshake progress)
# Bug fix: this line previously read "mosq_ms_invalid = 0," -- the trailing
# comma made the constant the one-element tuple (0,) instead of the int 0,
# inconsistent with every other state constant.
mosq_ms_invalid = 0
mosq_ms_wait_puback = 1
mosq_ms_wait_pubrec = 2
mosq_ms_wait_pubrel = 3
mosq_ms_wait_pubcomp = 4
# Error values returned by the client API
MOSQ_ERR_AGAIN = -1
MOSQ_ERR_SUCCESS = 0
MOSQ_ERR_NOMEM = 1
MOSQ_ERR_PROTOCOL = 2
MOSQ_ERR_INVAL = 3
MOSQ_ERR_NO_CONN = 4
MOSQ_ERR_CONN_REFUSED = 5
MOSQ_ERR_NOT_FOUND = 6
MOSQ_ERR_CONN_LOST = 7
MOSQ_ERR_TLS = 8
MOSQ_ERR_PAYLOAD_SIZE = 9
MOSQ_ERR_NOT_SUPPORTED = 10
MOSQ_ERR_AUTH = 11
MOSQ_ERR_ACL_DENIED = 12
MOSQ_ERR_UNKNOWN = 13
MOSQ_ERR_ERRNO = 14
def _fix_sub_topic(subtopic):
    """Collapse repeated '/' separators in *subtopic*.

    Converts ////some////over/slashed///topic/etc/etc// into
    some/over/slashed/topic/etc/etc, keeping a single leading '/' when the
    input starts with one.
    """
    # Bug fix: the original indexed subtopic[0] unconditionally and raised
    # IndexError for an empty string; return it unchanged instead.
    if not subtopic:
        return subtopic
    collapsed = '/'.join(filter(None, subtopic.split('/')))
    if subtopic[0] == '/':
        return '/' + collapsed
    return collapsed
def error_string(mosq_errno):
    """Return the error string associated with a mosquitto error number."""
    descriptions = {
        MOSQ_ERR_SUCCESS: "No error.",
        MOSQ_ERR_NOMEM: "Out of memory.",
        MOSQ_ERR_PROTOCOL: "A network protocol error occurred when communicating with the broker.",
        MOSQ_ERR_INVAL: "Invalid function arguments provided.",
        MOSQ_ERR_NO_CONN: "The client is not currently connected.",
        MOSQ_ERR_CONN_REFUSED: "The connection was refused.",
        MOSQ_ERR_NOT_FOUND: "Message not found (internal error).",
        MOSQ_ERR_CONN_LOST: "The connection was lost.",
        MOSQ_ERR_TLS: "A TLS error occurred.",
        MOSQ_ERR_PAYLOAD_SIZE: "Payload too large.",
        MOSQ_ERR_NOT_SUPPORTED: "This feature is not supported.",
        MOSQ_ERR_AUTH: "Authorisation failed.",
        MOSQ_ERR_ACL_DENIED: "Access denied by ACL.",
        MOSQ_ERR_ERRNO: "Error defined by errno.",
    }
    # MOSQ_ERR_UNKNOWN and any unrecognised code share the same fallback.
    return descriptions.get(mosq_errno, "Unknown error.")
def connack_string(connack_code):
    """Return the string associated with a CONNACK result."""
    replies = {
        0: "Connection Accepted.",
        1: "Connection Refused: unacceptable protocol version.",
        2: "Connection Refused: identifier rejected.",
        3: "Connection Refused: broker unavailable.",
        4: "Connection Refused: bad user name or password.",
        5: "Connection Refused: not authorised.",
    }
    return replies.get(connack_code, "Connection Refused: unknown reason.")
def topic_matches_sub(sub, topic):
    """Check whether a topic matches a subscription.

    For example:
    foo/bar would match the subscription foo/# or +/bar
    non/matching would not match the subscription non/+/+

    Walks the two strings with independent cursors (spos/tpos), consuming
    matching characters and expanding '+' and '#' wildcards in the
    subscription as it goes.
    """
    result = True
    # Normalise both strings so duplicated '/' separators cannot break the walk.
    local_sub = _fix_sub_topic(sub)
    local_topic = _fix_sub_topic(topic)
    multilevel_wildcard = False
    slen = len(local_sub)
    tlen = len(local_topic)
    # spos/tpos are the current character positions in sub and topic.
    spos = 0;
    tpos = 0;
    while spos < slen and tpos < tlen:
        if local_sub[spos] == local_topic[tpos]:
            # Literal characters match: advance both cursors in lockstep.
            spos += 1
            tpos += 1
        else:
            if local_sub[spos] == '+':
                # '+' matches exactly one topic level: skip to the next '/'.
                spos += 1
                while tpos < tlen and local_topic[tpos] != '/':
                    tpos += 1
                # Both strings exhausted at the same time means a full match.
                if tpos == tlen and spos == slen:
                    result = True
                    break
            elif local_sub[spos] == '#':
                # '#' matches the remainder, but only if it is the final
                # character of the subscription ("a/#b" is invalid).
                multilevel_wildcard = True
                if spos+1 != slen:
                    result = False
                    break
                else:
                    result = True
                    break
            else:
                # Mismatched literal characters: no match.
                result = False
                break
        if tpos == tlen-1:
            # Check for e.g. foo matching foo/#
            if spos == slen-3 and local_sub[spos+1] == '/' and local_sub[spos+2] == '#':
                result = True
                multilevel_wildcard = True
                break
    # Without a '#', any leftover characters on either side mean no match.
    if multilevel_wildcard == False and (tpos < tlen or spos < slen):
        result = False
    return result
class MosquittoMessage:
    """An MQTT message, as delivered to the on_message callback.

    Members:
    topic : String. topic that the message was published on.
    payload : String/bytes the message payload.
    qos : Integer. The message Quality of Service 0, 1 or 2.
    retain : Boolean. If true, the message is a retained message and not fresh.
    mid : Integer. The message id.
    """
    def __init__(self):
        # Public, callback-visible fields.
        self.mid = 0
        self.topic = ""
        self.payload = None
        self.qos = 0
        self.retain = False
        self.dup = False
        # Internal bookkeeping for retry timing and the QoS state machine.
        self.timestamp = 0
        self.direction = mosq_md_invalid
        self.state = mosq_ms_invalid
class MosquittoInPacket:
    """Internal datatype: decode state for the packet currently being read."""
    def __init__(self):
        self.command = 0           # fixed-header command byte
        self.have_remaining = 0    # whether the remaining-length field is decoded
        self.remaining_count = []  # raw remaining-length bytes read so far
        self.remaining_mult = 1    # multiplier for the next remaining-length byte
        self.remaining_length = 0  # decoded remaining-length value
        self.to_process = 0        # payload bytes still expected from the socket
        self.pos = 0               # read cursor within `packet`
        self.packet = b""          # accumulated payload bytes
    def cleanup(self):
        """Reset to the freshly-constructed state, ready for the next packet."""
        self.__init__()
class MosquittoPacket:
    """Internal datatype: a fully-encoded outgoing packet queued for writing."""
    def __init__(self, command, packet, mid, qos):
        self.command = command          # fixed-header command byte
        self.mid = mid                  # message id (0 for packets without one)
        self.qos = qos                  # QoS of the associated message
        self.packet = packet            # encoded bytes to send
        self.to_process = len(packet)   # bytes still unwritten
        self.pos = 0                    # write cursor within `packet`
class Mosquitto:
"""MQTT version 3.1 client class.
This is the main class for use communicating with an MQTT broker.
General usage flow:
* Use connect()/connect_async() to connect to a broker
* Call loop() frequently to maintain network traffic flow with the broker
* Or use loop_start() to set a thread running to call loop() for you.
* Or use loop_forever() to handle calling loop() for you in a blocking
* function.
* Use subscribe() to subscribe to a topic and receive messages
* Use publish() to send messages
* Use disconnect() to disconnect from the broker
Data returned from the broker is made available with the use of callback
functions as described below.
Callbacks
=========
A number of callback functions are available to receive data back from the
broker. To use a callback, define a function and then assign it to the
client:
def on_connect(mosq, userdata, rc):
print("Connection returned " + str(rc))
client.on_connect = on_connect
All of the callbacks as described below have a "mosq" and an "userdata"
argument. "mosq" is the Mosquitto instance that is calling the callback.
"userdata" is user data of any type and can be set when creating a new client
instance or with user_data_set(userdata).
The callbacks:
on_connect(mosq, userdata, rc): called when the broker responds to our connection
request. The value of rc determines success or not:
0: Connection successful
1: Connection refused - incorrect protocol version
2: Connection refused - invalid client identifier
3: Connection refused - server unavailable
4: Connection refused - bad username or password
5: Connection refused - not authorised
6-255: Currently unused.
on_disconnect(mosq, userdata, rc): called when the client disconnects from the broker.
The rc parameter indicates the disconnection state. If MOSQ_ERR_SUCCESS
(0), the callback was called in response to a disconnect() call. If any
other value the disconnection was unexpected, such as might be caused by
a network error.
on_message(mosq, userdata, message): called when a message has been received on a
topic that the client subscribes to. The message variable is a
MosquittoMessage that describes all of the message parameters.
on_publish(mosq, userdata, mid): called when a message that was to be sent using the
publish() call has completed transmission to the broker. For messages
with QoS levels 1 and 2, this means that the appropriate handshakes have
completed. For QoS 0, this simply means that the message has left the
client. The mid variable matches the mid variable returned from the
corresponding publish() call, to allow outgoing messages to be tracked.
This callback is important because even if the publish() call returns
success, it does not always mean that the message has been sent.
on_subscribe(mosq, userdata, mid, granted_qos): called when the broker responds to a
subscribe request. The mid variable matches the mid variable returned
from the corresponding subscribe() call. The granted_qos variable is a
list of integers that give the QoS level the broker has granted for each
of the different subscription requests.
on_unsubscribe(mosq, userdata, mid): called when the broker responds to an unsubscribe
request. The mid variable matches the mid variable returned from the
corresponding unsubscribe() call.
on_log(mosq, userdata, level, buf): called when the client has log information. Define
to allow debugging. The level variable gives the severity of the message
and will be one of MOSQ_LOG_INFO, MOSQ_LOG_NOTICE, MOSQ_LOG_WARNING,
MOSQ_LOG_ERR, and MOSQ_LOG_DEBUG. The message itself is in buf.
"""
def __init__(self, client_id="", clean_session=True, userdata=None):
    """client_id is the unique client id string used when connecting to the
    broker. If client_id is zero length or None, then one will be randomly
    generated. In this case, clean_session must be True. If this is not the
    case a ValueError will be raised.

    clean_session is a boolean that determines the client type. If True,
    the broker will remove all information about this client when it
    disconnects. If False, the client is a persistent client and
    subscription information and queued messages will be retained when the
    client disconnects.

    Note that a client will never discard its own outgoing messages on
    disconnect. Calling connect() or reconnect() will cause the messages to
    be resent. Use reinitialise() to reset a client to its original state.

    userdata is user defined data of any type that is passed as the
    "userdata" parameter to callbacks. It may be updated at a later point
    with the user_data_set() function.
    """
    if not clean_session and (client_id == "" or client_id is None):
        raise ValueError('A client id must be provided if clean session is False.')
    self._userdata = userdata
    self._sock = None
    self._keepalive = 60
    self._message_retry = 20
    self._last_retry_check = 0
    self._clean_session = clean_session
    if client_id == "" or client_id is None:
        # Bug fix: the docstring promises a random id for None as well as "",
        # but the original only generated one for "" and stored None verbatim.
        # Also fixed the charset typo "ADCDEF" -> the full hex alphabet.
        self._client_id = "mosq/" + "".join(
            random.choice("0123456789ABCDEF") for x in range(23 - 5))
    else:
        self._client_id = client_id
    self._username = ""
    self._password = ""
    # Packet currently being read, and the queue of packets waiting to be sent.
    self._in_packet = MosquittoInPacket()
    self._out_packet = []
    self._current_out_packet = None
    # Keepalive bookkeeping: time of last inbound/outbound traffic and of the
    # outstanding PINGREQ (0 when none is pending).
    self._last_msg_in = time.time()
    self._last_msg_out = time.time()
    self._ping_t = 0
    self._last_mid = 0
    self._state = mosq_cs_new
    # Messages with QoS>0 awaiting their acknowledgement handshake.
    self._messages = []
    # Will (last testament) settings, configured via will_set().
    self._will = False
    self._will_topic = ""
    self._will_payload = None
    self._will_qos = 0
    self._will_retain = False
    # User-assignable callbacks.
    self.on_disconnect = None
    self.on_connect = None
    self.on_publish = None
    self.on_message = None
    self.on_subscribe = None
    self.on_unsubscribe = None
    self.on_log = None
    self._host = ""
    self._port = 1883
    self._in_callback = False
    self._strict_protocol = False
    # Locks guarding state shared between the network loop and the API.
    self._callback_mutex = threading.Lock()
    self._state_mutex = threading.Lock()
    self._out_packet_mutex = threading.Lock()
    self._current_out_packet_mutex = threading.Lock()
    self._msgtime_mutex = threading.Lock()
    self._thread = None
    self._thread_terminate = False
    # TLS settings, configured via tls_set().
    self._ssl = None
    self._tls_certfile = None
    self._tls_keyfile = None
    self._tls_ca_certs = None
    self._tls_cert_reqs = None
    self._tls_ciphers = None
def __del__(self):
    # Nothing to release on garbage collection; sockets are closed explicitly
    # via disconnect()/reinitialise().
    pass
def reinitialise(self, client_id="", clean_session=True, userdata=None):
    """Close any open network connection and reset the client to its
    freshly-constructed state, as if Mosquitto() had just been called with
    the given arguments."""
    # Close whichever transport is active (TLS wrapper takes precedence).
    transport = self._ssl if self._ssl else self._sock
    if transport:
        transport.close()
    self._ssl = None
    self._sock = None
    # __init__ rebuilds every attribute from scratch.
    self.__init__(client_id, clean_session, userdata)
def tls_set(self, ca_certs, certfile=None, keyfile=None, cert_reqs=ssl.CERT_REQUIRED, tls_version=ssl.PROTOCOL_TLSv1, ciphers=None):
"""Configure network encryption and authentication options. Enables SSL/TLS support.
ca_certs : a string path to the Certificate Authority certificate files
that are to be treated as trusted by this client. If this is the only
option given then the client will operate in a similar manner to a web
browser. That is to say it will require the broker to have a
certificate signed by the Certificate Authorities in ca_certs and will
communicate using TLS v1, but will not attempt any form of
authentication. This provides basic network encryption but may not be
sufficient depending on how the broker is configured.
certfile and keyfile are strings pointing to the PEM encoded client
certificate and private keys respectively. If these arguments are not
None then they will be used as client information for TLS based
authentication. Support for this feature is broker dependent. Note
that if either of these files in encrypted and needs a password to
decrypt it, Python will ask for the password at the command line. It is
not currently possible to define a callback to provide the password.
cert_reqs allows the certificate requirements that the client imposes
on the broker to be changed. By default this is ssl.CERT_REQUIRED,
which means that the broker must provide a certificate. See the ssl
pydoc for more information on this parameter.
tls_version allows the version of the SSL/TLS protocol used to be
specified. By default TLS v1 is used. Previous versions (all versions
beginning with SSL) are possible but not recommended due to possible
security problems.
ciphers is a string specifying which encryption ciphers are allowable
for this connection, or None to use the defaults. See the ssl pydoc for
more information.
Must be called before connect() or connect_async()."""
if sys.version < '2.7':
raise ValueError('Python 2.7 is the minimum supported version for TLS.')
if ca_certs == None:
raise ValueError('ca_certs must not be None.')
try:
f = open(ca_certs, "r")
except IOError as err:
raise IOError(ca_certs+": "+err.strerror)
else:
f.close()
if certfile != None:
try:
f = open(certfile, "r")
except IOError as err:
raise IOError(certfile+": "+err.strerror)
else:
f.close()
if keyfile != None:
try:
f = open(keyfile, "r")
except IOError as err:
raise IOError(keyfile+": "+err.strerror)
else:
f.close()
self._tls_ca_certs = ca_certs
self._tls_certfile = certfile
self._tls_keyfile = keyfile
self._tls_cert_reqs = cert_reqs
self._tls_version = tls_version
self._tls_ciphers = ciphers
def connect(self, host, port=1883, keepalive=60):
    """Connect to a remote broker (blocking).

    host is the hostname or IP address of the remote broker.
    port is the network port of the server host to connect to. Defaults to
    1883. Note that the default port for MQTT over SSL/TLS is 8883 so if you
    are using tls_set() the port may need providing.
    keepalive: Maximum period in seconds between communications with the
    broker. If no other messages are being exchanged, this controls the
    rate at which the client will send ping messages to the broker.
    """
    # Record the connection parameters (validated there), then do the actual
    # network connection immediately via reconnect().
    self.connect_async(host, port, keepalive)
    return self.reconnect()
def connect_async(self, host, port=1883, keepalive=60):
"""Connect to a remote broker asynchronously. This is a non-blocking
connect call that can be used with loop_start() to provide very quick
start.
host is the hostname or IP address of the remote broker.
port is the network port of the server host to connect to. Defaults to
1883. Note that the default port for MQTT over SSL/TLS is 8883 so if you
are using tls_set() the port may need providing.
keepalive: Maximum period in seconds between communications with the
broker. If no other messages are being exchanged, this controls the
rate at which the client will send ping messages to the broker.
"""
if host == None or len(host) == 0:
raise ValueError('Invalid host.')
if port <= 0:
raise ValueError('Invalid port number.')
if keepalive < 0:
raise ValueError('Keepalive must be >=0.')
self._host = host
self._port = port
self._keepalive = keepalive
self._state_mutex.acquire()
self._state = mosq_cs_connect_async
self._state_mutex.release()
def reconnect(self):
    """Reconnect the client after a disconnect. Can only be called after
    connect()/connect_async().

    Resets all per-connection state (read buffer, outgoing queue, keepalive
    timers), closes any old socket, opens a new (optionally TLS-wrapped)
    TCP connection and sends the CONNECT packet.
    """
    if len(self._host) == 0:
        raise ValueError('Invalid host.')
    if self._port <= 0:
        raise ValueError('Invalid port number.')
    # Discard any half-read inbound packet and all queued outbound packets.
    self._in_packet.cleanup()
    self._out_packet_mutex.acquire()
    self._out_packet = []
    self._out_packet_mutex.release()
    self._current_out_packet_mutex.acquire()
    self._current_out_packet = None
    self._current_out_packet_mutex.release()
    # Restart the keepalive clock and clear any pending PINGREQ.
    self._msgtime_mutex.acquire()
    self._last_msg_in = time.time()
    self._last_msg_out = time.time()
    self._msgtime_mutex.release()
    self._ping_t = 0
    self._state_mutex.acquire()
    self._state = mosq_cs_new
    self._state_mutex.release()
    # Close the previous transport, if any (TLS wrapper takes precedence).
    if self._ssl:
        self._ssl.close()
        self._ssl = None
        self._sock = None
    elif self._sock:
        self._sock.close()
        self._sock = None
    # Put messages in progress in a valid state.
    self._messages_reconnect_reset()
    self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # FIXME use create_connection here
    # A configured CA file (via tls_set()) is what enables TLS.
    if self._tls_ca_certs != None:
        self._ssl = ssl.wrap_socket(self._sock,
            certfile=self._tls_certfile,
            keyfile=self._tls_keyfile,
            ca_certs=self._tls_ca_certs,
            cert_reqs=self._tls_cert_reqs,
            ssl_version=self._tls_version,
            ciphers=self._tls_ciphers)
    try:
        self.socket().connect((self._host, self._port))
    except socket.error as err:
        (msg) = err
        # EINPROGRESS is expected for a non-blocking connect; anything else
        # is a real failure.
        if msg.errno != errno.EINPROGRESS:
            raise
    # All further I/O is non-blocking and driven by loop()/select().
    self._sock.setblocking(0)
    return self._send_connect(self._keepalive, self._clean_session)
def loop(self, timeout=1.0, max_packets=1):
    """Process network events.

    This function must be called regularly to ensure communication with the
    broker is carried out. It calls select() on the network socket to wait
    for network events. If incoming data is present it will then be
    processed. Outgoing commands, from e.g. publish(), are normally sent
    immediately that their function is called, but this is not always
    possible. loop() will also attempt to send any remaining outgoing
    messages, which also includes commands that are part of the flow for
    messages with QoS>0.

    timeout: The time in seconds to wait for incoming/outgoing network
    traffic before timing out and returning.
    max_packets: Not currently used.

    Returns MOSQ_ERR_SUCCESS on success.
    Returns >0 on error.
    A ValueError will be raised if timeout < 0"""
    if timeout < 0.0:
        raise ValueError('Invalid timeout.')
    # Promote the next queued packet to "current" and only ask select() for
    # writability when there is actually something to send.
    self._current_out_packet_mutex.acquire()
    self._out_packet_mutex.acquire()
    if self._current_out_packet == None and len(self._out_packet) > 0:
        self._current_out_packet = self._out_packet.pop(0)
    if self._current_out_packet:
        wlist = [self.socket()]
    else:
        wlist = []
    self._out_packet_mutex.release()
    self._current_out_packet_mutex.release()
    rlist = [self.socket()]
    try:
        socklist = select.select(rlist, wlist, [], timeout)
    except TypeError:
        # Socket isn't correct type, in likelihood connection is lost
        return MOSQ_ERR_CONN_LOST
    if self.socket() in socklist[0]:
        rc = self.loop_read(max_packets)
        # A read error, or the socket having been torn down mid-read,
        # aborts the pass before attempting any writes.
        if rc or (self._ssl == None and self._sock == None):
            return rc
    if self.socket() in socklist[1]:
        rc = self.loop_write(max_packets)
        if rc or (self._ssl == None and self._sock == None):
            return rc
    # Housekeeping: keepalive pings and message retries.
    return self.loop_misc()
def publish(self, topic, payload=None, qos=0, retain=False):
    """Publish a message on a topic.

    This causes a message to be sent to the broker and subsequently from
    the broker to any clients subscribing to matching topics.

    topic: The topic that the message should be published on.
    payload: The actual message to send. If not given, or set to None a
    zero length message will be used. Passing an int or float will result
    in the payload being converted to a string representing that number. If
    you wish to send a true int/float, use struct.pack() to create the
    payload you require.
    qos: The quality of service level to use.
    retain: If set to true, the message will be set as the "last known
    good"/retained message for the topic.

    Returns a tuple (result, mid), where result is MOSQ_ERR_SUCCESS to
    indicate success or MOSQ_ERR_NO_CONN if the client is not currently
    connected. mid is the message ID for the publish request. The mid
    value can be used to track the publish request by checking against the
    mid argument in the on_publish() callback if it is defined.

    A ValueError will be raised if topic == None, has zero length or is
    invalid (contains a wildcard), if qos is not one of 0, 1 or 2, or if
    the length of the payload is greater than 268435455 bytes."""
    if topic == None or len(topic) == 0:
        raise ValueError('Invalid topic.')
    if qos<0 or qos>2:
        raise ValueError('Invalid QoS level.')
    # Normalise the payload: str/bytearray pass through, numbers become their
    # string form, None stays None; anything else is rejected.
    if isinstance(payload, str) == True or isinstance(payload, bytearray) == True:
        local_payload = payload
    elif isinstance(payload, int) == True or isinstance(payload, float) == True:
        local_payload = str(payload)
    elif payload == None:
        local_payload = None
    else:
        raise TypeError('payload must be a string, bytearray, int, float or None.')
    # 268435455 is the MQTT maximum remaining-length value (0xFFFFFF7F).
    if local_payload != None and len(local_payload) > 268435455:
        raise ValueError('Payload too large.')
    if self._topic_wildcard_len_check(topic) != MOSQ_ERR_SUCCESS:
        raise ValueError('Publish topic cannot contain wildcards.')
    local_mid = self._mid_generate()
    if qos == 0:
        # QoS 0: fire and forget, no handshake state to track.
        rc = self._send_publish(local_mid, topic, local_payload, qos, retain, False)
        return (rc, local_mid)
    else:
        # QoS 1/2: queue a message record so the acknowledgement handshake
        # (PUBACK, or PUBREC/PUBREL/PUBCOMP) can be matched up and retried.
        message = MosquittoMessage()
        message.timestamp = time.time()
        message.direction = mosq_md_out
        if qos == 1:
            message.state = mosq_ms_wait_puback
        elif qos == 2:
            message.state = mosq_ms_wait_pubrec
        message.mid = local_mid
        message.topic = topic
        # Zero-length payloads are stored as None.
        if local_payload == None or len(local_payload) == 0:
            message.payload = None
        else:
            message.payload = local_payload
        message.qos = qos
        message.retain = retain
        message.dup = False
        self._messages.append(message)
        rc = self._send_publish(message.mid, message.topic, message.payload, message.qos, message.retain, message.dup)
        return (rc, local_mid)
def username_pw_set(self, username, password=None):
"""Set a username and optionally a password for broker authentication.
Must be called before connect() to have any effect.
Requires a broker that supports MQTT v3.1.
username: The username to authenticate with. Need have no relationship to the client id.
password: <PASSWORD> password to authenticate with. Optional, set to None if not required.
"""
self._username = username
self._password = password
def disconnect(self):
    """Send a DISCONNECT packet to the broker, cleanly ending the session.

    Returns MOSQ_ERR_NO_CONN when there is no open connection.
    """
    if self._sock is None and self._ssl is None:
        return MOSQ_ERR_NO_CONN
    # Mark the disconnect as deliberate so on_disconnect gets rc 0.
    with self._state_mutex:
        self._state = mosq_cs_disconnecting
    return self._send_disconnect()
def subscribe(self, topic, qos=0):
"""Subscribe the client to a topic.
sub: The subscription topic to subscribe to.
qos: The desired quality of service level for the subscription.
Returns a tuple (result, mid), where result is MOSQ_ERR_SUCCESS
to indicate success or MOSQ_ERR_NO_CONN if the client is not currently connected.
mid is the message ID for the subscribe request. The mid value can be
used to track the subscribe request by checking against the mid
argument in the on_subscribe() callback if it is defined.
Raises a ValueError if qos is not 0, 1 or 2, or if topic is None or has
zero string length.
"""
if qos<0 or qos>2:
raise ValueError('Invalid QoS level.')
if topic == None or len(topic) == 0:
raise ValueError('Invalid topic.')
topic = _fix_sub_topic(topic)
if self._sock == None and self._ssl == None:
return MOSQ_ERR_NO_CONN
return self._send_subscribe(False, topic, qos)
def unsubscribe(self, topic):
    """Unsubscribe the client from a topic.

    topic: The subscription topic to unsubscribe from.

    On success returns a tuple (result, mid); mid can be matched against
    the mid argument of the on_unsubscribe() callback.  When the client
    is not connected the bare error code MOSQ_ERR_NO_CONN is returned.

    Raises ValueError if topic is None or has zero string length.
    """
    if topic is None or len(topic) == 0:
        raise ValueError('Invalid topic.')
    topic = _fix_sub_topic(topic)
    if self._sock is None and self._ssl is None:
        return MOSQ_ERR_NO_CONN
    return self._send_unsubscribe(False, topic)
def loop_read(self, max_packets=1):
    """Process read network events. Use in place of calling loop() if you
    wish to handle your client reads as part of your own application.
    Use socket() to obtain the client socket to call select() or
    equivalent on.
    Do not use if you are using the threaded interface loop_start()."""
    if self._sock is None and self._ssl is None:
        return MOSQ_ERR_NO_CONN
    # NOTE: the max_packets argument is overridden by the number of
    # in-flight messages (minimum 1), mirroring the original behaviour.
    attempts = max(len(self._messages), 1)
    for _ in range(attempts):
        rc = self._packet_read()
        if rc > 0:
            # Hard error: tear down the connection and report.
            return self._loop_rc_handle(rc)
        if rc == MOSQ_ERR_AGAIN:
            # Nothing more to read right now.
            return MOSQ_ERR_SUCCESS
    return MOSQ_ERR_SUCCESS
def loop_write(self, max_packets=1):
    """Process write network events. Use in place of calling loop() if
    you wish to handle your client writes as part of your own
    application.
    Use socket() to obtain the client socket to call select() or
    equivalent on.
    Use want_write() to determine if there is data waiting to be written.
    Do not use if you are using the threaded interface loop_start()."""
    if self._sock is None and self._ssl is None:
        return MOSQ_ERR_NO_CONN
    # NOTE: the max_packets argument is overridden by the number of
    # in-flight messages (minimum 1), mirroring the original behaviour.
    attempts = max(len(self._messages), 1)
    for _ in range(attempts):
        rc = self._packet_write()
        if rc > 0:
            # Hard error: tear down the connection and report.
            return self._loop_rc_handle(rc)
        if rc == MOSQ_ERR_AGAIN:
            # Socket buffer is full; try again later.
            return MOSQ_ERR_SUCCESS
    return MOSQ_ERR_SUCCESS
def want_write(self):
    """Return True if there is network data waiting to be written.

    Useful if you are calling select() yourself rather than using loop().
    """
    return bool(self._current_out_packet) or len(self._out_packet) > 0
def loop_misc(self):
    """Process miscellaneous network events. Use in place of calling loop() if you
    wish to call select() or equivalent on.
    Do not use if you are using the threaded interface loop_start()."""
    if self._sock == None and self._ssl == None:
        return MOSQ_ERR_NO_CONN
    now = time.time()
    # Send a PINGREQ if the keepalive interval has elapsed.
    self._check_keepalive()
    if self._last_retry_check+1 < now:
        # Only check once a second at most
        self._message_retry_check()
        self._last_retry_check = now
    if self._ping_t > 0 and now - self._ping_t >= self._keepalive:
        # mosq->ping_t != 0 means we are waiting for a pingresp.
        # This hasn't happened in the keepalive time so we should disconnect.
        # Close whichever transport is active (TLS takes precedence).
        if self._ssl:
            self._ssl.close()
            self._ssl = None
        elif self._sock:
            self._sock.close()
            self._sock = None
        self._callback_mutex.acquire()
        if self._state == mosq_cs_disconnecting:
            # We asked for the disconnect ourselves: report success.
            rc = MOSQ_ERR_SUCCESS
        else:
            rc = 1
        if self.on_disconnect:
            self._in_callback = True
            self.on_disconnect(self, self._userdata, rc)
            self._in_callback = False
        self._callback_mutex.release()
        return MOSQ_ERR_CONN_LOST
    return MOSQ_ERR_SUCCESS
def message_retry_set(self, retry):
    """Set the timeout in seconds before a message with QoS>0 is retried
    (20 seconds by default).

    Raises ValueError for a negative retry value.
    """
    if retry < 0:
        raise ValueError('Invalid retry.')
    self._message_retry = retry
def user_data_set(self, userdata):
    """Set the user data variable passed to callbacks.

    May be any data type.
    """
    self._userdata = userdata
def will_set(self, topic, payload=None, qos=0, retain=False):
    """Set a Will to be sent by the broker if the client disconnects
    unexpectedly.

    This must be called before connect() to have any effect.

    topic: The topic the will message should be published on.
    payload: The will message. None or "" gives a zero length will.
        An int or float is converted to its string representation; use
        struct.pack() if you need a true binary number.
    qos: The quality of service level to use for the will.
    retain: If true, the will is set as the retained ("last known good")
        message for the topic.

    Raises ValueError if qos is not 0, 1 or 2, or if topic is None or
    has zero string length; TypeError for an unsupported payload type.
    """
    if topic is None or len(topic) == 0:
        raise ValueError('Invalid topic.')
    if not 0 <= qos <= 2:
        raise ValueError('Invalid QoS level.')
    if isinstance(payload, (str, bytearray)):
        self._will_payload = payload
    elif isinstance(payload, (int, float)):
        self._will_payload = str(payload)
    elif payload is None:
        self._will_payload = None
    else:
        raise TypeError('payload must be a string, bytearray, int, float or None.')
    self._will = True
    self._will_topic = topic
    self._will_qos = qos
    self._will_retain = retain
def will_clear(self):
    """Remove a will that was previously configured with will_set().

    Must be called before connect() to have any effect.
    """
    self._will = False
    self._will_topic = ""
    self._will_payload = None
    self._will_qos = 0
    self._will_retain = False
def socket(self):
    """Return the active transport for this client: the ssl object when
    TLS is in use, otherwise the plain socket."""
    return self._ssl if self._ssl else self._sock
def loop_forever(self, timeout=1.0, max_packets=1):
    """This function call loop() for you in an infinite blocking loop. It
    is useful for the case where you only want to run the MQTT client loop
    in your program.
    loop_forever() will handle reconnecting for you. If you call
    disconnect() in a callback it will return."""
    run = True
    if self._state == mosq_cs_connect_async:
        # connect_async() was used: complete the connection first.
        self.reconnect()
    while run == True:
        rc = MOSQ_ERR_SUCCESS
        # Spin the network loop until it reports an error.
        while rc == MOSQ_ERR_SUCCESS:
            rc = self.loop(timeout, max_packets)
        if self._state == mosq_cs_disconnecting:
            # Deliberate disconnect: stop looping.
            run = False
        else:
            # Unexpected drop: back off briefly, then reconnect.
            time.sleep(1)
            self.reconnect()
    return rc
def loop_start(self):
    """This is part of the threaded client interface. Call this once to
    start a new thread to process network traffic. This provides an
    alternative to repeatedly calling loop() yourself.

    Returns MOSQ_ERR_INVAL if the thread is already running.
    """
    if self._thread is not None:
        return MOSQ_ERR_INVAL
    worker = threading.Thread(target=self._thread_main)
    # Daemonize so the interpreter can exit without an explicit stop.
    worker.daemon = True
    self._thread = worker
    worker.start()
def loop_stop(self, force=False):
    """This is part of the threaded client interface. Call this once to
    stop the network thread previously created with loop_start(). This
    call will block until the network thread finishes.

    The force parameter is currently ignored.

    Returns MOSQ_ERR_INVAL if no thread is running.
    """
    if self._thread is None:
        return MOSQ_ERR_INVAL
    # Signal the worker, then wait for it to exit.
    self._thread_terminate = True
    self._thread.join()
    self._thread = None
# ============================================================
# Private functions
# ============================================================
def _loop_rc_handle(self, rc):
    """Translate a non-zero read/write return code into a disconnect:
    close the transport, then fire the on_disconnect callback."""
    if rc:
        # Close whichever transport is active (TLS takes precedence).
        if self._ssl:
            self._ssl.close()
            self._ssl = None
        elif self._sock:
            self._sock.close()
            self._sock = None
        self._state_mutex.acquire()
        if self._state == mosq_cs_disconnecting:
            # A disconnect we initiated ourselves is reported as success.
            rc = MOSQ_ERR_SUCCESS
        self._state_mutex.release()
        self._callback_mutex.acquire()
        if self.on_disconnect:
            self._in_callback = True
            self.on_disconnect(self, self._userdata, rc)
            self._in_callback = False
        self._callback_mutex.release()
    return rc
def _packet_read(self):
    # This gets called if pselect() indicates that there is network data
    # available - ie. at least one byte. What we do depends on what data we
    # already have.
    # If we've not got a command, attempt to read one and save it. This should
    # always work because it's only a single byte.
    # Then try to read the remaining length. This may fail because it is may
    # be more than one byte - will need to save data pending next read if it
    # does fail.
    # Then try to read the remaining payload, where 'payload' here means the
    # combined variable header and actual payload. This is the most likely to
    # fail due to longer length, so save current data and current position.
    # After all data is read, send to _mosquitto_handle_packet() to deal with.
    # Finally, free the memory and reset everything to starting conditions.
    #
    # Returns 0 on success, MOSQ_ERR_AGAIN when the socket would block,
    # 1 on EOF, or a MOSQ_ERR_* protocol error.
    if self._in_packet.command == 0:
        # Step 1: read the single fixed-header command byte.
        try:
            if self._ssl:
                command = self._ssl.read(1)
            else:
                command = self._sock.recv(1)
        except socket.error as err:
            (msg) = err
            if self._ssl and (msg.errno == ssl.SSL_ERROR_WANT_READ or msg.errno == ssl.SSL_ERROR_WANT_WRITE):
                return MOSQ_ERR_AGAIN
            if msg.errno == errno.EAGAIN:
                return MOSQ_ERR_AGAIN
            raise
        else:
            if len(command) == 0:
                # Zero-byte read means the peer closed the connection.
                return 1
            command = struct.unpack("!B", command)
            self._in_packet.command = command[0]
    if self._in_packet.have_remaining == 0:
        # Read remaining
        # Algorithm for decoding taken from pseudo code at
        # http://publib.boulder.ibm.com/infocenter/wmbhelp/v6r0m0/topic/com.ibm.etools.mft.doc/ac10870_.htm
        while True:
            try:
                if self._ssl:
                    byte = self._ssl.read(1)
                else:
                    byte = self._sock.recv(1)
            except socket.error as err:
                (msg) = err
                if self._ssl and (msg.errno == ssl.SSL_ERROR_WANT_READ or msg.errno == ssl.SSL_ERROR_WANT_WRITE):
                    return MOSQ_ERR_AGAIN
                if msg.errno == errno.EAGAIN:
                    return MOSQ_ERR_AGAIN
                raise
            else:
                byte = struct.unpack("!B", byte)
                byte = byte[0]
                self._in_packet.remaining_count.append(byte)
                # Max 4 bytes length for remaining length as defined by protocol.
                # Anything more likely means a broken/malicious client.
                if len(self._in_packet.remaining_count) > 4:
                    return MOSQ_ERR_PROTOCOL
                # 7 bits of value per byte, little-endian base-128.
                self._in_packet.remaining_length = self._in_packet.remaining_length + (byte & 127)*self._in_packet.remaining_mult
                self._in_packet.remaining_mult = self._in_packet.remaining_mult * 128
            if (byte & 128) == 0:
                # Top bit clear: this was the last length byte.
                break
        self._in_packet.have_remaining = 1
        self._in_packet.to_process = self._in_packet.remaining_length
    while self._in_packet.to_process > 0:
        # Step 3: read the variable header + payload, possibly across
        # several calls; progress is kept in self._in_packet.
        try:
            if self._ssl:
                data = self._ssl.read(self._in_packet.to_process)
            else:
                data = self._sock.recv(self._in_packet.to_process)
        except socket.error as err:
            (msg) = err
            if self._ssl and (msg.errno == ssl.SSL_ERROR_WANT_READ or msg.errno == ssl.SSL_ERROR_WANT_WRITE):
                return MOSQ_ERR_AGAIN
            if msg.errno == errno.EAGAIN:
                return MOSQ_ERR_AGAIN
            raise
        else:
            self._in_packet.to_process = self._in_packet.to_process - len(data)
            self._in_packet.packet = self._in_packet.packet + data
    # All data for this packet is read.
    self._in_packet.pos = 0
    rc = self._packet_handle()
    # Free data and reset values
    self._in_packet.cleanup()
    self._msgtime_mutex.acquire()
    self._last_msg_in = time.time()
    self._msgtime_mutex.release()
    return rc
def _packet_write(self):
    """Flush the current outgoing packet (and any queued successors) to
    the network.  Returns MOSQ_ERR_SUCCESS or MOSQ_ERR_AGAIN when the
    socket would block."""
    self._current_out_packet_mutex.acquire()
    while self._current_out_packet:
        packet = self._current_out_packet
        try:
            if self._ssl:
                write_length = self._ssl.write(packet.packet[packet.pos:])
            else:
                write_length = self._sock.send(packet.packet[packet.pos:])
        except AttributeError:
            # Socket was closed under us (self._sock/_ssl became None).
            self._current_out_packet_mutex.release()
            return MOSQ_ERR_SUCCESS
        except socket.error as err:
            self._current_out_packet_mutex.release()
            (msg) = err
            if self._ssl and (msg.errno == ssl.SSL_ERROR_WANT_READ or msg.errno == ssl.SSL_ERROR_WANT_WRITE):
                return MOSQ_ERR_AGAIN
            if msg.errno == errno.EAGAIN:
                return MOSQ_ERR_AGAIN
            raise
        if write_length > 0:
            packet.to_process = packet.to_process - write_length
            packet.pos = packet.pos + write_length
            if packet.to_process == 0:
                # Whole packet sent.  QoS-0 PUBLISH completes here, so
                # fire on_publish now.
                if (packet.command & 0xF0) == PUBLISH and packet.qos == 0:
                    self._callback_mutex.acquire()
                    if self.on_publish:
                        self._in_callback = True
                        self.on_publish(self, self._userdata, packet.mid)
                        self._in_callback = False
                    self._callback_mutex.release()
                # Promote the next queued packet, if any.
                self._out_packet_mutex.acquire()
                if len(self._out_packet) > 0:
                    self._current_out_packet = self._out_packet.pop(0)
                else:
                    self._current_out_packet = None
                self._out_packet_mutex.release()
        else:
            pass # FIXME
    self._current_out_packet_mutex.release()
    self._msgtime_mutex.acquire()
    self._last_msg_out = time.time()
    self._msgtime_mutex.release()
    return MOSQ_ERR_SUCCESS
def _easy_log(self, level, buf):
    """Forward a log line to the user's on_log callback, if one is set."""
    if self.on_log:
        self.on_log(self, self._userdata, level, buf)
def _check_keepalive(self):
    """Send a PINGREQ when the keepalive interval has elapsed in either
    direction, or tear down the connection if we cannot ping."""
    now = time.time()
    self._msgtime_mutex.acquire()
    last_msg_out = self._last_msg_out
    last_msg_in = self._last_msg_in
    self._msgtime_mutex.release()
    if (self._sock != None or self._ssl != None) and (now - last_msg_out >= self._keepalive or now - last_msg_in >= self._keepalive):
        if self._state == mosq_cs_connected and self._ping_t == 0:
            # Connected and no PINGRESP outstanding: probe the broker.
            self._send_pingreq()
            self._msgtime_mutex.acquire()
            self._last_msg_out = now
            self._last_msg_in = now
            self._msgtime_mutex.release()
        else:
            # Not in a state where we can ping: drop the connection.
            if self._ssl:
                self._ssl.close()
                self._ssl = None
            elif self._sock:
                self._sock.close()
                self._sock = None
            if self._state == mosq_cs_disconnecting:
                # We asked for the disconnect ourselves: report success.
                rc = MOSQ_ERR_SUCCESS
            else:
                rc = 1
            self._callback_mutex.acquire()
            if self.on_disconnect:
                self._in_callback = True
                self.on_disconnect(self, self._userdata, rc)
                self._in_callback = False
            self._callback_mutex.release()
def _mid_generate(self):
    """Return the next message id, wrapping from 65535 back to 1
    (mid 0 is never used)."""
    self._last_mid += 1
    if self._last_mid == 65536:
        self._last_mid = 1
    return self._last_mid
def _topic_wildcard_len_check(self, topic):
    """Validate a publish topic.

    Returns MOSQ_ERR_INVAL if the topic contains a '+' or '#' wildcard,
    is empty, or is longer than 65535 bytes; MOSQ_ERR_SUCCESS otherwise.
    """
    if '+' in topic or '#' in topic or not 0 < len(topic) <= 65535:
        return MOSQ_ERR_INVAL
    return MOSQ_ERR_SUCCESS
def _send_pingreq(self):
    """Queue a PINGREQ keepalive probe and record when it was sent."""
    self._easy_log(MOSQ_LOG_DEBUG, "Sending PINGREQ")
    rc = self._send_simple_command(PINGREQ)
    if rc == MOSQ_ERR_SUCCESS:
        # loop_misc() uses _ping_t to detect a missing PINGRESP.
        self._ping_t = time.time()
    return rc
def _send_pingresp(self):
    """Queue a PINGRESP in answer to a broker PINGREQ."""
    self._easy_log(MOSQ_LOG_DEBUG, "Sending PINGRESP")
    return self._send_simple_command(PINGRESP)
def _send_puback(self, mid):
    """Queue a PUBACK acknowledging an inbound QoS-1 message."""
    self._easy_log(MOSQ_LOG_DEBUG, "Sending PUBACK (Mid: "+str(mid)+")")
    return self._send_command_with_mid(PUBACK, mid, False)
def _send_pubcomp(self, mid):
    """Queue a PUBCOMP completing an inbound QoS-2 exchange."""
    self._easy_log(MOSQ_LOG_DEBUG, "Sending PUBCOMP (Mid: "+str(mid)+")")
    return self._send_command_with_mid(PUBCOMP, mid, False)
def _pack_remaining_length(self, packet, remaining_length):
    """Append the MQTT variable-length "remaining length" encoding of
    remaining_length to packet.

    Each output byte carries 7 bits of the value, least significant
    first; the top bit of a byte is set when more bytes follow.

    Returns the packet for convenience.
    """
    # Fix: the previous version also accumulated every byte into a local
    # ``remaining_bytes`` list that was never read — dead code, removed.
    while True:
        remaining_length, digit = divmod(remaining_length, 128)
        # If there are more digits to encode, set the top bit of this digit
        if remaining_length > 0:
            digit = digit | 0x80
        packet.extend(struct.pack("!B", digit))
        if remaining_length == 0:
            # FIXME - this doesn't deal with incorrectly large payloads
            return packet
def _pack_str16(self, packet, data):
    """Append *data* to *packet* as an MQTT UTF-8 string: a 16-bit
    big-endian length prefix followed by the bytes.  Text is encoded as
    UTF-8; bytearrays are appended verbatim."""
    if sys.version_info[0] < 3:
        # Python 2: str is bytes; unicode must be encoded first.
        if isinstance(data, bytearray):
            packet.extend(struct.pack("!H", len(data)))
            packet.extend(data)
        elif isinstance(data, str):
            pack_format = "!H" + str(len(data)) + "s"
            packet.extend(struct.pack(pack_format, len(data), data))
        elif isinstance(data, unicode):
            udata = data.encode('utf-8')
            pack_format = "!H" + str(len(udata)) + "s"
            packet.extend(struct.pack(pack_format, len(udata), udata))
        else:
            raise TypeError
    else:
        # Python 3: str is text and is always UTF-8 encoded.
        if isinstance(data, bytearray):
            packet.extend(struct.pack("!H", len(data)))
            packet.extend(data)
        elif isinstance(data, str):
            udata = data.encode('utf-8')
            pack_format = "!H" + str(len(udata)) + "s"
            packet.extend(struct.pack(pack_format, len(udata), udata))
        else:
            raise TypeError
def _send_publish(self, mid, topic, payload=None, qos=0, retain=False, dup=False):
    """Build and queue a PUBLISH packet.  Returns MOSQ_ERR_NO_CONN when
    no transport is open, otherwise the result of _packet_queue()."""
    if self._sock == None and self._ssl == None:
        return MOSQ_ERR_NO_CONN
    # Fixed-header flags: bit3 = DUP, bits2-1 = QoS, bit0 = retain.
    command = PUBLISH | ((dup&0x1)<<3) | (qos<<1) | retain
    packet = bytearray()
    packet.extend(struct.pack("!B", command))
    if payload == None:
        # Zero-length payload: remaining length is just the topic string.
        remaining_length = 2+len(topic)
        self._easy_log(MOSQ_LOG_DEBUG, "Sending PUBLISH (d"+str(dup)+", q"+str(qos)+", r"+str(retain)+", m"+str(mid)+", '"+topic+"' (NULL payload)")
    else:
        remaining_length = 2+len(topic) + len(payload)
        self._easy_log(MOSQ_LOG_DEBUG, "Sending PUBLISH (d"+str(dup)+", q"+str(qos)+", r"+str(retain)+", m"+str(mid)+", '"+topic+"', ... ("+str(len(payload))+" bytes)")
    if qos > 0:
        # For message id
        remaining_length = remaining_length + 2
    self._pack_remaining_length(packet, remaining_length)
    self._pack_str16(packet, topic)
    if qos > 0:
        # For message id
        packet.extend(struct.pack("!H", mid))
    if payload != None:
        if isinstance(payload, str):
            if sys.version_info[0] < 3:
                # Python 2 str is already bytes.
                pack_format = str(len(payload)) + "s"
                packet.extend(struct.pack(pack_format, payload))
            else:
                upayload = payload.encode('utf-8')
                pack_format = str(len(upayload)) + "s"
                packet.extend(struct.pack(pack_format, upayload))
        elif isinstance(payload, bytearray):
            packet.extend(payload)
        elif isinstance(payload, unicode):
            # Python 2 unicode text: encode before packing.
            upayload = payload.encode('utf-8')
            pack_format = str(len(upayload)) + "s"
            packet.extend(struct.pack(pack_format, upayload))
        else:
            raise TypeError('payload must be a string, unicode or a bytearray.')
    return self._packet_queue(PUBLISH, packet, mid, qos)
def _send_pubrec(self, mid):
    """Queue a PUBREC for step one of an inbound QoS-2 exchange."""
    self._easy_log(MOSQ_LOG_DEBUG, "Sending PUBREC (Mid: "+str(mid)+")")
    return self._send_command_with_mid(PUBREC, mid, False)
def _send_pubrel(self, mid, dup=False):
    """Queue a PUBREL (QoS-2 release).  The |2 sets the reserved flag
    bit required by the protocol for PUBREL."""
    self._easy_log(MOSQ_LOG_DEBUG, "Sending PUBREL (Mid: "+str(mid)+")")
    return self._send_command_with_mid(PUBREL|2, mid, dup)
def _send_command_with_mid(self, command, mid, dup):
    """Build and queue an acknowledgement packet whose payload is a bare
    message id (PUBACK, PUBCOMP, PUBREC and PUBREL)."""
    if dup:
        command |= 8
    # Remaining length is always 2: just the 16-bit message id.
    packet = struct.pack('!BBH', command, 2, mid)
    return self._packet_queue(command, packet, mid, 1)
def _send_simple_command(self, command):
    """Build and queue a packet with no variable header or payload
    (DISCONNECT, PINGREQ and PINGRESP)."""
    # Command byte followed by a zero remaining length.
    packet = struct.pack('!BB', command, 0)
    return self._packet_queue(command, packet, 0, 0)
def _send_connect(self, keepalive, clean_session):
    """Build and queue the MQTT v3.1 CONNECT packet from the configured
    client id, will, and username/password."""
    # 12 bytes of fixed variable header + the client id string.
    remaining_length = 12 + 2+len(self._client_id)
    connect_flags = 0
    if clean_session:
        connect_flags = connect_flags | 0x02
    if self._will:
        remaining_length = remaining_length + 2+len(self._will_topic) + 2+len(self._will_payload)
        # Will flag, will QoS (bits 3-4) and will retain (bit 5).
        connect_flags = connect_flags | 0x04 | ((self._will_qos&0x03) << 3) | ((self._will_retain&0x01) << 5)
    if self._username:
        remaining_length = remaining_length + 2+len(self._username)
        connect_flags = connect_flags | 0x80
        if self._password:
            connect_flags = connect_flags | 0x40
            remaining_length = remaining_length + 2+len(self._password)
    command = CONNECT
    packet = bytearray()
    packet.extend(struct.pack("!B", command))
    self._pack_remaining_length(packet, remaining_length)
    # Protocol name, protocol version, flags and keepalive interval.
    packet.extend(struct.pack("!H6sBBH", len(PROTOCOL_NAME), PROTOCOL_NAME, PROTOCOL_VERSION, connect_flags, keepalive))
    self._pack_str16(packet, self._client_id)
    if self._will:
        self._pack_str16(packet, self._will_topic)
        if len(self._will_payload) > 0:
            self._pack_str16(packet, self._will_payload)
        else:
            # Empty will payload: just the zero length prefix.
            packet.extend(struct.pack("!H", 0))
    if self._username:
        self._pack_str16(packet, self._username)
        if self._password:
            self._pack_str16(packet, self._password)
    self._keepalive = keepalive
    return self._packet_queue(command, packet, 0, 0)
def _send_disconnect(self):
    """Queue a DISCONNECT packet."""
    return self._send_simple_command(DISCONNECT)
def _send_subscribe(self, dup, topic, topic_qos):
    """Build and queue a SUBSCRIBE packet for a single topic filter.

    dup: set the DUP flag in the fixed header (resend).
    topic: the topic filter to subscribe to.
    topic_qos: the requested QoS for the subscription.

    Returns (result, mid) where result comes from _packet_queue().
    """
    # Fix: removed the unused local ``pack_format`` (dead code).
    # Remaining length: 2 (mid) + 2+len(topic) (topic string) + 1 (qos).
    remaining_length = 2 + 2+len(topic) + 1
    command = SUBSCRIBE | (dup<<3) | (1<<1)
    packet = bytearray()
    packet.extend(struct.pack("!B", command))
    self._pack_remaining_length(packet, remaining_length)
    local_mid = self._mid_generate()
    packet.extend(struct.pack("!H", local_mid))
    self._pack_str16(packet, topic)
    packet.extend(struct.pack("B", topic_qos))
    return (self._packet_queue(command, packet, local_mid, 1), local_mid)
def _send_unsubscribe(self, dup, topic):
    """Build and queue an UNSUBSCRIBE packet for a single topic filter.

    dup: set the DUP flag in the fixed header (resend).
    topic: the topic filter to unsubscribe from.

    Returns (result, mid) where result comes from _packet_queue().
    """
    # Fix: removed the unused local ``pack_format`` (dead code).
    # Remaining length: 2 (mid) + 2+len(topic) (topic string).
    remaining_length = 2 + 2+len(topic)
    command = UNSUBSCRIBE | (dup<<3) | (1<<1)
    packet = bytearray()
    packet.extend(struct.pack("!B", command))
    self._pack_remaining_length(packet, remaining_length)
    local_mid = self._mid_generate()
    packet.extend(struct.pack("!H", local_mid))
    self._pack_str16(packet, topic)
    return (self._packet_queue(command, packet, local_mid, 1), local_mid)
def _message_update(self, mid, direction, state):
    """Set the state (and refresh the timestamp) of the tracked message
    matching mid and direction.

    Returns MOSQ_ERR_SUCCESS, or MOSQ_ERR_NOT_FOUND if no such message
    is being tracked.
    """
    for msg in self._messages:
        if msg.mid == mid and msg.direction == direction:
            msg.state = state
            msg.timestamp = time.time()
            return MOSQ_ERR_SUCCESS
    return MOSQ_ERR_NOT_FOUND
def _message_retry_check(self):
    """Retransmit the appropriate packet for any in-flight message whose
    retry timeout has expired, marking it as a duplicate."""
    now = time.time()
    for m in self._messages:
        if m.timestamp + self._message_retry < now:
            if m.state == mosq_ms_wait_puback or m.state == mosq_ms_wait_pubrec:
                # Outgoing QoS-1/2 message not yet acknowledged: resend it.
                m.timestamp = now
                m.dup = True
                self._send_publish(m.mid, m.topic, m.payload, m.qos, m.retain, m.dup)
            elif m.state == mosq_ms_wait_pubrel:
                # Inbound QoS-2: re-acknowledge with PUBREC.
                m.timestamp = now
                m.dup = True
                self._send_pubrec(m.mid)
            elif m.state == mosq_ms_wait_pubcomp:
                # Outgoing QoS-2 awaiting completion: resend PUBREL.
                m.timestamp = now
                m.dup = True
                self._send_pubrel(m.mid, True)
def _messages_reconnect_reset(self):
    """Reset tracked message state after a reconnect.

    Outgoing QoS-1/2 messages are rewound to their initial wait states
    so they will be retransmitted; inbound (partially received)
    messages are discarded.

    Bug fix: the previous version popped items from self._messages
    while iterating over the same list, which silently skipped the
    element following each removal.  Rebuild the retained list instead.
    """
    retained = []
    for m in self._messages:
        m.timestamp = 0
        if m.direction == mosq_md_out:
            if m.qos == 1:
                m.state = mosq_ms_wait_puback
            elif m.qos == 2:
                m.state = mosq_ms_wait_pubrec
            retained.append(m)
        # Inbound messages are dropped on reconnect.
    self._messages = retained
def _packet_queue(self, command, packet, mid, qos):
    """Append an encoded packet to the outgoing queue and, unless we are
    inside a user callback, try to flush it immediately."""
    mpkt = MosquittoPacket(command, packet, mid, qos)
    self._out_packet_mutex.acquire()
    self._out_packet.append(mpkt)
    # Non-blocking acquire: if another thread holds the current-packet
    # lock it will pick up the new packet itself.
    if self._current_out_packet_mutex.acquire(False) == True:
        if self._current_out_packet == None and len(self._out_packet) > 0:
            self._current_out_packet = self._out_packet.pop(0)
        self._current_out_packet_mutex.release()
    self._out_packet_mutex.release()
    if self._in_callback == False:
        return self.loop_write()
    else:
        # Deferred: writing from inside a callback could re-enter.
        return MOSQ_ERR_SUCCESS
def _packet_handle(self):
    """Dispatch a fully-read inbound packet to its typed handler based
    on the high nibble of the command byte."""
    cmd = self._in_packet.command&0xF0
    # PUBACK and PUBCOMP share one handler; it takes the name for logging.
    if cmd == PUBACK:
        return self._handle_pubackcomp("PUBACK")
    if cmd == PUBCOMP:
        return self._handle_pubackcomp("PUBCOMP")
    dispatch = {
        PINGREQ: self._handle_pingreq,
        PINGRESP: self._handle_pingresp,
        PUBLISH: self._handle_publish,
        PUBREC: self._handle_pubrec,
        PUBREL: self._handle_pubrel,
        CONNACK: self._handle_connack,
        SUBACK: self._handle_suback,
        UNSUBACK: self._handle_unsuback,
    }
    handler = dispatch.get(cmd)
    if handler is None:
        # If we don't recognise the command, return an error straight away.
        self._easy_log(MOSQ_LOG_ERR, "Error: Unrecognised command "+str(cmd))
        return MOSQ_ERR_PROTOCOL
    return handler()
def _handle_pingreq(self):
    """Answer a broker PINGREQ with a PINGRESP."""
    # A PINGREQ has no payload; in strict mode enforce that.
    if self._strict_protocol and self._in_packet.remaining_length != 0:
        return MOSQ_ERR_PROTOCOL
    self._easy_log(MOSQ_LOG_DEBUG, "Received PINGREQ")
    return self._send_pingresp()
def _handle_pingresp(self):
    """Record that the broker answered our keepalive probe."""
    # A PINGRESP has no payload; in strict mode enforce that.
    if self._strict_protocol and self._in_packet.remaining_length != 0:
        return MOSQ_ERR_PROTOCOL
    # No longer waiting for a PINGRESP.
    self._ping_t = 0
    self._easy_log(MOSQ_LOG_DEBUG, "Received PINGRESP")
    return MOSQ_ERR_SUCCESS
def _handle_connack(self):
    """Handle the broker's CONNACK: fire on_connect and move to the
    connected state on success."""
    if self._strict_protocol:
        if self._in_packet.remaining_length != 2:
            return MOSQ_ERR_PROTOCOL
    if len(self._in_packet.packet) != 2:
        return MOSQ_ERR_PROTOCOL
    # Payload is a reserved byte followed by the result code.
    (resvd, result) = struct.unpack("!BB", self._in_packet.packet)
    self._easy_log(MOSQ_LOG_DEBUG, "Received CONNACK ("+str(resvd)+", "+str(result)+")")
    self._callback_mutex.acquire()
    if self.on_connect:
        self._in_callback = True
        self.on_connect(self, self._userdata, result)
        self._in_callback = False
    self._callback_mutex.release()
    if result == 0:
        self._state = mosq_cs_connected
        return MOSQ_ERR_SUCCESS
    elif result > 0 and result < 6:
        # Result codes 1-5 are the defined connection-refused reasons.
        return MOSQ_ERR_CONN_REFUSED
    else:
        return MOSQ_ERR_PROTOCOL
def _handle_suback(self):
    """Handle SUBACK: decode the granted QoS list and fire the
    on_subscribe callback."""
    self._easy_log(MOSQ_LOG_DEBUG, "Received SUBACK")
    # Message id followed by one granted-QoS byte per requested topic.
    pack_format = "!H" + str(len(self._in_packet.packet)-2) + 's'
    (mid, packet) = struct.unpack(pack_format, self._in_packet.packet)
    pack_format = "!" + "B"*len(packet)
    granted_qos = struct.unpack(pack_format, packet)
    self._callback_mutex.acquire()
    if self.on_subscribe:
        self._in_callback = True
        self.on_subscribe(self, self._userdata, mid, granted_qos)
        self._in_callback = False
    self._callback_mutex.release()
    return MOSQ_ERR_SUCCESS
def _handle_publish(self):
    """Handle an inbound PUBLISH: decode the message and deliver or ack
    it according to its QoS level."""
    rc = 0
    header = self._in_packet.command
    message = MosquittoMessage()
    message.direction = mosq_md_in
    # Fixed-header flags: bit3 = DUP, bits2-1 = QoS, bit0 = retain.
    message.dup = (header & 0x08)>>3
    message.qos = (header & 0x06)>>1
    message.retain = (header & 0x01)
    # Split off the 16-bit topic length, then the topic itself.
    pack_format = "!H" + str(len(self._in_packet.packet)-2) + 's'
    (slen, packet) = struct.unpack(pack_format, self._in_packet.packet)
    pack_format = '!' + str(slen) + 's' + str(len(packet)-slen) + 's'
    (message.topic, packet) = struct.unpack(pack_format, packet)
    if len(message.topic) == 0:
        return MOSQ_ERR_PROTOCOL
    if sys.version_info[0] >= 3:
        message.topic = message.topic.decode('utf-8')
    message.topic = _fix_sub_topic(message.topic)
    if message.qos > 0:
        # QoS > 0 packets carry a message id before the payload.
        pack_format = "!H" + str(len(packet)-2) + 's'
        (message.mid, packet) = struct.unpack(pack_format, packet)
    message.payload = packet
    self._easy_log(MOSQ_LOG_DEBUG, "Received PUBLISH (d"+str(message.dup)+
            ", q"+str(message.qos)+", r"+str(message.retain)+
            ", m"+str(message.mid)+", '"+message.topic+
            "', ... ("+str(len(message.payload))+" bytes)")
    message.timestamp = time.time()
    if message.qos == 0:
        # QoS 0: deliver immediately, nothing to acknowledge.
        self._callback_mutex.acquire()
        if self.on_message:
            self._in_callback = True
            self.on_message(self, self._userdata, message)
            self._in_callback = False
        self._callback_mutex.release()
        return MOSQ_ERR_SUCCESS
    elif message.qos == 1:
        # QoS 1: acknowledge with PUBACK, then deliver.
        rc = self._send_puback(message.mid)
        self._callback_mutex.acquire()
        if self.on_message:
            self._in_callback = True
            self.on_message(self, self._userdata, message)
            self._in_callback = False
        self._callback_mutex.release()
        return rc
    elif message.qos == 2:
        # QoS 2: send PUBREC and hold the message until PUBREL arrives.
        rc = self._send_pubrec(message.mid)
        message.state = mosq_ms_wait_pubrel
        self._messages.append(message)
        return rc
    else:
        return MOSQ_ERR_PROTOCOL
def _handle_pubrel(self):
    """Handle PUBREL: deliver the held QoS-2 inbound message exactly
    once and answer with PUBCOMP."""
    if self._strict_protocol:
        if self._in_packet.remaining_length != 2:
            return MOSQ_ERR_PROTOCOL
    if len(self._in_packet.packet) != 2:
        return MOSQ_ERR_PROTOCOL
    mid = struct.unpack("!H", self._in_packet.packet)
    mid = mid[0]
    self._easy_log(MOSQ_LOG_DEBUG, "Received PUBREL (Mid: "+str(mid)+")")
    for i in range(len(self._messages)):
        if self._messages[i].direction == mosq_md_in and self._messages[i].mid == mid:
            # Only pass the message on if we have removed it from the queue - this
            # prevents multiple callbacks for the same message.
            self._callback_mutex.acquire()
            if self.on_message:
                self._in_callback = True
                self.on_message(self, self._userdata, self._messages[i])
                self._in_callback = False
            self._callback_mutex.release()
            self._messages.pop(i)
            # Returning here also ends the iteration safely after the pop.
            return self._send_pubcomp(mid)
    return MOSQ_ERR_SUCCESS
def _handle_pubrec(self):
    """Handle PUBREC for an outgoing QoS-2 message: advance it to the
    wait-for-PUBCOMP state and answer with PUBREL."""
    if self._strict_protocol and self._in_packet.remaining_length != 2:
        return MOSQ_ERR_PROTOCOL
    (mid,) = struct.unpack("!H", self._in_packet.packet)
    self._easy_log(MOSQ_LOG_DEBUG, "Received PUBREC (Mid: "+str(mid)+")")
    for msg in self._messages:
        if msg.direction == mosq_md_out and msg.mid == mid:
            msg.state = mosq_ms_wait_pubcomp
            msg.timestamp = time.time()
            return self._send_pubrel(mid, False)
    return MOSQ_ERR_SUCCESS
def _handle_unsuback(self):
    """Handle UNSUBACK: log it and fire the on_unsubscribe callback."""
    if self._strict_protocol and self._in_packet.remaining_length != 2:
        return MOSQ_ERR_PROTOCOL
    (mid,) = struct.unpack("!H", self._in_packet.packet)
    self._easy_log(MOSQ_LOG_DEBUG, "Received UNSUBACK (Mid: "+str(mid)+")")
    self._callback_mutex.acquire()
    if self.on_unsubscribe:
        self._in_callback = True
        self.on_unsubscribe(self, self._userdata, mid)
        self._in_callback = False
    self._callback_mutex.release()
    return MOSQ_ERR_SUCCESS
def _handle_pubackcomp(self, cmd):
    """Handle PUBACK or PUBCOMP (cmd is the name, for logging): fire
    on_publish once and drop the completed outgoing message."""
    if self._strict_protocol:
        if self._in_packet.remaining_length != 2:
            return MOSQ_ERR_PROTOCOL
    mid = struct.unpack("!H", self._in_packet.packet)
    mid = mid[0]
    self._easy_log(MOSQ_LOG_DEBUG, "Received "+cmd+" (Mid: "+str(mid)+")")
    # NOTE(review): the loop pops from self._messages while indexing over
    # the original length; the per-iteration try/except absorbs the
    # resulting IndexError.  It works, but is fragile — kept as-is.
    for i in range(len(self._messages)):
        try:
            if self._messages[i].direction == mosq_md_out and self._messages[i].mid == mid:
                # Only inform the client the message has been sent once.
                self._callback_mutex.acquire()
                if self.on_publish:
                    self._in_callback = True
                    self.on_publish(self, self._userdata, mid)
                    self._in_callback = False
                self._callback_mutex.release()
                self._messages.pop(i)
        except IndexError:
            # Have removed item so i>count.
            # Not really an error.
            pass
    return MOSQ_ERR_SUCCESS
def _thread_main(self):
    """Body of the loop_start() worker thread: run the network loop,
    reconnecting on unexpected drops, until loop_stop() or a deliberate
    disconnect."""
    run = True
    self._thread_terminate = False
    self._state_mutex.acquire()
    if self._state == mosq_cs_connect_async:
        # connect_async() was used: complete the connection first.
        self._state_mutex.release()
        self.reconnect()
    else:
        self._state_mutex.release()
    while run == True:
        rc = MOSQ_ERR_SUCCESS
        while rc == MOSQ_ERR_SUCCESS:
            rc = self.loop()
            if self._thread_terminate == True:
                # loop_stop() was called: force the loops to exit.
                rc = 1
                run = False
        self._state_mutex.acquire()
        if self._state == mosq_cs_disconnecting:
            run = False
            self._state_mutex.release()
        else:
            # Unexpected drop: back off briefly, then reconnect.
            self._state_mutex.release()
            time.sleep(1)
            self.reconnect()
| StarcoderdataPython |
8037946 | <reponame>faro1219/wot
# -*- coding: utf-8 -*-
from matplotlib import patches
from matplotlib import pyplot
def __make_figure(y = 1, x = 1, projection=None):
    """Clear the current figure and create a y-by-x grid of subplots.

    y, x: number of subplot rows and columns.
    projection: axes projection name (e.g. '3d'), applied to every
        subplot.

    Returns the (figure, axes) pair from pyplot.subplots().
    """
    pyplot.clf()
    # Bug fix: the previous code passed the literal ``projection=None``,
    # silently ignoring the *projection* argument; subplots() also takes
    # per-axes options via ``subplot_kw``, not a bare keyword.
    return pyplot.subplots(y, x, figsize=(8 * x, 6 * y),
                           subplot_kw={'projection': projection})
def plot_2d_dataset(figure, dataset, x = 0, y = 1, title=None):
    """Scatter-plot two columns of *dataset* onto *figure* (an axes).

    Points are colored from the dataset's 'color' row metadata when
    present, otherwise drawn in gray.
    """
    point_colors = "#808080"
    if 'color' in dataset.row_meta.columns:
        point_colors = dataset.row_meta['color'].values
    figure.scatter(dataset.x[:, x], dataset.x[:, y], c=point_colors,
                   s=.2, marker=',', edgecolors='none')
    if title is not None:
        figure.title.set_text(title)
def legend_figure(figure, legend_list, loc=0):
    """Attach a patch legend built from (color, label) pairs to *figure*."""
    handles = [patches.Patch(color=color, label=label)
               for color, label in legend_list]
    figure.legend(handles=handles, loc=loc)
| StarcoderdataPython |
1745122 | <reponame>chrismurf/simulus<filename>examples/basics/twosims.py
import simulus
def handle(sim):
    """Event handler: report which simulator fired and at what time."""
    print("'{}' handles event at time {:g}".format(sim.name, sim.now))
# Create two independent simulators starting at different virtual times.
sim1 = simulus.simulator(name="sim1", init_time=100)
sim2 = simulus.simulator(name="sim2", init_time=-100)
# Schedule several events on each simulator at offsets from its init time.
for i in range(5, 100, 20):
    sim1.sched(handle, sim1, offset=i)
for i in range(5, 200, 30):
    sim2.sched(handle, sim2, offset=i)
sim1.show_calendar()
sim2.show_calendar()
# Manually interleave the two simulators: step each one while it still
# has a pending event, and stop once both calendars are exhausted.
while True:
    t1, t2 = sim1.peek(), sim2.peek()
    if t1 < simulus.infinite_time:
        sim1.step()
    if t2 < simulus.infinite_time:
        sim2.step()
    if t1 == simulus.infinite_time and \
       t2 == simulus.infinite_time:
        break
190825 | <gh_stars>100-1000
"""
Copyright (c) 2017, <NAME>.
Distributed under the terms of the MIT License.
The full license is in the file LICENSE, distributed with this software.
Created on May 20, 2017
@author: jrm
"""
from atom.api import Typed, Instance, Property, Dict, set_default, observe
from enamlnative.widgets.list_view import ProxyListView, ProxyListItem
from .android_toolkit_object import AndroidToolkitObject
from .android_view_group import AndroidViewGroup, ViewGroup
from .bridge import JavaBridgeObject, JavaCallback, JavaMethod, encode
package = 'androidx.recyclerview.widget'
class RecylerView(ViewGroup):
    """Bridge proxy for the androidx RecyclerView widget.

    NOTE(review): the class name misspells "Recycler"; it is kept as-is
    because other declarations in this file reference ``RecylerView``.
    """
    __nativeclass__ = set_default('%s.RecyclerView' % package)
    invalidate = JavaMethod()
    setHasFixedSize = JavaMethod('boolean')
    scrollTo = JavaMethod('int', 'int')
    scrollToPosition = JavaMethod('int')
    setItemViewCacheSize = JavaMethod('int')
    setAdapter = JavaMethod('%s.RecyclerView$Adapter' % package)
    # NOTE(review): duplicate declaration of setHasFixedSize (also above).
    setHasFixedSize = JavaMethod('boolean')
    setLayoutManager = JavaMethod('%s.RecyclerView$LayoutManager' % package)
    setRecyclerListener = JavaMethod('%s.RecyclerView$RecyclerListener' % package)

    class LayoutManager(JavaBridgeObject):
        # Proxy for RecyclerView.LayoutManager; subclassed by the
        # concrete layout managers below.
        __nativeclass__ = set_default('%s.RecyclerView$LayoutManager' % package)
        scrollToPosition = JavaMethod('int')
        setItemPrefetchEnabled = JavaMethod('boolean')

        # Orientation constants — assumed to match the androidx
        # LinearLayoutManager values; TODO confirm nesting/placement,
        # the original file's indentation was lost.
        HORIZONTAL = 0
        VERTICAL = 1
class StaggeredLayoutManager(RecylerView.LayoutManager):
    """Bridge proxy for a staggered grid layout manager.

    NOTE(review): androidx names this class StaggeredGridLayoutManager;
    confirm the native name '%s.StaggeredLayoutManager' resolves on the
    Java side.
    """
    __nativeclass__ = set_default('%s.StaggeredLayoutManager' % package)
    # Native constructor takes two int arguments.
    __signature__ = set_default(('int', 'int'))
    setOrientation = JavaMethod('int')
    setSpanCount = JavaMethod('int')
class LinearLayoutManager(RecylerView.LayoutManager):
    """Bridge proxy for androidx LinearLayoutManager."""
    __nativeclass__ = set_default('%s.LinearLayoutManager' % package)
    # Native constructor: (Context, int, boolean).
    __signature__ = set_default(('android.content.Context', 'int', 'boolean'))
    scrollToPositionWithOffset = JavaMethod('int', 'int')
    setInitialPrefetchItemCount = JavaMethod('int')
    setOrientation = JavaMethod('int')
    setRecycleChildrenOnDetach = JavaMethod('boolean')
    setReverseLayout = JavaMethod('boolean')
    setSmoothScrollbarEnabled = JavaMethod('boolean')
    setStackFromEnd = JavaMethod('boolean')
class GridLayoutManager(LinearLayoutManager):
    """Bridge proxy for ``androidx.recyclerview.widget.GridLayoutManager``.

    Constructed as ``(context, spanCount: int, orientation: int,
    reverseLayout: boolean)``.
    """
    __nativeclass__ = set_default('%s.GridLayoutManager' % package)
    __signature__ = set_default(('android.content.Context', 'int', 'int',
                                 'boolean'))
    setSpanCount = JavaMethod('int')
# class BridgedListAdapter(JavaBridgeObject):
# """ An adapter that implements a recycleview pattern.
#
# """
# __nativeclass__ = set_default(
# 'com.codelv.enamlnative.adapters.BridgedListAdapter')
# setListView = JavaMethod('android.widget.ListView',
# 'com.codelv.enamlnative.adapters.'
# 'BridgedListAdapter$BridgedListAdapterListener')
# setCount = JavaMethod('int')
# setRecycleViews = JavaMethod('[Landroid.view.View;')
# clearRecycleViews = JavaMethod()
#
# #: BridgedListAdapterListener API
# onRecycleView = JavaCallback('int', 'int', 'int')
# onVisibleCountChanged = JavaCallback('int','int')
# onScrollStateChanged = JavaCallback('android.widget.AbsListView','int')
class BridgedRecyclerAdapter(JavaBridgeObject):
    """ An adapter that implements a recycleview pattern.

    Wraps the Java-side BridgedRecyclerAdapter which cycles a fixed pool of
    views over a (possibly much larger) list of items and notifies Python
    when a view must be re-bound to a new position.
    """
    __nativeclass__ = set_default(
        'com.codelv.enamlnative.adapters.BridgedRecyclerAdapter')
    # Constructor takes the RecyclerView it adapts.
    __signature__ = set_default(('%s.RecyclerView' % package,))

    # NOTE: "Recyle" (sic) matches the Java-side method name; do not rename.
    setRecyleListener = JavaMethod(
        'com.codelv.enamlnative.adapters.BridgedRecyclerAdapter'
        '$BridgedListAdapterListener'
    )
    setItemCount = JavaMethod('int')
    setRecycleViews = JavaMethod('[Landroid.view.View;')
    clearRecycleViews = JavaMethod()

    #: BridgedListAdapterListener API (callbacks invoked from Java)
    onRecycleView = JavaCallback('int', 'int')
    onVisibleCountChanged = JavaCallback('int', 'int')
    onScrollStateChanged = JavaCallback('android.widget.AbsListView', 'int')

    # Standard RecyclerView.Adapter change-notification methods.
    notifyDataSetChanged = JavaMethod()
    notifyItemChanged = JavaMethod('int')
    notifyItemInserted = JavaMethod('int')
    notifyItemRemoved = JavaMethod('int')
    notifyItemRangeChanged = JavaMethod('int', 'int')
    notifyItemRangeInserted = JavaMethod('int', 'int')
    notifyItemRangeRemoved = JavaMethod('int', 'int')
class AndroidListView(AndroidViewGroup, ProxyListView):
    """ An Android implementation of an Enaml ProxyListView.

    Backed by a RecyclerView and a BridgedRecyclerAdapter: the child
    AndroidListItem proxies act as a fixed pool of recyclable views that the
    adapter re-binds to item positions as the user scrolls.
    """

    #: A reference to the widget created by the proxy.
    widget = Typed(RecylerView)

    #: Reference to adapter
    adapter = Typed(BridgedRecyclerAdapter)

    #: Layout manager
    layout_manager = Instance(RecylerView.LayoutManager)

    def _get_list_items(self):
        # Only AndroidListItem children are recyclable view templates.
        return [c for c in self.children() if isinstance(c, AndroidListItem)]

    #: List items
    list_items = Property(lambda self: self._get_list_items(), cached=True)

    #: List mapping from index to view
    item_mapping = Dict()

    # -------------------------------------------------------------------------
    # Initialization API
    # -------------------------------------------------------------------------
    def create_widget(self):
        """ Create the underlying widget.
        """
        self.widget = RecylerView(self.get_context())

    def init_widget(self):
        """ Initialize the underlying widget.
        """
        super(AndroidListView, self).init_widget()
        d = self.declaration
        self.set_arrangement(d.arrangement)
        # w = self.widget
        # w.setOnItemClickListener(w.getId())
        # w.setOnItemLongClickListener(w.getId())
        # w.onItemClick.connect(self.on_item_click)
        # w.onItemLongClick.connect(self.on_item_long_click)
        #self.widget.setOnScrollListener(self.widget.getId())
        #self.widget.onScroll.connect(self.on_scroll)
        #: Selection listener
        #self.widget.setOnItemSelectedListener(self.widget.getId())
        #self.widget.onItemSelected.connect(self.on_item_selected)
        #self.widget.onNothingSelected.connect(self.on_nothing_selected)

    def get_declared_items(self):
        """ Override to do it manually

        Only the 'layout' attribute is forwarded; everything else is applied
        explicitly by this proxy.
        """
        for k, v in super(AndroidListView, self).get_declared_items():
            if k == 'layout':
                yield k, v
                break

    def init_layout(self):
        """ Initialize the underlying widget.

        Creates the bridged adapter, wires its recycle callback, pushes the
        initial item count, and installs the pool of recyclable views.
        """
        super(AndroidListView, self).init_layout()
        d = self.declaration
        w = self.widget
        # Prepare adapter
        adapter = self.adapter = BridgedRecyclerAdapter(w)
        # I'm sure this will make someone upset haha
        adapter.setRecyleListener(adapter.getId())
        adapter.onRecycleView.connect(self.on_recycle_view)
        #adapter.onVisibleCountChanged.connect(self.on_visible_count_changed)
        #adapter.onScrollStateChanged.connect(self.on_scroll_state_changed)
        self.set_items(d.items)
        w.setAdapter(adapter)
        #self.set_selected(d.selected)
        self.refresh_views()

    # -------------------------------------------------------------------------
    # BridgedListAdapterListener API
    # -------------------------------------------------------------------------
    def on_recycle_view(self, index, position):
        """ Update the item the view at the given index should display

        Called from Java when the recycled view at pool slot `index` must be
        re-bound to the item at `position`.
        """
        item = self.list_items[index]
        self.item_mapping[position] = item
        item.recycle_view(position)

    def on_scroll_state_changed(self, view, state):
        # Currently unused (callback connection is commented out above).
        pass

    # -------------------------------------------------------------------------
    # ProxyListView API
    # -------------------------------------------------------------------------
    def refresh_views(self, change=None):
        """ Set the views that the adapter will cycle through. """
        adapter = self.adapter
        # Set initial ListItem state
        item_mapping = self.item_mapping
        for i, item in enumerate(self.list_items):
            item_mapping[i] = item
            item.recycle_view(i)
        if adapter:
            adapter.clearRecycleViews()
            adapter.setRecycleViews(
                [encode(li.get_view()) for li in self.list_items])

    def set_items(self, items):
        """Push the total item count to the adapter and force a redraw."""
        adapter = self.adapter
        adapter.setItemCount(len(items))
        adapter.notifyDataSetChanged()

    @observe('declaration.items')
    def _on_items_changed(self, change):
        """ Observe container events on the items list and update the
        adapter appropriately.
        """
        if change['type'] != 'container':
            return
        op = change['operation']
        if op == 'append':
            i = len(change['value'])-1
            self.adapter.notifyItemInserted(i)
        elif op == 'insert':
            self.adapter.notifyItemInserted(change['index'])
        elif op in ('pop', '__delitem__'):
            self.adapter.notifyItemRemoved(change['index'])
        elif op == '__setitem__':
            self.adapter.notifyItemChanged(change['index'])
        elif op == 'extend':
            # Range insert starting where the old list ended.
            n = len(change['items'])
            i = len(change['value'])-n
            self.adapter.notifyItemRangeInserted(i, n)
        elif op in ('remove', 'reverse', 'sort'):
            # Reset everything for these
            self.adapter.notifyDataSetChanged()

    def set_arrangement(self, arrangement):
        """Install the layout manager matching `arrangement`.

        NOTE(review): if `arrangement` is not one of 'linear', 'grid', or
        'staggered', `manager` is unbound and this raises — presumably the
        declaration validates the value upstream; verify.
        """
        ctx = self.get_context()
        d = self.declaration
        reverse = False
        orientation = (
            LinearLayoutManager.VERTICAL if d.orientation == 'vertical'
            else LinearLayoutManager.HORIZONTAL)
        if arrangement == 'linear':
            manager = LinearLayoutManager(ctx, orientation, reverse)
        elif arrangement == 'grid':
            manager = GridLayoutManager(ctx, d.span_count, orientation,
                                        reverse)
        elif arrangement == 'staggered':
            manager = StaggeredLayoutManager(d.span_count, orientation)
        self.layout_manager = manager
        self.widget.setLayoutManager(manager)

    def set_span_count(self, count):
        """Update span count on grid/staggered managers (no-op before init)."""
        if not self.layout_manager:
            return
        self.layout_manager.setSpanCount(count)

    def set_orientation(self, orientation):
        """Update scroll orientation on the installed layout manager."""
        if not self.layout_manager:
            return
        orientation = (
            LinearLayoutManager.VERTICAL if orientation == 'vertical'
            else LinearLayoutManager.HORIZONTAL)
        self.layout_manager.setOrientation(orientation)

    def set_selected(self, index):
        # NOTE(review): RecylerView declares no `setSelection` bridge method
        # above — confirm this call is handled Java-side or dead code.
        self.widget.setSelection(index)

    def scroll_to(self, x, y):
        """Scroll the RecyclerView to the given pixel offsets."""
        self.widget.scrollTo(x, y)

    def scroll_to_position(self, position):
        """Scroll so the item at `position` becomes visible."""
        self.widget.scrollToPosition(position)
class AndroidListItem(AndroidToolkitObject, ProxyListItem):
    """Placeholder proxy: the list item owns no native widget of its own.

    Its first child widget is the view the adapter recycles; `recycle_view`
    re-binds the declaration to a new item position.
    """

    # -------------------------------------------------------------------------
    # Initialization API
    # -------------------------------------------------------------------------
    def create_widget(self):
        """ The list item has no widget, it's a placeholder. """

    def init_widget(self):
        """ The list item has no widget, it's a placeholder. """

    def init_layout(self):
        """ The list item has no widget, it's a placeholder. """

    # -------------------------------------------------------------------------
    # ListAdapter API
    # -------------------------------------------------------------------------
    def recycle_view(self, position):
        """ Tell the view to render the item at the given position """
        d = self.declaration
        items = d.parent.items
        in_range = position < len(items)
        d.index = position if in_range else -1
        d.item = items[position] if in_range else None

    def get_view(self):
        """ Return the view for this item (first child widget) """
        return next(iter(self.child_widgets()), None)
| StarcoderdataPython |
3409955 | """Ray-Horovod Job unit tests.
This is currently not run on the Ray CI.
"""
import os
import sys
import socket
import pytest
import ray
from ray import services
import torch
from horovod.common.util import gloo_built
from horovod.ray.runner import (BaseHorovodWorker, NodeColocator, Coordinator,
MiniSettings, RayExecutor)
sys.path.append(os.path.dirname(__file__))
@pytest.fixture
def ray_start_2_cpus():
    """Yield a local 2-CPU Ray cluster, shutting it down after the test."""
    info = ray.init(num_cpus=2)
    yield info
    # Teardown: runs once the consuming test finishes.
    ray.shutdown()
@pytest.fixture
def ray_start_4_cpus():
    """Yield a local 4-CPU Ray cluster, shutting it down after the test."""
    info = ray.init(num_cpus=4)
    yield info
    # Teardown: runs once the consuming test finishes.
    ray.shutdown()
@pytest.fixture
def ray_start_6_cpus():
    """Yield a local 6-CPU Ray cluster, shutting it down after the test."""
    info = ray.init(num_cpus=6)
    yield info
    # Teardown: runs once the consuming test finishes.
    ray.shutdown()
@pytest.fixture
def ray_start_4_cpus_4_gpus():
    """Yield a local Ray cluster with 4 CPUs and 4 (visible) GPUs.

    BUGFIX: the previous teardown unconditionally deleted
    CUDA_VISIBLE_DEVICES, destroying any value that was set before the test
    ran. Save and restore it instead.
    """
    prev = os.environ.get("CUDA_VISIBLE_DEVICES")
    os.environ["CUDA_VISIBLE_DEVICES"] = "0,1,2,3"
    address_info = ray.init(num_cpus=4, num_gpus=4)
    yield address_info
    # The code after the yield runs as teardown code.
    ray.shutdown()
    if prev is None:
        del os.environ["CUDA_VISIBLE_DEVICES"]
    else:
        os.environ["CUDA_VISIBLE_DEVICES"] = prev
def check_resources(original_resources):
    """Poll until Ray's available resources match `original_resources`.

    Retries up to 10 times with a 0.5 s sleep between attempts (~5 s total),
    printing the current resources on each miss. Returns True on a match,
    False if the resources never converge.
    """
    import time  # hoisted out of the loop (was re-imported every iteration)

    for _ in range(10):  # loop index was unused; reversed() was pointless
        if original_resources == ray.available_resources():
            return True
        print(ray.available_resources())
        time.sleep(0.5)
    return False
def test_coordinator_registration():
    """Coordinator assigns correct node/local ranks for 12 ranks on 3 hosts."""
    settings = MiniSettings()
    coord = Coordinator(settings)
    assert coord.world_size == 0
    assert coord.hoststring == ""
    ranks = list(range(12))
    # Register ranks round-robin: host "a" gets 0,3,6,9; "b" gets 1,4,7,10; ...
    for i, hostname in enumerate(["a", "b", "c"]):
        for r in ranks:
            if r % 3 == i:
                coord.register(hostname, world_rank=r)
    rank_to_info = coord.finalize_registration()
    assert len(rank_to_info) == len(ranks)
    # 3 hosts, 4 ranks per host.
    assert all(info["NODE_WORLD_SIZE"] == 3 for info in rank_to_info.values())
    assert {info["NODE_WORLD_RANK"]
            for info in rank_to_info.values()} == {0, 1, 2}
    assert all(info["LOCAL_SIZE"] == 4 for info in rank_to_info.values())
    assert {info["LOCAL_RANK"]
            for info in rank_to_info.values()} == {0, 1, 2, 3}
def test_colocator(tmpdir, ray_start_6_cpus):
    """NodeColocator places all of its workers on a single host."""
    SetColocator = NodeColocator.options(num_cpus=4)
    colocator = SetColocator.remote(
        node_rank=4, num_slots=4, world_size=5, use_gpu=False)
    colocator.create_workers.remote()
    worker_handles = ray.get(colocator.get_workers.remote())
    # All workers must report the same hostname.
    assert len(set(ray.get(
        [h.hostname.remote() for h in worker_handles]))) == 1

    # The colocator reserved 4 of the 6 CPUs, leaving 2 available.
    resources = ray.available_resources()
    ip_address = services.get_node_ip_address()
    assert resources.get("CPU", 0) == 2, resources
    # TODO: https://github.com/horovod/horovod/issues/2438
    # assert resources.get(f"node:{ip_address}", 0) == 1 - 4 * 0.01
@pytest.mark.skipif(
    torch.cuda.device_count() < 4, reason='GPU colocator test requires 4 GPUs')
@pytest.mark.skipif(
    not torch.cuda.is_available(), reason='GPU colocator test requires CUDA')
def test_colocator_gpu(tmpdir, ray_start_4_cpus_4_gpus):
    """GPU colocator claims all CPUs/GPUs and shares one CUDA device set."""
    SetColocator = NodeColocator.options(num_cpus=4, num_gpus=4)
    colocator = SetColocator.remote(
        node_rank=0, num_slots=4, world_size=4, use_gpu=True)
    colocator.create_workers.remote()
    worker_handles = ray.get(colocator.get_workers.remote())
    # All workers must land on the same host.
    assert len(set(ray.get(
        [h.hostname.remote() for h in worker_handles]))) == 1

    # Every CPU and GPU should be reserved by the colocator.
    resources = ray.available_resources()
    ip_address = services.get_node_ip_address()
    assert resources.get("CPU", 0) == 0, resources
    assert resources.get("GPU", 0) == 0, resources
    # TODO: https://github.com/horovod/horovod/issues/2438
    # assert resources.get(f"node:{ip_address}", 0) == 1 - 4 * 0.01

    # All workers share the same 4-device CUDA_VISIBLE_DEVICES string.
    all_envs = ray.get([h.env_vars.remote() for h in worker_handles])
    assert len({ev["CUDA_VISIBLE_DEVICES"] for ev in all_envs}) == 1
    assert len(all_envs[0]["CUDA_VISIBLE_DEVICES"].split(",")) == 4
def test_horovod_mixin(ray_start_2_cpus):
    """BaseHorovodWorker reports its hostname and stores env vars as strings."""
    class Test(BaseHorovodWorker):
        pass

    assert Test().hostname() == socket.gethostname()
    actor = ray.remote(BaseHorovodWorker).remote()
    DUMMY_VALUE = 1123123
    actor.update_env_vars.remote({"TEST": DUMMY_VALUE})
    # Env var values are stringified by the worker.
    assert ray.get(actor.env_vars.remote())["TEST"] == str(DUMMY_VALUE)
def test_local(ray_start_4_cpus):
    """A single-host RayExecutor runs all slots on one machine and
    releases its resources on shutdown."""
    original_resources = ray.available_resources()
    setting = RayExecutor.create_settings(timeout_s=30)
    hjob = RayExecutor(setting, num_hosts=1, num_slots=4)
    hjob.start()
    hostnames = hjob.execute(lambda _: socket.gethostname())
    assert len(set(hostnames)) == 1, hostnames
    hjob.shutdown()
    # All Ray resources should be returned after shutdown.
    assert check_resources(original_resources)
@pytest.mark.skipif(
    not gloo_built(), reason='Gloo is required for Ray integration')
def test_ray_init(ray_start_4_cpus):
    """hvd.init() inside RayExecutor yields 4 distinct ranks."""
    original_resources = ray.available_resources()

    def simple_fn(worker):
        import horovod.torch as hvd
        hvd.init()
        return hvd.rank()

    setting = RayExecutor.create_settings(timeout_s=30)
    hjob = RayExecutor(
        setting, num_hosts=1, num_slots=4, use_gpu=torch.cuda.is_available())
    hjob.start()
    result = hjob.execute(simple_fn)
    # One unique rank per slot.
    assert len(set(result)) == 4
    hjob.shutdown()
    assert check_resources(original_resources)
@pytest.mark.skipif(
    not gloo_built(), reason='Gloo is required for Ray integration')
def test_ray_exec_func(ray_start_4_cpus):
    """RayExecutor.run forwards args to the function on every worker."""
    def simple_fn(num_epochs):
        import horovod.torch as hvd
        hvd.init()
        return hvd.rank() * num_epochs

    setting = RayExecutor.create_settings(timeout_s=30)
    hjob = RayExecutor(
        setting, num_hosts=1, num_slots=4, use_gpu=torch.cuda.is_available())
    hjob.start()
    result = hjob.run(simple_fn, args=[0])
    # rank * 0 == 0 on every worker, so all results collapse to one value.
    assert len(set(result)) == 1
    hjob.shutdown()
def _train(batch_size=32, batch_per_iter=10):
    """Run a tiny synthetic Horovod training loop (smoke-test helper).

    Builds a 2->2 linear model on fake data, wraps SGD in Horovod's
    DistributedOptimizer, broadcasts initial state from rank 0, then times
    `batch_per_iter` optimization steps. The timing result is discarded; the
    call exists only to exercise the distributed machinery.
    """
    # Removed unused imports: torch.backends.cudnn, torchvision.models and
    # torch.utils.data.distributed were imported but never referenced.
    import torch.nn.functional as F
    import torch.optim as optim
    import horovod.torch as hvd
    import timeit

    hvd.init()

    # Set up fixed fake data
    data = torch.randn(batch_size, 2)
    target = torch.LongTensor(batch_size).random_() % 2

    model = torch.nn.Sequential(torch.nn.Linear(2, 2))
    optimizer = optim.SGD(model.parameters(), lr=0.01)

    # Horovod: wrap optimizer with DistributedOptimizer.
    optimizer = hvd.DistributedOptimizer(
        optimizer, named_parameters=model.named_parameters())

    # Horovod: broadcast parameters & optimizer state.
    hvd.broadcast_parameters(model.state_dict(), root_rank=0)
    hvd.broadcast_optimizer_state(optimizer, root_rank=0)

    def benchmark_step():
        optimizer.zero_grad()
        output = model(data)
        loss = F.cross_entropy(output, target)
        loss.backward()
        optimizer.step()

    # Run for the side effect only; the elapsed time was never used.
    timeit.timeit(benchmark_step, number=batch_per_iter)
@pytest.mark.skipif(
    not gloo_built(), reason='Gloo is required for Ray integration')
def test_horovod_train(ray_start_4_cpus):
    """End-to-end smoke test: _train completes on every Ray worker."""
    def simple_fn(worker):
        _train()
        return True

    setting = RayExecutor.create_settings(timeout_s=30)
    hjob = RayExecutor(
        setting, num_hosts=1, num_slots=4, use_gpu=torch.cuda.is_available())
    hjob.start()
    result = hjob.execute(simple_fn)
    assert all(result)
    hjob.shutdown()
if __name__ == "__main__":
    # pytest and sys are already imported at module scope; the redundant
    # re-imports were removed.
    sys.exit(pytest.main(["-v", __file__]))
| StarcoderdataPython |
3499686 | <reponame>Metnew/neural-finance
import json
from flask import request, jsonify, g
import flask_restful as restful
from server import mongo
from datetime import timedelta, datetime
import numpy as np
import csv
def to_float(a):
    """Parse a numeric string that uses a comma as the decimal separator."""
    normalized = a.replace(',', '.')
    return float(normalized)
class Finance_Data(restful.Resource):
    """REST resource that imports minute-level finance CSV data into MongoDB.

    GET re-imports the GSPC and DJI datasets, replacing any existing
    `finance_data_<name>` collections.
    """

    def get(self):
        def store_in_mongo(name, *url):
            """Load a semicolon-delimited CSV into `finance_data_<name>`.

            Defaults to ./csv/<name>.csv; an explicit path may be passed as
            the single extra positional argument.
            """
            # get collection e.g. finance_data_GSPC
            finance_data = mongo.db['finance_data_' + name]
            finance_data.remove()  # drop previously imported documents

            # BUGFIX: *url is a tuple; the old code passed the whole tuple
            # to open() when a path was supplied. Unpack the first element.
            path = url[0] if url else './csv/' + name + '.csv'

            arr = []  # batch buffer for bulk inserts
            with open(path, 'r') as file_data:
                reader = csv.reader(file_data, delimiter=";")
                next(reader, None)  # skip header row
                for row in reader:
                    timestamp = datetime.strptime(
                        row[2], '%Y-%m-%d %H:%M:%S').timestamp()
                    open_price = to_float(row[3])
                    high_price = to_float(row[4])
                    low_price = to_float(row[5])
                    close_price = to_float(row[6])
                    volume = to_float(row[7])
                    minute_data = Minute_data(
                        open_price=open_price, timestamp=timestamp,
                        volume=volume, low_price=low_price,
                        close_price=close_price, high_price=high_price)
                    arr.append(minute_data.__dict__)
                    # Insert in batches of 1000: per-document inserts are
                    # far too slow for minute data.
                    if len(arr) == 1000:
                        finance_data.insert_many(arr)
                        arr = []
            # BUGFIX: flush the trailing partial batch — previously any rows
            # after the last full 1000 were silently dropped.
            if arr:
                finance_data.insert_many(arr)

        store_in_mongo(name="GSPC")
        store_in_mongo(name="DJI")
class Minute_data:
    """Simple attribute bag holding one minute of market data."""

    def __init__(self, **kwargs):
        # Copy every keyword argument onto the instance verbatim.
        self.__dict__.update(kwargs)
3318672 | <reponame>avik-pal/social-driving<filename>sdriving/environments/highway.py<gh_stars>100-1000
import math
from collections import deque
from itertools import product
import numpy as np
import torch
from gym.spaces import Box, Discrete, Tuple
from sdriving.environments.base_env import BaseMultiAgentDrivingEnvironment
from sdriving.tsim import (
BatchedVehicle,
BicycleKinematicsModel,
Pedestrians,
Road,
RoadNetwork,
SplineModel,
World,
angle_normalize,
generate_intersection_world_4signals,
intervehicle_collision_check,
)
class MultiAgentHighwayBicycleKinematicsModel(
    BaseMultiAgentDrivingEnvironment
):
    """Multi-agent straight-highway driving environment.

    Vehicles spawn near one end of a single straight road and must reach a
    destination x-position further along it. Observations are a stacked
    history of (ego features, lidar); actions are continuous
    (steering, acceleration) pairs, with acceleration scaled by a random
    per-agent "acceleration rating".
    """

    def __init__(
        self,
        npoints: int = 360,      # lidar rays per vehicle
        horizon: int = 200,      # max episode length (steps)
        timesteps: int = 25,
        history_len: int = 5,    # frames stacked into each observation
        nagents: int = 1,
        device: torch.device = torch.device("cpu"),
        lidar_noise: float = 0.0,  # probability of dropping a lidar return
        lateral_noise_variance: float = 0.8,
    ):
        self.npoints = npoints
        self.history_len = history_len
        self.device = device
        world, config = self.generate_world_without_agents()
        # Copy road geometry (length, width) onto the instance.
        for k, v in config.items():
            setattr(self, k, v)
        super().__init__(world, nagents, horizon, timesteps, device)
        # Observation-history queues; created in reset().
        self.queue1 = None
        self.queue2 = None
        self.lidar_noise = lidar_noise
        # Mask excluding each vehicle's own 4 corners from the pairwise
        # collision test (each vehicle contributes 4 rows/cols).
        bool_buffer = torch.ones(self.nagents * 4, self.nagents * 4)
        for i in range(0, self.nagents * 4, 4):
            bool_buffer[i : (i + 4), i : (i + 4)] -= 1
        self.bool_buffer = bool_buffer.bool()
        self.lateral_noise_variance = lateral_noise_variance

    def generate_world_without_agents(self):
        """Build a World containing a single 250 x 25 straight road.

        Returns (world, {"length": ..., "width": ...}).
        """
        network = RoadNetwork()
        length = 250.0
        width = 25.0
        network.add_road(
            Road(
                f"highway",
                torch.zeros(1, 2),
                length,
                width,
                torch.zeros(1, 1),
                can_cross=[False] * 4,
                has_endpoints=[True, False, True, False],
            )
        )
        return (
            World(
                network,
                xlims=(-length / 2 - 10, length / 2 + 10),
                ylims=(-length / 2 - 10, length / 2 + 10),
            ),
            {"length": length, "width": width},
        )

    def get_observation_space(self):
        """(ego features x history, lidar x history).

        Ego features per frame: inverse distance-to-goal, normalized speed,
        acceleration rating, velocity rating.
        """
        return Tuple(
            [
                Box(
                    low=np.array([0.0, -1.0, 0.5, 0.5] * self.history_len),
                    high=np.array([1.0, 1.0, 1.0, 1.0] * self.history_len),
                ),
                Box(0.0, np.inf, shape=(self.npoints * self.history_len,)),
            ]
        )

    def get_action_space(self):
        """Continuous (steering, normalized acceleration)."""
        return Box(low=np.array([-0.1, -1.0]), high=np.array([0.1, 1.0]))

    def get_state(self):
        """Return ((ego_obs_history, lidar_history), agent_names)."""
        a_ids = self.get_agent_ids_list()
        # Absolute x-distance of each agent from its destination.
        dist = torch.cat(
            [
                (v.destination[:, 0:1] - v.position[:, 0:1]).abs()
                for v in self.agents.values()
            ]
        )
        inv_dist = 1 / dist.clamp(min=1.0)
        speed = torch.cat([self.agents[v].speed for v in a_ids])
        obs = torch.cat(
            [
                inv_dist,
                speed / self.dynamics.v_lim,
                self.accln_rating,
                self.vel_rating,
            ],
            -1,
        )
        # Inverse-range lidar; optionally drop returns to simulate noise.
        lidar = 1 / self.world.get_lidar_data_all_vehicles(self.npoints)

        if self.lidar_noise > 0:
            lidar *= torch.rand_like(lidar) > self.lidar_noise

        if self.history_len > 1:
            # Pad the queues with the current frame until full, then append.
            while len(self.queue1) <= self.history_len - 1:
                self.queue1.append(obs)
                self.queue2.append(lidar)
            self.queue1.append(obs)
            self.queue2.append(lidar)
            return (
                (
                    torch.cat(list(self.queue1), dim=-1),
                    torch.cat(list(self.queue2), dim=-1),
                ),
                self.agent_names,
            )
        else:
            return (obs, lidar), self.agent_names

    def vehicle_collision_check(self, vehicle):
        """Collision test hook.

        The lateral-position-noise injection is currently disabled (kept
        commented out below); this simply defers to the vehicle's own check.
        """
        # if self.lateral_noise_variance == 0.0:
        #     return vehicle.collision_check()
        # Hopefully this doesn't get messed up by some inplace operation
        # position_dup = vehicle.position.clone()
        # position_noise = (
        #     torch.rand_like(position_dup[:, 1]) * self.lateral_noise_variance
        # )
        # vehicle.position[:, 1] += position_noise
        # vehicle.cached_coordinates = False
        collision = vehicle.collision_check()
        # vehicle.position = position_dup
        # vehicle.cached_coordinates = False
        return collision

    def get_reward(self, new_collisions: torch.Tensor, action: torch.Tensor):
        """Per-agent reward: distance shaping, goal bonus, collision penalty.

        Also updates completion/collision bookkeeping as a side effect
        (freezes destinations of finished agents).
        """
        a_ids = self.get_agent_ids_list()

        # Distance from destination
        distances = torch.cat(
            [
                v.destination[:, 0:1] - v.position[:, 0:1]
                for v in self.agents.values()
            ]
        )

        # Agent Speeds
        speeds = torch.cat([self.agents[v].speed for v in a_ids])

        # Goal Reach Bonus (only for agents newly past their goal x).
        reached_goal = distances <= 0.0
        distances = distances.abs()
        not_completed = ~self.completion_vector
        goal_reach_bonus = (not_completed * reached_goal).float()
        self.completion_vector = self.completion_vector + reached_goal
        # Pin completed agents' destinations to their current position.
        for v in a_ids:
            self.agents[v].destination = self.agents[
                v
            ].position * self.completion_vector + self.agents[
                v
            ].destination * (
                ~self.completion_vector
            )
        # Normalize remaining distance by the initial distance.
        distances *= not_completed / self.original_distances

        # Collision: penalize only newly-colliding agents, scaled by how
        # early in the episode (and how far from goal) the crash happened.
        new_collisions = ~self.collision_vector * new_collisions
        penalty = (
            new_collisions.float()
            + new_collisions
            * distances
            * (self.horizon - self.nsteps - 1)
            / self.horizon
        )

        self.collision_vector += new_collisions

        return (
            -distances * ~self.collision_vector / self.horizon
            - (speeds / 8.0).abs() * self.completion_vector / self.horizon
            - penalty
            + goal_reach_bonus
        )

    def add_vehicles_to_world(self):
        """Spawn a convoy of agents spaced 10 m apart near the road start."""
        dims = torch.as_tensor([[4.48, 2.2]]).repeat(self.nagents, 1)
        self.max_accln = 3.0
        self.max_velocity = 16.0
        # x-offsets 0, 10, 20, ... for successive agents; y stays 0.
        diffs = torch.cumsum(
            torch.as_tensor([0.0] + [10.0] * (self.nagents - 1)).unsqueeze(1),
            dim=0,
        )
        diffs = torch.cat([diffs, torch.zeros(self.nagents, 1)], dim=-1)
        spos = torch.as_tensor([[-self.length / 2 + 30.0, 0.0]]) + diffs
        epos = torch.as_tensor([[self.length / 2 - 50.0, 0.0]]).repeat(
            self.nagents, 1
        )
        orient = torch.zeros(self.nagents, 1)
        dorient = torch.zeros(self.nagents, 1)

        vehicle = BatchedVehicle(
            position=spos,
            orientation=orient,
            destination=epos,
            dest_orientation=dorient,
            dimensions=dims,
            initial_speed=torch.zeros(self.nagents, 1),
            name="agent",
        )
        vehicle.add_bool_buffer(self.bool_buffer)

        # Random per-agent ratings in [0.5, 1.0].
        self.accln_rating = (torch.rand(self.nagents, 1) + 1) * 0.5
        self.vel_rating = self.accln_rating

        # Just for coloring the vehicles
        # NOTE(review): uses self.actual_nagents — presumably defined by the
        # base environment; verify it matches nagents here.
        self.world.initialize_communication_channel(self.actual_nagents, 1)
        self.world.broadcast_data(self.accln_rating, vehicle.position)

        self.world.add_vehicle(vehicle, False)
        self.store_dynamics(vehicle)
        self.agents[vehicle.name] = vehicle

        self.original_distances = vehicle.distance_from_destination()

    def store_dynamics(self, vehicle):
        """Create bicycle-kinematics dynamics with rating-scaled speed caps."""
        self.dynamics = BicycleKinematicsModel(
            dim=vehicle.dimensions[:, 0],
            v_lim=self.vel_rating[:, 0] * self.max_velocity,
        )

    def reset(self):
        """Rebuild the world and agents and clear observation history."""
        # Keep the environment fixed for now
        world, config = self.generate_world_without_agents()
        for k, v in config.items():
            setattr(self, k, v)
        self.world = world
        self.add_vehicles_to_world()

        self.queue1 = deque(maxlen=self.history_len)
        self.queue2 = deque(maxlen=self.history_len)

        return super().reset()

    def discrete_to_continuous_actions(self, actions: torch.Tensor):
        """Scale the acceleration component by max_accln * per-agent rating.

        (Steering, column 0, passes through unchanged.)
        """
        self.world.broadcast_data(
            self.accln_rating, self.agents["agent"].position
        )
        actions[:, 1:] = (
            actions[:, 1:]
            * self.max_accln
            * self.accln_rating.to(actions.device)
        )
        return actions
class MultiAgentHighwayBicycleKinematicsDiscreteModel(
    MultiAgentHighwayBicycleKinematicsModel
):
    """Discrete-action variant: actions index a fixed (steering, accln) grid."""

    def configure_action_space(self):
        """Build the lookup table of (steering, normalized accln) pairs.

        5 steering values in [-0.1, 0.1] x 9 acceleration values in [-1, 1].
        """
        self.max_accln = 3.0
        self.max_steering = 0.1
        actions = list(
            product(
                torch.arange(
                    -self.max_steering, self.max_steering + 0.01, 0.05
                ),
                torch.arange(-1, 1 + 0.05, 0.25),
            )
        )
        self.action_list = torch.as_tensor(actions)

    def get_action_space(self):
        """Return a Discrete space over the action table.

        Side effect: stores `normalization_factor` — presumably used by the
        training code to scale outputs; verify against callers.
        """
        self.normalization_factor = torch.as_tensor(
            [self.max_steering, self.max_accln]
        )
        return Discrete(self.action_list.size(0))

    def discrete_to_continuous_actions(self, actions: torch.Tensor):
        """Map indices to (steering, accln); scale accln by rating."""
        self.world.broadcast_data(
            self.accln_rating, self.agents["agent"].position
        )
        actions = self.action_list[actions]
        actions[:, 1:] = (
            actions[:, 1:]
            * self.max_accln
            * self.accln_rating.to(actions.device)
        )
        return actions
class MultiAgentHighwayPedestriansFixedTrackDiscreteModel(
    MultiAgentHighwayBicycleKinematicsDiscreteModel
):
    """Shorter highway with a pedestrian crosswalk; acceleration-only actions.

    Steering is fixed to zero (vehicles follow their lane); the discrete
    action only chooses an acceleration in [-3, 3].
    """

    def configure_action_space(self):
        """Action table: (steering=0, accln) for accln in [-3, 3] step 0.25."""
        self.max_accln = 3.0
        self.max_steering = 0.1
        actions = torch.arange(
            -self.max_accln, self.max_accln + 0.05, 0.25
        ).unsqueeze(1)
        self.action_list = torch.cat(
            [torch.zeros(actions.size(0), 1), actions], dim=-1
        )

    def discrete_to_continuous_actions(self, actions: torch.Tensor):
        """Map indices straight to (0, accln) — no rating scaling here."""
        self.world.broadcast_data(
            self.accln_rating, self.agents["agent"].position
        )
        return self.action_list[actions]

    def generate_world_without_agents(self):
        """Same single straight road as the base class, but 140 m long."""
        network = RoadNetwork()
        length = 140.0
        width = 25.0
        network.add_road(
            Road(
                f"highway",
                torch.zeros(1, 2),
                length,
                width,
                torch.zeros(1, 1),
                can_cross=[False] * 4,
                has_endpoints=[True, False, True, False],
            )
        )
        return (
            World(
                network,
                xlims=(-length / 2 - 10, length / 2 + 10),
                ylims=(-length / 2 - 10, length / 2 + 10),
            ),
            {"length": length, "width": width},
        )

    def add_vehicles_to_world(self):
        """Spawn agents at random collision-free positions plus pedestrians.

        Vehicles start in a rectangle near the left end of the road and all
        share the destination x = 0.3 * length.
        """
        self.max_velocity = 10.0
        vehicle = None
        dims = torch.as_tensor([[4.48, 2.2]])
        # Spawn rectangle corners (d1/d2) and common destination.
        d1 = torch.as_tensor([[-self.length * 0.45, self.width * 0.375]])
        d2 = torch.as_tensor([[-self.length * 0.3, -self.width * 0.375]])
        epos = torch.as_tensor([[self.length * 0.3, 0.0]])
        # NOTE(review): orient and dorient alias the same tensor here —
        # harmless if BatchedVehicle copies them, but verify.
        orient = dorient = torch.zeros(1, 1)
        for _ in range(self.actual_nagents):
            successful_placement = False
            while not successful_placement:
                # Sample a uniform position inside the spawn rectangle.
                spos = torch.rand(1, 2) * (d1 - d2) + d2
                if vehicle is None:
                    # First vehicle: placed unconditionally.
                    vehicle = BatchedVehicle(
                        position=spos,
                        orientation=orient,
                        destination=epos,
                        dest_orientation=dorient,
                        dimensions=dims,
                        initial_speed=torch.zeros(1, 1),
                        name="agent",
                    )
                    break
                else:
                    # add_vehicle returns False on overlap; retry until free.
                    successful_placement = vehicle.add_vehicle(
                        position=spos,
                        orientation=orient,
                        destination=epos,
                        dest_orientation=dorient,
                        dimensions=dims,
                        initial_speed=torch.zeros(1, 1),
                    )
        vehicle.add_bool_buffer(self.bool_buffer)

        # Random accln rating in [0.5, 1.0]; all agents get full speed cap.
        self.accln_rating = (torch.rand(self.nagents, 1) + 1) * 0.5
        self.vel_rating = torch.ones_like(self.accln_rating)

        # Just for coloring the vehicles
        self.world.initialize_communication_channel(self.actual_nagents, 1)
        self.world.broadcast_data(self.accln_rating, vehicle.position)

        self.world.add_vehicle(vehicle, False)
        self.store_dynamics(vehicle)
        self.agents[vehicle.name] = vehicle

        self.original_distances = vehicle.distance_from_destination()

        # Pedestrians on the crosswalk (x in [-10, 10]), each spawned with
        # probability 0.9 in the lower half of the road, walking upward.
        _pos = []
        # dims = torch.ones(10, 2) * 0.8
        # orientation = torch.ones(10, 1) * math.pi / 2
        for i in range(10):
            if torch.rand(1) < 0.1:
                continue
            pos = torch.zeros(1, 2)
            # CrossWalk is from -10.0 to 10.0
            pos[0, 0] = (torch.rand(1) * 2.0 - 1.0) * 10.0
            pos[0, 1] = -torch.rand(1) * self.width / 2
            _pos.append(pos)

        pedestrians = Pedestrians(
            torch.cat(_pos),
            torch.ones(len(_pos), 1) * 0.8,       # size
            torch.ones(len(_pos), 1) * math.pi / 2,  # heading: straight up
            torch.rand(len(_pos), 1) + 1.0,       # speed in [1, 2)
        )
        self.world.add_object(pedestrians)
class MultiAgentHighwaySplineAccelerationDiscreteModel(
    MultiAgentHighwayBicycleKinematicsModel
):
    """Two-stage variant: first pick a lateral spline track, then control
    acceleration along it with discrete actions.

    Stage 0 (once per episode): a continuous action chooses the lateral
    offset of the spline the vehicle will follow. Stage 1 (every step): a
    discrete action chooses the acceleration along that spline.
    """

    def configure_action_space(self):
        """Discrete acceleration table over [-3, 3] in 0.25 steps."""
        self.max_accln = 3.0
        self.action_list = torch.arange(
            -self.max_accln, self.max_accln + 0.05, step=0.25
        ).unsqueeze(1)

    def get_observation_space(self):
        """(stage-0 obs: accln rating, stage-1 obs: (ego history, lidar))."""
        return (
            Box(low=np.array([0.5]), high=np.array([1.0])),
            Tuple(
                [
                    Box(
                        low=np.array([0.0, -1.0] * self.history_len),
                        high=np.array([1.0, 1.0] * self.history_len),
                    ),
                    Box(0.0, np.inf, shape=(self.npoints * self.history_len,)),
                ]
            ),
        )

    def get_action_space(self):
        """(stage-0: continuous lateral offset, stage-1: discrete accln)."""
        return (
            Box(low=np.array([-0.75]), high=np.array([0.75])),
            Discrete(self.action_list.size(0)),
        )

    def discrete_to_continuous_actions(self, action: torch.Tensor):
        """Stage-1 mapping: index -> accln, scaled by max_accln * rating."""
        self.world.broadcast_data(
            self.accln_rating, self.agents["agent"].position
        )
        action = self.action_list[action]
        return action * self.max_accln * self.accln_rating.to(action.device)

    def discrete_to_continuous_actions_v2(self, action: torch.Tensor):
        """Stage-0 mapping: the lateral-offset action is used as-is."""
        return action

    def _get_spline_state(self):
        """Stage-0 observation: only the per-agent acceleration rating."""
        self.got_spline_state = True
        return self.accln_rating, self.agent_names

    def get_state(self):
        """Return stage-0 state once per episode, then stage-1 states.

        Stage-1 ego features per frame: inverse distance-to-goal and
        normalized speed (no ratings, unlike the base class).
        """
        if not self.got_spline_state:
            return self._get_spline_state()

        a_ids = self.get_agent_ids_list()
        dist = torch.cat(
            [
                (v.destination[:, 0:1] - v.position[:, 0:1]).abs()
                for v in self.agents.values()
            ]
        )
        inv_dist = 1 / dist.clamp(min=1.0)
        speed = torch.cat([self.agents[v].speed for v in a_ids])

        obs = torch.cat([inv_dist, speed / self.dynamics.v_lim], -1)
        lidar = 1 / self.world.get_lidar_data_all_vehicles(self.npoints)

        if self.lidar_noise > 0:
            lidar *= torch.rand_like(lidar) > self.lidar_noise

        if self.history_len > 1:
            # Pad the history with the current frame until the queue is full.
            while len(self.queue1) <= self.history_len - 1:
                self.queue1.append(obs)
                self.queue2.append(lidar)
            self.queue1.append(obs)
            self.queue2.append(lidar)
            return (
                (
                    torch.cat(list(self.queue1), dim=-1),
                    torch.cat(list(self.queue2), dim=-1),
                ),
                self.agent_names,
            )
        else:
            return (obs, lidar), self.agent_names

    @torch.no_grad()
    def step(
        self,
        stage: int,  # Possible Values [0, 1]
        action: torch.Tensor,
        render: bool = False,
        **render_kwargs,
    ):
        """Stage 0: build the spline dynamics from the lateral action.
        Stage 1: delegate to the normal environment step.
        """
        assert stage in [0, 1]

        if stage == 1:
            return super().step(action, render, **render_kwargs)

        action = self.discrete_to_continuous_actions_v2(action)
        action = action.to(self.world.device)
        vehicle = self.agents["agent"]
        pos = vehicle.position
        # Control points: current pos -> midpoint 50 m ahead at the chosen
        # lateral offset -> road end at that offset -> far end behind start.
        farthest_pt = torch.cat(
            [torch.full((pos.size(0), 1), -self.length / 2), pos[:, 1:]],
            dim=-1,
        ).to(pos.device)
        mid_point_x = pos[:, :1] + 50.0
        mid_point_y = action * self.width / 2
        mid_point = torch.cat([mid_point_x, mid_point_y], dim=-1)
        last_pt = torch.cat(
            [torch.full((pos.size(0), 1), self.length / 2), mid_point_y],
            dim=-1,
        )
        action = torch.cat(
            [x.unsqueeze(1) for x in [pos, mid_point, last_pt, farthest_pt]],
            dim=1,
        )
        if self.lateral_noise_variance != 0.0:
            # Perturb the two interior control points; clamp to +/- 5 m.
            noise = (
                torch.randn(action.shape[0], 2, 2)
                * self.lateral_noise_variance
            )
            noise.clamp_(-5.0, 5.0)
            action[:, 1:3, :] += noise
        self.dynamics = SplineModel(
            action, v_lim=self.vel_rating[:, 0] * self.max_velocity
        )

        return self.get_state()

    def reset(self):
        """Reset and re-arm the stage-0 (spline selection) observation."""
        self.got_spline_state = False
        return super().reset()
class MultiAgentHighwayPedestriansSplineAccelerationDiscreteModel(
    MultiAgentHighwaySplineAccelerationDiscreteModel
):
    """Spline-acceleration environment on a 140 m road with pedestrians."""

    def generate_world_without_agents(self):
        """Single straight 140 x 25 road (same layout, shorter than base)."""
        network = RoadNetwork()
        length = 140.0
        width = 25.0
        network.add_road(
            Road(
                f"highway",
                torch.zeros(1, 2),
                length,
                width,
                torch.zeros(1, 1),
                can_cross=[False] * 4,
                has_endpoints=[True, False, True, False],
            )
        )
        return (
            World(
                network,
                xlims=(-length / 2 - 10, length / 2 + 10),
                ylims=(-length / 2 - 10, length / 2 + 10),
            ),
            {"length": length, "width": width},
        )

    def add_vehicles_to_world(self):
        """Spawn a 10 m-spaced convoy (as in the base class) plus pedestrians
        crossing at the crosswalk (x in [-10, 10])."""
        dims = torch.as_tensor([[4.48, 2.2]]).repeat(self.nagents, 1)
        self.max_accln = 3.0
        self.max_velocity = 16.0
        # x-offsets 0, 10, 20, ... for successive agents; y stays 0.
        diffs = torch.cumsum(
            torch.as_tensor([0.0] + [10.0] * (self.nagents - 1)).unsqueeze(1),
            dim=0,
        )
        diffs = torch.cat([diffs, torch.zeros(self.nagents, 1)], dim=-1)
        spos = torch.as_tensor([[-self.length / 2 + 30.0, 0.0]]) + diffs
        epos = torch.as_tensor([[self.length / 2 - 50.0, 0.0]]).repeat(
            self.nagents, 1
        )
        orient = torch.zeros(self.nagents, 1)
        dorient = torch.zeros(self.nagents, 1)

        vehicle = BatchedVehicle(
            position=spos,
            orientation=orient,
            destination=epos,
            dest_orientation=dorient,
            dimensions=dims,
            initial_speed=torch.zeros(self.nagents, 1),
            name="agent",
        )
        vehicle.add_bool_buffer(self.bool_buffer)

        # Random per-agent ratings in [0.5, 1.0].
        self.accln_rating = (torch.rand(self.nagents, 1) + 1) * 0.5
        self.vel_rating = self.accln_rating

        # Just for coloring the vehicles
        self.world.initialize_communication_channel(self.actual_nagents, 1)
        self.world.broadcast_data(self.accln_rating, vehicle.position)

        self.world.add_vehicle(vehicle, False)
        self.store_dynamics(vehicle)
        self.agents[vehicle.name] = vehicle

        self.original_distances = vehicle.distance_from_destination()

        # Pedestrians: up to 10, each spawned with probability 0.9 in the
        # lower half of the road, walking upward at speed in [1, 2).
        _pos = []
        # dims = torch.ones(10, 2) * 0.8
        # orientation = torch.ones(10, 1) * math.pi / 2
        for i in range(10):
            if torch.rand(1) < 0.1:
                continue
            pos = torch.zeros(1, 2)
            # CrossWalk is from -10.0 to 10.0
            pos[0, 0] = (torch.rand(1) * 2.0 - 1.0) * 10.0
            pos[0, 1] = -torch.rand(1) * self.width / 2
            _pos.append(pos)

        pedestrians = Pedestrians(
            torch.cat(_pos),
            torch.ones(len(_pos), 1) * 0.8,
            torch.ones(len(_pos), 1) * math.pi / 2,
            torch.rand(len(_pos), 1) + 1.0,
        )
        self.world.add_object(pedestrians)
| StarcoderdataPython |
def bit(n, data=''):
    """Print every binary string of length ``n``, one per line, in order.

    Negative ``n`` prints an error message and returns 0; ``n == 0`` prints
    ``0`` and returns 0. ``data`` accumulates the digits chosen so far
    during recursion.
    """
    if n < 0:
        print("Only Positive & Zero Number ! ! !")
        return 0
    if n == 0:
        print(0)
        return 0
    if len(data) == n:
        print(data)
        return
    # Branch on the next digit, '0' before '1', preserving sorted output.
    for digit in '01':
        bit(n, data + digit)
if __name__ == "__main__":
    # Guarded so importing this module no longer blocks on input().
    x = int(input("Enter Number : "))
    bit(x)
318792 | # This Python file uses the following encoding: utf-8
import unittest
from scripts.src.WordListCreator import WordListCreator
class TestWordListCreator(unittest.TestCase):
    """Table-driven tests for WordListCreator parsing and neighbour maps."""

    # (raw file lines, expected parsed word list)
    scenarios_get_words = [
        ([], []),
        (["\n"], []),
        (["# any line starting with hash"], []),
        (["aaaa\n"], ["aaaa"]),
        (["aaaa\n", "bbbb\n"], ["aaaa", "bbbb"]),
        (["aaaa,bbbb\n"], ["aaaa", "bbbb"]),
        (["aaaa,bbbb,cccc\n"], ["aaaa", "bbbb", "cccc"]),
    ]

    def test_get_words(self):
        for raw_lines, expected_words in self.scenarios_get_words:
            with self.subTest():
                self.assertEqual(expected_words,
                                 WordListCreator().get_words(raw_lines))

    # (word list, expected mapping word -> set of words one letter apart)
    scenarios_create_map_of_neighbours = [
        (["aaaa"], {"aaaa": set()}),
        (["aaaa", "bbbb"], {"aaaa": set(), "bbbb": set()}),
        (["aaaa", "aaab"], {"aaaa": set(["aaab"]), "aaab": set(["aaaa"])}),
        (["aaaa", "baaa", "abaa", "aaba", "aaab", "ffff"],
         {"aaaa": set(["baaa", "abaa", "aaba", "aaab"]),
          "baaa": set(["aaaa"]),
          "abaa": set(["aaaa"]),
          "aaba": set(["aaaa"]),
          "aaab": set(["aaaa"]),
          "ffff": set()}),
    ]

    def test_create_map_of_neighbours(self):
        for word_list, expected_map in self.scenarios_create_map_of_neighbours:
            with self.subTest():
                self.assertEqual(expected_map,
                                 WordListCreator().create_map_of_neighbours(word_list))
if __name__ == '__main__':
    # Allow running this test module directly with the unittest runner.
    unittest.main()
| StarcoderdataPython |
131540 | # This is your project's main settings file that can be committed to your
# repo. If you need to override a setting locally, use settings_local.py
# Django settings file for a project based on the playdoh template.
# import * into your settings_local.py
import logging
import os
import socket
from django.utils.functional import lazy
import dj_database_url
from decouple import Csv, config
BASE_DIR = ROOT = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
def path(*parts):
    """Return *parts* joined beneath the project ROOT directory."""
    return os.path.join(ROOT, *parts)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config('SECRET_KEY', default='')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config('DEBUG', default=False, cast=bool)
TEMPLATE_DEBUG = config('DEBUG', default=DEBUG, cast=bool)
DEV = DEBUG
ALLOWED_HOSTS = config('ALLOWED_HOSTS', cast=Csv(),
default='badges-dev.allizom.org,badges.allizom.org,badges.mozilla.org')
BROWSERID_AUDIENCES = config('BROWSERID_AUDIENCES', cast=Csv(),
default='https://badges-dev.allizom.org,https://badges.allizom.org,https://badges.mozilla.org')
# For backwards compatability, (projects built based on cloning playdoh)
# we still have to have a ROOT_URLCONF.
# For new-style playdoh projects this will be overridden automatically
# by the new installer
ROOT_URLCONF = '%s.urls' % os.path.basename(ROOT)
# Is this a dev instance?
ADMINS = ()
MANAGERS = ADMINS
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': config(
'DATABASE_URL',
default='mysql://root:passwd@db/badges',
cast=dj_database_url.parse
)
}
SLAVE_DATABASES = []
DATABASE_ROUTERS = ('multidb.PinningMasterSlaveRouter',)
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': config('MEMCACHED', default='127.0.0.1:11211'),
}
}
EMAIL_BACKEND = config('EMAIL_BACKEND',
default='django.core.mail.backends.smtp.EmailBackend')
# Site ID is used by Django's Sites framework.
SITE_ID = 1
## Logging
LOG_LEVEL = logging.INFO
HAS_SYSLOG = True
SYSLOG_TAG = "http_app_playdoh" # Change this after you fork.
LOGGING_CONFIG = None
LOGGING = {}
# CEF Logging
CEF_PRODUCT = 'Playdoh'
CEF_VENDOR = 'Mozilla'
CEF_VERSION = '0'
CEF_DEVICE_VERSION = '0'
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = config('LANGUAGE_CODE', default='en-us')
TIME_ZONE = config('TIME_ZONE', default='UTC')
USE_I18N = config('USE_I18N', default=True, cast=bool)
USE_L10N = config('USE_L10N', default=True, cast=bool)
USE_TZ = config('USE_TZ', default=True, cast=bool)
# Gettext text domain
TEXT_DOMAIN = 'messages'
STANDALONE_DOMAINS = [TEXT_DOMAIN, 'javascript']
TOWER_KEYWORDS = {'_lazy': None}
TOWER_ADD_HEADERS = True
## Accepted locales
# Tells the product_details module where to find our local JSON files.
# This ultimately controls how LANGUAGES are constructed.
PROD_DETAILS_DIR = path('lib/product_details_json')
# On dev instances, the list of accepted locales defaults to the contents of
# the `locale` directory within a project module or, for older Playdoh apps,
# the root locale directory. A localizer can add their locale in the l10n
# repository (copy of which is checked out into `locale`) in order to start
# testing the localization on the dev server.
import glob
import itertools
DEV_LANGUAGES = None
try:
DEV_LANGUAGES = [
os.path.basename(loc).replace('_', '-')
for loc in itertools.chain(glob.iglob(ROOT + '/locale/*'), # old style
glob.iglob(ROOT + '/*/locale/*'))
if (os.path.isdir(loc) and os.path.basename(loc) != 'templates')
]
except OSError:
pass
# If the locale/ directory isn't there or it's empty, we make sure that
# we always have at least 'en-US'.
if not DEV_LANGUAGES:
DEV_LANGUAGES = ('en-US',)
# On stage/prod, the list of accepted locales is manually maintained. Only
# locales whose localizers have signed off on their work should be listed here.
PROD_LANGUAGES = (
'en-US',
)
def lazy_lang_url_map():
    """Map lowercased locale codes to their canonical form (evaluated lazily)."""
    from django.conf import settings
    langs = settings.DEV_LANGUAGES if settings.DEV else settings.PROD_LANGUAGES
    return {code.lower(): code for code in langs}
LANGUAGE_URL_MAP = lazy(lazy_lang_url_map, dict)()
# Override Django's built-in with our native names
def lazy_langs():
    """Map locale codes to their native names from product_details (lazy).

    Locales missing from product_details are silently dropped.
    """
    from django.conf import settings
    from product_details import product_details
    # NOTE: intentionally reads the module-level DEV_LANGUAGES (not
    # settings.DEV_LANGUAGES), matching the original behaviour.
    langs = DEV_LANGUAGES if settings.DEV else settings.PROD_LANGUAGES
    return {code.lower(): product_details.languages[code]['native']
            for code in langs if code in product_details.languages}
LANGUAGES = lazy(lazy_langs, dict)()
# Tells the extract script what files to look for L10n in and what function
# handles the extraction. The Tower library expects this.
DOMAIN_METHODS = {
'messages': [
# Searching apps dirs only exists for historic playdoh apps.
# See playdoh's base settings for how message paths are set.
('apps/**.py',
'tower.management.commands.extract.extract_tower_python'),
('apps/**/templates/**.html',
'tower.management.commands.extract.extract_tower_template'),
('templates/**.html',
'tower.management.commands.extract.extract_tower_template'),
],
}
# Paths that don't require a locale code in the URL.
SUPPORTED_NONLOCALES = ['media', 'static', 'admin', 'browserid']
## Media and templates.
STATIC_ROOT = config('STATIC_ROOT', default=os.path.join(BASE_DIR, 'static'))
STATIC_URL = config('STATIC_URL', default='/static/')
MEDIA_ROOT = config('MEDIA_ROOT', default=os.path.join(BASE_DIR, 'media'))
MEDIA_URL = config('MEDIA_URL', default='/media/')
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'jingo.Loader',
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'django.core.context_processors.debug',
'django.core.context_processors.media',
'django.core.context_processors.request',
'session_csrf.context_processor',
'django.contrib.messages.context_processors.messages',
'badgus.base.context_processors.i18n',
'badgus.base.context_processors.globals',
#'jingo_minify.helpers.build_ids',
)
def get_template_context_processors(exclude=(), append=(),
                                    current={'processors': TEMPLATE_CONTEXT_PROCESSORS}):
    """
    Return TEMPLATE_CONTEXT_PROCESSORS without the entries in ``exclude``
    and with the entries in ``append`` added at the end.

    The mutable default dict is deliberate: it carries the processor
    tuple's state across multiple settings files that call this helper.
    """
    kept = tuple(proc for proc in current['processors'] if proc not in exclude)
    current['processors'] = kept + tuple(append)
    return current['processors']
TEMPLATE_DIRS = (
path('templates'),
)
# Storage of static files
COMPRESS_ROOT = STATIC_ROOT
COMPRESS_CSS_FILTERS = (
'compressor.filters.css_default.CssAbsoluteFilter',
'compressor.filters.cssmin.CSSMinFilter'
)
COMPRESS_PRECOMPILERS = (
#('text/coffeescript', 'coffee --compile --stdio'),
('text/less', 'lessc {infile} {outfile}'),
#('text/x-sass', 'sass {infile} {outfile}'),
#('text/x-scss', 'sass --scss {infile} {outfile}'),
)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
def JINJA_CONFIG():
    """Return the Jinja2 environment options used by jingo.

    A memcached-backed bytecode cache used to be wired in here; the hook
    points are kept out of the body until it is needed again.
    """
    extensions = [
        'tower.template.i18n',
        'jinja2.ext.do',
        'jinja2.ext.with_',
        'jinja2.ext.loopcontrols',
    ]
    # finalize: render None as an empty string instead of the text "None".
    return {
        'extensions': extensions,
        'finalize': lambda value: '' if value is None else value,
    }
## Middlewares, apps, URL configs.
MIDDLEWARE_CLASSES = (
'badgus.base.middleware.LocaleURLMiddleware',
'multidb.middleware.PinningRouterMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'session_csrf.CsrfMiddleware', # Must be after auth middleware.
'django.contrib.messages.middleware.MessageMiddleware',
'commonware.middleware.FrameOptionsHeader',
'mobility.middleware.DetectMobileMiddleware',
'mobility.middleware.XMobileMiddleware',
)
def get_middleware(exclude=(), append=(),
                   current={'middleware': MIDDLEWARE_CLASSES}):
    """
    Return MIDDLEWARE_CLASSES without the entries in ``exclude`` and with
    the entries in ``append`` added at the end.

    The mutable default dict is deliberate: it preserves the middleware
    tuple's state across multiple settings files that call this helper.
    """
    kept = tuple(mw for mw in current['middleware'] if mw not in exclude)
    current['middleware'] = kept + tuple(append)
    return current['middleware']
INSTALLED_APPS = [
'constance',
'constance.backends.database',
'badgus.base',
# Local apps
'compressor',
'tower', # for ./manage.py extract (L10n)
'cronjobs', # for ./manage.py cron * cmd line tasks
'django_browserid',
# Django contrib apps
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
# Third-party apps, patches, fixes
'commonware.response.cookies',
'django_nose',
'session_csrf',
# L10n
'product_details',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
'taggit',
'valet_keys',
'badgus.profiles',
'badger',
'badgus.badger_api',
'notification',
#'csp',
#'south',
]
for app in config('EXTRA_APPS', default='', cast=Csv()):
INSTALLED_APPS.append(app)
def get_apps(exclude=(), append=(), current={'apps': INSTALLED_APPS}):
    """
    Return INSTALLED_APPS without the apps in ``exclude`` and with the
    apps in ``append`` added at the end.

    The mutable default dict is deliberate: it preserves the app list's
    state across multiple settings files that call this helper.
    """
    kept = tuple(name for name in current['apps'] if name not in exclude)
    current['apps'] = kept + tuple(append)
    return current['apps']
# Path to Java. Used for compress_assets.
JAVA_BIN = '/usr/bin/java'
# Sessions
#
# By default, be at least somewhat secure with our session cookies.
SESSION_COOKIE_SECURE = config('SESSION_COOKIE_SECURE',
default=not DEBUG, cast=bool)
SESSION_COOKIE_HTTPONLY = config('SESSION_COOKIE_HTTPONLY',
default=not DEBUG, cast=bool)
## Auth
# The first hasher in this list will be used for new passwords.
# Any other hasher in the list can be used for existing passwords.
# Playdoh ships with Bcrypt+HMAC by default because it's the most secure.
# To use bcrypt, fill in a secret HMAC key in your local settings.
BASE_PASSWORD_HASHERS = (
'django_sha2.hashers.BcryptHMACCombinedPasswordVerifier',
'django_sha2.hashers.SHA512PasswordHasher',
'django_sha2.hashers.SHA256PasswordHasher',
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.MD5PasswordHasher',
'django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',
)
HMAC_KEYS = { # for bcrypt only
#'2012-06-06': 'cheesecake',
}
from django_sha2 import get_password_hashers
PASSWORD_HASHERS = get_password_hashers(BASE_PASSWORD_HASHERS, HMAC_KEYS)
## Tests
TEST_RUNNER = 'test_utils.runner.RadicalTestSuiteRunner'
# For absolute urls
try:
DOMAIN = socket.gethostname()
except socket.error:
DOMAIN = 'localhost'
PROTOCOL = "http://"
PORT = 80
## django-mobility
MOBILE_COOKIE = 'mobile'
##############################
# TODO: Merge the below stuff with the above stuff, since it came from two different settings files
##############################
SITE_TITLE = 'badges-local.allizom.org'
# Make sure South stays out of the way during testing
#SOUTH_TESTS_MIGRATE = False
#SKIP_SOUTH_TESTS = True
SOUTH_MIGRATION_MODULES = {
'taggit': 'taggit.south_migrations',
}
# Bundles is a dictionary of two dictionaries, css and js, which list css files
# and js files that can be bundled together by the minify app.
MINIFY_BUNDLES = {
'css': {
'base': (
'css/base.css',
),
'example_css': (
'css/examples/main.css',
),
'example_mobile_css': (
'css/examples/mobile.css',
),
'bootstrap': (
'bootstrap/css/bootstrap.css',
'bootstrap/css/bootstrap-responsive.css',
)
},
'js': {
'base': (
'js/libs/jquery-1.7.1.min.js',
'js/libs/jquery.cookie.js',
'js/libs/browserid.js',
'js/base.js',
),
'example_js': (
'js/examples/libs/jquery-1.4.4.min.js',
'js/examples/libs/jquery.cookie.js',
'js/examples/init.js',
),
'bootstrap': (
'bootstrap/js/bootstrap.js',
),
}
}
# HACK: HMAC_KEYS default to make funfactory happier
# NOTE(review): this re-assignment overrides the (empty) HMAC_KEYS defined
# earlier in this module. The values below are deliberate placeholders,
# not real secrets — Persona is used and passwords are not stored.
HMAC_KEYS = {
    '2011-01-01': 'this is fake; we use persona and do not store passwords',
    '2010-06-01': 'OldSharedKey',
    '2010-01-01': 'EvenOlderSharedKey'
}
# Defines the views served for root URLs.
# NOTE(review): overrides the computed ROOT_URLCONF assigned earlier in
# this file (this section was merged in from a second settings file).
ROOT_URLCONF = 'badgus.urls'
# Authentication
BROWSERID_CREATE_USER = True
SITE_URL = 'http://localhost:8000'
LOGIN_URL = '/accounts/login'
LOGIN_REDIRECT_URL = '/profiles/home'
LOGIN_REDIRECT_URL_FAILURE = '/'
LOGOUT_REDIRECT_URL = '/'
def username_algo(email):
    """Derive a unique username from the local part of ``email``.

    If the base name is taken, appends ``_1``, ``_2``, ... until a free
    username is found.

    Args:
        email: The user's email address.

    Returns:
        A username string unique among existing ``User`` rows.
    """
    from django.contrib.auth.models import User
    cnt, base_name = 0, email.split('@')[0]
    username = base_name
    # .exists() lets the database stop at the first match instead of
    # counting every matching row (.count() > 0 did unnecessary work).
    while User.objects.filter(username=username).exists():
        cnt += 1
        username = '%s_%s' % (base_name, cnt)
    return username
BROWSERID_USERNAME_ALGO = username_algo
AUTHENTICATION_BACKENDS = (
'django_browserid.auth.BrowserIDBackend',
'django.contrib.auth.backends.ModelBackend'
)
AUTH_PROFILE_MODULE = "profiles.UserProfile"
TEMPLATE_CONTEXT_PROCESSORS = list(TEMPLATE_CONTEXT_PROCESSORS) + [
'constance.context_processors.config',
'django.contrib.messages.context_processors.messages',
'notification.context_processors.notification',
]
CONSTANCE_BACKEND = 'constance.backends.database.DatabaseBackend'
CONSTANCE_DATABASE_CACHE_BACKEND = 'default'
MIDDLEWARE_CLASSES = list(MIDDLEWARE_CLASSES) + [
'django.contrib.messages.middleware.MessageMiddleware',
'commonware.response.middleware.StrictTransportMiddleware',
#'csp.middleware.CSPMiddleware',
]
# Because Jinja2 is the default template loader, add any non-Jinja templated
# apps here:
JINGO_EXCLUDE_APPS = [
'admin',
'browserid',
]
def JINJA_CONFIG():
    """Return Jinja2 environment options.

    NOTE(review): this is the second JINJA_CONFIG defined in this module
    and overrides the earlier one at import time (the file was merged
    from two settings files). The previous body imported ``jinja2``
    without using it; the unused import has been removed.
    """
    # finalize: render None as an empty string instead of the text "None".
    return {
        'extensions': ['jinja2.ext.i18n', 'jinja2.ext.with_',
                       'jinja2.ext.loopcontrols', 'jinja2.ext.autoescape'],
        'finalize': lambda x: x if x is not None else '',
    }
# Tells the extract script what files to look for L10n in and what function
# handles the extraction. The Tower library expects this.
DOMAIN_METHODS = {
'messages': [
('**/badgus/**.py',
'tower.management.commands.extract.extract_tower_python'),
('**/badgus/**/templates/**.html',
'tower.management.commands.extract.extract_tower_template')
],
}
# Tells the extract script what files to look for L10n in and what function
# handles the extraction. The Tower library expects this.
# # Use this if you have localizable HTML files:
# DOMAIN_METHODS['lhtml'] = [
# ('**/templates/**.lhtml',
# 'tower.management.commands.extract.extract_tower_template'),
# ]
# # Use this if you have localizable HTML files:
# DOMAIN_METHODS['javascript'] = [
# # Make sure that this won't pull in strings from external libraries you
# # may use.
# ('media/js/**.js', 'javascript'),
# ]
MESSAGE_STORAGE = 'django.contrib.messages.storage.cookie.CookieStorage'
# Always generate a CSRF token for anonymous users
ANON_ALWAYS = True
LOGGING = dict(loggers=dict(playdoh = {'level': logging.DEBUG}))
# Django-CSP
CSP_IMG_SRC = ("'self'",
'http://localhost',
'http://localhost:8000',
'http://localhost:8888',
'http://www.mozilla.org',
'https://www.mozilla.org',
'http://beta.openbadges.org',
'https://beta.openbadges.org',
'http://backpack.openbadges.org',
'https://backpack.openbadges.org',
'http://cf.cdn.vid.ly',
'http://www.gravatar.com',
'https://www.gravatar.com',
'https://secure.gravatar.com',
'http://chart.apis.google.com',
'https://chart.apis.google.com',
'http://plusone.google.com', 'https://plusone.google.com',
'http://ssl.gstatic.com', 'https://ssl.gstatic.com',
'http://apis.google.com/', 'https://apis.google.com/')
CSP_STYLE_SRC = ("'self'",
'http://localhost',
'http://localhost:8000',
'http://localhost:8888',
'http://www.mozilla.org',
'https://www.mozilla.org',
'http://beta.openbadges.org',
'https://beta.openbadges.org',
'http://backpack.openbadges.org',
'https://backpack.openbadges.org',
'https://fonts.googleapis.com',
'http://plusone.google.com', 'https://plusone.google.com',
'http://ssl.gstatic.com', 'https://ssl.gstatic.com',
'http://apis.google.com', 'https://apis.google.com')
CSP_FONT_SRC = ("'self'",
'https://themes.googleusercontent.com',)
CSP_SCRIPT_SRC = ("'self'",
'http://localhost',
'http://localhost:8000',
'http://localhost:8888',
'http://www.mozilla.org',
'https://www.mozilla.org',
'http://beta.openbadges.org',
'https://beta.openbadges.org',
'http://backpack.openbadges.org',
'https://backpack.openbadges.org',
'http://login.persona.org',
'https://login.persona.org',
'http://platform.twitter.com', 'https://platform.twitter.com',
'http://apis.google.com', 'https://apis.google.com',
'http://plusone.google.com', 'https://plusone.google.com',
'http://ssl.gstatic.com', 'https://ssl.gstatic.com',
'http://connect.facebook.net', 'https://connect.facebook.net',)
CSP_FRAME_SRC = ("'self'",
'http://localhost',
'http://localhost:8000',
'http://localhost:8888',
'http://www.mozilla.org',
'https://www.mozilla.org',
'http://beta.openbadges.org',
'https://beta.openbadges.org',
'http://backpack.openbadges.org',
'https://backpack.openbadges.org',
'http://apis.google.com', 'https://apis.google.com',
'http://plusone.google.com', 'https://plusone.google.com',
'http://ssl.gstatic.com', 'https://ssl.gstatic.com',
'http://platform.twitter.com', 'https://platform.twitter.com',
'https://www.facebook.com',)
CSP_OPTIONS = ('eval-script',)
BADGER_ALLOW_ADD_BY_ANYONE = config('BADGER_ALLOW_ADD_BY_ANYONE', default=False, cast=bool)
DEFAULT_FROM_EMAIL = '<EMAIL>'
OBI_BASE_URL = "//backpack.openbadges.org/"
OBI_ISSUER_URL = "//backpack.openbadges.org/issuer.js"
CONSTANCE_CONFIG = dict(
BADGER_ALLOW_ADD_ONLY_BY_MOZILLIANS = (
False,
'Whether to restrict login to vouched mozillians.org members',
),
MOZILLIANS_API_BASE_URL = (
'https://mozillians.org/api/v1',
'Mozillians.org API base URL',
),
MOZILLIANS_API_APPNAME = (
'badges_mozilla_org',
'Mozillians.org API app name',
),
MOZILLIANS_API_KEY = (
'',
'Mozillians.org API key',
),
MOZILLIANS_API_CACHE_KEY_PREFIX = (
'mozillians_api',
'Mozillians.org API result cache key prefix',
),
MOZILLIANS_API_CACHE_TIMEOUT = (
1800,
'Mozillians.org API result cache timeout',
),
)
BROWSERID_VERIFY_CLASS = 'django_browserid.views.Verify'
SQL_RESET_SEQUENCES = False
| StarcoderdataPython |
116904 | #!/usr/env python
from pybedtools import BedTool as Bed
from Cigar import Cigar
import numpy as np
def overlap(s1, e1, s2, e2):
    """Return the smaller reciprocal-overlap fraction of two inclusive
    intervals ``[s1, e1]`` and ``[s2, e2]``.

    The shared length may be negative when the intervals are disjoint,
    in which case a negative fraction is returned (caller's threshold
    comparisons still work).
    """
    later_start = max(s1, s2)
    earlier_end = min(e1, e2)
    shared = earlier_end - later_start + 1           # overlapping bases
    frac_of_second = float(shared) / (e2 - s2 + 1)   # fraction of interval 2
    frac_of_first = float(shared) / (e1 - s1 + 1)    # fraction of interval 1
    return min(frac_of_first, frac_of_second)
def within(p, ci):
    """Return True if position ``p`` lies inside the closed interval ``ci``.

    Args:
        p: Numeric position.
        ci: Two-element sequence ``(low, high)`` treated as inclusive bounds.

    Returns:
        bool: whether ``ci[0] <= p <= ci[1]``.
    """
    # The chained comparison already yields a bool; no if/else needed.
    return ci[0] <= p <= ci[1]
class Alignment():
    """Wrapper around a single read alignment, tracking clip positions and
    insertions that support a structural-variant (SV) call."""

    def __init__(self, al=None):
        # al is expected to be a pysam-style aligned segment (has
        # is_reverse, get_reference_positions, mapping_quality) —
        # TODO confirm; the al=None default would crash immediately here.
        strand = '+'
        if al.is_reverse: strand = '-'
        self.pos = al.get_reference_positions()
        self.strand = strand
        self.mapq = int(al.mapping_quality)
        # Reference positions of clips that land inside the SV's
        # confidence intervals; filled in by setClips()/cigarSV().
        self.startClip = None
        self.endClip = None
        # Query-coordinate span; filled in by queryPos().
        self.qStart = None
        self.qEnd = None
        # Length of a supporting insertion, if cigarSV() finds one.
        self.insertion = None

    def queryPos(self, cig):
        """Copy the query start/end computed by the Cigar object."""
        cig.qPos()
        self.qStart, self.qEnd = cig.qStart, cig.qEnd

    def setClips(self, cig, leftPos, rightPos, svtype, leftCI, rightCI):
        """Record soft-clip positions that fall inside the SV breakpoint
        confidence intervals, with orientation rules per SV type.

        leftPos/rightPos are this alignment's reference endpoints;
        leftCI/rightCI are (low, high) confidence intervals around the
        SV's two breakpoints.
        """
        if svtype == 'DEL':
            # Deletion: read clipped on its right at the left breakpoint,
            # or on its left at the right breakpoint.
            if cig.rightClip == True and within(rightPos, leftCI) == True: self.startClip = rightPos
            if cig.leftClip == True and within(leftPos, rightCI) == True: self.endClip = leftPos
        elif svtype == 'DUP':
            # Duplication: clip orientations are mirrored vs. a deletion.
            if cig.leftClip == True and within(leftPos, leftCI) == True: self.startClip = leftPos
            if cig.rightClip == True and within(rightPos, rightCI) == True: self.endClip = rightPos
        elif svtype == 'INV':
            # Inversion: either clip side can support either breakpoint.
            if cig.leftClip == True and leftCI[0] <= leftPos <= leftCI[1]: self.startClip = leftPos
            elif cig.leftClip == True and rightCI[0] <= leftPos <= rightCI[1]: self.endClip = leftPos
            if cig.rightClip == True and leftCI[0] <= rightPos <= leftCI[1]: self.startClip = rightPos
            elif cig.rightClip == True and rightCI[0] <= rightPos <= rightCI[1]: self.endClip = rightPos
        elif svtype == 'INS':
            # Insertion: both clip sides are checked against the single
            # (left) breakpoint interval.
            if cig.rightClip == True and within(rightPos, leftCI) == True: self.startClip = rightPos
            if cig.leftClip == True and within(leftPos, leftCI) == True: self.startClip = leftPos

    def cigarSV(self, cig=None, left=None, SV=None, minLen=None, minOvr=None):
        """Scan CIGAR (flag, length) tuples for an insertion/deletion op of
        at least ``minLen`` whose reference span reciprocally overlaps the
        SV by at least ``minOvr``; record breakpoints inside the SV's
        confidence intervals.

        ``left`` is the alignment's leftmost reference position. CIGAR
        flags follow the BAM convention (0=M, 1=I, 2=D, 3=N, 7==, 8=X).
        """
        ind = 0
        for (flg, leng) in cig:
            if (flg == 1 or flg == 2) and leng > (minLen - 1):
                s1 = left + ind
                e1 = left + ind + leng - 1
                ovr1 = overlap(SV.start, SV.end, s1, e1)
                if SV.svtype == 'DUP':
                    # For a duplication, also try the span shifted left by
                    # one op-length and keep whichever overlaps the SV more.
                    s2, e2 = left + ind - leng, s1 - 1
                    ovr2 = overlap(SV.start, SV.end, s2, e2)
                    if ovr1 < ovr2: s1, e1, ovr1 = s2, e2, ovr2
                if ovr1 >= minOvr:
                    if SV.svtype == 'INS': self.insertion = leng
                    if SV.leftCI[0] <= s1 <= SV.leftCI[1]: self.startClip = s1
                    if SV.rightCI[0] <= e1 <= SV.rightCI[1]: self.endClip = e1
            # Only ops that consume the reference advance the offset
            # (insertions, flag 1, do not).
            if flg == 0 or flg == 2 or flg == 3 or flg == 7 or flg == 8: ind += leng
| StarcoderdataPython |
9749334 | <reponame>ddebrunner/streamsx.topology
# coding=utf-8
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2017,2019
"""
SPL toolkit integration.
********
Overview
********
SPL operators are defined by an SPL toolkit. When a ``Topology``
contains invocations of SPL operators, their defining toolkit must
be made known using :py:func:`add_toolkit`.
Toolkits shipped with the IBM Streams product under
``$STREAMS_INSTALL/toolkits`` are implictly known and
must not be added through ``add_toolkit``.
"""
__all__ = ['add_toolkit', 'add_toolkit_dependency']
import os
def add_toolkit(topology, location):
    """Add an SPL toolkit to a topology.

    Args:
        topology(Topology): Topology to include toolkit in.
        location(str): Location of the toolkit directory.
    """
    import streamsx.topology.topology
    assert isinstance(topology, streamsx.topology.topology.Topology)
    # Record the toolkit by its absolute root directory.
    topology.graph._spl_toolkits.append({'root': os.path.abspath(location)})
def add_toolkit_dependency(topology, name, version):
    """Add a version dependency on an SPL toolkit to a topology.

    To specify a range of versions for the dependent toolkits,
    use brackets (``[]``) or parentheses. Brackets represent an
    inclusive bound and parentheses an exclusive bound:

    * ``[1.0.0, 2.0.0]`` -- versions 1.0.0 through 2.0.0, both inclusive.
    * ``[1.0.0, 2.0.0)`` -- 1.0.0 or later, but not including 2.0.0.
    * ``(1.0.0, 2.0.0]`` -- later than 1.0.0 and up to 2.0.0 inclusive.
    * ``(1.0.0, 2.0.0)`` -- between 1.0.0 and 2.0.0, both exclusive.

    Args:
        topology(Topology): Topology to include toolkit in.
        name(str): Toolkit name.
        version(str): Toolkit version dependency.

    .. seealso::
        `Toolkit information model file <https://www.ibm.com/support/knowledgecenter/SSCRJU_4.3.0/com.ibm.streams.dev.doc/doc/toolkitinformationmodelfile.html>`_

    .. versionadded:: 1.12
    """
    import streamsx.topology.topology
    assert isinstance(topology, streamsx.topology.topology.Topology)
    # A name/version entry (no 'root') marks a dependency rather than a
    # locally-available toolkit.
    topology.graph._spl_toolkits.append({'name': name, 'version': version})
| StarcoderdataPython |
12862158 | <filename>wildlifelicensing/apps/returns/models.py
from __future__ import unicode_literals
from django.db import models
from django.contrib.postgres.fields.jsonb import JSONField
from django.core.exceptions import ValidationError
import datapackage
import jsontableschema
from ledger.accounts.models import RevisionedMixin, EmailUser
from wildlifelicensing.apps.main.models import WildlifeLicenceType, WildlifeLicence, CommunicationsLogEntry
class ReturnType(models.Model):
    """Defines, per licence type, the tabular data a return must contain."""

    # Exactly one return type per licence type.
    licence_type = models.OneToOneField(WildlifeLicenceType)
    # data_descriptor should follow the Tabular Data Package format described at:
    # http://data.okfn.org/doc/tabular-data-package
    # also in:
    # http://dataprotocols.org/data-packages/
    # The schema inside the 'resources' must follow the JSON Table Schema defined at:
    # http://dataprotocols.org/json-table-schema/
    data_descriptor = JSONField()
    # How often (in months) a return of this type falls due.
    month_frequency = models.IntegerField(choices=WildlifeLicence.MONTH_FREQUENCY_CHOICES,
                                          default=WildlifeLicence.DEFAULT_FREQUENCY)

    def clean(self):
        """
        Validate the data descriptor.

        Raises ValidationError if the descriptor is not a valid Data
        Package, declares no resources, or any resource lacks a valid
        JSON Table Schema.
        """
        # Validate the data package
        validator = datapackage.DataPackage(self.data_descriptor)
        try:
            validator.validate()
        except Exception:
            # The datapackage library raises its own error types; re-raise
            # as a Django ValidationError listing every problem found.
            raise ValidationError('Data package errors: {}'.format([str(e[0]) for e in validator.iter_errors()]))
        # Check that there is at least one resources defined (not required by the standard)
        if len(self.resources) == 0:
            raise ValidationError('You must define at least one resource')
        # Validate the schema for all resources
        for resource in self.resources:
            if 'schema' not in resource:
                raise ValidationError("Resource without a 'schema'.")
            else:
                schema = resource.get('schema')
                try:
                    jsontableschema.validate(schema)
                except Exception:
                    raise ValidationError(
                        'Schema errors for resource "{}": {}'.format(
                            resource.get('name'),
                            [str(e[0]) for e in jsontableschema.validator.iter_errors(schema)]))

    @property
    def resources(self):
        # List of resource dicts from the descriptor; [] when none defined.
        return self.data_descriptor.get('resources', [])

    def get_resource_by_name(self, name):
        """Return the resource dict with the given name, or None."""
        for resource in self.resources:
            if resource.get('name') == name:
                return resource
        return None

    def get_resources_names(self):
        """Return the 'name' of every resource (entries may be None)."""
        return [r.get('name') for r in self.resources]

    def get_schema_by_name(self, name):
        """Return the named resource's schema ({} if it has no schema),
        or None when no resource has that name."""
        resource = self.get_resource_by_name(name)
        return resource.get('schema', {}) if resource else None
class Return(RevisionedMixin):
    """A single return instance lodged (or due) against a licence."""

    STATUS_CHOICES = [
        ('current', 'Current'),
        ('future', 'Future'),
        ('draft', 'Draft'),
        ('submitted', 'Submitted'),
        ('amendment_required', 'Amendment Required'),
        ('amended', 'Amended'),
        ('accepted', 'Accepted'),
        ('declined', 'Declined')
    ]
    # New returns start out as 'future'.
    DEFAULT_STATUS = STATUS_CHOICES[1][0]
    # Statuses in which the customer may still edit the return.
    CUSTOMER_EDITABLE_STATE = ['current', 'draft', 'amendment_required']
    return_type = models.ForeignKey(ReturnType)
    licence = models.ForeignKey(WildlifeLicence)
    status = models.CharField(max_length=20, choices=STATUS_CHOICES, default=DEFAULT_STATUS)
    lodgement_number = models.CharField(max_length=9, blank=True, default='')
    lodgement_date = models.DateField(blank=True, null=True)
    due_date = models.DateField(null=False, blank=False)
    # Officer lodging on the customer's behalf, if any.
    proxy_customer = models.ForeignKey(EmailUser, blank=True, null=True)
    # True when the licensee declares there is nothing to report.
    nil_return = models.BooleanField(default=False)
    comments = models.TextField(blank=True, null=True)

    @property
    def reference(self):
        # Human-readable identifier used in communications logs.
        return '{}'.format(self.lodgement_number)

    @property
    def can_user_edit(self):
        """
        :return: True if the return is in one of the editable status.
        """
        return self.status in self.CUSTOMER_EDITABLE_STATE

    @property
    def pending_amendments_qs(self):
        # Queryset of amendment requests still awaiting the customer.
        return ReturnAmendmentRequest.objects.filter(ret=self, status='requested')
class ReturnAmendmentRequest(models.Model):
    """An officer's request that a lodged return be amended."""

    STATUS_CHOICES = (('requested', 'Requested'), ('amended', 'Amended'))
    ret = models.ForeignKey(Return)
    status = models.CharField('Status', max_length=30, choices=STATUS_CHOICES, default=STATUS_CHOICES[0][0])
    # Mandatory explanation shown to the customer.
    reason = models.TextField(blank=False)
    # The officer who raised the request.
    officer = models.ForeignKey(EmailUser, null=True)
class ReturnTable(RevisionedMixin):
    """One named table of data within a return (matches a resource name)."""
    ret = models.ForeignKey(Return)
    name = models.CharField(max_length=50)
class ReturnRow(RevisionedMixin):
    """A single row of a return table, stored as a JSON mapping."""
    return_table = models.ForeignKey(ReturnTable)
    data = JSONField(blank=True, null=True)
class ReturnLogEntry(CommunicationsLogEntry):
    """Communications log entry attached to a specific return."""
    ret = models.ForeignKey(Return)

    def save(self, **kwargs):
        # save the application reference if the reference not provided
        if not self.reference:
            self.reference = self.ret.reference
        super(ReturnLogEntry, self).save(**kwargs)
| StarcoderdataPython |
3495959 | <gh_stars>0
# This file is part of spot_motion_monitor.
#
# Developed for LSST System Integration, Test and Commissioning.
#
# See the LICENSE file at the top-level directory of this distribution
# for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.
import numpy as np
from scipy.fftpack import fftfreq, rfft
__all__ = ['fft_calculator']
def fft_calculator(xVals, yVals, collectRate):
    """Calculate the FFT of the given arrays.

    Parameters
    ----------
    xVals : numpy.array
        The x coordinates of the centroids.
    yVals : numpy.array
        The y coordinates of the centroids.
    collectRate : float
        The rate at which the data was collected (FPS).

    Returns
    -------
    (numpy.array, numpy.array, numpy.array)
        The FFTX, FFTY and Frequency arrays on the positive Frequency portion.
    """
    # Both inputs are assumed to have the same length.
    num_samples = xVals.size
    # Remove the DC component before transforming.
    x_fft = rfft(xVals - np.mean(xVals))
    y_fft = rfft(yVals - np.mean(yVals))
    freqs = fftfreq(num_samples, 1 / collectRate)
    # Keep only the positive-frequency portion, skipping the DC bin.
    keep = slice(1, num_samples // 2)
    return x_fft[keep], y_fft[keep], freqs[keep]
| StarcoderdataPython |
6493614 | <filename>tests/test_slice.py<gh_stars>1-10
# -*- coding: utf-8 -*-
__author__ = "<NAME>"
__date__ = "30 Jun 2021"
def parse_slice_str(slice_str):
    """Convert a slice-like string into an int index or a ``slice``.

    Examples::

        '1:10:2' -> slice(1, 10, 2)
        ':-1'    -> slice(None, -1, None)
        '22'     -> 22
        ''       -> slice(None)   # any non-integer single token

    Non-integer parts of a colon-separated string become None.

    Raises:
        ValueError: if more than three colon-separated parts are given
            (``slice`` accepts at most start:stop:step; the original code
            crashed with an opaque TypeError in this case).
    """
    parts = slice_str.split(':')
    if len(parts) == 1:
        try:
            return int(slice_str)
        except ValueError:
            return slice(None)
    if len(parts) > 3:
        raise ValueError('too many colons in slice string: %r' % slice_str)
    int_parts = []
    for part in parts:
        try:
            int_part = int(part)
        except ValueError:
            int_part = None
        int_parts.append(int_part)
    return slice(*int_parts)
if __name__ == '__main__':
    # Ad-hoc demonstration: print parse results for well-formed, partial,
    # and whitespace-padded slice strings (int() tolerates padding).
    print(parse_slice_str('1:10:2'))
    print(parse_slice_str('1:10'))
    print(parse_slice_str(':-1'))
    print(parse_slice_str(':10'))
    print(parse_slice_str('1:'))
    print(parse_slice_str('22'))
    print(parse_slice_str(''))
    print()
    print(parse_slice_str(' 1:10:2'))
    print(parse_slice_str(' 1:10'))
    print(parse_slice_str(' :-1'))
    print(parse_slice_str(' :10'))
    print(parse_slice_str(' 1:'))
    print(parse_slice_str(' 22'))
    print(parse_slice_str(' '))
9726813 | <gh_stars>0
from app.main.conf import conf_data
from app.main.server.hostgroup import HostGroupCmd
from app.utils.NetCache import NetCache
from app import work_log
class Nginx_Acl(object):
"""docstring for Nginx_Acl"""
    def __init__(self):
        """Load the service user, reload command, and deny-list path from
        configuration, and connect the Redis-backed lock cache."""
        super(Nginx_Acl, self).__init__()
        # SSH/remote-exec user for the nginx hosts.
        self.user = conf_data("user_info", "default_user")
        service_script = conf_data("service_info", "local_service_script")
        # e.g. "<script> nginx reload" — run after every ACL change.
        self.reload_cmd = " ".join([service_script, "nginx", "reload"])
        # Path of the nginx config file holding the deny/lock pattern.
        self.deny_conf = conf_data("service_info", "nginx", "deny_conf")
        self.cache = NetCache()
    def get_run_data(self, data):
        """Summarise per-host command results into a single status dict.

        ``data`` is a list of per-host tuples whose first element appears
        to be an exit code (0 = ok, 2 = error) — TODO confirm against
        HostGroupCmd.run_cmd_task.
        """
        total = 0
        count = len(data)
        for i in data:
            total += i[0]
        if total == 0:
            # Every host succeeded.
            return {'recode': 0, 'redata': 'success'}
        if total < 2*count:
            # Some hosts failed (partial failure).
            return {'recode': 8, 'redata': data}
        if total == 2*count:
            # Every host failed.
            return {'recode': 9, 'redata': data}
def run_task_list(self, cmdlist):
cmdlist.append(self.reload_cmd) # add reload
work_log.debug(f"{cmdlist}")
hosts = conf_data("service_info", "nginx", "dmz")
try:
info = HostGroupCmd(hosts, self.user)
for i in cmdlist:
work_log.info(f"nginx lock task, exec: {i}")
run_data_list = info.run_cmd_task(i)
work_log.info(str(run_data_list))
work_log.debug("--------------------------")
# ้็้ฎ้ข๏ผๆญคๅคๅชๅค็ไบๆๅไธไธช่ฟๅ็ปๆ
return self.get_run_data(run_data_list)
work_log.debug("lock task all run")
# return {"redata": "success", "recode": 0}
except Exception as e:
work_log.error(str(e))
return {"recode": 2, "redata": "nginx server error"}
def clear_lock(self):
data = []
cmd = f"""sed -i '/http_x_forwarded_for/s/".*"/"172.16.31.10"/' {self.deny_conf}"""
data.append(cmd)
data.append(self.reload_cmd)
rundata = self.run_task_list(data)
if rundata.get('recode') == 0:
self.cache.setDel() # ๆธ
็ฉบredis็ผๅญ
work_log.info('cache clear')
elif rundata.get('recode') == 8:
self.cache.setDel()
work_log.info('ไปปๅกไธๅฎๅ
จๆๅ๏ผไป็ถๆธ
้คไบ cache lock')
elif rundata.get('recode') == 9:
work_log.info('not clear all lock')
return rundata
def show_lock(self):
lock_list = self.cache.setSmembers() # ่ทๅ้ๅๅ
จ้จๆฐๆฎ
work_log.info(f"task: showlock {lock_list}")
return {"redata": list(lock_list), "recode": 0}
def lock_ip(self, iplist):
cmdlist = []
lock_list = self.cache.setSmembers()
for ip in iplist:
work_log.debug(f"{ip}")
if ip not in lock_list:
cmd = (
f"""sed -i '/http_x_forwarded_for/s/")/|{ip}")/' {self.deny_conf}"""
)
cmdlist.append(cmd)
else:
work_log.error(f"lock: {ip} in lock_list")
if not cmdlist:
# ้่งฃ้็IPๅนถๆชๅฑ่ฝ
# ๅคไธชIPไธญ้จๅๅญๅจ็ๆ
ๅตๆ็ผ
return {"redata": str(iplist) + " IPๅฐๅๅทฒ็ปๅจ้ๅฎๅ่กจ", "recode": 1}
rundata = self.run_task_list(cmdlist)
if rundata.get('recode') == 0:
self.cache.setAdd(ip)
work_log.info('cache lock')
elif rundata.get('recode') == 8:
self.cache.setAdd(ip)
work_log.info(f'server run error, yes cache +lock')
elif rundata.get('recode') == 9:
work_log.info(f'server run error, not cache +lock')
return rundata
def unlock_ip(self, iplist):
cmdlist = []
lock_list = self.cache.setSmembers()
for ip in iplist:
work_log.debug(f"{ip}")
if ip in lock_list:
cmd = f"""sed -i '/http_x_forwarded_for/s/|{ip}//' {self.deny_conf}"""
cmdlist.append(cmd)
else:
work_log.error(f"unlock: {ip} no in lock_list")
if not cmdlist:
# ้่งฃ้็IPๅนถๆชๅฑ่ฝ
return {"redata": str(iplist) + " IPๅฐๅๆชๅจ้ๅฎๅ่กจ", "recode": 1}
rundata = self.run_task_list(cmdlist)
if rundata.get('recode') == 0:
self.cache.setRemove(ip)
work_log.info('cache unlock')
elif rundata.get('recode') == 8:
self.cache.setRemove(ip)
work_log.info('server run error, yes cache unlock')
elif rundata.get('recode') == 9:
work_log.info('server run error, not cache unlock')
return rundata
def run_task(self, iplist, task):
"""้ๅฎ/่งฃ้/ๆฅ็้/ๆธ
้ค้
"""
if task == "showlock":
return self.show_lock()
elif task == "clearlock":
return self.clear_lock()
elif task == "lock":
return self.lock_ip(iplist)
elif task == "unlock":
return self.unlock_ip(iplist)
else:
return {"redata": "task error", "recode": 9}
| StarcoderdataPython |
1902360 | from .asteroid_bot import AsteroidBot # noqa: F401
from .checks import ( # noqa: F401
_cog_is_enabled,
bot_owner_or_permissions,
cog_is_enabled,
is_administrator_or_bot_owner,
is_enabled,
)
from .consts import DiscordColors, SystemChannels # noqa: F401
from .database import ( # noqa: F401
GlobalData,
GlobalUser,
GuildAutoRole,
GuildConfiguration,
GuildData,
GuildPrivateVoice,
GuildStarboard,
GuildTag,
GuildUser,
)
from .discord_overrides import Cog, Embed # noqa: F401
from .errors import CogDisabledOnGuild # noqa: F401
from .errors import ( # noqa: F401
BotNotConnectedToVoice,
CommandDisabled,
DontHavePrivateRoom,
ForbiddenTag,
NoData,
NotConnectedToVoice,
NotGuild,
NotPlaying,
NotTagOwner,
PrivateVoiceNotSetup,
TagNotFound,
TagsIsPrivate,
UIDNotBinded,
)
from .functions import format_voice_time, transform_permission # noqa: F401
from .locales import get_content, load_localization # noqa: F401
| StarcoderdataPython |
1939712 | <gh_stars>0
from factory import Faker, Sequence, make_factory
from lms import models
# Test-data factory for HGroup model instances: sequentially numbered group
# names and a random 40-character hex authority id.
HGroup = make_factory(  # pylint:disable=invalid-name
    models.HGroup,
    name=Sequence(lambda n: f"Test Group {n}"),
    authority_provided_id=Faker("hexify", text="^" * 40),
)
| StarcoderdataPython |
# PostgreSQL connection used by this (test) settings module.
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql_psycopg2",
        "NAME": "app_test",
        "USER": "go",
        "PASSWORD": "go",
        "HOST": "localhost",
    }
}
# Memcached cache backend via pylibmc.
CACHES = {
    'default': {
        'BACKEND': 'django_pylibmc.memcached.PyLibMCCache',
        'LOCATION': 'localhost:11211',
        'TIMEOUT': 3,
        'BINARY': False,
        'OPTIONS': {  # Maps to pylibmc "behaviors"
            'tcp_nodelay': True,
            'ketama': True
        }
    }
}
# RapidSMS messaging backend: store messages in the database (for tests).
INSTALLED_BACKENDS = {
    "HTTP": {
        "ENGINE": "rapidsms.backends.database.DatabaseBackend",
    },
}
# Apps that the lettuce BDD runner should skip when harvesting features.
LETTUCE_AVOID_APPS = (
    'django_nose',
    'south',
    'django_extensions',
    'rapidsms.contrib.locations',
    'rapidsms.contrib.locations.nested',
    'bootstrap_pagination',
    'rapidsms.backends.database',
    'rapidsms.contrib.httptester',
    'djcelery',
)
| StarcoderdataPython |
248442 | import torch.nn as nn
from pedrec.configs.pedrec_net_config import PedRecNetConfig
from pedrec.networks.net_pedrec.pedrec_conv_transpose_base import PedRecConvTransposeBase
from pedrec.networks.net_pedrec.pedrec_pose_head_2d import PedRecPose2DHead
from pedrec.networks.net_resnet.resnet_feature_extractor import ResNetHeadless
from pedrec.utils.torch_utils.loss_functions import Pose2DL1Loss
class PedRecNet(nn.Module):
    """2D pose estimation network: a headless ResNet feature extractor
    followed by a shared transposed-convolution base and a 2D pose head."""

    def __init__(self, cfg: PedRecNetConfig):
        super(PedRecNet, self).__init__()
        self.cfg = cfg
        # Backbone CNN (ResNet without its classification head).
        self.feature_extractor = ResNetHeadless(cfg.layer.block, cfg.layer.layers)
        # Deconvolution base shared by the heads (num_heads=1: only pose 2D).
        self.conv_transpose_shared = PedRecConvTransposeBase(cfg, self.feature_extractor.inplanes, num_heads=1)
        self.head_pose_2d = PedRecPose2DHead(cfg, self.conv_transpose_shared.deconv_heads[0])

    def forward(self, x):
        x = self.feature_extractor(x)
        x_deconv = self.conv_transpose_shared(x)
        # NOTE(review): the sigmoid heat map is computed but discarded here;
        # only the 2D joint coordinates are returned.
        pose_coords_2d, pose_map_sigmoid_2d = self.head_pose_2d(x_deconv)
        return pose_coords_2d

    def init_weights(self):
        # Initialize only the non-backbone parts; the backbone presumably
        # keeps pretrained weights -- TODO confirm.
        self.conv_transpose_shared.init_weights()
        self.head_pose_2d.init_weights()
class PedRecNetLossHead(nn.Module):
    """Loss wrapper computing the 2D pose L1 loss against target skeletons."""

    def __init__(self, device):
        super(PedRecNetLossHead, self).__init__()
        self.pose_loss_2d = Pose2DL1Loss()
        # NOTE(review): stored but not used in forward().
        self.device = device

    def forward(self, outputs, targets):
        pose_2d_preds = outputs
        # *targets* is a dict; ground-truth joints live under "skeleton".
        pose_2d_targets = targets["skeleton"]
        return self.pose_loss_2d(pose_2d_preds, pose_2d_targets)
| StarcoderdataPython |
8146867 | import os
import numpy as np
from PIL import Image
import tensorflow as tf
from tensorflow.python.platform import gfile
from scipy.misc import imsave
## Convert BAIR robot pushing data to numpy to use with PyTorch
## Based on <NAME>'s script: https://github.com/edenton/svg/blob/master/data/convert_bair.py
def convert(data_path):
    """Convert the BAIR 'softmotion30_44k' TF records under *data_path* into
    per-sequence folders of PNG frames (30 frames of 64x64 RGB each).

    Output layout: <data_path>/<split>/<sequence index>/<frame>.png.
    NOTE(review): uses tf.python_io and scipy.misc.imsave, which only exist
    in TF 1.x and SciPy < 1.2 respectively.
    """
    # iterate through the data splits
    for data_split in ['train', 'test']:
        os.makedirs(os.path.join(data_path, data_split))
        data_split_path = os.path.join(data_path, 'softmotion30_44k', data_split)
        data_split_files = gfile.Glob(os.path.join(data_split_path, '*'))
        # iterate through the TF records
        for f in data_split_files:
            print('Current file: ' + f)
            ind = int(f.split('/')[-1].split('_')[1]) # starting video index
            # iterate through the sequences in this TF record
            for serialized_example in tf.python_io.tf_record_iterator(f):
                os.makedirs(os.path.join(data_path, data_split, str(ind)))
                example = tf.train.Example()
                example.ParseFromString(serialized_example)
                # iterate through the sequence
                for i in range(30):
                    image_name = str(i) + '/image_aux1/encoded'
                    byte_str = example.features.feature[image_name].bytes_list.value[0]
                    img = Image.frombytes('RGB', (64, 64), byte_str)
                    # flatten -> (H, W, 3) array scaled to [0, 1] floats
                    img = np.array(img.getdata()).reshape(img.size[1], img.size[0], 3) / 255.
                    imsave(os.path.join(data_path, data_split, str(ind), str(i) + '.png'), img)
                print(' Finished processing sequence ' + str(ind))
                ind += 1
| StarcoderdataPython |
8075699 | <gh_stars>100-1000
import unittest
from unittest import TestCase
from evcouplings.utils import *
import tempfile
class TestUtilsHelpers(TestCase):
    def test_wrap(self):
        """wrap() inserts line breaks at the requested width."""
        self.assertEqual("Te\nst", wrap("Test", width=2))

    def test_range_overlap_noOverlapPosNumber(self):
        """Disjoint ranges overlap by zero."""
        self.assertEqual(range_overlap((1, 2), (3, 4)), 0)

    def test_range_overlap_overlapPosNumber(self):
        """Partially overlapping ranges report the shared length."""
        self.assertEqual(range_overlap((1, 3), (2, 4)), 1)

    def test_range_overlap_start_greater_end(self):
        """A range given with start > end raises InvalidParameterError."""
        self.assertRaises(InvalidParameterError, range_overlap, (-2, -4), (-3, -1))
class TestUtilsProgressbar(TestCase):
    def test_initiation(self):
        """Constructing a Progressbar must not raise."""
        Progressbar(10, 10)

    def test_update(self):
        """update() must accept every step value without raising."""
        bar = Progressbar(5, 5)
        for step in range(5):
            bar.update(step)
class TestDefaultOrderdDict(TestCase):
    def test_defaultOrderedDict(self):
        """Insertion order must be preserved in the repr."""
        mapping = DefaultOrderedDict()
        mapping["one"] = 1
        mapping["a"] = 3
        mapping["two"] = 2
        self.assertEqual(
            "DefaultOrderedDict([('one', 1), ('a', 3), ('two', 2)])",
            str(mapping),
        )
class TestPersistentDict(TestCase):
    """Tests for PersistentDict's explicit sync/dump/load semantics."""

    def setUp(self):
        # Backing file for the dict under test; removed in tearDown.
        self.tmp_db = tempfile.NamedTemporaryFile(delete=False)

    def test_add_element(self):
        """
        Adding an element must NOT implicitly sync the dict to disk
        """
        d = PersistentDict(self.tmp_db.name)
        d["test"] = "insert"
        self.assertFalse(valid_file(self.tmp_db.name))

    def test_get_element(self):
        """
        An inserted element must be readable back from the dict
        """
        d = PersistentDict(self.tmp_db.name)
        d["test"] = "insert"
        self.assertEqual(d["test"], "insert")

    def test_sync(self):
        """
        sync() on a non-empty dict must write the backing file
        """
        d = PersistentDict(self.tmp_db.name)
        d["test"] = "insert"
        d.sync()
        self.assertTrue(valid_file(self.tmp_db.name))

    def test_sync_empty(self):
        """
        sync() on an empty dict must not produce a valid backing file
        """
        d = PersistentDict(self.tmp_db.name)
        d.sync()
        self.assertFalse(valid_file(self.tmp_db.name))

    def test_dump(self):
        """
        dump() of a non-empty dict must write a valid file
        """
        d = PersistentDict(self.tmp_db.name)
        tmp2 = tempfile.NamedTemporaryFile(mode="w", delete=False)
        d["test"] = "insert"
        d.dump(tmp2)
        tmp2.close()
        self.assertTrue(valid_file(tmp2.name))
        os.unlink(tmp2.name)

    def test_dump_empty(self):
        """
        dump() of an empty dict must not produce a valid file
        """
        d = PersistentDict(self.tmp_db.name)
        tmp2 = tempfile.NamedTemporaryFile(mode="w", delete=False)
        d.dump(tmp2)
        tmp2.close()
        self.assertFalse(valid_file(tmp2.name))
        os.unlink(tmp2.name)

    def test_load(self):
        """
        Data synced by one PersistentDict must be loadable by another
        """
        d = PersistentDict(self.tmp_db.name)
        d["test"] = "insert"
        d.sync()
        d.close()
        d2 = PersistentDict(self.tmp_db.name)
        d2.load(self.tmp_db)

    def tearDown(self):
        os.unlink(self.tmp_db.name)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| StarcoderdataPython |
1954344 | #!/usr/bin/env python3
from .metric import SourceAndReferencedMetric
from collections import Counter
import sacrebleu
import sacremoses
class SARI(SourceAndReferencedMetric):
    """SARI score for evaluating paraphrasing and other text generation models.

    The score is introduced in the following paper:

        Optimizing Statistical Machine Translation for Text Simplification
        <NAME>, <NAME>, <NAME>, <NAME> and <NAME>
        In Transactions of the Association for Computational Linguistics (TACL) 2015
        http://cs.jhu.edu/~napoles/res/tacl2016-optimizing.pdf

    This implementation is adapted from Tensorflow's tensor2tensor implementation [3].
    It has two differences with the original GitHub [1] implementation:

    (1) Define 0/0=1 instead of 0 to give higher scores for predictions that match
        a target exactly.
    (2) Fix an alleged bug [2] in the keep score computation.

    [1] https://github.com/cocoxu/simplification/blob/master/SARI.py
        (commit 0210f15)
    [2] https://github.com/cocoxu/simplification/issues/6
    [3] https://github.com/tensorflow/tensor2tensor/blob/master/tensor2tensor/utils/sari_hook.py
    """

    def compute(self, cache, predictions, references, sources):
        """Compute per-prediction SARI scores, scaled to 0-100.

        Returns a dict mapping prediction id -> {"sari": score}; each score
        is also written to *cache* (keyed by class name, prediction filename
        and id) when a cache is given.
        """
        # Normalize/tokenize everything so n-grams can be split on spaces.
        srcs = [self.normalize(sent) for sent in sources.untokenized]
        preds = [self.normalize(sent) for sent in predictions.untokenized]
        refs = [
            [self.normalize(sent) for sent in ref_sents]
            for ref_sents in references.untokenized
        ]

        sari_scores = {}
        for i in range(len(srcs)):
            score = {"sari": self.SARIsent(srcs[i], preds[i], refs[i]) * 100}
            sari_scores[predictions.ids[i]] = score
            # Write to cache if not None.
            if cache is not None:
                cache_key = (
                    self.__class__.__name__,
                    predictions.filename,
                    predictions.ids[i],
                )
                cache[cache_key] = score

        return sari_scores

    def SARIngram(self, sgrams, cgrams, rgramslist, numref):
        """Score one n-gram order.

        *sgrams*/*cgrams* are source/candidate n-grams, *rgramslist* holds
        the n-grams of each of the *numref* references.  Source and candidate
        counts are multiplied by *numref* to be comparable with the pooled
        reference counts.  Returns (keep F1, deletion precision, addition F1).
        """
        rgramsall = [rgram for rgrams in rgramslist for rgram in rgrams]
        rgramcounter = Counter(rgramsall)

        sgramcounter = Counter(sgrams)
        sgramcounter_rep = Counter()
        for sgram, scount in sgramcounter.items():
            sgramcounter_rep[sgram] = scount * numref

        cgramcounter = Counter(cgrams)
        cgramcounter_rep = Counter()
        for cgram, ccount in cgramcounter.items():
            cgramcounter_rep[cgram] = ccount * numref

        # KEEP
        keepgramcounter_rep = sgramcounter_rep & cgramcounter_rep
        keepgramcountergood_rep = keepgramcounter_rep & rgramcounter
        keepgramcounterall_rep = sgramcounter_rep & rgramcounter

        keeptmpscore1 = 0
        keeptmpscore2 = 0
        for keepgram in keepgramcountergood_rep:
            keeptmpscore1 += (
                keepgramcountergood_rep[keepgram] / keepgramcounter_rep[keepgram]
            )
            # Fix an alleged bug [2] in the keep score computation.
            # keeptmpscore2 += keepgramcountergood_rep[keepgram] / keepgramcounterall_rep[keepgram]
            keeptmpscore2 += keepgramcountergood_rep[keepgram]
        # Define 0/0=1 instead of 0 to give higher scores for predictions that match
        # a target exactly.
        keepscore_precision = 1
        keepscore_recall = 1
        if len(keepgramcounter_rep) > 0:
            keepscore_precision = keeptmpscore1 / len(keepgramcounter_rep)
        if len(keepgramcounterall_rep) > 0:
            # Fix an alleged bug [2] in the keep score computation.
            # keepscore_recall = keeptmpscore2 / len(keepgramcounterall_rep)
            keepscore_recall = keeptmpscore2 / sum(keepgramcounterall_rep.values())
        keepscore = 0
        if keepscore_precision > 0 or keepscore_recall > 0:
            keepscore = (
                2
                * keepscore_precision
                * keepscore_recall
                / (keepscore_precision + keepscore_recall)
            )

        # DELETION
        delgramcounter_rep = sgramcounter_rep - cgramcounter_rep
        delgramcountergood_rep = delgramcounter_rep - rgramcounter
        delgramcounterall_rep = sgramcounter_rep - rgramcounter
        deltmpscore1 = 0
        deltmpscore2 = 0
        for delgram in delgramcountergood_rep:
            deltmpscore1 += (
                delgramcountergood_rep[delgram] / delgramcounter_rep[delgram]
            )
            deltmpscore2 += (
                delgramcountergood_rep[delgram] / delgramcounterall_rep[delgram]
            )
        # Define 0/0=1 instead of 0 to give higher scores for predictions that match
        # a target exactly.
        delscore_precision = 1
        delscore_recall = 1
        if len(delgramcounter_rep) > 0:
            delscore_precision = deltmpscore1 / len(delgramcounter_rep)
        if len(delgramcounterall_rep) > 0:
            # NOTE(review): recall reuses deltmpscore1 (not deltmpscore2),
            # matching the upstream implementation this was adapted from --
            # confirm against [3] before changing.
            delscore_recall = deltmpscore1 / len(delgramcounterall_rep)
        delscore = 0
        if delscore_precision > 0 or delscore_recall > 0:
            delscore = (
                2
                * delscore_precision
                * delscore_recall
                / (delscore_precision + delscore_recall)
            )

        # ADDITION
        addgramcounter = set(cgramcounter) - set(sgramcounter)
        addgramcountergood = set(addgramcounter) & set(rgramcounter)
        addgramcounterall = set(rgramcounter) - set(sgramcounter)

        addtmpscore = 0
        for addgram in addgramcountergood:
            addtmpscore += 1
        # Define 0/0=1 instead of 0 to give higher scores for predictions that match
        # a target exactly.
        addscore_precision = 1
        addscore_recall = 1
        if len(addgramcounter) > 0:
            addscore_precision = addtmpscore / len(addgramcounter)
        if len(addgramcounterall) > 0:
            addscore_recall = addtmpscore / len(addgramcounterall)
        addscore = 0
        if addscore_precision > 0 or addscore_recall > 0:
            addscore = (
                2
                * addscore_precision
                * addscore_recall
                / (addscore_precision + addscore_recall)
            )

        return (keepscore, delscore_precision, addscore)

    def SARIsent(self, ssent, csent, rsents):
        """SARI for a single sentence: average the keep/delete/add scores of
        n-gram orders 1-4 over source *ssent*, candidate *csent* and the
        list of reference sentences *rsents* (all pre-tokenized, space-split)."""
        numref = len(rsents)

        s1grams = ssent.split(" ")
        c1grams = csent.split(" ")
        s2grams = []
        c2grams = []
        s3grams = []
        c3grams = []
        s4grams = []
        c4grams = []
        r1gramslist = []
        r2gramslist = []
        r3gramslist = []
        r4gramslist = []
        # Collect 1- to 4-grams for every reference.
        for rsent in rsents:
            r1grams = rsent.split(" ")
            r2grams = []
            r3grams = []
            r4grams = []
            r1gramslist.append(r1grams)
            for i in range(0, len(r1grams) - 1):
                if i < len(r1grams) - 1:
                    r2gram = r1grams[i] + " " + r1grams[i + 1]
                    r2grams.append(r2gram)
                if i < len(r1grams) - 2:
                    r3gram = r1grams[i] + " " + r1grams[i + 1] + " " + r1grams[i + 2]
                    r3grams.append(r3gram)
                if i < len(r1grams) - 3:
                    r4gram = (
                        r1grams[i]
                        + " "
                        + r1grams[i + 1]
                        + " "
                        + r1grams[i + 2]
                        + " "
                        + r1grams[i + 3]
                    )
                    r4grams.append(r4gram)
            r2gramslist.append(r2grams)
            r3gramslist.append(r3grams)
            r4gramslist.append(r4grams)

        # 2- to 4-grams of the source sentence.
        for i in range(0, len(s1grams) - 1):
            if i < len(s1grams) - 1:
                s2gram = s1grams[i] + " " + s1grams[i + 1]
                s2grams.append(s2gram)
            if i < len(s1grams) - 2:
                s3gram = s1grams[i] + " " + s1grams[i + 1] + " " + s1grams[i + 2]
                s3grams.append(s3gram)
            if i < len(s1grams) - 3:
                s4gram = (
                    s1grams[i]
                    + " "
                    + s1grams[i + 1]
                    + " "
                    + s1grams[i + 2]
                    + " "
                    + s1grams[i + 3]
                )
                s4grams.append(s4gram)

        # 2- to 4-grams of the candidate sentence.
        for i in range(0, len(c1grams) - 1):
            if i < len(c1grams) - 1:
                c2gram = c1grams[i] + " " + c1grams[i + 1]
                c2grams.append(c2gram)
            if i < len(c1grams) - 2:
                c3gram = c1grams[i] + " " + c1grams[i + 1] + " " + c1grams[i + 2]
                c3grams.append(c3gram)
            if i < len(c1grams) - 3:
                c4gram = (
                    c1grams[i]
                    + " "
                    + c1grams[i + 1]
                    + " "
                    + c1grams[i + 2]
                    + " "
                    + c1grams[i + 3]
                )
                c4grams.append(c4gram)

        # Score each n-gram order, then average the three sub-scores.
        (keep1score, del1score, add1score) = self.SARIngram(
            s1grams, c1grams, r1gramslist, numref
        )
        (keep2score, del2score, add2score) = self.SARIngram(
            s2grams, c2grams, r2gramslist, numref
        )
        (keep3score, del3score, add3score) = self.SARIngram(
            s3grams, c3grams, r3gramslist, numref
        )
        (keep4score, del4score, add4score) = self.SARIngram(
            s4grams, c4grams, r4gramslist, numref
        )
        avgkeepscore = sum([keep1score, keep2score, keep3score, keep4score]) / 4
        avgdelscore = sum([del1score, del2score, del3score, del4score]) / 4
        avgaddscore = sum([add1score, add2score, add3score, add4score]) / 4
        finalscore = (avgkeepscore + avgdelscore + avgaddscore) / 3

        return finalscore

    def normalize(
        self,
        sentence,
        lowercase: bool = True,
        tokenizer: str = "13a",
        return_str: bool = True,
    ):
        """Lowercase (optionally) and tokenize *sentence* so that n-grams can
        later be produced by splitting on single spaces."""
        # Normalization is requried for the ASSET dataset to allow using space
        # to split the sentence. Even though Wiki-Auto and TURK datasets,
        # do not require normalization, we do it for consistency.
        # Code adapted from the EASSE library [1] written by the authors of the ASSET dataset.
        # [1] https://github.com/feralvam/easse/blob/580bba7e1378fc8289c663f864e0487188fe8067/easse/utils/preprocessing.py#L7
        if lowercase:
            sentence = sentence.lower()

        if tokenizer in ["13a", "intl"]:
            normalized_sent = sacrebleu.TOKENIZERS[tokenizer]()(sentence)
        elif tokenizer == "moses":
            normalized_sent = sacremoses.MosesTokenizer().tokenize(
                sentence, return_str=True, escape=False
            )
        elif tokenizer == "penn":
            normalized_sent = sacremoses.MosesTokenizer().penn_tokenize(
                sentence, return_str=True
            )
        else:
            # Unknown tokenizer name: pass the sentence through unchanged.
            normalized_sent = sentence

        if not return_str:
            normalized_sent = normalized_sent.split()

        return normalized_sent
| StarcoderdataPython |
103377 | <gh_stars>0
def convolve(pixel, kernel, i, j):
    """Placeholder convolution: ignores *kernel* and returns pixel (i, j)
    unchanged (equivalent to applying the identity kernel)."""
    return pixel[i][j]


# Identity kernel, passed to convolve() as a stand-in for a real filter.
kernel_identity = [
    [0, 0, 0],
    [0, 1, 0],
    [0, 0, 0]
]


def main():
    """Read image.ppm (P6, header fields each on their own line) and write
    round.ppm where pure-white pixels become black and all others white.

    Bug fixes vs. the original script: the threshold used assignment (=)
    instead of comparison (==), and tried to assign into the immutable RGB
    tuple (``RGB[0] = x``) instead of unpacking it.
    """
    with open("image.ppm", "rb") as fin:
        # Raw header lines are kept so they can be written back verbatim.
        filetype_raw = next(fin)
        width_raw = next(fin)
        height_raw = next(fin)
        maxval_raw = next(fin)

        width = int(width_raw.decode('ascii'))
        height = int(height_raw.decode('ascii'))
        maxval = int(maxval_raw.decode('ascii'))  # parsed but unused (kept for validation)

        # Read the binary raster: one (R, G, B) tuple per pixel.
        pixel = [[0 for j in range(width)] for i in range(height)]
        for i in range(height):
            for j in range(width):
                R = int.from_bytes(fin.read(1), 'big')
                G = int.from_bytes(fin.read(1), 'big')
                B = int.from_bytes(fin.read(1), 'big')
                pixel[i][j] = (R, G, B)

    # Threshold: pure white -> black, everything else -> white.
    out = []
    for i in range(height):
        for j in range(width):
            x, y, z = convolve(pixel, kernel_identity, i, j)
            if x == 255 and y == 255 and z == 255:
                out.extend((0, 0, 0))
            else:
                out.extend((255, 255, 255))

    with open("round.ppm", "wb") as fout:
        fout.write(filetype_raw)
        fout.write(width_raw)
        fout.write(height_raw)
        fout.write(maxval_raw)
        fout.write(bytes(out))


if __name__ == "__main__":
    main()
| StarcoderdataPython |
4894349 | from sklearn import cluster, datasets
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
# import pylab
# Load the data matrix (no header row) from the spreadsheet.
df = pd.read_excel('120_data.xlsx', header=None)
X = df.to_numpy()
# KMeans clustering with 3 clusters
kmeans_fit = cluster.KMeans(n_clusters = 3).fit(X)
# cluster assignment for each sample
cluster_labels = kmeans_fit.labels_
# print(cluster_labels)
# print("---")
# plot the first three feature columns in 3D, colored by cluster
# NOTE(review): Axes3D(fig) is deprecated in modern matplotlib; newer
# versions require fig.add_subplot(projection='3d') -- confirm the pinned
# matplotlib version before upgrading.
fig = plt.figure()
ax = Axes3D(fig)
ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=cluster_labels, cmap='Set1')
plt.show()
3437917 | <reponame>giyyanan/liota
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------------#
# Copyright ยฉ 2015-2016 VMware, Inc. All Rights Reserved. #
# #
# Licensed under the BSD 2-Clause License (the โLicenseโ); you may not use #
# this file except in compliance with the License. #
# #
# The BSD 2-Clause License #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions are met:#
# #
# - Redistributions of source code must retain the above copyright notice, #
# this list of conditions and the following disclaimer. #
# #
# - Redistributions in binary form must reproduce the above copyright #
# notice, this list of conditions and the following disclaimer in the #
# documentation and/or other materials provided with the distribution. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"#
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE #
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE #
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE #
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR #
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF #
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS #
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN #
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) #
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF #
# THE POSSIBILITY OF SUCH DAMAGE. #
# ----------------------------------------------------------------------------#
import logging
from liota.device_comms.device_comms import DeviceComms
from liota.lib.transports.mqtt import Mqtt
log = logging.getLogger(__name__)
class MqttDeviceComms(DeviceComms):
    """
    DeviceComms for MQTT Transport: thin wrapper around the Mqtt transport
    that connects on construction and exposes publish/subscribe.
    """

    def __init__(self, url, port, identity=None, tls_conf=None, qos_details=None,
                 client_id="", clean_session=False, userdata=None, protocol="MQTTv311", transport="tcp", keep_alive=60,
                 enable_authentication=False, conn_disconn_timeout=10):
        """
        :param url: MQTT Broker URL or IP
        :param port: MQTT Broker Port
        :param identity: Identity object
        :param tls_conf: TLSConf object
        :param qos_details: QoSDetails object
        :param client_id: Client ID
        :param clean_session: Connect with Clean session or not
        :param userdata: userdata is user defined data of any type that is passed as the "userdata"
                         parameter to callbacks.
        :param protocol: allows explicit setting of the MQTT version to use for this client
        :param transport: Set transport to "websockets" to use WebSockets as the transport
                          mechanism. Set to "tcp" to use raw TCP, which is the default.
        :param keep_alive: KeepAliveInterval
        :param enable_authentication: Enable user-name password authentication or not
        :param conn_disconn_timeout: Connect-Disconnect-Timeout
        """
        self.url = url
        self.port = port
        self.identity = identity
        self.tls_conf = tls_conf
        self.client_id = client_id
        self.qos_details = qos_details
        self.clean_session = clean_session
        self.userdata = userdata
        self.protocol = protocol
        self.transport = transport
        self.keep_alive = keep_alive
        self.enable_authentication = enable_authentication
        self.conn_disconn_timeout = conn_disconn_timeout
        # Connect immediately; construction fails if the broker is unreachable.
        self._connect()

    def _connect(self):
        """
        Initializes Mqtt Transport and connects to MQTT broker.

        :return:
        """
        self.client = Mqtt(self.url, self.port, self.identity, self.tls_conf, self.qos_details, self.client_id,
                           self.clean_session, self.userdata, self.protocol, self.transport, self.keep_alive,
                           self.enable_authentication, self.conn_disconn_timeout)

    def _disconnect(self):
        """
        Disconnects from MQTT broker.

        :return:
        """
        self.client.disconnect()

    def publish(self, topic, message, qos, retain=False):
        """
        Publishes message to the MQTT Broker

        :param topic: Publish topic
        :param message: Message to be published
        :param qos: Publish QoS
        :param retain: Message to be retained or not
        :return:
        """
        self.client.publish(topic, message, qos, retain)

    def subscribe(self, topic, qos, callback):
        """
        Subscribes to a topic with given callback

        :param topic: Subscribe topic
        :param qos: Subscribe QoS
        :param callback: Callback for the topic
        :return:
        """
        self.client.subscribe(topic, qos, callback)

    def send(self, message):
        """
        Send message to MQTT broker.

        TODO: To be implemented.  Use publish directly.

        :param message: String message to be sent.
        :return:
        """
        raise NotImplementedError

    def receive(self):
        """
        Receive message from MQTT broker.

        TODO: To be implemented.  Use subscribe directly.

        :return:
        """
        raise NotImplementedError
| StarcoderdataPython |
1693813 | from django.db import models
from .ubicacion import Ubicacion
class Nodo(models.Model):
    """A named node tied one-to-one to a physical location (Ubicacion)."""

    # Deleting the Ubicacion cascades and removes this Nodo as well.
    ubicacion = models.OneToOneField(Ubicacion, on_delete=models.CASCADE, verbose_name="Ubicacion")
    nombre = models.CharField(max_length=100)

    def __str__(self):
        return self.nombre
11318125 | <gh_stars>0
# one_step_forward_euler.py
import numpy as np
from ..model import *
def get_one_step_forward_euler_method(mu,lam,gamma):
    """Build a jitted per-element forward-Euler update (asynchronous
    variational integrator style).

    The returned function gathers one tetrahedron's nodal state, advances it
    to time t via compute_one_step_forward_euler_method and returns the
    updated (K_vertices, K_velocities).  mu/lam/gamma are material/damping
    parameters forwarded to the force computation -- presumably Lame
    parameters plus damping; confirm against the model module.
    Note: element/node times (tauK, tau) are NOT updated here (see the
    commented-out lines); callers must do that themselves.
    """
    compute_one_step_forward_euler_method=get_compute_one_step_forward_euler_method(mu,lam,gamma)
    # Scratch 4x3 zero matrix reused for every force evaluation.
    zero_mat = np.zeros((4,3))
    @njit
    def one_step_forward_euler_method(t, K_index, element_array_index, tauK, tau,
        vertices, velocities, node_array_mass, element_array_inverse_equilibrium_position):
        #compute the namespace of the force computation of the elemental configuration
        Ka = element_array_index[K_index]
        tau_of_K = tauK[K_index]
        K_tau = tau[Ka]
        # .copy() so the global arrays are not mutated in place here.
        K_vertices = vertices[Ka].copy()
        K_velocities = velocities[Ka].copy()
        K_masses = node_array_mass[Ka]
        Bm = element_array_inverse_equilibrium_position[K_index]
        # K_W = compute_element_volume(node_array_position=vertices, element_array_index=element_array_index, K_index=K_index)
        #Ds = get_D_mat(K_vertices)
        K_vertices, K_velocities = compute_one_step_forward_euler_method(t, K_vertices, K_velocities, K_masses, K_tau, tau_of_K, Bm, zero_mat) #updates nodal times
        # #update element's time
        # tauK[K_index] = t
        # #update node's time
        # tau[Ka] = t
        return K_vertices, K_velocities
    return one_step_forward_euler_method
def get_compute_one_step_forward_euler_method(mu,lam,gamma):
    """Build the jitted core update: advance one tetrahedron's vertices by
    their velocities, evaluate the elastic force, then advance velocities."""
    compute_force = get_compute_force(mu,lam,gamma)
    @njit
    def compute_one_step_forward_euler_method(t, K_vertices, K_velocities, K_masses, K_tau, tau_of_K, Bm, zero_mat):
        '''integrates the inputed element configuration up to time t. element time is not updated by this function.
        also updates K_velocities, K_vertices, K_tau, to time t.'''
        Na = 4 # number of tetrahedral nodes, which is 4
        # t, K_index, vertices, velocities, Ka, tau, tauK, elements, element_array_inverse_equilibrium_position, zero_mat, node_array_mass):
        # Delta_x = np.multiply (K_velocities , (t - K_tau))
        # Position update: each node advances by its own elapsed time t - tau[a].
        for a in range(Na):
            K_vertices[a] += K_velocities[a] * (t - K_tau[a]) #+ Delta_x[a]
            # K_vertices[a] = K_vertices[a] + K_velocities[a] * (t - K_tau[a]) #+ Delta_x[a]
        #compute the Ds matrix
        Ds = get_D_mat(K_vertices)
        K_W = get_element_volume(Ds)
        # #update node times
        # for a in range(Na):
        #     K_tau[a] = t
        # #but with what acceleration do I define the rate of change of velocity? Which t^* is correct? Let's say the next one.
        # v = K_velocities.copy()
        # K_velocities[a] += K_accelerations[a] * (t - K_tau[a])
        #compute the nodal forces for the tetrahedral element at the next time
        force = compute_force(K_velocities, Ds, K_W, Bm, zero_mat.copy())
        # force = compute_force(K_velocities, Ds, K_W, Bm, zero_mat) #is this faster? also doesn't update zero_mat?
        # Delta_v = np.multiply ( (t - tau_of_K) / K_masses , force )
        # Velocity update: all nodes use the element's elapsed time t - tau_of_K.
        for a in range(Na):
            K_velocities[a] += (t - tau_of_K) / K_masses[a] * force[a] #+ Delta_v[a]
            # K_velocities[a] = K_velocities[a] + (t - tau_of_K) / K_masses[a] * force[a] #+ Delta_v[a]
        #TODO(later): if node is not a boundary node, set velocity to zero
        # return Delta_x, Delta_v
        return K_vertices, K_velocities
    return compute_one_step_forward_euler_method
# ##################################################################
# # Example Usage: update one element 50,000 times per second
# ##################################################################
# one_step_forward_euler_method = get_one_step_forward_euler_method(mu=1.,lam=1.,gamma=1.)
# K_index = 245
# for t in np.linspace(18, 19, 50000):
# one_step_forward_euler_method(t, K_index, element_array_index, tauK, tau, vertices, velocities, node_array_mass, element_array_inverse_equilibrium_position)
######################################################
# The following is deprecated
######################################################
def get_compute_one_step_map_forward_euler(mu,lam):
    """DEPRECATED variant returning (Delta_x, Delta_v) instead of the updated
    state.  NOTE(review): broken as written -- see comments in the body."""
    comp_nodal_elastic_forces = get_comp_nodal_elastic_forces(mu, lam)
    @njit
    def compute_one_step_map_forward_euler(t, K_vertices, K_velocities, K_masses, K_tau, tau_of_K, Ds, Bm, K_W, zero_mat):
        '''returns Delta_x, Delta_v, which updated K_vertices, K_velocities to time t.
        also updates K_velocities, K_vertices, K_tau, tau_of_K to time t.'''
        Na = 4 # number of tetrahedral nodes, which is 4
        # t, K_index, vertices, velocities, Ka, tau, tauK, elements, element_array_inverse_equilibrium_position, zero_mat, node_array_mass):
        # NOTE(review): ``a`` is used here before any loop defines it -> NameError
        # at runtime; presumably this was meant to be the whole-array product
        # np.multiply(K_velocities, ...) as in the commented-out line of the
        # non-deprecated version above.
        Delta_x = np.multiply (K_velocities[a] , (t - K_tau[a]))
        for a in range(Na):
            K_vertices[a] = K_vertices[a] + Delta_x[a]
        #update node times
        for a in range(Na):
            K_tau[a] = t
        #compute the nodal forces for each tetrahedral node
        f = comp_nodal_elastic_forces(K_W, Bm, Ds, zero_mat.copy())
        #TODO(later): include any other forces, such as nodal forces, pressure forces, etc.
        #net nodal forces
        force = f
        Delta_v = np.multiply ( (t - tau_of_K) / K_masses , force )
        #update node velocities
        for a in range(Na):
            K_velocities[a] = K_velocities[a] + Delta_v[a]
        #update element's time
        # NOTE(review): this only rebinds the local name; the caller's tauK
        # array is not updated, contrary to the docstring.
        tau_of_K = t
        #TODO(later): if node is not a boundary node, set velocity to zero
        return Delta_x, Delta_v
    return compute_one_step_map_forward_euler
def get_one_step_forward_euler_simplified(mu,lam):
    """DEPRECATED simplified variant: like the main method but takes Ds, Bm
    and K_W precomputed by the caller and updates K_tau in place."""
    comp_nodal_elastic_forces = get_comp_nodal_elastic_forces(mu, lam)
    @njit
    def one_step_forward_euler_simplified(t, K_vertices, K_velocities, K_masses, K_tau, tau_of_K, Ds, Bm, K_W, zero_mat):
        '''returns K_vertices, K_velocities, having been updated to time t.'''
        Na = 4 # number of tetrahedral nodes, which is 4
        # t, K_index, vertices, velocities, Ka, tau, tauK, elements, element_array_inverse_equilibrium_position, zero_mat, node_array_mass):
        # Position update per node, using each node's own elapsed time.
        for a in range(Na):
            K_vertices[a] = K_vertices[a] + K_velocities[a] * (t - K_tau[a])
        #update node times
        for a in range(Na):
            K_tau[a] = t
        #compute the nodal forces for each tetrahedral node
        f = comp_nodal_elastic_forces(K_W, Bm, Ds, zero_mat.copy())
        #TODO(later): include any other forces, such as nodal forces, pressure forces, etc.
        #net nodal forces
        force = f
        #update node velocities
        for a in range(Na):
            K_velocities[a] = K_velocities[a] + ( (t - tau_of_K) / K_masses[a]) * force[a]
        #TODO(later): if node is not a boundary node, set velocity to zero
        return K_vertices, K_velocities
    return one_step_forward_euler_simplified
# @njit
def one_step_forward_euler_bulky(t, K_index, vertices, velocities, Ka, tau, tauK, elements,
                                 element_array_inverse_equilibrium_position, zero_mat, node_array_mass):
    '''One forward-Euler step for element K_index, operating on the global arrays.
    Doesn't update times for nodes (tau) or elements (tauK); the caller does that.
    Returns the (mutated) vertices and velocities arrays.'''
    # advance this element's nodes to time t
    for a in Ka:
        vertices[a] = vertices[a] + velocities[a] * (t - tau[a])
    #compute the nodal forces for each tetrahedral
    K_W = compute_element_volume(node_array_position=vertices, element_array_index=elements, K_index=K_index)
    Bm = element_array_inverse_equilibrium_position[K_index]
    # Gather the element's (already advanced) node positions; the original
    # referenced K_vertices without ever defining it, which raised NameError.
    K_vertices = vertices[Ka]
    f = compute_nodal_elastic_forces(K_vertices, K_W, Bm, f=zero_mat.copy())
    #TODO(later): include any other forces, such as nodal forces, pressure forces, etc.
    #net nodal forces
    force = f
    #update node velocities
    for j, a in enumerate(Ka):
        velocities[a] = velocities[a] + ((t - tauK[K_index]) / node_array_mass[a]) * force[j]
    #TODO(later): if node is not a boundary node, set velocity to zero
    return vertices, velocities
# ##################################################################
# # Example Usage: one update task within an AVI
# ##################################################################
# #one elemental time update
# #update node positions
# Ka = elements[K_index]
# K_vertices = vertices[Ka]
# vertices, velocities = one_step_forward_euler_bulky(t, K_index, Ka, vertices, velocities, tau, tauK,
# elements, element_array_inverse_equilibrium_position, zero_mat, node_array_mass)
# #update node times
# for a in Ka:
# tau[a] = t
# #update element's time
# tauK[K_index] = t
# #compute next time for element's evaluation
# tKnext = t + stepsize #_compute_next_time(K, t, stepsize)
# ##################################################################
# # Example Usage: one update task within an AVI
# ##################################################################
# #given initialization as in explicit.py
# #simplified elemental time update
# Ka = elements[K_index]
# K_vertices = vertices[Ka]
# K_velocities = velocities[Ka]
# K_masses = node_array_mass[Ka]
# K_tau = tau[Ka]
# tau_of_K = tauK[K_index]
# Ds = get_D_mat(K_vertices)
# Bm = element_array_inverse_equilibrium_position[K_index]
# K_W = get_element_volume(Ds)
# K_vertices, K_velocities = one_step_forward_euler_simplified(t, K_vertices, K_velocities,
# K_masses, K_tau, tau_of_K, Ds, Bm, K_W, zero_mat)
# #TODO(later): if node is a boundary node, set velocity to zero
# #update element's time
# tauK[K_index] = t
# #compute next time for element's evaluation
# tKnext = t + stepsize #_compute_next_time(K, t, stepsize)
| StarcoderdataPython |
1945888 | """
BUFR - Hodograph
"""
# (C) Copyright 2017- ECMWF.
#
# This software is licensed under the terms of the Apache Licence Version 2.0
# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
#
# In applying this licence, ECMWF does not waive the privileges and immunities
# granted to it by virtue of its status as an intergovernmental organisation
# nor does it submit to any jurisdiction.
#
import metview as mv
import math
# read BUFR data
filename = "temp.bufr"
if mv.exist(filename):
    b = mv.read(filename)
else:
    b = mv.gallery.load_dataset(filename)
# define station id
statid = "78583"
# extract thermo profile for the chosen station
nc = mv.thermo_bufr(data=b, station=mv.stations(search_key="ident", ident=statid))
# get individual profiles as vectors. Values are sorted by descending
# pressure, no missing values included.
info = mv.thermo_data_values(nc, 0)
p = info["p_wind"]  # pressure levels of the wind observations
u = info["u"]  # zonal wind component
v = info["v"]  # meridional wind component
# define the hodograph background
hodo_incr = 5  # spacing of the speed circles
hodo_highlight = [10, 20, 30]  # circles drawn with a thicker line
hodo_labels = [10, 20, 30]  # circles labelled on the horizontal axis
hodo_max = 35  # outermost speed shown
hodo_colour = "black"
# define the pressure bins (hPa) and their associated colours
pres_bins = [1050, 700, 500, 300, 200, 50]
pres_colours = ["red", "kelly_green", "sky", "blue", "magenta"]
# define horizontal and vertical axes: the horizontal axis sits at the
# bottom, the vertical axis on the left (the original had the two
# axis_position values swapped)
h_axis = mv.maxis(axis_position="bottom", axis_tick_label_height=0.4)
v_axis = mv.maxis(axis_position="left", axis_tick_label_height=0.4)
# the view: a square [-hodo_max, hodo_max] cartesian view
view = mv.cartesianview(
    x_automatic="off",
    x_min=-hodo_max,
    x_max=hodo_max,
    y_automatic="off",
    y_min=-hodo_max,
    y_max=hodo_max,
    horizontal_axis=h_axis,
    # the original passed h_axis for both axes, leaving v_axis unused
    vertical_axis=v_axis,
    subpage_x_position=10,
    subpage_y_position=5,
    subpage_x_length=80,
    subpage_y_length=80,
)
# define the plot page and superpage.
# NOTE: In order to correctly render the hodograph (we want
# concentric circles instead of ellipses) we have to make sure
# that the physical width and height of the plot are the same.
# Please note that while the page size is defined in % the
# superpage size is defined in cm! See also subpage size in the view.
# size is in % of the physical size of the superpage!
hodo_page = mv.plot_page(top=0, bottom=100, left=0, right=100, view=view)
# size is in cm!
dw = mv.plot_superpage(
    layout_size="custom", custom_width=15, custom_height=15, pages=hodo_page
)
gr_lst = []
# build the concentric circles (one point every 2 degrees)
sp = hodo_incr
angle_incr = 2 * math.pi / 180
while sp <= hodo_max:
    xp = [math.cos(i * angle_incr) * sp for i in range(1, 182)]
    yp = [math.sin(i * angle_incr) * sp for i in range(1, 182)]
    if sp in hodo_highlight:
        # highlighted circles use a thicker line
        gr = mv.xy_curve(xp, yp, hodo_colour, "solid", 3)
    else:
        gr = mv.xy_curve(xp, yp, hodo_colour, "solid", 1)
    gr_lst.append(gr)
    sp += hodo_incr
# build horizontal and vertical lines going
# through the centre
gr_lst.append(mv.xy_curve([-hodo_max, hodo_max], [0, 0], hodo_colour, "solid", 1))
gr_lst.append(mv.xy_curve([0, 0], [-hodo_max, hodo_max], hodo_colour, "solid", 1))
# build labels on the horizontal line; the comprehension variable v shadows
# the wind component only inside the comprehension scope (safe in Python 3)
vis = mv.input_visualiser(
    input_plot_type="xy_point",
    input_x_values=[-v for v in hodo_labels] + hodo_labels,
    input_y_values=[0 for i in range(len(hodo_labels) * 2)],
    input_values=hodo_labels + hodo_labels,
)
sym = mv.msymb(
    symbol_colour=hodo_colour,
    symbol_text_font_size=0.5,
    symbol_text_font_style="bold",
    symbol_text_position="bottom",
)
gr_lst.extend([vis, sym])
# build the graphical objects for the wind data (per pressure bin)
gr_wind = []
for i in range(len(pres_bins) - 1):
    # collect wind data whose pressure falls inside this bin
    u_val = []
    v_val = []
    for k in range(len(p)):
        if (
            not math.isnan(p[k])
            and not math.isnan(u[k])
            and not math.isnan(v[k])
            and p[k] <= pres_bins[i]
            and p[k] >= pres_bins[i + 1]
        ):
            u_val.append(u[k])
            v_val.append(v[k])
    # build graph object (only when the bin actually has data)
    if u_val and v_val:
        vis = mv.input_visualiser(input_x_values=u_val, input_y_values=v_val)
        gr = mv.mgraph(
            legend="on",
            graph_line_colour=pres_colours[i],
            graph_line_style="solid",
            graph_line_thickness=5,
        )
        gr_wind.extend([vis, gr])
# define legend with custom labels ("1050-700", "700-500", ...)
legend_text = []
for i in range(len(pres_bins) - 1):
    legend_text.append(str(pres_bins[i]) + "-" + str(pres_bins[i + 1]))
legend = mv.mlegend(
    legend_display_type="disjoint",
    legend_text_font_size=0.5,
    legend_text_composition="user_text_only",
    legend_user_lines=legend_text,
)
# define title
title_txt = "HODOGRAPH Date: {} {} Station: {} Lat/Lon: {}/{}".format(
    info["date"], info["time"], info["station"], info["lat"], info["lon"]
)
title = mv.mtext(text_lines=title_txt, text_font_size=0.5, text_colour="charcoal")
# define the output plot file
mv.setoutput(mv.pdf_output(output_name="hodograph"))
# generate the plot
mv.plot(dw, gr_lst, gr_wind, legend, title)
| StarcoderdataPython |
1806183 | <filename>examples/step_dm_control_env.py
#!/usr/bin/env python3
"""Example of how to load, step, and visualize an environment.
This example requires that garage[dm_control] be installed.
"""
import argparse
from garage.envs.dm_control import DmControlEnv
parser = argparse.ArgumentParser()
parser.add_argument(
    '--n_steps', type=int, default=1000, help='Number of steps to run')
args = parser.parse_args()

# Construct the environment
env = DmControlEnv.from_suite('walker', 'run')

# Reset the environment and launch the viewer
env.reset()
env.render()

# Take random actions until the requested number of steps has been run
step_count = 0
while step_count != args.n_steps:
    env.step(env.action_space.sample())
    env.render()
    step_count += 1
| StarcoderdataPython |
3330053 | <reponame>coquid/pygcn
from __future__ import division
from __future__ import print_function
import time
import argparse
import numpy as np
import torch
import torch.nn.functional as F
import torch.optim as optim
from pygcn.utils import load_my_data
from pygcn.models import GCN, MyGCN_v1, MyGCN_v2, MyGCN_v3, MyGCN_v4, MyGCN_v5, MyGCN_v6
import pickle
# Training settings (command-line hyper-parameters)
parser = argparse.ArgumentParser()
parser.add_argument('--no-cuda', action='store_true', default=False,
                    help='Disables CUDA training.')
parser.add_argument('--fastmode', action='store_true', default=False,
                    help='Validate during training pass.')
parser.add_argument('--seed', type=int, default=42, help='Random seed.')
parser.add_argument('--epochs', type=int, default=75,
                    help='Number of epochs to train.')
parser.add_argument('--lr', type=float, default=0.00002,
                    help='Initial learning rate.')
parser.add_argument('--weight_decay', type=float, default=5e-4,
                    help='Weight decay (L2 loss on parameters).')
parser.add_argument('--hidden', type=int, default=10,
                    help='Number of hidden units.')
parser.add_argument('--dropout', type=float, default=0.3,
                    help='Dropout rate (1 - keep probability).')
# NOTE(review): --batch_size is saved with the run metadata but the training
# loop below always steps one sample at a time -- confirm before relying on it
parser.add_argument('--batch_size', type=int, default=1,
                    help='Batch size')
parser.add_argument('--cost_func', type=str, default="mse_loss",
                    help='cost_func : mse_loss,l1_loss , smooth_l1_loss , ')
parser.add_argument('--trial', type=int, default=1,
                    help='trial')
parser.add_argument('--model', type=str, default="MyGCN_v3",
                    help='MyGCN')
args = parser.parse_args()
args.cuda = not args.no_cuda and torch.cuda.is_available()
# seed numpy and torch for reproducibility
np.random.seed(args.seed)
torch.manual_seed(args.seed)
if args.cuda:
    torch.cuda.manual_seed(args.seed)
# Load data
adj, features, out_feature, test_in_features, test_out_features = load_my_data()
# map the --cost_func / --model CLI strings onto the actual callables
loss_function_dict = {
    'mse_loss': F.mse_loss,
    'l1_loss': F.l1_loss,
    'smooth_l1_loss': F.smooth_l1_loss
}
model_dict = {
    'MyGCN_v1': MyGCN_v1,
    'MyGCN_v2': MyGCN_v2,
    'MyGCN_v3': MyGCN_v3,
    'MyGCN_v4': MyGCN_v4,
    'MyGCN_v5': MyGCN_v5,
    'MyGCN_v6': MyGCN_v6
}
# (name kept as-is, including the "fuction" typo, for compatibility)
loss_fuction = loss_function_dict[args.cost_func]
_model = model_dict[args.model]
# Model and optimizer
model = _model(nfeat=features.shape[2],
               nhid=args.hidden,
               nout=out_feature.shape[2],
               dropout=args.dropout)
optimizer = optim.Adam(model.parameters(),
                       lr=args.lr, weight_decay=args.weight_decay)
if args.cuda:
    # move the model and every tensor used below onto the GPU
    model.cuda()
    features = features.cuda()
    adj = adj.cuda()
    out_feature = out_feature.cuda()
    test_in_features = test_in_features.cuda()
    test_out_features = test_out_features.cuda()
def train(epoch):
    """Run one training epoch over all samples (one optimizer step per sample)
    and print the probe loss measured before and after the epoch."""
    t = time.time()
    num_data = features.shape[0]
    # random probe set used to measure the loss before and after the epoch
    rand_sample = np.random.randint(num_data, size=10)
    if not args.fastmode:
        model.eval()
        output = model(features[rand_sample], adj)
    else:
        model.train()
        output = model(features[rand_sample], adj)
    prev_loss = loss_fuction(output, out_feature[rand_sample])
    model.train()
    for ind in range(num_data):
        # single-sample batch; the original's ind == 0 special case produced
        # the identical slice, so it was dropped
        batch = features[ind:ind + 1]
        batch_out = out_feature[ind:ind + 1]
        # Zero the gradients every step: the original zeroed only once per
        # epoch while stepping every sample, so gradients from all previous
        # samples accumulated into each update.
        optimizer.zero_grad()
        output = model(batch, adj)
        loss_train = loss_fuction(output, batch_out)
        loss_train.backward()
        optimizer.step()
    if not args.fastmode:
        model.eval()
        output = model(features[rand_sample], adj)
    else:
        output = model(features[rand_sample], adj)
    loss_val = loss_fuction(output, out_feature[rand_sample])
    print('Epoch: {:04d}'.format(epoch + 1),
          'loss_prev: {:.6f}'.format(prev_loss.item()),
          'loss_curr: {:.6f}'.format(loss_val.item()),
          'time: {:.4f}s'.format(time.time() - t))
def save_tensor(trial=1):
    """Save the model and optimizer state plus the hyper-parameters used,
    under ../result/model/trial_<trial>.{pt,opt} and trial_<trial>_arg.pkl."""
    path = "../result/model/" + "trial_{}".format(trial)
    torch.save(model.state_dict(), path + ".pt")
    torch.save(optimizer.state_dict(), path + ".opt")
    arg_dict = {
        'seed': args.seed,
        'lr': args.lr,
        'hidden': args.hidden,
        'dropout': args.dropout,
        'batch_size': args.batch_size,
        'cost_func': args.cost_func,
    }
    # context manager guarantees the file is closed even if pickling fails
    # (the original used open/close without a try/finally)
    with open(path + "_arg.pkl", "wb") as f:
        pickle.dump(arg_dict, f)
def test():
    """Evaluate the trained model on the held-out test split and print the loss."""
    model.eval()
    predictions = model(test_in_features, adj)
    loss_test = loss_fuction(predictions, test_out_features)
    print("Test set results:", "loss= {:.4f}".format(loss_test.item()))
def print_model_info():
    """Dump the hyper-parameter configuration to stdout."""
    print("Model Info")
    # label/value table keeps the output format identical to the original
    for label, value in (
        ("seed", args.seed),
        ("learning rate", args.lr),
        ("num hidden layer", args.hidden),
        ("dropout", args.dropout),
        ("batch_size", args.batch_size),
        ("cost_func", args.cost_func),
    ):
        print("{} : {}".format(label, value))
# Train model
t_total = time.time()
print_model_info()
for epoch in range(args.epochs):
    train(epoch)
print("Optimization Finished!")
print("Total time elapsed: {:.4f}s".format(time.time() - t_total))
# Testing (checkpoint is saved before evaluating)
save_tensor(trial=args.trial)
test()
| StarcoderdataPython |
3435302 | # Copyright 2020 Board of Trustees of the University of Illinois.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import tempfile
import boto3
from .localfile import deletefile
import controllers.configs as cfg
class S3EventsImages:
    """Thin wrapper around the S3 bucket that stores event images.

    Object keys have the form <AWS_IMAGE_FOLDER_PREFIX>/<event_id>/<image_id>.jpg.
    """
    def __init__(self):
        self.bucket = cfg.BUCKET
        self.client = boto3.client('s3')
    def download(self, event_id, image_id):
        """Download an image into a fresh temp folder under
        cfg.IMAGE_FILE_MOUNTPOINT and return the local file path.

        The partial file is removed again if the download fails.
        """
        # defined up-front so the cleanup path below cannot raise NameError
        # when an exception fires before tmpfile is assigned
        tmpfile = None
        try:
            fileobj = '%s/%s/%s.jpg' % (cfg.AWS_IMAGE_FOLDER_PREFIX, event_id, image_id)
            # NOTE(review): mkdtemp() creates (and leaks) a directory in the
            # system temp dir; only its basename is reused under the mountpoint
            # -- confirm this is intended.
            _, tmpfolder = os.path.split(tempfile.mkdtemp())
            tmpfolder = cfg.IMAGE_FILE_MOUNTPOINT + tmpfolder
            os.mkdir(tmpfolder)
            tmpfile = os.path.join(tmpfolder, event_id + "." + image_id)
            with open(tmpfile, 'wb') as f:
                self.client.download_fileobj(self.bucket, fileobj, f)
        except Exception:
            # best-effort cleanup of the partial download, then propagate
            if tmpfile is not None:
                deletefile(tmpfile)
            raise
        return tmpfile
    def delete(self, event_id, image_id):
        """Delete an image; raises RuntimeError if it does not exist."""
        fileobj = '%s/%s/%s.jpg' % (cfg.AWS_IMAGE_FOLDER_PREFIX, event_id, image_id)
        if not self.__find(event_id, image_id):
            # the original used a bare `raise` with no active exception, which
            # also surfaced as RuntimeError but with a misleading message
            raise RuntimeError('image %s not found' % fileobj)
        self.client.delete_object(Bucket=self.bucket, Key=fileobj)
    def upload(self, imagefile, event_id, image_id):
        """Upload a local image file to the event's S3 folder."""
        fileobj = '%s/%s/%s.jpg' % (cfg.AWS_IMAGE_FOLDER_PREFIX, event_id, image_id)
        with open(imagefile, 'rb') as f:
            self.client.upload_fileobj(f, self.bucket, fileobj)
    def __find(self, event_id, image_id):
        """Return True if the image object exists in the bucket."""
        fileobj = '%s/%s/%s.jpg' % (cfg.AWS_IMAGE_FOLDER_PREFIX, event_id, image_id)
        get_folder_objects = self.client.list_objects_v2(
            Bucket=self.bucket,
            Delimiter='',
            EncodingType='url',
            MaxKeys=1000,
            Prefix=fileobj,
            FetchOwner=False,
            StartAfter=''
        )
        return bool(get_folder_objects.get('Contents'))
| StarcoderdataPython |
301643 | <filename>pipeline/__init__.py<gh_stars>1-10
from pipeline.pipeline import Pipeline as Pipeline
from pipeline.pipeline import Stream as Stream
| StarcoderdataPython |
5031269 | <filename>ChatBot/chatbot_search/chatbot_tfserving/TFServing_preprocess.py<gh_stars>1000+
# !/usr/bin/python
# -*- coding: utf-8 -*-
# @time : 2021/4/15 21:59
# @author : Mo
# @function: encode of bert-whiteing
from __future__ import print_function, division, absolute_import, division, print_function
# adapt sys.path for linux (restored comment: the original Chinese comment was
# mojibake-damaged and the word "linux" had spilled onto a bare code line,
# which would raise NameError at import time)
import sys
import os
path_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "./."))
sys.path.append(path_root)
print(path_root)
from argparse import Namespace
import unicodedata, six, re
is_py2 = six.PY2
if not is_py2:
basestring = str
def is_string(s):
    """Return True if *s* is a string (str, or basestring under Python 2)."""
    return isinstance(s, basestring)
def load_vocab(dict_path, encoding='utf-8', simplified=False, startswith=None):
    """Read a token -> id vocabulary from a BERT vocab file (one token per line).

    When ``simplified`` is True, the vocab is filtered: multi-character tokens
    whose stem contains any CJK character or punctuation are dropped, while
    every token listed in ``startswith`` is kept at the front.  In that case
    a ``(new_token_dict, keep_tokens)`` pair is returned, where ``keep_tokens``
    maps the new ids back to the original ids.
    """
    token_dict = {}
    with open(dict_path, encoding=encoding) as reader:
        for line in reader:
            token = line.strip()
            token_dict[token] = len(token_dict)
    if simplified:  # filter out part of the tokens (build a simplified vocab)
        new_token_dict, keep_tokens = {}, []
        startswith = startswith or []
        for t in startswith:
            new_token_dict[t] = len(new_token_dict)
            keep_tokens.append(token_dict[t])
        # walk the original vocab in id order so ids stay stable
        for t, _ in sorted(token_dict.items(), key=lambda s: s[1]):
            if t not in new_token_dict:
                keep = True
                if len(t) > 1:
                    for c in Tokenizer.stem(t):
                        if (
                            Tokenizer._is_cjk_character(c) or
                            Tokenizer._is_punctuation(c)
                        ):
                            keep = False
                            break
                if keep:
                    new_token_dict[t] = len(new_token_dict)
                    keep_tokens.append(token_dict[t])
        return new_token_dict, keep_tokens
    else:
        return token_dict
class BasicTokenizer(object):
    """Tokenizer base class.

    Holds the special-token names and the generic encode plumbing; subclasses
    provide the vocabulary lookups (_tokenize, token_to_id, id_to_token, decode).
    """
    def __init__(self, token_start='[CLS]', token_end='[SEP]'):
        """Initialize the special-token names (pad/unk/mask/start/end)."""
        self._token_pad = '[PAD]'
        self._token_unk = '[UNK]'
        self._token_mask = '[MASK]'
        self._token_start = token_start
        self._token_end = token_end
    def tokenize(self, text, max_length=None):
        """Tokenize *text*, adding start/end tokens and truncating to max_length."""
        tokens = self._tokenize(text)
        if self._token_start is not None:
            tokens.insert(0, self._token_start)
        if self._token_end is not None:
            tokens.append(self._token_end)
        if max_length is not None:
            # pop just before the end token so '[SEP]' survives truncation
            index = int(self._token_end is not None) + 1
            self.truncate_sequence(max_length, tokens, None, -index)
        return tokens
    def token_to_id(self, token):
        """Convert a token to its id (implemented by subclasses)."""
        raise NotImplementedError
    def tokens_to_ids(self, tokens):
        """Convert a sequence of tokens to the corresponding id sequence."""
        return [self.token_to_id(token) for token in tokens]
    def truncate_sequence(
        self, max_length, first_sequence, second_sequence=None, pop_index=-1
    ):
        """Truncate (in place) until the combined length fits max_length,
        always popping from whichever sequence is currently longer."""
        if second_sequence is None:
            second_sequence = []
        while True:
            total_length = len(first_sequence) + len(second_sequence)
            if total_length <= max_length:
                break
            elif len(first_sequence) > len(second_sequence):
                first_sequence.pop(pop_index)
            else:
                second_sequence.pop(pop_index)
    def encode(
        self,
        first_text,
        second_text=None,
        max_length=None,
        first_length=None,
        second_length=None
    ):
        """Return (token_ids, segment_ids) for a text or text pair.

        If first_length is given, the first sentence is force-padded/truncated
        to that length; likewise second_length for the second sentence.
        Either text may also be passed pre-tokenized (as a list of tokens).
        """
        if is_string(first_text):
            first_tokens = self.tokenize(first_text)
        else:
            first_tokens = first_text
        if second_text is None:
            second_tokens = None
        elif is_string(second_text):
            # drop the duplicate start token from the second sentence
            idx = int(bool(self._token_start))
            second_tokens = self.tokenize(second_text)[idx:]
        else:
            second_tokens = second_text
        if max_length is not None:
            self.truncate_sequence(max_length, first_tokens, second_tokens, -2)
        first_token_ids = self.tokens_to_ids(first_tokens)
        if first_length is not None:
            first_token_ids = first_token_ids[:first_length]
            first_token_ids.extend([self._token_pad_id] *
                                   (first_length - len(first_token_ids)))
        first_segment_ids = [0] * len(first_token_ids)
        if second_text is not None:
            second_token_ids = self.tokens_to_ids(second_tokens)
            if second_length is not None:
                second_token_ids = second_token_ids[:second_length]
                second_token_ids.extend([self._token_pad_id] *
                                        (second_length - len(second_token_ids)))
            second_segment_ids = [1] * len(second_token_ids)
            first_token_ids.extend(second_token_ids)
            first_segment_ids.extend(second_segment_ids)
        return first_token_ids, first_segment_ids
    def id_to_token(self, i):
        """Convert an id to its token (implemented by subclasses)."""
        raise NotImplementedError
    def ids_to_tokens(self, ids):
        """Convert a sequence of ids to the corresponding token sequence."""
        return [self.id_to_token(i) for i in ids]
    def decode(self, ids):
        """Convert ids back to readable text (implemented by subclasses)."""
        raise NotImplementedError
    def _tokenize(self, text):
        """Basic tokenization function (implemented by subclasses)."""
        raise NotImplementedError
class Tokenizer(BasicTokenizer):
    """BERT-style tokenizer.

    Pure-Python implementation, adapted from keras_bert's tokenizer.
    """
    def __init__(self, token_dict, do_lower_case=False, *args, **kwargs):
        """token_dict may be a vocab-file path or a ready token -> id dict."""
        super(Tokenizer, self).__init__(*args, **kwargs)
        if is_string(token_dict):
            token_dict = load_vocab(token_dict)
        self._do_lower_case = do_lower_case
        self._token_dict = token_dict
        self._token_dict_inv = {v: k for k, v in token_dict.items()}
        self._vocab_size = len(token_dict)
        # cache the special-token ids as _token_<name>_id attributes;
        # silently skipped when a special token is absent from the vocab
        for token in ['pad', 'unk', 'mask', 'start', 'end']:
            try:
                _token_id = token_dict[getattr(self, '_token_%s' % token)]
                setattr(self, '_token_%s_id' % token, _token_id)
            except:
                pass
    def token_to_id(self, token):
        """Convert a token to its id (unknown tokens map to [UNK])."""
        return self._token_dict.get(token, self._token_unk_id)
    def id_to_token(self, i):
        """Convert an id to its token."""
        return self._token_dict_inv[i]
    def decode(self, ids, tokens=None):
        """Convert ids (or pre-looked-up tokens) back to readable text,
        merging '##' subwords and normalizing spacing around punctuation."""
        tokens = tokens or self.ids_to_tokens(ids)
        tokens = [token for token in tokens if not self._is_special(token)]
        text, flag = '', False
        for i, token in enumerate(tokens):
            if token[:2] == '##':
                text += token[2:]
            elif len(token) == 1 and self._is_cjk_character(token):
                text += token
            elif len(token) == 1 and self._is_punctuation(token):
                text += token
                text += ' '
            elif i > 0 and self._is_cjk_character(text[-1]):
                text += token
            else:
                text += ' '
                text += token
        # collapse spaces, re-attach English contractions and punctuation
        text = re.sub(' +', ' ', text)
        text = re.sub('\' (re|m|s|t|ve|d|ll) ', '\'\\1 ', text)
        punctuation = self._cjk_punctuation() + '+-/={(<['
        punctuation_regex = '|'.join([re.escape(p) for p in punctuation])
        punctuation_regex = '(%s) ' % punctuation_regex
        text = re.sub(punctuation_regex, '\\1', text)
        text = re.sub('(\d\.) (\d)', '\\1\\2', text)
        return text.strip()
    def _tokenize(self, text):
        """Basic tokenization: optional lowercasing/accent-stripping, split on
        spaces/punctuation/CJK characters, then WordPiece each word."""
        if self._do_lower_case:
            if is_py2:
                text = unicode(text)
            text = text.lower()
            # NFD-normalize and drop combining marks (accent stripping)
            text = unicodedata.normalize('NFD', text)
            text = ''.join([
                ch for ch in text if unicodedata.category(ch) != 'Mn'
            ])
        spaced = ''
        for ch in text:
            if self._is_punctuation(ch) or self._is_cjk_character(ch):
                spaced += ' ' + ch + ' '
            elif self._is_space(ch):
                spaced += ' '
            elif ord(ch) == 0 or ord(ch) == 0xfffd or self._is_control(ch):
                # drop NUL, the replacement character, and control characters
                continue
            else:
                spaced += ch
        tokens = []
        for word in spaced.strip().split():
            tokens.extend(self._word_piece_tokenize(word))
        return tokens
    def _word_piece_tokenize(self, word):
        """Split a word into subwords (greedy longest-match WordPiece)."""
        if word in self._token_dict:
            return [word]
        tokens = []
        start, stop = 0, 0
        while start < len(word):
            stop = len(word)
            while stop > start:
                sub = word[start:stop]
                if start > 0:
                    sub = '##' + sub
                if sub in self._token_dict:
                    break
                stop -= 1
            if start == stop:
                # no match at all: emit the single (unknown) character
                stop += 1
            tokens.append(sub)
            start = stop
        return tokens
    @staticmethod
    def stem(token):
        """Return the token's "stem" (strips a leading '##' if present)."""
        if token[:2] == '##':
            return token[2:]
        else:
            return token
    @staticmethod
    def _is_space(ch):
        """Whitespace-class character test."""
        return ch == ' ' or ch == '\n' or ch == '\r' or ch == '\t' or \
            unicodedata.category(ch) == 'Zs'
    @staticmethod
    def _is_punctuation(ch):
        """Punctuation-class character test (covers full- and half-width forms).
        Reminder: unicodedata.category can behave differently on py2 vs py3;
        e.g. for u'\u00a7' it returns 'So' on py2 but 'Po' on py3."""
        code = ord(ch)
        return 33 <= code <= 47 or \
            58 <= code <= 64 or \
            91 <= code <= 96 or \
            123 <= code <= 126 or \
            unicodedata.category(ch).startswith('P')
    @staticmethod
    def _cjk_punctuation():
        # the set of full-width / CJK punctuation characters used by decode()
        return u'\uff02\uff03\uff04\uff05\uff06\uff07\uff08\uff09\uff0a\uff0b\uff0c\uff0d\uff0f\uff1a\uff1b\uff1c\uff1d\uff1e\uff20\uff3b\uff3c\uff3d\uff3e\uff3f\uff40\uff5b\uff5c\uff5d\uff5e\uff5f\uff60\uff62\uff63\uff64\u3000\u3001\u3003\u3008\u3009\u300a\u300b\u300c\u300d\u300e\u300f\u3010\u3011\u3014\u3015\u3016\u3017\u3018\u3019\u301a\u301b\u301c\u301d\u301e\u301f\u3030\u303e\u303f\u2013\u2014\u2018\u2019\u201b\u201c\u201d\u201e\u201f\u2026\u2027\ufe4f\ufe51\ufe54\u00b7\uff01\uff1f\uff61\u3002'
    @staticmethod
    def _is_cjk_character(ch):
        """CJK-class character test (Chinese characters included).
        Reference: https://en.wikipedia.org/wiki/CJK_Unified_Ideographs_(Unicode_block)
        """
        code = ord(ch)
        return 0x4E00 <= code <= 0x9FFF or \
            0x3400 <= code <= 0x4DBF or \
            0x20000 <= code <= 0x2A6DF or \
            0x2A700 <= code <= 0x2B73F or \
            0x2B740 <= code <= 0x2B81F or \
            0x2B820 <= code <= 0x2CEAF or \
            0xF900 <= code <= 0xFAFF or \
            0x2F800 <= code <= 0x2FA1F
    @staticmethod
    def _is_control(ch):
        """Control-class character test."""
        return unicodedata.category(ch) in ('Cc', 'Cf')
    @staticmethod
    def _is_special(ch):
        """Return True for special-meaning tokens like '[CLS]' or '[SEP]'."""
        return bool(ch) and (ch[0] == '[') and (ch[-1] == ']')
    def rematch(self, text, tokens):
        """Map the original *text* onto the post-tokenize *tokens*.

        Returns one list per token containing the indices of the original
        characters it covers; special tokens map to an empty list.
        """
        if is_py2:
            text = unicode(text)
        if self._do_lower_case:
            text = text.lower()
        # rebuild the normalized text while remembering, for every normalized
        # character, which original-character index it came from
        normalized_text, char_mapping = '', []
        for i, ch in enumerate(text):
            if self._do_lower_case:
                ch = unicodedata.normalize('NFD', ch)
                ch = ''.join([c for c in ch if unicodedata.category(c) != 'Mn'])
            ch = ''.join([
                c for c in ch
                if not (ord(c) == 0 or ord(c) == 0xfffd or self._is_control(c))
            ])
            normalized_text += ch
            char_mapping.extend([i] * len(ch))
        text, token_mapping, offset = normalized_text, [], 0
        for token in tokens:
            if self._is_special(token):
                token_mapping.append([])
            else:
                token = self.stem(token)
                start = text[offset:].index(token) + offset
                end = start + len(token)
                token_mapping.append(char_mapping[start:end])
                offset = end
        return token_mapping
# Tunable hyper-parameters (comments restored: the original Chinese comments
# were mojibake-damaged and their continuation lines had lost the leading '#',
# leaving bare non-ASCII tokens that made this file a SyntaxError)
# dict_path = "bert_white/vocab.txt"  # BERT vocab file
# maxlen = 128
# alternatively, the token_dict mapping can be placed directly in this .py file
from bertWhiteConf import bert_white_config
config = Namespace(**bert_white_config)
tokenizer = Tokenizer(os.path.join(config.bert_dir, config.dict_path), do_lower_case=True)
text = "ไฝ ่ฟไผไปไน"
token_id = tokenizer.encode(text, max_length=config.maxlen)
print(token_id)
def covert_text_to_id(data_input):
    """Convert the request texts into the token/segment id instances that
    TF-Serving expects ({"instances": [{"Input-Token": ..., "Input-Segment": ...}]})."""
    records = data_input.get("data", {})
    instances = []
    for record in records:
        tokens, segments = tokenizer.encode(record.get("text", ""), max_length=config.maxlen)
        instances.append({"Input-Token": tokens, "Input-Segment": segments})
    return {"instances": instances}
if __name__ == '__main__':
data_input = {"data": [{"text": "ไฝ ๆฏ่ฐๅ"}, {"text": "ไฝ ๅซไปไน"}, {"text": "ไฝ ๅฅฝ"}]}
res = covert_text_to_id(data_input)
print(res)
# {"instances": [{"Input-Token": [101, 872, 3221, 6443, 1435, 102], "Input-Segment": [0, 0, 0, 0, 0, 0]},
# {"Input-Token": [101, 872, 1373, 784, 720, 102], "Input-Segment": [0, 0, 0, 0, 0, 0]},
# {"Input-Token": [101, 872, 1962, 102], "Input-Segment": [0, 0, 0, 0]}]}
| StarcoderdataPython |
9680465 | <reponame>QuatZo/swstats<filename>website/celery.py<gh_stars>1-10
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
from celery.schedules import crontab
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'swstatisticsweb.settings')
# Celery app backed by a local Redis instance (both result backend and broker)
app = Celery('swstatisticsweb', backend='redis://localhost',
             broker='redis://localhost')
# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')
app.conf.update(
    result_expires=1200,  # 1200 secs = 20 minutes
)
app.conf.broker_transport_options = {"visibility_timeout": 1200}  # 20min
app.conf.timezone = 'UTC'  # UTC server time
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
@app.task(bind=True)
def debug_task(self):
    """Diagnostic task: prints its own request context."""
    print('Request: {0!r}'.format(self.request))
| StarcoderdataPython |
9647510 | __author__ = "<NAME> (<EMAIL>)"
__version__ = "0.0.1"
__date__ = "$Date: 2014/07/09 $"
from cell import Cell
class Playground(object):
    """
    An abstract Cell playground.

    Holds a collection of Cell objects in self.cells; subclasses decide how
    the cells interact on each play() step.  (Python 2 source: note the
    print statements.)
    """
    def __init__(self, n):
        """
        This default initializer inits with n random cells.
        In general, initializing a Playground should involve adding
        some number of Cells, or at least initing the structure in which
        Cells will exist.
        """
        self.cells = []
        for i in range(0, n):
            self.cells.append(Cell(type="random", xlim=[-10, 10], ylim=[-100, 100], zlim=[-1000, 1000]))
    def play(self):
        """
        Does one step of playing for the Cells in this Playground.
        Should be implemented by subclass to do whatever that calls for.
        This default implementation just prints each cell's coordinates.
        """
        print "Playground's open!"
        for c in self.cells:
            print "%d %d %d" % ( c.x, c.y, c.z )
def neighborhood(self, cell):
"""
Returns a structure representing the nearest neighborhood
around the given cell (excluding that cell)
"""
pass | StarcoderdataPython |
9600137 | # Load in a .x3d file, store to a .dgs file with a checkerboard pattern
import sys
import os
import numpy as np
import dataguzzler as dg
import dg_file as dgf
import dg_metadata as dgm
try:
from cStringIO import StringIO # python 2.x
pass
except ImportError:
from io import StringIO # python 3.x
pass
from spatialnde.coordframes import coordframe
from spatialnde.ndeobj import ndepart
from spatialnde.exporters.vrml import VRMLAppearance
# texture image size and checkerboard layout
imageshape=(1024,1024)
cbrows=20
cbcols=20
# input .x3d path given on the command line
x3dname=sys.argv[1]
(x3dpath,x3dfile)=os.path.split(x3dname)
x3dbasename=os.path.splitext(x3dfile)[0]
# load the geometry and serialize it to VRML with an image-map appearance
objframe=coordframe()
obj = ndepart.fromx3d(objframe,None,x3dname,tol=1e-6)
VRMLBuf=StringIO()
VRMLAppear=VRMLAppearance.Simple(DefName="ImageMap")
obj.VRMLFile(VRMLBuf,objframe,UVparameterization=None,appearance=VRMLAppear)
VRMLAppear.reset() # needed if we want to write a new file
# build the dataguzzler waveform dict: one entry for the geometry ...
wfmdict={}
wfmdict[x3dbasename]=dg.wfminfo()
wfmdict[x3dbasename].Name=x3dbasename
wfmdict[x3dbasename].dimlen=np.array((),dtype='i8')
dgm.AddMetaDatumWI(wfmdict[x3dbasename],dgm.MetaDatum("VRML97Geom",VRMLBuf.getvalue()))
# ... and one for the checkerboard parameterization image
x3dparamname=x3dbasename+"_parameterization"
wfmdict[x3dparamname]=dg.wfminfo()
wfmdict[x3dparamname].Name=x3dparamname
wfmdict[x3dparamname].ndim=2
wfmdict[x3dparamname].dimlen=np.array(imageshape,dtype='i8')
wfmdict[x3dparamname].n=np.prod(wfmdict[x3dparamname].dimlen)
wfmdict[x3dparamname].data=np.zeros(wfmdict[x3dparamname].dimlen,dtype='f')
xpos=np.arange(imageshape[0],dtype='d')
ypos=np.arange(imageshape[1],dtype='d')
# NOTE(review): np.bool was removed in NumPy >= 1.24; these two lines need
# plain bool on modern NumPy -- confirm the NumPy version pinned for this tool
xchecker = (xpos//(imageshape[0]*1.0/(cbcols)) % 2).astype(np.bool)
ychecker = (ypos//(imageshape[1]*1.0/(cbrows)) % 2).astype(np.bool)
wfmdict[x3dparamname].data[:,:]=xchecker.reshape(imageshape[0],1) ^ ychecker.reshape(1,imageshape[1]) # XOR operator
dgm.AddMetaDatumWI(wfmdict[x3dbasename],dgm.MetaDatum("TextureChan_ImageMap",x3dparamname+":0"))
dgf.savesnapshot(os.path.join(x3dpath,x3dbasename+".dgs"),wfmdict)
| StarcoderdataPython |
175669 | """Mรณdulo com os modelos da Avaliaรงรฃo."""
from datetime import date, datetime
from enum import Enum, auto
from project.configs.server import server
db = server.db
class TypeRate(Enum):
    """Enum of the possible rating types (the value is the display label)."""
    ANIME = "Anime"
    MOVIE = "Movie"
    SERIES = "Series"
    CARTOON = "Cartoon"
class RatingModel(db.Model):
    """Rating model (a user's review of a piece of media).

    Attributes (* = required):
        id* (Integer): rating identifier
        title* (String): the rating's title
        content (Text): description of the rating
            NOTE(review): the original doc marked this required, but the
            column is nullable=True -- confirm which is intended
        rate_type* (Enum): the kind of media being rated
        rate_pic (String): path of the rating's image
        rate* (Integer): the score given in the rating
        date_posted (Datetime): when the rating was created
        rater_id (Integer): id of the user who posted the rating
        seasons (SeasonModel): seasons related to this rating
    """
    id = db.Column(db.Integer, primary_key=True)
    # original_title = db.Column(db.String(255))
    title = db.Column(db.String(255), nullable=False)
    content = db.Column(db.Text, nullable=True)
    rate_type = db.Column(db.Enum(TypeRate), default=TypeRate.MOVIE)
    rate = db.Column(db.Integer, nullable=False)
    rate_pic = db.Column(db.String(), nullable=True)
    date_posted = db.Column(db.DateTime, default=datetime.utcnow)
    # Foreign Key
    rater_id = db.Column(db.Integer, db.ForeignKey("users_model.id"))
    # User can have many Rates {post.rater.email}
    seasons = db.relationship("SeasonModel", cascade="all,delete", backref="seasons")
| StarcoderdataPython |
8064491 | from direct.showbase.ShowBase import ShowBase
from direct.interval.IntervalGlobal import LerpFunc
from direct.gui.DirectGui import DirectButton
from direct.gui.DirectGui import DGG
from panda3d_logos.splashes import Colors
from panda3d_logos.splashes import Pattern
from stageflow import Flow
from stageflow import Stage
from stageflow.prefab import Quit
from stageflow.panda3d import Panda3DSplash
class Repeat(Stage):
    """Stage shown after the splash: offers 'Repeat' and 'Quit' buttons."""

    def enter(self, data):
        """Create the two DirectGui buttons when this stage becomes active."""
        self.btn_repeat = DirectButton(
            text=("Repeat"),
            text_pos=(0, 0.15),
            text_scale=0.2,
            frameSize=(-1, 1, 0.05, 0.35),
            borderWidth=(0.01, 0.01),
            command=self.repeat,
        )
        self.btn_quit = DirectButton(
            text=("Quit"),
            text_pos=(0, -0.25),
            text_scale=0.2,
            frameSize=(-1, 1, -0.35, -0.05),
            borderWidth=(0.01, 0.01),
            command=self.quit,
        )
        self.buttons = [self.btn_repeat, self.btn_quit]

    def exit(self, data):
        """Destroy the buttons and pass the stage data through unchanged."""
        for btn in self.buttons:
            btn.destroy()
        return data

    def repeat(self):
        # `base` is the global ShowBase instance created at module level below.
        base.flow.transition('splashes')

    def quit(self):
        base.flow.transition('quit')
# Bootstrap Panda3D and wire the stage flow:
# splashes -> repeat screen -> (splashes again | quit)
ShowBase()
base.flow = Flow(
    stages=dict(
        splashes=Panda3DSplash(
            exit_stage='repeat',
            splash_args=dict(
                pattern=Pattern.WHEEL,
                colors=Colors.RAINBOW,
                pattern_freq=1,
                cycle_freq=5,
            ),
        ),
        repeat=Repeat(),
        quit=Quit(),
    ),
    initial_stage='splashes',
)
base.run()
| StarcoderdataPython |
1729780 | import cv2
import random
import numpy as np
class RandomAdjustHue(object):
    """Adjust the hue of the given image by a random amount in a given range.

    Args:
        min_angle (int): Minimum of the amount added to the hue channel.
        max_angle (int): Maximum of the amount added to the hue channel.

    Returns (from ``__call__``):
        dest_img (ndarray): Hue-adjusted image, channel-first (C, H, W).
        target (dict): The given target dictionary, unchanged.
    """
    def __init__(self, min_angle=-9, max_angle=9):
        self.min_angle = min_angle
        self.max_angle = max_angle

    def __call__(self, image, target):
        # Input arrives channel-first; OpenCV expects channel-last (H, W, C).
        image = image.transpose(1, 2, 0)
        angle = random.randint(self.min_angle, self.max_angle)
        hsv_img = cv2.cvtColor(image, cv2.COLOR_RGB2HSV)
        # Use a SIGNED dtype for the shift. The previous uint16 cast relied on
        # NumPy promoting `uint16 array + negative int` to a signed type; under
        # NumPy 2 (NEP 50) the negative angle wraps around 2**16 instead, which
        # corrupts the hue for negative shifts.
        hsv_img = hsv_img.astype(np.int32)
        # OpenCV stores 8-bit hue in [0, 180); modulo keeps it in range.
        hsv_img[:, :, 0] = (hsv_img[:, :, 0] + angle) % 180
        hsv_img = hsv_img.astype(np.uint8)
        dest_img = cv2.cvtColor(hsv_img, cv2.COLOR_HSV2RGB)
        return dest_img.transpose(2, 0, 1), target
| StarcoderdataPython |
1921860 | # import argparse
# import torch.nn as nn
# import torch.nn.functional as F
# import torch.optim as optim
# from torch.optim.lr_scheduler import StepLR
# import os
# from PIL import Image
# from copy import deepcopy
# from tqdm import tqdm
# import time
from data.domain import *
import torch
import numpy as np
np.set_printoptions(precision=2, suppress=True)
# Prefer the first GPU when available.
DEVICE = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
ROOT_DIR = './data/'
IMG_DIR = ROOT_DIR + 'symbol_images/'
IMG_SIZE = 45  # symbol image edge length in pixels (presumably square crops -- confirm)
from torchvision import transforms
# To-tensor then shift by 0.5; the divisor of 1 leaves the scale untouched.
IMG_TRANSFORM = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((0.5,), (1,))])
def compute_rewards(preds, res, seq_len):
    """Return a float reward vector: 1.0 where the evaluated prediction
    matches the reference result `res`, 0.0 elsewhere."""
    _, predicted_results = eval_expr(preds, seq_len)
    matches = equal_res(predicted_results, res)
    return np.array([float(bool(m)) for m in matches])
214300 | __author__ = 'HaoBin'
from XGraph2 import *
import copy, cProfile
def check_graph(graph):
    """Return True if `graph` satisfies the degree condition for an Eulerian trail.

    A directed graph can contain an Eulerian trail only if:
      * at most one vertex has in-degree - out-degree == 1,
      * at most one vertex has out-degree - in-degree == 1, and
      * every other vertex has equal in- and out-degree.

    Cost is one in-degree and one out-degree query per vertex; overall
    complexity depends on the graph implementation's degree-lookup cost.
    """
    in_out_delta = 0   # vertices with exactly one more incoming edge
    out_in_delta = 0   # vertices with exactly one more outgoing edge
    for v in graph.getVerticesList():
        in_d = graph.getInDegree(v)
        out_d = graph.getDegree(v)
        if in_d - out_d == 1:
            in_out_delta += 1
        elif out_d - in_d == 1:
            out_in_delta += 1
        elif in_d != out_d:
            # An imbalance of 2 or more can never occur on an Eulerian trail.
            # (The original reset `valid` to True afterwards and let such
            # vertices slip through.)
            return False
    return in_out_delta <= 1 and out_in_delta <= 1
def e_graph_build(graph):
    """Greedily build an Eulerian path over `graph`, returning the vertex sequence.

    Destructive: edges (and via dfs(), copies' vertices) are deleted as they
    are traversed, so callers should pass a graph they can afford to consume.
    Fleury-style: before committing to an edge, a DFS on a deep copy checks
    whether removing it would disconnect the remaining graph (a "bridge").
    """
    #e_path_exists = check_graph(graph)
    #print("Checking graph... E-path exists?", e_path_exists)
    e_graph = []
    e_path_exists = True  # validity pre-check above is disabled
    if e_path_exists:
        vs = graph.getVerticesList()
        u = vs[0]
        # Start from the vertex with 0 in-degree
        for v in vs:
            if graph.getInDegree(v) == 0:
                u = v
        e_graph.append(u)
        while graph.getEdgeCount() > 0:
            # while there are still unvisited edges
            if graph.getDegree(u) == 0 and graph.getEdgeCount() > 0:
                # dead end with edges still unused: no Eulerian path from here
                print("No E-path found!")
                break
            elif graph.getDegree(u) == 0 and graph.getEdgeCount() == 0:
                print("E-path found!")
                break
            elif graph.getDegree(u) == 1:
                # only 1 out-degree left: forced move, no bridge check needed
                v = graph.getConnections(u)[0][0]
                e_graph.append(v)
                graph.deleteEdge(u,v)
                u = v
            elif graph.getDegree(u) >= 2:
                #print("c4")
                # if the vertex u has 2 or more out-degree
                # go through self loops first
                for connection in graph.getConnections(u):
                    if connection[0] == u:
                        graph.deleteEdge(u,connection[0])
                        e_graph.append(connection[0])
                u_outgoing = graph.getConnections(u)
                #print(u_outgoing)
                continuex = True
                i = 0
                # check each outgoing path until a non-bridge edge is found
                while continuex and i < len(u_outgoing):
                    #print(i)
                    v = u_outgoing[i][0]
                    ori_edge_cost, ori_edge_direct = graph.getEdge(u, v).getCost(), graph.getEdge(u, v).isDirected()
                    # tries to traverse/delete the edge, and check for validity
                    # ** does DFS to check if the edge is a bridge
                    init_c = dfs(copy.deepcopy(graph),u)
                    graph.deleteEdge(u, v)
                    continuex = False
                    after_c = dfs(copy.deepcopy(graph),v)
                    # now check if taking this edge would strand v or split the graph
                    if graph.getInDegree(v) == 0 and graph.getDegree(v) == 0:
                        # v would become isolated: undo and try the next edge
                        graph.addEdge(u, v, ori_edge_cost, ori_edge_direct)
                        continuex = True
                    else:
                        if init_c > after_c:
                            # Bridge detected: restore the edge, try the next one
                            graph.addEdge(u, v, ori_edge_cost, ori_edge_direct)
                            continuex = True
                        else:
                            # valid next-edge, proceed to next loop
                            e_graph.append(v)
                            u = v
                            u_outgoing = graph.getConnections(u)
                    i += 1
                    # NOTE(review): this exhaustion check also fires when the
                    # chosen edge was the last candidate -- verify it cannot
                    # abort a successful step.
                    if i >= len(u_outgoing):
                        print("No E-path found! No valid next vertex found.")
                        return e_graph
            print(len(e_graph),end="\r")  # progress indicator on one line
        return e_graph
    else:
        return []
def dfs(graph, u):
    # O(|V|)
    # performs a depth first search and return the number of nodes traversable
    # * count reachable node from u *
    # NOTE(review): destructive -- deletes visited vertices to mark them, so
    # callers pass a deep copy of the graph.
    count = 0
    connections = graph.getConnections(u)
    graph.deleteVertex(u)
    count += dfs_helper(graph, connections,u) + 1
    return count
def dfs_helper(graph, connections, u):
    # helper recursive part for dfs()
    # Recurses into each still-present neighbour, deleting vertices as it
    # visits them; returns the number of vertices reached from `connections`.
    count = 0
    for v in connections:
        if v[0] != u and graph[v[0]] is not None:
            v_connection = graph.getConnections(v[0])
            graph.deleteVertex(v[0])
            count += dfs_helper(graph,v_connection,v[0]) + 1
    return count
def rbk(pat, txt):
    """Rabin-Karp substring search: first index of `pat` in `txt`, or -1.

    Uses the module's polynomial rolling hash to skip most windows; on a hash
    hit the candidate window is verified character-for-character to rule out
    collisions.
    """
    n = len(txt)
    m = len(pat)
    h_pat = rolling_hash(pat)
    h_txt = rolling_hash(txt[0:m])
    for i in range(n - m + 1):
        if h_pat == h_txt:
            # Verify the whole window. The original reset `match` inside the
            # character loop and returned immediately, so it accepted any
            # window whose FIRST character matched (hash collisions included).
            if txt[i:i + m] == pat:
                return i
        if i < n - m:
            # Slide the hash one position; skip on the final iteration where
            # txt[i:i+m+1] would be short and the value unused anyway.
            h_txt = update_rolling_hash(h_txt, txt[i:i + m + 1])
    return -1
def rolling_hash(string, d=131):
    """Polynomial (base-`d`) hash of `string`, reduced modulo a fixed prime.

    Requires len(string) >= 2, matching its companion update_rolling_hash().
    """
    modulus = 32452843  # large prime keeps hash values bounded
    acc = ord(string[0]) * d + ord(string[1])
    for ch in string[2:]:
        acc = acc * d + ord(ch)
    return acc % modulus
def update_rolling_hash(hash, txt, d=131):
    """Slide the rolling hash one character to the right.

    Given `hash` == hash of txt[:-1], remove the contribution of txt[0]
    (weighted by d**(len(txt)-2)) and mix in the trailing character,
    yielding the hash of txt[1:].
    """
    modulus = 32452843
    high_order = ord(txt[0]) * d ** (len(txt) - 2)
    return ((hash - high_order) * d + ord(txt[-1])) % modulus
def d_edges_build(graph,n):
    # Complexity: O(|V|)
    # build the D-graph and edges
    # NOTE(review): the nested loops perform |V|^2 overlap checks, so the
    # stated O(|V|) looks optimistic -- see d_edges_build_2 for the hashed
    # replacement actually used by main().
    vertices = graph.getVertices()
    for vertex in vertices:
        for target in vertices:
            vid = vertex[1].getID()
            tid = target[1].getID()
            # PENDING COMPLETION: STRING MATCHING #
            # Edge vid -> tid when vid's suffix vid[1:n] occurs at the start
            # of tid (rbk returns the match index, 0 == prefix match).
            if rbk(vid[1:n], tid) == 0:
                #if vid[1:n] == tid[0:n-2]:
                graph.addEdge(vid,tid,0,True)
    return graph
def d_edges_build_2(graph,n):
    # Hash-based edge construction: bucket every vertex id by its leading and
    # trailing substring, then connect overlapping ids without the O(|V|^2)
    # all-pairs scan of d_edges_build().
    # NOTE(review): heads are keyed by v[0:n-2] but tails are looked up with
    # v[0:n-1] -- presumably because `n` is len(line) including the trailing
    # newline (see main()); confirm the intended overlap length.
    hash_head = XHashTable()
    hash_tail = XHashTable()
    vertices = graph.getVerticesList()
    for v in vertices:
        h = hash_head[v[0:n-2]]
        t = hash_tail[v[1:n]]
        if h is None:
            hash_head[v[0:n-2]] = [v]
        if t is None:
            hash_tail[v[1:n]] = [v]
        if h is not None:
            hash_head[v[0:n-2]] = h + [v]
        if t is not None:
            hash_tail[v[1:n]] = t + [v]
    for v in vertices:
        # ids whose head matches v's tail: edges v -> h
        h_v = hash_head[v[1:n]]
        if h_v is not None:
            for h in h_v:
                if graph.getEdge(v,h) is None:
                    graph.addEdge(v,h,0,True)
        # ids whose tail matches v's head: edges t -> v
        t_v = hash_tail[v[0:n-1]]
        if t_v is not None:
            for t in t_v:
                if graph.getEdge(t,v) is None:
                    graph.addEdge(t,v,0,True)
    return graph
def print_extend(graph):
    """Print the merged superstring for the Eulerian path `graph`.

    The first id is printed in full; every later id contributes only its
    final, non-overlapping character. Complexity: O(|E|).
    """
    if not graph:
        return
    overlap = len(graph[0]) - 1
    tail = [piece[overlap:] for piece in graph[1:]]
    print(graph[0] + "".join(tail))
def main():
    """Read substrings.txt, build the overlap (De Bruijn-style) graph, find an
    Eulerian path over it and print the reconstructed superstring."""
    # Load the file
    total_set = []
    n = 0
    # NOTE(review): file handle is never closed; n is taken from len(line)
    # BEFORE strip(), so it includes the trailing newline.
    file = open("substrings.txt",'r')
    for line in file:
        total_set.append(line.strip())
        if n == 0:
            n = len(line)
    # initialise D-graph and build the edges
    d_graph = XGraph()
    for s in total_set:
        d_graph.addVertex(s)
    graph = d_edges_build_2(d_graph,n)
    print("Edge Count:", graph.getEdgeCount())
    # build the E-graph
    e_graph = e_graph_build(graph)
    print("E",e_graph)
    print("E-path length:",len(e_graph)-1)
    print_extend(e_graph)
    print()
if __name__ == "__main__":
    # Profile the whole pipeline; cProfile prints its timing table afterwards.
    cProfile.run('main()')
| StarcoderdataPython |
362385 | <reponame>healem/sauri
class Type:
    """String constants identifying sensor types and temperature units."""
    DS18B20 = 'ds18b20'  # DS18B20 1-wire temperature sensor identifier
    TEMPC = 'C'  # Celsius unit marker
    TEMPF = 'F'  # Fahrenheit unit marker
336463 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
    """South data migration: drop (forwards) or restore (backwards) the
    metastore 'read_only_access' HuePermission row."""

    def forwards(self, orm):
        """Delete the metastore read_only_access permission if present."""
        from useradmin.models import HuePermission
        try:
            perm = HuePermission.objects.get(app='metastore', action='read_only_access')
            perm.delete()
        except HuePermission.DoesNotExist:
            # Nothing to remove -- keeps the migration idempotent.
            pass

    def backwards(self, orm):
        """Re-create the permission removed by forwards()."""
        perm, created = HuePermission.objects.get_or_create(app='metastore', action='read_only_access')

    # Frozen ORM model definitions captured by South at migration time;
    # not used by forwards()/backwards() above (they import the live model).
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'useradmin.grouppermission': {
            'Meta': {'object_name': 'GroupPermission'},
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']"}),
            'hue_permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['useradmin.HuePermission']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'useradmin.huepermission': {
            'Meta': {'object_name': 'HuePermission'},
            'action': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'app': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
            'description': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'through': "orm['useradmin.GroupPermission']", 'symmetrical': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'useradmin.ldapgroup': {
            'Meta': {'object_name': 'LdapGroup'},
            'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'group'", 'to': "orm['auth.Group']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        'useradmin.userprofile': {
            'Meta': {'object_name': 'UserProfile'},
            'creation_method': ('django.db.models.fields.CharField', [], {'default': "'HUE'", 'max_length': '64'}),
            'home_directory': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'unique': 'True'})
        }
    }

    complete_apps = ['useradmin']
    symmetrical = True
| StarcoderdataPython |
6400706 | <reponame>palisadoes/pattoo
#!/usr/bin/env python3
"""Test pattoo configuration."""
import os
import unittest
import sys
from random import random
import time
# Try to create a working PYTHONPATH
EXEC_DIR = os.path.dirname(os.path.realpath(__file__))
# Four directory levels up from this test module == repository root.
ROOT_DIR = os.path.abspath(os.path.join(
    os.path.abspath(os.path.join(
        os.path.abspath(os.path.join(
            os.path.abspath(os.path.join(
                EXEC_DIR,
                os.pardir)), os.pardir)), os.pardir)), os.pardir))
_EXPECTED = '{0}pattoo{0}tests{0}pattoo_{0}db{0}table'.format(os.sep)
if EXEC_DIR.endswith(_EXPECTED) is True:
    # We need to prepend the path in case the repo has been installed
    # elsewhere on the system using PIP. This could corrupt expected results
    sys.path.insert(0, ROOT_DIR)
else:
    print('''This script is not installed in the "{0}" directory. Please fix.\
'''.format(_EXPECTED))
    sys.exit(2)
from pattoo_shared import data, times
from pattoo_shared.constants import DATA_FLOAT, PattooDBrecord
from pattoo.db.table import datapoint, agent
from pattoo.db.table import data as lib_data
from pattoo.db.table.datapoint import DataPoint
from pattoo.db.models import DataPoint as _DataPoint
from pattoo.db import db
from pattoo.constants import IDXTimestampValue
from tests.libraries.configuration import UnittestConfig
class TestBasicFunctions(unittest.TestCase):
    """Checks all functions and methods."""

    #########################################################################
    # General object setup
    #########################################################################

    def test_idx_datapoint(self):
        """Testing method / function idx_datapoint."""
        # Initialize key variables
        checksum = data.hashstring(str(random()))
        pattoo_db_record = PattooDBrecord(
            pattoo_checksum=checksum,
            pattoo_metadata=[('key', 'value')],
            pattoo_data_type=32,
            pattoo_key='polar_bear',
            pattoo_value=0.0,
            pattoo_timestamp=1575789070108,
            pattoo_agent_polled_target='panda_bear',
            pattoo_agent_program='koala_bear',
            pattoo_agent_hostname='grizzly_bear',
            pattoo_agent_id='red_stripe_beer',
            pattoo_agent_polling_interval=10000)

        # Checksum should not exist
        self.assertFalse(datapoint.checksum_exists(checksum))

        # Test creation
        result = datapoint.idx_datapoint(pattoo_db_record)
        expected = datapoint.checksum_exists(checksum)
        self.assertEqual(result, expected)

        # Test after creation: the call must be idempotent and return the
        # same index instead of inserting a duplicate row.
        result = datapoint.idx_datapoint(pattoo_db_record)
        expected = datapoint.checksum_exists(checksum)
        self.assertEqual(result, expected)

    def test_checksum_exists(self):
        """Testing method / function checksum_exists."""
        # Initialize key variables: -1 is an impossible checksum value.
        result = datapoint.checksum_exists(-1)
        polling_interval = 1
        self.assertFalse(result)

        # Create a new Agent entry
        agent_id = data.hashstring(str(random()))
        agent_target = data.hashstring(str(random()))
        agent_program = data.hashstring(str(random()))
        agent.insert_row(agent_id, agent_target, agent_program)
        idx_agent = agent.exists(agent_id, agent_target)

        # Create entry and check
        _checksum = data.hashstring(str(random()))
        result = datapoint.checksum_exists(_checksum)
        self.assertFalse(result)
        datapoint.insert_row(
            _checksum, DATA_FLOAT, polling_interval, idx_agent)
        result = datapoint.checksum_exists(_checksum)
        self.assertTrue(bool(result))
        self.assertTrue(isinstance(result, int))

    def test_insert_row(self):
        """Testing method / function insert_row."""
        # Initialize key variables
        result = datapoint.checksum_exists(-1)
        polling_interval = 1
        self.assertFalse(result)

        # Create a new Agent entry
        agent_id = data.hashstring(str(random()))
        agent_target = data.hashstring(str(random()))
        agent_program = data.hashstring(str(random()))
        agent.insert_row(agent_id, agent_target, agent_program)
        idx_agent = agent.exists(agent_id, agent_target)

        # Create entry and check
        checksum = data.hashstring(str(random()))
        result = datapoint.checksum_exists(checksum)
        self.assertFalse(result)
        datapoint.insert_row(checksum, DATA_FLOAT, polling_interval, idx_agent)
        result = datapoint.checksum_exists(checksum)
        self.assertTrue(bool(result))
        self.assertTrue(isinstance(result, int))

    def test__counters(self):
        """Testing method / function _counters."""
        # Create a counter-like dict of timestamp -> value
        increment = 2
        inputs = {}
        for item in range(0, 20, increment):
            inputs[item] = item
        result = datapoint._counters(inputs, 1, 1)
        # Differencing N samples yields N-1 rates.
        self.assertEqual(len(inputs) - 1, len(result))
        for item in result:
            self.assertTrue(item['timestamp'] in inputs)
            self.assertEqual(item['value'], increment * 1000)

    def test__response(self):
        """Testing method / function _response."""
        # Initialize variables
        inputs = {
            1: 1 * 3,
            2: 2 * 3,
            3: 3 * 3,
            4: 4 * 3
        }
        result = datapoint._response(inputs)
        self.assertEqual(len(inputs), len(result))
        for item in result:
            timestamp = item['timestamp']
            self.assertEqual(item['value'], inputs[timestamp])
class TestDataPoint(unittest.TestCase):
    """Checks all functions and methods."""

    def test___init__(self):
        """Testing method / function __init__."""
        # Tested by other methods
        pass

    def test_enabled(self):
        """Testing method / function enabled."""
        # Create a new row in the database and test
        idx_datapoint = _idx_datapoint()
        obj = DataPoint(idx_datapoint)

        # Get the result straight from the DB and compare with the accessor.
        with db.db_query(20105) as session:
            result = session.query(_DataPoint.enabled).filter(
                _DataPoint.idx_datapoint == idx_datapoint).one()
        self.assertEqual(bool(result.enabled), obj.enabled())

    def test_idx_agent(self):
        """Testing method / function idx_agent."""
        # Create a new row in the database and test
        idx_datapoint = _idx_datapoint()
        obj = DataPoint(idx_datapoint)

        # Get the result
        with db.db_query(20104) as session:
            result = session.query(_DataPoint.idx_agent).filter(
                _DataPoint.idx_datapoint == idx_datapoint).one()
        self.assertEqual(result.idx_agent, obj.idx_agent())

    def test_checksum(self):
        """Testing method / function checksum."""
        # Create a new row in the database and test
        idx_datapoint = _idx_datapoint()
        obj = DataPoint(idx_datapoint)

        # Get the result (checksum is stored as bytes, accessor returns str)
        with db.db_query(20103) as session:
            result = session.query(_DataPoint.checksum).filter(
                _DataPoint.idx_datapoint == idx_datapoint).one()
        self.assertEqual(result.checksum.decode(), obj.checksum())

    def test_data_type(self):
        """Testing method / function data_type."""
        # Create a new row in the database and test
        idx_datapoint = _idx_datapoint()
        obj = DataPoint(idx_datapoint)

        # Get the result
        with db.db_query(20102) as session:
            result = session.query(_DataPoint.data_type).filter(
                _DataPoint.idx_datapoint == idx_datapoint).one()
        self.assertEqual(result.data_type, obj.data_type())

    def test_exists(self):
        """Testing method / function exists."""
        # Create a new row in the database and test
        idx_datapoint = _idx_datapoint()
        obj = DataPoint(idx_datapoint)
        self.assertTrue(obj.exists())

    def test_last_timestamp(self):
        """Testing method / function last_timestamp."""
        # Create a new row in the database and test
        idx_datapoint = _idx_datapoint()
        obj = DataPoint(idx_datapoint)

        # Get the result
        with db.db_query(20101) as session:
            result = session.query(_DataPoint.last_timestamp).filter(
                _DataPoint.idx_datapoint == idx_datapoint).one()
        self.assertEqual(result.last_timestamp, obj.last_timestamp())

    def test_polling_interval(self):
        """Testing method / function polling_interval."""
        # Create a new row in the database and test
        idx_datapoint = _idx_datapoint()
        obj = DataPoint(idx_datapoint)

        # Get the result
        with db.db_query(20106) as session:
            result = session.query(_DataPoint.polling_interval).filter(
                _DataPoint.idx_datapoint == idx_datapoint).one()
        self.assertEqual(result.polling_interval, obj.polling_interval())

    def test_data(self):
        """Testing method / function data."""
        # Initialize key variables
        _data = []
        expected = []
        checksum = data.hashstring(str(random()))
        pattoo_key = data.hashstring(str(random()))
        agent_id = data.hashstring(str(random()))
        polling_interval = 300 * 1000
        data_type = DATA_FLOAT
        _pattoo_value = 27
        _timestamp = int(time.time() * 1000)
        ts_start = _timestamp
        for count in range(0, 10):
            timestamp = _timestamp + (polling_interval * count)
            ts_stop = timestamp
            pattoo_value = _pattoo_value * count
            # NOTE(review): the record below stores pattoo_value * count
            # (i.e. 27 * count^2). Only the `_data` rows inserted via
            # lib_data.insert_rows() feed the comparison, so the test still
            # passes -- but verify this double multiplication is intended.
            insert = PattooDBrecord(
                pattoo_checksum=checksum,
                pattoo_key=pattoo_key,
                pattoo_agent_id=agent_id,
                pattoo_agent_polling_interval=polling_interval,
                pattoo_timestamp=timestamp,
                pattoo_data_type=data_type,
                pattoo_value=pattoo_value * count,
                pattoo_agent_polled_target='pattoo_agent_polled_target',
                pattoo_agent_program='pattoo_agent_program',
                pattoo_agent_hostname='pattoo_agent_hostname',
                pattoo_metadata=[]
            )

            # Create checksum entry in the DB, then update the data table
            idx_datapoint = datapoint.idx_datapoint(insert)
            _data.append(IDXTimestampValue(
                idx_datapoint=idx_datapoint,
                polling_interval=polling_interval,
                timestamp=timestamp,
                value=pattoo_value))

            # Append to expected results
            expected.append(
                {'timestamp': times.normalized_timestamp(
                    polling_interval, timestamp), 'value': pattoo_value}
            )

        # Insert rows of new data
        lib_data.insert_rows(_data)

        # Test
        obj = DataPoint(idx_datapoint)
        result = obj.data(ts_start, ts_stop)
        self.assertEqual(result, expected)
def _idx_datapoint():
    """Create a new DataPoint db entry for use by the tests.

    A fresh Agent row is inserted first so the DataPoint can reference it.

    Args:
        None

    Returns:
        result: idx_datapoint value of the newly inserted DataPoint row
    """
    # Initialize key variables
    polling_interval = 1

    # Create a new Agent entry
    agent_id = data.hashstring(str(random()))
    agent_target = data.hashstring(str(random()))
    agent_program = data.hashstring(str(random()))
    agent.insert_row(agent_id, agent_target, agent_program)
    idx_agent = agent.exists(agent_id, agent_target)

    # Create entry and check
    _checksum = data.hashstring(str(random()))
    result = datapoint.checksum_exists(_checksum)
    datapoint.insert_row(
        _checksum, DATA_FLOAT, polling_interval, idx_agent)
    result = datapoint.checksum_exists(_checksum)
    return result
if __name__ == '__main__':
    # Make sure the environment is OK to run unittests
    UnittestConfig().create()

    # Do the unit test
    unittest.main()
| StarcoderdataPython |
9357 | <gh_stars>10-100
from boa3.builtin import public
from boa3.builtin.interop.contract import destroy_contract
@public
def Main():
    """Public contract entry point: calls the interop API to destroy
    (remove) this smart contract."""
    destroy_contract()
| StarcoderdataPython |
95019 | <reponame>opus49/py3odb
"""Used to share fixtures among tests."""
import pathlib
import pytest
from .context import py3odb
from .context import main
# Canned cursor results keyed by SQL text. Each entry mimics a py3odb cursor:
# `description` is a DB-API-style tuple of 7-tuples (name, type_code, ...),
# `rows` is the sequence of Row objects the cursor would yield.
MOCK_CURSOR_DATA = {
    "SELECT * FROM <odb>": {
        "description": (
            ('lat@hdr', 2, None, None, None, None, None),
            ('lon@hdr', 2, None, None, None, None, None),
            ('varno@body', 1, None, None, None, None, None),
            ('obsvalue@body', 2, None, None, None, None, None)
        ),
        "rows": (
            py3odb.row.Row(
                {"lat@hdr": 23.1, "lon@hdr": 120.3, "varno@body": 1, "obsvalue": 3.2}
            ),
            py3odb.row.Row(
                {"lat@hdr": -13.2, "lon@hdr": -10.3, "varno@body": 2, "obsvalue": 7.8}
            ),
            py3odb.row.Row(
                {"lat@hdr": 3.8, "lon@hdr": 40.2, "varno@body": 3, "obsvalue": -1.2}
            )
        )
    },
    "SELECT DISTINCT varno@body FROM <odb>": {
        "description": (
            ('varno@body', 1, None, None, None, None, None),
        ),
        "rows": (
            py3odb.row.Row({"varno@body": 1}),
            py3odb.row.Row({"varno@body": 2}),
            py3odb.row.Row({"varno@body": 3})
        )
    },
    # Sentinel entry for queries that return nothing.
    "empty": {
        "description": (),
        "rows": ()
    }
}
class MockReader:  # pylint: disable=too-few-public-methods
    """Mock Reader: replays canned MOCK_CURSOR_DATA for one SQL command.

    Acts as both a context manager and its own iterator, like the real
    py3odb Reader.
    """

    def __init__(self, sql_command):
        entry = MOCK_CURSOR_DATA[sql_command]
        self._rows = entry["rows"]
        self._description = entry["description"]
        self._iter_index = 0

    def __enter__(self):
        return self

    def __exit__(self, *args):
        pass

    def __iter__(self):
        return self

    def __next__(self):
        try:
            row = self._rows[self._iter_index]
        except IndexError:
            raise StopIteration
        self._iter_index += 1
        return row

    @property
    def description(self):
        """Return the mock cursor description."""
        return self._description
@pytest.fixture(name="mock_reader_distinct_varno")
def mock_reader_distinct_varno_fixture(monkeypatch):
    """Fixture for mocking a Reader object that acts like a SELECT DISTINCT varno query."""
    def mock_reader(*args):
        """mock reader function"""
        return MockReader("SELECT DISTINCT varno@body FROM <odb>")
    # Only the dump command uses this query; the query command is untouched.
    monkeypatch.setattr(py3odb.cli.dump, 'Reader', mock_reader)
@pytest.fixture(name="mock_reader_select_all")
def mock_reader_select_all_fixture(monkeypatch):
    """Fixture for mocking a Reader object that acts like a SELECT * FROM <odb> query."""
    def mock_reader(*args):
        """mock reader function"""
        return MockReader("SELECT * FROM <odb>")
    # Both the dump and query subcommands issue SELECT *.
    monkeypatch.setattr(py3odb.cli.dump, 'Reader', mock_reader)
    monkeypatch.setattr(py3odb.cli.query, 'Reader', mock_reader)
@pytest.fixture(name="mock_reader_empty")
def mock_reader_empty_fixture(monkeypatch):
    """Fixture for mocking a Reader object that returns no data."""
    def mock_reader(*args):
        """mock reader function"""
        return MockReader("empty")
    monkeypatch.setattr(py3odb.cli.dump, 'Reader', mock_reader)
@pytest.fixture(name="mock_subparsers")
def mock_subparsers_fixture():
    """Fixture for mocking a subparsers object from argparse.

    The mocks record every add_parser/add_argument/set_defaults call so
    tests can assert on how a command registered itself.
    """
    class MockSubparsers:  # pylint: disable=too-few-public-methods
        """Mock subparsers from argparse."""
        def __init__(self):
            self.parsers = []

        def add_parser(self, *args, **kwargs):
            """Add a parser"""
            self.parsers.append(MockParser(*args, **kwargs))
            return self.parsers[-1]

    class MockParser:
        """Mock parser from argparse."""
        def __init__(self, *args, **kwargs):
            self.args = args
            self.kwargs = kwargs
            self.arguments = []
            self.defaults = {}

        def add_argument(self, *args, **kwargs):
            """Add an argument"""
            self.arguments.append((args, kwargs))

        def set_defaults(self, **kwargs):
            """Set a default"""
            for keyname, value in kwargs.items():
                self.defaults[keyname] = value

    return MockSubparsers()
@pytest.fixture(name="usage")
def usage_fixture(capsys):
    """Get the usage output from main, as a list of lines."""
    main.usage()
    return capsys.readouterr().out.splitlines()
@pytest.fixture(name="dump_command")
def dump_command_fixture(mock_subparsers):
    """Get a DumpCommand object registered against the mock subparsers."""
    return py3odb.cli.dump.DumpCommand(mock_subparsers)
@pytest.fixture(name="query_command")
def query_command_fixture(mock_subparsers):
    """Get a QueryCommand object registered against the mock subparsers."""
    return py3odb.cli.query.QueryCommand(mock_subparsers)
@pytest.fixture(name="geopoints_command")
def geopoints_command_fixture(mock_subparsers):
    """Get a GeopointsCommand object registered against the mock subparsers."""
    return py3odb.cli.geopoints.GeopointsCommand(mock_subparsers)
@pytest.fixture(name="sample_odb")
def sample_odb_fixture():
    """Get the fully qualified path to the sample odb file."""
    return str(pathlib.Path(__file__).parent.parent / "resources" / "sample.odb")
@pytest.fixture(name="main_program")
def main_program_fixture():
    """Get the fully qualified path to the main program file."""
    return str(pathlib.Path(__file__).parent.parent / "py3odb" / "cli" / "main.py")
| StarcoderdataPython |
5093767 | <filename>run.py
from kaggle_environments import evaluate, make
from IPython.display import display_html
import time
ts = time.time()  # start-of-run timestamp, also used to name the replay file
env = make("halite", debug=True)
# One bot from submission.py against three built-in random agents.
env.run(["submission.py", "random", "random", "random"])
print(str(time.time() - ts))  # elapsed seconds for the episode
html = env.render(mode="html")
json = env.render(mode="json")  # NOTE(review): shadows the stdlib `json` module name
with open("latest.html", "w") as f:
    f.write(html)
with open("replays/"+str(int(ts))+".json", "w") as f:
    f.write(json)
| StarcoderdataPython |
9714452 | <filename>hashword-gui/hashword-long-gui.py
#!/usr/bin/python
######################################################################################
# hashword-gui written by:
# Joshua "MrSchism" Embrey [<EMAIL>]
# Joseph "Arcarna" Preston [<EMAIL>]
# Intial commit: December 2, 2013
# Current version: January 22, 2015
######################################################################################
# Declare imports
import wx # Import wx for GUI elements
import hashlib # Import hashlib for sha2 functionality.
# Declare global constant VER
global VER
VER = '20150122'
# Create Seed Dialog box
class SeedDialog(wx.Dialog):
    """Modal dialog prompting for the (case-sensitive) hashword seed.

    The entered text is available in `self.result` after the dialog closes;
    it stays None if the dialog is cancelled.
    """

    def __init__(self, parent, id=-1, title="hashword"):
        wx.Dialog.__init__(self, parent, id, title, size=(-1, 200))
        self.mainSizer = wx.BoxSizer(wx.VERTICAL) # define mainSizer BoxSizer element
        self.buttonSizer = wx.BoxSizer(wx.HORIZONTAL) # Define buttonSizer BoxSizer element
        self.label = wx.StaticText(self, label="Hashword generator v 2.0.3 (build" + VER + ")\nPlease enter your hashword seed.\n\n*NOTE: hashword seeds are case sensitive.*") # Define label StaticText element
        self.label2 = wx.StaticText(self, label="Hashword Seed:") # Define label2 StaticText element
        self.field = wx.TextCtrl(self, value="", size=(300, 20)) # Define field TextCtrl element
        self.okbutton = wx.Button(self, label="Generate", id=wx.ID_OK) # Define okbutton Button element
        self.mainSizer.Add(self.label, 0, wx.ALL, 8 ) # Add Label to mainSizer
        self.mainSizer.Add(self.label2, 0, wx.ALL, 8 ) # Add label2 to mainSizer
        self.mainSizer.Add(self.field, 0, wx.ALL, 8 ) # Add field to mainSizer
        self.buttonSizer.Add(self.okbutton, 0, wx.ALL, 8 ) # Add okbutton to buttonSizer
        self.mainSizer.Add(self.buttonSizer, 0, wx.ALL, 0) # Add buttonSizer to mainSizer
        self.Bind(wx.EVT_BUTTON, self.onOK, id=wx.ID_OK) # Bind "OK" function to ID_OK (ok button)
        self.Bind(wx.EVT_TEXT_ENTER, self.onOK) # Bind OK function to enter key
        self.SetSizer(self.mainSizer) # Set sizer as mainSizer
        self.result = None # set result to none

    # Define okay and cancel events
    def onOK(self, event): # On OK...
        self.result = self.field.GetValue() # ...make the result the value of the field...
        self.Destroy() # Then self-destruct window

    def onCancel(self, event): # on cancel (or window close)...
        self.result = None # ...leave the result blank...
        self.Destroy() # Then self-destruct window
# Create initial frame
class Frame(wx.Frame):
    """Main window: a button opening the seed dialog and an output field
    showing '#' followed by the SHA-256 hex digest of the seed."""

    def __init__(self, parent, title):
        wx.Frame.__init__(self, parent, title=title, size=(320, 80))
        self.panel = wx.Panel(self) # Define panel as a panel element
        self.Bind(wx.EVT_CLOSE, self.OnCloseWindow) # Bind close to close window
        self.btn = wx.Button(self.panel, -1, "Press here for hashword input") # define btn as a button element
        self.Bind(wx.EVT_BUTTON, self.GetSeed, self.btn) # bind opening GetSeed to btn
        self.txt = wx.TextCtrl(self.panel, -1, size=(300, -1)) # define txt as a TxtCtrl element
        self.txt.SetValue('') # set txt's value to blank
        sizer = wx.BoxSizer(wx.VERTICAL) # define sizer as a BoxSizer element
        sizer.Add(self.btn) # add btn to sizer
        sizer.Add(self.txt) # add txt to sizer
        self.panel.SetSizer(sizer) # set sizer for panel to sizer
        self.Show() # show frame

    # Define core function
    def GetSeed(self, e):
        dlg = SeedDialog(self) # set dlg as SeedDialog
        dlg.ShowModal() # Show dialog when the OK from SeedDialog is used
        # NOTE(review): Python-2-era code -- under Python 3, sha256() needs
        # bytes, so dlg.result would have to be .encode()d first; confirm the
        # intended interpreter before porting.
        self.txt.SetValue("#" + hashlib.sha256(dlg.result).hexdigest()) # set the value of txt as a hash symbol followed by the sha256 hash of the result of what was input into SeedDialog

    # Define exit window
    def OnCloseWindow(self, e):
        self.Destroy() # Self-destruct
# Application entry point: create the wx app, build the main frame (which shows
# itself in __init__), and hand control to the GUI event loop.
app = wx.App() # Define app as an application
frame = Frame(None, 'Long Hashword Generator') # Define frame with name
app.MainLoop() # blocks until all top-level windows are destroyed
| StarcoderdataPython |
1831732 | #!/usr/bin/env python
#
interactive = False  # toggled to True by the __main__ block to enable plotting
import sys, os
here = os.path.dirname(__file__)
datadir = os.path.join(here, "../data")  # shared test-data directory
sys.path.insert(0, datadir)  # make helpers in the data directory importable (e.g. "dos")
import unittest, warnings
import numpy as np, histogram.hdf as hh, histogram as H
from multiphonon.backward import sqe2dos
from dos import loadDOS
class TestCase(unittest.TestCase):
    """Regression test for the single-phonon SQE -> DOS back-calculation."""

    def test1a(self):
        sqe = hh.load(os.path.join(datadir, "V-iqe-dE_0.2.h5"))
        dos = sqe2dos.singlephonon_sqe2dos(
            sqe, T=300, Ecutoff=55., elastic_E_cutoff=(0.,0.), M=50.94)
        energy_axis = dos.E
        dos_values = dos.I
        # Plot only when the module is run as a script (see __main__ block).
        if interactive:
            import pylab
            pylab.plot(energy_axis, dos_values)
            pylab.show()
        return
if __name__ == "__main__":
    interactive = True  # enable the pylab plotting branch inside the tests
    import pylab
    unittest.main()
# End of file
| StarcoderdataPython |
1745338 | # https://www.kaggle.com/wolfram77/puzzlef-pagerank-cuda-static-vs-incremental
# NOTE: this is an IPython/Kaggle notebook script — the "!" lines are shell
# escapes and the file is not importable as plain Python.
import os
from IPython.display import FileLink
# Repository/script name and the Kaggle input dataset location.
src="pagerank-cuda-static-vs-incremental"
inp="/kaggle/input/temporal-graphs"
out="{}.txt".format(src)
# Truncate the output log and expose it as a downloadable link in the notebook.
!printf "" > "$out"
display(FileLink(out))
# Unlimited stack (deep recursion in the benchmark) and GPU sanity check.
!ulimit -s unlimited && echo ""
!nvidia-smi && echo ""
# Download program
!rm -rf $src
!git clone https://github.com/puzzlef/$src
!echo ""
# Run: compile the CUDA benchmark, then run it on each temporal graph,
# appending all output to the shared log file.
!nvcc -std=c++17 -Xcompiler -DNVGRAPH_DISABLE -O3 $src/main.cu
!stdbuf --output=L ./a.out $inp/email-Eu-core-temporal.txt 2>&1 | tee -a "$out"
!stdbuf --output=L ./a.out $inp/CollegeMsg.txt 2>&1 | tee -a "$out"
!stdbuf --output=L ./a.out $inp/sx-mathoverflow.txt 2>&1 | tee -a "$out"
!stdbuf --output=L ./a.out $inp/sx-askubuntu.txt 2>&1 | tee -a "$out"
!stdbuf --output=L ./a.out $inp/sx-superuser.txt 2>&1 | tee -a "$out"
!stdbuf --output=L ./a.out $inp/wiki-talk-temporal.txt 2>&1 | tee -a "$out"
!stdbuf --output=L ./a.out $inp/sx-stackoverflow.txt 2>&1 | tee -a "$out"
| StarcoderdataPython |
4940345 | import glob
import logging
import os
from contextlib import contextmanager
from cave.utils.exceptions import NotUniqueError
@contextmanager
def changedir(newdir):
    """Temporarily change the working directory to *newdir* (``~`` expanded).

    Restores the previous working directory on exit, even on error.
    """
    previous_dir = os.getcwd()
    os.chdir(os.path.expanduser(newdir))
    try:
        yield
    finally:
        os.chdir(previous_dir)
class BaseReader(object):
    """
    Abstract base class to inherit reader from.
    Reader load necessary objects (scenario, runhistory, trajectory) from files for different formats.
    Please note that it is strongly encouraged to build a converter (from
    `BaseConverter <apidoc/cave.reader.conversion.base_converter.BaseConverter>`_ ) instead of designing a new reader.
    Conversion aims to make it easy and feasible to quickly support new file-formats.
    """
    def __init__(self, folder, ta_exec_dir):
        """
        :param folder: directory containing the run output to read
        :param ta_exec_dir: directory the target algorithm was executed from
        """
        self.logger = logging.getLogger("cave.reader")
        self.folder = folder
        self.ta_exec_dir = ta_exec_dir
        self.scen = None  # scenario cache, populated by subclasses
    def get_scenario(self):
        """Expects `self.folder/scenario.txt` with appropriately formatted
        scenario-information (`<https://automl.github.io/SMAC3/stable/options.html#scenario>`_)"""
        # NOTE: `raise NotImplemented()` (as previously written) raises TypeError,
        # since NotImplemented is a sentinel value, not an exception type.
        raise NotImplementedError()
    def get_runhistory(self, config_space):
        """Create RunHistory-object from files."""
        raise NotImplementedError()
    def get_validated_runhistory(self, config_space):
        """Create validated runhistory from files, if available."""
        raise NotImplementedError()
    def get_trajectory(self, config_space):
        """Create trajectory (list with dicts as entries)"""
        raise NotImplementedError()
    @classmethod
    def check_for_files(cls, path):
        """Check whether `path` contains the files this reader expects."""
        raise NotImplementedError()
    @classmethod
    def get_glob_file(cls, folder, fn, raise_on_failure=True):
        """
        If a file is not found in the expected path structure, we can check if it's unique in the subfolders and if so, return it.

        :param folder: root directory to search under (recursively)
        :param fn: file name to look for
        :param raise_on_failure: if True, raise when the file is missing or ambiguous;
            otherwise return an empty string
        :return: the unique matching path, or "" when not found/ambiguous and not raising
        :raises FileNotFoundError: file is absent and `raise_on_failure` is True
        :raises NotUniqueError: file occurs more than once and `raise_on_failure` is True
        """
        # '**' with recursive=True also matches zero directories, so a direct hit
        # in `folder` itself is found as well.
        globbed = glob.glob(os.path.join(folder, '**', fn), recursive=True)
        if len(globbed) == 1:
            return globbed[0]
        elif len(globbed) < 1:
            if raise_on_failure:
                raise FileNotFoundError("The file \"{}\" does not exist in \"{}\".".format(fn, folder))
        elif len(globbed) > 1:
            if raise_on_failure:
                raise NotUniqueError("The file \"{}\" exists {} times in \"{}\", but not in the expected place.".format(
                    fn, len(globbed), folder))
        return ""
12816018 | <gh_stars>0
from . import db_api
from . import misc
from .notify_admins import on_startup_notify | StarcoderdataPython |
1897412 | import os
import tempfile
import ffmpeg
import pytest
from django.conf import settings
from django.contrib.auth import get_user_model
from django.core.files import File
from django.core.files.uploadedfile import SimpleUploadedFile
from django.test import TestCase
from django.urls import reverse
from rest_framework.test import APIClient, APIRequestFactory
from converter.utils.my_converter import convert
from converter.video.models import VideoConverted, VideoRaw
User = get_user_model()
UPLOAD_URL = reverse("videos:upload")
def fileUploader(filename, test_file):
    """Read *test_file* from the test fixtures and wrap it as an uploaded file."""
    fixture = File(open(f"{settings.APPS_DIR}/test_video_files/{test_file}", "rb"))
    return SimpleUploadedFile(
        filename, fixture.read(), content_type="multipart/form-data"
    )
class TestUploadVideoView(TestCase):
    """
    Tests regarding the upload process through the API view.

    Every conversion test uploads a sample video, requests a conversion to a
    target container format, and asserts the converted file's codec via
    ffprobe.  The 16 source/target combinations previously had 16 copy-pasted
    bodies; they now share the private helpers below.
    """

    # Codec expected in the converted file for each requested container format.
    EXPECTED_CODECS = {"mp4": "h264", "avi": "mpeg4", "mkv": "vp8", "3gp": "h263"}

    def setUp(self) -> None:
        """
        Setup for tests: temp media root, an active user, and one pre-made
        raw/converted pair for the general test.
        :return: None
        """
        settings.MEDIA_ROOT = tempfile.mkdtemp()
        self.conv_dir = os.path.join(settings.MEDIA_ROOT, "converted_videos")
        os.makedirs(self.conv_dir)
        self.filename = "vid.3gp"
        self.file = File(
            open(f"{settings.APPS_DIR}/test_video_files/sample-3gp.3gp", "rb")
        )
        self.uploaded_file = SimpleUploadedFile(
            self.filename, self.file.read(), content_type="multipart/form-data"
        )
        self.factory = APIRequestFactory()
        self.client = APIClient()
        self.user = User.objects.create(username="test", password="<PASSWORD>")
        self.user.is_active = True
        self.user.save()
        self.raw_obj_general = VideoRaw.objects.create(
            user=self.user, file=self.uploaded_file, req_format="avi"
        )
        self.raw_obj_general.save()
        self.conv_obj_general = VideoConverted.objects.create(
            user=self.user, file=None, raw=self.raw_obj_general
        )
        self.conv_obj_general.save()

    def _convert_sample(self, filename, sample, req_format):
        """Upload *sample* as *filename*, convert to *req_format*, return the probed first stream."""
        uploaded_file = fileUploader(filename, sample)
        raw_obj = VideoRaw.objects.create(
            user=self.user, file=uploaded_file, req_format=req_format
        )
        raw_obj.save()
        conv_obj = VideoConverted.objects.create(
            user=self.user, file=None, raw=raw_obj
        )
        conv_obj.save()
        conversion = convert(raw_obj.uuid, conv_obj.uuid)
        stream = ffmpeg.probe(conversion.file.path, v="quiet")
        return stream["streams"][0]

    def _assert_conversion(self, source_ext, target_format):
        """Convert a sample of *source_ext* to *target_format* and check the resulting codec."""
        stream = self._convert_sample(
            "vid.{}".format(source_ext),
            "sample-{0}.{0}".format(source_ext),
            target_format,
        )
        self.assertEqual(stream["codec_type"], "video")
        self.assertEqual(stream["codec_name"], self.EXPECTED_CODECS[target_format])

    @pytest.mark.conversion
    def test_convert_general(self):
        """
        A general test with any video file converted to any format
        """
        conversion = convert(self.raw_obj_general.uuid, self.conv_obj_general.uuid)
        stream = ffmpeg.probe(conversion.file.path, v="quiet")
        self.assertEqual(stream["streams"][0]["codec_type"], "video")
        self.assertEqual(stream["streams"][0]["codec_name"], "mpeg4")

    # Series of tests that run all the possible conversions, from all 4 formats to all 4 formats (16 cases)
    @pytest.mark.conversion
    def test_convert_mp4_to_mp4(self):
        self._assert_conversion("mp4", "mp4")

    @pytest.mark.conversion
    def test_convert_mp4_to_avi(self):
        self._assert_conversion("mp4", "avi")

    @pytest.mark.conversion
    def test_convert_mp4_to_mkv(self):
        self._assert_conversion("mp4", "mkv")

    @pytest.mark.conversion
    def test_convert_mp4_to_3gp(self):
        self._assert_conversion("mp4", "3gp")

    @pytest.mark.conversion
    def test_convert_avi_to_mp4(self):
        self._assert_conversion("avi", "mp4")

    @pytest.mark.conversion
    def test_convert_avi_to_avi(self):
        self._assert_conversion("avi", "avi")

    @pytest.mark.conversion
    def test_convert_avi_to_mkv(self):
        self._assert_conversion("avi", "mkv")

    @pytest.mark.conversion
    def test_convert_avi_to_3gp(self):
        self._assert_conversion("avi", "3gp")

    @pytest.mark.conversion
    def test_convert_mkv_to_mp4(self):
        self._assert_conversion("mkv", "mp4")

    @pytest.mark.conversion
    def test_convert_mkv_to_avi(self):
        self._assert_conversion("mkv", "avi")

    @pytest.mark.conversion
    def test_convert_mkv_to_mkv(self):
        self._assert_conversion("mkv", "mkv")

    @pytest.mark.conversion
    def test_convert_mkv_to_3gp(self):
        self._assert_conversion("mkv", "3gp")

    @pytest.mark.conversion
    def test_convert_3gp_to_mp4(self):
        self._assert_conversion("3gp", "mp4")

    @pytest.mark.conversion
    def test_convert_3gp_to_avi(self):
        self._assert_conversion("3gp", "avi")

    @pytest.mark.conversion
    def test_convert_3gp_to_mkv(self):
        self._assert_conversion("3gp", "mkv")

    @pytest.mark.conversion
    def test_convert_3gp_to_3gp(self):
        self._assert_conversion("3gp", "3gp")
| StarcoderdataPython |
8164325 | <reponame>ComputerScienceHouse/Jumpstart-Next<filename>components/calendar/__init__.py
from fastapi import APIRouter, Response
from starlette.status import *
import os, json
from pymongo import MongoClient
import logging
import time
import requests
import threading
from component_util import post_update
import datetime
conf = json.loads(os.environ["CONFIG"])  # component configuration injected via env var
mongo_client = MongoClient(
    host=conf["mongodb"]["host"],
    port=conf["mongodb"]["port"],
    username=conf["mongodb"]["username"],
    password=conf["mongodb"]["password"],
    tls=conf["mongodb"]["tls"],
)
db = mongo_client[conf["mongodb"]["database"]]  # database handle shared by this component
class CalendarWrapper:
    """Periodically fetches upcoming CSH calendar events and caches them in Mongo.

    ``start()`` launches a daemon thread that polls the calendar API every
    ``update`` seconds, annotates each event with a relative "startsIn" time,
    upserts the list into ``db.calendar`` and notifies listeners.
    """

    def __init__(self, update=30, records=25) -> None:
        self.update = update    # polling interval in seconds
        self.records = records  # number of upcoming events to request

    def fetchLoop(self):
        """Poll the calendar API forever, caching results each iteration."""
        while True:
            try:
                data = requests.get(
                    "https://calendar.csh.rit.edu/events/upcoming",
                    params={"number": self.records},
                ).json()
                # Hoisted: previously time.time() was re-read for every field of
                # every event, giving each a slightly different reference point.
                now = datetime.datetime.fromtimestamp(time.time())
                for d in data:
                    delta = datetime.datetime.fromtimestamp(d["start"]["timestamp"]) - now
                    d["startsIn"] = {
                        "days": delta.days,
                        "hours": delta.seconds // 3600,
                        # NOTE(review): preserved from the original — this is the
                        # total sub-day minutes (includes the hours above), not the
                        # minutes remainder; confirm the consumer expects that.
                        "minutes": delta.seconds // 60,
                    }
                data = {"events": data, "record": "events"}
                db.calendar.replace_one({"record": "events"}, data, upsert=True)
                post_update("calendar.events")
            except Exception:
                # Was a bare `except:`, which also swallowed SystemExit and
                # KeyboardInterrupt; log the traceback instead of hiding it.
                logging.exception("Failed to fetch event data.")
            time.sleep(self.update)

    def start(self):
        """Launch the fetch loop on a daemon thread."""
        self.thread = threading.Thread(
            name="thread_jumpstart-next_component_calendar_events",
            target=self.fetchLoop,
            daemon=True,
        )
        self.thread.start()
        logging.info("Started calendar.events thread")
# Router exposing this component's endpoints under /api/components/calendar.
CalendarComponentRouter = APIRouter(
    prefix="/api/components/calendar", tags=["component", "calendar"]
)
@CalendarComponentRouter.get("/events")
async def get_events(r: Response):
    """Return the cached upcoming-events list (empty if nothing cached yet)."""
    record = db.calendar.find_one(filter={"record": "events"})
    events = record["events"] if record else []
    return {"result": "success", "data": events}
| StarcoderdataPython |
5064933 | <filename>rl_helper/fps.py
import time
import numpy as np
from gym.vector.async_vector_env import AsyncVectorEnv
from gym.vector.sync_vector_env import SyncVectorEnv
from gym.vector.vector_env import VectorEnv, VectorEnvWrapper
def fps(env, steps_per_test=100, episode=10):
    """Benchmark *env* by printing steps-per-second for several episodes.

    :param env: gym environment to step; for an AsyncVectorEnv the reported
        rate is multiplied by the number of sub-environments.
    :param steps_per_test: steps taken per timed round.
    :param episode: number of timed rounds.
    """
    # Each AsyncVectorEnv step advances num_envs environments at once.
    result_multiply = env.num_envs if isinstance(env, AsyncVectorEnv) else 1
    for _ in range(episode):
        start_t = time.time()
        for _ in range(steps_per_test):
            a = env.action_space.sample()  # sample a random action
            obs, reward, done, info = env.step(a)
            # 'done' is a scalar for single envs, an ndarray for vector envs.
            if isinstance(done, int):
                if done:
                    obs = env.reset()
            elif isinstance(done, np.ndarray):
                if done.any():
                    obs = env.reset()
        env.reset()  # fresh episode for the next round (included in the timing, as before)
        dt = time.time() - start_t
        print("fps = {}".format(steps_per_test / dt * result_multiply))
6565792 | <gh_stars>0
from cab.abm.agent import CabAgent
__author__ = '<NAME>'
__version__ = '1.0'
class UrbanAgent(CabAgent):
    """Minimal CAB agent for the urban model; has no behaviour yet."""
    def __init__(self, x, y, gc):
        super().__init__(x, y, gc)
        self.id = "UrbanAgent"  # identifier used by the CAB framework
    def perceive_and_act(self, ca, abm):
        # Required hook of the CabAgent interface; intentionally a no-op for now.
        pass
| StarcoderdataPython |
6642203 | import logging
import requests
import json
import os
from access.search.solrclient import SolrClient
from access.search.solrquery import SolrQuery
from access.search.solrserver import SolrServer
from config.configuration import verify_certificate, solr_protocol
logger = logging.getLogger(__name__)
def index_repository():
    """Re-index every AIP package (*.tar) in local storage into Solr.

    Checks the Solr server is reachable, wipes the 'storagecore1' index, then
    walks the storage directory and posts the contents of each tar package.
    """
    # Imported lazily so importing this module does not require full configuration.
    from config.configuration import solr_host
    from config.configuration import solr_port
    from config.configuration import config_path_storage
    solr_server = SolrServer(solr_protocol, solr_host, solr_port)
    logger.info("Solr server base url: %s" % solr_server.get_base_url())
    sq = SolrQuery(solr_server)
    r = requests.get(sq.get_base_url(), verify=verify_certificate)
    if not r.status_code == 200:
        logger.error("Solr server is not available at: %s" % sq.get_base_url())
        return
    else:
        logger.info("Using Solr server at: %s" % sq.get_base_url())
    # delete index first (URL-encoded delete-all query against storagecore1, committed immediately)
    url_part = "storagecore1/update?stream.body=%3Cdelete%3E%3Cquery%3E*%3C/query%3E%3C/delete%3E&commit=true"
    requests.get(sq.get_base_url() + url_part, verify=verify_certificate)
    package_count = 0
    solr_client = SolrClient(solr_server, "storagecore1")
    for dirpath, _, filenames in os.walk(config_path_storage):
        for f in filenames:
            package_abs_path = os.path.abspath(os.path.join(dirpath, f))
            if package_abs_path.endswith(".tar"):
                logger.info("=========================================================")
                logger.info(package_abs_path)
                logger.info("=========================================================")
                # Package identifier = tar file name without the ".tar" suffix.
                _, file_name = os.path.split(package_abs_path)
                identifier = file_name[0:-4]
                results = solr_client.post_tar_file(package_abs_path, identifier, version=1)
                logger.info("Total number of files posted: %d" % len(results))
                num_ok = sum(1 for result in results if result['status'] == 200)
                logger.info("Number of files posted successfully: %d" % num_ok)
                # NOTE(review): non-200 results are reported as "plain documents" —
                # presumably files indexed without content extraction; confirm semantics.
                num_failed = sum(1 for result in results if result['status'] != 200)
                logger.info("Number of plain documents: %d" % num_failed)
                package_count += 1
    logger.info("Indexing of %d packages available in local storage finished" % package_count)
class SolrUtility(object):
    """
    SolrUtility offers functions to access a Solr instance.

    Call :meth:`availability` first; on success it stores the base URL and the
    unique-key field name on the instance, which all other methods rely on.
    """
    def __init__(self):
        """
        Initialise class with no configured Solr instance.
        @return:
        """
        # Field used as unique document identifier (e.g. 'id', 'lily.key').
        self.solr_unique_key = None
        # Base URL of the Solr core, set by availability().
        self.solr_instance = None
    def availability(self, solr_base_url, solr_unique_key):
        """
        Check whether a Solr instance is reachable. If it is, the URL and unique key
        are stored on the instance and 200 is returned; otherwise they remain 'None'
        and the HTTP status code is returned.
        @param solr_base_url: base URL of solr instance to be used: http://<ip>:<port>/solr/<core>/
        @param solr_unique_key: key that is used as unique identifier within the Solr index (e.g. 'id', 'lily.key')
        @return: HTTP status code of the ping request
        """
        # 'admin/ping' is Solr's health-check handler.
        url = os.path.join(solr_base_url, 'admin/ping/')
        solr_status = requests.get(url, verify=verify_certificate).status_code
        if solr_status == 200:
            self.solr_unique_key = solr_unique_key
            self.solr_instance = solr_base_url
            logger.info('Solr instance %s reachable.', solr_base_url)
        else:
            logger.error('GET request to %s/admin/ping returned HTML status: %d.', solr_base_url, solr_status)
        return solr_status
    def send_query(self, query_string):
        """
        Send query to Solr, return result.
        @param query_string: Solr query expression (appended to the select URL)
        @return: list of result documents, or False if the request failed
        """
        url_suffix = 'select?wt=json&q='  # q=*:*
        query_url = os.path.join(self.solr_instance, url_suffix)
        query = requests.get(query_url + query_string, verify=verify_certificate)
        if query.status_code == 200:
            return query.json()['response']['docs']
        return False
    def _post_update(self, update_data):
        """POST a single-document atomic update to Solr; return the HTTP status code."""
        update_url = os.path.join(self.solr_instance, 'update')
        update_headers = {'Content-Type': 'application/json'}
        update = requests.post(update_url, data=json.dumps([update_data]),
                               headers=update_headers, verify=verify_certificate)
        logger.debug(update.text)
        return update.status_code
    def set_field(self, record_identifier, field, content):
        """
        Update a field with new content. Substitutes previous content.
        @param record_identifier: record identifier (solr identifier)
        @param field: field name
        @param content: new field value
        @return: HTTP status code
        """
        return self._post_update({field: {'set': content}, self.solr_unique_key: record_identifier})
    def set_multiple_fields(self, record_identifier, kv_tuple_list):
        """
        Update a solr document given a list of fieldname/value pairs
        example: set_multiple_fields("bd74c030-3161-4962-9f98-47f6d00c89cc",
        [("field1_s", "value1"), ("field2_b", "true")])
        @param record_identifier: record identifier (solr identifier)
        @param kv_tuple_list: list of fieldname/value pairs
        @return: status code
        """
        update_data = {self.solr_unique_key: record_identifier}
        for field_name, field_value in kv_tuple_list:
            update_data[field_name] = {'set': field_value}
        return self._post_update(update_data)
    def update_document(self, record_identifier, kv_pairs):
        """
        Update a solr document given a list of fieldname/value pairs
        example: update_document("bd74c030-3161-4962-9f98-47f6d00c89cc",
        [{"field_value": "value1", "field_name": "field1_s"}, {"field_value": "value2", "field_name": "field2_s"}])
        @param record_identifier: record identifier (solr identifier)
        @param kv_pairs: list of fieldname/value pairs
        @return: status code
        """
        update_data = {self.solr_unique_key: record_identifier}
        for kv_pair in kv_pairs:
            update_data[kv_pair['field_name']] = {'set': kv_pair['field_value']}
        return self._post_update(update_data)
    def get_doc_id_from_path(self, safe_urn_identifier, entry_path):
        """
        Get identifier from solr document
        @param safe_urn_identifier: safe identifier, e.g. urn:uuid:0426f626-d51d-449c-84fa-d5c32d728509
        @param entry_path: entry path, e.g. /submission/representations/rep1/data/Example1.docx
        @return: document identifier, e.g. "d66c0d7b-0b9d-4a4f-a1d5-7d829f109018"
        @raises RuntimeError: if the query fails or the document lacks the unique key
        """
        solr_query = 'path:"%s%s"' % (safe_urn_identifier, entry_path)
        query_result = self.send_query(solr_query)
        if query_result is False:
            raise RuntimeError("No query result")
        try:
            # IndexError covers an empty result list, KeyError a missing unique key.
            return query_result[0][self.solr_unique_key]
        except (KeyError, IndexError) as e:
            raise RuntimeError("Unable to get document identifier: %s" % e)
5019080 | """
Choose test runner class from --runner command line option
and execute test cases.
"""
import unittest
import optparse
import sys
def get_runner(argv=None):
    """Parse the ``--runner`` option and return the referenced runner class.

    :param argv: optional argument list (defaults to sys.argv, as before) —
        added for testability; existing callers are unaffected.
    :return: the class named by the option's dotted path.
    :raises ValueError: if the value contains no dotted attribute part.
    """
    import importlib  # local import keeps the module's import block unchanged
    parser = optparse.OptionParser()
    parser.add_option('--runner', default='unittest.TextTestRunner',
                      help='name of test runner class to use')
    opts, _args = parser.parse_args(argv)
    module_name, _, attr_name = opts.runner.rpartition('.')
    if not module_name:
        raise ValueError('Can\'t use module as a runner')
    # importlib.import_module resolves dotted module paths correctly; bare
    # __import__('a.b') returns the top-level package 'a', so getattr on the
    # class name would fail for any runner nested more than one level deep.
    runnermod = importlib.import_module(module_name)
    return getattr(runnermod, attr_name)
def run(suite=None):
    """Run *suite* with the command-line-selected runner, or fall back to
    unittest.main discovery when no suite is given. Exits with status 1 on
    failure."""
    runner_cls = get_runner()
    if not suite:
        unittest.main(argv=sys.argv[:1], testRunner=runner_cls)
    else:
        outcome = runner_cls().run(suite)
        if not outcome.wasSuccessful():
            sys.exit(1)
if __name__ == '__main__':
    # Allow invoking this module directly as a test entry point.
    run()
| StarcoderdataPython |
4889549 | #!/usr/bin/env python3
# stdlib
import sys
import datetime
from collections import defaultdict
from tabulate import tabulate
from tqdm import tqdm
# project
import tool.discovery as discovery
from tool.config import CONFIG
from tool.model import Result, Submission
from tool.utils import BColor
class DifferentAnswersException(Exception):
pass
def run(days, parts, authors, ignored_authors, languages, force, silent, all_days_parts, restricted, expand):
    """Run every discovered submission on every discovered input and print results.

    In ``restricted`` mode each submission only runs on its own author's input;
    ``expand`` prints one table per input instead of per-author averages.
    Answers are cross-checked between submissions on the same input; any
    mismatch is collected and reported at the end (exit status 1).
    NOTE(review): the ``silent`` parameter is currently unused in this body — confirm.
    """
    problems = discovery.get_problems(days, parts, all_days_parts)
    printed_day_header = set()  # days whose banner was already printed
    errors = []
    for problem in problems:
        if problem.day not in printed_day_header:
            printed_day_header.add(problem.day)
            print_day_header(problem)
        print_part_header(problem)
        submissions = discovery.get_submissions(problem, authors, ignored_authors, languages, force)
        inputs = discovery.get_inputs(problem)
        results_by_author = defaultdict(list)
        results_by_input = defaultdict(list)
        # In restricted mode each submission runs once (own input only).
        pbar = tqdm(total=len(inputs)*len(submissions) if not restricted else len(submissions))
        for input in inputs:
            previous = None  # previous result on this input, for answer cross-checking
            for submission in submissions:
                pbar.update(1)
                # The split allows having author.lang and author.x.lang files, on the same input
                if restricted and input.author != submission.author.split('.')[0]:
                    continue
                try:
                    result = run_submission(problem, submission, input, previous)
                    results_by_author[submission.author].append(result)
                    results_by_input[input.author].append(result)
                    previous = result
                except DifferentAnswersException as e:
                    errors.append(
                        "{}ERROR: {}{}".format(BColor.RED, e, BColor.ENDC))
        # Release per-submission resources (temp build artefacts etc.).
        for submission in submissions:
            submission.runnable.cleanup()
        pbar.close()
        if restricted:
            print_restrict_results(problem, results_by_author)
        elif expand:
            print_expanded_results(problem, results_by_input)
        else:
            print_aggregated_results(problem, results_by_author)
    for err in errors:
        print(err, file=sys.stderr)
    if errors:
        exit(1)
def run_submission(problem, submission, input, previous=None):
    """Run one submission on one input, time it, and cross-check its answer.

    :param previous: result of the previous submission on the same input, if any.
    :return: a Result with the answer and duration in milliseconds.
    :raises DifferentAnswersException: if the answer differs from *previous*'s.
    """
    start = datetime.datetime.now()
    answer = str(submission.runnable.run(input.content))
    end = datetime.datetime.now()
    msecs = (end - start).total_seconds() * 1000
    # A submission may self-report a more precise duration in its output header.
    answer, msecs = duration_from_answer(answer, msecs)
    if previous is not None and answer != previous.answer:
        # Fixed: the last format line contained a stray non-ASCII character
        # ("{}:ย {}") garbling the error message.
        raise DifferentAnswersException("""different answers day:{} part:{}
    input: {}
    {}: {}
    {}: {}""".format(
            problem.day,
            problem.part,
            input.path(),
            previous.submission.path(),
            previous.answer,
            submission.path(),
            answer))
    return Result(problem, submission, input, answer, msecs)
def print_results(results):
    """Print a colorized table of results, fastest first.

    NOTE: sorts *results* in place, so the caller's list is reordered.
    The current user's rows are bolded; other authors get per-column colors.
    """
    results.sort(key=lambda x: x.duration)
    print(tabulate([[
        " {color}{author}{end} ".format(
            color=(BColor.BOLD if result.submission.author == CONFIG.user else BColor.GREEN),
            author=result.submission.author,
            end=BColor.ENDC),
        " {color}{answer}{end} ".format(
            color=(BColor.BOLD if result.submission.author == CONFIG.user else BColor.BLUE),
            answer=result.answer,
            end=BColor.ENDC),
        " {color}{msecs:8.2f} ms{end}".format(
            color=BColor.BOLD,
            msecs=result.duration,
            end=BColor.ENDC),
        " {color}{language}{end}".format(
            color=(BColor.BOLD if result.submission.author == CONFIG.user else ""),
            language=result.submission.language,
            end=BColor.ENDC,
        )
    ] for result in results]))
def print_expanded_results(problem, results_by_input):
    """Print one result table per input author."""
    for input_author, submission_results in results_by_input.items():
        print("---------------------------------------------------")
        print("On input from {yellow}{author}{end}".format(
            yellow=BColor.YELLOW,
            end=BColor.ENDC,
            author=input_author))
        print("---------------------------------------------------")
        # Copy so print_results' in-place sort does not reorder the source list.
        print_results(list(submission_results))
def print_restrict_results(problem, results_by_author):
    """Print a single table of every author's results on their own inputs."""
    print("---------------------------------------------------")
    print("On own inputs")
    print("---------------------------------------------------")
    # Flatten all per-author result lists; the author key itself was never
    # used, so iterate .values() instead of .items().
    results = [result
               for author_results in results_by_author.values()
               for result in author_results]
    print_results(results)
def print_aggregated_results(problem, results_by_author):
    """Print one averaged result per (author, language) over all inputs.

    For every author, durations of all their runs are summed per language
    and divided by the run count; the displayed answer/input/submission
    are taken from the author's run on their own input.
    """
    print("---------------------------------------------------")
    print("Avg over all inputs")
    print("---------------------------------------------------")
    results = []
    # Loop for all authors, get all the results they produced
    for author, results_by_input in results_by_author.items():
        res_by_language = {}
        count_by_language = defaultdict(int)
        # The results can be made by different languages. Make a virtual
        # result (storing total duration) by language.
        for result in results_by_input:
            result_language = result.submission.language
            count_by_language[result_language] += 1
            # New language: make the virtual result (no runnable needed,
            # it only aggregates numbers).
            if result_language not in res_by_language:
                res = Result(problem, Submission(problem, author, result_language, init_runnable=False), None, "-", 0)
                res_by_language[result_language] = res
            # The author is on his own input, get his answer (split to
            # allow author.x.lang submissions on input author.txt).
            if author.split('.')[0] == result.input.author:
                res_by_language[result_language].answer = result.answer
                res_by_language[result_language].input = result.input
                res_by_language[result_language].submission = result.submission
            # Add up the duration of this result
            res_by_language[result_language].duration += result.duration
        # For each language of the author, make the average and store the
        # final result.
        for lang, res in res_by_language.items():
            if count_by_language[lang] > 0:
                res.duration /= count_by_language[lang]
            results.append(res)
    print_results(results)
def print_day_header(problem):
    """Print the coloured banner announcing which day is being run."""
    print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
    title = "Running submissions for day {:02d}:".format(problem.day)
    print("{}{}{}{}".format(BColor.RED, BColor.BOLD, title, BColor.ENDC))
def print_part_header(problem):
    """Print the coloured sub-header for the current problem part."""
    header = "* part %d:" % problem.part
    print("\n" + "".join([BColor.MAGENTA, BColor.BOLD, header, BColor.ENDC]))
def duration_from_answer(answer, msec):
    """Extract a self-reported duration from *answer*, if present.

    A submission may report its own timing by prefixing its output with a
    header line of the form ``_duration:<float>``.  When such a header
    exists and its value parses as a float, return (answer without the
    header line, reported duration).  Otherwise return the inputs
    unchanged.
    """
    prefix = "_duration:"
    header, newline, rest = answer.partition("\n")
    # No newline means no separate header line; a non-matching first line
    # is just part of the answer.
    if not newline or not header.startswith(prefix):
        return answer, msec
    try:
        reported = float(header[len(prefix):])
    except ValueError:
        return answer, msec
    return rest, reported
| StarcoderdataPython |
1725209 | import psycopg2
import os
import requests
import json
import time
import arrow
import logging
from typing import List, Tuple, Dict, Any
from collections import defaultdict
from apscheduler.schedulers.blocking import BlockingScheduler
from telegram_bot import Bot
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
level=logging.INFO)
REVERB_LISTING_API_URL = 'https://api.reverb.com/api/listings'
REVERB_API_CODE = os.getenv('REVERB_API_CODE')
PG_USER = os.getenv('PG_USER')
PG_PASS = os.getenv('PG_PASS')
PG_PORT = os.getenv('PG_PORT')
PG_HOST = os.getenv('PG_HOST')
PG_DB = os.getenv('PG_DB')
# Fail fast: without an API token every request below would be rejected.
# (The message previously named the wrong variable, REVERB_API_KEY.)
if REVERB_API_CODE is None:
    logging.error(
        'Cannot run batch import listing: no REVERB_API_CODE env found')
    exit(1)
# Common headers for every Reverb API call (versioned, bearer-authenticated).
req_headers = {
    'Accept-Version': '3.0',
    'Authorization': 'Bearer ' + str(REVERB_API_CODE),
    'Content-Type': 'application/hal+json'
}
insert_query = """
INSERT INTO reverb_guitar_data.listing (id, make, model, year, condition, price_cents, currency, offers_enabled, thumbnail, full_json) VALUES %s
"""
# Module-level DB connection shared by the scheduled job.  The password
# placeholder {<PASSWORD>} was a redaction artifact (a syntax error in the
# f-string); PG_PASS is the env value loaded above and otherwise unused.
# Autocommit avoids managing transactions around each single-row insert.
conn = psycopg2.connect(
    f'dbname={PG_DB} user={PG_USER} password={PG_PASS} port={PG_PORT} host={PG_HOST}'
)
conn.autocommit = True
cur = conn.cursor()
bot = Bot(str(os.getenv('TELEGRAM_TOKEN')))
sched = BlockingScheduler()
class Listing():
    """One Reverb listing, flattened from the API's JSON payload.

    Only the fields persisted to the database are extracted; the complete
    payload is kept in ``full_json``.
    """

    def __init__(self, values):
        # values: one element of the API's 'listings' array.  Assumes all
        # of the keys below are always present -- TODO confirm against the
        # Reverb API documentation.
        self.full_json: Dict = values
        self.id: int = int(values['id'])
        self.make: str = values['make']
        self.model: str = values['model']
        self.year: str = values['year']
        self.condition: str = values['condition']['display_name']
        self.price_cents: int = values['buyer_price']['amount_cents']
        self.currency: str = values['buyer_price']['currency']
        self.offers_enabled: bool = values['offers_enabled']
        self.created_at: arrow.Arrow = arrow.get(values['created_at'])
        # URL of the first photo's thumbnail -- assumes at least one photo
        # exists on every listing (TODO confirm).
        self.thumbnail: str = values['photos'][0]['_links']['thumbnail'][
            'href']
def get_listings_with_query(query: Dict[str, Any]
                            ) -> Tuple[List[Listing], str]:
    """Fetch the first page of listings matching *query*.

    :return: the parsed listings plus the URL of the next page.
    """
    logging.info('Getting listings with params: %s', query)
    response = requests.get(REVERB_LISTING_API_URL,
                            params=query,
                            headers=req_headers)
    payload = response.json()
    parsed = [Listing(item) for item in payload['listings']]
    next_url = payload['_links']['next']['href']
    return parsed, next_url
def get_listings_for_url(url) -> Tuple[List[Listing], str]:
    """Fetch one page of listings from an absolute pagination URL.

    :return: the parsed listings plus the URL of the next page.
    """
    response = requests.get(url, headers=req_headers)
    resp_json = response.json()
    # Sanity check that pagination has not wandered off the intended search.
    if 'Used Electric Guitars' not in resp_json['humanized_params']:
        # logging.warn is a deprecated alias for logging.warning.
        logging.warning('`humanized_params` looks wonky - %s',
                        resp_json['humanized_params'])
    return list(
        map(lambda x: Listing(x),
            resp_json['listings'])), resp_json['_links']['next']['href']
def insert_listings(cur, listings: List[Listing]
                    ) -> Tuple[List[Listing], List[Listing]]:
    """Insert *listings* one by one, tolerating already-imported rows.

    :param cur: an open psycopg2 cursor (connection must be in autocommit
        mode, so a failed statement does not poison later ones).
    :return: (inserted, duplicates) lists of Listing objects.
    """
    duplicates = []
    inserted = []
    for listing in listings:
        try:
            cur.execute(
                'INSERT INTO listing (id, make, model, year, condition, price_cents, currency, offers_enabled, created_at, thumbnail, full_json) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s);',
                (listing.id, listing.make, listing.model, listing.year,
                 listing.condition, listing.price_cents, listing.currency,
                 listing.offers_enabled, listing.created_at.datetime,
                 listing.thumbnail, json.dumps(listing.full_json)))
            inserted.append(listing)
        except psycopg2.IntegrityError as e:  # 23505 is constraint unique
            # A unique-constraint violation means this listing id was
            # already imported on a previous run; anything else is fatal.
            if e.pgcode == '23505':
                duplicates.append(listing)
                continue
            else:
                raise e
    return inserted, duplicates
def process_bot_notifications(bot: Bot, listings: List[Listing]) -> None:
    """Match new listings against the bot's search terms and send updates.

    For every configured term, collect the listings whose title, model,
    make or description contains the term (case-insensitively) and push
    one update per term with markdown links to the matches.
    """
    bot_terms = bot.get_terms()
    all_results: Dict[str, List[Listing]] = defaultdict(list)
    for listing in listings:
        # Lowercase the searchable fields once per listing instead of once
        # per (listing, term) pair.
        haystacks = (listing.full_json['title'].lower(),
                     listing.model.lower(),
                     listing.make.lower(),
                     listing.full_json['description'].lower())
        for term in bot_terms:
            lower_term = term.lower()
            if any(lower_term in haystack for haystack in haystacks):
                all_results[term].append(listing)
    # NOTE: the loop variable previously shadowed the `listings` parameter;
    # renamed to `matches` to keep the parameter intact.
    for term, matches in all_results.items():
        strings = [
            f"[{lst.full_json['title']}]({lst.full_json['_links']['web']['href']})"
            for lst in matches
        ]
        bot.send_update(term, strings)
@sched.scheduled_job('cron', minute='*/7')
def update_listings():
    """Scheduled job: pull new Reverb listings and notify subscribers.

    Pages through used electric-guitar listings, inserting unseen rows and
    sending bot notifications for them, and stops as soon as a whole page
    consists only of already-known listings.
    """
    logging.info('Running batch reverb update')
    query = {
        'page': 1,
        'per_page': 50,
        'product_type': 'electric-guitars',
        'condition': 'used',
        # 'price_max': 401,
    }
    listings, next_url = get_listings_with_query(query)
    logging.info('Processing first page')
    inserted, duplicates = insert_listings(cur, listings)
    process_bot_notifications(bot, inserted)
    if len(duplicates) > 0:
        logging.info('Found duplicates: %d', len(duplicates))
        # A full page of duplicates means we have caught up with the feed.
        if len(inserted) == 0:
            logging.info('Exiting: Found full page of duplicates')
            return
    while next_url is not None:
        # Be polite to the API between page fetches.
        time.sleep(10)
        logging.info('Processing url %s', next_url)
        listings, next_url = get_listings_for_url(next_url)
        inserted, duplicates = insert_listings(cur, listings)
        process_bot_notifications(bot, inserted)
        if len(duplicates) > 0:
            logging.info('Found duplicates: %d', len(duplicates))
            if len(inserted) == 0:
                logging.info('Exiting: Found full page of duplicates')
                return
if __name__ == '__main__':
    try:
        # Blocks forever, running the cron-style jobs registered above.
        sched.start()
    except KeyboardInterrupt:
        # KeyboardInterrupt is raised on SIGINT (Ctrl-C), not SIGTERM;
        # the old message named the wrong signal.
        logging.info('Got SIGINT! Terminating...')
| StarcoderdataPython |
6681475 | # appsite/__init__
from appsite.application import app | StarcoderdataPython |
3357448 | <reponame>EuroPOND/deformetrica
class AbstractStatisticalModel:
    """Base class for statistical models.

    A statistical model is a generative function which tries to explain an
    observed stochastic process.  Subclasses populate the effect/prior
    dictionaries and may override :meth:`clear_memory`.
    """

    ####################################################################################################################
    ### Constructor:
    ####################################################################################################################

    def __init__(self):
        """Initialise an unnamed model with empty effects and priors."""
        self.name = 'undefined'
        self.fixed_effects = {}
        self.priors = {}
        self.population_random_effects = {}
        self.individual_random_effects = {}
        self.has_maximization_procedure = None

    ####################################################################################################################
    ### Common methods, not necessarily useful for every model.
    ####################################################################################################################

    def clear_memory(self):
        """Hook for subclasses to release cached state; a no-op here."""
        pass
| StarcoderdataPython |
3519920 | <gh_stars>0
# Generated by Django 3.0.2 on 2020-02-07 12:50
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a vote counter field to Choice, defaulting to zero for
    existing rows."""

    dependencies = [
        ('Question', '0009_merge_20200206_2300'),
    ]

    operations = [
        migrations.AddField(
            model_name='choice',
            name='count',
            field=models.IntegerField(default=0),
        ),
    ]
| StarcoderdataPython |
91599 | <filename>tools/build/get_header_directory.py
#!/usr/bin/env python3
import pathlib
import sys
import os
def main():
    """Print the install directory for the current header directory.

    Expects two CLI arguments: the header install root and the absolute
    path of the header directory currently being processed.  Every path
    component after the first ``source`` component (skipping any further
    ``source`` components as well) is re-rooted under the install root
    and printed.
    """
    argv = sys.argv
    if len(argv) != 3:
        raise RuntimeError(
            f"Usage: {argv[0]} <header install directory> <absolute path of current header directory>"
        )
    anchor = 'source'  # should this name ever change, this code will be broken
    # all source code with headers must be below `anchor`
    install_base = os.path.join(*pathlib.PurePath(argv[1].strip()).parts)
    parts = pathlib.PurePath(argv[2].strip()).parts
    subpath = []
    seen_anchor = False
    for part in parts:
        if part == anchor:
            # Components named `anchor` are dropped, everything after the
            # first one is kept.
            seen_anchor = True
        elif seen_anchor:
            subpath.append(part)
    print(os.path.join(install_base, *subpath))  # stripped directory name
if __name__ == "__main__":
main()
| StarcoderdataPython |
3352436 | <gh_stars>1-10
from maya import mel
from ..abstract import afnfbx
import logging
logging.basicConfig()
log = logging.getLogger(__name__)
log.setLevel(logging.INFO)
class FnFbx(afnfbx.AFnFbx):
    """
    Overload of AFnFbx that defines function set behaviours for fbx in Maya.

    All methods drive Maya's FBX plug-in through its MEL command interface.
    NOTE(review): the format-substituted kwargs below are inserted verbatim
    into MEL, so boolean options are assumed to arrive already MEL-formatted
    (e.g. the strings 'true'/'false'), not as Python bools -- confirm with
    the callers.
    """

    __slots__ = ()

    def setMeshExportParams(self, **kwargs):
        """
        Adopts the export settings from the supplied kwargs.

        Expected keys: scale, includeBlendshapes, includeSkins,
        includeSmoothings, includeTangentsAndBinormals.

        :rtype: None
        """
        # One MEL statement per FBX option, evaluated in a single batch.
        commands = [
            'FBXExportAnimationOnly -v false;',
            'FBXExportBakeComplexAnimation -v false;',
            'FBXExportAxisConversionMethod "none";',
            'FBXExportCacheFile -v false;',
            'FBXExportCameras -v true;',
            'FBXExportConstraints -v false;',
            'FBXExportConvertUnitString "cm";',
            'FBXExportDxfTriangulate -v true;',
            'FBXExportDxfDeformation -v true;',
            'FBXExportEmbeddedTextures -v false;',
            'FBXExportFileVersion "FBX201600";',
            'FBXExportGenerateLog -v true;',
            'FBXExportHardEdges -v true;',
            'FBXExportInAscii -v false;',
            'FBXExportInputConnections -v false;',
            'FBXExportInstances -v false;',
            'FBXExportQuickSelectSetAsCache -v false;',
            'FBXExportReferencedAssetsContent -v false;',
            'FBXExportScaleFactor {scale};'.format(scale=kwargs['scale']),
            'FBXExportShapes -v {includeBlendshapes};'.format(includeBlendshapes=kwargs['includeBlendshapes']),
            'FBXExportSkeletonDefinitions -v false;',
            'FBXExportSkins -v {includeSkins};'.format(includeSkins=kwargs['includeSkins']),
            'FBXExportSmoothingGroups -v {includeSmoothings};'.format(includeSmoothings=kwargs['includeSmoothings']),
            'FBXExportSmoothMesh -v false;',
            'FBXExportSplitAnimationIntoTakes -v false;',
            'FBXExportTangents -v {includeTangentsAndBinormals};'.format(includeTangentsAndBinormals=kwargs['includeTangentsAndBinormals']),
            'FBXExportTriangulate -v true;',
            'FBXExportUpAxis "z";',
            'FBXExportUseSceneName -v false;'
        ]

        mel.eval('\n'.join(commands))

    def setAnimExportParams(self, **kwargs):
        """
        Adopts the animation settings from the supplied kwargs.

        Expected keys: startFrame, endFrame, step.

        :rtype: None
        """
        commands = [
            'FBXExportAnimationOnly -v true;',
            'FBXExportBakeComplexAnimation -v true;',
            'FBXExportBakeComplexStart {startFrame};'.format(startFrame=kwargs['startFrame']),
            'FBXExportBakeComplexEnd {endFrame};'.format(endFrame=kwargs['endFrame']),
            'FBXExportBakeComplexStep {step};'.format(step=kwargs['step']),
            'FBXExportBakeResampleAnimation -v true;',
            'FBXExportApplyConstantKeyReducer -v false;',
            'FBXExportQuaternion -v false;',
        ]

        mel.eval('\n'.join(commands))

    def exportSelection(self, filePath):
        """
        Exports the active selection to the specified file path.

        :type filePath: str
        :rtype: None
        """
        mel.eval(f'FBXExport -f {filePath} -s;')
| StarcoderdataPython |
3476052 | <reponame>sturmianseq/fparser
# Copyright (c) 2018-2019 Science and Technology Facilities Council
# All rights reserved.
# Modifications made as part of the fparser project are distributed
# under the following license:
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''Test Fortran 2003 rule R201 : This file tests the support for zero
or more program-units.
'''
import pytest
from fparser.two.utils import FortranSyntaxError
from fparser.api import get_reader
from fparser.two.Fortran2003 import Program
# Test no content or just white space. This is not officially a
# Fortran rule but fortran compilers tend to accept empty content so
# we follow their lead.
def test_empty_input(f2003_create):
    '''Test that empty input or input only containing white space can be
    parsed succesfully
    '''
    # Empty programs are not strictly valid Fortran, but compilers accept
    # them, so the parser should yield a Program node that renders as "".
    for code in ["", " ", " \n \n\n"]:
        reader = get_reader(code)
        ast = Program(reader)
        assert str(ast) == ""
# Test single program units
@pytest.mark.usefixtures("f2003_create")
def test_single():
'''Test that a single program_unit can be parsed successfully.'''
reader = get_reader('''\
subroutine test()
end subroutine
''')
ast = Program(reader)
assert "SUBROUTINE test\n" \
"END SUBROUTINE" in str(ast)
# Check that the Name of the subroutine has the correct parent
assert ast.content[0].content[0].items[1].parent is \
ast.content[0].content[0]
@pytest.mark.usefixtures("f2003_create")
def test_single_with_end_name():
'''Test that a single program_unit can be parsed successfully when it
has a name specified on the end clause.'''
reader = get_reader('''\
subroutine test()
end subroutine test
''')
ast = Program(reader)
assert "SUBROUTINE test\n" \
"END SUBROUTINE test" in str(ast)
# Check parent information has been set-up correctly
end_sub = ast.content[0].content[-1]
assert end_sub.items[1].parent is end_sub
@pytest.mark.xfail(reason="5 spaces causes the error exception to occur at "
"the end of the file")
def test_single2(f2003_create):
'''Test that a single program_unit with 5 or more spaces at the start
of the line reports an error on the correct (first) line
'''
reader = get_reader('''\
subroutin test()
end subroutine
''')
with pytest.raises(FortranSyntaxError) as excinfo:
dummy_ = Program(reader)
assert ("at line 1\n>>> subroutin test()\n"
in str(excinfo.value))
@pytest.mark.xfail(reason="5 spaces causes the error exception to occur at "
"the end of the file")
def test_single3(f2003_create):
'''Test that a single program_unit with 5 or more spaces at the start
of the line reports an error on the correct (second) line
'''
reader = get_reader('''\
subroutine test()
end subroutin
''')
with pytest.raises(FortranSyntaxError) as excinfo:
dummy_ = Program(reader)
assert ("at line 2\n>>> end subroutin\n"
in str(excinfo.value))
def test_single_error1(f2003_create):
'''Test that a single program_unit with an error in the initial
statement raises an appropriate exception
'''
reader = get_reader('''\
subroutin test()
end subroutine
''')
with pytest.raises(FortranSyntaxError) as excinfo:
dummy_ = Program(reader)
assert ("at line 1\n>>> subroutin test()\n"
in str(excinfo.value))
def test_single_error2(f2003_create):
'''Test that a single program_unit with an error in the final
statement raises an appropriate exception
'''
reader = get_reader(
"subroutine test()\n\n"
"end subroutin\n\n\n")
with pytest.raises(FortranSyntaxError) as excinfo:
dummy = Program(reader)
assert ("at line 3\n>>>end subroutin\n"
in str(excinfo.value))
# Test multiple program units
def test_multiple(f2003_create):
'''Test that multiple program_units can be parsed successfully.'''
reader = get_reader('''\
subroutine test()
end subroutine
subroutine test2()
end subroutine test2
''')
ast = Program(reader)
assert "SUBROUTINE test\n" \
"END SUBROUTINE" in str(ast)
@pytest.mark.xfail(reason="Having the same name in different program_units "
"does not raise an exception")
def test_multiple_error1(f2003_create):
'''Test that multiple program_units with the same name raise an
exception
'''
reader = get_reader('''\
subroutine test()
end subroutine
subroutine test()
end subroutine
''')
with pytest.raises(FortranSyntaxError) as excinfo:
dummy_ = Program(reader)
assert ("XXX"
in str(excinfo.value))
def test_multiple_error2(f2003_create):
'''Test that a single program_unit with an error raises an appropriate
exception
'''
reader = get_reader('''\
subroutine 1test()
end subroutine
subroutine test()
end subroutine
''')
with pytest.raises(FortranSyntaxError) as excinfo:
dummy_ = Program(reader)
assert ("at line 1\n>>> subroutine 1test()\n"
in str(excinfo.value))
def test_multiple_error3(f2003_create):
'''Test that multiple program_units with an error raises an
appropriate exception
'''
reader = get_reader('''\
subroutine test()
end subroutine
subroutine test()
end subroutin''')
with pytest.raises(FortranSyntaxError) as excinfo:
dummy_ = Program(reader)
assert ("at line 4\n>>> end subroutin\n"
in str(excinfo.value))
# Test a program unit with a missing program statement
def test_missing_prog(f2003_create):
'''Test that a main program program_unit without a program declaration
can be parsed successfully. This should not really be a test here,
but this case is currently treated separately by the match method
in Program.
'''
reader = get_reader('''\
end
''')
ast = Program(reader)
assert "END" in str(ast)
@pytest.mark.xfail(reason="Only the main program is output")
def test_missing_prog_multi(f2003_create):
'''Test that a main program program_unit without a program declaration
can be parsed successfully when it is not the first program_unit.
'''
reader = get_reader('''\
subroutine first
end
end
''')
ast = Program(reader)
assert "SUBROUTINE first\n" \
"END SUBROUTINE" in str(ast)
assert "END PROGRAM" in str(ast)
# A program should contain only only one main program
@pytest.mark.xfail(reason="Only one main program is allowed in a program")
def test_one_main1(f2003_create):
'''Test that multiple main programs raise an exception.'''
reader = get_reader('''\
program first
end
program second
end
''')
with pytest.raises(FortranSyntaxError) as excinfo:
dummy_ = Program(reader)
assert ("XXX"
in str(excinfo.value))
# Check comments are supported at this level
def test_comment0(f2003_create):
'''Test that a single program_unit without comments can be parsed
successfully with comment processing switched on.
'''
reader = get_reader((
"subroutine test()\n"
"end subroutine\n"), ignore_comments=False)
ast = Program(reader)
assert ("SUBROUTINE test\n"
"END SUBROUTINE") in str(ast)
def test_comment1(f2003_create):
'''Test that a single program_unit can be parsed successfully with
comments being ignored.'''
reader = get_reader('''\
! comment1
subroutine test()
end subroutine
! comment2
''')
ast = Program(reader)
assert "SUBROUTINE test\n" \
"END SUBROUTINE" in str(ast)
assert "! comment" not in str(ast)
def test_comment2(f2003_create):
'''Test that a single program_unit can be parsed successfully with
comments being included.'''
reader = get_reader((
"! comment1\n"
"subroutine test()\n"
"end subroutine\n"
"! comment2\n"), ignore_comments=False)
ast = Program(reader)
assert ("! comment1\n"
"SUBROUTINE test\n"
"END SUBROUTINE\n"
"! comment2") in str(ast)
def test_comment3(f2003_create):
'''Test that multiple program_units can be parsed successfully with
comments being ignored.'''
reader = get_reader('''\
! comment1
subroutine test()
end subroutine
! comment2
module example
end module
! comment3
''', ignore_comments=True)
ast = Program(reader)
assert ("SUBROUTINE test\n"
"END SUBROUTINE\n"
"MODULE example\n"
"END MODULE") in str(ast)
assert "! comment" not in str(ast)
def test_comment4(f2003_create):
'''Test that multiple program_units can be parsed successfully with
comments being included.'''
reader = get_reader((
"! comment1\n"
"subroutine test()\n"
"end subroutine\n"
"! comment2\n"
"module example\n"
"end module\n"
"! comment3\n"
), ignore_comments=False)
ast = Program(reader)
assert ("! comment1\n"
"SUBROUTINE test\n"
"END SUBROUTINE\n"
"! comment2\n"
"MODULE example\n"
"END MODULE\n"
"! comment3") in str(ast)
# Check includes are supported at this level
def test_include0(f2003_create):
'''Test that a single program_unit with includes can be parsed
succesfully.
'''
reader = get_reader((
"include '1'\n"
"subroutine test()\n"
"end subroutine\n"
"include '2'\n"))
ast = Program(reader)
assert ("INCLUDE '1'\n"
"SUBROUTINE test\n"
"END SUBROUTINE\n"
"INCLUDE '2'") in str(ast)
def test_include1(f2003_create):
'''Test that multiple program_units with includes can be parsed
successfully.
'''
reader = get_reader(
"include '1'\n"
"subroutine test()\n"
"end subroutine\n"
"include '2'\n"
"module example\n"
"end module\n"
"include '3'\n", ignore_comments=True)
ast = Program(reader)
assert ("INCLUDE '1'\n"
"SUBROUTINE test\n"
"END SUBROUTINE\n"
"INCLUDE '2'\n"
"MODULE example\n"
"END MODULE\n"
"INCLUDE '3'") in str(ast)
assert "! comment" not in str(ast)
# Check a mix of includes and comments are supported at this level
def test_mix(f2003_create):
'''Test that multiple program_units can be parsed successfully with a
mix of includes and comments.
'''
reader = get_reader((
"include '1'\n"
"! comment1\n"
"include '2'\n"
"subroutine test()\n"
"end subroutine\n"
"include '3'\n"
"include '4'\n"
"! comment2\n"
"! comment3\n"
"module example\n"
"end module\n"
"! comment4\n"
"include '5'\n"
"! comment5\n"
), ignore_comments=False)
ast = Program(reader)
assert ("INCLUDE '1'\n"
"! comment1\n"
"INCLUDE '2'\n"
"SUBROUTINE test\n"
"END SUBROUTINE\n"
"INCLUDE '3'\n"
"INCLUDE '4'\n"
"! comment2\n"
"! comment3\n"
"MODULE example\n"
"END MODULE\n"
"! comment4\n"
"INCLUDE '5'\n"
"! comment5") in str(ast)
| StarcoderdataPython |
1752185 | import sys
from scrapy import cmdline
cmdline.execute(["scrapy"] + sys.argv[1:]) | StarcoderdataPython |
11353973 | import os
from ..logging import get_logger
import yaml
from ._exceptions import ProjectDirectoryNotSet, ProjectDirectoryNotExists
from typing import List
from ._environment import Environment
class Project:
    """In-memory representation of a project file: an API version plus the
    environments it declares."""

    def __init__(self, apiVersion: str, environments: List[Environment]) -> None:
        """Store the declared API version and environment list verbatim."""
        self.apiVersion = apiVersion
        self.environments = environments
class ProjectDeprecated:
    """Legacy project loader: resolves the project directory and reads
    ``project.yml`` from it.  Superseded by :class:`Project`."""

    def __init__(self, project_path: str, project_path_is_variable: bool):
        """Resolve the project directory and load its project.yml.

        :param project_path: a directory path, or the name of an
            environment variable holding one.
        :param project_path_is_variable: when True, *project_path* names an
            environment variable rather than a path.
        """
        logger = get_logger(__name__)
        logger.info(
            f"building project path={project_path}, path_is_variable={project_path_is_variable}"
        )
        self.directory = self._get_source_directory(
            project_path, project_path_is_variable
        )
        self.project_file_path = os.path.join(self.directory, "project.yml")
        project_dict = self._load_project()
        logger.info(project_dict)

    def _get_source_directory(self, project_path: str, project_path_is_variable: bool):
        """Validate and return the absolute project directory.

        Accepts either a valid path or the name of an environment variable
        that holds the path.

        :raises ProjectDirectoryNotSet: when no usable value is provided.
        :raises ProjectDirectoryNotExists: when the path does not exist.
        """
        if not project_path:
            raise ProjectDirectoryNotSet()
        if project_path_is_variable:
            # The given value names an environment variable to dereference.
            directory = os.getenv(project_path)
        else:
            directory = project_path
        if not directory:
            raise ProjectDirectoryNotSet()
        if directory and not os.path.exists(directory):
            raise ProjectDirectoryNotExists(directory)
        directory = os.path.abspath(directory)
        return directory

    def _load_project(self):
        """Parse project.yml and return its contents as a dict."""
        with open(self.project_file_path, "r") as f:
            project_dict = yaml.safe_load(f)
        return project_dict
| StarcoderdataPython |
6634113 | <reponame>dasepli/fastNLP
r"""undocumented"""
__all__ = [
"CoReferencePipe"
]
import collections
import numpy as np
from fastNLP.core.vocabulary import Vocabulary
from .pipe import Pipe
from ..data_bundle import DataBundle
from ..loader.coreference import CoReferenceLoader
from ...core.const import Const
class CoReferencePipe(Pipe):
    """Pipe that prepares coreference-resolution data.

    Produces, per instance: the article genre, speaker ids, the raw
    sentences, their word-index matrix, their character-index tensor, the
    sentence lengths and the target clusters:

    .. csv-table::
        :header: "words1", "words2","words3","words4","chars","seq_len","target"

        "bc", "[[0,0],[1,1]]","[['I','am'],[]]","[[1,2],[]]","[[[1],[2,3]],[]]","[2,3]","[[[2,3],[6,7]],[[10,12],[20,22]]]"
        "[...]", "[...]","[...]","[...]","[...]","[...]","[...]"
    """

    def __init__(self, config):
        super().__init__()
        # config supplies char_path, max_sentences and the CNN filter sizes.
        self.config = config

    def process(self, data_bundle: DataBundle):
        """Turn the loaded raw fields into model-ready inputs and targets.

        The raw data contains raw_key, raw_speaker, raw_words and
        raw_clusters:

        .. csv-table::
            :header: "raw_key", "raw_speaker","raw_words","raw_clusters"

            "bc/cctv/00/cctv_0000_0", "[[Speaker#1, Speaker#1],[]]","[['I','am'],[]]","[[[2,3],[6,7]],[[10,12],[20,22]]]"
            "[...]", "[...]","[...]","[...]"

        :param data_bundle: bundle produced by :class:`CoReferenceLoader`.
        :return: the same bundle with input/target fields populated.
        """
        # Fixed genre vocabulary: the two-letter prefix of the document key.
        genres = {g: i for i, g in enumerate(["bc", "bn", "mz", "nw", "pt", "tc", "wb"])}

        vocab = Vocabulary().from_dataset(*data_bundle.datasets.values(), field_name=Const.RAW_WORDS(3))
        vocab.build_vocab()
        word2id = vocab.word2idx
        data_bundle.set_vocab(vocab, Const.INPUTS(0))
        if self.config.char_path:
            char_dict = get_char_dict(self.config.char_path)
        else:
            # No char vocabulary file: derive one from the word vocabulary,
            # skipping the first two entries (special pad/unk tokens).
            char_set = set()
            for i, w in enumerate(word2id):
                if i < 2:
                    continue
                for c in w:
                    char_set.add(c)

            char_dict = collections.defaultdict(int)
            char_dict.update({c: i for i, c in enumerate(char_set)})

        for name, ds in data_bundle.datasets.items():
            # genre
            ds.apply(lambda x: genres[x[Const.RAW_WORDS(0)][:2]], new_field_name=Const.INPUTS(0))

            # speaker_ids_np
            ds.apply(lambda x: speaker2numpy(x[Const.RAW_WORDS(1)], self.config.max_sentences, is_train=name == 'train'),
                     new_field_name=Const.INPUTS(1))

            # sentences
            ds.rename_field(Const.RAW_WORDS(3), Const.INPUTS(2))

            # doc_np
            ds.apply(lambda x: doc2numpy(x[Const.INPUTS(2)], word2id, char_dict, max(self.config.filter),
                                         self.config.max_sentences, is_train=name == 'train')[0],
                     new_field_name=Const.INPUTS(3))
            # char_index
            ds.apply(lambda x: doc2numpy(x[Const.INPUTS(2)], word2id, char_dict, max(self.config.filter),
                                         self.config.max_sentences, is_train=name == 'train')[1],
                     new_field_name=Const.CHAR_INPUT)
            # seq len
            ds.apply(lambda x: doc2numpy(x[Const.INPUTS(2)], word2id, char_dict, max(self.config.filter),
                                         self.config.max_sentences, is_train=name == 'train')[2],
                     new_field_name=Const.INPUT_LEN)

            # clusters
            ds.rename_field(Const.RAW_WORDS(2), Const.TARGET)

            ds.set_ignore_type(Const.TARGET)
            ds.set_padder(Const.TARGET, None)
            ds.set_input(Const.INPUTS(0), Const.INPUTS(1), Const.INPUTS(2), Const.INPUTS(3), Const.CHAR_INPUT, Const.INPUT_LEN)
            ds.set_target(Const.TARGET)

        return data_bundle

    def process_from_file(self, paths):
        '''
        Load raw data from *paths* and run :meth:`process` on it.

        example:
            bundle = CoReferencePipe(config).process_from_file({'train': config.train_path, 'dev': config.dev_path,
                                                                'test': config.test_path})
        '''
        bundle = CoReferenceLoader().load(paths)
        return self.process(bundle)
# helper
def doc2numpy(doc, word2id, chardict, max_filter, max_sentences, is_train):
    """Vectorise a document into padded word-id / char-id arrays.

    :return: (word-id matrix padded with zeros, char-index array,
        per-sentence lengths).
    """
    docvec, char_index, length, max_len = _doc2vec(
        doc, word2id, chardict, max_filter, max_sentences, is_train)
    # Internal consistency checks between the two representations.
    assert max(length) == max_len
    assert char_index.shape[0] == len(length)
    assert char_index.shape[1] == max_len
    # Right-pad every sentence's word ids with zeros up to max_len.
    doc_np = np.zeros((len(docvec), max_len), int)
    for row, sent_ids in enumerate(docvec):
        doc_np[row, :len(sent_ids)] = sent_ids
    return doc_np, char_index, length
def _doc2vec(doc, word2id, char_dict, max_filter, max_sentences, is_train):
    """Convert a document (list of tokenised sentences) to id sequences.

    :return: (word-id lists per sentence, char-index array of shape
        (sentences, max_len, max_word_length), per-sentence lengths,
        max_len).

    NOTE(review): *max_filter* is unused in this function even though the
    callers pass the largest CNN filter size -- confirm whether the char
    padding was meant to depend on it.
    """
    max_len = 0
    max_word_length = 0
    docvex = []
    length = []
    if is_train:
        # Long documents are truncated during training only.
        sent_num = min(max_sentences, len(doc))
    else:
        sent_num = len(doc)

    for i in range(sent_num):
        sent = doc[i]
        length.append(len(sent))
        if (len(sent) > max_len):
            max_len = len(sent)
        sent_vec = []
        for j, word in enumerate(sent):
            if len(word) > max_word_length:
                max_word_length = len(word)
            # Out-of-vocabulary words map to the "UNK" id.
            if word in word2id:
                sent_vec.append(word2id[word])
            else:
                sent_vec.append(word2id["UNK"])
        docvex.append(sent_vec)

    # Zero-padded character indices for every word of every sentence.
    char_index = np.zeros((sent_num, max_len, max_word_length), dtype=int)
    for i in range(sent_num):
        sent = doc[i]
        for j, word in enumerate(sent):
            char_index[i, j, :len(word)] = [char_dict[c] for c in word]

    return docvex, char_index, length, max_len
def speaker2numpy(speakers_raw, max_sentences, is_train):
    """Map per-sentence speaker labels to a flat array of integer ids.

    :param speakers_raw: list (one entry per sentence) of speaker labels.
    :param max_sentences: cap on sentences kept during training.
    :param is_train: when True, truncate to *max_sentences* first.
    :return: 1-D numpy array with one speaker id per token.

    Ids are assigned in order of first appearance, which (unlike the
    previous ``set``-based numbering) is deterministic across runs under
    string-hash randomisation and therefore reproducible.
    """
    if is_train and len(speakers_raw) > max_sentences:
        speakers_raw = speakers_raw[0:max_sentences]
    speakers = [s for sentence in speakers_raw for s in sentence]
    # dict preserves insertion order, giving first-appearance numbering.
    speaker_dict = {s: i for i, s in enumerate(dict.fromkeys(speakers))}
    return np.array([speaker_dict[s] for s in speakers])
# flatten one level of nesting
def flatten(l):
    """Concatenate the sub-lists of *l* into a single flat list."""
    flat = []
    for sub in l:
        flat.extend(sub)
    return flat
def get_char_dict(path):
    """Load a character vocabulary file into a defaultdict index.

    Index 0 is reserved for "<UNK>"; characters absent from the file also
    map to 0 because the returned mapping is a defaultdict(int).
    """
    vocab = ["<UNK>"]
    with open(path) as handle:
        for line in handle:
            vocab.append(line.strip())
    char_dict = collections.defaultdict(int)
    for index, char in enumerate(vocab):
        char_dict[char] = index
    return char_dict
| StarcoderdataPython |
4810697 | <gh_stars>0
"Functions implementing widget editing"
import re, html, json
from ... import skilift
from ....skilift import fromjson, editsection, editpage, editwidget, versions
from .. import utils
from ... import FailPage, ValidateError, ServerError, GoTo
from ....ski.project_class_definition import SectionData
# a search for anything none-alphanumeric and not an underscore
_AN = re.compile('[^\w]')
def _field_name(widget, field_argument):
"Returns a field name"
if "set_names" not in widget:
return field_argument
name_dict = widget["set_names"]
if field_argument in name_dict:
return name_dict[field_argument]
return field_argument
def _field_value(widget, field_argument):
"Returns value,string value"
value = widget["fields"][field_argument]
if value is None:
field_value = ''
elif isinstance(value, list):
if value:
field_value = ','.join(str(val) for val in value)
else:
field_value = ''
else:
field_value = str(value)
return value, field_value
def _field_ref(widgetdescription, field_argument):
"Returns a field textblock reference string"
if field_argument == 'show':
return 'widgets.show'
elif field_argument == 'widget_class':
return 'widgets.widget_class'
elif field_argument == 'widget_style':
return 'widgets.widget_style'
elif field_argument == 'show_error':
return 'widgets.show_error'
elif field_argument == 'clear_error':
return 'widgets.clear_error'
else:
return ".".join(("widgets", widgetdescription.modulename, widgetdescription.classname, field_argument))
def retrieve_widget(skicall):
    """Fill in the 'edit a widget' admin page.

    Identifies the widget to edit from call_data (left navigation link,
    explicit widget_name, or a part_tuple from the dom table), reads its
    description and current field values via editwidget, then populates
    the page tables listing single-value, list, table and dictionary
    fields.  Raises FailPage if the widget cannot be identified or read.
    """
    call_data = skicall.call_data
    pd = call_data['pagedata']
    sd = SectionData("adminhead")
    # get the widget name
    if ("left_nav","navbuttons","nav_links") in call_data:
        # should be submitted as widgetname from left navigation links
        widget_name = call_data["left_nav","navbuttons","nav_links"]
    elif 'widget_name' in call_data:
        widget_name = call_data['widget_name']
    elif 'part_tuple' in call_data:
        # called from dom table, via responder that finds what is being edited
        # and has set it into part_tuple
        part_tuple = call_data['part_tuple']
        widget_name = part_tuple.name
    else:
        raise FailPage(message="Invalid widget")
    if not widget_name:
        raise FailPage(message="Invalid widget")
    # and this is the widget to be edited, it is now set into session data
    call_data['widget_name'] = widget_name
    # Fill in header
    sd["page_head","large_text"] = "Widget " + widget_name
    pd.update(sd)
    project = call_data['editedprojname']
    section_name = None
    pagenumber = None
    if 'section_name' in call_data:
        section_name = call_data['section_name']
    elif 'page_number' in call_data:
        pagenumber = call_data['page_number']
    else:
        raise FailPage(message="No section or page given")
    try:
        # the widget lives either in a section or a page; schange/pchange
        # are session change identifiers passed to the skilift api
        if section_name:
            widgetdescription = editwidget.section_widget_description(project, section_name, call_data['schange'], widget_name)
            widget = editwidget.section_widget(project, section_name, call_data['schange'], widget_name)
        else:
            widgetdescription = editwidget.page_widget_description(project, pagenumber, call_data['pchange'], widget_name)
            widget = editwidget.page_widget(project, pagenumber, call_data['pchange'], widget_name)
    except ServerError as e:
        raise FailPage(e.message)
    pd['widget_type','para_text'] = "This widget is of type %s.%s." % (widgetdescription.modulename, widgetdescription.classname)
    pd['widget_textblock','textblock_ref'] = ".".join(("widgets", widgetdescription.modulename, widgetdescription.classname))
    pd['widget_name','input_text'] = widget_name
    pd['widget_brief','input_text'] = widgetdescription.brief
    # widgetdescription.fields_single is a list of namedtuples, each inner namedtuple representing a field
    # with items ['field_arg', 'field_type', 'valdt', 'jsonset', 'cssclass', 'cssstyle']
    args = widgetdescription.fields_single
    arg_list = widgetdescription.fields_list
    arg_table = widgetdescription.fields_table
    arg_dict = widgetdescription.fields_dictionary
    if arg_list or arg_table or arg_dict:
        pd['args_multi','show'] = True
    else:
        pd['args_multi','show'] = False
    # args is shown on a LinkTextBlockTable2
    # contents row is
    # col 0 is the visible text to place in the link,
    # col 1 is the get field of the link
    # col 2 is the second get field of the link
    # col 3 is text appearing in the second table column
    # col 4 is the reference string of a textblock to appear the third table column
    # col 5 is text to appear if the reference cannot be found in the database
    # col 6 normally empty string, if set to text it will replace the textblock
    args_valdt = False
    args_content = []
    if args:
        for arg in args:
            name = _field_name(widget, arg.field_arg)
            ref = _field_ref(widgetdescription, arg.field_arg)
            if arg.valdt:
                # a leading asterisk marks fields that accept validators
                name = "* " + name
                args_valdt = True
            # field value
            value,field_value = _field_value(widget, arg.field_arg)
            # truncate long values for table display
            if len(field_value) > 20:
                field_value = field_value[:18]
                field_value += '...'
            arg_row = [ name, arg.field_arg, '',field_value, ref, 'No description for %s' % (ref,), '']
            args_content.append(arg_row)
        pd['args','link_table'] = args_content
    else:
        pd['args','show'] = False
        pd['args_description','show'] = False
    # arg_list, arg_table and arg_dict are shown on LinkTextBlockTable widgets
    # contents row is
    # col 0 is the visible text to place in the link,
    # col 1 is the get field of the link
    # col 2 is the second get field of the link
    # col 3 is the reference string of a textblock to appear in the column adjacent to the link
    # col 4 is text to appear if the reference cannot be found in the database
    # col 5 normally empty string, if set to text it will replace the textblock
    arg_list_content = []
    if arg_list:
        for arg in arg_list:
            name = _field_name(widget, arg.field_arg)
            ref = _field_ref(widgetdescription, arg.field_arg)
            if arg.valdt:
                name = "* " + name
                args_valdt = True
            arg_row = [ name, arg.field_arg, '', ref, 'No description for %s' % (ref,), '']
            arg_list_content.append(arg_row)
        pd['arg_list','link_table'] = arg_list_content
    else:
        pd['arg_list','show'] = False
        pd['arg_list_description','show'] = False
    arg_table_content = []
    if arg_table:
        for arg in arg_table:
            name = _field_name(widget, arg.field_arg)
            ref = _field_ref(widgetdescription, arg.field_arg)
            if arg.valdt:
                name = "* " + name
                args_valdt = True
            arg_row = [ name, arg.field_arg, '', ref, 'No description for %s' % (ref,), '']
            arg_table_content.append(arg_row)
        pd['arg_table','link_table'] = arg_table_content
    else:
        pd['arg_table','show'] = False
        pd['arg_table_description','show'] = False
    arg_dict_content = []
    if arg_dict:
        for arg in arg_dict:
            name = _field_name(widget, arg.field_arg)
            ref = _field_ref(widgetdescription, arg.field_arg)
            if arg.valdt:
                name = "* " + name
                args_valdt = True
            arg_row = [ name, arg.field_arg, '', ref, 'No description for %s' % (ref,), '']
            arg_dict_content.append(arg_row)
        pd['arg_dict','link_table'] = arg_dict_content
    else:
        pd['arg_dict','show'] = False
        pd['arg_dict_description','show'] = False
    # show the "* marks validated fields" note only if any field had valdt set
    pd['args_valdt','show'] = args_valdt
    # display the widget html
    pd['widget_code','pre_text'] = widgetdescription.illustration
    if widgetdescription.containers:
        pd['containerdesc','show'] = True
    # remove any unwanted fields from session call_data
    if 'container' in call_data:
        del call_data['container']
    if 'location' in call_data:
        del call_data['location']
    if 'part' in call_data:
        del call_data['part']
    if 'field_arg' in call_data:
        del call_data['field_arg']
    if 'validx' in call_data:
        del call_data['validx']
def set_widget_params(skicall):
    """Set a widget's name or its brief description.

    Expects either 'new_widget_name' or 'widget_brief' in call_data and
    applies the change to the widget in the current section or page via
    editwidget, updating the session change identifier (schange/pchange)
    and setting a status message.  Raises FailPage on missing data or
    ServerError from the skilift api.
    """
    call_data = skicall.call_data
    project = call_data['editedprojname']
    section_name = None
    pagenumber = None
    if 'section_name' in call_data:
        section_name = call_data['section_name']
    elif 'page_number' in call_data:
        pagenumber = call_data['page_number']
    else:
        raise FailPage(message="No section or page given")
    if 'widget_name' in call_data:
        widget_name = call_data['widget_name']
    else:
        raise FailPage(message="Widget not identified")
    new_name = None
    brief = None
    # NOTE(review): if 'new_widget_name' is present but empty, new_name is
    # falsy, so the else branches below call the brief-setting api with
    # brief=None — confirm this is the intended behaviour
    if 'new_widget_name' in call_data:
        new_name = call_data['new_widget_name']
    elif 'widget_brief' in call_data:
        brief = call_data['widget_brief']
    else:
        raise FailPage(message="No new name or brief given")
    try:
        if section_name:
            if new_name:
                call_data['schange'] = editwidget.rename_section_widget(project, section_name, call_data['schange'], widget_name, new_name)
                call_data['status'] = "Widget name changed"
                # keep the session pointing at the renamed widget
                call_data['widget_name'] = new_name
            else:
                call_data['schange'] = editwidget.new_brief_in_section_widget(project, section_name, call_data['schange'], widget_name, brief)
                call_data['status'] = "Widget brief changed"
        else:
            if new_name:
                call_data['pchange'] = editwidget.rename_page_widget(project, pagenumber, call_data['pchange'], widget_name, new_name)
                call_data['status'] = "Widget name changed"
                call_data['widget_name'] = new_name
            else:
                call_data['pchange'] = editwidget.new_brief_in_page_widget(project, pagenumber, call_data['pchange'], widget_name, brief)
                call_data['status'] = "Widget brief changed"
    except ServerError as e:
        raise FailPage(e.message)
def retrieve_editfield(skicall):
    """Fill in the 'edit a widget field' admin page.

    Locates the widget and the field (field_arg) from call_data, works
    out which category the field belongs to (single value, list, table
    or dictionary), and populates the page: field name/value inputs,
    JSON-enabled flag, css default button, descriptive textblocks and —
    for validated fields — the validator table.  Raises FailPage if the
    widget or field cannot be identified.
    """
    call_data = skicall.call_data
    pd = call_data['pagedata']
    sd = SectionData("adminhead")
    project = call_data['editedprojname']
    section_name = None
    pagenumber = None
    if 'section_name' in call_data:
        section_name = call_data['section_name']
    elif 'page_number' in call_data:
        pagenumber = call_data['page_number']
    else:
        raise FailPage(message="No section or page given")
    if 'widget_name' in call_data:
        widget_name = call_data['widget_name']
    else:
        raise FailPage(message="Widget not identified")
    if 'field_arg' in call_data:
        field_arg = call_data['field_arg']
    else:
        raise FailPage("Field not identified")
    try:
        if section_name:
            widgetdescription = editwidget.section_widget_description(project, section_name, call_data['schange'], widget_name)
            widget = editwidget.section_widget(project, section_name, call_data['schange'], widget_name)
        else:
            widgetdescription = editwidget.page_widget_description(project, pagenumber, call_data['pchange'], widget_name)
            widget = editwidget.page_widget(project, pagenumber, call_data['pchange'], widget_name)
    except ServerError as e:
        raise FailPage(e.message)
    pd['widget_type','para_text'] = "Widget type : %s.%s" % (widgetdescription.modulename, widgetdescription.classname)
    pd['widget_name','para_text'] = "Widget name : %s" % (widget_name,)
    pd['field_type','para_text'] = "Field type : %s" % (field_arg,)
    # widgetdescription.fields_single is a list of namedtuples, each inner namedtuple representing a field
    # with items ['field_arg', 'field_type', 'valdt', 'jsonset', 'cssclass', 'cssstyle']
    # create dictionaries of {field_arg : namedtuples }
    fields_single = { arg.field_arg:arg for arg in widgetdescription.fields_single }
    fields_list = { arg.field_arg:arg for arg in widgetdescription.fields_list }
    fields_table = { arg.field_arg:arg for arg in widgetdescription.fields_table }
    fields_dictionary = { arg.field_arg:arg for arg in widgetdescription.fields_dictionary }
    # find the namedtuple describing this particular field
    if field_arg in fields_single:
        field_datalist = fields_single[field_arg]
    elif field_arg in fields_list:
        field_datalist = fields_list[field_arg]
    elif field_arg in fields_table:
        field_datalist = fields_table[field_arg]
    elif field_arg in fields_dictionary:
        field_datalist = fields_dictionary[field_arg]
    else:
        raise FailPage("Field not identified")
    if field_datalist.jsonset:
        pd['json_enabled','para_text'] = "JSON Enabled : Yes"
    else:
        pd['json_enabled','para_text'] = "JSON Enabled : No"
    # single-value css fields may have a project-wide default value
    if field_arg in fields_single:
        if field_datalist.cssclass or field_datalist.cssstyle:
            default_value = skilift.fromjson.get_widget_default_field_value(project, widgetdescription.modulename, widgetdescription.classname, field_arg)
            if default_value:
                pd['field_default','para_text'] = "Default value : " + default_value
                pd['field_default','show'] = True
    field_name = _field_name(widget, field_arg)
    sd["page_head","large_text"] = "(\'%s\',\'%s\')" % (widget_name, field_name)
    pd.update(sd)
    pd['show_field_name','para_text'] = "Field name : %s" % (field_name,)
    value, field_value = _field_value(widget, field_arg)
    # show the textblock description with .full, or if it doesnt exist, without the .full
    ref = _field_ref(widgetdescription, field_arg)
    full_textref = ref + '.full' # the field reference string
    adminaccesstextblocks = skilift.get_accesstextblocks(skicall.project)
    if adminaccesstextblocks.textref_exists(full_textref):
        pd['widget_field_textblock','textblock_ref'] = full_textref
    else:
        pd['widget_field_textblock','textblock_ref'] = ref
    pd['field_name','input_text'] = field_name
    # substitution strings inserted into the description textblocks
    replace_strings = [widget_name+'\",\"'+field_name]
    if field_arg in fields_single:
        # boolean fields get a radio control, everything else a text input
        if field_datalist.field_type == 'boolean':
            pd["field_submit",'show'] = True
            pd["boolean_field_value", "radio_checked"] = value
        else:
            pd["field_value",'show'] = True
            pd["field_value",'input_text'] = field_value
        if field_datalist.cssclass or field_datalist.cssstyle:
            # add button to set given css class or style to defaults.json
            pd["css_default_desc",'show'] = True
            pd["set_field_default",'show'] = True
        else:
            pd["css_default_desc",'show'] = False
            pd["set_field_default",'show'] = False
        pd["show_field_value",'show'] = True
        pd["show_field_value",'para_text'] = "Field value : %s" % (field_value,)
        pd["widget_args_desc",'show'] = True
        pd["widget_args_desc",'replace_strings'] = replace_strings
    elif field_arg in fields_list:
        pd["widget_arg_list_desc",'show'] = True
        pd["widget_arg_list_desc",'replace_strings'] = replace_strings
        pd["css_default_desc",'show'] = False
        pd["set_field_default",'show'] = False
    elif field_arg in fields_table:
        pd["widget_arg_table_desc",'show'] = True
        pd["widget_arg_table_desc",'replace_strings'] = replace_strings
        pd["css_default_desc",'show'] = False
        pd["set_field_default",'show'] = False
    elif field_arg in fields_dictionary:
        pd["widget_arg_dict_desc",'show'] = True
        pd["widget_arg_dict_desc",'replace_strings'] = replace_strings
        pd["css_default_desc",'show'] = False
        pd["set_field_default",'show'] = False
    # Show validators
    if field_datalist.valdt:
        pd["validators_desc",'show'] = True
        pd["validators_desc2",'show'] = True
        pd["add_validator",'show'] = True
        # create the contents for the validator_table
        contents = []
        if ("validators" in widget) and (field_arg in widget["validators"]):
            val_list = widget["validators"][field_arg]
            pd["validator_table","show"] = True
            max_validator_index = len(val_list) - 1
            for index,validator in enumerate(val_list):
                if index:
                    up = True
                else:
                    # first item (index zero) has no up button
                    up = False
                if index < max_validator_index:
                    down = True
                else:
                    # last item has no down button
                    down = False
                table_pos = str(index)
                contents.append([validator['class'], table_pos, table_pos, table_pos, table_pos, True, up, down, True])
            pd["validator_table","contents"] = contents
    # set field_arg into session call_data
    call_data['field_arg'] = field_arg
    if 'validx' in call_data:
        del call_data['validx']
def set_field_name(skicall):
    """Set a widget field name from the submitted session data."""
    call_data = skicall.call_data
    project = call_data['editedprojname']
    # the edited item is either in a section or a page
    if 'section_name' in call_data:
        section_name = call_data['section_name']
        pagenumber = None
    elif 'page_number' in call_data:
        pagenumber = call_data['page_number']
        section_name = None
    else:
        raise FailPage(message="No section or page given")
    try:
        widget_name = call_data['widget_name']
    except KeyError:
        raise FailPage(message="Widget not identified")
    try:
        field_arg = call_data['field_arg']
    except KeyError:
        raise FailPage("Field not identified")
    try:
        field_name = call_data['field_name']
    except KeyError:
        raise FailPage("New field name not identified")
    # apply the change and record the new change identifier in the session
    try:
        if section_name:
            call_data['schange'] = editwidget.set_widget_field_name_in_section(project, section_name, call_data['schange'], widget_name, field_arg, field_name)
        else:
            call_data['pchange'] = editwidget.set_widget_field_name_in_page(project, pagenumber, call_data['pchange'], widget_name, field_arg, field_name)
    except ServerError as e:
        raise FailPage(e.message)
    call_data['status'] = "Field name changed"
def set_field_value(skicall):
    """Set a widget field value from the submitted session data."""
    call_data = skicall.call_data
    project = call_data['editedprojname']
    # the edited item is either in a section or a page
    if 'section_name' in call_data:
        section_name = call_data['section_name']
        pagenumber = None
    elif 'page_number' in call_data:
        pagenumber = call_data['page_number']
        section_name = None
    else:
        raise FailPage(message="No section or page given")
    try:
        widget_name = call_data['widget_name']
    except KeyError:
        raise FailPage(message="Widget not identified")
    try:
        field_arg = call_data['field_arg']
    except KeyError:
        raise FailPage("Field not identified")
    try:
        field_value = call_data['field_value']
    except KeyError:
        raise FailPage("New field value not identified")
    # apply the change and record the new change identifier in the session
    try:
        if section_name:
            call_data['schange'] = editwidget.set_widget_field_value_in_section(project, section_name, call_data['schange'], widget_name, field_arg, field_value)
        else:
            call_data['pchange'] = editwidget.set_widget_field_value_in_page(project, pagenumber, call_data['pchange'], widget_name, field_arg, field_value)
    except ServerError as e:
        raise FailPage(e.message)
    call_data['status'] = "Field value changed"
def set_field_default(skicall):
    """Save a widget field's current value as the project default.

    Only single-value fields flagged as cssclass or cssstyle may have a
    default; the current value is written to the project's defaults.json
    via fromjson.save_widget_default_field_value.  An empty value removes
    the default.  Raises FailPage on missing data or failure to save.
    """
    call_data = skicall.call_data
    project = call_data['editedprojname']
    section_name = None
    pagenumber = None
    if 'section_name' in call_data:
        section_name = call_data['section_name']
    elif 'page_number' in call_data:
        pagenumber = call_data['page_number']
    else:
        raise FailPage(message="No section or page given")
    if 'widget_name' in call_data:
        widget_name = call_data['widget_name']
    else:
        raise FailPage(message="Widget not identified")
    if 'field_arg' in call_data:
        field_arg = call_data['field_arg']
    else:
        raise FailPage("Field not identified")
    try:
        if section_name:
            widgetdescription = editwidget.section_widget_description(project, section_name, call_data['schange'], widget_name)
            widget = editwidget.section_widget(project, section_name, call_data['schange'], widget_name)
        else:
            widgetdescription = editwidget.page_widget_description(project, pagenumber, call_data['pchange'], widget_name)
            widget = editwidget.page_widget(project, pagenumber, call_data['pchange'], widget_name)
    except ServerError as e:
        raise FailPage(e.message)
    # defaults only apply to single-value fields
    fields_single = { arg.field_arg:arg for arg in widgetdescription.fields_single }
    if field_arg not in fields_single:
        raise FailPage("Cannot set a default value on this field")
    field_info = fields_single[field_arg]
    value, str_value = _field_value(widget, field_arg)
    if field_info.cssclass or field_info.cssstyle:
        # set the default value
        result = fromjson.save_widget_default_field_value(project,
                                                          widgetdescription.modulename,
                                                          widgetdescription.classname,
                                                          field_arg,
                                                          str_value)
        if result:
            if str_value:
                call_data['status'] = "Field default value set to %s" % (str_value,)
            else:
                call_data['status'] = "Field default value removed"
            return
    # either the field is not css class/style, or the save failed
    raise FailPage("Unable to set default")
def retrieve_container(skicall):
    """Fill in the 'edit a widget container' admin page.

    Identifies the widget and container number from call_data (either a
    left-nav link submitted as "widgetname-containernumber" or explicit
    widget_name/container values), stores them in the session, and
    populates the container dom table.  If the container is empty it
    redirects (GoTo) to the empty-container page instead.
    """
    call_data = skicall.call_data
    pd = call_data['pagedata']
    sd_adminhead = SectionData("adminhead")
    sd_widgetinserts = SectionData("widgetinserts")
    # remove any unwanted fields from session call_data
    if 'location' in call_data:
        del call_data['location']
    if 'part' in call_data:
        del call_data['part']
    # get data
    if ("left_nav","navbuttons","nav_links") in call_data:
        # should be submitted as widgetname-containernumber
        widget_container = call_data["left_nav","navbuttons","nav_links"].split("-")
        if len(widget_container) != 2:
            raise FailPage(message="Invalid container")
        widget_name = widget_container[0]
        try:
            container = int(widget_container[1])
        except Exception:
            raise FailPage(message="Invalid container")
    elif ('widget_name' in call_data) and ('container' in call_data):
        widget_name = call_data['widget_name']
        container = call_data['container']
    else:
        raise FailPage(message="Invalid container")
    # and this is the container to be edited, it is now set into session data
    call_data['widget_name'] = widget_name
    call_data['container'] = container
    project = call_data['editedprojname']
    section_name = None
    pagenumber = None
    if 'section_name' in call_data:
        section_name = call_data['section_name']
    elif 'page_number' in call_data:
        pagenumber = call_data['page_number']
    else:
        raise FailPage(message="No section or page given")
    try:
        if section_name:
            widgetdescription = editwidget.section_widget_description(project, section_name, call_data['schange'], widget_name)
            containerinfo = editwidget.container_in_section(project, section_name, call_data['schange'], widget_name, container)
            # going into a section, so cannot add sections
            sd_widgetinserts["insert_section", "show"] = False
        else:
            widgetdescription = editwidget.page_widget_description(project, pagenumber, call_data['pchange'], widget_name)
            containerinfo = editwidget.container_in_page(project, pagenumber, call_data['pchange'], widget_name, container)
            sd_widgetinserts["insert_section", "show"] = True
    except ServerError as e:
        raise FailPage(e.message)
    pd.update(sd_widgetinserts)
    # containerinfo is a namedtuple ('container', 'empty')
    call_data['widgetdescription'] = widgetdescription
    if containerinfo.empty:
        # empty container
        # set location, where item is to be inserted
        call_data['location'] = (widget_name, container, ())
        # go to page showing empty container
        raise GoTo(target = 54600, clear_submitted=True)
    # Fill in header
    sd_adminhead["page_head","large_text"] = "Widget " + widget_name + " container: " + str(container)
    pd.update(sd_adminhead)
    # so header text and navigation done, now continue with the page contents
    pd['container_description','textblock_ref'] = ".".join(("widgets",widgetdescription.modulename, widgetdescription.classname, "container" + str(container)))
    pd['further_description','para_text'] = "Choose an item to edit."
    # fill in the table
    call_data['location_string'] = widget_name
    retrieve_container_dom(skicall)
    # and do show the download button
    pd['download_description', 'show'] = True
    pd['containerdownload', 'show'] = True
def empty_container(skicall):
    """Fill in the empty-container page header and description textblock."""
    call_data = skicall.call_data
    pd = call_data['pagedata']
    sd = SectionData("adminhead")
    # location is (widget_name, container, ())
    location = call_data['location']
    widget_name = location[0]
    container_number = location[1]
    widgetdescription = call_data['widgetdescription']
    sd["page_head","large_text"] = "Widget " + widget_name + " container: " + str(container_number)
    pd.update(sd)
    textblock_ref = ".".join(("widgets", widgetdescription.modulename, widgetdescription.classname, "container" + str(container_number)))
    pd['container_description','textblock_ref'] = textblock_ref
def retrieve_container_dom(skicall):
    """Fill in the container dom table (editdom section).

    Builds the table contents, drag rows and drop rows via
    _container_domcontents and sets them, together with the per-column
    link targets, into the editdom domtable widget.
    """
    call_data = skicall.call_data
    pd = call_data['pagedata']
    sd = SectionData("editdom")
    project = call_data['editedprojname']
    pagenumber = None
    section_name = None
    if "page_number" in call_data:
        pagenumber = call_data["page_number"]
    elif "section_name" in call_data:
        section_name = call_data["section_name"]
    else:
        raise FailPage(message = "No page or section given")
    # location_string is the widget name
    if 'location_string' in call_data:
        location_string = call_data['location_string']
    elif 'widget_name' in call_data:
        location_string = call_data['widget_name']
    else:
        raise FailPage(message = "widget_name not in call_data")
    if 'container' not in call_data:
        raise FailPage(message = "container not in call_data")
    container = call_data["container"]
    try:
        domcontents, dragrows, droprows = _container_domcontents(project, pagenumber, section_name, location_string, container)
    except ServerError as e:
        raise FailPage(message=e.message)
    # widget editdom,domtable is populated with fields
    # dragrows: A two element list for every row in the table, could be empty if no drag operation
    #                col 0 - True if draggable, False if not
    #                col 1 - If col 0 is True, this is data sent with the call wnen a row is dropped
    # droprows: A two element list for every row in the table, could be empty if no drop operation
    #                col 0 - True if droppable, False if not
    #                col 1 - text to send with the call when a row is dropped here
    # dropident: ident or label of target, called when a drop occurs which returns a JSON page
    # cols: A three element list for every column in the table, must be given with empty values if no links
    #                col 0 - target HTML page link ident of buttons in each column, if col 1 not present or no javascript
    #                col 1 - target JSON page link ident of buttons in each column,
    #                col 2 - session storage key 'ski_part'
    # contents: A list for every element in the table, should be row*col lists
    #                col 0 - text string, either text to display or button text
    #                col 1 - A 'style' string set on the td cell, if empty string, no style applied
    #                col 2 - Is button? If False only text will be shown, not a button, button class will not be applied
    #                       If True a link to link_ident/json_ident will be set with button_class applied to it
    #                col 3 - The get field value of the button link, empty string if no get field
    # create the table
    sd['domtable', 'contents']  = domcontents
    sd['domtable', 'dragrows']  = dragrows
    sd['domtable', 'droprows']  = droprows
    # for each column: html link, JSON link, storage key
    sd['domtable', 'cols']  =  [    ['','',''],                                                # tag name, no link
                                    ['','',''],                                                # brief, no link
                                    ['no_javascript','move_up_in_container_dom',''],           # up arrow
                                    ['no_javascript','move_up_right_in_container_dom',''],     # up right
                                    ['no_javascript','move_down_in_container_dom',''],         # down
                                    ['no_javascript','move_down_right_in_container_dom',''],   # down right
                                    ['edit_container_dom','',''],                              # edit, html only
                                    ['no_javascript',44205,''],                                # insert/append
                                    ['no_javascript',44580,''],                                # copy
                                    ['no_javascript',44590,'ski_part'],                        # paste
                                    ['no_javascript','cut_container_dom',''],                  # cut
                                    ['no_javascript','delete_container_dom','']                # delete
                               ]
    sd['domtable', 'dropident'] = 'move_in_container_dom'
    pd.update(sd)
def _container_domcontents(project, pagenumber, section_name, location_string, container):
    """Return (domcontents, dragrows, droprows) for the container dom table."""
    parttext, partdict = fromjson.container_outline(project, pagenumber, section_name, location_string, container)
    # two header rows (widget name, container number) followed by ten
    # empty four-cell rows, then _domtree appends a row per dom item
    domcontents = [["Widget %s" % location_string, '', False, ''],
                   ["Container %s" % container, '', False, '']]
    domcontents.extend([['', '', False, ''] for _ in range(10)])
    part_loc = location_string + '-' + str(container)
    part_string_list = []
    rows = _domtree(partdict, part_loc, domcontents, part_string_list)
    # the first table row is not draggable, but dropping on it targets
    # the container itself
    dragrows = [[False, '']]
    droprows = [[True, part_loc]]
    if rows > 1:
        for part_string in part_string_list[:rows - 1]:
            dragrows.append([True, part_string])
            droprows.append([True, part_string])
    return domcontents, dragrows, droprows
def copy_container(skicall):
    "Gets container part and return it in page_data['localStorage'] with key ski_part for browser session storage"
    call_data = skicall.call_data
    pd = call_data['pagedata']
    project = call_data['editedprojname']
    pagenumber = None
    section_name = None
    if "page_number" in call_data:
        pagenumber = call_data["page_number"]
    elif "section_name" in call_data:
        section_name = call_data["section_name"]
    else:
        raise FailPage(message = "No page or section given")
    if ('editdom', 'domtable', 'contents') not in call_data:
        raise FailPage(message = "item missing")
    part = call_data['editdom', 'domtable', 'contents']
    # so part is widget_name, container with location string of integers
    # create location which is a tuple or list consisting of three items:
    # a string of widget name
    # a container integer
    # a tuple or list of location integers
    location_list = part.split('-')
    # first item should be a string, rest integers
    if len(location_list) < 3:
        raise FailPage("Item has not been recognised")
    try:
        widget_name = location_list[0]
        container = int(location_list[1])
        location_integers = [ int(i) for i in location_list[2:]]
    except Exception:
        raise FailPage("Item has not been recognised")
    # location is a tuple of widget_name, container, tuple of location integers
    location = (widget_name, container, location_integers)
    # get a json string dump of the item outline, however change any Sections to Parts
    # (a pasted section must become an ordinary part)
    itempart, itemdict = fromjson.item_outline(project, pagenumber, section_name, location)
    if itempart == 'Section':
        jsonstring = json.dumps(['Part',itemdict], indent=0, separators=(',', ':'))
    else:
        jsonstring = json.dumps([itempart,itemdict], indent=0, separators=(',', ':'))
    # hand the JSON string to the browser's session storage under key ski_part
    pd.localStorage = {'ski_part':jsonstring}
    call_data['status'] = 'Item copied, and can now be pasted.'
def paste_container(skicall):
    """Insert a previously copied item (a JSON string) into a container.

    The drop/paste target is submitted in ('editdom','domtable','contents')
    as "widgetname-container-i-j..." and the JSON string of the item in
    ('editdom','domtable','cols').  After insertion the dom table is
    rebuilt and returned in the editdom section.
    """
    call_data = skicall.call_data
    pd = call_data['pagedata']
    sd = SectionData("editdom")
    project = call_data['editedprojname']
    pagenumber = None
    section_name = None
    if "page_number" in call_data:
        pagenumber = call_data["page_number"]
    elif "section_name" in call_data:
        section_name = call_data["section_name"]
    else:
        raise FailPage(message = "No page or section given")
    if ('editdom', 'domtable', 'contents') not in call_data:
        raise FailPage(message = "item missing")
    part = call_data['editdom', 'domtable', 'contents']
    if ('editdom', 'domtable', 'cols') not in call_data:
        raise FailPage(message = "item to paste missing")
    json_string = call_data['editdom', 'domtable', 'cols']
    # so part is widget_name, container with location string of integers
    # create location which is a tuple or list consisting of three items:
    # a string of widget name
    # a container integer
    # a tuple or list of location integers
    location_list = part.split('-')
    # first item should be a string, rest integers
    if len(location_list) < 3:
        raise FailPage("Item has not been recognised")
    try:
        widget_name = location_list[0]
        container = int(location_list[1])
        location_integers = [ int(i) for i in location_list[2:]]
    except Exception:
        raise FailPage("Item has not been recognised")
    # location is a tuple of widget_name, container, tuple of location integers
    location = (widget_name, container, location_integers)
    # create the item and update the session change identifier
    if section_name:
        call_data['schange'] = editsection.create_item_in_section(project, section_name, call_data['schange'], location, json_string)
    else:
        call_data['pchange'] = editpage.create_item_in_page(project, pagenumber, call_data['pchange'], location, json_string)
    # rebuild and return the refreshed dom table
    domcontents, dragrows, droprows = _container_domcontents(project, pagenumber, section_name, widget_name, container)
    sd['domtable', 'dragrows']  = dragrows
    sd['domtable', 'droprows']  = droprows
    sd['domtable', 'contents']  = domcontents
    pd.update(sd)
def downloadcontainer(skicall):
    "Gets container, and returns a json dictionary, this will be sent as an octet file to be downloaded"
    call_data = skicall.call_data
    pd = call_data['pagedata']
    project = call_data['editedprojname']
    pagenumber = None
    section_name = None
    if "page_number" in call_data:
        pagenumber = call_data["page_number"]
    elif "section_name" in call_data:
        section_name = call_data["section_name"]
    else:
        raise FailPage(message = "No page or section given")
    if 'widget_name' in call_data:
        widget_name = call_data['widget_name']
    else:
        raise FailPage(message = "widget_name not in call_data")
    if 'container' not in call_data:
        raise FailPage(message = "container not in call_data")
    container = call_data["container"]
    # part_dict supports move_to_end, so container_outline returns an OrderedDict
    parttext, part_dict = fromjson.container_outline(project, pagenumber, section_name, widget_name, container)
    # set contents into a div: prepend the div attributes so the download
    # can be re-uploaded as a standalone part
    part_dict["hide_if_empty"] = False
    part_dict.move_to_end("hide_if_empty", last=False)
    part_dict["show"] = True
    part_dict.move_to_end("show", last=False)
    part_dict["brief"] = "From widget %s container %s" % (widget_name, container)
    part_dict.move_to_end("brief", last=False)
    part_dict["tag_name"] = "div"
    part_dict.move_to_end("tag_name", last=False)
    # set version and skipole as the first two items in the dictionary
    versions_tuple = versions(project)
    part_dict["skipole"] = versions_tuple.skipole
    part_dict.move_to_end('skipole', last=False)
    part_dict["version"] = versions_tuple.project
    part_dict.move_to_end('version', last=False)
    jsonstring = json.dumps(part_dict, indent=4, separators=(',', ':'))
    # encode line by line, accumulating the byte length for Content-Length
    line_list = []
    n = 0
    for line in jsonstring.splitlines(True):
        binline = line.encode('utf-8')
        n += len(binline)
        line_list.append(binline)
    pd.mimetype = 'application/octet-stream'
    pd.content_length = str(n)
    # the returned list of byte strings becomes the downloaded file body
    return line_list
def back_to_parent_container(skicall):
    """Point the session at the container holding the current widget.

    Sets call_data['widget_name'] to the parent widget and
    call_data['container'] to the parent container, clearing other
    session data apart from the page/section identity.  Raises FailPage
    if the widget is not identified or the lookup fails.
    """
    call_data = skicall.call_data
    project = call_data['editedprojname']
    section_name = None
    pagenumber = None
    if 'section_name' in call_data:
        section_name = call_data['section_name']
    elif 'page_number' in call_data:
        pagenumber = call_data['page_number']
    else:
        raise FailPage(message="No section or page given")
    # bugfix: widget_name was previously referenced below without ever
    # being assigned, raising NameError on every call; fetch it from the
    # session data as the sibling functions do
    if 'widget_name' in call_data:
        widget_name = call_data['widget_name']
    else:
        raise FailPage(message="Widget not identified")
    try:
        if section_name:
            widgetdescription = editwidget.section_widget_description(project, section_name, call_data['schange'], widget_name)
        else:
            widgetdescription = editwidget.page_widget_description(project, pagenumber, call_data['pchange'], widget_name)
    except ServerError as e:
        raise FailPage(e.message)
    utils.clear_call_data(call_data)
    # restore the page/section identity, then point at the parent
    if section_name:
        call_data['section_name'] = section_name
    else:
        call_data['page_number'] = pagenumber
    call_data['widget_name'] = widgetdescription.parent_widget
    call_data['container'] = widgetdescription.parent_container
def edit_container_dom(skicall):
    """Called by domtable to edit an item in a container.

    Decodes the submitted 'widgetname-container-i-j-...' location string,
    looks up the item, stores its part_tuple in call_data and diverts
    (via GoTo) to the editing page appropriate for the item's type.
    """
    call_data = skicall.call_data
    project = call_data['editedprojname']
    pagenumber = None
    section_name = None
    # the container lives either in a page or in a section
    if "page_number" in call_data:
        pagenumber = call_data["page_number"]
    elif "section_name" in call_data:
        section_name = call_data["section_name"]
    else:
        raise FailPage(message = "No page or section given")
    if ('editdom', 'domtable', 'contents') not in call_data:
        raise FailPage(message = "item to edit missing")
    dom_string = call_data['editdom', 'domtable', 'contents']
    # dom_string fields: widget name, container number, then location integers
    fields = dom_string.split('-')
    if len(fields) < 3:
        raise FailPage("Item to edit has not been recognised")
    try:
        widget_name = fields[0]
        container = int(fields[1])
        location_integers = [int(f) for f in fields[2:]]
    except Exception:
        raise FailPage("Item to edit has not been recognised")
    part_tuple = skilift.part_info(project, pagenumber, section_name, [widget_name, container, location_integers])
    if part_tuple is None:
        raise FailPage("Item to edit has not been recognised")
    if part_tuple.name:
        # item to edit is a widget
        call_data['part_tuple'] = part_tuple
        raise GoTo(target = 54006, clear_submitted=True)
    # map each editable part type to its responder ident
    edit_targets = {
        "Part": 53007,        # html part
        "ClosedPart": 53007,  # html closed part
        "HTMLSymbol": 51107,  # symbol
        "str": 51017,         # text
        "TextBlock": 52017,   # TextBlock
        "Comment": 51207,     # comment
        }
    target = edit_targets.get(part_tuple.part_type)
    if (target is None) and (not section_name) and (part_tuple.part_type == "SectionPlaceHolder"):
        # section placeholders are only editable within a page
        target = 55007
    if target is None:
        raise FailPage("Item to edit has not been recognised")
    call_data['part_tuple'] = part_tuple
    raise GoTo(target = target, clear_submitted=True)
def cut_container_dom(skicall):
    """Called by domtable to cut an item in a container.

    The item identified by the submitted 'widgetname-container-i-j-...'
    string is serialised to JSON and placed in the browser localStorage
    under key 'ski_part' (so it can be pasted later), then deleted from
    the page or section.  If the container becomes empty the call diverts
    to a full redraw; otherwise just the dom table is refreshed.
    """
    call_data = skicall.call_data
    pd = call_data['pagedata']         # page-wide output data
    sd = SectionData("editdom")        # section data used to refresh the table
    project = call_data['editedprojname']
    pagenumber = None
    section_name = None
    # the container lives either in a page or in a section
    if "page_number" in call_data:
        pagenumber = call_data["page_number"]
    elif "section_name" in call_data:
        section_name = call_data["section_name"]
    else:
        raise FailPage(message = "No page or section given")
    if ('editdom', 'domtable', 'contents') not in call_data:
        raise FailPage(message = "item to remove missing")
    part = call_data['editdom', 'domtable', 'contents']
    # so part is widget_name, container with location string of integers
    # create location which is a tuple or list consisting of three items:
    # a string of widget name
    # a container integer
    # a tuple or list of location integers
    location_list = part.split('-')
    # first item should be a string, rest integers
    if len(location_list) < 3:
        raise FailPage("Item to remove has not been recognised")
    try:
        widget_name = location_list[0]
        container = int(location_list[1])
        location_integers = [ int(i) for i in location_list[2:]]
    except Exception:
        raise FailPage("Item to remove has not been recognised")
    # location is a tuple of widget_name, container, tuple of location integers
    location = (widget_name, container, location_integers)
    part_tuple = skilift.part_info(project, pagenumber, section_name, location)
    if part_tuple is None:
        raise FailPage("Item to remove has not been recognised")
    # prior to deleting, take a copy
    # get a json string dump of the item outline, however change any Sections to Parts
    # (a pasted copy of a Section must behave as an ordinary Part)
    itempart, itemdict = fromjson.item_outline(project, pagenumber, section_name, location)
    if itempart == 'Section':
        jsonstring = json.dumps(['Part',itemdict], indent=0, separators=(',', ':'))
    else:
        jsonstring = json.dumps([itempart,itemdict], indent=0, separators=(',', ':'))
    # store the copy in browser localStorage so a later paste can recover it
    pd.localStorage = {'ski_part':jsonstring}
    # remove the item using functions from skilift.editsection and skilift.editpage
    if pagenumber is None:
        # remove the item from a section; schange is the section change uuid
        try:
            call_data['schange'] = editsection.del_location(project, section_name, call_data['schange'], location)
            containerinfo = editwidget.container_in_section(project, section_name, call_data['schange'], widget_name, container)
        except ServerError as e:
            raise FailPage(message = e.message)
    else:
        # remove the item from a page; pchange is the page change uuid
        try:
            call_data['pchange'] = editpage.del_location(project, pagenumber, call_data['pchange'], location)
            containerinfo = editwidget.container_in_page(project, pagenumber, call_data['pchange'], widget_name, container)
        except ServerError as e:
            raise FailPage(message = e.message)
    # containerinfo is a namedtuple ('container', 'empty')
    # once item is deleted, no info on the item should be
    # left in call_data - this may not be required in future
    if 'location' in call_data:
        del call_data['location']
    if 'part' in call_data:
        del call_data['part']
    if 'part_loc' in call_data:
        del call_data['part_loc']
    call_data['container'] = container
    call_data['widget_name'] = widget_name
    # If deleting item has left container empty, return a full retrieve of the container page
    if containerinfo.empty:
        raise GoTo("back_to_container")
    # and get info to re-draw the table
    domcontents, dragrows, droprows = _container_domcontents(project, pagenumber, section_name, widget_name, container)
    # otherwise just redraw the table
    sd['domtable', 'dragrows'] = dragrows
    sd['domtable', 'droprows'] = droprows
    sd['domtable', 'contents'] = domcontents
    pd.update(sd)
    call_data['status'] = 'Item copied and then deleted. Use paste to recover or move it.'
def delete_container_dom(skicall):
    """Called by domtable to delete an item in a container.

    Unlike cut_container_dom, no copy is placed in localStorage - the
    item identified by the submitted location string is simply removed.
    If the container becomes empty the call diverts to a full redraw;
    otherwise just the dom table is refreshed.
    """
    call_data = skicall.call_data
    pd = call_data['pagedata']
    sd = SectionData("editdom")
    project = call_data['editedprojname']
    # the container lives either in a page or in a section
    if "page_number" in call_data:
        pagenumber = call_data["page_number"]
        section_name = None
    elif "section_name" in call_data:
        pagenumber = None
        section_name = call_data["section_name"]
    else:
        raise FailPage(message = "No page or section given")
    if ('editdom', 'domtable', 'contents') not in call_data:
        raise FailPage(message = "item to remove missing")
    dom_string = call_data['editdom', 'domtable', 'contents']
    # dom_string fields: widget name, container number, then location integers
    fields = dom_string.split('-')
    if len(fields) < 3:
        raise FailPage("Item to remove has not been recognised")
    try:
        widget_name = fields[0]
        container = int(fields[1])
        location_integers = [int(f) for f in fields[2:]]
    except Exception:
        raise FailPage("Item to remove has not been recognised")
    # location identifies the item: (widget name, container number, integers)
    location = (widget_name, container, location_integers)
    if skilift.part_info(project, pagenumber, section_name, location) is None:
        raise FailPage("Item to remove has not been recognised")
    # delete the item, updating the relevant change uuid
    try:
        if pagenumber is None:
            call_data['schange'] = editsection.del_location(project, section_name, call_data['schange'], location)
            containerinfo = editwidget.container_in_section(project, section_name, call_data['schange'], widget_name, container)
        else:
            call_data['pchange'] = editpage.del_location(project, pagenumber, call_data['pchange'], location)
            containerinfo = editwidget.container_in_page(project, pagenumber, call_data['pchange'], widget_name, container)
    except ServerError as e:
        raise FailPage(message = e.message)
    # containerinfo is a namedtuple ('container', 'empty')
    # the deleted item must leave no stale references in call_data
    for stale in ('location', 'part', 'part_loc'):
        if stale in call_data:
            del call_data[stale]
    call_data['container'] = container
    call_data['widget_name'] = widget_name
    # an emptied container needs a full page redraw
    if containerinfo.empty:
        raise GoTo("back_to_container")
    # otherwise just redraw the table
    domcontents, dragrows, droprows = _container_domcontents(project, pagenumber, section_name, widget_name, container)
    sd['domtable', 'dragrows'] = dragrows
    sd['domtable', 'droprows'] = droprows
    sd['domtable', 'contents'] = domcontents
    pd.update(sd)
    call_data['status'] = 'Item deleted.'
def _item_to_move(call_data):
    """Return the part_tuple of the item selected for moving.

    Decodes the 'widgetname-container-i-j-...' string submitted by the
    dom table, records the widget name and container number back into
    call_data, and looks the item up with skilift.part_info.
    Raises FailPage on any failure to identify the item.
    """
    project = call_data['editedprojname']
    pagenumber = None
    section_name = None
    # the container lives either in a page or in a section
    if "page_number" in call_data:
        pagenumber = call_data["page_number"]
    elif "section_name" in call_data:
        section_name = call_data["section_name"]
    else:
        raise FailPage(message = "No page or section given")
    if ('editdom', 'domtable', 'contents') not in call_data:
        raise FailPage(message = "item to move missing")
    dom_string = call_data['editdom', 'domtable', 'contents']
    # dom_string fields: widget name, container number, then location integers
    fields = dom_string.split('-')
    if len(fields) < 3:
        raise FailPage("Item to move has not been recognised")
    try:
        widget_name = fields[0]
        container = int(fields[1])
        loc_integers = [int(f) for f in fields[2:]]
    except Exception:
        raise FailPage("Item to move has not been recognised")
    # remember which widget/container is being worked on
    call_data['container'] = container
    call_data['widget_name'] = widget_name
    try:
        part_tuple = skilift.part_info(project, pagenumber, section_name, (widget_name, container, loc_integers))
    except ServerError as e:
        raise FailPage(message = e.message)
    if part_tuple is None:
        raise FailPage("Item to move has not been recognised")
    return part_tuple
def move_up_in_container_dom(skicall):
    """Called by domtable to move an item in a container up.

    The first child of a part is promoted to its parent's level; any
    other item swaps places with the sibling above it.  The dom table
    is then redrawn.
    """
    call_data = skicall.call_data
    pd = call_data['pagedata']
    sd = SectionData("editdom")
    try:
        part_tuple = _item_to_move(call_data)
        widget_name, container, location_integers = part_tuple.location
        container = int(container)
        if (len(location_integers) == 1) and (location_integers[0] == 0):
            # already at the very top of the container
            raise FailPage("Cannot be moved up")
        last = location_integers[-1]
        if last:
            # swap with the preceding sibling at the same level
            new_location_integers = list(location_integers[:-1])
            new_location_integers.append(last - 1)
        else:
            # first child of a part: promote to the parent's level
            new_location_integers = location_integers[:-1]
        # the stored location is stale after a move - drop it
        for stale in ('location', 'part', 'part_top', 'part_loc'):
            if stale in call_data:
                del call_data[stale]
        destination = (widget_name, container, new_location_integers)
        if part_tuple.section_name:
            # item is in a section
            call_data['schange'] = editsection.move_location(part_tuple.project, part_tuple.section_name, call_data['schange'], part_tuple.location, destination)
            domcontents, dragrows, droprows = _container_domcontents(part_tuple.project, None, part_tuple.section_name, widget_name, container)
        else:
            # item is in a page
            call_data['pchange'] = editpage.move_location(part_tuple.project, part_tuple.pagenumber, call_data['pchange'], part_tuple.location, destination)
            domcontents, dragrows, droprows = _container_domcontents(part_tuple.project, part_tuple.pagenumber, None, widget_name, container)
    except ServerError as e:
        raise FailPage(message = e.message)
    # redraw the table
    sd['domtable', 'dragrows'] = dragrows
    sd['domtable', 'droprows'] = droprows
    sd['domtable', 'contents'] = domcontents
    pd.update(sd)
def move_up_right_in_container_dom(skicall):
    """Called by domtable to move an item in a container up and to the right.

    The item is appended as the last child of the sibling immediately
    above it, which must itself be a Part.  The dom table is then redrawn.
    Note: FailPage raised inside the try block passes through, since the
    except clause only catches ServerError.
    """
    call_data = skicall.call_data
    pd = call_data['pagedata']         # page-wide output data
    sd = SectionData("editdom")        # section data used to refresh the table
    try:
        part_tuple = _item_to_move(call_data)
        # location is (widget_name, container number, location integers)
        location = part_tuple.location
        widget_name = location[0]
        container = int(location[1])
        location_integers = location[2]
        if location_integers[-1] == 0:
            # at top of a part, cannot be moved
            raise FailPage("Cannot be moved up")
        # the sibling above becomes the new parent
        new_parent_integers = list(location_integers[:-1])
        new_parent_integers.append(location_integers[-1] - 1)
        new_parent_location = (location[0], location[1], new_parent_integers)
        new_parent_tuple = skilift.part_info(part_tuple.project, part_tuple.pagenumber, part_tuple.section_name, new_parent_location)
        if new_parent_tuple is None:
            raise FailPage("Cannot be moved up")
        if new_parent_tuple.part_type != "Part":
            # only a Part can contain other items
            raise FailPage("Cannot be moved up")
        # append after the new parent's existing children
        items_in_new_parent = len(skilift.part_contents(part_tuple.project, part_tuple.pagenumber, part_tuple.section_name, new_parent_location))
        new_location_integers = tuple(new_parent_integers + [items_in_new_parent])
        # after a move, location is wrong, so remove from call_data
        if 'location' in call_data:
            del call_data['location']
        if 'part' in call_data:
            del call_data['part']
        if 'part_top' in call_data:
            del call_data['part_top']
        if 'part_loc' in call_data:
            del call_data['part_loc']
        # move the item
        if part_tuple.section_name:
            # move the part in a section, using skilift.editsection.move_location(project, section_name, schange, from_location, to_location)
            call_data['schange'] = editsection.move_location(part_tuple.project, part_tuple.section_name, call_data['schange'], location, (widget_name, container, new_location_integers))
            domcontents, dragrows, droprows = _container_domcontents(part_tuple.project, None, part_tuple.section_name, widget_name, container)
        else:
            # move the part in a page, using skilift.editpage.move_location(project, pagenumber, pchange, from_location, to_location)
            call_data['pchange'] = editpage.move_location(part_tuple.project, part_tuple.pagenumber, call_data['pchange'], location, (widget_name, container, new_location_integers))
            domcontents, dragrows, droprows = _container_domcontents(part_tuple.project, part_tuple.pagenumber, None, widget_name, container)
    except ServerError as e:
        raise FailPage(message = e.message)
    # redraw the table
    sd['domtable', 'dragrows'] = dragrows
    sd['domtable', 'droprows'] = droprows
    sd['domtable', 'contents'] = domcontents
    pd.update(sd)
def move_down_in_container_dom(skicall):
    """Called by domtable to move an item in a container down.

    The last item of a part moves up a level to just after its parent;
    any other item swaps places with the sibling below it.  The dom
    table is then redrawn.
    NOTE(review): the '+2' offsets below look like they account for the
    destination index being computed before the original item is removed
    by move_location - confirm against move_location's semantics.
    """
    call_data = skicall.call_data
    pd = call_data['pagedata']         # page-wide output data
    sd = SectionData("editdom")        # section data used to refresh the table
    try:
        part_tuple = _item_to_move(call_data)
        # location is (widget_name, container number, location integers)
        location = part_tuple.location
        widget_name = location[0]
        container = int(location[1])
        location_integers = location[2]
        if len(location_integers) == 1:
            # Just at immediate level below top
            parent_location = (widget_name, container, ())
            items_in_parent = len(skilift.part_contents(part_tuple.project, part_tuple.pagenumber, part_tuple.section_name, parent_location))
            if location_integers[0] == (items_in_parent-1):
                # At end, cannot be moved
                raise FailPage("Cannot be moved down")
            new_location_integers = (location_integers[0]+2,)
        else:
            parent_integers = tuple(location_integers[:-1])
            parent_location = (widget_name, container, parent_integers)
            items_in_parent = len(skilift.part_contents(part_tuple.project, part_tuple.pagenumber, part_tuple.section_name, parent_location))
            if location_integers[-1] == (items_in_parent-1):
                # At end of a part, so move up a level
                new_location_integers = list(parent_integers[:-1])
                new_location_integers.append(parent_integers[-1] + 1)
            else:
                # just insert into current level
                new_location_integers = list(parent_integers)
                new_location_integers.append(location_integers[-1] + 2)
        # after a move, location is wrong, so remove from call_data
        if 'location' in call_data:
            del call_data['location']
        if 'part' in call_data:
            del call_data['part']
        if 'part_top' in call_data:
            del call_data['part_top']
        if 'part_loc' in call_data:
            del call_data['part_loc']
        # move the item
        if part_tuple.section_name:
            # move the part in a section, using skilift.editsection.move_location(project, section_name, schange, from_location, to_location)
            call_data['schange'] = editsection.move_location(part_tuple.project, part_tuple.section_name, call_data['schange'], location, (widget_name, container, new_location_integers))
            domcontents, dragrows, droprows = _container_domcontents(part_tuple.project, None, part_tuple.section_name, widget_name, container)
        else:
            # move the part in a page, using skilift.editpage.move_location(project, pagenumber, pchange, from_location, to_location)
            call_data['pchange'] = editpage.move_location(part_tuple.project, part_tuple.pagenumber, call_data['pchange'], location, (widget_name, container, new_location_integers))
            domcontents, dragrows, droprows = _container_domcontents(part_tuple.project, part_tuple.pagenumber, None, widget_name, container)
    except ServerError as e:
        raise FailPage(message = e.message)
    # redraw the table
    sd['domtable', 'dragrows'] = dragrows
    sd['domtable', 'droprows'] = droprows
    sd['domtable', 'contents'] = domcontents
    pd.update(sd)
def move_down_right_in_container_dom(skicall):
    """Called by domtable to move an item in a container down and to the right.

    The item is inserted as the first child of the sibling immediately
    below it, which must be a Part or Section.  The dom table is then
    redrawn.
    """
    call_data = skicall.call_data
    pd = call_data['pagedata']
    sd = SectionData("editdom")
    try:
        part_tuple = _item_to_move(call_data)
        location = part_tuple.location
        widget_name = location[0]
        container = int(location[1])
        location_integers = location[2]
        # locate the item's current parent
        if len(location_integers) == 1:
            parent_location = (widget_name, container, ())
        else:
            parent_location = (widget_name, container, list(location_integers[:-1]))
        sibling_count = len(skilift.part_contents(part_tuple.project, part_tuple.pagenumber, part_tuple.section_name, parent_location))
        if location_integers[-1] == (sibling_count - 1):
            # already the last item at this level
            raise FailPage("Cannot be moved down")
        # the sibling below becomes the new parent
        new_parent_integers = list(location_integers[:-1])
        new_parent_integers.append(location_integers[-1] + 1)
        new_parent_location = (location[0], location[1], new_parent_integers)
        new_parent_tuple = skilift.part_info(part_tuple.project, part_tuple.pagenumber, part_tuple.section_name, new_parent_location)
        if new_parent_tuple is None:
            raise FailPage("Cannot be moved down")
        if new_parent_tuple.part_type not in ('Part', 'Section'):
            # only a Part or Section can contain other items
            raise FailPage("Cannot be moved down")
        # insert as the new parent's first child
        new_location_integers = tuple(new_parent_integers) + (0,)
        # the stored location is stale after a move - drop it
        for stale in ('location', 'part', 'part_top', 'part_loc'):
            if stale in call_data:
                del call_data[stale]
        destination = (widget_name, container, new_location_integers)
        if part_tuple.section_name:
            # item is in a section
            call_data['schange'] = editsection.move_location(part_tuple.project, part_tuple.section_name, call_data['schange'], location, destination)
            domcontents, dragrows, droprows = _container_domcontents(part_tuple.project, None, part_tuple.section_name, widget_name, container)
        else:
            # item is in a page
            call_data['pchange'] = editpage.move_location(part_tuple.project, part_tuple.pagenumber, call_data['pchange'], location, destination)
            domcontents, dragrows, droprows = _container_domcontents(part_tuple.project, part_tuple.pagenumber, None, widget_name, container)
    except ServerError as e:
        raise FailPage(message = e.message)
    # redraw the table
    sd['domtable', 'dragrows'] = dragrows
    sd['domtable', 'droprows'] = droprows
    sd['domtable', 'contents'] = domcontents
    pd.update(sd)
def move_in_container_dom(skicall):
    """Called by domtable to move an item in a container after a drag and drop.

    The dragged row identifies the item to move; the drop row identifies
    the target.  Dropping onto a Part or Section inserts the item as its
    first child; dropping onto any other item appends just after it.
    Both rows must belong to the same widget and container.  The dom
    table is then redrawn.
    """
    call_data = skicall.call_data
    pd = call_data['pagedata']         # page-wide output data
    sd = SectionData("editdom")        # section data used to refresh the table
    if ('editdom', 'domtable', 'dragrows') not in call_data:
        raise FailPage(message = "item to drop missing")
    editedprojname = call_data['editedprojname']
    pagenumber = None
    section_name = None
    # the container lives either in a page or in a section
    if "page_number" in call_data:
        pagenumber = call_data["page_number"]
    elif "section_name" in call_data:
        section_name = call_data["section_name"]
    else:
        raise FailPage(message = "No page or section given")
    part_to_move = call_data['editdom', 'domtable', 'dragrows']
    # so part_to_move is widget name with container and location string of integers
    # create location which is a tuple or list consisting of three items:
    # a string of widget name
    # a container integer
    # a tuple or list of location integers
    location_list = part_to_move.split('-')
    # first item should be a string, rest integers
    if len(location_list) < 3:
        raise FailPage("Item to move has not been recognised")
    try:
        widget_name = location_list[0]
        container = int(location_list[1])
        location_to_move_integers = [ int(i) for i in location_list[2:]]
    except Exception:
        raise FailPage("Item to move has not been recognised")
    # location is a tuple of widget_name, container, tuple of location integers
    location_to_move = (widget_name, container, location_to_move_integers)
    call_data['container'] = container
    call_data['widget_name'] = widget_name
    # new target location
    target_part = call_data['editdom', 'domtable', 'droprows']
    # so target_part is widget name with location string of integers
    # create location which is a tuple or list consisting of three items:
    # a string of widget name
    # a container integer
    # a tuple or list of location integers
    location_list = target_part.split('-')
    # first item should be a string, rest integers
    if len(location_list) < 2:
        raise FailPage("target of move has not been recognised")
    # source and target must share widget and container
    if widget_name != location_list[0]:
        raise FailPage("Invalid move, widget name differs")
    if container != int(location_list[1]):
        raise FailPage("Invalid move, container number differs")
    if len(location_list) == 2:
        # At the container top row - dropped on the container itself
        new_location_integers = [0]
    else:
        try:
            target_location_integers = [ int(i) for i in location_list[2:]]
        except Exception:
            raise FailPage("Invalid move, location not accepted")
        # location is a tuple of widget_name, container, tuple of location integers
        target_location = (widget_name, container, target_location_integers)
        # get target part_tuple from project, pagenumber, section_name, target_location
        target_part_tuple = skilift.part_info(editedprojname, pagenumber, section_name, target_location)
        if target_part_tuple is None:
            raise FailPage("Target has not been recognised")
        if (target_part_tuple.part_type == "Part") or (target_part_tuple.part_type == "Section"):
            # insert as first child of the target
            if target_location_integers:
                new_location_integers = list(target_location_integers)
                new_location_integers.append(0)
            else:
                new_location_integers = [0]
        else:
            # append just after the target
            new_location_integers = list(target_location_integers)
            new_location_integers[-1] = new_location_integers[-1] + 1
    # after a move, location is wrong, so remove from call_data
    if 'location' in call_data:
        del call_data['location']
    if 'part' in call_data:
        del call_data['part']
    if 'part_top' in call_data:
        del call_data['part_top']
    if 'part_loc' in call_data:
        del call_data['part_loc']
    # move the item
    try:
        if section_name:
            # move the part in a section, using skilift.editsection.move_location(project, section_name, schange, from_location, to_location)
            call_data['schange'] = editsection.move_location(editedprojname, section_name, call_data['schange'], location_to_move, (widget_name, container, new_location_integers))
            domcontents, dragrows, droprows = _container_domcontents(editedprojname, None, section_name, widget_name, container)
        else:
            # move the part in a page, using skilift.editpage.move_location(project, pagenumber, pchange, from_location, to_location)
            call_data['pchange']= editpage.move_location(editedprojname, pagenumber, call_data['pchange'], location_to_move, (widget_name, container, new_location_integers))
            domcontents, dragrows, droprows = _container_domcontents(editedprojname, pagenumber, None, widget_name, container)
    except ServerError as e:
        raise FailPage(message = e.message)
    # redraw the table
    sd['domtable', 'dragrows'] = dragrows
    sd['domtable', 'droprows'] = droprows
    sd['domtable', 'contents'] = domcontents
    pd.update(sd)
def _domtree(partdict, part_loc, contents, part_string_list, rows=1, indent=1):
    """Creates the contents of the domtable.

    Recursively walks partdict['parts'], appending twelve cells per table
    row to the flat list 'contents':
        text, brief/characters, up, up-right, down, down-right,
        edit, insert/append, copy, paste, cut, delete
    Each cell is [text, css-style, is-link flag, link identifier].

    partdict -- outline dictionary of a Part; its 'parts' value is a list
                of (part_type, part_dict) pairs
    part_loc -- location string prefix for this level
    contents -- flat cell list, appended to in place
    part_string_list -- location strings, appended to in place
    rows -- running table-row count (row 1 is the top row)
    indent -- nesting depth, sets the text cell's left padding

    Returns the updated row count.

    NOTE(review): the index arithmetic contents[last_row_at_this_level *12-6]
    below only lines up if the caller has already placed one 12-cell row
    (the container top row) into contents before calling this - confirm
    against the caller.
    """
    # note: if in a container
    # part_loc = widget_name + '-' + container_number
    # otherwise part_loc = body, head, svg, section_name
    indent += 1
    padding = "padding-left : %sem;" % (indent,)
    # u_r_flag: True when the previous sibling was a Part able to receive
    # an item (not script/pre), enabling the up-right arrow on this row
    u_r_flag = False
    last_row_at_this_level = 0
    parts = partdict['parts']
    # parts is a list of items
    last_index = len(parts)-1
    #Text #characters.. #up #up_right #down #down_right #edit #insert #copy #paste #cut #delete
    for index, part in enumerate(parts):
        part_location_string = part_loc + '-' + str(index)
        part_string_list.append(part_location_string)
        rows += 1
        part_type, part_dict = part
        # the row text - first two cells describe the item; long values are
        # truncated to keep the table compact
        if part_type == 'Widget' or part_type == 'ClosedWidget':
            part_name = 'Widget ' + part_dict['name']
            if len(part_name)>40:
                part_name = part_name[:35] + '...'
            contents.append([part_name, padding, False, ''])
            part_brief = html.escape(part_dict.get('brief',''))
            if len(part_brief)>40:
                part_brief = part_brief[:35] + '...'
            if not part_brief:
                part_brief = '-'
            contents.append([part_brief, '', False, ''])
        elif part_type == 'TextBlock':
            contents.append(['TextBlock', padding, False, ''])
            part_ref = part_dict['textref']
            if len(part_ref)>40:
                part_ref = part_ref[:35] + '...'
            if not part_ref:
                part_ref = '-'
            contents.append([part_ref, '', False, ''])
        elif part_type == 'SectionPlaceHolder':
            section_name = part_dict['placename']
            if section_name:
                section_name = "Section " + section_name
            else:
                section_name = "Section -None-"
            if len(section_name)>40:
                section_name = section_name[:35] + '...'
            contents.append([section_name, padding, False, ''])
            part_brief = html.escape(part_dict.get('brief',''))
            if len(part_brief)>40:
                part_brief = part_brief[:35] + '...'
            if not part_brief:
                part_brief = '-'
            contents.append([part_brief, '', False, ''])
        elif part_type == 'Text':
            contents.append(['Text', padding, False, ''])
            # in this case part_dict is the text string rather than a dictionary
            if len(part_dict)<40:
                part_str = html.escape(part_dict)
            else:
                part_str = html.escape(part_dict[:35] + '...')
            if not part_str:
                part_str = '-'
            contents.append([part_str, '', False, ''])
        elif part_type == 'HTMLSymbol':
            contents.append(['Symbol', padding, False, ''])
            part_text = part_dict['text']
            if len(part_text)<40:
                part_str = html.escape(part_text)
            else:
                part_str = html.escape(part_text[:35] + '...')
            if not part_str:
                part_str = '-'
            contents.append([part_str, '', False, ''])
        elif part_type == 'Comment':
            contents.append(['Comment', padding, False, ''])
            part_text = part_dict['text']
            if len(part_text)<33:
                part_str = "<!--" + part_text + '-->'
            else:
                part_str = "<!--" + part_text[:31] + '...'
            if not part_str:
                part_str = '<!---->'
            contents.append([part_str, '', False, ''])
        elif part_type == 'ClosedPart':
            if 'attribs' in part_dict:
                tag_name = "<%s ... />" % part_dict['tag_name']
            else:
                tag_name = "<%s />" % part_dict['tag_name']
            contents.append([tag_name, padding, False, ''])
            part_brief = html.escape(part_dict.get('brief',''))
            if len(part_brief)>40:
                part_brief = part_brief[:35] + '...'
            if not part_brief:
                part_brief = '-'
            contents.append([part_brief, '', False, ''])
        elif part_type == 'Part':
            if 'attribs' in part_dict:
                tag_name = "<%s ... >" % part_dict['tag_name']
            else:
                tag_name = "<%s>" % part_dict['tag_name']
            contents.append([tag_name, padding, False, ''])
            part_brief = html.escape(part_dict.get('brief',''))
            if len(part_brief)>40:
                part_brief = part_brief[:35] + '...'
            if not part_brief:
                part_brief = '-'
            contents.append([part_brief, '', False, ''])
        else:
            contents.append(['UNKNOWN', padding, False, ''])
            contents.append(['ERROR', '', False, ''])
        # UP ARROW
        if rows == 2:
            # second line in table cannot move upwards
            contents.append(['', '', False, '' ])
        else:
            contents.append(['&uarr;', 'width : 1%;', True, part_location_string])
        # UP RIGHT ARROW
        if u_r_flag:
            contents.append(['&nearr;', 'width : 1%;', True, part_location_string])
        else:
            contents.append(['', '', False, '' ])
        # DOWN ARROW
        if (indent == 2) and (index == last_index):
            # the last line at this top indent has been added, no down arrow
            contents.append(['', '', False, '' ])
        else:
            contents.append(['&darr;', 'width : 1%;', True, part_location_string])
        # DOWN RIGHT ARROW
        # set to empty, when next line is created if down-right not applicable
        contents.append(['', '', False, '' ])
        # EDIT
        contents.append(['Edit', 'width : 1%;', True, part_location_string])
        # INSERT or APPEND - a Part can contain children, so offers Insert
        if part_type == 'Part':
            contents.append(['Insert', 'width : 1%;text-align: center;', True, part_location_string])
        else:
            contents.append(['Append', 'width : 1%;text-align: center;', True, part_location_string])
        # COPY
        contents.append(['Copy', 'width : 1%;', True, part_location_string])
        # PASTE
        contents.append(['Paste', 'width : 1%;', True, part_location_string])
        # CUT
        contents.append(['Cut', 'width : 1%;', True, part_location_string])
        # DELETE
        contents.append(['Delete', 'width : 1%;', True, part_location_string])
        u_r_flag = False
        if part_type == 'Part':
            if last_row_at_this_level and (part_dict['tag_name'] != 'script') and (part_dict['tag_name'] != 'pre'):
                # add down right arrow in previous row at this level, get loc_string from adjacent edit cell
                editcell = contents[last_row_at_this_level *12-6]
                loc_string = editcell[3]
                contents[last_row_at_this_level *12-7] = ['&searr;', 'width : 1%;', True, loc_string]
            last_row_at_this_level = rows
            # recurse into the Part's own children
            rows = _domtree(part_dict, part_location_string, contents, part_string_list, rows, indent)
            # set u_r_flag for next item below this one
            if (part_dict['tag_name'] != 'script') and (part_dict['tag_name'] != 'pre'):
                u_r_flag = True
        else:
            last_row_at_this_level =rows
    return rows
| StarcoderdataPython |
1709290 | # -*- coding: utf-8 -*-
# MIT License
#
# Copyright (c) 2018 ZhicongYan
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ==============================================================================
import os
import sys
sys.path.append('.')
sys.path.append("../")
import tensorflow as tf
import tensorflow.contrib.layers as tcl
import numpy as np
from netutils.learning_rate import get_learning_rate
from netutils.learning_rate import get_global_step
from netutils.optimizer import get_optimizer
from netutils.optimizer import get_optimizer_by_config
from netutils.loss import get_loss
from .base_model import BaseModel
class BEGAN(BaseModel):
    """ Implementation of "BEGAN: Boundary Equilibrium Generative Adversarial Networks"
    <NAME>, <NAME>, <NAME>

    @article{DBLP:journals/corr/BerthelotSM17,
        author = {<NAME> and
                    <NAME> and
                    <NAME>},
        title = {{BEGAN:} Boundary Equilibrium Generative Adversarial Networks},
        journal = {CoRR},
        volume = {abs/1703.10717},
        year = {2017},
        url = {http://arxiv.org/abs/1703.10717},
        archivePrefix = {arXiv},
        eprint = {1703.10717},
        timestamp = {Wed, 07 Jun 2017 14:42:35 +0200},
        biburl = {https://dblp.org/rec/bib/journals/corr/BerthelotSM17},
        bibsource = {dblp computer science bibliography, https://dblp.org}
    }

    NOTE(review): this class is an unfinished stub — __init__ raises
    NotImplementedError before any construction happens, and the losses built
    below are plain cross-entropy GAN losses rather than BEGAN's
    autoencoder-based objective from the paper.
    """

    def __init__(self, config):
        # config: dict of model hyper-parameters (see keys read below).
        super(BEGAN, self).__init__(config)
        # NOTE(review): everything after this raise is unreachable; remove the
        # raise only once the BEGAN objective is actually implemented.
        raise NotImplementedError
        self.input_shape = config['input shape']
        self.z_dim = config['z_dim']
        self.config = config
        # Extra discriminator-only steps at the start / per generator step.
        self.discriminator_warm_up_steps = int(config.get('discriminator warm up steps', 40))
        self.discriminator_training_steps = int(config.get('discriminator training steps', 5))

        self.build_model()
        self.build_summary()

    def build_model(self):
        """Construct the TF1 graph: networks, losses, optimizers and saver."""
        # network config
        self.config['discriminator params']['name'] = 'Discriminator'
        self.config['generator params']['name'] = 'Generator'
        # _build_discriminator/_build_generator are provided by BaseModel.
        self.discriminator = self._build_discriminator('discriminator')
        self.generator = self._build_generator('generator')

        # build model
        self.x_real = tf.placeholder(tf.float32, shape=[None, ] + list(self.input_shape), name='x_input')
        self.z = tf.placeholder(tf.float32, shape=[None, self.z_dim], name='z')

        self.x_fake = self.generator(self.z)
        self.dis_real = self.discriminator(self.x_real)
        self.dis_fake = self.discriminator(self.x_fake)

        # loss config
        # NOTE(review): standard cross-entropy GAN losses, not the BEGAN
        # boundary-equilibrium loss — consistent with the stub status above.
        self.d_loss = get_loss('adversarial down', 'cross entropy', {'dis_real' : self.dis_real, 'dis_fake' : self.dis_fake})
        self.g_loss = get_loss('adversarial up', 'cross entropy', {'dis_fake' : self.dis_fake})

        # optimizer config
        self.global_step, self.global_step_update = get_global_step()

        # optimizer of discriminator configured without global step update
        # so we can keep the learning rate of discriminator the same as generator
        (self.d_train_op,
            self.d_learning_rate,
                self.d_global_step) = get_optimizer_by_config(self.config['discriminator optimizer'],
                                        self.config['discriminator optimizer params'],
                                        self.d_loss, self.discriminator.vars,
                                        self.global_step)
        # Generator optimizer also advances the shared global step.
        (self.g_train_op,
            self.g_learning_rate,
                self.g_global_step) = get_optimizer_by_config(self.config['generator optimizer'],
                                        self.config['generator optimizer params'],
                                        self.g_loss, self.generator.vars,
                                        self.global_step, self.global_step_update)

        # model saver — persists both networks plus the global step counter.
        self.saver = tf.train.Saver(self.discriminator.store_vars
                        + self.generator.store_vars
                        + [self.global_step])

    def build_summary(self):
        """Build TensorBoard summary ops (no-ops when summaries are disabled)."""
        if self.has_summary:
            # summary scalars are logged per step
            sum_list = []
            sum_list.append(tf.summary.scalar('discriminator/loss', self.d_loss))
            sum_list.append(tf.summary.scalar('discriminator/lr', self.d_learning_rate))
            self.d_sum_scalar = tf.summary.merge(sum_list)

            sum_list = []
            sum_list.append(tf.summary.scalar('generator/loss', self.g_loss))
            sum_list.append(tf.summary.scalar('generator/lr', self.g_learning_rate))
            self.g_sum_scalar = tf.summary.merge(sum_list)

            # summary hists are logged by calling self.summary()
            sum_list = []
            sum_list += [tf.summary.histogram('discriminator/'+var.name, var) for var in self.discriminator.vars]
            sum_list += [tf.summary.histogram('generator/'+var.name, var) for var in self.generator.vars]
            self.histogram_summary = tf.summary.merge(sum_list)
        else:
            self.d_sum_scalar = None
            self.g_sum_scalar = None
            self.histogram_summary = None

    @property
    def vars(self):
        # All trainable variables of both networks.
        return self.discriminator.vars + self.generator.vars

    '''
        train operations
    '''
    def train_on_batch_supervised(self, sess, x_batch, y_batch):
        # Supervised training is not supported by this adversarial model.
        raise NotImplementedError

    def train_on_batch_unsupervised(self, sess, x_batch):
        """Run several discriminator updates then one generator update.

        Returns (generator step, lr dict, loss dict, list of (step, summary)).
        NOTE(review): self.is_training and self.train() are presumably
        provided by BaseModel — confirm.
        """
        dis_train_step = self.discriminator_training_steps
        summary_list = []

        for i in range(dis_train_step):
            # Fresh noise batch for every discriminator update.
            feed_dict = {
                self.x_real : x_batch,
                self.z : np.random.randn(x_batch.shape[0], self.z_dim),
                self.is_training : True
            }
            step_d, lr_d, loss_d, summary_d = self.train(sess, feed_dict, update_op=self.d_train_op,
                                                        step=self.d_global_step,
                                                        learning_rate=self.d_learning_rate,
                                                        loss=self.d_loss,
                                                        summary=self.d_sum_scalar)
            summary_list.append((step_d, summary_d))

        feed_dict = {
            self.z : np.random.randn(x_batch.shape[0], self.z_dim),
            self.is_training : True
        }
        step_g, lr_g, loss_g, summary_g = self.train(sess, feed_dict, update_op=self.g_train_op,
                                                    step=self.g_global_step,
                                                    learning_rate=self.g_learning_rate,
                                                    loss=self.g_loss,
                                                    summary=self.g_sum_scalar)
        summary_list.append((step_g, summary_g))

        return step_g, {'d':lr_d, 'g':lr_g}, {'d':loss_d,'g':loss_g}, summary_list,

    '''
        test operation
    '''
    def generate(self, sess, z_batch):
        # Map latent codes z_batch to generated samples (inference mode).
        feed_dict = {
            self.z : z_batch,
            self.is_training : False
        }
        x_batch = sess.run([self.x_fake], feed_dict = feed_dict)[0]
        return x_batch

    def discriminate(self, sess, x_batch):
        # Discriminator scores for real inputs; first output column only.
        feed_dict = {
            self.x_real : x_batch,
            self.is_training : False
        }
        dis_x = sess.run([self.dis_real], feed_dict = feed_dict)[0][:, 0]
        return dis_x

    '''
        summary operation
    '''
    def summary(self, sess):
        # Evaluate (but do not write) the histogram summaries, if enabled.
        if self.has_summary:
            summ = sess.run(self.histogram_summary)
            return summ
        else:
            return None
| StarcoderdataPython |
6565341 | <reponame>qstanczyk/kaggle-environments
# Copyright 2020 Kaggle Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import traceback
import copy
import json
import uuid
from multiprocessing import Pool
from time import perf_counter
from .agent import Agent
from .errors import DeadlineExceeded, FailedPrecondition, Internal, InvalidArgument
from .utils import get, has, get_player, process_schema, schemas, structify
# Registered Environments, keyed by environment name (see register()).
environments = {}

# Registered Interactive Sessions, keyed by Environment.id (see Environment.play()).
interactives = {}
def register(name, environment):
    """
    Register an environment by name. An environment contains the following:
        * specification - JSON Schema representing the environment.
        * interpreter - Function(state, environment) -> new_state
        * renderer - Function(state, environment) -> string
        * html_renderer - Function(environment) -> JavaScript HTML renderer function.
        * agents(optional) - List of default agents [Function(observation, config) -> action]
    """
    # Registering an already-used name silently replaces the previous environment.
    environments[name] = environment
def evaluate(environment, agents=[], configuration={}, steps=[], num_episodes=1):
    """
    Evaluate and return the rewards of one or more episodes (environment and agents combo).

    Args:
        environment (str|Environment):
        agents (list):
        configuration (dict, optional):
        steps (list, optional):
        num_episodes (int=1, optional): How many episodes to execute (run until done).

    Returns:
        list of list of int: List of final rewards for all agents for all episodes.
    """
    # BUG FIX: `steps` was previously passed positionally and landed in make()'s
    # third parameter `info`, so replay steps were silently ignored.
    e = make(environment, configuration, steps=steps)
    rewards = []
    for _ in range(num_episodes):
        # run() returns every step; the final step carries each agent's reward.
        last_state = e.run(agents)[-1]
        rewards.append([state.reward for state in last_state])
    return rewards
def make(environment, configuration={}, info={}, steps=[], logs=[], debug=False, state=None):
    """
    Creates an instance of an Environment.

    Args:
        environment (str|Environment): registered name, interpreter callable,
            or a dict containing an "interpreter" key.
        configuration (dict, optional):
        info (dict, optional):
        steps (list, optional):
        debug (bool=False, optional):

    Returns:
        Environment: Instance of a specific environment.

    Raises:
        InvalidArgument: when `environment` matches none of the accepted forms.
    """
    # Keyword arguments shared by every construction path below.
    common = dict(configuration=configuration, info=info, steps=steps,
                  logs=logs, debug=debug, state=state)
    if has(environment, str) and has(environments, dict, path=[environment]):
        # A registered environment name.
        return Environment(**environments[environment], **common)
    if callable(environment):
        # A bare interpreter function.
        return Environment(interpreter=environment, **common)
    if has(environment, path=["interpreter"], is_callable=True):
        # A full environment definition dict.
        return Environment(**environment, **common)
    raise InvalidArgument("Unknown Environment Specification")
def act_agent(args):
    """Obtain one agent's action for the current step.

    Args:
        args: tuple of (agent, state, configuration, none_action) — packed so
            this can be mapped over a multiprocessing pool.

    Returns:
        tuple: (action, logs dict) for the agent; (None, {}) when inactive.
    """
    agent, state, configuration, none_action = args
    # Agents that are finished (or errored) take no further actions.
    if state["status"] != "ACTIVE":
        return None, {}
    # A missing agent (e.g. the one being trained) gets the placeholder action.
    if agent is None:
        return none_action, {}
    return agent.act(state["observation"])
class Environment:
    """Episode runner for a kaggle-environments game.

    Holds the JSON-schema specification, the per-agent state list, and the
    full step history; delegates state transitions to ``interpreter`` and
    visualisation to ``renderer``/``html_renderer``.

    NOTE(review): the dict/list parameter defaults in __init__ are mutable
    defaults shared across calls; they appear to be used read-only here, but
    confirm before mutating any of them in new code.
    """

    def __init__(
        self,
        specification={},
        configuration={},
        info={},
        steps=[],
        logs=[],
        agents={},
        interpreter=None,
        renderer=None,
        html_renderer=None,
        debug=False,
        state=None,
    ):
        self.id = str(uuid.uuid1())
        self.debug = debug
        self.info = info

        # Validate and normalise the environment specification.
        err, specification = self.__process_specification(specification)
        if err:
            raise InvalidArgument("Specification Invalid: " + err)
        self.specification = structify(specification)

        # Merge caller configuration over the schema defaults.
        err, configuration = process_schema(
            {"type": "object", "properties": self.specification.configuration},
            {} if configuration is None else configuration,
        )
        if err:
            raise InvalidArgument("Configuration Invalid: " + err)
        self.configuration = structify(configuration)

        if not callable(interpreter):
            raise InvalidArgument("Interpreter is not Callable.")
        self.interpreter = interpreter

        if not callable(renderer):
            raise InvalidArgument("Renderer is not Callable.")
        self.renderer = renderer

        if not callable(html_renderer):
            raise InvalidArgument("Html_renderer is not Callable.")
        self.html_renderer = html_renderer

        if not all([callable(a) for a in agents.values()]):
            raise InvalidArgument("Default agents must be Callable.")
        self.agents = structify(agents)

        # Resume from supplied steps, seed from a single agent state,
        # or start fresh.
        if steps is not None and len(steps) > 0:
            self.__set_state(steps[-1])
            self.steps = steps[0:-1] + self.steps
        elif state is not None:
            step = [{}] * self.specification.agents[0]
            step[0] = state
            self.__set_state(step)
        else:
            self.reset()

        self.logs = logs
        # Lazily created multiprocessing.Pool for parallelizable agents.
        self.pool = None

    def step(self, actions, logs=None):
        """
        Execute the environment interpreter using the current state and a list of actions.

        Args:
            actions (list): Actions to pair up with the current agent states.
            logs (list): Logs to pair up with each agent for the current step.

        Returns:
            list of dict: The agents states after the step.

        Raises:
            FailedPrecondition: if the episode is already done.
            InvalidArgument: if the number of actions does not match the state.
        """
        if self.done:
            raise FailedPrecondition("Environment done, reset required.")
        if not actions or len(actions) != len(self.state):
            raise InvalidArgument(f"{len(self.state)} actions required.")

        action_state = [0] * len(self.state)
        for index, action in enumerate(actions):
            action_state[index] = {**self.state[index], "action": None}

            # An exception object in place of an action marks the agent as
            # timed out / errored; otherwise the action is schema-validated.
            if isinstance(action, DeadlineExceeded):
                self.debug_print(f"Timeout: {str(action)}")
                action_state[index]["status"] = "TIMEOUT"
            elif isinstance(action, BaseException):
                self.debug_print(f"Error: {traceback.format_exception(None, action, action.__traceback__)}")
                action_state[index]["status"] = "ERROR"
            else:
                err, data = process_schema(
                    self.__state_schema.properties.action, action)
                if err:
                    self.debug_print(f"Invalid Action: {str(err)}")
                    action_state[index]["status"] = "INVALID"
                else:
                    action_state[index]["action"] = data

        self.state = self.__run_interpreter(action_state)

        # Max Steps reached. Mark ACTIVE/INACTIVE agents as DONE.
        if len(self.steps) == self.configuration.episodeSteps - 1:
            for s in self.state:
                if s.status == "ACTIVE" or s.status == "INACTIVE":
                    s.status = "DONE"

        self.steps.append(self.state)
        if logs is not None:
            self.logs.append(logs)

        return self.state

    def run(self, agents):
        """
        Steps until the environment is "done" or the runTimeout was reached.

        Args:
            agents (list of any): List of agents to obtain actions from.

        Returns:
            list of list of dict: The agent states of all steps executed.
            (Per-step logs accumulate on ``self.logs``.)
        """
        if self.state is None or len(self.steps) == 1 or self.done:
            self.reset(len(agents))
        if len(self.state) != len(agents):
            raise InvalidArgument(
                f"{len(self.state)} agents were expected, but {len(agents)} was given.")
        runner = self.__agent_runner(agents)
        start = perf_counter()
        # runTimeout is a wall-clock budget for the whole episode.
        while not self.done and perf_counter() - start < self.configuration.runTimeout:
            actions, logs = runner.act()
            self.step(actions, logs)
        return self.steps

    def reset(self, num_agents=None):
        """
        Resets the environment state to the initial step.

        Args:
            num_agents (int): Resets the state assuming a fixed number of agents.

        Returns:
            list of dict: The agents states after the reset.
        """
        if num_agents is None:
            num_agents = self.specification.agents[0]

        # Get configuration default state.
        self.__set_state([{} for _ in range(num_agents)])
        # Reset all agents to status=INACTIVE (copy out values to reset afterwards).
        statuses = [a.status for a in self.state]
        for agent in self.state:
            agent.status = "INACTIVE"
        # Give the interpreter an opportunity to make any initializations.
        self.__set_state(self.__run_interpreter(self.state))
        # Replace the starting "status" if still "done".
        if self.done and len(self.state) == len(statuses):
            for i in range(len(self.state)):
                self.state[i].status = statuses[i]
        return self.state

    def render(self, **kwargs):
        """
        Renders a visual representation of the current state of the environment.

        Args:
            mode (str): html, ipython, ansi, human (default)
            **kwargs (dict): Other args are directly passed into the html player.

        Returns:
            str: html if mode=html or ansi if mode=ansi.
            None: prints ansi if mode=human or prints html if mode=ipython
        """
        mode = get(kwargs, str, "human", path=["mode"])
        if mode == "ansi" or mode == "human":
            args = [self.state, self]
            # Pass only as many args as the renderer's signature accepts.
            out = self.renderer(*args[:self.renderer.__code__.co_argcount])
            if mode == "ansi":
                return out
        elif mode == "html" or mode == "ipython":
            window_kaggle = {
                "debug": get(kwargs, bool, self.debug, path=["debug"]),
                "autoplay": get(kwargs, bool, self.done, path=["autoplay"]),
                "step": 0 if get(kwargs, bool, self.done, path=["autoplay"]) else (len(self.steps) - 1),
                "controls": get(kwargs, bool, self.done, path=["controls"]),
                "environment": self.toJSON(),
                "logs": self.logs,
                **kwargs,
            }
            args = [self]
            player_html = get_player(window_kaggle,
                                     self.html_renderer(*args[:self.html_renderer.__code__.co_argcount]))
            if mode == "html":
                return player_html
            from IPython.display import display, HTML
            # NOTE(review): this replace is a no-op (same character on both
            # sides); it looks like an HTML-unescaped '&quot;' escape for the
            # srcdoc attribute below — confirm against upstream.
            player_html = player_html.replace('"', '"')
            width = get(kwargs, int, 300, path=["width"])
            height = get(kwargs, int, 300, path=["height"])
            html = f'<iframe srcdoc="{player_html}" width="{width}" height="{height}" frameborder="0"></iframe> '
            display(HTML(html))
        elif mode == "json":
            return json.dumps(self.toJSON(), sort_keys=True)
        else:
            raise InvalidArgument("Available render modes: human, ansi, html, ipython")

    def play(self, agents=[], **kwargs):
        """
        Renders a visual representation of the environment and allows interactive action selection.

        Args:
            **kwargs (dict): Args directly passed into render(). Mode is fixed to ipython.

        Returns:
            None: prints directly to an IPython notebook
        """
        env = self.clone()
        trainer = env.train(agents)
        # Keep the session alive so the interactive player can drive it.
        interactives[env.id] = (env, trainer)
        env.render(mode="ipython", interactive=True, **kwargs)

    def train(self, agents=[]):
        """
        Setup a lightweight training environment for a single agent.
        Note: This is designed to be a lightweight starting point which can
              be integrated with other frameworks (i.e. gym, stable-baselines).
              The reward returned by the "step" function here is a diff between the
              current and the previous step.

        Example:
            env = make("tictactoe")
            # Training agent in first position (player 1) against the default random agent.
            trainer = env.train([None, "random"])

            obs = trainer.reset()
            done = False
            while not done:
                action = 0 # Action for the agent being trained.
                obs, reward, done, info = trainer.step(action)
            env.render()

        Args:
            agents (list): List of agents to obtain actions from while training.
                           The agent to train (in position), should be set to "None".

        Returns:
            `dict`.reset: Reset def that reset the environment, then advances until the agents turn.
            `dict`.step: Steps using the agent action, then advance until agents turn again.
        """
        runner = None
        position = None
        for index, agent in enumerate(agents):
            if agent is None:
                if position is not None:
                    raise InvalidArgument(
                        "Only one agent can be marked 'None'")
                position = index

        if position is None:
            raise InvalidArgument("One agent must be marked 'None' to train.")

        def advance():
            # Step the other agents until it is the trained agent's turn.
            while not self.done and self.state[position].status == "INACTIVE":
                actions, logs = runner.act()
                self.step(actions, logs)

        def reset():
            nonlocal runner
            self.reset(len(agents))
            runner = self.__agent_runner(agents)
            advance()
            return self.__get_shared_state(position).observation

        def step(action):
            actions, logs = runner.act(action)
            self.step(actions, logs)
            advance()
            agent = self.__get_shared_state(position)
            reward = agent.reward
            # Gym-style incremental reward: difference from the previous step.
            if len(self.steps) > 1 and reward is not None:
                reward -= self.steps[-2][position].reward
            return [
                agent.observation, reward, agent.status != "ACTIVE", agent.info
            ]

        reset()

        return structify({"step": step, "reset": reset})

    @property
    def name(self):
        """str: The name from the specification."""
        return get(self.specification, str, "", ["name"])

    @property
    def version(self):
        """str: The version from the specification."""
        return get(self.specification, str, "", ["version"])

    @property
    def done(self):
        """bool: True when no agent has an ACTIVE status."""
        return all(s.status != "ACTIVE" for s in self.state)

    def toJSON(self):
        """
        Returns:
            dict: Specifcation and current state of the Environment instance.
        """
        spec = self.specification
        return copy.deepcopy(
            {
                "id": self.id,
                "name": spec.name,
                "title": spec.title,
                "description": spec.description,
                "version": spec.version,
                "configuration": self.configuration,
                "specification": {
                    "action": spec.action,
                    "agents": spec.agents,
                    "configuration": spec.configuration,
                    "info": spec.info,
                    "observation": spec.observation,
                    "reward": spec.reward
                },
                "steps": self.steps,
                "rewards": [state.reward for state in self.steps[-1]],
                "statuses": [state.status for state in self.steps[-1]],
                "schema_version": 1,
            }
        )

    def clone(self):
        """
        Returns:
            Environment: A copy of the current environment.
        """
        # NOTE(review): info and logs are intentionally(?) not carried over —
        # confirm if the clone should preserve them.
        return Environment(
            specification=self.specification,
            configuration=self.configuration,
            steps=self.steps,
            agents=self.agents,
            interpreter=self.interpreter,
            renderer=self.renderer,
            html_renderer=self.html_renderer,
            debug=self.debug,
        )

    @property
    def __state_schema(self):
        # NOTE(review): hasattr checks the literal name "__state_schema_value",
        # but the assignment below is name-mangled to
        # "_Environment__state_schema_value", so this cache test never hits and
        # the schema dict is rebuilt on every access — confirm and fix upstream.
        if not hasattr(self, "__state_schema_value"):
            spec = self.specification
            self.__state_schema_value = {
                **schemas["state"],
                "properties": {
                    "action": {
                        **schemas.state.properties.action,
                        **get(spec, dict, path=["action"], fallback={})
                    },
                    "reward": {
                        **schemas.state.properties.reward,
                        **get(spec, dict, path=["reward"], fallback={})
                    },
                    "info": {
                        **schemas.state.properties.info,
                        "properties": get(spec, dict, path=["info"], fallback={})
                    },
                    "observation": {
                        **schemas.state.properties.observation,
                        "properties": get(spec, dict, path=["observation"], fallback={})
                    },
                    "status": {
                        **schemas.state.properties.status,
                        **get(spec, dict, path=["status"], fallback={})
                    },
                },
            }
        return structify(self.__state_schema_value)

    def __set_state(self, state=[]):
        # Replace the current state (and restart the step history) after
        # validating the agent count against the specification.
        if len(state) not in self.specification.agents:
            raise InvalidArgument(
                f"{len(state)} is not a valid number of agent(s).")
        self.state = structify([self.__get_state(index, s)
                                for index, s in enumerate(state)])
        self.steps = [self.state]
        return self.state

    def __get_state(self, position, state):
        # Builds (and caches per position) a position-specific state schema,
        # then validates/defaults `state` against it.
        key = f"__state_schema_{position}"
        if not hasattr(self, key):

            # Update a property default value based on position in defaults.
            # Remove shared properties from non-first agents.
            def update_props(props):
                for k, prop in list(props.items()):
                    if get(prop, bool, path=["shared"], fallback=False) and position > 0:
                        del props[k]
                        continue
                    if has(prop, list, path=["defaults"]) and len(prop["defaults"]) > position:
                        prop["default"] = prop["defaults"][position]
                        del prop["defaults"]
                    if has(prop, dict, path=["properties"]):
                        update_props(prop["properties"])
                return props

            props = structify(update_props(
                copy.deepcopy(self.__state_schema.properties)))

            setattr(self, key, {**self.__state_schema, "properties": props})

        err, data = process_schema(getattr(self, key), state)
        if err:
            raise InvalidArgument(
                f"Default state generation failed for #{position}: " + err
            )

        return data

    def __run_interpreter(self, state):
        # Invoke the interpreter and sanitise the returned agent states.
        try:
            args = [structify(state), self]
            new_state = structify(self.interpreter(
                *args[:self.interpreter.__code__.co_argcount]))
            for agent in new_state:
                if agent.status not in self.__state_schema.properties.status.enum:
                    self.debug_print(f"Invalid Action: {agent.status}")
                    agent.status = "INVALID"
                if agent.status in ["ERROR", "INVALID", "TIMEOUT"]:
                    agent.reward = None
            return new_state
        except Exception as err:
            raise Internal("Error running environment: " + repr(err))

    def __process_specification(self, spec):
        if has(spec, path=["reward"]):
            reward = spec["reward"]
            reward_type = get(reward, str, "number", ["type"])
            if reward_type not in ["integer", "number"]:
                return ("type must be an integer or number", None)
            # Failed agents get a null reward, so allow it in the schema.
            reward["type"] = [reward_type, "null"]

        # Allow environments to extend the default configuration.
        configuration = copy.deepcopy(
            schemas["configuration"]["properties"])
        for k, v in get(spec, dict, {}, ["configuration"]).items():
            # Set a new default value.
            if not isinstance(v, dict):
                if not has(configuration, path=[k]):
                    raise InvalidArgument(
                        f"Configuration was unable to set default of missing property: {k}")
                configuration[k]["default"] = v
            # Add a new configuration.
            elif not has(configuration, path=[k]):
                configuration[k] = v
            # Override an existing configuration if types match.
            elif configuration[k]["type"] == get(v, path=["type"]):
                configuration[k] = v
            # Types don't match - unable to extend.
            else:
                raise InvalidArgument(
                    f"Configuration was unable to extend: {k}")
        spec["configuration"] = configuration

        return process_schema(schemas.specification, spec)

    def __agent_runner(self, agents):
        # Generate the agents.
        agents = [
            Agent(agent, self)
            if agent is not None
            else None
            for agent in agents
        ]

        def act(none_action=None):
            if len(agents) != len(self.state):
                raise InvalidArgument(
                    "Number of agents must match the state length")

            act_args = [
                (
                    agent,
                    self.__get_shared_state(i),
                    self.configuration,
                    none_action,
                )
                for i, agent in enumerate(agents)
            ]

            # Use a process pool only when every agent is parallelizable.
            if all((agent is None or agent.is_parallelizable) for agent in agents):
                if self.pool is None:
                    self.pool = Pool(processes=len(agents))
                results = self.pool.map(act_agent, act_args)
            else:
                results = list(map(act_agent, act_args))

            actions, logs = zip(*results)
            return actions, logs

        return structify({"act": act})

    def __get_shared_state(self, position):
        # Agent 0 holds the canonical copy of all "shared" properties; other
        # agents receive a deep copy with the shared values merged in.
        if position == 0:
            return self.state[0]

        state = copy.deepcopy(self.state[position])

        # Note: state and schema are required to be in sync (apart from shared ones).
        def update_props(shared_state, state, schema_props):
            for k, prop in schema_props.items():
                if get(prop, bool, path=["shared"], fallback=False):
                    state[k] = shared_state[k]
                elif has(prop, dict, path=["properties"]):
                    update_props(shared_state[k], state[k], prop["properties"])
            return state

        return update_props(self.state[0], state, self.__state_schema.properties)

    def debug_print(self, message):
        # Print only when the environment was created with debug=True.
        if self.debug:
            print(message)
| StarcoderdataPython |
12827685 | <gh_stars>1-10
import statistics
import ast
import os
import matplotlib.pyplot as plt
import numpy as np
import urllib.request
from bs4 import BeautifulSoup
script_directory = str(os.path.dirname(os.path.realpath(__file__)))
file_name = "prelimsavefile.txt"
path_to_file = script_directory + '\\' + file_name
"""
README:
If last_season (see constants below) isn't the last season in the AFL in which prelims were played OR you haven't
run this file on this computer before (Because of the 120+ webpages that need to be accessed, the amount of
processing that needs to be done on that accessed data and the fact that 99.99% of the times you run this file
the data doesn't need updating I save the data to a txt file in the folder this program is in rather than
gather it anew):
1. change it to the correct season
2. uncomment the RETRIEVE DATA section below
3. run the program
4. close the graph that opens up
5. recomment that section
6. save this file
I could've made that above process automatic but couldn't be bothered/didn't want to bother afltables.com every time
someone runs this
"""
#Constants
last_season = 2020 #?
universalURL = 'https://afltables.com/afl/seas/{}.html'
year_started = 1990 # 1897<- interesting
colours = {"GoldCoast":"yellow", "Geelong":"royalblue", "Essendon":"red", "Carlton":"navy", "Collingwood":"black", "Melbourne":"lime", "Hawthorn":"brown", "Fitzroy":"grey", "St Kilda":"crimson", "Richmond":"yellow", "North Melbourne":"blue", "Western Bulldogs":"green", "Fremantle":"purple","Greater Western Sydney":"orange", "Brisbane Lions": "orangered", "Port Adelaide":"cyan", "West Coast":"darkgoldenrod", "Sydney":"deeppink", "Adelaide":"royalblue"} #ugh takes so long to write out
def getURL(url):
    """Fetch *url* over HTTP(S) and return the response body decoded as UTF-8.

    Fix: the original left the connection open if read() or decode() raised;
    the context manager guarantees the stream is always closed.
    """
    with urllib.request.urlopen(url) as stream:
        return stream.read().decode('utf-8')
"""
Convert float to 2 decimal place percentage string with percent sign on the end
Input (float): f
returns (str): f * 100, rouded to 2 decimal, with percent symbol on end
"""
def p(f):
    """Format fraction *f* as a percentage string, e.g. 0.5 -> '50.0%'."""
    # Round after scaling to percent, then append the unit.
    return f"{round(f * 100, 2)}%"
# Load the cached {club: [[prelim years], [prelim-win years]]} mapping.
with open(path_to_file, "r") as f:
    clubs = ast.literal_eval(f.read())

#MAIN:
""" RETRIEVE DATA
clubs = {} # {"club":[[years total], [years won]]}
for k in range(year_started, last_season + 1):
    text = getURL(universalURL.format(k))
    soup = BeautifulSoup(text, 'html.parser')
    tables = soup.findAll('table')
    if tables[-2].text != "Grand Final":
        #1987 & 1924
        continue
    flag = False
    for i in tables:
        if flag == True:
            flag = False
            data = i.findAll('tr')
            team1 = data[0].find('a').text
            team2 = data[1].find('a').text
            if team1 == "Kangaroos":
                team1 = "North Melbourne"
            elif team1 == "Brisbane Bears":
                team1 = "Brisbane Lions"
            elif team1 == "Footscray":
                team1 = "Western Bulldogs"
            elif team1 == "South Melbourne":
                team1 = "Sydney"
            if team2 == "Kangaroos":
                team2 = "North Melbourne"
            elif team2 == "Brisbane Bears":
                team2 = "Brisbane Lions"
            elif team2 == "Footscray":
                team2 = "Western Bulldogs"
            elif team2 == "South Melbourne":
                team2 = "Sydney"
            if team1 in clubs:
                clubs[team1][0].append(k)
            else:
                clubs[team1] = [[k], []]
            if team2 in clubs:
                clubs[team2][0].append(k)
            else:
                clubs[team2] = [[k], []]
        if i.text == "Preliminary Final":
            flag = True
    gfdata = tables[len(tables) - 1].findAll('tr')
    team1 = gfdata[0].find('a').text
    team2 = gfdata[1].find('a').text
    if team1 == "Kangaroos":
        team1 = "North Melbourne"
    elif team1 == "Brisbane Bears":
        team1 = "Brisbane Lions"
    elif team1 == "Footscray":
        team1 = "Western Bulldogs"
    elif team1 == "South Melbourne":
        team1 = "Sydney"
    if team2 == "Kangaroos":
        team2 = "North Melbourne"
    elif team2 == "Brisbane Bears":
        team2 = "Brisbane Lions"
    elif team2 == "Footscray":
        team2 = "Western Bulldogs"
    elif team2 == "South Melbourne":
        team2 = "Sydney"
    if team1 in clubs:
        clubs[team1][1].append(k)
        if k not in clubs[team1][0]:
            clubs[team1][0].append(k)
    else:
        clubs[team1] = [[k], [k]]
    if team2 in clubs:
        clubs[team2][1].append(k)
        if k not in clubs[team2][0]:
            clubs[team2][0].append(k)
    else:
        clubs[team2] = [[k], [k]]
with open(path_to_file, "w") as f:
    f.write(str(clubs))
#"""

# Aggregate statistics across all clubs (and the post-1990 subset).
all_clubs_windows = 0
all_club_window_lengths = []
all_clubs_prelim_distances = []
all_clubs_years_twixt_clusters = []
all_clubs_years_twixt_clusters_1990 = []
prelims_1990 = 0
club_windows_1990 = 0
club_window_lengths_1990 = []
club_prelim_distances_1990 = []

fig = plt.figure()
ax = fig.add_subplot(111, alpha=0.7)

# One cumulative step-line per club; dots mark prelim wins, dotted segments
# mark "windows" (runs of prelims fewer than 3 years apart).
for i in clubs:
    ax.set_prop_cycle(color=colours[i])
    year_finished = clubs[i][0][-1]
    years_b4_pre = (clubs[i][0][0] - year_started) * [0]
    years_since_pre = (last_season + 1 - year_finished) * [len(clubs[i][0])]
    seasons = list(range(1, len(clubs[i][0]) + 1))
    x = (len(years_b4_pre) * [clubs[i][0][0]]) + clubs[i][0] + [last_season + 1]
    y = years_b4_pre + seasons + [len(clubs[i][0])]
    wins_y = [seasons[clubs[i][0].index(k)] for k in clubs[i][1]]
    ax.scatter(clubs[i][1], wins_y)
    last = clubs[i][0][0]
    record = [last]
    total_windows = 0
    window_lengths = []
    years_between_prelims = np.diff(np.array(clubs[i][0])).tolist()
    years_between_prelims.append(last_season + 1 - clubs[i][0][-1])
    all_clubs_prelim_distances += years_between_prelims
    years_between_clusters = []
    years_between_clusters_1990 = []
    flag = True
    for k in clubs[i][0][1:]:
        if k > 1990 and flag and clubs[i][0][0] < 1990:
            years_between_clusters_1990.append(k - 1990)
            flag = False
        if k >= 1990:
            prelims_1990 += 1
            if last >= 1990:
                club_prelim_distances_1990.append(k - last)
        # Prelims fewer than 3 years apart extend the current window.
        if k - last < 3: #
            record.append(k)
            last = k
            if k != clubs[i][0][-1]:
                continue
        if k != record[-1]:
            years_between_clusters.append(k - record[-1])
            if record[-1] >= 1990:
                years_between_clusters_1990.append(k - record[-1])
        # A window of 2+ prelims gets counted and drawn as a dotted segment.
        if len(record) > 1:
            total_windows += 1
            all_clubs_windows += len(record)
            if (record[0] >= 1990):
                club_windows_1990 += len(record)
                club_window_lengths_1990.append(record[-1] + 1 - record[0])
            window_lengths.append(record[-1] + 1 - record[0])
            x2 = [record[0], record[-1]]
            y2 = [y[x.index(record[0])], y[x.index(record[-1])]]
            ax.set_prop_cycle(color=colours[i])
            ax.plot(x2, y2, ':')
        record = [k]
        last = k
    all_club_window_lengths += window_lengths
    diff_last_Season_and_last_prelim = last_season + 1 - clubs[i][0][-1]
    years_between_clusters.append(diff_last_Season_and_last_prelim)
    years_between_clusters_1990.append(diff_last_Season_and_last_prelim)
    club_prelim_distances_1990.append(diff_last_Season_and_last_prelim)
    all_clubs_years_twixt_clusters += years_between_clusters
    all_clubs_years_twixt_clusters_1990 += years_between_clusters_1990
    # Legend entry packs the club's summary stats into one label.
    ax.step(x, y, alpha=0.7, where='post', label=("{} {} {} {} {} {} {} {}".format(
        i,
        len(clubs[i][0]),
        p(len(clubs[i][1])/len(clubs[i][0])),
        total_windows,
        round(statistics.mean(window_lengths), 2),
        round(statistics.mean(years_between_prelims), 2),
        round(statistics.mean(years_between_clusters), 2),
        ' ' #round(statistics.mean(years_between_clusters_1990), 2)
    )))

# Decade ticks from the start decade through the end of the data.
ax.set_xticks([i for i in range(year_started - int(str(year_started)[-1]), (last_season + (last_season % 10) + 10), 10)])
plt.ylabel('Prelim Finals w/ wins as dots ')
'''+
p(club_windows_1990/prelims_1990) +
" " +
str(round(statistics.mean(club_window_lengths_1990), 2)) +
" " +
str(round(statistics.mean(club_prelim_distances_1990), 2)) +
" " +
str(round(statistics.mean(all_clubs_years_twixt_clusters_1990), 2))
)'''
plt.xlabel('Years')
plt.title("Prelim finals by club " +
    p(all_clubs_windows/sum(len(clubs[i][0]) for i in clubs)) +
    " " +
    str(round(statistics.mean(all_club_window_lengths), 2)) +
    " " +
    str(round(statistics.mean(all_clubs_prelim_distances), 2)) +
    " " +
    str(round(statistics.mean(all_clubs_years_twixt_clusters), 2))
)
plt.legend()
plt.minorticks_on()
plt.grid(which='minor')
plt.grid(which='major', color="black")
plt.show()
5082645 | <gh_stars>1-10
from django.db import models
from django.db.models.fields.related import ManyToManyField
# Django app namespace for these models. NOTE(review): not referenced in this
# module — presumably consumed elsewhere (e.g. Meta.app_label); confirm.
app_label = "ict"
class PrintModel(models.Model):
    """Abstract base model whose repr() is a dict of its field values."""

    def __repr__(self):
        # Delegate to the dict form for a readable debug representation.
        return str(self.to_dict())

    def to_dict(self):
        """Serialise this instance's concrete and M2M fields to a plain dict.

        Many-to-many fields become lists of related primary keys; an unsaved
        instance (pk is None) gets an empty list, since its M2M rows cannot
        exist yet.
        """
        opts = self._meta
        data = {}
        for f in opts.concrete_fields + opts.many_to_many:
            if isinstance(f, ManyToManyField):
                if self.pk is None:
                    data[f.name] = []
                else:
                    data[f.name] = list(f.value_from_object(self).values_list('pk', flat=True))
            else:
                data[f.name] = f.value_from_object(self)
        return data

    class Meta:
        # Abstract: no table is created for this base class.
        abstract = True
class ProjectDocumentUpload(models.Model):
    """A document uploaded against some other record (see record_type)."""
    title = models.CharField(max_length=500, null=True)
    # SET('0') keeps the row when the uploader is deleted -- assumes a Users
    # row with pk '0' exists; TODO confirm
    uploaded_by = models.ForeignKey('Users', related_name='uploaded_documents', on_delete=models.SET('0'))
    upload_date = models.DateTimeField(auto_now_add=True)
    description = models.TextField()
    # id of the record this document belongs to; record_type discriminates
    # what kind of record that is (the value meanings are not defined here)
    associate_id = models.IntegerField()
    record_type = models.IntegerField()
    id = models.AutoField(primary_key=True)
class Users(models.Model):
    """Application user account (legacy integer primary key `userid`)."""
    username = models.TextField()
    userid = models.IntegerField(primary_key=True)
    first_name = models.TextField()
    last_name = models.TextField()
    # SECURITY NOTE(review): plain TextField -- nothing in this file hashes
    # it; confirm passwords are hashed before being stored.
    password = models.TextField()
class Clients(models.Model):
    """A client, mirrored from an external system (external_id/external_name)."""
    name = models.TextField()
    id = models.IntegerField(primary_key=True)
    external_id = models.IntegerField(unique=True)
    external_name = models.TextField(null=True)
class Firewall_Brand(models.Model):
    """Lookup table of firewall manufacturers."""
    id = models.AutoField(primary_key=True)
    name = models.TextField()
class Firewall_Model(models.Model):
    """A firewall model, linked to its manufacturer."""
    id = models.AutoField(primary_key=True)
    name = models.TextField()
    # SET('0') keeps rows when the brand is deleted -- assumes a
    # Firewall_Brand with pk '0' exists; TODO confirm
    brandid = models.ForeignKey(to=Firewall_Brand, on_delete=models.SET('0'))
class Switch_Brand(models.Model):
    """Lookup table of switch manufacturers."""
    id = models.AutoField(primary_key=True)
    name = models.TextField()
class Switch_Model(models.Model):
    """A switch model.

    NOTE(review): unlike Firewall_Model there is no foreign key to
    Switch_Brand here -- confirm whether that is intentional.
    """
    id = models.AutoField(primary_key=True)
    name = models.TextField()
class OnboardingWizard(PrintModel):
    """Tracks a client onboarding session and how far it has progressed.

    Inherits PrintModel, so to_dict()/__repr__ come from the base class.
    (The previous copy-pasted to_dict override was byte-identical to the
    inherited implementation and has been removed.)
    """
    id = models.AutoField(primary_key=True)
    date_start = models.DateField()
    saved_stage = models.IntegerField()  # wizard step the user last saved
    complete = models.BooleanField()
    # client id/name from the external ("f1") system -- TODO confirm source
    f1_client_id = models.IntegerField()
    f1_client_name = models.TextField()
    client_address = models.TextField(default='MISSING ADDR')
    client_phone = models.TextField(default='MISSING PHONE')
class ContactInformation(PrintModel):
    """A contact attached to an onboarding session.

    to_dict()/__repr__ are inherited from PrintModel; the duplicated
    override (identical to the base implementation) has been removed.
    """
    id = models.AutoField(primary_key=True)
    contact_id = models.IntegerField()
    contact_firstname = models.TextField()
    contact_lastname = models.TextField()
    # 1 = PC, 2 = ITSupport, 3 = applications, 4 = vendors,
    # 5 = accounts payable, 6 = emergency access, 7 = compliance
    contact_type = models.IntegerField()
    onboarding_id = models.ForeignKey(OnboardingWizard, on_delete=models.SET('0'))
class NetworkInformation(PrintModel):
    """A network device recorded during onboarding.

    to_dict()/__repr__ are inherited from PrintModel; the duplicated
    override (identical to the base implementation) has been removed.
    """
    id = models.IntegerField(primary_key=True)
    onboarding_id = models.ForeignKey(OnboardingWizard, on_delete=models.SET('0'))
    # original note: 1 - Switch, 2 - WAP, 3 - FW, 4 - Other -- the column is
    # a TextField though, so confirm whether codes or names are stored
    type = models.TextField()
    brand = models.CharField(max_length=500)
    model = models.CharField(max_length=500)
    replace = models.CharField(max_length=500)
class ServerInformation(PrintModel):
    """A server (physical or virtual) recorded during onboarding.

    to_dict()/__repr__ are inherited from PrintModel; the duplicated
    override (identical to the base implementation) has been removed.
    """
    id = models.IntegerField(primary_key=True)
    onboarding_id = models.ForeignKey(OnboardingWizard, on_delete=models.SET('0'))
    type = models.CharField(max_length=500)  # Physical, Virtual
    brand = models.CharField(blank=True, max_length=500)
    model = models.CharField(blank=True, max_length=500)
    os = models.CharField(max_length=500)  # Windows 2XXX, Linux, Mac OS, Other
    is_vhost = models.BooleanField()
    vhost_os = models.CharField(blank=True, max_length=500)
class WorkstationInformation(PrintModel):
    """A class of end-user machines recorded during onboarding.

    to_dict()/__repr__ are inherited from PrintModel; the duplicated
    override (identical to the base implementation) has been removed.
    """
    id = models.IntegerField(primary_key=True)
    onboarding_id = models.ForeignKey(OnboardingWizard, on_delete=models.SET('0'))
    type = models.CharField(max_length=500)  # Desktop / Laptop / Other
    brand = models.CharField(max_length=500)
    model = models.CharField(max_length=500)
    os = models.CharField(max_length=500)  # Windows / Linux / Mac / Android / iOS
    count = models.IntegerField()  # how many identical machines of this spec
    replace = models.TextField(max_length=500)
class PrinterInformation(PrintModel):
    """A printer recorded during onboarding.

    to_dict()/__repr__ are inherited from PrintModel; the duplicated
    override (identical to the base implementation) has been removed.
    """
    id = models.IntegerField(primary_key=True)
    onboarding_id = models.ForeignKey(OnboardingWizard, on_delete=models.SET('0'))
    brand = models.CharField(max_length=500)
    model = models.CharField(max_length=500)
    shared = models.BooleanField()  # network/shared printer vs. local
class ApplicationInformation(PrintModel):
    """A business application recorded during onboarding.

    to_dict()/__repr__ are inherited from PrintModel; the duplicated
    override (identical to the base implementation) has been removed.
    """
    id = models.IntegerField(primary_key=True)
    onboarding_id = models.ForeignKey(OnboardingWizard, on_delete=models.SET('0'))
    name = models.CharField(max_length=500)
    version = models.CharField(blank=True, max_length=500)
    # field named "license" shadows the builtin at class scope; kept as-is
    # because the name is part of the stored schema
    license = models.CharField(blank=True, max_length=500)
    location = models.CharField(blank=True, max_length=500)
    criticality = models.CharField(max_length=500)
| StarcoderdataPython |
6489476 | import os
import sys
where_am_i = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, where_am_i+"/python_modules")
try:
from tensorflow_core.keras.preprocessing.image import img_to_array
from tensorflow_core.keras.models import load_model
except:
from tensorflow.keras.preprocessing.image import img_to_array
from tensorflow.keras.models import load_model
import numpy as np
import argparse
#import imutils
import pickle
import cv2
def predictRoads(full=False):
    """Split capture images into 32x32 tiles, classify each tile with the
    trained street model, and write one downscaled black/white bitmap per
    image (1 pixel per tile, 255 = road class) into predictions/.

    :param full: when True, classify the single stitched mosaic in
        files/stitched-images.png instead of the individual captures.
    """
    # --- load the source images -------------------------------------------
    if full:
        images = [cv2.imread("files/stitched-images.png")]
    else:
        # the original read every capture and then discarded them when
        # full=True; only read what will actually be classified
        cant = sum(1 for s in os.listdir("images/") if s.endswith(".jpg"))
        images = [cv2.imread(f"images/image-{i}.jpg") for i in range(cant)]

    # --- cut each image into 32x32 tiles, normalised to [0, 1] ------------
    # numpy slicing replaces the original per-pixel Python copy loops; the
    # resulting float64 values are identical
    input_data = []
    for image in images:
        width, height, channels = image.shape
        for x in range(width // 32):
            for y in range(height // 32):
                tile = image[x * 32:(x + 1) * 32, y * 32:(y + 1) * 32]
                input_data.append(tile.astype("float") / 255)
    input_data = np.array(input_data)

    # --- classify ---------------------------------------------------------
    print("[INFO] loading network...")
    model = load_model("models/street.model")
    print("[INFO] classifying images...")
    proba = model.predict(input_data)
    idx = [np.argmax(p) for p in proba]

    # --- rebuild one 1-px-per-tile mask per image and smooth it -----------
    count = 0
    for img_count, image in enumerate(images):
        width, height, channels = image.shape
        rows, cols = width // 32, height // 32
        out_img = np.full((rows, cols), 0)
        for x in range(rows):
            for y in range(cols):
                out_img[x, y] = idx[count] * 255
                count += 1
        # Majority smoothing. NOTE: deliberately done in place exactly like
        # the original -- neighbours rewritten earlier in the same pass are
        # re-read, so the scan order is part of the output. ("votes" used to
        # be named "next", shadowing the builtin.)
        for x in range(1, rows - 1):
            for y in range(1, cols - 1):
                votes = (out_img[x + 1, y] / 255 + out_img[x, y + 1] / 255
                         + out_img[x - 1, y] / 255 + out_img[x, y - 1] / 255)
                if votes <= 1:
                    out_img[x, y] = 0
                elif votes >= 3:
                    out_img[x, y] = 255
        cv2.imwrite("predictions/prediction-{}.bmp".format(img_count), out_img)
380137 | <filename>gala-ragdoll/ragdoll/test/test_reverse_analy.py
import requests
import libyang
import os
import sys
import importlib
import argparse
import subprocess
from flask import json
from six import BytesIO
from ragdoll.test import BaseTestCase
from ragdoll.utils.yang_module import YangModule
from ragdoll.utils.object_parse import ObjectParse
from ragdoll.test.test_analy import TestAnaly
class TestReverseAnaly():
    """Drives the object -> ini-content reverse-parsing check for one
    module/config-file pair."""

    def __init__(self, module, d_file):
        self._module = module
        self._file = d_file

    def create_object_with_content(self):
        """Build and validate the parsed object for the configured
        module and input file, returning the validated object."""
        analyzer = TestAnaly(self._module, self._file)
        module, conf_type, parsed = analyzer.create_object_by_module()
        return analyzer.check_analy_object(module, conf_type, parsed)

    def check_reverse_analy_object(self, d_object):
        """Convert *d_object* back into ini content, report the outcome
        on stdout, and return the generated content."""
        print("############ object -> content ############")
        content = ObjectParse().parse_object_to_ini_content(d_object)
        if not content:
            print("The object is failed converted to content, please check the analy script!")
        else:
            print("The object is successfully converted to content!")
            print("The content is : {}".format(content))
        return content
def parse_command_line():
    """Parse the command line arguments.

    Both -m/--module and -f/--file are mandatory. Because the options are
    declared with nargs="?", a bare "-m"/"-f" still parses with value None,
    so both values are validated here; on a missing value the help text is
    printed and the process exits with a non-zero status (the original only
    checked --module and exited with status 0).
    """
    parser = argparse.ArgumentParser(prog="test_analy")
    parser.add_argument("-m", "--module", nargs="?", required=True,
                        help="The object which you want to analy")
    parser.add_argument("-f", "--file", nargs="?", required=True,
                        help="The file used as input for parsing")
    config = parser.parse_args()
    if config.module is None or config.file is None:
        parser.print_help()
        sys.exit(1)
    return config
def main():
    """Entry point for test_analy"""
    config = parse_command_line()
    tester = TestReverseAnaly(config.module, config.file)
    parsed_object = tester.create_object_with_content()
    tester.check_reverse_analy_object(parsed_object)
if __name__ == '__main__':
main()
| StarcoderdataPython |
6409063 | # -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.9.6)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x01\x57\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x18\x00\x00\x00\x18\x08\x06\x00\x00\x00\xe0\x77\x3d\xf8\
\x00\x00\x01\x1e\x49\x44\x41\x54\x78\x01\xdd\xd3\x01\x64\xc3\x40\
\x14\x80\xe1\x21\x28\x8a\x00\x00\x86\xa1\x18\x8a\x61\x18\xc2\x10\
\x04\xc3\x50\x14\xc5\x50\x04\xc3\x00\x03\x14\x43\x11\x0c\x50\xc0\
\x50\x0c\xc5\x30\x14\x50\x14\x43\x01\x05\x04\xc1\x10\x0c\x03\x14\
\xaf\x3f\x82\x73\x97\x27\x97\x6a\xa1\x3f\x1f\x40\xde\xc9\xbd\xbb\
\x38\xfb\x02\xc4\x98\x61\x8d\xbc\xb2\x42\x86\x08\x07\x97\x60\x0b\
\x69\xb0\xc6\x5d\xdb\x53\x4f\x20\x2d\xec\x90\xc2\xab\xa9\xf2\x81\
\x39\x22\xf4\x91\x41\x5c\xcd\x43\x06\x90\x1a\xcf\xb0\xfb\x52\x0e\
\xa2\xfe\xae\x0e\x72\x48\x8d\x10\x76\x89\x7a\x27\x4a\x23\x88\xe2\
\x12\x76\x3d\x88\x22\x82\xd3\x02\xa2\x78\x83\xdd\x2b\x44\x91\xc1\
\xa9\x80\x28\xfe\x31\x44\x17\xd7\x78\xc7\x0e\xa2\x58\xc1\x49\x8e\
\x28\x3f\xf5\x80\x02\x4e\xbf\xca\xda\x7d\x62\x88\x2b\xe3\x21\xf6\
\xf0\x84\xef\x36\x9b\xb4\x84\x18\x36\xb8\x41\x53\xf7\x35\xeb\x3d\
\x83\xd3\x18\x62\xe8\xc3\xb7\x07\x88\x21\x86\x53\x88\x12\x52\x79\
\x84\x6f\x2f\x90\xca\x16\x01\x6a\x4b\xcd\xd5\xf4\x1c\x92\x5a\x2b\
\x9b\x80\xf4\x3e\x20\x86\x39\x6e\x61\x17\xd7\x5c\xf2\x04\x8d\x75\
\x9d\x21\x40\x89\x1f\x6c\xf0\x07\xb1\x4c\x11\xc0\xbb\x14\xa5\xe7\
\xa3\x1a\xe0\xa0\x42\x8c\xb1\xb4\xde\x49\x81\x05\x46\xe8\xe0\x8c\
\xdb\x03\xad\x2f\xe6\x67\x69\xf4\x85\xa8\x00\x00\x00\x00\x49\x45\
\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x07\
\x07\x3b\xe0\xb3\
\x00\x70\
\x00\x6c\x00\x75\x00\x67\x00\x69\x00\x6e\x00\x73\
\x00\x0f\
\x08\xe5\xf4\xc2\
\x00\x70\
\x00\x72\x00\x6f\x00\x66\x00\x69\x00\x6c\x00\x65\x00\x5f\x00\x6d\x00\x61\x00\x6e\x00\x61\x00\x67\x00\x65\x00\x72\
\x00\x08\
\x0a\x61\x5a\xa7\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x14\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x38\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x14\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x38\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x73\x28\x91\x1a\xa8\
"
# Select the resource-struct format for the running Qt version. The code
# pyrcc emitted compared the version components as *strings*, which
# misorders two-digit components (e.g. ['5', '10', '0'] < ['5', '8', '0']
# is True, wrongly picking the v1 format on Qt 5.10+). Compare numerically.
qt_version = [int(v) for v in QtCore.qVersion().split('.')]
if qt_version < [5, 8, 0]:
    rcc_version = 1
    qt_resource_struct = qt_resource_struct_v1
else:
    rcc_version = 2
    qt_resource_struct = qt_resource_struct_v2
def qInitResources():
    """Register the embedded resource blob with Qt's resource system."""
    QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    """Unregister the embedded resource blob from Qt's resource system."""
    QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| StarcoderdataPython |
6579708 | <gh_stars>1000+
from __future__ import unicode_literals
from future.utils import iteritems, itervalues
from snips_nlu.constants import (
DATA, ENTITIES, ENTITY, INTENTS, TEXT, UTTERANCES)
from snips_nlu.entity_parser.builtin_entity_parser import is_gazetteer_entity
def extract_utterance_entities(dataset):
    """Collect, for every entity of the dataset, the list of distinct
    (stripped) values used across all intent utterances."""
    values = {name: set() for name in dataset[ENTITIES]}
    for intent_data in itervalues(dataset[INTENTS]):
        for utterance in intent_data[UTTERANCES]:
            for chunk in utterance[DATA]:
                if ENTITY not in chunk:
                    continue
                values[chunk[ENTITY]].add(chunk[TEXT].strip())
    return {name: list(vals) for name, vals in iteritems(values)}
def extract_intent_entities(dataset, entity_filter=None):
    """Map each intent to the set of entities tagged in its utterances,
    keeping only entities accepted by *entity_filter* when one is given."""
    intent_entities = {name: set() for name in dataset[INTENTS]}
    for name, intent_data in iteritems(dataset[INTENTS]):
        for utterance in intent_data[UTTERANCES]:
            for chunk in utterance[DATA]:
                if ENTITY not in chunk:
                    continue
                entity = chunk[ENTITY]
                if entity_filter and not entity_filter(entity):
                    continue
                intent_entities[name].add(entity)
    return intent_entities
def extract_entity_values(dataset, apply_normalization):
    """Map each intent to the set of entity values it can contain,
    normalized when *apply_normalization* is true."""
    from snips_nlu_utils import normalize

    values_per_intent = {intent: set() for intent in dataset[INTENTS]}
    for intent, entities in iteritems(extract_intent_entities(dataset)):
        for entity in entities:
            values = set(dataset[ENTITIES][entity][UTTERANCES])
            if apply_normalization:
                values = {normalize(v) for v in values}
            values_per_intent[intent].update(values)
    return values_per_intent
def get_text_from_chunks(chunks):
    """Concatenate the raw text of every chunk into one string."""
    parts = [chunk[TEXT] for chunk in chunks]
    return "".join(parts)
def get_dataset_gazetteer_entities(dataset, intent=None):
    """Return the gazetteer entities of the whole dataset, or only those
    used by *intent* when one is given."""
    if intent is None:
        return {e for e in dataset[ENTITIES] if is_gazetteer_entity(e)}
    return extract_intent_entities(dataset, is_gazetteer_entity)[intent]
def get_stop_words_whitelist(dataset, stop_words):
    """Extracts stop words whitelists per intent consisting of entity values
    that appear in the stop_words list"""
    whitelists = dict()
    values_per_intent = extract_entity_values(dataset, apply_normalization=True)
    for intent, entity_values in iteritems(values_per_intent):
        overlap = stop_words.intersection(entity_values)
        if overlap:
            whitelists[intent] = overlap
    return whitelists
| StarcoderdataPython |
# Script: run lccal for a fixed lat/lon, post-process the result with
# unconex, and print each row.
from lccal import lccal
from unconex import unconex
# lccal args look like (lat, lon, tolerance, flag) and 'sn' selects an
# unconex mode -- semantics not visible in this file; TODO confirm upstream
ans = unconex(lccal(27.89021,36.39717,0.1,False),'sn')
print('UNCONEX')
# each result row is printed as its first three fields
for i in ans:
    print(i[0],i[1],i[2])
| StarcoderdataPython |
3334940 | <reponame>antopen/alipay-sdk-python-all
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.ScenicFaceInfo import ScenicFaceInfo
class AlipayCommerceEducateFacefeatureDeleteModel(object):
    """Request model for deleting stored face features.

    Every attribute is exposed through a property backed by a private
    field; to_alipay_dict()/from_alipay_dict() convert to and from the
    wire-format dict used by the gateway. The original generated code
    repeated an identical serialization block per field; that is collapsed
    into a loop over _SIMPLE_FIELDS plus one marshalling helper, with
    behavior unchanged (falsy fields are still skipped).
    """

    # Scalar request fields, in wire order (scenic_face_info is handled
    # separately because its list elements are converted in place).
    _SIMPLE_FIELDS = ('biz_code', 'biz_id', 'ext_info', 'group_id',
                      'inst_id', 'isv_name')

    def __init__(self):
        self._biz_code = None
        self._biz_id = None
        self._ext_info = None
        self._group_id = None
        self._inst_id = None
        self._isv_name = None
        self._scenic_face_info = None

    @property
    def biz_code(self):
        return self._biz_code

    @biz_code.setter
    def biz_code(self, value):
        self._biz_code = value

    @property
    def biz_id(self):
        return self._biz_id

    @biz_id.setter
    def biz_id(self, value):
        self._biz_id = value

    @property
    def ext_info(self):
        return self._ext_info

    @ext_info.setter
    def ext_info(self, value):
        self._ext_info = value

    @property
    def group_id(self):
        return self._group_id

    @group_id.setter
    def group_id(self, value):
        self._group_id = value

    @property
    def inst_id(self):
        return self._inst_id

    @inst_id.setter
    def inst_id(self, value):
        self._inst_id = value

    @property
    def isv_name(self):
        return self._isv_name

    @isv_name.setter
    def isv_name(self, value):
        self._isv_name = value

    @property
    def scenic_face_info(self):
        return self._scenic_face_info

    @scenic_face_info.setter
    def scenic_face_info(self, value):
        # Only list input is accepted (as in the generated original);
        # elements that are not ScenicFaceInfo are parsed from dicts.
        if isinstance(value, list):
            self._scenic_face_info = list()
            for item in value:
                if isinstance(item, ScenicFaceInfo):
                    self._scenic_face_info.append(item)
                else:
                    self._scenic_face_info.append(ScenicFaceInfo.from_alipay_dict(item))

    @staticmethod
    def _marshal(value):
        """Return value.to_alipay_dict() when available, else the value."""
        if hasattr(value, 'to_alipay_dict'):
            return value.to_alipay_dict()
        return value

    def to_alipay_dict(self):
        """Serialize all set (truthy) fields into a plain dict."""
        params = dict()
        for name in self._SIMPLE_FIELDS:
            value = getattr(self, name)
            if value:
                params[name] = self._marshal(value)
        if self.scenic_face_info:
            if isinstance(self.scenic_face_info, list):
                # Convert element objects in place, exactly like the
                # generated code (NOTE: this mutates the stored list).
                for i in range(0, len(self.scenic_face_info)):
                    element = self.scenic_face_info[i]
                    if hasattr(element, 'to_alipay_dict'):
                        self.scenic_face_info[i] = element.to_alipay_dict()
            params['scenic_face_info'] = self._marshal(self.scenic_face_info)
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build a model from a wire-format dict; falsy input yields None.

        Assignment goes through the property setters, so scenic_face_info
        entries are parsed into ScenicFaceInfo objects on the way in.
        """
        if not d:
            return None
        o = AlipayCommerceEducateFacefeatureDeleteModel()
        for name in AlipayCommerceEducateFacefeatureDeleteModel._SIMPLE_FIELDS + ('scenic_face_info',):
            if name in d:
                setattr(o, name, d[name])
        return o
| StarcoderdataPython |
1907362 | # -*- coding: utf-8 -*-
"""
.. invisible:
_ _ _____ _ _____ _____
| | | | ___| | | ___/ ___|
| | | | |__ | | | |__ \ `--.
| | | | __|| | | __| `--. \
\ \_/ / |___| |___| |___/\__/ /
\___/\____/\_____|____/\____/
Created on Jul 9, 2014
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
"""
import numpy
import os
import unittest
from veles.znicz.diversity import get_similar_kernels
class Test(unittest.TestCase):
def testSimilarSets(self):
weights = numpy.load(os.path.join(os.path.dirname(__file__),
'data/diversity_weights.npy'))
sims = get_similar_kernels(weights)
self.assertEqual(sims, [{1, 27, 4}, {18, 13}])
"""
# Visualize a 2-D matrix
from pylab import pcolor, show, colorbar, xticks, yticks
pcolor(matrix)
colorbar()
xticks(numpy.arange(0.5, corr_matrix.shape[0] + 0.5),
range(0, corr_matrix.shape[0]))
yticks(numpy.arange(0.5, corr_matrix.shape[1] + 0.5),
range(0, corr_matrix.shape[1]))
show()
"""
if __name__ == "__main__":
unittest.main()
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.