prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
from kubeflow.kubeflow.crud_backend import api, status
def pvc_status(pvc):
"""
Set the status of the pvc
"""
if pvc.metadata.deletion_timestamp is not None:
return status.create_status(status.STATUS_PHASE.TERMINATING,
"Deleting Volume...")
if pvc.statu... | status of that viewer. If a deletion
timestamp is set we want to return a `Terminating` state.
"""
try:
ready = viewer["status"]["ready"]
except KeyError:
return status.STATUS_PHASE.UNINITIALIZED
if "deletionTimestamp" in viewer["metadata"]:
return status.STATUS_PHASE.TERMI... | ASE.WAITING
return status.STATUS_PHASE.READY
|
ource, destination) ]
def generate_favicon_resources():
fav_tpl = lambda r: "favicon-{0}x{0}.png".format(r)
and_tpl = lambda r: "touch-icon-{0}x{0}.png".format(r)
app_tpl = lambda r: "apple-touch-icon-{0}x{0}.png".format(r)
pra_tpl = lambda r: "apple-touch-icon-{0}x{0}-precomposed.png"... | s),
| ('rel="icon" type="image/png" sizes="{0}x{0}" href="/{1}"', favicons) ]:
fav_head += "".join( gen_head(*fav_set) )
return fav_head
def get_opengraph_head():
og_head_string = """\
% url = request.environ['HTTP_HOST']
<meta property="og:url" content="http://{{url}}/">
<meta property="og:ty... |
"""Model. We are modeling Person objects with a collection
of Address objects. Each Address has a PostalCode, which
in turn references a City and then a Country:
Person --(1..n)--> Address
Address --(has a)--> PostalCode
PostalCode --(has a)--> City
City --(has a)--> Country
"""
from sqlalchemy import Column, Int... | butes to be loaded from cache.
cache_address_bit | s = RelationshipCache("default", "byid", PostalCode.city).\
and_(
RelationshipCache("default", "byid", City.country)
).and_(
RelationshipCache("default", "byid", Address.postal_code)
)
|
return p
if isinstance(p, string_types):
p = p.replace(' ', '')
try:
# we might have a Float
neg_pow, digits, expt = decimal.Decimal(p).as_tuple()
p = [1, -1][neg_pow]*int("".join(str(x) for x in digits))
... | __(self, other)
__truediv__ = __div__
@_sympifyit('other', NotImplemented)
def __mod__(self, other):
if isinstance(other, Rational):
n = (self.p*other.q) // (other.p*self.q)
return Rational(self.p*other.q - n*other.p*self.q, self.q*other.q)
if isinstance(other, Floa... | other._prec))
return Number.__mod__(self, other)
@_sympifyit('other', NotImplemented)
def __rmod__(self, other):
if isinstance(other, Rational):
return Rational.__mod__(other, self)
return Number.__rmod__(self, other)
def _eval_power(self, expt):
if isinstance(e... |
from django.core import serializers
from rest_framework.response import Response
from django.http import JsonResponse
try:
from urllib import quote_plus # python 2
except:
pass
try:
from urllib.parse import quote_plus # python 3
except:
pass
from django.contrib import messages
from django.contrib.co... |
paginator = Paginator(queryset_list, 8) # Show 25 contacts per page
page_request_var = "page"
page = request.GET.get(page_request_var)
try:
queryset = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
queryset = paginator.page(1)... | last page of results.
queryset = paginator.page(paginator.num_pages)
context = {
"object_list": queryset,
"title": "List",
"page_request_var": page_request_var,
"today": today,
}
return render(request, "post_list.html", context)
def post_update(request, slug=None)... |
import eventlet
import gettext
import sys
from staccato.common import config
import staccato.openstack.common.wsgi as os_wsgi
import staccato.openstack.common.pastedeploy as os_pastedeploy
# Monkey patch socket and time
eventlet.patcher.monkey_patch(all=False, socket=True, time=True)
gettext.install('staccato', unic... | 'staccato-api',
| conf)
server = os_wsgi.Service(wsgi_app, conf.bind_port)
server.start()
server.wait()
except RuntimeError as e:
fail(1, e)
main()
|
"""Utility methods for handling Entities.
These methods can be shared between entity generation (invoked through
the Entities class) at the start of prod data generation, and between
post processing methods (such as adding edges between family members
and neighbours).
"""
import codecs
import collections
import re
... | me`:
entity_name = entity_name.replace("\n", " ")
# Trim name of Zivnost, followed by first occurrence of (' - ').
p = entity_name.find(' - ')
if (p > 0):
| name = entity_name[:p]
else:
name = entity_name
if verbose:
print('name = |%s|' % (name))
# Trim academic titles from the start and end of the name.
match = titles_parser.match(name).groupdict()
titles_pre = match['titles_pre'] if 'titles_pre' in match else ''
titles_suf = m... |
[f[1] for f in fields]
f_string = [ "%s %s" %(f,t) for (f,t) in zip(self.all_fields,_types)]
sql = "CREATE TABLE %s (%s)" %(self.name,
",".join(f_string))
self.cursor.execute(sql)
return self
def open(self):
"""Open an existing database"""
... | ),record["__id__"])
self.cursor.execute(sql)
def _make_sql_params(self,kw):
|
"""Make a list of strings to pass to an SQL statement
from the dictionary kw with Python types"""
vals = []
for k,v in kw.iteritems():
vals.append('%s=%s' %(k,self._conv(v)))
return vals
def _conv(self,v):
if isinstance(v,str):
v = v... |
r\n'
' SCSI ID: IET 00010000\n'
' SCSI SN: beaf10\n'
' Size: 0 MB, Block size: 1\n'
' Online: Yes\n'
' Removable media: No\n'
' Prevent removal: No\n'
' ... | self.testvol_1,
self.fake_volumes_dir))
def test_ensure_export(self):
ctxt = context.get_admin_context()
| with mock.patch.object(self.target, 'create_iscsi_target'):
self.target.ensure_export(ctxt,
self.testvol_1,
self.fake_volumes_dir)
self.target.create_iscsi_target.assert_called_on |
.exp(x1_k)
z = tf.reduce_sum(u_k)
return tf.log(z) + m
def linear(x, out_size, do_bias=True, alpha=1.0, identity_if_possible=False,
normalized=False, name=None, c | ollections=None):
"""Linear (affine) transformation, y = x W + b, for a variety of
configurations.
Args:
x: input The tensor to tranformation.
out_size: The int | eger size of non-batch output dimension.
do_bias (optional): Add a learnable bias vector to the operation.
alpha (optional): A multiplicative scaling for the weight initialization
of the matrix, in the form \alpha * 1/\sqrt{x.shape[1]}.
identity_if_possible (optional): just return identity,
if x... |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ | = 'D', seed = 0, trendtype = "PolyTrend", cycle_len | gth = 0, transform = "Anscombe", sigma = 0.0, exog_count = 20, ar_order = 12); |
from sympy.external import i | mport_module
from sympy.utilities.pytest import warns |
# fixes issue that arose in addressing issue 6533
def test_no_stdlib_collections():
'''
make sure we get the right collections when it is not part of a
larger list
'''
import collections
matplotlib = import_module('matplotlib',
__import__kwargs={'fromlist': ['cm', 'collections']},
... |
# coding: utf-8
"""
Server API
Reference for Server API (REST/Json)
OpenAPI spec version: 2.0.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class PrepaymentBonusResponse(object):
"""
NOTE: This cl... | Response.
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""
Sets the id of this PrepaymentBonusResponse.
:param id: The id of th | is PrepaymentBonusResponse.
:type: int
"""
self._id = id
@property
def name(self):
"""
Gets the name of this PrepaymentBonusResponse.
:return: The name of this PrepaymentBonusResponse.
:rtype: str
"""
return self._name
@name.setter
... |
import pygame
from pygame.locals import *
import random
import itertools
import state
import block
import tetros
import states
from text import Text
from colors import Colors
from engine import Engine
from playfield import Playfield
from countdown import Countdown
class GameState(state.State):
tetro_classes = ... | self.falling_tetro = None
break
else:
# update row
self.falling_tetro.row += 1
# reset counter
self.falling_tetro.drop_delay_counter = 0
# new tetro | if needed
if self.falling_tetro is None:
color = random.choice(self.tetro_colors)
tetro_cls = random.choice(self.tetro_classes)
#
# not giving the startx-y may get the tetromino and playfield out
# of sync because startx-y default to zero
s... |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import unittest
from .estestcase import ESTestCase
from pyes.facets import DateHistogramFacet
from pyes.filters import TermFilter, RangeFilter
from pyes.query import FilteredQuery, MatchAllQuery, Search
from pyes.utils import ESRange
import datetime
class ... | "tag": "foo",
"date": datetime.date(2011, 4, 16)},
self.index_name, self.document_type, 2)
self.conn.index({"name": "Bill Clinton",
"parsedtext": " | Bill is not nice guy",
"uuid": "33333",
"position": 3,
"tag": "bar",
"date": datetime.date(2011, 4, 28)},
self.index_name, self.document_type, 3)
self.conn.refresh(self.index_name)
def test_terms... |
t)
if not default.get('name'):
default['name'] = _("%s (copy)") % current.name
if 'remaining_hours' not in default:
default['remaining_hours'] = current.planned_hours
return super(task, self).copy_data(cr, uid, id, default, context)
_columns = {
'active': fi... | by the assignee of the task."),
'user_id': fields.many2one('res.users', 'Assigned to', select=True, track_visibility='onchange'),
'partner_id': fields.many2one('res.partner', 'Customer'),
'manager_id': fields.related('project_id', 'user_id', type='many2one', relation='res.users', string='Project... | e),
'color': fields.integer('Color Index'),
'user_email': fields.related('user_id', 'email', type='char', string='User Email', readonly=True),
'attachment_ids': fields.one2many('ir.attachment', 'res_id', domain=lambda self: [('res_model', '=', self._name)], auto_join=True, string='Attachments'),... |
ls.ForeignKey(UserProfile, related_name='user_target')
deleted = models.BooleanField(default=False)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
class Category(models.Model):
name = models.CharField(m... | rField(default=0)
has_data_set = models.BooleanField(default=False)
data_set_location = models.CharField(max_le | ngth=256, default='No data set', null=True)
task_time = models.FloatField(default=0) # in minutes
deleted = models.BooleanField(default=False)
created_timestamp = models.DateTimeField(auto_now_add=True, auto_now=False)
last_updated = models.DateTimeField(auto_now_add=False, auto_now=True)
template ... |
from __future__ import (absolute_import, division, print_function, unicode_literals)
from builtins import *
import wizzat.testutil
import wizzat.pghelper
class DBTestCase(wizzat.testutil.TestCase):
db_info = {
'host' : 'localhost',
'port' : 5432,
'user' : 'wizzat',
... | n(self, name = | 'testconn'):
conn = self.db_mgr.name(name)
conn.autocommit = True
return conn
|
"""A block Davidson solver for finding a fixed number of eigenvalues.
Adapt | ed from https://joshuagoings.com/2013/08/23/davidsons-method/
"""
import time
from typing import Tuple
import numpy as np
from tqdm import tqdm
def davidson(A: np.ndarray, k: int, eig: int) -> Tuple[np.ndarray, np.ndarray]:
assert len(A.shape) == 2
assert A.shape[0] == A.shape[1]
n = A.shape[0]
## s... | .eye(n)
for m in tqdm(range(k, mmax, k)):
if m <= k:
for j in range(k):
V[:, j] = t[:, j] / np.linalg.norm(t[:, j])
theta_old = 1
elif m > k:
theta_old = theta[:eig]
V, R = np.linalg.qr(V)
T = V[:, : (m + 1)].T @ A @ V[:, : (m + 1)... |
import tests.units.tournaments
import lib.datalayer
import games
import games.settlers
import tournaments
import hruntime
from tests import *
from tests.units.tournaments import create_and_populate_tournament
class Tests(TestCase):
@classmethod
def setup_class(cls):
super(Tests, cls).setup_class()
hrun... | t = lib.datala | yer.Root()
hruntime.dbroot.users['SYSTEM'] = tests.DummyUser('SYSTEM')
def test_sanity(self):
patched_events = {
'tournament.Created': 2,
'tournament.PlayerJoined': 12,
'game.GameCreated': 8,
'game.PlayerJoined': 4,
'game.PlayerInvited': 8
}
with EventPatcherWithCounter... |
# Copyright (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
import deform
from pyramid.view import view_config
from dace.processinstance.core import DEFAULTMAPPING_ACTIONS_VIEWS
from pontus.default_behavior import Cancel
from p... | cel]
validate_behaviors = False
def before_update(self):
self.action = self.request.resource_url(
self.context, 'novaideoapi',
query={'op': 'update_action_view',
'node_id': RemoveSmartFolder.node_definition.id})
self.schema.widget = deform.widget.FormW... |
)
class RemoveSmartFolderViewMultipleView(MultipleView):
title = _('Remove the topic of interest')
name = 'removesmartfolder'
viewid = 'removesmartfolder'
template = 'pontus:templates/views_templates/simple_multipleview.pt'
views = (RemoveSmartFolderViewStudyReport, RemoveSmartFolderView)
v... |
from .variables import *
def Cell(node):
# cells must stand on own line
if node.parent.cls not in ("Assign", "Assigns"):
node.auxiliary("cell")
return "{", ",", | "}"
def Assign(node):
if node.name == 'varargin':
out = "%(0)s = va_arg(varargin, " + node[0].type + ") ;"
else:
out = "%(0)s.clear() ;"
# append t | o cell, one by one
for elem in node[1]:
out = out + "\n%(0)s.push_back(" + str(elem) + ") ;"
return out
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-26 09:42
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, m | odels
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('exercises', '0004_exercise_author'),
]
operations = [
migrations.AlterField(
model_name='exercise',
name='author',
field=models.ForeignKey(on_delete=django... | s.AUTH_USER_MODEL),
),
]
|
# Copyright (c) 2017 Cedric Bellegarde <cedric.bellegarde@adishatz.org>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
... | s():
(bus, subscribe_id) = self.__signals[page_id]
bus.signal_unsubscribe(subscribe_id)
del self.__signals[page_id]
#######################
# PRIVATE | #
#######################
def __on_get_proxy(self, source, result, call, dbus_args, callback, *args):
"""
Launch call and connect it to callback
@param source as GObject.Object
@param result as Gio.AsyncResult
@param call as str
@param dbus... |
from __future__ import print_function, unicode_literals, division, absolute_import
import datetime
import time
import ntplib
from pyotp import utils
from pyotp.otp import OTP
class TOTP(OTP):
systime_offset = None
def __init__(self, *args, **kwargs):
"""
@option options [Integer] interval ... | stamp integer or a Time object.
Time objects will be adjusted to UTC automatically
@param [Time/Integer] time the time to generate an OTP for
@param [Integer] counter_offset an amount of ticks to add to the time counter
"""
if not isinstance(for_time, datetime.datetime):
... | stamp(int(for_time))
return self.generate_otp(self.timecode(for_time) + counter_offset)
def now(self):
"""
Generate the current time OTP
@return [Integer] the OTP as an integer
"""
return self.generate_otp(self.timecode(datetime.datetime.now()))
def verify(self,... |
import csv
from | . import WorksheetBase, WorkbookBase, CellMode
class CSVWorksheet(WorksheetBase):
def __init__(self, raw_sheet, ordinal):
super().__init__(raw_sheet, ordinal)
self.name = "Sheet 1"
self.nrows = len(self.raw_sheet)
self.ncols = max([len(r) for r in self.raw_sheet])
def parse_cell(self, cell, coor... | de.cooked):
try:
return int(cell)
except ValueError:
pass
try:
return float(cell)
except ValueError:
pass
# TODO Check for dates?
return cell
def get_row(self, row_index):
return self.raw_sheet[row_index]
class CSVWorkbook(WorkbookBase):
def iterate_sheets(self... |
"""The tests for the Graphite component."""
import socket
import unittest
from unittest import mock
import blumate.core as ha
import blumate.components.graphite as graphite
from blumate.const import (
EVENT_STATE_CHANGED,
EVENT_BLUMATE_START, EVENT_BLUMATE_STOP,
STATE_ON, STATE_OFF)
from tests.common impo... | 'new_state': mock.MagicMock()})
def fak | e_get():
if len(runs) >= 2:
return self.gf._quit_object
elif runs:
runs.append(1)
return mock.MagicMock(event_type='somethingelse',
data={'new_event': None})
else:
runs.append(1)
... |
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use th... | # there which cleans things up.
sys.exit(0)
# Register a SIGTERM signal handler which calls sys.exit which causes SystemExit to
# be thrown. We catch SystemExit and handle cleanup there.
signal.signal(signal.SIGTERM, | sigterm_handler)
def _to_sensor_object(self, sensor_db):
file_path = sensor_db.artifact_uri.replace('file://', '')
class_name = sensor_db.entry_point.split('.')[-1]
sensor_obj = {
'pack': sensor_db.pack,
'file_path': file_path,
'class_name': class_name,
... |
"""Runs fast tests."""
import unittest
from tests.kernel_tests import SwiftKernelTes | ts, OwnKernelTests
from tests.simple_notebook_tests import *
if __name__ == '__main__':
unittest.main()
| |
# Copyright (C) 2014 Robby Zeitfuchs (@robbyFux)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundati | on, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details... | cense
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from lib.cuckoo.common.abstracts import Signature
class Cridex(Signature):
name = "banker_cridex"
description = "Cridex banking trojan"
severity = 3
alert = True
categories = ["Banking", "Trojan"]
families = ["Cridex... |
from django.contrib.gis.db.models import GeometryField
from django.contrib.gis.db.models.functions import Distance
from django.contrib.gis.measure import (
Area as AreaMeasure, Distance as DistanceMeasure,
)
from django.db.utils import NotSupportedError
from django.utils.functional i | mport cached_property
class BaseSpatialOperations:
# Quick booleans for the type of this spatial backend, and
# an attribute for the spatial database version tuple (if applicable)
postgis = False
spatialite = False
mysql = False
oracle = False
spatial_version = None
# How the geometry... | @cached_property
def select_extent(self):
return self.select
# Does the spatial database have a geometry or geography type?
geography = False
geometry = False
# Aggregates
disallowed_aggregates = ()
geom_func_prefix = ''
# Mapping between Django function names and backend n... |
# -*- coding: utf- | 8 -*-
# Generated by Django 1.10.3 on 2016-11-26 22:42
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('parliament', '0002_auto_20161123_1157'),
]
operations = [
migrations.AlterField(
... | ),
]
|
import re
import logging
import urllib
import csv
import os
import shutil
from datetime import datetime
import StringIO
from scrapy.spider import BaseSpider
from scrapy import signals
from scrapy.selector import HtmlXPathSelector
from scrapy.http import Request, HtmlResponse
from scrapy.utils.response import get_base_... | loader.add_value('image_url', row['image_url'])
loader.add_value('shipping_cost', row['shi | pping_cost'])
yield loader.load_item()
|
import matplotlib.pyplot as plt
#stores information about laser structure
#saves refraction and electric field profiles in text and graphic form to HDD
class Laser:
refraction = []
field = []
| gridX = []
gridN = []
field = []
def __init__(self, (wavelength, concentration, thickness)):
if isinstance(wavelength, (int, float)) == False:
raise TypeError("wavelength should be a number")
if isinstance(concentration, list) == False:
raise TypeError("c... | nstance(concentration[i], (int, float)) == False or isinstance( thickness[i], (int, float)) == False:
raise TypeError("concentration and thickness elements should be numbers")
if wavelength is None:
raise ValueError("wavelength is undefined")
if concentration is None:
... |
"""
The main script
"""
import argparse
import summaryrank.features
import summaryrank.importers
import summaryrank.tools
DESCRIPTION = '''
SummaryRank is a set of tools that help producing machine-learned
summary/sentence rankers. It supports a wide range of functions such
as generating judgments in trec_eval forma... | ("import_mobileclick", summaryrank.importers.import_mobileclick),
]
FEATURE_FUNCTIONS = [
("gen_term", summaryrank.features.gen_term),
("gen_freqstats", summaryrank.features.gen_freqstats),
("gen_esa", summaryrank.features.gen_esa),
("gen_tagme", summaryrank.features.gen_tagme),
("extract", summa... | t),
("contextualize", summaryrank.features.contextualize),
]
GENERAL_FUNCTIONS = [
("describe", summaryrank.tools.describe),
("cut", summaryrank.tools.cut),
("join", summaryrank.tools.join),
("shuffle", summaryrank.tools.shuffle),
("split", summaryrank.tools.split),
("normalize", summaryran... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or ... |
from validator.api.middleware.ssl import SSLMiddleware
from validator.tests.base import ValidatorTestCase
class SSLMiddlewareTestCase(ValidatorTestCase):
""" Tests for class SSLMiddleware """
def setUp(self):
""" Create a SSLMiddleware instance """
super(SSLMiddlewareTestCase, self).setUp()... | = "MyInput"
expected = "OK"
self.item.external.return_value = "OK"
observed = self.item.process_request(input)
self.assertEqual(expected, observed)
def tearDown(self):
""" Cleanup the SSLMiddleware instance """
super(SSLMiddlewareTestCase, self).tearDown()
se... |
# Copyright (c) | 2017 Nick Gashkov
#
# Distributed under MIT License. See LICENSE file for details.
cl | ass ValidationError(Exception):
def __init__(self, *args, **kwargs):
self.error_dict = kwargs.pop('error_dict')
super(ValidationError, self).__init__(*args, **kwargs)
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2010 TUBITAK/UEKAE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# ... | gsBox.show()
self.ui.defaultSettings.setChecked(True)
def fillDrives(self):
self.ui.drives.clear()
for drive in self.bootloader.drives:
device = ctx.storage.devicetree.getDeviceByName(drive)
item = u"%s" % (device.name)
self.ui.drives.addItem(item, d | evice)
def shown(self):
if ctx.flags.install_type == ctx.STEP_RESCUE:
ctx.mainScreen.disableBack()
self.bootloader = ctx.bootloader
self.bootloader.storage = ctx.storage
self.fillDrives()
self.activateChoices()
def backCheck(self):
if ctx.storage.doA... |
#!/usr/bin/python
#
# to run an example
# python RunMakeFigures.py -p Demo -i 0 -j 1 -f 3FITC_4PE_004.fcs -h ./projects/Demo
#
import getopt,sys,os
import numpy as np
## important line to fix popup error in mac osx
import matplotlib
matplotlib.use('Agg')
from cytostream import Model
import matplotlib.pyplot as plt
#... | [:-4],channel1,channel2,plotType))
fig.savefig(fileName,transparent=False,dpi=50)
else:
fileName = os.path.join(altDir,"%s_%s_%s.%s"%(selectedFile[: | -4],channel1,channel2,plotType))
fig.savefig(fileName,transparent=False,dpi=50)
## error checking
if altDir == 'None':
altDir = None
if homeDir == 'None':
homeDir = None
if modelName == 'None':
modelName = None
statModel,statModelClasses = None,None
if altDir == None and homeDir == None:
... |
import json
import os
import socket
import sys
import uuid
import etcd
from tendrl.commons import objects
from tendrl.commons.utils import etcd_utils
from tendrl.commons.utils import event_utils
from tendrl.commons.utils import log_utils as logger
NODE_ID = None
class NodeContext(objects.BaseObject):
def __i... | },
"typ | e": "node"
}
_job_id = str(uuid.uuid4())
NS.tendrl.objects.Job(
job_id=_job_id,
status="new",
payload=payload
).save()
logger.log(
... |
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2011,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obta | in a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the ... | r the License.
def addpkg(*args, **kwargs):
pass
|
#coding=utf8 |
'''
Created on 2012-9-19
@author: senon
'''
from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('friendship | s.views',
url(r'^concerned_about_friends/', 'concerned_about_friends')
) |
import ply.yacc as yacc
from bsi_lexer import tokens
from bsi_object import BsiObject
from bsi_array import BsiArray
def p_object_pairs(p):
'obj : pairs'
p[0] = BsiObject()
for pair in p[1]:
p[0].set(pair[0], pair[1])
def p_pairs_pair(p):
'pairs : pair'
p[0] = [p[1]]
def p_pairs_pair_pair... | 'val : STRING'
p[0] = p[1]
def p_val_array(p):
'val : L_SQ_BR vals R_SQ_BR'
p[0] = BsiArray(p[2])
def p_array_val(p):
'vals : val'
p[0] = [p[1]]
def p_array_vals(p):
'vals : val vals'
p[0] = [p[1]] + p[2]
def p_val_nested_obj(p):
'val : L_BRACE obj R_BRACE'
p[0] = p[2]
def p... | "
bsi_parser = yacc.yacc()
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
#... | mport AirflowException
from airflow.task.task_runner.standard_task_runner import StandardTaskRunner
_TASK_RUNNER = conf.get('core', 'TASK_RUNNER')
def get_task_runner(local_task_job):
"""
Get the task runner that can be used to run the given job.
:param local_task_job: The LocalTaskJob associated with t... | eeds to be executed.
:type local_task_job: airflow.jobs.LocalTaskJob
:return: The task runner to use to run the task.
:rtype: airflow.task.task_runner.base_task_runner.BaseTaskRunner
"""
if _TASK_RUNNER == "StandardTaskRunner":
return StandardTaskRunner(local_task_job)
elif _TASK_RUNNER ... |
##################################################################################################
# $HeadURL$
##################################################################################################
"""Collection of DIRAC useful statistics related modules.
.. warning::
By default on Error they return N... | mbers: data sample
:param mixed posMean: mean of a sample or 'Empty' str
"""
if not len(numbers):
return
if posMean == 'Empty':
mean = getMean(numbers)
else:
mean = posMean
numbers = sorted( [float(x) for x in numbers] )
# Subtract the mean from each data item and square the difference.
... | n(numbers, variance='Empty', mean='Empty'):
"""Determine the measure of the dispersion of the data set based on the
variance.
:param list numbesr: data sample
:param mixed variance: variance or str 'Empty'
:param mixed mean: mean or str 'Empty'
"""
if not len(numbers):
return
# Take the square roo... |
#!/ho | me/jojoriveraa/Dropbox/Capacitación/Platzi/Python-Django/NFCow/venv/bin/python3
from django.core import management
if __name__ == "__main__":
m | anagement.execute_from_command_line()
|
"""
The Plaid API
The Plaid REST API. Please see https://plaid.com/docs/api for more details. # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from plaid.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal... | openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
'value': (str,),
}
@cached_property
def discriminator():
return None
attribute_map = {}
_composed_schemas = None
required_properties = ... | sited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs):
"""TransactionsRuleField - a model defined in OpenAPI
Note that value can be passed either in args or in kwargs, but not in both.
Args:
args[0] (str): Transaction field for... |
'''Example script to generate text from Nietzsche's writings.
At least 20 epochs are required before the generated text
starts sounding coherent.
It is recommended to run this script on GPU, as recurrent
networks are quite computationally intensive.
If you try this script on new data, make sure your corpus
has at le... | har]] = 1.
preds = model.predict(x, verbose=0)[0]
next_index = sample(preds, diversity)
next_char = indices_char[next_index]
gene | rated += next_char
sentence = sentence[1:] + next_char
sys.stdout.write(next_char)
sys.stdout.flush()
print()
model.save_weights('data/nietzsche_simple_TF.h5')
|
#!/usr/bin/env python3
# Copyright (c) 2020-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test block-relay-only anchors functionality"""
import os
from test_framework.p2p import P2PInterface
... | 2)
# 127.0.0.1
ip = "7f000001"
# Since the ip is always 127.0.0.1 for this case,
# we store only the port to identify the peers
block_relay_nodes_port = []
inbound_nodes_port = []
for p in self.nodes[0].getpeerinfo(): |
addr_split = p["addr"].split(":")
if p["connection_type"] == "block-relay-only":
block_relay_nodes_port.append(hex(int(addr_split[1]))[2:])
else:
inbound_nodes_port.append(hex(int(addr_split[1]))[2:])
self.log.info("Stop node 0")
self... |
import ray
from ray._private.test_utils import run_string_a | s_driver
# This tests the queue transitions for infeasible tasks. This has been an issue
# in the past, e.g., https://github.com/ray-project/ray/issues/3275.
def test_infeasible_tasks(ray_start_cluster):
cluster = ray_start_cluster
@ray.remote
def f():
return
cluster.add_node(resources={str(... | ray.init(address=cluster.address)
# Submit an infeasible task.
x_id = f._remote(args=[], kwargs={}, resources={str(1): 1})
# Add a node that makes the task feasible and make sure we can get the
# result.
cluster.add_node(resources={str(1): 100})
ray.get(x_id)
# Start a driver that submit... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Finds revisions from the Thunderbird migration that don't have based_on
set correctly, and are still relavent, and fixes that.
Run this script like `./manage.py runscript fix_tb_basedon`.
"""
import sys
from traceback import print_exc
from django.db.models import Q
... | n__based_on=None,
products__slug='thunderbird'))
if len(to_process) == 0:
print 'Nothing to do.'
prog = Progress(len(to_process))
for doc in to_process:
prog.tick()
oldest_parent_rev = (Revision.objects.filter(document=doc.parent)
.order_by('id... | doc.parent.save()
doc.current_revision.based_on = oldest_parent_rev
doc.current_revision.save()
|
from pidW | X impor | t *
|
# -*- coding: utf-8 -*-
import os
from future.moves.urllib.parse import quote
import uuid
import ssl
from pymongo import MongoClient
import requests
from django.apps import apps
from addons.wiki import settings as wiki_settings
from addons.wiki.exceptions import InvalidVersionError
from osf.utils.permissions import A... | a = {
'complete': True,
'wiki_content': wiki_html if wiki_html else None,
'wiki | _content_url': node.api_url_for('wiki_page_content', wname='home'),
'rendered_before_update': rendered_before_update,
'more': more,
'include': False,
}
wiki_widget_data.update(wiki.config.to_json())
return wiki_widget_data
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
( | 'cfp', '0004_paperapplication_duration'),
]
operations = [
migrations.AlterField(
model_name='applicant',
name='user',
field=models.OneToOneField(related_name='applicant', to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE),
preserve_default=True,
... | migrations.AlterField(
model_name='paperapplication',
name='applicant',
field=models.ForeignKey(related_name='applications', to='cfp.Applicant', on_delete=models.CASCADE),
preserve_default=True,
),
]
|
# BEGIN_COPYRIGHT
#
# Copyright 2009-2021 CRS4.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licen | ses/LICENSE-2.0
#
# U | nless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# END... |
#
# This file is part of pyasn1 software.
#
# Copyright (c) 2005-2017, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pyasn1/license.html
#
import logging
from pyasn1 import __version__
from pyasn1 import error
from pyasn1.compat.octets import octs2ints
__all__ = ['Debug', 'setLogger', 'hexdump']
fl... | : flagNone,
'encoder': flagEncoder,
'decoder': flagDecod | er,
'all': flagAll
}
class Printer(object):
# noinspection PyShadowingNames
def __init__(self, logger=None, handler=None, formatter=None):
if logger is None:
logger = logging.getLogger('pyasn1')
logger.setLevel(logging.DEBUG)
if handler is None:
handler = ... |
# This file is part of BurnMan - a thermoelastic and thermodynamic toolkit for
# the Earth and Planetary Sciences
# Copyright (C) 2012 - 2015 by the BurnMan team, released under the GNU
# GPL v2 or later.
"""
example_perplex
---------------
This minimal example demonstrates how burnman can be used
to read and interro... | man.PerplexMaterial('../burnman/data/input_perplex/in23_1.tab')
P = 1.e9
T = 1650.
rock.set_state(P, T)
print('P: {0:.1f} GPa, T: {1:.1f} K, density: {2:.1f} kg/m^3'.format(P/1.e9, T, rock.rho))
pressures = np.linspace(10.e9, 25.e9, 151)
temperatur | es = [T] * len(pressures)
densities = rock.evaluate(['rho'], pressures, temperatures)[0]
plt.plot(pressures/1.e9, densities)
plt.xlabel('Pressure (GPa)')
plt.ylabel('Density (kg/m^3)')
plt.show()
pressures = np.linspace(10.e9, 25.e9, 151)
temperatures = np.linspace(1600., 1800., 3)
T =... |
assert_array_almost_equal(ivim_fit_multi.model_params, ivim_params_trr)
assert_array_almost_equal(est_signal, data_multi)
def test_ivim_errors():
"""
Test if errors raised in the module are working correctly.
Scipy introduced bounded least squares fitting in the version 0.17
and is not supported... | # Generate a signal with multiple b0
# This gives an isotropic signal.
signal = multi_tensor(gtab_with_multiple_b0, mevals, snr=None, S0=S0,
fractions=[f * 100, 100 * (1 - f)])
# Single | voxel data
data_single = signal[0]
ivim_model_multiple_b0 = IvimModel(gtab_with_multiple_b0, fit_method='trr')
ivim_model_multiple_b0.fit(data_single)
# Test if all signals are positive
def test_no_b0():
    """IvimModel construction must raise ValueError for a gtab without b=0."""
    assert_raises(ValueError, IvimModel, gtab_no_b0)
def test_noisy_fit():
"""
Test f... |
import os
import shutil
from cumulusci.core.exceptions import DependencyResolutionError
from cumulusci.core.github import get_github_api_for_repo
from cumulusci.core.github import find_latest_release
from cumulusci.core.github import find_previous_release
from cumulusci.utils import download_extract_github
class Git... | The spec may include one of:
- commit: a commit hash
- ref: a git ref
- branch: a git branch
- tag: a git tag
- release: "latest" | "previous" | "latest_beta"
If none of these are specified, CumulusCI will look for the latest release.
If there is no release, it... | None
if "commit" in self.spec:
self.commit = self.description = self.spec["commit"]
return
elif "ref" in self.spec:
ref = self.spec["ref"]
elif "tag" in self.spec:
ref = "tags/" + self.spec["tag"]
elif "branch" in self.spec:
ref... |
# assign epitope fitness to each node in the phylogeny
import time
from io_util import *
from tree_util import *
from date_util import *
from seq_util import *
import numpy as np
from itertools import izip
from collections import defaultdict
def append_nonepitope_sites(viruses):
for virus in viruses:
sites_ne = no... | )
def normalize(viruses):
    """Normalize non-epitope distances into non-epitope fitness scores.

    Mutates each virus dict in place, adding a 'fitness_ne' key holding the
    negated z-score of its 'distance_ne' (smaller distance => higher fitness).
    """
    print "Normalizing non-epitope distances"
    distances = [v['distance_ne'] for v in viruses]
    mean = np.mean(distances)
    sd = np.std(distances)
    for virus in viruses:
        # Negated z-score: a below-average non-epitope distance contributes
        # positive fitness.  NOTE(review): divides by sd unguarded -- a
        # zero-variance input set would raise; confirm that cannot occur.
        virus['fitness_ne'] = -1 * ( ( virus['distance_ne'] - mean) / sd )
        print virus['strain'] + ": " + str(virus['fitness_ne'])
def main(in_fname = None):
print "--- Non-epitope fitness at " + time.strft... |
from ctypes import c_void_p
from django.contrib.gis.geos.error import GEOSException
# Trying to import GDAL libraries, if available. Have to place in
# try/except since this package may be used outside GeoDjango.
try:
from django.contrib.gis import gdal
except ImportError:
# A 'dummy' gdal module.
... | HAS_GDAL = False
gdal = GDALInfo()
# NumPy supported?
try:
import numpy
except ImportError:
numpy = False
class GEOSBase(object):
"""
Base object for GEOS objects that has a pointer access property
that controls access to the underlying C pointer.
"""
# Initia | lly the pointer is NULL.
_ptr = None
# Default allowed pointer type.
ptr_type = c_void_p
# Pointer access property.
def _get_ptr(self):
# Raise an exception if the pointer isn't valid don't
# want to be passing NULL pointers to routines --
# that's very bad.
... |
from tensorboardX import SummaryWriter
import unittest
from tensorboardX.record_writer import S3RecordWriter, make_valid_tf_name, GCSRecordWriter
import os
import boto3
from moto import mock_s | 3
os.environ.setdefault("AWS_ACCESS_KEY_ID", "foobar_key")
os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "foobar_secret")
class RecordWriterTest(unittest.TestCase):
    @mock_s3
    def test_record_writer_s3(self):
        """S3RecordWriter should split an s3:// URL into bucket + key path
        and write/flush against the (moto-mocked) bucket without error."""
        client = boto3.client('s3', region_name='us-east-1')
        client.create_bucket(Bucket='this')
        writer = S3RecordWriter('s3://this/is/apen')
        bucket, path = writer.bucket_and_path()
        assert bucket == 'this'
        assert path == 'is/apen'
        # NOTE(review): on Python 3, bytes(42) is 42 zero bytes, not b"42" --
        # confirm that was the intent.
        writer.write(bytes(42))
        writer.flush()
def test_make_valid_tf_name(self):
... |
name_lat=latname_dist[i], latpath=self.folder)
self.runlattice(jobname=latname_dist[i], folder=self.folder)
already = False
job = self.create_jobname(self.jobname + jobname_dist[i])
self.emto.set_values(
sws=self.sws, jobname=job, latn... | 0 / volume * self.RyBohr3_to_GPa
c11 = self.bmod + c66 + self.ec_analyze_cs * \
(2 * self.e | c_analyze_R - 1)**2 / 18.0
c12 = self.bmod - c66 + self.ec_analyze_cs * \
(2 * self.ec_analyze_R - 1)**2 / 18.0
c13 = self.bmod + 1.0 / 9.0 * self.ec_analyze_cs * (
2 * self.ec_analyze_R**2 + self.ec_analyze_R - 1)
c33 = self.bmod + 2.0 / 9.0 * \
... |
from Module import AbstractModule
class Module(AbstractModule):
    def __init__(self):
        # No state of our own; delegate straight to the base class.
        AbstractModule.__init__(self)
def run(
self, network, antecedents, out_attributes, user_options, num_cores,
outfile):
from genomicode import mplgraph
from genomicode import filelib
... |
return filenam | e
|
# Copyr | ight 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project | Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PerlConfigGeneral(PerlPackage):
"""Config::General - Generic Config Module"""
homepage = "https://metacpan.org/pod/Config::General"
url = "https://cpan.metacpan.org/... |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'openPathTool.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(... | Layout.setObjectName(_fromUtf8("buttonLayout"))
self.explorerButton = QtGui.QPushButton(self.centralwidget)
self.explorerButton.setObjectName(_fromUtf8("explorerButton"))
self.buttonLayout.addWidget(self.explorerButton)
spacerItem = QtGui.QS | pacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.buttonLayout.addItem(spacerItem)
self.convertButton = QtGui.QPushButton(self.centralwidget)
self.convertButton.setObjectName(_fromUtf8("convertButton"))
self.buttonLayout.addWidget(self.convertButton)
... |
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law o... | (
scale_diag=[level_scale, 1.]))
local_level_ssm = LocalLevelStateSpaceModel(
num_timesteps=num_timesteps,
level_scale=level_scale,
initial_state_prior=tfd.MultivariateNormalDiag(
scale_diag=[level_scale]))
ar1_lp, ar2_lp, ll_lp = self.evaluate(
(ar1_ssm... | self.assertAllClose(ar1_lp, ll_lp)
self.assertAllClose(ar2_lp, ll_lp)
def testLogprobCorrectness(self):
# Compare the state-space model's log-prob to an explicit implementation.
num_timesteps = 10
observed_time_series_ = np.random.randn(num_timesteps)
coefficients_ = np.array([.7, -.1]).astype(s... |
# Removing stop words
# What to do with the Retweets (RT)?
# Make adjust so that the # and @ are attached to their associated word (i.e. #GOP, @twitter)
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
import sys
def remove_stopwords(tweets):
with open(tweets, 'r', buffering=1028) as re... | main():
tweets = "/Users/alanseciwa/Desktop/Independent_Study/Sep16-GOP-TweetsONLY/clean_d | ata-TWEETONLY.csv"
remove_stopwords(tweets)
if __name__ == '__main__':
    main()
    # No-argument sys.exit() exits with status 0 once main() has returned.
    sys.exit()
"""
WSGI config for Texas LAN Web project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICAT... | e same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
# This application object is used by any WSGI server conf... | ment server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
"""
PySAR
Polarimetric SAR decomposition
Contents
--------
decomp_fd(hhhh,vvvv,hvhv,hhvv,numthrd=None) : Freeman-Durden 3-component decomposition
"""
from __future__ import print_function, division
import sys,os
import numpy as np
###===============================================================================... | matform = 'T')
hvvv : ndarray
HV.VV* cross-product (or HV(HH-VV)* for matform = 'T')
matform : str {'C' or 'T'}
form of input matrix entries: 'C' for covariance matrix and
'T' for coherency matrix ['C'] (see ref. 1)
null : | float or None
null value to exclude from decomposition
numthrd : int or None
number of pthreads; None sets numthrd based on the data array size [None]
maxthrd : int or None
maximum allowable numthrd [8]
Returns
-------
H : nd... |
isted with its state")
self.failUnlessEqual(
value.method(2.5), 5.0,
"Class instance wasn't properly persisted with its method")
dList = []
for name in ('foo', 'bar'):
dList.append(self.i.t.load(name).addCallback(gotValue, name))
... | rDeleted)
def getAfterDeleted(null):
return self.i.t.load('foo').addCallback(checkIfDeleted)
| def checkIfDeleted(value):
self.failUnless(isinstance(value, items.Missing))
return self.i.t.load('foo').addCallback(gotOriginal)
def test_deleteMultiple(self):
def getAfterDeleted(null):
return self.i.t.loadAll().addCallback(checkIfDeleted)
def checkIfDeleted(valu... |
import json
import time
from _md5 import md5
import requests
import RolevPlayer as r
def now_playing_last_fm(artist, track):
update_now_playing_sig = md5(("api_key" + r.API_KEY +
"artist" + artist +
"method" + "track.updateNowPlaying" +
... | + trac | k
req = requests.post(url).text
json_obj = json.loads(req)
def scrobble(artist, track):
# this gives us a timestamp, casted to integer
ts = time.time()
scrobbling_sig = md5(("api_key" + r.API_KEY +
"artist" + artist +
"method" + "track.scro... |
#!/usr/bin/env p | ython
data = {
"default_prefix": "OSVC_COMP_REMOVE_FILES_",
"example_value": """
[
"/tmp/foo",
"/bar/to/delete"
]
""",
"description": """* Verify files and file trees are uninstalled
""",
"form_definition": """
Desc: | |
A rule defining a set of files to remove, fed to the 'remove_files' compliance object.
Css: comp48
Outputs:
-
Dest: compliance variable
Class: remove_files
Type: json
Format: list
Inputs:
-
Id: path
Label: File path
DisplayModeLabel: ""
LabelCss: edit16
Mandatory: Yes
... |
conanfile = """from conans import ConanFile, CMake, tools
import os
class {package_name}Conan(ConanFile):
name = "{name}"
version = "{version}"
license = "<Put the package license here>"
| url = "<Package recipe repository url here, for issues about the package>"
settings = "os", "compiler", "build_type", "arch"
options = {{"shared": [True, False]}}
default_options = "shared=False"
generators = "cmake"
def s | ource(self):
self.run("git clone https://github.com/memsharded/hello.git")
self.run("cd hello && git checkout static_shared")
# This small hack might be useful to guarantee proper /MT /MD linkage in MSVC
# if the packaged project doesn't have variables to set it properly
tools.replace... |
"""Tests for the Linky config flow."""
from pylinky.exceptions import (
PyLinkyAccessException,
PyLinkyEnedisException,
PyLinkyException,
PyLinkyWrongLoginException,
)
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.linky.const import DEFAULT_TIMEOUT, DOMAIN
from h... | _PASSWORD: PASSWORD},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "access"}
| hass.config_entries.flow.async_abort(result["flow_id"])
login.return_value.fetch_data.side_effect = PyLinkyEnedisException()
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={CONF_USERNAME: USERNAME, CONF_PASSWORD: PASSWORD},
)
a... |
s:\n{}'.format(pprint.pformat(late_variables)))
return late_variables
def get_secret_variables(sources):
    """Collect the secret variable names declared across *sources*.

    Duplicates are removed via a set, so the order of the returned list
    is unspecified.
    """
    names = set()
    for source in sources:
        names.update(source.secret)
    return list(names)
def get_final_arguments(resolver):
    """Map argument name -> value for every finalized argument on *resolver*."""
    finalized = {}
    for name, argument in resolver.arguments.items():
        if argument.is_finalized:
            finalized[name] = argument.value
    return finalized
def f... | loud-con | fig.yaml'] = cc
# Add utils that need to be def |
# disk.py
#
# Copyright (C) 2014-2016 Kano Computing Ltd.
# License: http://www.gnu.org/licenses/gpl-2.0.txt GNU GPL v2
#
# Utilities relating to disk manangement
from kano.utils.shell import run_cmd
def get_free_space(path="/"):
    """
    Returns the amount of free space in certain location in MB

    :param path: The location to measure the free space at.
    :type path: str
    :return: Number of free megabytes.
    :rtype: int
    """
    out, dummy_err, dummy_rv = run_cmd("df {}".format(path))
    # `df` prints a header line, then one line per filesystem; the fourth
    # field of the first data line is the free space in 1K blocks.
    dummy_device, dummy_size, dummy_used, free, dummy_percent, dummy_mp = \
        out.split('\n')[1].split()
    # Floor division keeps the documented int return type on Python 3 too
    # (plain `/` there would return a float; on Python 2 int/int already
    # floored, so behavior is unchanged).
    return int(free) // 1024
def get_partition_info():
device = '/dev/mmcblk0'
try:
cmd = 'lsblk -n -b {} -o SIZE'.format(device)
stdout, dummy_stderr, returncode = run_cmd(cmd)
if returncode != 0:
from kano.logging import logger
... |
# -*- coding: utf-8 -*-
from __future__ import unic | ode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Enforce uniqueness of ('owner', 'name') pairs on the test model."""

    dependencies = [
        ('core', '0008_auto_20150819_0050'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='test',
            # Set literal instead of set([...]) -- same value, idiomatic.
            unique_together={('owner', 'name')},
        ),
    ]
|
# Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implemen... | ITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
|
microcode = '''
# All the memory versions need to use LOCK, regardless of if it was set
def macroop XCHG_R_R
{
# Use the xor trick instead of moves to reduce register pressure.
# This probably doesn't make much of a difference, but it's easy.
xor reg, reg, regm
xor regm, regm, reg
xor reg, reg, r... |
import EightP | uzzleWithHeuristics as Problem
# puzzle0:
def CREATE_INITIAL_STATE():
    """Build puzzle0's starting state: tiles 0-8 laid out in order.

    A ``def`` instead of an assigned ``lambda`` (PEP 8 E731); the
    upper-case callable name is kept for backward compatibility.
    """
    return Problem.State([0, 1, 2, 3, 4, 5, 6, 7, 8])
expiry(604800)
log.debug("Setting user session to never expire")
else:
request.session.set_expiry(0)
except Exception as e:
AUDIT_LOG.critical("Login failed - Could not create session. Is memcached running?")
log.critical("Login failed - Could ... | value'] = _("To enroll, you must follow the honor code.").format(field=a)
js['field'] = 'honor_code'
return HttpResponse(json.dumps(js))
# Can't have terms of service for certain SHIB users, like at Stanford
tos_not_required = settings.MITX_FEATURES.g | et("AUTH_USE_SHIB") \
and settings.MITX_FEATURES.get('SHIB_DISABLE_TOS') \
and DoExternalAuth and ("shib" in eamap.external_domain)
if not tos_not_required:
if post_vars.get('terms_of_service', 'false') != u'true':
js['value'] = _("You must accept t... |
#!/usr/bin/env python3
from os import environ, system
from subprocess import Popen
print('\nUltimate Doom (Classic)')
print('Link: https://store.steampowered.com/app/2280/Ultimate_Doom/\n')
home = environ['HOME']
core = home + '/bin/games/steam | -connect/steam-connect-core.py'
logo = home + '/bin/games/steam-connect/doom-logo.txt'
game = 'doom-1'
stid = '2280'
proc = 'gzdoom'
flag = ' +set dmflags 4521984'
conf = ' -config ' + home + '/.config/gzdoom/gzdoom-classic.ini'
save = ' -savedir ' + home + '/.config/gzdoom/saves/' + game
iwad = ' -iwad DOOM.WAD'
mod... | weapons.zip'
args = proc + flag + conf + save + iwad + mods
system('cat ' + logo)
Popen([core, stid, args]).wait()
|
import csv
from django.db import transaction
from django import forms
from django.forms.forms import NON_FIELD_ERRORS
from django.utils import six
from django.utils.translation import ugettext_lazy as _
class CSVImportError(Exception):
pass
class ImportCSVForm(forms.Form):
csv_file = forms.FileField(requir... | f, *args, **kwargs):
self.importer_class = kwargs.pop('importer_class')
self.dialect = kwargs.pop('dialect')
super(ImportCSVForm, self).__init__(*args, **kwargs)
self.fields['csv_file'].help_text = "Expected f | ields: {}".format(self.expected_fields)
def clean_csv_file(self):
if six.PY3:
# DictReader expects a str, not bytes in Python 3.
csv_text = self.cleaned_data['csv_file'].read()
csv_decoded = six.StringIO(csv_text.decode('utf-8'))
return csv_decoded
el... |
# This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (a... | )
elif isinstance(event, MaKaC.conference.Conference):
text = _("Inherit from parent event")
else:
text = str(repr(parent))
return text
def roomInfo(event, level='real'):
# gets inherited/real/own location/room properties
if level == 'inherited':
room = event.getInh | eritedRoom()
location = event.getInheritedLocation()
text = decideInheritanceText(event.getLocationParent())
elif level == 'real':
room = event.getRoom()
location = event.getLocation()
text = decideInheritanceText(event)
elif level == 'own':
room = event.getOwnR... |
#
# Here is a more complicated example that loads a .csv file and
# then creates a plot from the x,y data in it.
# The data file is the saved curve from partsim.com of the low pass filter.
# It was saved as xls file and then opened in Excel and exported to csv
#
# First import the csv parser, the numeric tools and plot... | # select the first column
# y1_ar = dat[:,1] # select the second column
# y2_ar = dat[:,2] # select the third column
x_ar = [] # Create a new list (array) called dat to hold the data.
y1_ar = []
y2_ar = []
for (x,y1, | y2) in data: # Unpack the csv data into x,y1,y2 variables.
x_ar.append( float(x))
y1_ar.append(float(y1))
y2_ar.append(float(y2)) # Convert the variable from string to float and add to dat
#
# Now plot the data. plt.plot returns a tuple (plot, )
#
(p1,) = plt.plot(x_ar,y1_ar,color='green',label=headers[1]... |
ean=experiment.mean,
# # variance=experiment.variance,
# # slope=experiment.slope,
# # limits=experiment.limits,
# # csv=experiment.csv,
# # normalize=experiment.normalize)
# #
# # f = open(experiment.output_file... | #
# logger.debug("Extracting features for %s", job.filename)
# T0 = time.time()
# feats.calculate_features_per_ | band(a)
# T1 = time.time()
# logger.debug("Feature extraction took %f seconds", T1 - T0)
#
# return feats.band_features
def tza_bands_parallel(experiment, n_processes = 1):
"""
:type experiment: BandExperiment
:type n_processes: int
"""
jobs = []
with open(experiment.mirex_list_fi... |
MDMzQTczRUY3NUE3NzA5QzdFNUYzMDQxNEM=',
'Content-Type': 'text/html',
'Expires': 'Thu, 17 Nov 2005 18:49:58 GMT',
'X-OSS-Meta-Author': 'foo@bar.com',
'X-OSS-Magic': 'abracadabra',
'Host': 'oss-example.oss-cn-hangzhou.aliyuncs.com'
}
action = '/os... | ):
return (httplib.NO_CONTENT,
body,
self.base_headers,
httplib.responses[httplib.NO_CONTENT])
def _foo_bar_object_not_found(self, method, url, body, headers):
# test_delete_object_not_found
return (httplib.NOT_FOUND,
body,
... | .OK])
def _foo_bar_object_delete(self, method, url, body, headers):
# test_delete_object
return (httplib.NO_CONTENT,
body,
headers,
httplib.responses[httplib.OK])
def _list_multipart(self, method, url, body, headers):
query_string = urlpa... |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
u"""
Задание 1: классный Человек.
УСЛОВИЕ:
Реализовать класс Person, который отображает запись в книге контактов.
Класс имеет 4 атрибута:
- surname - строка - фамилия контакта (обязательный)
- first_name - строка - имя контакта (обязательный)
- nickname - строк... | ")
res_date = datetime.date(var_date.year,
var_date.month, var_date.day)
except TypeError:
print | "Incorrect type of birthday date!"
res_date = None
except ValueError:
print "Wrong value of birthday date!"
res_date = None
self.surname = surname
self.first_name = first_name
self.birth_date = res_date
if nickname is not None:
... |
# -*- coding:utf-8 -*-
'''
x1 | 1perf测试工具执行脚本
'''
import os, shutil, re
from test import BaseTest
from lpt.lib.error import *
from lpt.lib import lptxml
from lpt.lib import | lptlog
from lpt.lib.share import utils
from lpt.lib import lptreport
import glob
glxgears_keys = ["gears"]
class TestControl(BaseTest):
'''
继承BaseTest属性和方法
'''
def __init__(self, jobs_xml, job_node, tool, tarball='UnixBench5.1.3-1.tar.bz2'):
super(TestControl, self).__init__(jobs_xml, jo... |
"""The tests for hls streams."""
from datetime import timedelta
from io import BytesIO
from unittest.mock import patch
from homeassistant.setup import async_setup_component
from homeassistant.components.stream.core import Segment
from homeassistant.components.stream.recorder import recorder_save_worker
import homeassi... | : {}
})
with patch(
'homeassistant.components.stream.recorder.RecorderOutput.cleanup'
) as mock_cleanup:
# Setup demo track
source = generate_h264_video()
stream = preload_stream(hass, source)
recorder = stream.add_provider('recorder')
stream.star... | # Wait a minute
future = dt_util.utcnow() + timedelta(minutes=1)
async_fire_time_changed(hass, future)
await hass.async_block_till_done()
assert mock_cleanup.called
async def test_recorder_save():
"""Test recorder save."""
# Setup
source = generate_h264_video()
ou... |
#!/usr/bin/env python3
"""hello with args"""
import sys
import os


def main(argv=None):
    """Greet the user named on the command line.

    :param argv: argument vector; defaults to ``sys.argv``.
    Prints a usage line and exits with status 1 unless exactly one NAME
    argument is given.
    """
    args = sys.argv if argv is None else argv
    if len(args) != 2:
        script = os.path.basename(args[0])
        print('Usage: {} NAME'.format(script))
        sys.exit(1)
    name = args[1]
    print('Hello, {}!'.format(name))


# Guarding the entry point keeps importing this module side-effect free
# (the original ran -- and could sys.exit -- at import time).
if __name__ == '__main__':
    main()
|
import os
import platform
import subprocess
import cat_service
from apps.general import headers
def main():
headers.print_header('LOLCAT FACTORY')
# look for a directory if not there create it
dir_path = get_or_create_output_folder()
n_cats = get_number_cats()
# contact the lol cat api, get binar... | lif platform.system() == 'Windows':
print('with windows')
| subprocess.call(['explorer', folder])
elif platform.system() == 'Linux':
subprocess.call(['xdg-open', folder])
else:
print('Do not support your os "{}"'.format(platform.system()))
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*- |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
| dependencies = [
('twit', '0006_auto_20160419_0248'),
]
operations = [
migrations.CreateModel(
name='Retweet',
fields=[
('id', models.BigIntegerField(serialize=False, help_text='Unique id that comes from Twitter', primary_key=True)),
(... |
# -*- coding: utf-8 -*-
import socket
import struct
import signal
signal.signal(signal.SIGPIPE, signal.SIG_IGN)
class Connection(object):
"""
サーバとの通信用クラス
"""
BINARY_INT = '!1I'
BINARY_TABLE = '!120I'
def __init__(self, addr='127.0.0.1', port=42485):
self.sock = socket.socket(socket.A... | return self
    def __exit__(self, *exc):
        """Close the socket on context-manager exit (exceptions not suppressed)."""
        self.sock.close()
def recv_int(self):
unpacked_value = self._recv_msg(byte_length=4)
| s = struct.Struct(self.BINARY_INT)
integer = s.unpack(unpacked_value)
return integer[0]
def recv_table(self):
unpacked_value = self._recv_msg(byte_length=480)
s = struct.Struct(self.BINARY_TABLE)
ls = s.unpack(unpacked_value)
table = [ls[15 * i: 15 * (i + 1)][:] fo... |
from share.transform.chain.exceptions import * # noqa
from share.transform.chain.links import * # noqa
from share.transform.chain.parsers import * # noqa
from share.transf | orm.chain.transformer import ChainTransformer # noqa
from share.transform.chain.links import Context
# Context singleton to be used f | or parser definitions
# Class SHOULD be thread safe
# Accessing subattributes will result in a new copy of the context
# to avoid leaking data between chains
ctx = Context()
|
True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codenam... | harField', [], {'max_length': '100'})
},
'sentry.filtervalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'FilterValue'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.Cha... | elated.ForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.groupedmessage': {
'Meta': {'unique_together': "(('project', 'logger', 'view', 'checksum'),)", 'object_name': 'GroupedMes... |
from_response(location1)
location2 = ""
content_disposition = ""
for header in headers:
logger.info("header2="+str(header))
if header[0]=="location":
location2 = header[1]
location = location2
if location=="":
... | '
logger.info("[wupload. | py] find_videos #"+patronvideos+"#")
matches = re.compile(patronvideos).findall(data)
for match in matches:
titulo = "[wupload]"
url = match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append( [ titulo , url , 'wupload' ] )
encontra... |
import urllib
import urllib2
import cookielib
import logging
class GISTokenGenerator:
    def __init__(self, email, password):
        """Remember credentials and prepare a cookie-aware urllib2 opener.

        :param email: account email used for the auth.aiesec.org sign-in
        :param password: account password (only used to build the POST body)
        """
        self.cj = cookielib.CookieJar()
        self.opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
        self.email = email
        # Pre-encoded form body for the sign-in POST.
        self.login_data = urllib.urlencode({'user[email]': email, 'user[password]': password})
def generate_token(self):
| logging.info('Generating a token for {0}...'.format(self.email))
self.opener.open('https://auth.aiesec.org/users/sign_in', self.login_data)
token = None
for cookie in self.cj:
if cookie.name == 'expa_token':
token = cookie.value
if token is None:
r... |
import sys, os
from pythonparser import diagnostic
from ...language.environment import ProcessArgumentManager
from ...master.databases import DeviceDB, DatasetDB
from ...master.worker_db import DeviceManager, DatasetManager
from ..module import Module
from ..embedding import Stitcher
from ..targets import OR1KTarget
fr... | )
target = OR1KTarget()
llvm_ir = target.compile(module)
elf_obj = target.assemble(llvm_ir)
elf_shlib = target.link([elf_obj])
benchmark(lambda: embed(),
"ARTIQ embedding")
benchmark(lambda: Module(stitcher),
"ARTIQ transforms and validators")
benchmark(lambda:... | rk(lambda: target.assemble(llvm_ir),
"LLVM machine code emission")
benchmark(lambda: target.link([elf_obj]),
"Linking")
benchmark(lambda: target.strip(elf_shlib),
"Stripping debug information")
if __name__ == "__main__":
main()
|
e'
SORT_METHOD_FULL_PATH = 'full_path'
SORT_METHOD_GENRE = 'genre'
SORT_METHOD_LABEL = 'label'
SORT_METHOD_LABEL_IGNORE_FOLDERS = 'label_ignore_folders'
SORT_METHOD_LABEL_IGNORE_THE = 'label_ignore_the'
SORT_METHOD_LAST_PLAYED = 'last_played'
SORT_METHOD_LISTENERS = 'listeners'
SORT_METH... |
def get_ico | n(self):
return os.path.join(self.get_native_path(), 'icon.png')
def get_fanart(self):
return os.path.join(self.get_native_path(), 'fanart.jpg')
def create_resource_path(self, relative_path):
relative_path = utils.path.normalize(relative_path)
path_comps = relative_path.split('... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.