prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
import csv
from bs4 import BeautifulSoup
from collections import Counter
import re
import os
OUTPUT_NAME = os.getenv('OUTPUT_NAME',
'data_detikcom_labelled_740_7_class.csv')
csv_file = open('data_detikcom_labelled_740.csv')
csv_reader = csv.DictReader(csv_file)
|
# Tranform individual label to candidate pair label
l | abel_map = {
'pos_ahok': 'pos_ahok_djarot',
'pos_djarot': 'pos_ahok_djarot',
'pos_anies': 'pos_anies_sandi',
'pos_sandi': 'pos_anies_sandi',
'pos_agus': 'pos_agus_sylvi',
'pos_sylvi': 'pos_agus_sylvi',
'neg_ahok': 'neg_ahok_djarot',
'neg_djarot': 'neg_ahok_djarot',
'neg_anies': 'neg_... |
# Copyright (c) 2016 Iotic Labs Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://github.com/Iotic-Labs/py-IoticAgent/blob/master/LICENSE
#
# Unless re... | ed 8-bit integer in the range [-128 -> +127]. Derived from the short datatype.'''
UN | SIGNED_BYTE = 'unsignedByte'
'''An unsigned 8-bit integer in the range [0, 255]. Derived from the unsignedShort datatype.'''
DATE = 'date'
'''Represents a specific date. The syntax is the same as that for the date part of dateTime,
with an optional time zone indicator. Example: "1889-09-24".
'''
DATETIME = 'dateTime'
'... |
'''
Kaya Baber
Physics 440 - Computational Physics
Assignment 3
Problem 1
Hamiltonian Dynamics of a Nonlinear Pendulum
Consider a simple pendulum of length in
gravitational field g. The frequency in the limit of small angles is Ω_0 ≡ radical(g/l) , but do not assume the limit
of small angles for the following calculat... | P_θ_dot = -mlsin(θ)
(b) Use a second-order leapfrog algorithm to compute the motion of the pendulum. If we choose a
computational unit of time [T ] = Ω_0^(−1) , then 2π computational time units equals one period in the limit of
small oscillations. Another way to think about it is that we can choose a set of... | s.
(c) Liouville’s Theorem states that the phase-space volume of a infinitesimally close ensemble of states is
conserved. Demonstrate Liouville’s Theorem by considering an ensemble of closely spaced initial conditions.
'''
|
# Copyright (c) 2013 Jordan Halterman <jordan.halterman@gmail.com>
# See LICENSE for details.
import sys, os
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
from active_redis import ActiveRedis
redis = ActiveRedis()
# Create an unnamed set.
myset = redis.set()
# Add items to the set.
myset.add('foo')
... | et = redis.set('myset')
pr | int myset # set([u'foo'])
myset.delete()
print myset # set()
|
#!/usr/bin/env python
# encoding: utf-8
"""A test module"""
import datetime
import tempfile
import os
import shutil
import scores.common as common
class TestCommon(object):
    """Tests for the scores.common timestamp conversion helpers."""
    def test_date_function(self):
        """Round-trip a datetime through timestamp conversion and back."""
        now = datetime.datetime.now().replace(microsecond=0)
        ts = common.datetime_to_timestamp(now)
        assert ts > 0
        restored = common.timestamp_to_datetime(ts)
        assert restored == now
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-31 00:22
from __future__ import unicode_literals
from django.db import migrations
class | Migration(migrations.Migration):
dependencies = [
('configuration_management_tools', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='smslibgateways',
options={'managed': False, 'verbose_name': 'Gateways'},
),
migrations.AlterMo... | options={'managed': False, 'verbose_name': 'Routes'},
),
]
|
import os, sys, shutil
# change back to the build dir
if os.path.dirname( sys.argv[0] ) != "":
os.chdir( os.path.dirname( sys.argv[0] ) )
# find setuptools
scramble_lib = os.path.join( "..", "..", "..", "lib" )
sys.path.append( scramble_lib )
import get_platform # fixes fat python 2.5
from ez_setup import use_set... | amble.py: removing dir:", dir
shutil.rmtree( dir )
# reset args for distutils
me = sys.argv[0]
sys.argv = [ me ]
sys.argv.append( "egg_info" )
if tag is not None:
#sys.argv.append( "egg_info" )
sys.argv.append( "--tag-build=%s" | %tag )
# svn revision (if any) is handled directly in tag-build
sys.argv.append( "--no-svn-revision" )
sys.argv.append( "bdist_egg" )
# do it
execfile( "setup.py", globals(), locals() )
|
"""
__graph_MT_post__OUT2.py___________________________________________________________
Automatically generated graphical appearance ---> MODIFY DIRECTLY WITH CAUTION
__________________________________________________________________________
"""
import tkFont
from graphEntity import *
from GraphicalForm import ... | gesAtRunTime = 0
self.constraintList = []
if self.semanticObject: atribs = self.semanticObject.attributesToDraw()
else: atribs = None
self.graphForms = []
self.imageDict = self.getImageDict()
def DrawObject(self, drawing, showGG = 0):
self.dc = drawing
if sho... | ector'), outline = '', fill = '' )
self.connectors.append( h )
h = drawing.create_rectangle(self.translate([20.0, 20.0, 190.0, 100.0]), tags = self.tag, stipple = '', width = 1, outline = 'black', fill = 'moccasin')
self.gf4 = GraphicalForm(drawing, h, "gf4")
self.graphForms.append(self... |
r'''
<license>
CSPLN_MaryKeelerEdition; Manages images to which notes can be added.
Copyright (C) 2015-2016, Thomas Kercheval
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the L... | p version) into each web2py frame.
Renames scaffolding application to 'MKE_Static_Name'.
Currently:
To Do:
Done:
'''
import os | , sys, shutil
from the_decider import resolve_relative_path as resolve_path
def check_file_exist(path):
    """Abort the program if no file exists at ``path``.

    :param path: filesystem path to check.
    :return: None when the path exists; otherwise the process exits via
        ``sys.exit`` (i.e. raises ``SystemExit``) with an error message.
    """
    # Guard clause instead of the original empty if/pass branch.
    if not os.path.exists(path):
        sys.exit('File {} doesn\'t exist'.format(path))
    return None
def grab_out_paths(number_apps, app_... |
t os.path.getsize(data), '%r is an empty file' % data
result = self.read_html(data, 'Arizona', header=1)[0]
assert result['sq mi'].dtype == np.dtype('float64')
def test_decimal_rows(self):
# GH 12907
data = StringIO('''<html>
<body>
<table>
... | ml_df)
expected_df = DataFrame({'a': [np.nan, np.nan]})
html_df = read_html(html_data, keep_default_na=True)[0]
tm.assert_frame_equal(expected_df, html_df)
def test_multiple_header_rows(self):
# Issue #13434
expected_df = DataFrame(data=[("Hillary", 68, "D"),
... | "Bernie", 74, "D"),
("Donald", 69, "R")])
expected_df.columns = [["Unnamed: 0_level_0", "Age", "Party"],
["Name", "Unnamed: 1_level_1",
"Unnamed: 2_level_1"]]
html = expected_df.to_html(index=False)
... |
class Solution(object):
    def strStr(self, haystack, needle):
        """Return the index of the first occurrence of needle in haystack.

        :type haystack: str
        :type needle: str
        :rtype: int -- first match index, 0 for an empty needle,
            -1 when needle does not occur
        """
        # str.find implements exactly this contract (empty needle -> 0,
        # not found -> -1) in C; the hand-rolled slice-and-compare loop
        # also recomputed len(needle) on every iteration.
        return haystack.find(needle)
|
import logging
from ..models import Activity
from .date import activity_stream_date_to_datetime, datetime_to_string
log = logging.getLogger(__name__)
def activity_from_dict(data):
log.debug("Converting YouTube dict to Activity Model")
activity_dict = activity_dict_from_dict(data)
return Activity.from_ac... | stream_object["@context"] = "http://www.w3.org/ns/activitystreams"
stream_object["@type"] = "Activity"
date = blob.get("snippet").get("publishedAt")
date = activity_stream_date_to_datetime(date)
stream_object["published"] = datetime_to_string(date)
stream_object["provider"] = {
"@type": "S... | e.com/user/{}".format(snippet.get("channelTitle")),
"displayName": snippet.get("channelTitle"),
}
stream_object["object"] = {
"@id": "https://www.youtube.com/watch?v={}".format(blob.get("id").get("videoId")),
"@type": "Video",
"displayName": snippet.get("title"),
"url": ... |
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 7 13:10:05 2016
@author: thasegawa
"""
import os
import pandas as pd
economic_list = list(pd.read_excel('data\\fields\\economicIndicators_Real.xlsx', header=None)[0])
#fuel_list = list(pd.read_excel('data\\fields\\fuel_binary.xlsx', header=None)[0]) + [None]
fuel_list ... | = pd.DataFrame({'Economic Indicator': economic_list,
group: coef_list})
else:
R2_out[group] = R2_l | ist
coef_out[group] = coef_list
R2_out.to_excel('data\\regress_out\\regresssummary_R2.xlsx', index = False)
coef_out.to_excel('data\\regress_out\\regresssummary_coef.xlsx', index = False) |
# Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# This file is licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# This f... |
# snippet-sourcedescription:[MyCodeCommitFunction.py demonstrates how to use an AWS Lambda function to return the URLs used for cloning an AWS CodeCo | mmit repository to a CloudWatch log.]
# snippet-service:[codecommit]
# snippet-keyword:[Python]
# snippet-sourcesyntax:[python]
# snippet-sourcesyntax:[python]
# snippet-keyword:[AWS CodeCommit]
# snippet-keyword:[Code Sample]
# snippet-keyword:[GetRepository]
# snippet-sourcetype:[full-example]
# snippet-source... |
idom import _append_child, _set_attribute_node
from xml.dom.NodeFilter import NodeFilter
from xml.dom.minicompat import *
TEXT_NODE = Node.TEXT_NODE
CDATA_SECTION_NODE = Node.CDATA_SECTION_NODE
DOCUMENT_NODE = Node.DOCUMENT_NODE
FILTER_ACCEPT = xmlbuilder.DOMBuilderFilter.FILTER_ACCEPT
FILTER_REJECT = xmlbuilder.DOM... | CTYPE_EMPTY
else:
return False
    def isId(self, aname):
        """Return True if the attribute named `aname` is declared with
        type "ID" in this element's attribute declarations.
        """
        # _attr_info rows: info[1] is the attribute name; info[-2] is
        # presumably the declared type string (e.g. "ID") -- only the
        # first row matching `aname` is consulted.
        for info in self._attr_info:
            if info[1] == aname:
                return info[-2] == "ID"
        return False
def isIdNS(self, euri, ename, auri, aname):
# not sure this is meaningful
return self.isI... | n_setdefault(s, s)
def _parse_ns_name(builder, name):
assert ' ' in name
parts = name.split(' ')
intern = builder._intern_setdefault
if len(parts) == 3:
uri, localname, prefix = parts
prefix = intern(prefix, prefix)
qname = "%s:%s" % (prefix, localname)
qname = intern(qn... |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-17 19:24
from __future__ import unicode_literals
import c3nav.mapdata.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('mapdata', '0047_remove_mapupdat... | models.DecimalField(db_index=True, decimal_places=2, max_digits=6, verbose_name='min x coordinate')),
('miny', models.DecimalField(db_index=True, decimal_places=2, max_digits=6, verbose_name='min y coordinate')),
('maxx', models.DecimalField(db_index=True, decimal_places=2, max_digits=6... | ('geometry', c3nav.mapdata.fields.GeometryField(default=None, geomtype='polygon')),
('space', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ramps', to='mapdata.Space', verbose_name='space')),
],
options={
'verbose_nam... |
def _checkInput(index):
if index < 0:
raise ValueError("Indice negativo non supportato [{}]".format(index))
elif type(index) != int:
raise TypeError("Inserire un intero [tipo input {}]".format(type(index).__name__))
def fib_from_string(index):
    """Return the ``index``-th Fibonacci number, read out of a digit string."""
    _checkInput(index)
    digits = "".join("0 1 1 2 3 5 8".split())
    return int(digits[index])
def fib_from_list(index):
    """Return the ``index``-th Fibonacci number from a precomputed sequence."""
    _checkInput(index)
    return [0, 1, 1, 2, 3, 5, 8][index]
def fib_from_algo(index):
_checkInput(index)
current_number = current_index = 0
base = 1
while current_index ... | ber
def recursion(index):
    """Naive recursive Fibonacci: F(n) = F(n-1) + F(n-2), with F(0)=0, F(1)=1.

    Indices <= 1 (including negatives) are returned unchanged (base case).
    """
    return index if index <= 1 else recursion(index - 1) + recursion(index - 2)
def fib_from_recursion_func(index):
    """Return the ``index``-th Fibonacci number via the recursive helper.

    Input is validated by ``_checkInput`` (raises TypeError/ValueError).
    """
    _checkInput(index)
    return recursion(index)
# Public alias: the recursive implementation is the module's default.
calculate = fib_from_recursion_func
#!/usr/bin/env python
import analyze_conf
import sys
import datetime, glob, job_stats, os, subprocess, time
import operator
import matplotlib
# Set the matplotlib output mode from config if it exists
if not 'matplotlib.pyplot' in sys.modules:
try:
matplotlib.use(analyze_conf.matplotlib_output_mode)
except NameE... | (ename,ld.equiv_patterns),8)
if ename=='unknown':
return retval
flag=False
if mean_mem_rate < 75.*1000000000./16.:
flag=True
return (scipy.stats.tmean(stall_frac),
scipy.stats.tmean((load_rate - (l1_rate))/load_rate),
scipy.stats.tmean(clock_rate/inst_rate),ename,
flag)
... | 'hosts for a pair of keys')
parser.add_argument('filearg', help='File, directory, or quoted'
' glob pattern', nargs='?',default='jobs')
parser.add_argument('-p', help='Set number of processes',
nargs=1, type=int, default=[1])
n=parser.parse_args()
filelist=tspl_... |
def populate(template, values):
    # Fill in a template's <<tag>> placeholders from a values dictionary.
    # `template` is a dict (not a string) with:
    #   "text"       - the template string containing <<name>> tags
    #   "parameters" - iterable of tag names to substitute
    # example:
    # > template = {"text": "hello there <<your name>>!", "parameters": ["your name"]}
    # > values = {"your name": "bukaroo banzai"}
    # > populate(template, values)
    # "hello there bukaroo banzai!"
    result = template["text"]
    # `name` tracks the last tag looked up so the error message below can
    # report which entry was missing ("None" until the loop starts).
    name = "None"
    try:
        for name in template["parameters"]:
            result = result.replace("<<%s>>" % name, str(values[name]))
    except KeyError:
        # Best-effort: a missing `values` entry is reported, and the
        # partially substituted text is still returned. NOTE(review): a
        # KeyError from template["parameters"] itself is caught here too.
        print "Template value dictionary is missing the entry:", name
    return result
### dts file template
dtsContents = {
"parameters... |
# Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publ... | on_delete="cascade"),
"product_id": fields.Many2One("product", "Product", required=True),
"qty": fields.Decimal("Qty", required=True, scale=6),
"uom_id": fields.Many2One("uom", "UoM", required=True),
"location_id": fiel | ds.Many2One("stock.location", "RM Warehouse"),
"container": fields.Selection([["sale", "From Sales Order"]], "RM Container"),
"lot": fields.Selection([["production", "From Production Order"]], "RM Lot"),
"issue_method": fields.Selection([["manual", "Manual"], ["backflush", "Backflush"]], "Issue ... |
#!/usr/bin/python
import re
userInput = raw_input("input equation\n")
numCount = 0
operandCount = 0
entryBracketCount = 0
exitBracketCount = 0
charCount = 0
endOfLine = len(userInput) - 1
for i in range(len(userInput)):
if (re.search('[\s*a-z\s*A-Z]+', userInput[i])):
charCount = charCount + 1
print operandCoun... | expression'
print '4'
exit(0)
elif (re.search('[\(]+', userInput[i])):
entryBracketCount = entryBracketCount + 1
print | operandCount, " 4"
elif (re.search('[\)]+', userInput[i])):
exitBracketCount = exitBracketCount + 1
print operandCount, " 5"
if(re.search('[\)]+', userInput[endOfLine])):
continue
else:
if(re.search('[\(]+', userInput[i + 1])):
print 'invalid expression'
print '5'
exit(0)
print operandCount,... |
"""Support for Zigbee switches."""
import voluptuous as vol
from homeassistant.components.switch import SwitchDevice
from . import PLATFORM_SCHEMA, ZigBeeDigitalOut, ZigBeeDigitalOutConfig
CONF_ON_STATE = "on_state" |
DEFAULT_ON_STATE = "high"
STATES = ["high", "low"]
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({vol.Optional(CONF_ON_STATE): vol.In(STATES)})
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Zigbee switch platform."""
add_entities([ZigBeeSwitch(hass, ZigBeeDigitalOutConfig(c... | ntation of a Zigbee Digital Out device."""
pass
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Python 3.2 code
#
# Copyright (c) 2012 Jeff Smits
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your opt... | his program. If not, see <http://www.gnu.org/licenses/>.
#
# C3P - C-compatible code preprocessor
# This commandline tool reads a file and expands macro's.
#
# This file is a utility file and doesn't contain the whole tool | .
# Also it does not run standalone.
#
# This file imports all the tests
from .acceptance_tests import Acc_test
from .unit_tests import * |
#!/usr/bin/env python2
#
# wsi_bot_codebook3
#
# Version 3 of codebook construction:
#
# -uses OpenCV for faster operation - but different local descriptors than in the 1st version;
# -uses annotation files for defining the regions from where the descriptors are to be
# extracted
# - try to optimize the codebook with... | frq = np.zeros((len(unique_image_names), k))
for i in range(vq.labels_.size):
frq[unique_image_names.index(all_image_names[i]), vq.labels_[i]] += 1.0
for i in range(len(unique_image_names)):
if frq[i, :].sum() > 0:
frq[i, :] /= frq[i, :].sum()
if args... | for i in range(k):
_, pv[i] = ttest_ind(frq[y == 0, i], frq[y == 1, i])
idx = np.argsort(pv) # order of the p-values
if args.verbose:
print("\t...classification performance estimation")
clsf = LDA(solver='lsqr', shrinkage='auto').fit(frq[:,idx[:10]], y) # ke... |
"""Image renderer modul | e."""
from mfr.core import RenderResult
def render_img_tag(fp, src=None, alt=''):
"""A simple image tag renderer.
:param fp: File pointer
:param src: Path to file
:param alt: Alternate text for the image
:return: RenderResult object containing the content html
"""
# Default src to the fil... | return RenderResult(content)
|
self._disable_message_config()
if not self.text:
raise ValueError('No text configured')
kw = {}
kw.upd | ate(self.config)
kw['message'] = self
try:
notifier = zope.component.getUtility(
zeit.push.interfaces.IPushNotifier, name=self.type)
notifier.send(self.text, self.url, **kw)
self.log_success()
log.info('Push notification for %s sent', self... | ror during push to %s with config %s',
self.type, self.config, exc_info=True)
def _disable_message_config(self):
push = zeit.push.interfaces.IPushMessages(self.context)
push.set(self.config, enabled=False)
@property
def text(self):
push = zeit.push.interfaces.... |
import sys
import os
from scale_model import StartupDataModel, VCModel
from flask.ext.restful import Resource, reqparse
from flask import Flask, jsonify, request, make_response
import os
from database import db
from flask.ext.security import current_user
from json import dumps
class Scale_DAO(object):
def __init_... | cb_url, angellist_url=al_url, description=description)
db.session.add(new_data)
db.session.commit()
return jsonify(status=200,msg="Data added successfully!")
except:
| jsonify(msg="Error adding your data.")
|
import numpy
# from nmt import train
# from nmtlm import train
from nmt import train
def main(job_id, params):
print params
trainerr, validerr, testerr = train(saveto=params['model'][0],
reload_=params['reload'][0],
dim_word=param... | saveFreq=5000,
sampleFreq=10,
use_dropout=params['use-dropout'][0])
return validerr
if __name__ == '__main__':
main(0, {
'model': ['model.npz'],
'dim_word': [38 | 4],
'dim': [512],
'n-words': [30000],
'optimizer': ['adam'],
'decay-c': [0.],
'clip-c': [10.],
'use-dropout': [False],
'learning-rate': [0.0001],
'reload': [False]})
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
... | n=dict(required=True),
token=dict(required=True, no_log=True),
state=dict(required=True, choices=['started', 'finished', 'failed']),
hosts=dict(required=False, default=[socket.gethostname()], aliases=['host']),
env=dict(required=False),
owner=dict(required=Fal... | source_system=dict(required=False, default='ansible'),
validate_certs=dict(default='yes', type='bool'),
url=dict(required=False, default='https://api.bigpanda.io'),
),
supports_check_mode=True,
check_invalid_arguments=False,
)
token = module.params['token']
... |
import socket
import random
from PIL import Image
import json
import sys, getopt
import math
import pika
# Screen VARS
offset_x = 80
offset_y = 24
screen_width = 240
screen_height = 240
# Internal options
queueAddress = ''
fileName = ''
workers = 36
Matrix = []
def main(argv):
global fileName, workers
inp... | inCredentials(username='pomper',
password='pomper')))
channel = connection.channel()
channel.queue_declare(queue='pomper', durable=False,)
channe | l.queue_purge(queue='pomper')
for worker in arrayOfWorkers:
for pixels in worker:
channel.basic_publish(exchange='',
routing_key='pomper',
body=json.dumps(pixels))
if __name__ == "__main__":
main(sys.argv[1:])
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2011 OpenStack, LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache... | OUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language gov | erning permissions and limitations
# under the License.
from sqlalchemy import Column, Table, MetaData, String
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
networks = Table('networks', meta, autoload=True)
networks.c.dns.alter(name='dns1')
dns2 = Column('dns2', S... |
# Generated by Django 2.1. | 5 on 2019-10-01 19:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('comment', '0005_auto_20191001_1559'),
]
operations = [
migrations.AlterField(
model_name='dataset',
name='dts_type',
field=models.... | or defect comments.', max_length=1, verbose_name='Type'),
),
]
|
#!/usr/bin/env python
#coding:utf-8
# Author: mozman --<mozm | an@gmx.at>
# Purpose: test mixin Clipping
# Created: 31.10.2010
# Copyright (C) 2010, Manfred Moitzi
# License: GPLv3
import unittest
from svgwrite.mixins import Clipping
from svgwrite.base import BaseElement
class SVGMock(BaseElement, Clipping):
elementname = 'svg'
class TestClipping(unittest.Tes... | GMock(debug=True)
obj.clip_rect(1, 2, 3, 4)
self.assertEqual(obj['clip'], 'rect(1,2,3,4)')
def test_clip_rect_auto(self):
obj = SVGMock(debug=True)
obj.clip_rect('auto', 'auto', 'auto', 'auto')
self.assertEqual(obj['clip'], 'rect(auto,auto,auto,auto)')
if __name__=... |
from typing import Iterable, Callable, Optional, Any, List, Iterator
from dupescan.fs._fileentry import FileEntry
from dupescan.fs._root import Root
from dupescan.types import AnyPath
FSPredicate = Callable[[FileEntry], bool]
ErrorHandler = Callable[[EnvironmentError], Any]
def catch_filter(inner_filter: FSPredicat... | er: Optional[FSPredicate]=None,
onerror: Optional[ErrorHandler]=None
):
self._recursive = bool(recursive)
self._onerror = noerror if onerror is None else onerror
self._dir_filter = catch_filter(dir_object_filter, self._onerror)
self._file_filter = catch_filter(file_object... | erator[FileEntry]:
for root_index, root_path in enumerate(paths):
root_spec = Root(root_path, root_index)
try:
root_obj = FileEntry.from_path(root_path, root_spec)
except EnvironmentError as env_error:
self._onerror(env_error)
... |
#!/usr | /bin/env python3
from pyserv.databrow | se import main
main()
|
import os, sys
up_path = os.path.abspath('..')
sys.path.append(up_path)
from numpy import *
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
from matplotlib import rc
from objects import SimObject
from utils import scalar
from covar import draw_ellipsoid, vec2cov, cov2vec,\
pr... | t.camera_transform(x[0:self.bot.NX])
obj_pos = x[self.bot.NX:]
z = mat(zeros((10,1)))
z[0:7] = camera_transform
z[7:10] = obj_pos
return z
"""
def fov_state(self, x):
xy = mat(self.bot.traj_pos(x)).T
theta = self.bot.orientation(x)
#print vstack... | at(x).T
if isinstance(xy, tuple) or xy.shape[0] < xy.shape[1]:
xy = mat(xy).T
return vstack((xy, theta, x[self.bot.NX:]))
"""
def observe(self, scene, x=None):
zs = self.bot.observe(scene, x[0:self.bot.NX])
return vstack((zs, robots.Robot.observe(self, scene, x)))
... |
class Solution:
    def toLowerCase(self, str: str) -> str:
        """Lowercase the ASCII letters A-Z in `str`; every other character
        (digits, punctuation, non-ASCII letters) is left untouched.

        NOTE: the parameter deliberately keeps its original name `str`
        (shadowing the builtin) so the external interface is unchanged.
        """
        offset = ord("a") - ord("A")  # 32
        chars = []
        for ch in str:
            if "A" <= ch <= "Z":
                chars.append(chr(ord(ch) + offset))
            else:
                chars.append(ch)
        # Join once at the end: the original `rs = rs + ...` loop built the
        # result with quadratic string concatenation.
        return "".join(chars)
# Quick demonstration of Solution.toLowerCase.
sol = Solution()
print(sol.toLowerCase("Hello"))
|
from .nucleicacidpartitemcontroller import NucleicAcidPartItemController
from .oligoitemcontroller import OligoItemController
from .stranditemcontroller import StrandItemController
from .viewrootcontroller import ViewRootController
from .virtualhelixitemcontrolle | r import | VirtualHelixItemController |
ure__ import absolute_import
from __future__ import division
# Import Type Annotations
from __future__ import print_function
import io
import os
import apache_beam as beam
from apache_beam.metrics import Metrics
import matplotlib.pyplot as plot
import tensorflow as tf
from tf import gfile
from typing import List
from t... | rn '\n'.join([str(x) for x in result])
def proven_or_open(proof_stat):
if proof_stat.num_theorems_proved > 0:
yield beam.pvalue.TaggedOutput('proven',
| '%d' % proof_stat.theorem_fingerprint)
else:
yield beam.pvalue.TaggedOutput('open',
'%d' % proof_stat.theorem_fingerprint)
def make_proof_logs_collection(root, proof_logs: Text):
return (root | 'Create' >> recordio.ReadFromRecordIO(
proof_logs,... |
# NOTE(review): this looks like a type-inference test fixture -- the inline
# comments appear to encode expected inferred types, so they are kept
# verbatim and the code is left untouched.
def propagate(la): # la: [list(int)]
    print la, la # [str], [str]
propagate([1]) # []
propagate([2]) # []
| |
from brms.settings.base import *
import dj_database_url
|
DEBUG = False
ALLOWED_HOSTS = ['.example.com']
# Use the cached template loader so template is compiled once and read from
# memory instead of reading from disk on each load.
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
... | ase_url.config() |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-07 19:20
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
| ('posts', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='post',
name='url',
field=m | odels.URLField(),
),
]
|
# -*- coding: utf-8 -*-
import os,math
from qgis.core import NULL
from mole import oeq_global
from mole.project import config
from mole.extensions import OeQExtension
from mole.stat_corr import rb_contemporary_base_uvalue_by_bui | lding_age_lookup
def calculation(self=None, parameters={},feature = None):
from math import floor, ceil
from PyQt4.QtCore import QVariant
ahde = NULL
if not oeq_global.isnull([parameters['HLAE']]):
ahde=float(parameters['HLAE']) + 40.0 * 0.8
# Air Change Heatloss for standard Rooms 40 | kWh/m2a nach Geiger Lüftung im Wohnungsbau
# 20% of the Total Area are used for stairs and floors
return {'AHDE': {'type': QVariant.Double, 'value': ahde}}
extension = OeQExtension(
extension_id=__name__,
category='Evaluation',
subcategory='Building',
extension_name='AHD Building per Livi... |
"""
Serializers for Video Abstraction Layer
Serialization is usually sent through the VideoSerializer which uses the
EncodedVideoSerializer which uses the profile_name as it's profile field.
"""
from rest_framework import serializers
from django.core.exceptions import ValidationError
from edxval.models import Profile... | eoSerializer(serializers.ModelSerializer):
"""
Serializer for EncodedVideo object.
Uses the profile_name as it's profile value instead of a Profile object.
"""
profile = serializers.SlugRelatedField(slug_field="profile_name")
class Meta: # pylint: disable=C1001, C0111
model = EncodedV... | ty(self, data):
"""
This hook is required for bulk update.
We need to override the default, to use the slug as the identity.
"""
return data.get('profile', None)
class SubtitleSerializer(serializers.ModelSerializer):
"""
Serializer for Subtitle objects
"""
conte... |
from django.conf.urls.defaults | import patterns, url
urlpatter | ns = ()
|
c = s.Deserialize(fs)
fs.Close()
return bc
def writeLatestBlockNotificationFile(cluster, blockIndex):
fs = FileStream(Environs.FileSystem.Paths["settingsPath"] + "\\BlockHead\\latestBlock.txt", FileMode.Create)
sw = StreamWriter(fs)
sw.WriteLine(cluster + "\t" + str(blockIndex))
sw.Close()
fs.Close()
... |
bc.GetModulationByName("RF2F").PhysicalStep = hc.RF2FrequencyStep
bc.GetModulationByName("LF1").Centre = hc.FLPZTVoltage
bc.GetModulationByName("LF1").Step = hc.FLPZTStep
bc.GetModulationByName("LF1").PhysicalCentre = hc.I2LockAOMFrequencyCentre
bc.GetModulationByName("LF1").PhysicalStep = hc.I2LockAOMFreque... | int("Generating waveform codes ...")
eWave = bc.GetModulationByName("E").Waveform
eWave.Name = "E"
lf1Wave = bc.GetModulationByName("LF1").Waveform
lf1Wave.Name = "LF1"
ws = WaveformSetGenerator.GenerateWaveforms( (eWave, lf1Wave), ("B","DB","PI","RF1A","RF2A","RF1F","RF2F") )
bc.GetModulationByName("B").Wa... |
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
long_desc = '''
This package contains the ${name} Sphinx extension.
.. add description here ..
'''
requires = ['Sphinx>=0.6']
setup(
name='sphinxcontrib-${name}',
version='0.1',
url='http://bitbucket.org/birkenfeld/sphinx-contrib',
... | long_desc,
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Progr... | c :: Utilities',
],
platforms='any',
packages=find_packages(),
include_package_data=True,
install_requires=requires,
namespace_packages=['sphinxcontrib'],
)
|
* beta[0][1]
assert all(abs((a[0] * b[0] + a[1] * b[1]) / x - 1) < 0.00001 for a, b in zip(alpha, beta))
@staticmethod
def getGamma(gammas, k, layout, intent):
index = 2 * (1 if layout[k + 1] else 0) + (1 if intent else 0)
return gammas[index]
@staticmethod
def getForwardBackw... | f c != 0), (alpha, beta, varphi, clicks)
# calculate P(C | I, G) for k = 0
sessionEstimate['C'] = alpha[0][0] * beta[0][0] + alpha[0][1] * beta[0][1] # == 0 + 1 * beta[0][1]
for k, C_k in enumerate(clicks):
a_u = positionRelevances['a'][k]
s_u = positionRelevances['s... | er --- P(S_k = 0 | C_k) P(C_k | E_k = 1)
if C_k == 0:
sessionEstimate['a'][k] = a_u * varphi[k][0]
sessionEstimate['s'][k] = 0.0
else:
sessionEstimate['a'][k] = 1.0
sessionEstimate['s'][k] = varphi[k + 1][0] * s_u / (s_u + (1 - gamm... |
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This progra... | ies)
def getStatsFilename(filename, statsInfo, filters=[]):
if not os.path.isabs( | filename):
raise RuntimeError("Filename %s is not an absolute path" % filename)
if not filename.endswith(".csv"):
raise RuntimeError("generateStats only supports csv files: %s" % filename)
d = os.path.dirname(filename)
basename = os.path.basename(filename).replace("csv", "stats")
sstring = "stats"
for... |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-08-02 21:54
from __future__ import unicode_literals
from django.db import migrations, models
def copy_to_question_page(apps, schema_editor):
current_database = schema_editor.connection.alias
QuestionPage = apps.get_model('wizard_builder.QuestionPag... | verbose_name='position'),
),
migrations.AddField(
model_name='questionpage',
name='new_section',
field=models.IntegerField(choices=[(1, 'When'), (2, 'Where'), (3, 'What'), (4, 'Who')], default=1),
) | ,
migrations.AddField(
model_name='questionpage',
name='new_sites',
field=models.ManyToManyField(to='sites.Site'),
),
migrations.RunPython(
copy_to_question_page,
reverse_code=migrations.RunPython.noop,
),
]
|
f.screen.write_process_input = \
lambda data: p_in.write(data.encode())
self.stream = pyte.ByteStream()
self.stream.attach(self.screen)
def feed(self, data):
self.stream.feed(data)
def updateAttributes(self, initialize = False):
buffer = self.screen.buffer
li... | pty'
def injectTextToScreen(self, msgBytes, screen = None):
if not screen:
screen = self.p_out.fileno()
if isinstance(msgBytes, str):
msgBytes = bytes(msgBytes, 'UTF-8')
os.write(screen, msgBytes)
def getSessionInformation(self):
self.env['screen']['aut... | gnoreScreens'] = []
self.env['general']['prevUser'] = getpass.getuser()
self.env['general']['currUser'] = getpass.getuser()
def readAll(self, fd, timeout = 0.3, interruptFd = None, len = 65536):
msgBytes = b''
fdList = []
fdList += [fd]
if interruptFd:
fd... |
# -*- coding: utf-8 -*-
# Generated by Django 1. | 10.1 on 2016-12-17 20:50
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('jordbruksmark', '0002_auto_20161217_2140'),
]
operations = [
migrations.AlterModelOptions(
name='wochen_menge',
... | s={'verbose_name': 'Wochen Menge', 'verbose_name_plural': 'Wochen Mengen'},
),
]
|
S = [
NETWORK_LINK_TYPE_PHY,
NETWORK_LINK_TYPE_BOND,
NETWORK_LINK_TYPE_VLAN,
NETWORK_SERVICE_NAMESERVER
]
def _parse_subnets(self, subnets, link_name):
networks = []
if not subnets or not isinstance(subnets, list):
LOG.warning("Subnets '%s' is empty ... | bond_xmit_hash_policy)
bond_interfaces = item.get('bond_interfaces')
b | ond = network_model.Bond(
members=bond_interfaces,
type=bond_mode,
lb_algorithm=bond_xmit_hash_policy,
lacp_rate=bond_lacp_rate,
)
link = network_model.Link(
id=item.get('name'),
name=item.get('name'),
type=network_mode... |
#!/usr/bin/env python
import sys
def inv(s):
    """Negate a signed-number token: drop a leading '-', turn a leading
    '+' into '-', and prefix a bare number with '-'."""
    sign = s[0]
    if sign == '-':
        return s[1:]
    if sign == '+':
        return '-' + s[1:]
    # plain (unsigned) number
    return '-' + s
# This filter takes no command-line arguments; input is read from stdin.
if len(sys.argv) != 1:
    print 'Usage:', sys.argv[0]
    sys.exit(1)
for line in sys.stdin:
linesplit = line.strip().split()
if len(linesplit) == 3:
assert(linesplit[0] == 'p')
print('p ' + inv(linesplit[2]) + ' ' + linesplit[1])
elif len(linesplit) == 5:
assert(linesplit[0] == 's')
print('s '... | + \
inv(linesplit[4]) + ' ' + linesplit[3] )
elif len(linesplit) == 0:
print
|
# -*- coding: utf-8 -*-
"""
anparser - an Open Source Android Artifact Parser
Copyright (C) 2015 Preston Miller
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(a... | a
import pandas as pd
path = None
match = None
yara_list = []
def yara_parser(file_list, rules_path):
"""
Parses files for Malware signatures with Yara
:param file_list: List of all files
:param rules_path: Path t | o custom Yara rules
:return: Dictionary of matches
"""
try:
rules = yara.compile(rules_path)
except (yara.libyara_wrapper.YaraSyntaxError, IOError) as exception:
msg = 'Yara Rule Compilation Error: {0:s}'.format(rules_path + ' > ' + str(exception))
print(msg)
logging.erro... |
from dart.model.base import BaseModel | , dictable
@dictable
class ApiKey(BaseModel):
    """API credentials (api_key / api_secret) associated with a user."""
    def __init__(self, id, user_id, api_key, api_secret):
        """
        :type user_id: str
        :type api_key: str
        :type api_secret: str
        """
        # NOTE(review): `id` is undocumented upstream — presumably the record's
        # primary key; confirm its type against BaseModel usage.
        self.id = id
        self.user_id = user_id
        self.api_key = api_key
        self.api_secret = api_secret
|
import time
import recordlib
if __name__ == "__main__":
recordlib.initialize()
print("waiting for input")
recordlib.logging.info("waiting for input")
try:
# define interrupt, get rising signal, debounce pin
recordlib.GPIO.add_event_detect(
recordlib.TASTER_1,
rec... |
recordlib.GPIO.RISING,
c | allback=recordlib.stop_recording,
bouncetime=1000
)
# keep script running
while True:
time.sleep(0.5)
finally:
recordlib.GPIO.cleanup()
print("\nQuit\n")
|
from __future__ import print_function
from imports import *
import common
class Base( common.Base ):
    # Shared base for the test classes in this module; all behavior is
    # inherited from common.Base.
    pass
class TestUnitMiSeqToNewbler( Base ):
    """Unit tests for bactpipeline.fix_fastq.miseq_to_newbler_id."""
    def _C( self, *args, **kwargs ):
        # Late import of the function under test.
        from bactpipeline.fix_fastq import miseq_to_newbler_id
        return miseq_to_newbler_id( *args, **kwargs )
    def test_r1_correct( self ):
        eq_( 'abcd#0/1 (abcd 1)', self._C( 'abcd 1' ) )
    def test_r2_correct( self ):
        eq_( 'abcd#0/2 (abcd 2)', self._C( 'abcd 2' ) )
class TestUnitModFqRead( Base ):
def _C( self, *args, **kwargs ):
... | fix_fastq import mod_fq_read
return mod_fq_read( *args, **kwargs )
def test_mods_correctly( self ):
from bactpipeline.fix_fastq import miseq_to_newbler_id as mtni
id = 'abcd 1'
seq = 'ATGC'
qual = 'IIII'
r = self._C( id, seq, qual )
read = '{0}\n{1}\n+\n{2}\n... |
"""
@brief test log(time=200s)
"""
import os
import unittest
import math
import warnings
from pyquickhelper.loghelper import fLOG
from pyquickhelper.pycode import get_temp_folder, is_travis_or_appveyor
from ensae_teaching_cs.special.image.image_synthese_base import Vecteur, Couleur, Source, Repere
from ensae_teach... | pygame.image.save(screen, os.path.join(temp, "scene_bette.png"))
if __name__ == "__main__":
wait_event(pygame)
if __name__ == "__main | __":
unittest.main()
|
# ===========================================================================
# Copyright 2013 University of Limerick
#
# This file is part of DREAM.
#
# DREAM is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Founda... |
Created on 18 Feb 2013
@author: George
'''
'''
models a frame entity. This can flow through the system and carry parts
'''
from simpy import Resource
from Globals import G
from Entity import Entity
#The entity object
class Frame(Entity):
type="Frame"
capacity=4 #the number of parts that the frame ca... | Entity.__init__(self,id=id,name = name)
self.Res=Resource(self.capacity)
#dimension data
self.width=2.0
self.height=2.0
self.lenght=2.0
    def getFrameQueue(self):
        """Return the users of the frame's internal Resource — the parts
        currently carried by this frame."""
        return self.Res.users
|
#
# This is a parser that generates the document tree for you.
#
# To use this parser, create an instance of XElementParser:
# parser = saxexts.make_parser()
# xp = XElementParser(parser)
#
# If you have defined classes in the current environment, you might want ot
# pass this environment *to* the parser, so your... | g
import sys
import types
from xml.sax import saxexts
from xml.sax import saxlib
from xelement import XElement, XTreeHandler
class XElementParser:
def __init__(self, outer_env={}, parser=None):
if parser == None:
self.parser = saxexts.XMLValParserFa | ctory.make_parser()
else:
self.parser = parser
self.parser_error_handler = ErrorPrinter()
self.parser.setErrorHandler(self.parser_error_handler)
self.xth = XTreeHandler(IgnoreWhiteSpace='yes',
RemoveWhiteSpace='yes',
CreateElementMap='yes',
... |
import os
import re
BROKER_URL = os.getenv("CLOUDAMQP_URL", 'amqp://')
# BROKER_POOL_LIMIT = None
MONGOLAB_URI = None
MONGOLAB_DB = None
URI_WITH_AUTH = None
mongolab = os.getenv("MONGOLAB_URI")
if mongolab is not None:
uri_pat = r"mongodb://([^:]+):([^@]+)@([^:]+):(\d+)/(.+)"
user, passwd, host, port, db = ... | , mongolab).groups()
uri = "mongodb://{}:{}".format(host, port)
MONGOLAB_URI = uri
MONGOLAB_DB = db
# CELERY_RESULT_BACKEND = uri
# | CELERY_MONGODB_BACKEND_SETTINGS = {
# 'database': db,
# 'user': user,
# 'password': passwd
# }
CELERY_RESULT_BACKEND = BROKER_URL
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
|
r_class(
[_('An error occurred in our system. '
'Please try again later.')]
)
form.errors['__all__'] = msg
else:
return HttpResponseRedirect(
reverse('firefox.android.sms-thankyou'))
return l10n_utils.render(request, 'fir... | eta': _('Firefox Beta'),
'aurora': _('Firefox Aurora'),
'esr': _('Firefox Extended Support Release'),
}
return l10n_utils.render(request, 'firefox/all.html', {
| 'full_builds': firefox_details.get_filtered_full_builds(version, query),
'test_builds': firefox_details.get_filtered_test_builds(version, query),
'query': query,
'channel': channel,
'channel_name': channel_names[channel],
})
@csrf_protect
def firefox_partners(request):
# I... |
# -*- coding: utf-8 -*-
#
# This file is part of REANA.
# Copyright (C) 2019 CERN.
#
# REANA is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Job Manager."""
import json
import shlex
from flask import current_app
from reana_commons... | """
raise NotImplementedError
    def get_status(self):
        """Get job status.

        Abstract hook: concrete backend job managers must override this
        to query their compute backend.

        :returns: job status.
        :rtype: str
        """
        raise NotImplementedError
def get_logs(self):
"""Get job log.
:returns: stderr, stdout of a job.
:rtype: dict
"""
... | end_job_id):
"""Create job in db."""
job_db_entry = JobTable(
backend_job_id=backend_job_id,
workflow_uuid=self.workflow_uuid,
status=JobStatus.created.name,
compute_backend=self.compute_backend,
cvmfs_mounts=self.cvmfs_mounts or "",
... |
#!/usr/bin/env python
#encode=utf-8
#vim: tabstop=4 shiftwidth=4 softtabstop=4
#Created on 2013-6-24
#Copyright 2013 nuoqingyun xuqifeng
from bson.code import Cod | e
traffic_map = Code("function () {"
"emit(th | is.domain, this.bytes);"
"}")
traffic_reduce = Code("function (key, values) {"
" var sum = 0;"
" var count = 0;"
" values.forEach(function(byte){"
" sum += byte;"
" count ++;"
... |
ndaModules import Vec3
from direct.interval.IntervalGlobal import Sequence, Parallel, Wait, Func
from direct.interval.IntervalGlobal import LerpScaleInterval
from direct.interval.IntervalGlobal import WaitInterval, ActorInterval, FunctionInterval
from direct.task.Task import Task
from direct.directnotify import DirectN... | f):
self.notify.debug('enterFallBack')
if self.isLocal:
base.playSfx(self.activity.sndOof)
duration = 1.0
animName = self.FallBackAnim
startFrame = 12
totalFrames = self.toon.getNumFrames(animName)
frames = totalFrames - 1 - startFrame
frameRat... | fsm.request('normal')
self.fallBackIval = Sequence(ActorInterval(self.toon, animName, startTime=startFrame / newRate, endTime=totalFrames / newRate, playRate=playRate), FunctionInterval(resume))
self.fallBackIval.start()
def exitFallBack(self):
self.fallBackIval.pause()
del self.fa... |
rue
self.save()
def is_unseen(self):
"""
returns value of self.unseen but also changes it to false.
Use this in a template to mark an unseen notice differently the first
time it is shown.
"""
unseen = self.unseen
if unseen:
self.unseen = ... | 'message': mes | sages['email_full.html'],
}, context)
#notice = Notice.objects.create(user=user, message=messages['notice.html'], notice_type=notice_type, on_site=on_site)
if should_send(user, notice_type, "1") and user.email \
and user.is_active: # Email
recipients.append(user.emai... |
# coding=utf-8
# Copyright 2018 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicab... | 2, 1], dtype=tf.float32)
def test_get_trajecto | ry_lengths(self):
"""Checks if the length of each trajectory in the batch is correct."""
# pylint: disable=invalid-name
TF_TEST_MASK = tf.constant(NP_TEST_MASK)
TF_TEST_MASK_TF_F64 = tf.cast(TF_TEST_MASK, tf.float64)
NP_TEST_MASK_NP_F64 = NP_TEST_MASK.astype(np.float64)
ALL_MASKS = [
TF... |
self.assertEqual(vl, form.attributes[str(nm)])
self.assertEqual(len(form.attributes), attributeMap.count() - cnt)
mydoc = form.node.firstChildElement(str("doc"))
text = DomTools.getText(mydoc)
olddoc = unicode(text).strip() if text else ""
self.assertEqual(olddoc, form.... | c = doc.createElement(dname)
qdn.appendChild(mdoc)
ndcs = self.__rnd.randint(0, 10)
for n in range(ndcs):
dks.append(doc.createTextNode("\nText\n %s\n" % n))
mdoc.appendChild(dks[-1])
for | m = DefinitionDlg()
form.show()
form.node = qdn
self.assertEqual(form.name, '')
self.assertEqual(form.content, '')
self.assertEqual(form.doc, '')
self.assertEqual(form.attributes, {})
self.assertEqual(form.subItems,
["group", "field", "att... |
#!/usr/bin/env python3
import sys
import nu | matuned
# A single optional "-n" argument selects dry-run mode.
dryrun = len(sys.argv) > 1 and sys.argv[1] == '-n'
numatuned.fire(60, dryrun)
|
import tflearn
from tflearn.data_utils import to_categorical, pad_sequences
from tflearn.datasets import imdb
# IMDB Dataset loading
train, test, _ = imdb.load_data(path='imdb.pkl', n_words=10000,
valid_portion=0.1)
trainX, tra | inY = train
testX, testY = test
# Data preprocessing
# Sequen | ce padding
trainX = pad_sequences(trainX, maxlen=100, value=0.)
testX = pad_sequences(testX, maxlen=100, value=0.)
# Converting labels to binary vectors
trainY = to_categorical(trainY, nb_classes=2)
testY = to_categorical(testY, nb_classes=2)
# Network building
net = tflearn.input_data([None, 100])
net = tflearn.embed... |
# Copyright (c) 2020, Djaodjin Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and t... | t to docu | ment API.
"""
def __init__(self, *args, **kwargs):
pass
|
from fabric.api import env, local, run, sudo
env.user = 'root'
env.hosts = ['204.232.205.6']
env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org'
env.virtualenv = '/home/docs/sites/readthedocs.org'
env.rundir = '/home/docs/sites/readthedocs.org/run'
env.chef_executable = '/var/lib/gems | /1.8/bin/chef-solo'
def install_chef():
    """Install the chef toolchain on the remote host: Ruby via apt, then the
    chef gem itself (docs skipped to speed up the install)."""
    sudo('apt-get update', pty=True)
    sudo('apt-get install -y git-core rubygems ruby ruby-dev', pty=True)
    sudo('gem install chef --no-ri --no-rdoc', pty=True)
def sync_config():
    """Rsync the local chef configuration tree to /etc/chef on the first host."""
    local('rsync -av . %s@%s:/etc/chef' % (env.user, env.hosts[0]))
def update():
    """Push the latest chef config to the host and run chef-solo there."""
    sync_config()
    sudo('cd /etc/chef && %s' % env.chef_executable, pty=True)
def reload():
"Reload the server."
env.user = "docs"
run("kill -HUP `cat %s/gunicorn.pid`" % env.rundi... |
class Solution(object):
    def maxProfit(self, prices):
        """Return the maximum profit from one buy followed by one later sell.

        Single pass: track the lowest price seen so far and the best
        profit achievable by selling at the current price.  Returns 0 for
        an empty list or monotonically falling prices.

        :type prices: List[int]
        :rtype: int
        """
        # float('inf') is a true "no price seen yet" sentinel; the original
        # 1 << 31 bound silently failed for prices >= 2**31.
        lowest = float('inf')
        best = 0
        for price in prices:
            if price < lowest:
                lowest = price
            elif price - lowest > best:
                # elif is safe: when price just became the new low,
                # price - lowest == 0 and can never beat best (>= 0).
                best = price - lowest
        return best
import sqlalchemy as sa
from oslo_db.sqlalchemy import types as db_types
from nca47.db.sqlalchemy.models import base as model_base
from nca47.objects import attributes as attr
HasTenant = model_base.HasTenant
HasId = model_base.HasId
HasStatus = model_base.HasStatus
HasOperationMode = model_base.HasOperationMode
c... | .String(attr.NAME_MAX_LEN))
class Zone(model_base.BASE, HasId, HasOpe | rationMode):
"""Represents a dns zone."""
__tablename__ = 'dns_zone_info'
zone_name = sa.Column(sa.String(attr.NAME_MAX_LEN))
tenant_id = sa.Column(sa.String(attr.NAME_MAX_LEN))
zone_id = sa.Column(sa.String(attr.NAME_MAX_LEN))
vres_id = sa.Column(sa.String(attr.NAME_MAX_LEN))
masters = sa... |
from utile import pretty_xml, xml_to_dict, element_to_dict
from testsuite.support import etree, TestCase
import unittest
XML_DATA = "<html><body><h1> | test1</h1><h2>test2</h2></body></html>"
XML_PRETTY = """\
<html>
<body>
<h1>test1</h1>
<h2>test2</h2>
</body>
</html>
"""
XML_DICT = {'body': {'h2': 'test2', 'h1': 'test1'}}
@unittest.skipUnless(etree, 'lxml not installed')
class XMLTestCase(TestCase):
def test_pretty_xml(self):
self.assertEqu... | nt_to_dict(etree.XML(XML_DATA)), XML_DICT)
    def test_xml_to_dict(self):
        # Parsing the flat sample document should yield the nested-dict form.
        self.assertEqual(xml_to_dict(XML_DATA), XML_DICT)
|
#!/usr/bin/env python
#
# Original filename: config.py
#
# Author: Tim Brandt
# Email: tbrandt@astro.princeton.edu
# Date: August 2011
#
# Summary: Set configuration parameters to sensible values.
#
import re
from subprocess import *
import multiprocessing
import numpy as np
def config(nframes, framesize):
####... | tswith("Linux"):
print "You are running Linux."
elif osver.startswith("Darwin"):
print "You are running Mac OS-X."
else:
print "Your operating system is not recognized."
if osver.startswith("Linux"):
mem = Popen(["free", "-b"], stdout=PIPE).stdout.read()
| mem = int(mem.split('\n')[1].split()[1])
elif osver.startswith("Darwin"):
mem = Popen(["vm_stat"], stdout=PIPE).stdout.read().split('\n')
blocksize = re.search('.*size of ([0-9]+) bytes.*', mem[0]).group(1)
totmem = 0.
for line in mem:
if np.any(["Pages free:" in line, ... |
# (c) 2018 Red Hat Inc.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.module_utils._text import to_text
from ansible.module_utils.connection import Connectio... | te, output='config')
for response in to_list(responses):
if response != '{}':
resp.append(response)
if not re | sp:
resp = ['']
return resp
def run_commands(self, commands, check_rc=True):
"""Runs list of commands on remote device and returns results
"""
try:
out = self.send_request(commands)
except ConnectionError as exc:
if check_rc is True:
... |
']['uuid'] = str(project.app_uuid)
manifest['pebble']['enableMultiJS'] = project.app_modern_multi_js
manifest['pebble']['displayName'] = project.app_long_name
if project.app_is_hidden:
manifest['pebble']['watchapp']['hiddenApp'] = project.app_is_hidden
if project.app_platforms:
... | gex
if resource_id.tracking:
d['trackingAdjust'] = resource_id.tracking
if resource_id.memory_format:
d['memoryFormat'] = resource_id.memory_format
if resource_id.storage_format:
d['storageFormat'] = resource_id | .storage_format
if resource_id.space_optimisation:
d['spaceOptimization'] = resource_id.space_optimisation
if resource.is_menu_icon:
d['menuIcon'] = True
if resource_id.compatibility is not None:
d['compatibility'] = resource_id.compati... |
lated.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'html': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2048'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveInte... | rue', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_exercises'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked | ': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_exerci... |
# -*- coding=utf8 -*-
#******************************************************************************
# MediaTypes.py
#------------------------------------------------------------------------------
#
# Copyright (c) 2015 LivingOn <LivingOn@xmail.net>
#
# This program is free software; you can redistribute it and/or mod... | except IOError:
pass
return result
def _append_to_inactive_file(strmfile, librarypath):
inactive_file = "%sinactive" % librarypath
entryline = "%s\n" % strmfile
try:
open(inactive_file, "a+").write(entryline)
except IOError:
pass
def _get_inactive_str... | pass
result = []
[result.append(i.strip("\n")) for i in content if not i in result]
return result
def _is_not_in_strm_file(strmfile, entryline):
content = []
try:
content = open(strmfile, "rU").readlines()
except IOError:
pass
return not entryline in content
def _a... |
"""
This module is meant for vendorizing Python libraries. Most libraries will need
to have some ``sys.path`` alterations done unless the | y are doing relative
imports.
Do **not** add anything to this module that does not represent a vendorized
library | .
Vendored libraries should go into the ``vendor`` directory and imported from
there. This is so we allow libraries that are installed normally to be imported
if the vendored module is not available.
The import dance here is done so that all other imports throughout ceph-deploy
are kept the same regardless of where the... |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RPbdzmq(RPackage):
"""Programming with Big Data -- Interface to 'ZeroMQ'
'ZeroMQ' is... | 79f3b53baf65119efaba1636eca')
version('0.3-2', sha256='ece2a2881c662f77126e4801ba4e01c991331842b0d636ce5a2b591b9de3fc37')
version('0.2-4', sha256='bfacac88b0d4156c70cf63fc4cb9969a950693996901a4fa3dcd59949ec065f6')
depends_on('r@3.0.0:', type=('build', 'run'))
depends_on('r@3.2.0:', when='@0.2-6:', type... | ds_on('libzmq@4.0.4:')
|
ort, proxy, proxy_port, proxy_user, proxy_pass,
self.region.endpoint, debug, https_connection_factory, path)
def get_all_topics(self, next_token=None):
"""
:type next_token: string
:param next_token: Token returned by the previous call to
... | '/', 'GET')
body = response.read()
if response.status == 200:
return json.loads(body)
else:
boto.log.error('%s %s' % (response.status, response.reason))
boto.log.error('%s' % body)
raise self.ResponseError(response.status, response.reason, body)
... | :param topic: The ARN of the topic
"""
params = {'ContentType' : 'JSON',
'TopicArn' : topic}
response = self.make_request('DeleteTopic', params, '/', 'GET')
body = response.read()
if response.status == 200:
return json.loads(body)
else:
... |
'''
Created on Jul 28, 2013
@author: Rob
'''
import os, yaml
config = {
'names': [
'NT',
'VGTestServer'
],
'servers':{
'irc.server.tld': {
'port':6667,
'password':None,
'channels':{
'#vgstati... | , 'w') as cw:
yaml.dump(config, cw, default_flow_style=False)
with open(config_file, 'r') as cr:
config = yaml.load(cr)
# if config['database']['username'] == '' or config['database']['password'] == '' or config['database']['schema'] == '':
# print('!!! Default conf... | value = config[parts[0]]
if len(parts) == 1:
return value
for part in parts[1:]:
value = value[part]
return value
except KeyError:
return default |
# -*- coding: utf-8 -*-
#from __future__ import print_function, division, absolute_import, unicode_literals
#from gmusicapi.clients.webclient import Webclient
#from gmusicapi.clients | .musicmanager import Musicmanager
from gmusicapi.clients.mobile | client import Mobileclient
#(Webclient, Musicmanager, Mobileclient) # noqa
|
l = []
for x in range(int( | input())):
l.append(int(input()))
l.sort()
print(' '.join(str(x) fo | r x in l[::-1]))
|
d_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.removeinstancedatabase': {
'Meta': {'object_name': 'RemoveInstanceDatabase'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
... | jango.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [] | , {'primary_key': 'True'}),
'object_class': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'relevance': ('django.db.models.fields.IntegerF... |
from mrq.task import Task
from mrq.context import connections
class MongoTimeout(Task):
def run(self, params):
res = connections.mongodb_jobs.eval("""
function() {
var a;
for (i=0;i<10000000;i++) {
for (y=0;y<10000000;y++) { |
a = Math.max(y);
}
| }
return a;
}
""")
return res
|
# -*- coding: utf-8 -*-
# Copyright 2017-2019 Barroux Abbey (www.barroux.org)
# Copyright 2017-2019 Akretion France (www.akretion.com)
# @author: Alexis de Lattre <alexis.delattre@akretion.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import models, fields
class BaseConfigSettings(mo... | ne(
related=' | company_id.mass_validation_account_id')
mass_validation_analytic_account_id = fields.Many2one(
related='company_id.mass_validation_analytic_account_id')
mass_validation_journal_id = fields.Many2one(
related='company_id.mass_validation_journal_id')
mass_post_move = fields.Boolean(related='com... |
# coding=utf-8
import socket
import thread
import time
import Queue
import re
import random
class IrcClient:
def __init__(self, host, port, nick, realname, printAll=True, isMibbitBot=False):
self.nick = nick
self.realname = realname
self.host = host
self.port = port
self.sock = socket.socket()
self.RecvQ... | e)
except:
print("WARNING: You didnt ignore "+name+" in the first place!")
pass
def IsIgnored(self, name):
if name in self.ignoredNicks:
return True
else:
return False
    def Identify(self, password):
        # Authenticate with NickServ using the supplied password.
        self.SendMessage("nickserv", "identify "+password)
|
#!/usr/bin/env python
import setuptools |
if __name__ == "__main__":
setuptools.setup(
name="aecg100",
version="1.1.0.18",
author="WHALETEQ Co., LTD",
description="WHALETEQ Co., LTD AECG100 Linux SDK",
url="https://www.whaleteq.com/en/Support/Download/7/Linux%20SDK",
include_package_data=True,
package_data={
'': ['sdk/ | *.so', 'sdk/*.h', 'sample/python/*.txt']
},
)
|
# Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from jenkinsflow.flow import serial
from .framework import api_select
prefixed_jobs = """
serial flow: [
job: 'top_quick1'
serial flow: [
job: 'top_x_quick2-1'
]
... | apsys):
with api_select.api(__file__, api_type) as api:
def job(name):
api.job(name, max_fails=0, expect_invocations=0, expect_order=None, params=None)
api.flow_job()
job('quick1')
index = 0
for index in 1, 2, 3:
job('x_quick2-' + str(index))
... | pi, timeout=70, report_interval=3, job_name_prefix='top_', just_dump=True) as ctrl1:
ctrl1.invoke('quick1')
for index in 1, 2, 3:
with ctrl1.serial(timeout=20, report_interval=3, job_name_prefix='x_') as ctrl2:
ctrl2.invoke('quick2-' + str(index))
... |
#!/usr/bin/python
|
import apt_pkg
import logging
import os
import mock
import sys
import tempfile
import unittest
sys.path.insert(0, "..")
from unattended_upgrade import _setup_logging
class MockOptions:
    # Minimal stand-in for the parsed command-line options object that
    # _setup_logging consumes (see TestLogdir below).
    dry_run = False
    debug = False
class TestLogdir(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mk... | test log
logdir = os.path.join(self.tempdir, "mylog")
apt_pkg.config.set("Unattended-Upgrade::LogDir", logdir)
logging.root.handlers = []
_setup_logging(self.mock_options)
self.assertTrue(os.path.exists(logdir))
def test_logdir_depreated(self):
# test if the deprecat... |
def countingsort(sortablelist):
    """Sort a list of non-negative integers in place using counting sort.

    Runs in O(n + k) time where k is the largest value present.  An empty
    list is left untouched (the original crashed calling max() on an
    empty sequence).  Negative values are not supported, as before.
    """
    if not sortablelist:
        return
    counts = [0] * (max(sortablelist) + 1)
    for value in sortablelist:
        counts[value] += 1  # tally occurrences
    i = 0
    for value, occurrences in enumerate(counts):
        for _ in range(occurrences):  # emit 'occurrences' copies of 'value'
            sortablelist[i] = value
            i += 1
def main():
    """Demo: counting-sort 100 random integers in [0, 1000] and print them."""
    import random
    data = [random.randint(0, 1000) for _ in range(100)]
    countingsort(data)
    print(data)
main()
|
# $Id$
#
from rdkit import Chem
from rdkit.Chem import rdReducedGraphs as rdRG
from rdkit import RDConfig
import numpy
import unittest
class TestCase(unittest.TestCase) :
  def setUp(self):
    # No shared fixtures needed; present to satisfy the unittest API.
    pass
def test1(self):
m = Chem.MolFromSmiles('OCCc1ccccc1')
mrg = rdRG.GenerateMolExtendedReducedGraph(m)
mrg.... | l(0.0,md,4)
def test4(self):
m = Chem.MolF | romSmiles('OCCc1ccccc1')
fp1 = rdRG.GetErGFingerprint(m)
fp2 = rdRG.GetErGFingerprint(m,fuzzIncrement=0.1)
md = max(abs(fp1-fp2))
self.failUnlessAlmostEqual(0.2,md,4)
if __name__ == '__main__':
unittest.main()
|
# -*- encoding: utf- | 8 -*-
{
'name': 'Export Inventory Costs',
'version': '3.0.0.0',
'category': "Warehouse Management",
'description': """
Export Inventory Costs
""",
'author': 'Didotech SRL',
'website': 'http://www.didotech.com',
'license': 'AGPL-3',
"depends": [
'base',
'stock... | iews/stock_view.xml'
],
"demo": [],
"active": False,
"installable": True,
"application": True,
}
|
r += [ f ]
return r
def getInitParameters(self, slave_tid):
"""
hand over parameters to slave once.
@param slave_tid: slave task id
@type slave_tid: int
@return: dictionary with init parameters
@rtype: {param:value}
"""
host = self.... | 'chains':S.cl } )
r['fcom'] = fcp( d, {'traj':'%s+%s'%(trec, tlig),
'ex':(ex_frec, ex_flig),
'ref':S.ref_com, 'split':1 } )
## if doshift:
## r['fcom_shift'] = fcp( r['fcom'], {'shift':1 } )
r['fcom_shuff'] = fcp( r['fc... | 'ref':'%s+%s' % (S.ref_frec, S.ref_flig) } )
r['com_split'] = fcp( r['com'], { 'split':1, 'border':S.cl[0] } )
## r['com_shuff'] = fcp( r['com'], { 'shuffle':1, 'border':S.cl[0] } )
r['com_split_shuff'] = fcp( r['com'],
{'split':1,'shuffle':1,'bord... |
#! /usr/bin/env python
#coding=utf-8
## @Configuration of Preprocessing for SEIMS
#
# TODO, give more detailed description here.
import os,platform
## Directionaries
if platform.system() == "Windows":
DATA_BASE_DIR = r'E:\github-zlj\model_data\model_dianbu_30m_longterm\data_prepare'
PREPROC_SCRIPT_DIR = r'E... |
# LANDUSE_ATTR_LI | ST and SOIL_ATTR_LIST is selected from sqliteFile database
LANDUSE_ATTR_LIST = ["Manning", "Interc_max", "Interc_min", "RootDepth", \
"USLE_C", "SOIL_T10","USLE_P"]
LANDUSE_ATTR_DB = ["manning","i_max","i_min", "root_depth", "usle_c", "SOIL_T10"]
## Be caution, the sequence from "Sand" to "Poreindex... |
is None:
return self.sendError(Failure(), raw)
emit_async(catalog.SMTP_SEND_MESSAGE_START,
self._from_address, recipient.dest.addrstr)
d.addCallback(self.sendSuccess)
d.addErrback(self.sendError, raw)
return d
def _maybe_encrypt_and_sign(self, raw, r... | 8bit)
encmsg.add_header('content-disposition', 'attachment',
filename='msg.asc')
# create meta message
metamsg = PGPEncrypted()
metamsg.add_header('Content-Disposition', 'attachment')
# attach pgp message parts to new message
... | newmsg.attach(encmsg)
return newmsg
d = self._fix_headers(
origmsg,
MultipartEncrypted('application/pgp-encrypted'),
sign_address)
d.addCallback(encrypt)
d.addCallback(create_encrypted_message)
return d
def _sign(self, origmsg... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.