commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
0ee023d29f613f718f5b88c158b120adb8b2fe2e
|
add new package (#16289)
|
var/spack/repos/builtin/packages/py-murmurhash/package.py
|
var/spack/repos/builtin/packages/py-murmurhash/package.py
|
Python
| 0
|
@@ -0,0 +1,621 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0A%0Aclass PyMurmurhash(PythonPackage):%0A %22%22%22Cython bindings for MurmurHash.%22%22%22%0A%0A homepage = %22https://github.com/explosion/murmurhash%22%0A url = %22https://pypi.io/packages/source/m/murmurhash/murmurhash-1.0.2.tar.gz%22%0A%0A version('1.0.2', sha256='c7a646f6b07b033642b4f52ae2e45efd8b80780b3b90e8092a0cec935fbf81e2')%0A%0A depends_on('py-setuptools', type='build')%0A depends_on('py-wheel@0.32.0:0.32.999', type='build')%0A
|
|
e4bc3edf4180ac1385e125a11d01f222747b13f7
|
send File Over FTP using ftplib
|
python/sendFileOverFTP.py
|
python/sendFileOverFTP.py
|
Python
| 0
|
@@ -0,0 +1,565 @@
+#---License---%0A#This is free and unencumbered software released into the public domain.%0A%0A#Anyone is free to copy, modify, publish, use, compile, sell, or%0A#distribute this software, either in source code form or as a compiled%0A#binary, for any purpose, commercial or non-commercial, and by any%0A#means.%0A%0A#by frainfreeze%0A%0A#---Description---%0A#sends file over FTP using ftplib%0A%0A%0A#---code---%0Aimport ftplib%0Asession = ftplib.FTP('myserver.com','login','passord')%0Amyfile = open('theFile.txt','rb')%0Asession.storbinary('STOR theFile.txt', myfile)%0Amyfile.close()%0Asession.quit()%0A
|
|
cc4df17dbab6f2fd44b7e53f0638a47d342d76bb
|
Rename chars to caps as capabilities, not some characters.
|
astm/records.py
|
astm/records.py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Alexander Shorin
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
"""Common ASTM records structure."""
from datetime import datetime
from .mapping import (
Record, ConstantField, DateTimeField, IntegerField, NotUsedField,
TextField, RepeatedComponentField, Component
)
__all__ = ['HeaderRecord', 'PatientRecord', 'OrderRecord',
'ResultRecord', 'CommentRecord', 'TerminatorRecord']
HeaderRecord = Record.build(
ConstantField(name='type', default='H'),
RepeatedComponentField(Component.build(
ConstantField(name='_', default=''),
TextField(name='__')
), name='delimeter'),
# ^^^ workaround to define field:
# ConstantField(name='delimeter', default='\^&'),
NotUsedField(name='message_id'),
NotUsedField(name='password'),
NotUsedField(name='sender'),
NotUsedField(name='address'),
NotUsedField(name='reserved'),
NotUsedField(name='phone'),
NotUsedField(name='chars'),
NotUsedField(name='receiver'),
NotUsedField(name='comments'),
ConstantField(name='processing_id', default='P'),
NotUsedField(name='version'),
DateTimeField(name='timestamp', default=datetime.now, required=True),
)
PatientRecord = Record.build(
ConstantField(name='type', default='P'),
IntegerField(name='seq', default=1, required=True),
NotUsedField(name='practice_id'),
NotUsedField(name='laboratory_id'),
NotUsedField(name='id'),
NotUsedField(name='name'),
NotUsedField(name='maiden_name'),
NotUsedField(name='birthdate'),
NotUsedField(name='sex'),
NotUsedField(name='race'),
NotUsedField(name='address'),
NotUsedField(name='reserved'),
NotUsedField(name='phone'),
NotUsedField(name='physician_id'),
NotUsedField(name='special_1'),
NotUsedField(name='special_2'),
NotUsedField(name='height'),
NotUsedField(name='weight'),
NotUsedField(name='diagnosis'),
NotUsedField(name='medication'),
NotUsedField(name='diet'),
NotUsedField(name='practice_field_1'),
NotUsedField(name='practice_field_2'),
NotUsedField(name='admission_date'),
NotUsedField(name='admission_status'),
NotUsedField(name='location'),
NotUsedField(name='diagnostic_code_nature'),
NotUsedField(name='diagnostic_code'),
NotUsedField(name='religion'),
NotUsedField(name='martial_status'),
NotUsedField(name='isolation_status'),
NotUsedField(name='language'),
NotUsedField(name='hospital_service'),
NotUsedField(name='hospital_institution'),
NotUsedField(name='dosage_category'),
)
OrderRecord = Record.build(
ConstantField(name='type', default='O'),
IntegerField(name='seq', default=1, required=True),
NotUsedField(name='sample_id'),
NotUsedField(name='instrument'),
NotUsedField(name='test'),
NotUsedField(name='priority'),
NotUsedField(name='created_at'),
NotUsedField(name='sampled_at'),
NotUsedField(name='collected_at'),
NotUsedField(name='volume'),
NotUsedField(name='collector'),
NotUsedField(name='action_code'),
NotUsedField(name='danger_code'),
NotUsedField(name='clinical_info'),
NotUsedField(name='delivered_at'),
NotUsedField(name='biomaterial'),
NotUsedField(name='physician'),
NotUsedField(name='physician_phone'),
NotUsedField(name='user_field_1'),
NotUsedField(name='user_field_2'),
NotUsedField(name='laboratory_field_1'),
NotUsedField(name='laboratory_field_2'),
NotUsedField(name='modified_at'),
NotUsedField(name='instrument_charge'),
NotUsedField(name='instrument_section'),
NotUsedField(name='report_type'),
NotUsedField(name='reserved'),
NotUsedField(name='location_ward'),
NotUsedField(name='infection_flag'),
NotUsedField(name='specimen_service'),
NotUsedField(name='laboratory')
)
ResultRecord = Record.build(
ConstantField(name='type', default='R'),
IntegerField(name='seq', default=1, required=True),
NotUsedField(name='test'),
NotUsedField(name='value'),
NotUsedField(name='units'),
NotUsedField(name='references'),
NotUsedField(name='abnormal_flag'),
NotUsedField(name='abnormality_nature'),
NotUsedField(name='status'),
NotUsedField(name='normatives_changed_at'),
NotUsedField(name='operator'),
NotUsedField(name='started_at'),
NotUsedField(name='completed_at'),
NotUsedField(name='instrument'),
)
CommentRecord = Record.build(
ConstantField(name='type', default='C'),
IntegerField(name='seq', default=1, required=True),
NotUsedField(name='source'),
NotUsedField(name='data'),
NotUsedField(name='ctype')
)
TerminatorRecord = Record.build(
ConstantField(name='type', default='L'),
ConstantField(name='seq', default='1'),
ConstantField(name='code', default='N')
)
|
Python
| 0
|
@@ -1089,19 +1089,18 @@
(name='c
-har
+ap
s'),%0A
|
b33725e2a3153b27312e820797bbc8375dbe8970
|
Create beta_interweaving_strings_and_removing_digits.py
|
Solutions/beta_interweaving_strings_and_removing_digits.py
|
Solutions/beta_interweaving_strings_and_removing_digits.py
|
Python
| 0.000066
|
@@ -0,0 +1,201 @@
+from itertools import zip_longest as zlo%0Afrom string import digits%0A%0Ainterweave = lambda a,b: ''.join((i if i not in digits else '')+(j if j not in digits else '') for i,j in zlo(a, b, fillvalue = ''))%0A
|
|
6b53e890958251bd34c29b09f597c8221f4bc98b
|
Add sublime text utils module
|
modules/st_utils.py
|
modules/st_utils.py
|
Python
| 0
|
@@ -0,0 +1,109 @@
+import sublime%0A%0A%0Adef open_window():%0A sublime.run_command(%22new_window%22)%0A return sublime.active_window()%0A
|
|
ff2fba1c09cff57c9fb01ff3c12f076aff23d56a
|
Create __init__.py
|
__init__.py
|
__init__.py
|
Python
| 0.000429
|
@@ -0,0 +1,232 @@
+#!/usr/bin/python%0A%0A%0A#-------------------------------IMPORT--------------------------------#%0Afrom lib import *%0A%0A%0A#-------------------------------EXPORT--------------------------------#%0A__all__ = %5B'%3C#PREFIX#%3E_app','%3C#PREFIX#%3E_index'%5D%0A
|
|
1641de48deab3e6cc18de7eb40e1d02ab28dd88c
|
Create StarTrek.py
|
StarTrek.py
|
StarTrek.py
|
Python
| 0
|
@@ -0,0 +1,13 @@
+# Star Treck%0A
|
|
ae93eaf84487339c5fba696c7900485f2918546e
|
Add __init__.py
|
__init__.py
|
__init__.py
|
Python
| 0.006636
|
@@ -0,0 +1,62 @@
+from .pytypecheck import tc, tc_opts%0Afrom . import predicates%0A
|
|
59e9281d94acf529113697057d80bb6a1eac6191
|
Add global init file
|
__init__.py
|
__init__.py
|
Python
| 0.000001
|
@@ -0,0 +1,200 @@
+# -*- coding: utf-8 -*-%0A%22%22%22Marble: analyse social stratification in cities%22%22%22%0A%0A%0A__author__ = %22R%C3%A9mi Louf%22%0A__email__ = %22remilouf@sciti.es%22%0A__website__ = %22www.sciti.es%22%0A__copyright__ = %222015, R%C3%A9mi Louf%22%0A
|
|
f06ebc1da601de961311c4b753e966227eadb911
|
Create __init__.py
|
__init__.py
|
__init__.py
|
Python
| 0.000429
|
@@ -0,0 +1,1326 @@
+%22%22Backport of importlib.import_module from 3.x.%22%22%22%0A# While not critical (and in no way guaranteed!), it would be nice to keep this%0A# code compatible with Python 2.3.%0Aimport sys%0A%0Adef _resolve_name(name, package, level):%0A %22%22%22Return the absolute name of the module to be imported.%22%22%22%0A if not hasattr(package, 'rindex'):%0A raise ValueError(%22'package' not set to a string%22)%0A dot = len(package)%0A for x in xrange(level, 1, -1):%0A try:%0A dot = package.rindex('.', 0, dot)%0A except ValueError:%0A raise ValueError(%22attempted relative import beyond top-level %22%0A %22package%22)%0A return %22%25s.%25s%22 %25 (package%5B:dot%5D, name)%0A%0A%0Adef import_module(name, package=None):%0A %22%22%22Import a module.%0A%0A The 'package' argument is required when performing a relative import. It%0A specifies the package to use as the anchor point from which to resolve the%0A relative import to an absolute import.%0A%0A %22%22%22%0A if name.startswith('.'):%0A if not package:%0A raise TypeError(%22relative imports require the 'package' argument%22)%0A level = 0%0A for character in name:%0A if character != '.':%0A break%0A level += 1%0A name = _resolve_name(name%5Blevel:%5D, package, level)%0A __import__(name)%0A return sys.modules%5Bname%5D%0A
|
|
a09fa218a918fbde70ea99a67fa1d964b17c5e2c
|
add init
|
__init__.py
|
__init__.py
|
Python
| 0.998609
|
@@ -0,0 +1,49 @@
+__all__ = %5B'bust'%5D%0A%0Afrom .detector import bust%0A%0A%0A
|
|
7b8136d77f2968ac02d17991eca30862bdf9e104
|
add __init__ file
|
__init__.py
|
__init__.py
|
Python
| 0.000128
|
@@ -0,0 +1,21 @@
+from resnet import *%0A
|
|
da874ace234dbac4f0fc8f428cf43d3f415cc596
|
Create __init__.py
|
__init__.py
|
__init__.py
|
Python
| 0.000429
|
@@ -0,0 +1,14 @@
+# __init__.py%0A
|
|
1e716efd7e275068a18d309f42ec8e955309b4b7
|
Create __init__.py
|
__init__.py
|
__init__.py
|
Python
| 0.000429
|
@@ -0,0 +1 @@
+%0A
|
|
f7cc1a858ae88ac14ac18548d70d00c932359b7f
|
finish question 1060
|
p1060_Missing_Element_In_Sorted_Array.py
|
p1060_Missing_Element_In_Sorted_Array.py
|
Python
| 0.999999
|
@@ -0,0 +1,1604 @@
+'''%0A- Leetcode problem: 1060%0A%0A- Difficulty: Medium%0A%0A- Brief problem description:%0A%0A Given a sorted array A of unique numbers, find the K-th missing number starting from the leftmost number of the array.%0A%0AExample 1:%0A%0AInput: A = %5B4,7,9,10%5D, K = 1%0AOutput: 5%0AExplanation:%0AThe first missing number is 5.%0AExample 2:%0A%0AInput: A = %5B4,7,9,10%5D, K = 3%0AOutput: 8%0AExplanation:%0AThe missing numbers are %5B5,6,8,...%5D, hence the third missing number is 8.%0AExample 3:%0A%0AInput: A = %5B1,2,4%5D, K = 3%0AOutput: 6%0AExplanation:%0AThe missing numbers are %5B3,5,6,7,...%5D, hence the third missing number is 6.%0A%0ANote:%0A%0A1 %3C= A.length %3C= 50000%0A1 %3C= A%5Bi%5D %3C= 1e7%0A1 %3C= K %3C= 1e8%0A%0A- Solution Summary:%0A%0A%5B4, 7, 10, 13%5D k = 3%0A%0Amissing number = nums%5Bi%5D - nums%5B0%5D - i%0A%0A1. Binary Search to find the missing number.%0A%0A7: 2 missing number before it%0A10: 4 missing number before it%0ASo result is 7 + (k - missing number at 7)%0AEdge case: if k is larger enough that it's not contained in the list, we can return it directly.%0A%0A- Used Resources:%0A%0A--- Bo Zhou%0A'''%0A%0A%0Aclass Solution:%0A def missingElement(self, nums, k) -%3E int:%0A if (nums%5B-1%5D - nums%5B0%5D - (len(nums) - 1)) %3C k:%0A return k - (nums%5B-1%5D - nums%5B0%5D - (len(nums) - 1)) + nums%5B-1%5D%0A low, high = 0, len(nums) - 1%0A while low %3C high - 1:%0A mid = (high + low) // 2%0A if nums%5Bmid%5D - nums%5B0%5D - mid %3C k:%0A low = mid%0A else:%0A high = mid%0A return k - (nums%5Blow%5D - nums%5B0%5D - low) + nums%5Blow%5D%0A%0A%0Aif __name__ == %22__main__%22:%0A solution = Solution()%0A testList = %5B1, 2, 4%5D%0A print(solution.missingElement(testList, 3))%0A
|
|
d72f11fbfc23de44af8a2600a7310adafe3e2ffe
|
Create a.py
|
agc015/a.py
|
agc015/a.py
|
Python
| 0.000489
|
@@ -0,0 +1,204 @@
+def main():%0A n, a, b = map(int, input().split())%0A%0A if a %3E b or (n == 1 and a != b):%0A print(0)%0A else:%0A print((n - 1) * (b - a) - (b - a - 1))%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
47ba8815c7a0de0191fb363c22c42732781a8e38
|
Fix blank index_for
|
daiquiri/metadata/migrations/0020_blank_index_for.py
|
daiquiri/metadata/migrations/0020_blank_index_for.py
|
Python
| 0.999194
|
@@ -0,0 +1,594 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.4 on 2018-04-05 15:25%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('daiquiri_metadata', '0019_column_index_for'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='column',%0A name='index_for',%0A field=models.CharField(blank=True, default=b'', help_text='The columns which this column is an index for (e.g. for pgSphere).', max_length=256, verbose_name='Index for'),%0A ),%0A %5D%0A
|
|
a9d5c1dcb059f02f6c3ec5dbff6b07f54c20194d
|
Add an example directory
|
example/main.py
|
example/main.py
|
Python
| 0.000002
|
@@ -0,0 +1,819 @@
+from beckett import clients, resources%0A%0A%0Aclass PersonResource(resources.BaseResource):%0A class Meta:%0A name = 'Person'%0A resource_name = 'people'%0A identifier = 'url'%0A attributes = (%0A 'name',%0A 'birth_year',%0A 'eye_color',%0A 'gender',%0A 'height',%0A 'mass',%0A 'url',%0A )%0A valid_status_codes = (%0A 200,%0A )%0A methods = (%0A 'get',%0A )%0A pagination_key = None%0A%0A%0Aclass StarWarsClient(clients.BaseClient):%0A class Meta:%0A name = 'Star Wars API Client'%0A base_url = 'http://swapi.co/api'%0A resources = (%0A PersonResource,%0A )%0A%0Aswapi = StarWarsClient()%0Aresults_list = swapi.get_person(uid=1)%0Aperson = results_list%5B0%5D%0Aprint(person.name)%0A
|
|
3367f9d1e394bf686bc6bbd6316265c9feef4f03
|
Add basic tests for config usb
|
test/on_yubikey/test_cli_config.py
|
test/on_yubikey/test_cli_config.py
|
Python
| 0
|
@@ -0,0 +1,1421 @@
+from .util import (DestructiveYubikeyTestCase, ykman_cli)%0A%0A%0Aclass TestConfigUSB(DestructiveYubikeyTestCase):%0A%0A def setUp(self):%0A ykman_cli('config', 'usb', '--enable-all', '-f')%0A%0A def tearDown(self):%0A ykman_cli('config', 'usb', '--enable-all', '-f')%0A%0A def test_disable_otp(self):%0A ykman_cli('config', 'usb', '--disable', 'OTP', '-f')%0A output = ykman_cli('config', 'usb', '--list')%0A self.assertNotIn('OTP', output)%0A%0A def test_disable_u2f(self):%0A ykman_cli('config', 'usb', '--disable', 'U2F', '-f')%0A output = ykman_cli('config', 'usb', '--list')%0A self.assertNotIn('FIDO U2F', output)%0A%0A def test_disable_openpgp(self):%0A ykman_cli('config', 'usb', '--disable', 'OPGP', '-f')%0A output = ykman_cli('config', 'usb', '--list')%0A self.assertNotIn('OpenPGP', output)%0A%0A def test_disable_piv(self):%0A ykman_cli('config', 'usb', '--disable', 'PIV', '-f')%0A output = ykman_cli('config', 'usb', '--list')%0A self.assertNotIn('PIV', output)%0A%0A def test_disable_oath(self):%0A ykman_cli('config', 'usb', '--disable', 'OATH', '-f')%0A output = ykman_cli('config', 'usb', '--list')%0A self.assertNotIn('OATH', output)%0A%0A def test_disable_fido2(self):%0A ykman_cli('config', 'usb', '--disable', 'FIDO2', '-f')%0A output = ykman_cli('config', 'usb', '--list')%0A self.assertNotIn('FIDO2', output)%0A
|
|
4dbe1b21ab0f82eeba82be7db2e141260942b998
|
add num_states to mixture convenience wrapper
|
basic/models.py
|
basic/models.py
|
# These classes make aliases of class members and properties so as to make
# pybasicbayes mixture models look more like pyhsmm models. When comparing
# H(S)MM model fits to pybasicbayes mixture model fits, it's easier to write one
# code path by using these models.
import pybasicbayes
from ..util.general import rle
class _Labels(pybasicbayes.internals.labels.Labels):
@property
def T(self):
return self.N
@property
def stateseq(self):
return self.z
@stateseq.setter
def stateseq(self,stateseq):
self.z = stateseq
@property
def stateseqs_norep(self):
return rle(self.z)[0]
@property
def durations(self):
return rle(self.z)[1]
class _MixturePropertiesMixin(object):
_labels_class = _Labels
@property
def states_list(self):
return self.labels_list
@property
def stateseqs(self):
return [s.stateseq for s in self.states_list]
@property
def stateseqs_norep(self):
return [s.stateseq_norep for s in self.states_list]
@property
def durations(self):
return [s.durations for s in self.states_list]
@property
def obs_distns(self):
return self.components
@obs_distns.setter
def obs_distns(self,distns):
self.components = distns
class Mixture(_MixturePropertiesMixin,pybasicbayes.models.Mixture):
pass
class MixtureDistribution(_MixturePropertiesMixin,pybasicbayes.models.MixtureDistribution):
pass
|
Python
| 0.000001
|
@@ -775,16 +775,93 @@
Labels%0A%0A
+ @property%0A def num_states(self):%0A return len(self.obs_distns)%0A%0A
@pro
|
fb7a5b279da36b9dbd6338867168a79011edd0d6
|
Create new package (#7208)
|
var/spack/repos/builtin/packages/glimmer/package.py
|
var/spack/repos/builtin/packages/glimmer/package.py
|
Python
| 0
|
@@ -0,0 +1,1791 @@
+##############################################################################%0A# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/spack/spack%0A# Please also see the NOTICE and LICENSE files for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0A%0Aclass Glimmer(MakefilePackage):%0A %22%22%22Glimmer is a system for finding genes in microbial DNA, especially the%0A genomes of bacteria, archaea, and viruses.%22%22%22%0A%0A homepage = %22https://ccb.jhu.edu/software/glimmer%22%0A%0A version('3.02b', '344d012ae12596de905866fe9eb7f16c')%0A%0A build_directory = 'src'%0A%0A def url_for_version(self, version):%0A url = %22https://ccb.jhu.edu/software/glimmer/glimmer%7B0%7D.tar.gz%22%0A return url.format(version.joined)%0A%0A def install(self, spec, prefix):%0A install_tree('bin', prefix.bin)%0A
|
|
ec54ae0e59058cadecec38fb70be2947d0907d6a
|
Hello World
|
pyexamples/conditions.py
|
pyexamples/conditions.py
|
Python
| 0.99998
|
@@ -0,0 +1,22 @@
+%0Aprint('Hello World')%0A
|
|
fe1af6449ec4feeaf75a248422e806ad9c818749
|
remove doc
|
python/qidoc/__init__.py
|
python/qidoc/__init__.py
|
## Copyright (c) 2012 Aldebaran Robotics. All rights reserved.
## Use of this source code is governed by a BSD-style license that can be
## found in the COPYING file.
""" qidoc : handle generating sphinx and doxygen documentation of qibuild
projects
qiDoc: documentation generator
==============================
qiDoc helps you easily write and merge several
documentation formats, doxygen and sphinx for the moment.
Usage:
-----
qidoc is controlled by a simple config file, looking like
.. code-block:: xml
<qidoc>
<repo name="qibuild">
<sphinxdoc name="qibuild" src="doc" />
</repo>
<repo name="libnaoqi" >
<doxydoc name="libalcommon" src="libalcommon/doc" />
<doxydoc name="libalvision" src="libalvisio/doc" />
</repo>
<repo name="doc">
<sphinxdoc name="doc" src="source" dest="." />
</repo>
<defaults>
<root_project name="doc" />
</defaults>
<templates>
<doxygen
doxyfile="soure/tools/Doxyfile.template"
css="soure/tools/doxygen.template.css"
header="soure/tools/header.template.html"
footer="soure/tools/footer.template.html"
/>
<sphinx
config="source/conf.py"
/>
</templates>
</qidoc>
Such a file will produce a documentation looking like
::
doc/ index.html (doc)
/ libalmotion/index (doxy libnaoqi/almotion)
/ libalvision/index (doxy libnaoqi/avisiion)
/ qibuild/index (sphinx qibuild)
"""
|
Python
| 0.00001
|
@@ -249,1288 +249,8 @@
ts%0A%0A
-%0A%0AqiDoc: documentation generator%0A==============================%0A%0A%0AqiDoc helps you easily write and merge several%0Adocumentation formats, doxygen and sphinx for the moment.%0A%0AUsage:%0A-----%0A%0Aqidoc is controlled by a simple config file, looking like%0A%0A.. code-block:: xml%0A%0A %3Cqidoc%3E%0A %3Crepo name=%22qibuild%22%3E%0A %3Csphinxdoc name=%22qibuild%22 src=%22doc%22 /%3E%0A %3C/repo%3E%0A%0A %3Crepo name=%22libnaoqi%22 %3E%0A %3Cdoxydoc name=%22libalcommon%22 src=%22libalcommon/doc%22 /%3E%0A %3Cdoxydoc name=%22libalvision%22 src=%22libalvisio/doc%22 /%3E%0A %3C/repo%3E%0A%0A %3Crepo name=%22doc%22%3E%0A %3Csphinxdoc name=%22doc%22 src=%22source%22 dest=%22.%22 /%3E%0A %3C/repo%3E%0A%0A %3Cdefaults%3E%0A %3Croot_project name=%22doc%22 /%3E%0A %3C/defaults%3E%0A%0A %3Ctemplates%3E%0A %3Cdoxygen%0A doxyfile=%22soure/tools/Doxyfile.template%22%0A css=%22soure/tools/doxygen.template.css%22%0A header=%22soure/tools/header.template.html%22%0A footer=%22soure/tools/footer.template.html%22%0A /%3E%0A %3Csphinx%0A config=%22source/conf.py%22%0A /%3E%0A %3C/templates%3E%0A%0A%0A %3C/qidoc%3E%0A%0A%0ASuch a file will produce a documentation looking like%0A%0A::%0A%0A doc/ index.html (doc)%0A / libalmotion/index (doxy libnaoqi/almotion)%0A / libalvision/index (doxy libnaoqi/avisiion)%0A / qibuild/index (sphinx qibuild)%0A%0A%0A
%22%22%22%0A
|
ceb8ec420e5e894644aecce8b96463cc3769ce1d
|
Add process_alerts management command
|
cityhallmonitor/management/commands/process_alerts.py
|
cityhallmonitor/management/commands/process_alerts.py
|
Python
| 0.000003
|
@@ -0,0 +1,3081 @@
+from django.conf import settings%0Afrom django.core.mail import send_mail%0Afrom django.core.management.base import BaseCommand, CommandError%0Afrom django.utils import timezone%0Afrom cityhallmonitor.models import Subscription%0Afrom documentcloud import DocumentCloud%0A%0A%0ADEFAULT_PROJECT = 'Chicago City Hall Monitor'%0A%0AEMAIL_SUBJECT = 'City Hall Monitor Search Alert'%0A%0AEMAIL_FROM = 'KnightLab@northwestern.edu'%0A%0AEMAIL_TEMPLATE = %22%22%22%0A%3Cp%3EYou alert subscription on City Hall Monitor:%0A%3C/p%3E%0A%3Cp%3E%25(query)s%0A%3C/p%3E%0A%3Cp%3EMatched %25(n)d new documents:%3C/p%3E%0A%22%22%22%0A%0AEMAIL_DOC_TEMPLATE = %22%22%22%0A%3Cp%3E%25(matter)s%3Cbr%3E%0A%3Ca href=%22%25(link_url)s%22%3E%25(link_text)s%3C/a%3E%0A%3C/p%3E%0A%22%22%22%0A%0A%0Aclass Command(BaseCommand):%0A help = 'Process user alert subscriptions.'%0A _client = None%0A%0A def client(self):%0A %22%22%22Using un-authenticated client...%22%22%22%0A if self._client is None:%0A self._client = DocumentCloud()%0A return self._client%0A%0A def add_arguments(self, parser):%0A pass # noop%0A%0A def search(self, query):%0A return self.client().documents.search(query)%0A%0A def send_subscription_alert(self, subscription, document_list):%0A %22%22%22Send user subscription alert%22%22%22%0A n_documents = len(document_list)%0A %0A html_message = EMAIL_TEMPLATE %25 (%7B%0A 'query': subscription.query,%0A 'n': n_documents%0A %7D) %0A for doc in document_list:%0A html_message += EMAIL_DOC_TEMPLATE %25 %7B%0A 'matter': doc.data%5B'MatterTitle'%5D,%0A 'link_url': doc.published_url,%0A 'link_text': doc.title%0A %7D%0A %0A print('Sending alert for %25d documents %5B%25s%5D' %25 (%0A n_documents, subscription)) %0A send_mail(%0A EMAIL_SUBJECT,%0A '',%0A EMAIL_FROM,%0A %5Bsubscription.email%5D,%0A fail_silently=False,%0A html_message=html_message)%0A %0A def process_subscription(self, subscription):%0A %22%22%22Process subscription%22%22%22%0A query = 'account:%25s project:%22%25s%22 %25s' %25 (%0A 
settings.DOCUMENT_CLOUD_ACCOUNT, %0A DEFAULT_PROJECT, %0A subscription.query)%0A print(query)%0A %0A r = self.search(query)%0A if subscription.last_check:%0A r = %5Bd for d in r if d.updated_at %3E subscription.last_check%5D%0A %0A try:%0A if len(r):%0A self.send_subscription_alert(subscription, r)%0A %0A subscription.last_check = timezone.now()%0A subscription.save()%0A except SMTPException as se:%0A self.stdout.write(%0A 'ERROR sending email for subscription %25d: %25s' %25 %5C%0A (subscription.id, str(se))) %0A %0A def handle(self, *args, **options):%0A %22%22%22Process subscriptions%22%22%22%0A subscription_list = Subscription.objects.all()%0A print('Processing %25d subscriptions' %25 len(subscription_list))%0A%0A for subscription in subscription_list: %0A self.process_subscription(subscription) %0A%0A self.stdout.write('Done')%0A %0A
|
|
44597a9b9f5e2ef2eb391b096d3240b81960ce68
|
fix doc generation on plot_lasso_coordinate_descent_path.py example (pb on my box)
|
examples/glm/plot_lasso_coordinate_descent_path.py
|
examples/glm/plot_lasso_coordinate_descent_path.py
|
"""
=====================
Lasso and Elastic Net
=====================
Lasso and elastic net (L1 and L2 penalisation) implemented using a
coordinate descent.
"""
print __doc__
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# License: BSD Style.
from itertools import cycle
import numpy as np
import pylab as pl
from scikits.learn.glm import lasso_path, enet_path
from scikits.learn import datasets
diabetes = datasets.load_diabetes()
X = diabetes.data
y = diabetes.target
X /= X.std(0) # Standardize data (easier to set the rho parameter)
################################################################################
# Compute paths
eps = 5e-3 # the smaller it is the longer is the path
print "Computing regularization path using the lasso..."
models = lasso_path(X, y, eps=eps)
alphas_lasso = np.array([model.alpha for model in models])
coefs_lasso = np.array([model.coef_ for model in models])
print "Computing regularization path using the elastic net..."
models = enet_path(X, y, eps=eps, rho=0.8)
alphas_enet = np.array([model.alpha for model in models])
coefs_enet = np.array([model.coef_ for model in models])
################################################################################
# Display results
color_iter = cycle(['b', 'g', 'r', 'c', 'm', 'y', 'k'])
for color, coef_lasso, coef_enet in zip(color_iter,
coefs_lasso.T, coefs_enet.T):
pl.plot(-np.log10(alphas_lasso), coef_lasso, color)
pl.plot(-np.log10(alphas_enet), coef_enet, color + '--')
pl.xlabel('-Log(lambda)')
pl.ylabel('weights')
pl.title('Lasso and Elastic-Net Paths')
pl.legend(['Lasso','Elastic-Net'])
pl.axis('tight')
pl.show()
|
Python
| 0
|
@@ -1239,40 +1239,58 @@
ts%0A%0A
-color_iter =
+ax = pl.gca()%0Aax.set_color_
cycle(
+2 *
%5B'b', '
-g
+r
', '
-r
+g
', '
@@ -1298,244 +1298,74 @@
', '
-m', 'y', 'k'%5D)%0Afor color, coef_lasso, coef_enet in zip(color_iter,%0A coefs_lasso.T, coefs_enet.T):%0A pl.plot(-np.log10(alphas_lasso),
+k'%5D)%0Al1 = pl.plot(
coef
+s
_lasso
-, color)%0A pl.plot(-np.log10(alpha
+)%0Al2 = pl.plot(coef
s_enet
-)
,
-coef_enet, color +
+linestyle=
'--'
@@ -1468,17 +1468,35 @@
end(
-%5B
+(l1%5B-1%5D, l2%5B-1%5D), (
'Lasso',
'Ela
@@ -1491,16 +1491,17 @@
'Lasso',
+
'Elastic
@@ -1505,17 +1505,35 @@
tic-Net'
-%5D
+), loc='lower left'
)%0Apl.axi
|
c12305c59f9c95149e95094179768ac627d7faf9
|
Add test file for upload client class
|
tests/client/test_upload_client.py
|
tests/client/test_upload_client.py
|
Python
| 0
|
@@ -0,0 +1,2836 @@
+import os%0Aimport mock%0Aimport unittest%0A%0Afrom qingstor.sdk.config import Config%0Afrom qingstor.sdk.service.qingstor import Bucket%0Afrom qingstor.sdk.service.qingstor import QingStor%0Afrom qingstor.sdk.client.upload_client import UploadClient%0Afrom qingstor.sdk.error import (%0A BadRequestError,%0A InvalidObjectNameError%0A)%0A%0ATEST_PART_SIZE=5242880%0ATEST_FILE_PATH='test_file_100M'%0ATEST_OBJECT_KEY='test_upload_20170804'%0ATEST_ACCESS_KEY='This_is_mock_access_key'%0ATEST_SECRET_ACCESS_KEY='This_is_mock_secret_access_key'%0A%0A%0Aclass MockBucket:%0A%0A def __init__(self,status_code):%0A self.status_code = status_code%0A%0A # Mock the upload_id%0A def __getitem__(self, key):%0A return 000000000000%0A%0A%0Aclass CallbackFunc:%0A%0A def __init__(self):%0A pass%0A%0A def callback_func(self):%0A pass%0A%0Aclass TestUploadClient(unittest.TestCase):%0A%0A @classmethod%0A def setUpClass(cls):%0A output200=MockBucket(200)%0A cls.mock_http200=mock.Mock(return_value=output200)%0A output201=MockBucket(201)%0A cls.mock_http201=mock.Mock(return_value=output201)%0A output400=MockBucket(400)%0A cls.mock_http400=mock.Mock(return_value=output400)%0A%0A config=Config(TEST_ACCESS_KEY,TEST_SECRET_ACCESS_KEY)%0A # QingStor.Bucket=mock_qingstor%0A qingstor=QingStor(config)%0A # Create bucket instance%0A callback_func=CallbackFunc()%0A bucket=qingstor.Bucket('test_upload_bucket','pek3a')%0A cls.upload_obj = UploadClient(bucket, callback_func.callback_func, TEST_PART_SIZE)%0A%0A%0A def setUp(self):%0A os.system(%22dd if=/dev/zero of=test_file_100M bs=1024 count=102400%22)%0A%0A def tearDown(self):%0A os.system(%22rm -f test_file_100M%22)%0A%0A def test_right_response(self):%0A # Mock the output of initiate_multipart_upload%0A Bucket.initiate_multipart_upload=self.mock_http200%0A # Mock the output of upload_multipart%0A Bucket.upload_multipart=self.mock_http201%0A Bucket.complete_multipart_upload=self.mock_http201%0A with open(TEST_FILE_PATH, 'rb') as f:%0A 
self.upload_obj.upload('upload_20180803.mp4', f)%0A%0A def test_initialize_bad_response(self):%0A # Mock the output of initiate_multipart_upload%0A Bucket.initiate_multipart_upload=self.mock_http400%0A%0A with open(TEST_FILE_PATH, 'rb') as f:%0A self.assertRaises(InvalidObjectNameError,self.upload_obj.upload,TEST_OBJECT_KEY,f)%0A%0A def test_upload_bad_response(self):%0A # Mock the output of initiate_multipart_upload%0A Bucket.initiate_multipart_upload=self.mock_http200%0A%0A # Mock the output of upload_multipart%0A Bucket.upload_multipart=self.mock_http400%0A%0A with open(TEST_FILE_PATH, 'rb') as f:%0A self.assertRaises(BadRequestError,self.upload_obj.upload,TEST_OBJECT_KEY,f)%0A%0A%0Aif __name__==%22__main__%22:%0A unittest.main()%0A
|
|
305ab2ede27fde9097c7a69804189a529c868140
|
add missing filter plugins
|
filter_plugins/customs.py
|
filter_plugins/customs.py
|
Python
| 0.000001
|
@@ -0,0 +1,227 @@
+class FilterModule(object):%0A def filters(self):%0A return %7B%0A 'filename_without_extension': self.filename_without_extension%0A %7D%0A%0A def filename_without_extension(self, path, extension):%0A return path%5B:-len(extension)%5D
|
|
bc581b1ac7a12fd3026667663a4812fe0bd3869b
|
add dict.py
|
dict.py
|
dict.py
|
Python
| 0.000002
|
@@ -0,0 +1,516 @@
+#! /usr/bin/env python2%0A# -*- coding: utf-8 -*-%0A%0Aimport sys%0Aimport json%0Aimport urllib%0Aimport subprocess%0A%0Adef main():%0A word = subprocess.check_output('xsel')%0A params = urllib.urlencode(%7B'from': 'auto', 'to': 'auto', 'client_id':'WGCxN9fzvCxPo0nqlzGLCPUc', 'q': word%7D)%0A f = urllib.urlopen(%22http://openapi.baidu.com/public/2.0/bmt/translate?%25s%22, params)%0A j = json.loads(f.read())%0A d = dict(j%5B'trans_result'%5D%5B0%5D)%0A subprocess.call(%5B'notify-send', word, d%5B'dst'%5D%5D)%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
28b5fef57580640cd78775d6c0544bc633e5958a
|
Add helper script to generate API keys.
|
generate-key.py
|
generate-key.py
|
Python
| 0
|
@@ -0,0 +1,518 @@
+#!/usr/bin/python%0A%0Aimport os%0Aimport sqlite3%0Aimport sys%0Aimport time%0A%0Aif len(sys.argv) %3C 3:%0A raise ValueError('Usage: %25s %22Firstnam Lastname%22 email@example.com' %25 sys.argv%5B0%5D)%0A%0Adb = sqlite3.connect('/var/lib/zon-api/data.db')%0Aapi_key = str(os.urandom(26).encode('hex'))%0Atier = 'free'%0Aname = sys.argv%5B1%5D%0Aemail = sys.argv%5B2%5D%0Arequests = 0%0Areset = int(time.time())%0Aquery = 'INSERT INTO client VALUES (?, ?, ?, ?, ?, ?)'%0Adb.execute(query, (api_key, tier, name, email, requests, reset))%0Adb.commit()%0Adb.close()%0Aprint api_key%0A
|
|
a314da2415e661ed6cbc9929095a0f34610d9c21
|
FIX _get_search_domain in partner
|
transindar_personalization/res_partner.py
|
transindar_personalization/res_partner.py
|
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, api, fields
class res_partner(models.Model):
    _inherit = 'res.partner'

    @api.model
    def name_search(
            self, name, args=None, operator='ilike', limit=100):
        """Look partners up by internal code / name / ref first, falling
        back to the standard ``name_search`` when nothing matches.

        :param name: text typed by the user.
        :param args: extra domain supplied by the caller (context filters
            such as customer/supplier); it must be honoured.
        :returns: ``name_get()``-style list of ``(id, display_name)``.
        """
        recs = self.search(self._get_search_domain(
            name, args=args, operator=operator, limit=limit), limit=limit)
        if not recs:
            return super(res_partner, self).name_search(
                name=name, args=args, operator=operator, limit=limit)
        return recs.name_get()

    @api.model
    def _get_search_domain(self, name, args=None, operator='ilike', limit=100):
        """Build the search domain for ``name``.

        Bug fix: the caller-supplied ``args`` domain was previously dropped
        whenever a ``name`` was given, so extra filters passed to
        ``name_search`` were silently ignored.  ``args`` is now appended to
        every returned domain.
        """
        if not args:
            args = []
        if name:
            # A match on the internal code takes precedence over the
            # display name / reference search.
            if self.search(
                    [('internal_code', '=ilike', name)] + args,
                    limit=limit):
                return [('internal_code', '=ilike', name)] + args
            else:
                return ['|', ('display_name', 'ilike', name),
                        ('ref', 'ilike', name)] + args
        return args

    def _search_custom_search(self, operator, value):
        """Search handler for the computed ``custom_search`` field."""
        res = self._get_search_domain(value, operator=operator)
        return res

    @api.multi
    def _get_custom_search(self):
        # The field is search-only; it never stores or displays a value.
        return False

    custom_search = fields.Char(
        compute='_get_custom_search',
        string='Busqueda Inteligente',
        search='_search_custom_search'
    )
|
Python
| 0
|
@@ -1014,16 +1014,23 @@
, name)%5D
+ + args
,%0A
@@ -1107,32 +1107,39 @@
'=ilike', name)%5D
+ + args
%0A els
@@ -1250,16 +1250,23 @@
, name)%5D
+ + args
%0A
|
6807ce92c5d0a26a43db8cb25ef5ffd8b8ff6277
|
Add skeleton cryptdev module
|
salt/modules/cryptdev.py
|
salt/modules/cryptdev.py
|
Python
| 0
|
@@ -0,0 +1,553 @@
+# -*- coding: utf-8 -*-%0A'''%0ASalt module to manage Unix cryptsetup jobs and the crypttab file%0A'''%0A%0A# Import python libraries%0Afrom __future__ import absolute_import%0Aimport logging%0A%0A# Import salt libraries%0Aimport salt%0A%0A# Set up logger%0Alog = logging.getLogger(__name__)%0A%0A# Define the module's virtual name%0A__virtualname__ = 'cryptdev'%0A%0Adef __virtual__():%0A '''%0A Only load on POSIX-like systems%0A '''%0A if salt.utils.is_windows():%0A return (False, 'The cryptdev module cannot be loaded: not a POSIX-like system')%0A else:%0A return True%0A
|
|
9b457a08ce433b574f186bb1b32da666edee485a
|
Complete sol by recursion
|
lc0108_convert_sorted_array_to_binary_search_tree.py
|
lc0108_convert_sorted_array_to_binary_search_tree.py
|
Python
| 0.006524
|
@@ -0,0 +1,1346 @@
+%22%22%22Leetcode 108. Convert Sorted Array to Binary Search Tree%0AEasy%0A%0AURL: https://leetcode.com/problems/convert-sorted-array-to-binary-search-tree/%0A%0AGiven an array where elements are sorted in ascending order,%0Aconvert it to a height balanced BST.%0A%0AFor this problem, a height-balanced binary tree is defined as a binary tree%0Ain which the depth of the two subtrees of every node never differ by more than 1.%0A%0AExample:%0AGiven the sorted array: %5B-10,-3,0,5,9%5D,%0AOne possible answer is: %5B0,-3,9,-10,null,5%5D, which represents the following%0Aheight balanced BST:%0A 0%0A / %5C%0A -3 9%0A / /%0A -10 5%0A%22%22%22%0A%0A# Definition for a binary tree node.%0Aclass TreeNode(object):%0A def __init__(self, val):%0A self.val = val%0A self.left = None%0A self.right = None%0A%0A%0Aclass SolutionRecur(object):%0A def sortedArrayToBST(self, nums):%0A %22%22%22%0A :type nums: List%5Bint%5D%0A :rtype: TreeNode%0A %22%22%22%0A if not nums:%0A return None%0A%0A left, right = 0, len(nums) - 1%0A mid = left + (right - left) // 2%0A%0A root = TreeNode(nums%5Bmid%5D)%0A root.left = self.sortedArrayToBST(nums%5B:mid%5D)%0A root.right = self.sortedArrayToBST(nums%5Bmid+1:%5D)%0A %0A return root%0A%0A%0Adef main():%0A nums = %5B-10, -3, 0, 5, 9%5D%0A root = SolutionRecur().sortedArrayToBST(nums)%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
e36df0c827de666fa00d9b806759cde0f0cb1697
|
Test code
|
game.py
|
game.py
|
Python
| 0.000001
|
@@ -0,0 +1,2943 @@
+class Room:%0A def __init__(self, configuration=%7B%7D):%0A self.doors = configuration%0A%0Aclass Merchant:%0A def __init__(self, markup=1.2, markdown=0.8):%0A self.inventory = %5B%5D%0A self.markup = markup%0A self.markdown = markdown%0A %0A def add_item(self, item):%0A # Adds an item to the merchant's inventory%0A %0A if (not isinstance(item, Item)):%0A raise TypeError(%22Unexpected %22 + type(item))%0A%0A self.inventory.append(item)%0A%0A def get_selling_offers(self):%0A # Lists all items in the merchant's inventory%0A # and adds the markup fee%0A%0A offers = %5B%5D%0A for item in self.inventory:%0A offer = (item, item.value*self.markup)%0A offers.append(offer)%0A%0A return offers%0A%0A def get_buying_offers(self, items):%0A # Generates buying offers on the items in 'items'%0A%0A offers = %5B%5D%0A for item in items:%0A offer = (item, item.value*self.markdown)%0A offers.append(offer)%0A%0A return offers%0A%0Aclass Town(Room):%0A def __init__(self, name, room_configuration=%7B%7D):%0A super().__init__(room_configuration)%0A self.name = name%0A%0Aclass Item:%0A def __init__(self, name, description, value):%0A self.name = name%0A self.description = description%0A self.value = value # The item's monetary value%0A%0A @property%0A def value(self):%0A return self.value%0A%0A @value.setter%0A def x(self, value):%0A if value %3C 0:%0A raise ValueError(%22Item value cannot be less than 0%22)%0A else:%0A self.value = value%0A%0Aclass Weapon(Item):%0A def __init__(self, name, description, damage=0, value=0):%0A self.damage = damage%0A super().__init__(name, description, value)%0A%0A%0A%0A# Create new place with the name %22My Place%22%0Amy_place = Town(%22My Place%22)%0A%0A# Create new merchant with markup=1.2 and markdown=0.8%0Amy_merchant = Merchant(1.2, 0.8)%0A%0A# Attach the merchant to the place%0Amy_place.merchant = my_merchant%0A%0A# Create new weapon with the name %22Sword%22, the description %22A plain sword.%22%0A# a damage value of 20, and a monetary value of 
10%0Asword = Weapon(%22Sword%22, %22A plain sword.%22, 20, 10)%0A%0A# Ditto%0Aaxe = Weapon(%22Axe%22, %22A plain axe.%22, 30, 20)%0A%0A# Ditto%0Apickaxe = Weapon(%22Pickaxe%22, %22A plain pickaxe.%22, 10, 10)%0A%0A# Add our weapons to the merchant we attached to our place%0Amy_place.merchant.add_item(sword)%0Amy_place.merchant.add_item(axe)%0Amy_place.merchant.add_item(pickaxe)%0A%0A# Create a new room%0A# Pass the configuration dict, which says that the east door should lead to my_place%0Astarting_room = Room(%7B'east': my_place%7D)%0A%0A# Get selling offers from the merchant in the place that is behind the east door of our room%0Aselling_offers = starting_room.doors%5B'east'%5D.merchant.get_selling_offers()%0A%0Aselling_offers_formatted = %5B%5D%0A%0Afor offer in selling_offers:%0A selling_offers_formatted.append((offer%5B0%5D, offer%5B1%5D))%0A %0Aprint(selling_offers_formatted)
|
|
e9eed6b6e99e948ed2863fcc45a037b0e2b1e80f
|
fix import in tornado worker
|
gunicorn/workers/gtornado.py
|
gunicorn/workers/gtornado.py
|
# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import os

from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback

from gunicorn.workers.base import Worker
class TornadoWorker(Worker):
    """Gunicorn worker that serves the application via Tornado's IOLoop."""

    def watchdog(self):
        """Heartbeat callback: notify the arbiter; stop if re-parented."""
        self.notify()
        if self.ppid != os.getppid():
            self.log.info("Parent changed, shutting down: %s" % self)
            self.ioloop.stop()

    def run(self):
        """Attach the listening socket to a Tornado HTTPServer and loop."""
        self.socket.setblocking(0)
        loop = IOLoop.instance()
        self.ioloop = loop
        watchdog_timer = PeriodicCallback(self.watchdog, 1000, io_loop=loop)
        watchdog_timer.start()
        server = HTTPServer(self.app, io_loop=loop)
        server._socket = self.socket
        server.start(num_processes=1)
        loop.start()
|
Python
| 0.000001
|
@@ -122,16 +122,27 @@
ation.%0A%0A
+import os%0A%0A
from tor
|
0051c9bfef75f51c8ccc8591e1dc8d723a17625e
|
Add another orphan-made tip to self-check :(.
|
gittip/models/__init__.py
|
gittip/models/__init__.py
|
"""
The most important object in the Gittip object model is Participant, and the
second most important one is Community. There are a few others, but those are
the most important two. Participant, in particular, is at the center of
everything on Gittip.
"""
from postgres import Postgres
class GittipDB(Postgres):
    """Postgres wrapper that adds data-integrity self checks.

    Each ``_check_*`` method asserts one invariant the application is
    supposed to maintain; ``self_check`` runs them all and raises
    ``AssertionError`` on the first violation.
    """

    def self_check(self):
        """
        Runs all available self checks on the database.
        """
        self._check_balances()
        self._check_tips()
        self._check_orphans()
        self._check_orphans_no_tips()
        self._check_paydays_volumes()

    def _check_tips(self):
        """
        Checks that there are no rows in tips with duplicate (tipper, tippee, mtime).

        https://github.com/gittip/www.gittip.com/issues/1704
        """
        # Rows surviving the EXCEPT are exact duplicates of a
        # (tipper, tippee, mtime) triple; there must be none.
        conflicting_tips = self.one("""
            SELECT count(*)
              FROM
                 (
                    SELECT * FROM tips
                    EXCEPT
                    SELECT DISTINCT ON(tipper, tippee, mtime) *
                      FROM tips
                      ORDER BY tipper, tippee, mtime
                 ) AS foo
        """)
        assert conflicting_tips == 0

    def _check_balances(self):
        """
        Recalculates balances for all participants from transfers and exchanges.

        https://github.com/gittip/www.gittip.com/issues/1118
        """
        with self.get_cursor() as cursor:
            if cursor.one("select exists (select * from paydays where ts_end < ts_start) as running"):
                # payday is running and the query below does not account for pending
                return
            # Recompute each balance from first principles (credits minus
            # debits across exchanges and transfers) and diff against the
            # stored participants.balance column.
            b = cursor.one("""
                select count(*)
                from (
                    select username, sum(a) as balance
                    from (
                        select participant as username, sum(amount) as a
                        from exchanges
                        where amount > 0
                        group by participant
                        union
                        select participant as username, sum(amount-fee) as a
                        from exchanges
                        where amount < 0
                        group by participant
                        union
                        select tipper as username, sum(-amount) as a
                        from transfers
                        group by tipper
                        union
                        select tippee as username, sum(amount) as a
                        from transfers
                        group by tippee
                    ) as foo
                    group by username
                    except
                    select username, balance
                    from participants
                ) as foo2
            """)
            assert b == 0, "conflicting balances: {}".format(b)

    def _check_orphans(self):
        """
        Finds participants that
         * does not have corresponding elsewhere account
         * have not been absorbed by other participant

        These are broken because new participants arise from elsewhere
        and elsewhere is detached only by take over which makes a note
        in absorptions if it removes the last elsewhere account.

        Especially bad case is when also claimed_time is set because
        there must have been elsewhere account attached and used to sign in.

        https://github.com/gittip/www.gittip.com/issues/617
        """
        orphans = self.all("""
            select username
            from participants
            where not exists (select * from elsewhere where elsewhere.participant=username)
            and not exists (select * from absorptions where archived_as=username)
        """)
        assert len(orphans) == 0, "missing elsewheres: {}".format(list(orphans))

    def _check_orphans_no_tips(self):
        """
        Finds participants
         * without elsewhere account attached
         * having non zero outstanding tip

        This should not happen because when we remove the last elsewhere account
        in take_over we also zero out all tips.
        """
        tips_with_orphans = self.all("""
            WITH orphans AS (
                SELECT username FROM participants
                WHERE NOT EXISTS (SELECT 1 FROM elsewhere WHERE participant=username)
            ), valid_tips AS (
                SELECT * FROM (
                    SELECT DISTINCT ON (tipper, tippee) *
                    FROM tips
                    ORDER BY tipper, tippee, mtime DESC
                ) AS foo
                WHERE amount > 0
            )
            SELECT id FROM valid_tips
            WHERE tipper IN (SELECT * FROM orphans)
            OR tippee IN (SELECT * FROM orphans)
        """)
        # Known-bad tips grandfathered in; any NEW id showing up here is a
        # fresh violation of the invariant above.
        known = set([25206]) # '4c074000c7bc', 'naderman', '3.00'
        real = set(tips_with_orphans) - known
        assert len(real) == 0, real

    def _check_paydays_volumes(self):
        """
        Recalculate *_volume fields in paydays table using exchanges table.
        """
        # Each of the four queries below recomputes one cached *_volume
        # column from the raw exchanges rows within the payday's window
        # and lists paydays whose stored value disagrees.
        charge_volume = self.all("""
            select * from (
                select id, ts_start, charge_volume, (
                    select coalesce(sum(amount+fee), 0)
                    from exchanges
                    where timestamp > ts_start
                    and timestamp < ts_end
                    and amount > 0
                ) as ref
                from paydays
                order by id
            ) as foo
            where charge_volume != ref
        """)
        assert len(charge_volume) == 0
        charge_fees_volume = self.all("""
            select * from (
                select id, ts_start, charge_fees_volume, (
                    select coalesce(sum(fee), 0)
                    from exchanges
                    where timestamp > ts_start
                    and timestamp < ts_end
                    and amount > 0
                ) as ref
                from paydays
                order by id
            ) as foo
            where charge_fees_volume != ref
        """)
        assert len(charge_fees_volume) == 0
        ach_volume = self.all("""
            select * from (
                select id, ts_start, ach_volume, (
                    select coalesce(sum(amount), 0)
                    from exchanges
                    where timestamp > ts_start
                    and timestamp < ts_end
                    and amount < 0
                ) as ref
                from paydays
                order by id
            ) as foo
            where ach_volume != ref
        """)
        assert len(ach_volume) == 0
        ach_fees_volume = self.all("""
            select * from (
                select id, ts_start, ach_fees_volume, (
                    select coalesce(sum(fee), 0)
                    from exchanges
                    where timestamp > ts_start
                    and timestamp < ts_end
                    and amount < 0
                ) as ref
                from paydays
                order by id
            ) as foo
            where ach_fees_volume != ref
        """)
        assert len(ach_fees_volume) == 0
#
|
Python
| 0
|
@@ -4990,16 +4990,23 @@
t(%5B25206
+, 46266
%5D) # '4c
|
796cce78bc1f12b9cc5bdb813172331433dd9e72
|
Move configuration details to docs
|
homeassistant/components/device_tracker/thomson.py
|
homeassistant/components/device_tracker/thomson.py
|
"""
homeassistant.components.device_tracker.thomson
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Device tracker platform that supports scanning a THOMSON router for device
presence.
This device tracker needs telnet to be enabled on the router.
Configuration:
To use the THOMSON tracker you will need to add something like the following
to your configuration.yaml file.
device_tracker:
platform: thomson
host: YOUR_ROUTER_IP
username: YOUR_ADMIN_USERNAME
password: YOUR_ADMIN_PASSWORD
Variables:
host
*Required
The IP address of your router, e.g. 192.168.1.1.
username
*Required
The username of a user with administrative privileges, usually 'admin'.
password
*Required
The password for your given admin account.
"""
import logging
from datetime import timedelta
import re
import threading
import telnetlib
from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD
from homeassistant.helpers import validate_config
from homeassistant.util import Throttle
from homeassistant.components.device_tracker import DOMAIN
# Return cached results if last scan was less than this time ago
MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10)
_LOGGER = logging.getLogger(__name__)
# Parses one line of the router's 'hostmgr list' output into named groups:
# MAC address, IP address, connection status, type, interface, hardware
# interface and hostname (whitespace separated).
_DEVICES_REGEX = re.compile(
    r'(?P<mac>(([0-9a-f]{2}[:-]){5}([0-9a-f]{2})))\s' +
    r'(?P<ip>([0-9]{1,3}[\.]){3}[0-9]{1,3})\s+' +
    r'(?P<status>([^\s]+))\s+' +
    r'(?P<type>([^\s]+))\s+' +
    r'(?P<intf>([^\s]+))\s+' +
    r'(?P<hwintf>([^\s]+))\s+' +
    r'(?P<host>([^\s]+))')
# pylint: disable=unused-argument
def get_scanner(hass, config):
    """ Validates config and returns a THOMSON scanner. """
    required = {DOMAIN: [CONF_HOST, CONF_USERNAME, CONF_PASSWORD]}
    if not validate_config(config, required, _LOGGER):
        return None

    scanner = ThomsonDeviceScanner(config[DOMAIN])
    if scanner.success_init:
        return scanner
    return None
class ThomsonDeviceScanner(object):
    """
    This class queries a router running THOMSON firmware
    for connected devices. Adapted from ASUSWRT scanner.

    Communication happens over a telnet session; results are cached and
    refreshed at most once per MIN_TIME_BETWEEN_SCANS (see @Throttle).
    """

    def __init__(self, config):
        self.host = config[CONF_HOST]
        self.username = config[CONF_USERNAME]
        self.password = config[CONF_PASSWORD]

        # Serializes telnet access when several entities poll concurrently.
        self.lock = threading.Lock()
        self.last_results = {}

        # Test the router is accessible
        data = self.get_thomson_data()
        self.success_init = data is not None

    def scan_devices(self):
        """ Scans for new devices and return a
            list containing found device ids. """
        self._update_info()
        return [client['mac'] for client in self.last_results]

    def get_device_name(self, device):
        """ Returns the name of the given device
            or None if we don't know. """
        if not self.last_results:
            return None
        for client in self.last_results:
            if client['mac'] == device:
                return client['host']
        return None

    @Throttle(MIN_TIME_BETWEEN_SCANS)
    def _update_info(self):
        """
        Ensures the information from the THOMSON router is up to date.
        Returns boolean if scanning successful.
        """
        if not self.success_init:
            return False

        with self.lock:
            _LOGGER.info("Checking ARP")
            data = self.get_thomson_data()
            if not data:
                return False

            # flag C stands for CONNECTED
            active_clients = [client for client in data.values() if
                              client['status'].find('C') != -1]
            self.last_results = active_clients
            return True

    def get_thomson_data(self):
        """ Retrieve data from THOMSON and return parsed result.

        Returns a dict keyed by IP address, or None (implicitly) when the
        telnet session fails; callers treat None as "router unreachable".
        """
        try:
            # Log in and run 'hostmgr list'; '=>' is the firmware prompt.
            telnet = telnetlib.Telnet(self.host)
            telnet.read_until(b'Username : ')
            telnet.write((self.username + '\r\n').encode('ascii'))
            telnet.read_until(b'Password : ')
            telnet.write((self.password + '\r\n').encode('ascii'))
            telnet.read_until(b'=>')
            telnet.write(('hostmgr list\r\n').encode('ascii'))
            devices_result = telnet.read_until(b'=>').split(b'\r\n')
            telnet.write('exit\r\n'.encode('ascii'))
        except EOFError:
            _LOGGER.exception("Unexpected response from router")
            return
        except ConnectionRefusedError:
            _LOGGER.exception("Connection refused by router," +
                              " is telnet enabled?")
            return

        devices = {}
        for device in devices_result:
            match = _DEVICES_REGEX.search(device.decode('utf-8'))
            if match:
                devices[match.group('ip')] = {
                    'ip': match.group('ip'),
                    'mac': match.group('mac').upper(),
                    'host': match.group('host'),
                    'status': match.group('status')
                }
        return devices
|
Python
| 0
|
@@ -183,553 +183,147 @@
e.%0A%0A
-This device tracker needs telnet to be enabled on the router.%0A%0AConfiguration:%0A%0ATo use the THOMSON tracker you will need to add something like the following%0Ato your configuration.yaml file.%0A%0Adevice_tracker:%0A platform: thomson%0A host: YOUR_ROUTER_IP%0A username: YOUR_ADMIN_USERNAME%0A password: YOUR_ADMIN_PASSWORD%0A%0AVariables:%0A%0Ahost%0A*Required%0AThe IP address of your router, e.g. 192.168.1.1.%0A%0Ausername%0A*Required%0AThe username of an user with administrative privileges, usually 'admin'.%0A%0Apassword%0A*Required%0AThe password for your given admin account.
+For more details about this platform, please refer to the documentation at%0Ahttps://home-assistant.io/components/device_tracker.thomson.html
%0A%22%22%22
|
96451643294003992e6d73ec34876badae177ed8
|
Add PULSE support
|
libpebble2/communication/transports/pulse.py
|
libpebble2/communication/transports/pulse.py
|
Python
| 0
|
@@ -0,0 +1,3084 @@
+from __future__ import absolute_import%0A__author__ = 'Liam McLoughlin'%0A%0Aimport struct%0A%0Atry:%0A import pulse2%0Aexcept ImportError:%0A pass%0A%0Afrom . import BaseTransport, MessageTargetWatch%0Afrom libpebble2.exceptions import ConnectionError, PebbleError%0A%0A%0Aclass PULSETransport(BaseTransport):%0A %22%22%22%0A Represents a direct connection to a physical/virtual Pebble uses the PULSEv2 interface.%0A This transport expects to be given a PULSE2 Link object.%0A%0A :param connection: A PULSE2 Interface object to tunnel Pebble Protocol over.%0A :type link: pulse2.link.Interface%0A %22%22%22%0A must_initialise = True%0A%0A PPOPULSE_PORT = 0x3e22%0A%0A OPCODE_PROTOCOL_DATA = 0x1%0A OPCODE_CONNECT = 0x2%0A OPCODE_DISCONNECT = 0x3%0A%0A def __init__(self, link):%0A self.link = link%0A self.connection = None%0A self.buffer = b''%0A%0A try:%0A import pulse2%0A except ImportError:%0A raise PebbleError('pulse2 package not installed: it is required for PULSE transport')%0A%0A @staticmethod%0A def _opcode(opcode):%0A return struct.pack('B', opcode)%0A%0A @staticmethod%0A def _chunks(list_items, chunk_length):%0A for i in xrange(0, len(list_items), chunk_length):%0A yield list_items%5Bi:i+chunk_length%5D%0A%0A def connect(self):%0A self.connection = self.link.open_socket('reliable', self.PPOPULSE_PORT)%0A if not self.connection:%0A raise ConnectionError('Failed to open PPoPULSE socket')%0A%0A self._send_with_opcode(self.OPCODE_CONNECT)%0A%0A def disconnect(self):%0A if self.connected:%0A try:%0A self._send_with_opcode(self.OPCODE_DISCONNECT)%0A except pulse2.exceptions.SocketClosed:%0A pass%0A self.connection.close()%0A self.connection = None%0A%0A @property%0A def connected(self):%0A return self.connection is not None%0A%0A def read_packet(self):%0A while self.connected:%0A if len(self.buffer) %3E= 2:%0A length, = struct.unpack('!H', self.buffer%5B:2%5D)%0A length += 4%0A%0A if len(self.buffer) %3E= length:%0A msg_data = self.buffer%5B:length%5D%0A self.buffer = 
self.buffer%5Blength:%5D%0A%0A return MessageTargetWatch(), msg_data%0A%0A try:%0A packet = self.connection.receive(block=True)%0A except (AttributeError, pulse2.exceptions.SocketClosed):%0A self.connection = None%0A raise ConnectionError('PULSE transport closed')%0A%0A assert packet%5B0%5D == self._opcode(self.OPCODE_PROTOCOL_DATA)%0A self.buffer += packet%5B1:%5D%0A%0A def send_packet(self, message, target=MessageTargetWatch()):%0A assert isinstance(target, MessageTargetWatch)%0A for chunk in self._chunks(message, self.connection.mtu - 1):%0A self._send_with_opcode(self.OPCODE_PROTOCOL_DATA, chunk)%0A%0A def _send_with_opcode(self, opcode, body=None):%0A assert self.connected%0A%0A data = self._opcode(opcode)%0A if body:%0A data += body%0A self.connection.send(data)%0A
|
|
4fd03b93f7c2ff31b6a7ab6bf6d404cc579a6bf8
|
Rewrite download_hash in Python (#5995)
|
scripts/download_hash.py
|
scripts/download_hash.py
|
Python
| 0
|
@@ -0,0 +1,1718 @@
+#!/usr/bin/env python3%0A%0A# After a new version of Kubernetes has been released,%0A# run this script to update roles/download/defaults/main.yml%0A# with new hashes.%0A%0Aimport hashlib%0Aimport sys%0A%0Aimport requests%0Afrom ruamel.yaml import YAML%0A%0AMAIN_YML = %22../roles/download/defaults/main.yml%22%0A%0Adef open_main_yaml():%0A yaml = YAML()%0A yaml.explicit_start = True%0A yaml.preserve_quotes = True%0A yaml.width = 4096%0A%0A with open(MAIN_YML, %22r%22) as main_yml:%0A data = yaml.load(main_yml)%0A%0A return data, yaml%0A%0A%0Adef download_hash(versions):%0A architectures = %5B%22arm%22, %22arm64%22, %22amd64%22%5D%0A downloads = %5B%22kubelet%22, %22kubectl%22, %22kubeadm%22%5D%0A%0A data, yaml = open_main_yaml()%0A%0A for download in downloads:%0A checksum_name = f%22%7Bdownload%7D_checksums%22%0A for arch in architectures:%0A for version in versions:%0A if not version.startswith(%22v%22):%0A version = f%22v%7Bversion%7D%22%0A url = f%22https://storage.googleapis.com/kubernetes-release/release/%7Bversion%7D/bin/linux/%7Barch%7D/%7Bdownload%7D%22%0A download_file = requests.get(url, allow_redirects=True)%0A download_file.raise_for_status()%0A sha256sum = hashlib.sha256(download_file.content).hexdigest()%0A data%5Bchecksum_name%5D%5Barch%5D%5Bversion%5D = sha256sum%0A%0A with open(MAIN_YML, %22w%22) as main_yml:%0A yaml.dump(data, main_yml)%0A print(f%22%5Cn%5CnUpdated %7BMAIN_YML%7D%5Cn%22)%0A%0A%0Adef usage():%0A print(f%22USAGE:%5Cn %7Bsys.argv%5B0%5D%7D %5Bk8s_version1%5D %5B%5Bk8s_version2%5D....%5Bk8s_versionN%5D%5D%22)%0A%0A%0Adef main(argv=None):%0A if not argv:%0A argv = sys.argv%5B1:%5D%0A if not argv:%0A usage()%0A sys.exit(1)%0A download_hash(argv)%0A%0A%0Aif __name__ == %22__main__%22:%0A sys.exit(main())%0A
|
|
f8bea417fe0a09e50938d0fc837bd1ab97ac3a0b
|
Add MarkedDict.keydict attribute
|
powerline/lint/markedjson/markedvalue.py
|
powerline/lint/markedjson/markedvalue.py
|
__all__ = ['gen_marked_value', 'MarkedValue']
try:
from __builtin__ import unicode
except ImportError:
unicode = str
def gen_new(cls):
	"""Build a ``__new__`` for a Marked* subclass of *cls*.

	The returned constructor takes ``(value, mark)``: it creates the
	underlying value through the base class and records both the mark and
	the raw value on the new instance.
	"""
	def __new__(arg_cls, value, mark):
		instance = super(arg_cls, arg_cls).__new__(arg_cls, value)
		instance.mark = mark
		instance.value = value
		return instance
	return __new__
def gen_init(cls):
	"""Build an ``__init__`` for a Marked* subclass of *cls*.

	The generated initializer drops the extra ``mark`` argument and
	initializes the base class with *value* only; the mark itself is
	stored by the companion ``__new__`` from ``gen_new``.
	"""
	def __init__(self, value, mark):
		cls.__init__(self, value)
	return __init__
def gen_getnewargs(cls):
	"""Build a ``__getnewargs__`` returning ``(value, mark)``.

	This lets pickle/copy recreate marked objects through the
	two-argument ``__new__`` generated by ``gen_new``.  *cls* is unused
	but kept for symmetry with the other generators.
	"""
	def __getnewargs__(self):
		args = (self.value, self.mark)
		return args
	return __getnewargs__
class MarkedUnicode(unicode):
	"""Unicode/str subclass that carries the mark it was parsed from."""
	__new__ = gen_new(unicode)
	__getnewargs__ = gen_getnewargs(unicode)

	def _proc_partition(self, part_result):
		# Re-wrap each piece of a (r)partition result as MarkedUnicode,
		# shifting the mark's column/pointer by the piece's offset inside
		# the original string.
		pointdiff = 1
		r = []
		for s in part_result:
			mark = self.mark.copy()
			# XXX Does not work properly with escaped strings, but this requires
			# saving much more information in mark.
			mark.column += pointdiff
			mark.pointer += pointdiff
			r.append(MarkedUnicode(s, mark))
			pointdiff += len(s)
		return tuple(r)

	def rpartition(self, sep):
		return self._proc_partition(super(MarkedUnicode, self).rpartition(sep))

	def partition(self, sep):
		return self._proc_partition(super(MarkedUnicode, self).partition(sep))
class MarkedInt(int):
	"""Int that carries the position mark it was parsed from."""
	__new__ = gen_new(int)
	__getnewargs__ = gen_getnewargs(int)
class MarkedFloat(float):
	"""Float that carries the position mark it was parsed from."""
	__new__ = gen_new(float)
	__getnewargs__ = gen_getnewargs(float)
class MarkedDict(dict):
	"""Dict subclass that carries a position mark.

	``keydict`` maps each key to the key object that was actually stored,
	so the marked key (a MarkedUnicode with its own mark) can be recovered
	later: a plain lookup by an equal unmarked key would otherwise lose
	access to the stored key object.  Backward compatible: only adds the
	attribute and keeps dict behaviour unchanged.
	"""
	__new__ = gen_new(dict)
	__getnewargs__ = gen_getnewargs(dict)

	def __init__(self, value, mark):
		dict.__init__(self, value)
		self.keydict = dict(((key, key) for key in self))

	def __setitem__(self, key, value):
		dict.__setitem__(self, key, value)
		self.keydict[key] = key

	def update(self, *args, **kwargs):
		dict.update(self, *args, **kwargs)
		# Rebuild: update() may have introduced or replaced keys.
		self.keydict = dict(((key, key) for key in self))

	def copy(self):
		# dict.copy preserves the original (marked) key objects, so the
		# new instance's __init__ rebuilds keydict correctly.
		return MarkedDict(super(MarkedDict, self).copy(), self.mark)
class MarkedList(list):
	"""List that carries the position mark it was parsed from."""
	__new__ = gen_new(list)
	__init__ = gen_init(list)
	__getnewargs__ = gen_getnewargs(list)
class MarkedValue:
	"""Generic fallback wrapper pairing an arbitrary value with its mark.

	Used (via gen_marked_value) for value types that have no dedicated
	Marked* subclass above.
	"""
	def __init__(self, value, mark):
		self.mark = mark
		self.value = value

	# Pickle support: recreate the wrapper via __init__(value, mark).
	__getinitargs__ = gen_getnewargs(None)
# Basic types with a dedicated Marked* wrapper defined above;
# gen_marked_value consults this table first.
specialclasses = {
	unicode: MarkedUnicode,
	int: MarkedInt,
	float: MarkedFloat,
	dict: MarkedDict,
	list: MarkedList,
}

# Cache of proxy classes generated on the fly for other value types,
# keyed by the wrapped value's class.
classcache = {}
def gen_marked_value(value, mark, use_special_classes=True):
	"""Wrap *value* in a marked equivalent that records *mark*.

	Uses a dedicated Marked* subclass when one exists (and
	use_special_classes is true); otherwise generates, and caches in
	``classcache``, a MarkedValue proxy class that forwards every method
	of the value's class to the wrapped value.
	"""
	if use_special_classes and value.__class__ in specialclasses:
		Marked = specialclasses[value.__class__]
	elif value.__class__ in classcache:
		Marked = classcache[value.__class__]
	else:
		class Marked(MarkedValue):
			# NOTE: this loop runs in the class body, defining one
			# forwarding method per attribute of the wrapped class.
			for func in value.__class__.__dict__:
				if func == 'copy':
					def copy(self):
						return self.__class__(self.value.copy(), self.mark)
				elif func not in set(('__init__', '__new__', '__getattribute__')):
					if func in set(('__eq__',)):
						# HACK to make marked dictionaries always work
						exec ((
							'def {0}(self, *args):\n'
							'	return self.value.{0}(*[arg.value if isinstance(arg, MarkedValue) else arg for arg in args])'
						).format(func))
					else:
						exec ((
							'def {0}(self, *args, **kwargs):\n'
							'	return self.value.{0}(*args, **kwargs)\n'
						).format(func))
		classcache[value.__class__] = Marked
	return Marked(value, mark)
|
Python
| 0
|
@@ -1414,69 +1414,384 @@
%0A%09__
-init__ = gen_init(dict)%0A%09__getnewargs__ = gen_getnewargs(dict
+getnewargs__ = gen_getnewargs(dict)%0A%0A%09def __init__(self, value, mark):%0A%09%09dict.__init__(self, value)%0A%09%09self.keydict = dict(((key, key) for key in self))%0A%0A%09def __setitem__(self, key, value):%0A%09%09dict.__setitem__(self, key, value)%0A%09%09self.keydict%5Bkey%5D = key%0A%0A%09def update(self, *args, **kwargs):%0A%09%09dict.update(self, *args, **kwargs)%0A%09%09self.keydict = dict(((key, key) for key in self)
)%0A%0A%09
|
41e115305a4de332689199ecf6a52a37c0d72e55
|
Fix #108
|
src/collectors/memory/memory.py
|
src/collectors/memory/memory.py
|
# coding=utf-8
"""
This class collects data on memory utilization
Note that MemFree may report no memory free. This may not actually be the case,
as memory is allocated to Buffers and Cache as well. See
[this link](http://www.linuxatemyram.com/) for more details.
#### Dependencies
* /proc/meminfo or psutil
"""
import diamond.collector
import diamond.convertor
import os
try:
import psutil
except ImportError:
psutil = None
# /proc/meminfo keys that are always published; any other key is only
# published when the 'detailed' config option is set.
_KEY_MAPPING = [
    'MemTotal',
    'MemFree',
    'Buffers',
    'Cached',
    'Active',
    'Dirty',
    'Inactive',
    'Shmem',
    'SwapTotal',
    'SwapFree',
    'SwapCached',
    'VmallocTotal',
    'VmallocUsed',
    'VmallocChunk',
    'Committed_AS',
]
class MemoryCollector(diamond.collector.Collector):
    """Publishes memory utilization metrics.

    Reads /proc/meminfo when it is readable; otherwise falls back to
    psutil (if importable).
    """

    PROC = '/proc/meminfo'

    def get_default_config_help(self):
        """Return help text for this collector's config options."""
        config_help = super(MemoryCollector, self).get_default_config_help()
        config_help.update({
            'detailed': 'Set to True to Collect all the nodes',
        })
        return config_help

    def get_default_config(self):
        """
        Returns the default collector settings
        """
        config = super(MemoryCollector, self).get_default_config()
        config.update({
            'path': 'memory',
            # Collect all the nodes or just a few standard ones?
            # Uncomment to enable
            #'detailed': 'True'
        })
        return config

    def collect(self):
        """
        Collect memory stats

        Returns True on success, None when no metrics could be gathered.
        """
        if os.access(self.PROC, os.R_OK):
            # 'with' guarantees the file handle is closed even on error
            # (the old open()/close() pair leaked on exceptions).
            with open(self.PROC) as meminfo:
                data = meminfo.read()

            for line in data.splitlines():
                try:
                    name, value, units = line.split()
                    name = name.rstrip(':')
                    value = int(value)

                    if (name not in _KEY_MAPPING
                            and 'detailed' not in self.config):
                        continue

                    for unit in self.config['byte_unit']:
                        value = diamond.convertor.binary.convert(value=value,
                                                                 oldUnit=units,
                                                                 newUnit=unit)
                        self.publish(name, value, metric_type='GAUGE')

                        # TODO: We only support one unit node here. Fix it!
                        break

                except ValueError:
                    # Skip lines that don't look like 'Name: value units'.
                    continue
            return True
        else:
            if not psutil:
                self.log.error('Unable to import psutil')
                self.log.error('No memory metrics retrieved')
                return None

            phymem_usage = psutil.phymem_usage()
            virtmem_usage = psutil.virtmem_usage()
            units = 'B'

            for unit in self.config['byte_unit']:
                value = diamond.convertor.binary.convert(
                    value=phymem_usage.total, oldUnit=units, newUnit=unit)
                self.publish('MemTotal', value, metric_type='GAUGE')

                # Bug fix (#108): use 'available' rather than 'free'.
                # 'free' excludes memory held in reclaimable buffers/cache
                # and therefore under-reports the memory actually usable.
                value = diamond.convertor.binary.convert(
                    value=phymem_usage.available, oldUnit=units, newUnit=unit)
                self.publish('MemFree', value, metric_type='GAUGE')

                value = diamond.convertor.binary.convert(
                    value=virtmem_usage.total, oldUnit=units, newUnit=unit)
                self.publish('SwapTotal', value, metric_type='GAUGE')

                value = diamond.convertor.binary.convert(
                    value=virtmem_usage.free, oldUnit=units, newUnit=unit)
                self.publish('SwapFree', value, metric_type='GAUGE')

                # TODO: We only support one unit node here. Fix it!
                break

            return True
        return None
|
Python
| 0.000001
|
@@ -3203,35 +3203,40 @@
ue=phymem_usage.
-fre
+availabl
e, oldUnit=units
|
3b27b1d6b1c4739b8d456703542ec8182ce12277
|
Add a Wordpress+MySQL composed instance functional test case
|
heat/tests/functional/test_WordPress_Composed_Instances.py
|
heat/tests/functional/test_WordPress_Composed_Instances.py
|
Python
| 0.000002
|
@@ -0,0 +1,2055 @@
+# vim: tabstop=4 shiftwidth=4 softtabstop=4%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22); you may%0A# not use this file except in compliance with the License. You may obtain%0A# a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS, WITHOUT%0A# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the%0A# License for the specific language governing permissions and limitations%0A#%0A%0Aimport util%0Aimport verify%0Aimport nose%0Afrom nose.plugins.attrib import attr%0Aimport unittest%0A%0A%0A@attr(speed='slow')%0A@attr(tag=%5B'func', 'wordpress', 'composed', 'WordPressComposedInstances'%5D)%0Aclass WordPressComposedInstancesFunctionalTest(unittest.TestCase):%0A def setUp(self):%0A template = 'WordPress_Composed_Instances.template'%0A%0A self.func_utils = util.FuncUtils()%0A%0A self.func_utils.prepare_jeos('F17', 'x86_64', 'cfntools')%0A self.func_utils.create_stack(template, 'F17')%0A self.func_utils.check_cfntools()%0A self.func_utils.wait_for_provisioning()%0A self.func_utils.check_user_data(template)%0A%0A self.ssh = self.func_utils.get_ssh_client()%0A%0A def test_instance(self):%0A # ensure wordpress was installed by checking for expected%0A # configuration file over ssh%0A wp_file = '/etc/wordpress/wp-config.php'%0A stdin, stdout, sterr = self.ssh.exec_command('ls ' + wp_file)%0A result = stdout.readlines().pop().rstrip()%0A assert result == wp_file%0A print %22Wordpress installation detected%22%0A%0A # Verify the output URL parses as expected, ie check that%0A # the wordpress installation is operational%0A stack_url = self.func_utils.get_stack_output(%22WebsiteURL%22)%0A print %22Got stack output WebsiteURL=%25s, verifying%22 %25 stack_url%0A ver = verify.VerifyStack()%0A assert True == ver.verify_wordpress(stack_url)%0A%0A 
self.func_utils.cleanup()%0A
|
|
1172287e38f623994b039cea0dab36ea68d18471
|
add RabbitService
|
ics_demo/remote_services/demo.py
|
ics_demo/remote_services/demo.py
|
Python
| 0
|
@@ -0,0 +1,619 @@
+from base import Service%0Afrom ics_demo.helpers.base import uuidgen%0A%0Aclass RabbitService(Service):%0A%0A def it_is_my_warren(self, name):%0A cmd = 'mkdir -p /tmp/%25s' %25 name%0A self.remote_cmd_quiet(cmd)%0A%0A def put_carrot_bucket_in_my_warren(self, rabbit):%0A cmd = 'mkdir /tmp/%25s/carrots' %25 rabbit.name%0A self.remote_cmd_quiet(cmd)%0A%0A def put_a_carrot(self, rabbit):%0A cmd = 'touch /tmp/%25s/carrots/%25s' %25 (rabbit.name, uuidgen())%0A self.remote_cmd_quiet(cmd)%0A%0A def my_carrots(self, rabbit):%0A cmd = 'ls /tmp/%25s/carrots/' %25 rabbit.name%0A return self.remote_cmd_list(cmd)%0A
|
|
66d215c9c148572c4ebd9370453f7406da89abaf
|
Fix for unknown connection alias error message.
|
mongoengine/connection.py
|
mongoengine/connection.py
|
import pymongo
from pymongo import Connection, ReplicaSetConnection, uri_parser
__all__ = ['ConnectionError', 'connect', 'register_connection',
'DEFAULT_CONNECTION_NAME']
DEFAULT_CONNECTION_NAME = 'default'
class ConnectionError(Exception):
pass
_connection_settings = {}
_connections = {}
_dbs = {}
def register_connection(alias, name, host='localhost', port=27017,
is_slave=False, read_preference=False, slaves=None,
username=None, password=None, **kwargs):
"""Add a connection.
:param alias: the name that will be used to refer to this connection
throughout MongoEngine
:param name: the name of the specific database to use
:param host: the host name of the :program:`mongod` instance to connect to
:param port: the port that the :program:`mongod` instance is running on
:param is_slave: whether the connection can act as a slave ** Depreciated pymongo 2.0.1+
:param read_preference: The read preference for the collection ** Added pymongo 2.1
:param slaves: a list of aliases of slave connections; each of these must
be a registered connection that has :attr:`is_slave` set to ``True``
:param username: username to authenticate with
:param password: password to authenticate with
:param kwargs: allow ad-hoc parameters to be passed into the pymongo driver
"""
global _connection_settings
# Handle uri style connections
if "://" in host:
uri_dict = uri_parser.parse_uri(host)
if uri_dict.get('database') is None:
raise ConnectionError("If using URI style connection include "\
"database name in string")
_connection_settings[alias] = {
'host': host,
'name': uri_dict.get('database'),
'username': uri_dict.get('username'),
'password': uri_dict.get('password')
}
_connection_settings[alias].update(kwargs)
return
_connection_settings[alias] = {
'name': name,
'host': host,
'port': port,
'is_slave': is_slave,
'slaves': slaves or [],
'username': username,
'password': password,
'read_preference': read_preference
}
_connection_settings[alias].update(kwargs)
def disconnect(alias=DEFAULT_CONNECTION_NAME):
global _connections
global _dbs
if alias in _connections:
get_connection(alias=alias).disconnect()
del _connections[alias]
if alias in _dbs:
del _dbs[alias]
def get_connection(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
global _connections
# Connect to the database if not already connected
if reconnect:
disconnect(alias)
if alias not in _connections:
if alias not in _connection_settings:
msg = 'Connection with alias "%s" has not been defined'
if alias == DEFAULT_CONNECTION_NAME:
msg = 'You have not defined a default connection'
raise ConnectionError(msg)
conn_settings = _connection_settings[alias].copy()
if hasattr(pymongo, 'version_tuple'): # Support for 2.1+
conn_settings.pop('name', None)
conn_settings.pop('slaves', None)
conn_settings.pop('is_slave', None)
conn_settings.pop('username', None)
conn_settings.pop('password', None)
else:
# Get all the slave connections
if 'slaves' in conn_settings:
slaves = []
for slave_alias in conn_settings['slaves']:
slaves.append(get_connection(slave_alias))
conn_settings['slaves'] = slaves
conn_settings.pop('read_preference')
connection_class = Connection
if 'replicaSet' in conn_settings:
conn_settings['hosts_or_uri'] = conn_settings.pop('host', None)
connection_class = ReplicaSetConnection
try:
_connections[alias] = connection_class(**conn_settings)
except Exception, e:
raise ConnectionError("Cannot connect to database %s :\n%s" % (alias, e))
return _connections[alias]
def get_db(alias=DEFAULT_CONNECTION_NAME, reconnect=False):
global _dbs
if reconnect:
disconnect(alias)
if alias not in _dbs:
conn = get_connection(alias)
conn_settings = _connection_settings[alias]
_dbs[alias] = conn[conn_settings['name']]
# Authenticate if necessary
if conn_settings['username'] and conn_settings['password']:
_dbs[alias].authenticate(conn_settings['username'],
conn_settings['password'])
return _dbs[alias]
def connect(db, alias=DEFAULT_CONNECTION_NAME, **kwargs):
"""Connect to the database specified by the 'db' argument.
Connection settings may be provided here as well if the database is not
running on the default port on localhost. If authentication is needed,
provide username and password arguments as well.
Multiple databases are supported by using aliases. Provide a separate
`alias` to connect to a different instance of :program:`mongod`.
.. versionchanged:: 0.6 - added multiple database support.
"""
global _connections
if alias not in _connections:
register_connection(alias, db, **kwargs)
return get_connection(alias)
# Support old naming convention
_get_connection = get_connection
_get_db = get_db
|
Python
| 0
|
@@ -2910,16 +2910,24 @@
defined'
+ %25 alias
%0A
|
8de10ac1bf133c41cc1d0e330714e1659e42b092
|
add script to write consul-manager ip to a text file
|
consul/get_consul_manager_ip.py
|
consul/get_consul_manager_ip.py
|
Python
| 0
|
@@ -0,0 +1,614 @@
+%0Aimport os%0Aimport digitalocean%0A%0ATOKEN_FILE = %22/srv/secrets-newsblur/keys/digital_ocean.token%22%0A%0Awith open(TOKEN_FILE) as f:%0A token = f.read().strip()%0A os.environ%5B'DO_API_TOKEN'%5D = token%0A%0Amanager = digitalocean.Manager(token=token)%0Amy_droplets = manager.get_all_droplets()%0Aconsul_manager_droplet = %5Bd for d in my_droplets if d.name == %22consul-manager%22%5D%5B0%5D%0Aconsul_manager_ip_address = consul_manager_droplet.ip_address%0A%0A# write or overwrite the consul-manager ip%0Aif %22consul_manager_ip.txt%22 not in os.listdir('ansible'):%0A with open('consul_manager_ip.txt', 'w') as f:%0A f.write(consul_manager_ip_address)
|
|
32f4055b52c8768c80cf82451f6ace74af600d0c
|
test new analyze rewrite
|
lib/neuroimaging/refactoring/tests/test_analyze.py
|
lib/neuroimaging/refactoring/tests/test_analyze.py
|
Python
| 0.000002
|
@@ -0,0 +1,483 @@
+import unittest%0Afrom neuroimaging.refactoring.analyze import AnalyzeImage%0Afrom neuroimaging.tests.data import repository%0Afrom neuroimaging.visualization.arrayview import arrayview%0A%0Aclass AnalyzeImageTest(unittest.TestCase):%0A%0A def setUp(self):%0A self.image = AnalyzeImage(%22rho%22, datasource=repository)%0A%0A def test_header(self):%0A self.image.raw_array%0A%0A def test_arrayview(self):%0A arrayview(self.image.raw_array)%0A%0A%0Aif __name__ == '__main__': unittest.main()%0A
|
|
6feae8e14b4e690cb0d5c71880b9d6c167ac978b
|
add stub for a csv scraping script
|
ipeds_reporter/scripts/scrape.py
|
ipeds_reporter/scripts/scrape.py
|
Python
| 0
|
@@ -0,0 +1,225 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%22%22%22%0AWIP thing to scrape ipeds for me.%0A%22%22%22%0Afrom selenium import webdriver%0A%0A%0Adef main():%0A driver = webdriver.Firefox()%0A driver.close()%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
4241d67149887f8edc0636f7cb4fdbcb22e8e98b
|
Create repeatings.py
|
job_interview_algs/repeatings.py
|
job_interview_algs/repeatings.py
|
Python
| 0.000006
|
@@ -0,0 +1,650 @@
+TEXT = %22%22%22abba com mother bill mother com %0Aabba dog abba mother com%22%22%22%0A%0A%0Adef secuenced_words(txt):%0A %22%22%22%0A Function identifies and displays the three words%0A most often repeated as a group, regardless of the%0A words order in the group%0A %22%22%22%0A word_list = txt.split()%0A collector = dict()%0A for idx in range(1, len(word_list)-1):%0A item = frozenset(%5Bword_list%5Bidx-1%5D, word_list%5Bidx%5D, word_list%5Bidx+1%5D%5D)%0A if item not in collector:%0A collector%5Bitem%5D = 1%0A else:%0A collector%5Bitem%5D += 1%0A%0A return list(sorted(collector)%5B0%5D)%0A%0A%0Aif __name__ == %22__main__%22:%0A print(secuenced_words(TEXT))%0A
|
|
3bd95d8789871246fb90c6eb0487d9746ef5cb27
|
Migrate all project contents blocks to activity contents blocks
|
bluebottle/cms/migrations/0056_auto_20191106_1041.py
|
bluebottle/cms/migrations/0056_auto_20191106_1041.py
|
Python
| 0
|
@@ -0,0 +1,1653 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.15 on 2019-11-06 09:41%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0A%0A%0Adef migrate_project_blocks(apps, schema_editor):%0A ProjectsContent = apps.get_model('cms', 'ProjectsContent')%0A ActivitiesContent = apps.get_model('cms', 'ActivitiesContent')%0A Initiative = apps.get_model('initiatives', 'Initiative')%0A ContentType = apps.get_model('contenttypes', 'ContentType')%0A%0A activity_content_ctype = ContentType.objects.get_for_model(ActivitiesContent)%0A%0A for projects_content in ProjectsContent.objects.all():%0A activities_content = ActivitiesContent.objects.create(%0A title=projects_content.title,%0A sub_title=projects_content.sub_title,%0A sort_order=projects_content.sort_order,%0A placeholder=projects_content.placeholder,%0A parent_id=projects_content.parent_id,%0A language_code=projects_content.language_code,%0A polymorphic_ctype_id=activity_content_ctype.pk,%0A parent_type_id=projects_content.parent_type_id,%0A highlighted=projects_content.from_homepage%0A )%0A for project in projects_content.projects.all():%0A initiative = Initiative.objects.get(slug=project.slug)%0A%0A for activity in initiative.activities.all():%0A activities_content.activities.add(activity)%0A%0A activities_content.save()%0A projects_content.delete()%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('cms', '0055_migrate_statistics'),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(migrate_project_blocks)%0A %5D%0A
|
|
06570a926bde2ea10730062b05a2348c3020745c
|
Add example: filtered ensemble average.
|
examples/filter_ensemble_average.py
|
examples/filter_ensemble_average.py
|
Python
| 0
|
@@ -0,0 +1,939 @@
+import numpy as np%0Aimport matplotlib.pyplot as plt%0Aimport atomic%0A%0Afrom ensemble_average import time_dependent_power%0A%0A%0Aif __name__ == '__main__':%0A times = np.logspace(-7, 0, 50)%0A temperature = np.logspace(0, 3, 50)%0A density = 1e19%0A%0A from atomic.pec import TransitionPool%0A ad = atomic.element('argon')%0A tp = TransitionPool.from_adf15('adas_data/pec/*ar*.dat')%0A ad = tp.filter_energy(2e3, 20e3, 'eV').create_atomic_data(ad)%0A%0A rt = atomic.RateEquations(ad)%0A y = rt.solve(times, temperature, density)%0A%0A taus = np.array(%5B 1e14, 1e15, 1e16, 1e17, 1e18%5D)/density%0A%0A plt.figure(1); plt.clf()%0A from filter_construction import plot_coeffs%0A plot_coeffs(ad, temperature, 5)%0A plt.ylim(1e-35, 1e-30)%0A plt.draw()%0A%0A plt.figure(2); plt.clf()%0A time_dependent_power(y, taus)%0A plt.draw()%0A%0A plt.figure(3); plt.clf()%0A time_dependent_power(y, taus, ensemble_average=True)%0A plt.draw()%0A%0A%0A plt.show()%0A
|
|
d9985ec4fa37cf99e0e541c7affadd5ec9288a0c
|
Create multithread.py
|
APIs/multithread.py
|
APIs/multithread.py
|
Python
| 0.000001
|
@@ -0,0 +1,935 @@
+#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A%22%22%22%0ACreated on Mon Jan 15 22:59:02 2018%0A%0A@author: zhurundong%0A%22%22%22%0A%0Aimport time%0Aimport requests%0Aimport asyncio%0Aimport aiohttp%0Afrom concurrent.futures import ThreadPoolExecutor%0A%0ANUMBERS = range(12)%0AURL = 'http://httpbin.org/get?a=%7B%7D'%0A%0A# Get http requests results%0Adef fetch(a):%0A r = requests.get(URL.format(a))%0A return r.json()%5B'args'%5D%5B'a'%5D%0A%0Astart = time.time()%0A%0Afor num in NUMBERS:%0A result = fetch(num)%0A print('fetch(%7B%7D) = %7B%7D'.format(num, result))%0A%0Aprint('cost time: %7B%7D'.format(time.time() - start))%0A%0A# Get http requests results%0Adef fetch(a):%0A r = requests.get(URL.format(a))%0A return r.json()%5B'args'%5D%5B'a'%5D%0A%0Astart = time.time()%0A%0A# Using ThreadPool%0A%0Awith ThreadPoolExecutor(max_workers = 5) as executor:%0A for num, result in zip(NUMBERS, executor.map(fetch, NUMBERS)):%0A print('fetch(%7B%7D) = %7B%7D'.format(num, result))%0A%0Aprint('cost time: %7B%7D'.format(time.time() - start))%0A
|
|
93f6ebde39ef0538624ad3eb94316bf8bdf69fd9
|
Create N_QueensII.py
|
Array/N_QueensII.py
|
Array/N_QueensII.py
|
Python
| 0.000011
|
@@ -0,0 +1,1010 @@
+Follow up for N-Queens problem.%0A%0ANow, instead outputting board configurations, return the total number of distinct solutions.%0A%0Aclass Solution:%0A # @param %7Binteger%7D n%0A # @return %7Binteger%7D%0A def totalNQueens(self, n):%0A if n == 0: return 0%0A self.result = 0 # Here we should use the global variable, otherwise the result will not change%0A checklist = %5B-1 for i in xrange(n)%5D%0A self.queen_helper(n, 0, checklist)%0A return self.result %0A %0A def check_helper(self, depth, i, checklist):%0A for k in xrange(depth):%0A if checklist%5Bk%5D == i or abs(checklist%5Bk%5D - i) == abs(depth-k):%0A return False%0A return True%0A %0A def queen_helper(self, n, depth, checklist):%0A if depth == n: %0A self.result += 1; return%0A for i in xrange(n):%0A if self.check_helper(depth, i, checklist):%0A checklist%5Bdepth%5D = i%0A self.queen_helper(n, depth+1, checklist)%0A %0A
|
|
38b4ec7164f07af7135c41c401c4f403c1061d66
|
Add skeleton for parsing commands
|
app/main.py
|
app/main.py
|
Python
| 0.000002
|
@@ -0,0 +1,1430 @@
+%22%22%22lazy%0A%0A Usage:%0A lazy (new%7Cn)%0A lazy (show%7Cs) %5B%3Cid%3E%5D%0A lazy (delete%7Cd) %5B%3Cid%3E%5D%0A lazy (import%7Ci) %3Cpath%3E%0A lazy (export%7Ce) %3Cpath%3E %5B%3Cid%3E%5D%0A%0A Options:%0A -h, --help: Show this help message.%0A%22%22%22%0A%0Afrom docopt import docopt%0A%0A%0Adef main():%0A # Parse commandline arguments.%0A args = docopt(__doc__)%0A%0A if args%5B'new'%5D or args%5B'n'%5D:%0A # Insert a new task.%0A pass%0A elif args%5B'show'%5D or args%5B's'%5D:%0A if args%5B'%3Cid%3E'%5D:%0A # Show the task whose ID most closely matches the given ID.%0A pass%0A else:%0A # Show all tasks for the current user.%0A pass%0A elif args%5B'delete'%5D or args%5B'd'%5D:%0A if args%5B'%3Cid%3E'%5D:%0A # Delete the task with the ID that most closely matches the given%0A # ID.%0A pass%0A else:%0A # Prompt the user to input the ID of the task to delete.%0A # Then delete the task with the ID that matches the given one best.%0A pass%0A elif args%5B'import'%5D or args%5B'i'%5D:%0A # Check if the given path exists and if so, import from it.%0A pass%0A elif args%5B'export'%5D or args%5B'e'%5D:%0A # Check if it is possible to write to the given path.%0A if args%5B'%3Cid%3E'%5D:%0A # Write only the task with the ID that matches the given one best.%0A pass%0A else:%0A # Write all tasks the current user has to the file.%0A pass%0A
|
|
3ed9dd0ca03216311771cda5f9cd3eb954a14d4f
|
Add boilerplate with simple test sounds
|
telemeta/management/commands/telemeta-test-boilerplate.py
|
telemeta/management/commands/telemeta-test-boilerplate.py
|
Python
| 0
|
@@ -0,0 +1,1249 @@
+from optparse import make_option%0Afrom django.conf import settings%0Afrom django.core.management.base import BaseCommand, CommandError%0Afrom django.contrib.auth.models import User%0Afrom django.template.defaultfilters import slugify%0A%0Aimport os%0Afrom telemeta.models import *%0Afrom timeside.core.tools.test_samples import generateSamples%0A%0A%0Aclass Command(BaseCommand):%0A help = %22Setup and run a boilerplate for testing%22%0A%0A code = 'Tests'%0A%0A def handle(self, *args, **options):%0A # NOT for production%0A # self.processor_cleanup()%0A # self.result_cleanup()%0A%0A media_dir = 'items' + os.sep + 'tests'%0A samples_dir = settings.MEDIA_ROOT + media_dir%0A samples = generateSamples(samples_dir=samples_dir)%0A%0A collection, c = MediaCollection.objects.get_or_create(title=self.code,%0A code=self.code)%0A%0A for sample in samples.iteritems():%0A filename, path = sample%0A title = os.path.splitext(filename)%5B0%5D%0A path = media_dir + os.sep + filename%0A item, c = MediaItem.objects.get_or_create(title=title,%0A code=self.code + '-' + slugify(filename),%0A file=path, collection=collection)%0A%0A
|
|
eeea573c3ecf6aa2baacdda61c0f9a248a28780f
|
add missing migration
|
ynr/apps/uk_results/migrations/0034_auto_20180130_1243.py
|
ynr/apps/uk_results/migrations/0034_auto_20180130_1243.py
|
Python
| 0.000258
|
@@ -0,0 +1,1247 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.9.13 on 2018-01-30 12:43%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('uk_results', '0033_auto_20170506_2042'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterModelOptions(%0A name='postelectionresult',%0A options=%7B'get_latest_by': 'confirmed_resultset__created'%7D,%0A ),%0A migrations.AlterField(%0A model_name='councilelectionresultset',%0A name='review_status',%0A field=models.CharField(blank=True, choices=%5B(None, b'Unreviewed'), (b'unconfirmed', b'Unconfirmed'), (b'confirmed', b'Confirmed'), (b'rejected', b'Rejected')%5D, max_length=100),%0A ),%0A migrations.AlterField(%0A model_name='postelectionresult',%0A name='confirmed',%0A field=models.BooleanField(default=True),%0A ),%0A migrations.AlterField(%0A model_name='resultset',%0A name='review_status',%0A field=models.CharField(blank=True, choices=%5B(None, b'Unreviewed'), (b'unconfirmed', b'Unconfirmed'), (b'confirmed', b'Confirmed'), (b'rejected', b'Rejected')%5D, max_length=100),%0A ),%0A %5D%0A
|
|
59f0a18b5232e866f84fdaf6688ced5a1b4a9c44
|
Add fedora.tg.widgets module containing a few proof-of-concept Fedora TurboGears widgets
|
fedora/tg/widgets.py
|
fedora/tg/widgets.py
|
Python
| 0
|
@@ -0,0 +1,2155 @@
+# Proof-of-concept Fedora TurboGears widgets%0A# Authors: Luke Macken %3Clmacken@redhat.com%3E%0A%0Aimport re%0Aimport urllib2%0Aimport feedparser%0Aimport simplejson%0A%0Afrom bugzilla import Bugzilla%0Afrom turbogears.widgets import Widget%0A%0Aclass FedoraPeopleWidget(Widget):%0A template = %22%22%22%0A %3Ctable xmlns:py=%22http://purl.org/kid/ns#%22 border=%220%22%3E%0A %3Ctr py:for=%22entry in entries%22%3E%0A %3Ctd%3E%3Cimg src=%22$%7Bentry%5B'image'%5D%7D%22 height=%2232%22 width=%2232%22/%3E%3C/td%3E%0A %3Ctd%3E%3Ca href=%22$%7Bentry%5B'link'%5D%7D%22%3E$%7Bentry%5B'title'%5D%7D%3C/a%3E%3C/td%3E%0A %3C/tr%3E%0A %3C/table%3E%0A %22%22%22%0A params = %5B%22entries%22%5D%0A%0A def __init__(self):%0A self.entries = %5B%5D%0A regex = re.compile('%3Cimg src=%22(.*)%22 alt=%22%22 /%3E')%0A feed = feedparser.parse('http://planet.fedoraproject.org/rss20.xml')%0A for entry in feed%5B'entries'%5D%5B:5%5D:%0A self.entries.append(%7B%0A 'link' : entry%5B'link'%5D,%0A 'title' : entry%5B'title'%5D,%0A 'image' : regex.match(entry%5B'summary'%5D).group(1)%0A %7D)%0A%0A%0Aclass FedoraMaintainerWidget(Widget):%0A template = %22%22%22%0A %3Ctable xmlns:py=%22http://purl.org/kid/ns#%22 border=%220%22%3E%0A %3Ctr py:for=%22pkg in packages%22%3E%0A %3Ctd%3E%3Ca href=%22https://admin.fedoraproject.org/pkgdb/packages/name/$%7Bpkg%5B'name'%5D%7D%22%3E$%7Bpkg%5B'name'%5D%7D%3C/a%3E%3C/td%3E%0A %3C/tr%3E%0A %3C/table%3E%0A %22%22%22%0A params = %5B%22packages%22%5D%0A%0A def __init__(self, username):%0A page = urllib2.urlopen('https://admin.fedoraproject.org/pkgdb/users/packages/%25s/?tg_format=json' %25 username)%0A self.packages = simplejson.load(page)%5B'pkgs'%5D%5B:5%5D%0A%0A%0Aclass BugzillaWidget(Widget):%0A template = %22%22%22%0A %3Ctable xmlns:py=%22http://purl.org/kid/ns#%22 border=%220%22%3E%0A %3Ctr py:for=%22bug in bugs%22%3E%0A %3Ctd%3E%0A %3Ca href=%22$%7Bbug.url%7D%22%3E$%7Bbug.bug_id%7D%3C/a%3E $%7Bbug.short_short_desc%7D%0A %3C/td%3E%0A %3C/tr%3E%0A %3C/table%3E%0A 
%22%22%22%0A params = %5B%22bugs%22%5D%0A%0A def __init__(self, email):%0A bz = Bugzilla(url='https://bugzilla.redhat.com/xmlrpc.cgi')%0A self.bugs = bz.query(%7B%0A 'product' : 'Fedora',%0A 'email1' : email,%0A 'emailassigned_to1' : True%0A %7D)%5B:5%5D%0A
|
|
046922c6b842e5ba78fc44848ddf24e6434dd799
|
Add related options to floating ip config options
|
nova/conf/floating_ips.py
|
nova/conf/floating_ips.py
|
# needs:fix_opt_description
# Copyright 2016 Huawei Technology corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
# TODO(johngarbutt) all of these opitions only work with nova-network.
# We need to find a good way to document that.
floating_ip_opts = [
cfg.StrOpt('default_floating_pool',
default='nova',
help="""
Default pool for floating IPs.
This option specifies the default floating IP pool for allocating floating IPs.
While allocating a floating ip, users can optionally pass in the name of the
pool they want to allocate from, otherwise it will be pulled from the
default pool.
If this option is not set, then 'nova' is used as default floating pool.
Possible values:
* Any string representing a floating IP pool name
"""),
cfg.BoolOpt('auto_assign_floating_ip',
default=False,
help="""
Autoassigning floating IP to VM
When set to True, floating IP is auto allocated and associated
to the VM upon creation.
"""),
cfg.StrOpt('floating_ip_dns_manager',
default='nova.network.noop_dns_driver.NoopDNSDriver',
help="""
Full class name for the DNS Manager for floating IPs.
This option specifies the class of the driver that provides functionality
to manage DNS entries associated with floating IPs.
When a user adds a DNS entry for a specified domain to a floating IP,
nova will add a DNS entry using the specified floating DNS driver.
When a floating IP is deallocated, its DNS entry will automatically be deleted.
Possible values:
* Full Python path to the class to be used
"""),
cfg.StrOpt('instance_dns_manager',
default='nova.network.noop_dns_driver.NoopDNSDriver',
help="""
Full class name for the DNS Manager for instance IPs.
This option specifies the class of the driver that provides functionality
to manage DNS entries for instances.
On instance creation, nova will add DNS entries for the instance name and
id, using the specified instance DNS driver and domain. On instance deletion,
nova will remove the DNS entries.
Possible values:
* Full Python path to the class to be used
"""),
# TODO(aunnam): remove default
cfg.StrOpt('instance_dns_domain',
default='',
help="""
If specified, Nova checks if the availability_zone of every instance matches
what the database says the availability_zone should be for the specified
dns_domain.
""")
]
def register_opts(conf):
conf.register_opts(floating_ip_opts)
def list_opts():
return {'DEFAULT': floating_ip_opts}
|
Python
| 0.000011
|
@@ -1,33 +1,4 @@
-# needs:fix_opt_description%0A%0A
# Co
@@ -666,126 +666,8 @@
g%0A%0A%0A
-# TODO(johngarbutt) all of these opitions only work with nova-network.%0A# We need to find a good way to document that.%0A
floa
@@ -1131,20 +1131,16 @@
alues:%0A%0A
-
* Any st
@@ -1927,36 +1927,32 @@
ssible values:%0A%0A
-
* Full Python pa
@@ -1969,32 +1969,109 @@
class to be used
+%0A%0ARelated options:%0A%0A* use_neutron: this options only works with nova-network.
%0A%22%22%22),%0A cfg.S
@@ -2551,20 +2551,16 @@
alues:%0A%0A
-
* Full P
@@ -2598,48 +2598,90 @@
sed%0A
-%22%22%22),%0A # TODO(aunnam): remove default
+%0ARelated options:%0A%0A* use_neutron: this options only works with nova-network.%0A%22%22%22),
%0A
@@ -2913,16 +2913,93 @@
_domain.
+%0A%0ARelated options:%0A%0A* use_neutron: this options only works with nova-network.
%0A%22%22%22)%0A%5D%0A
|
1c410bcf4061efc2130dccc000fc132b487d7ea4
|
Fix problem with jinja2 filters not working properly on LazyField objects from set plugin.
|
flexget/plugins/modify/set_field.py
|
flexget/plugins/modify/set_field.py
|
from copy import copy
from datetime import datetime, date, time
from email.utils import parsedate
from time import mktime
import os
import re
import sys
import logging
from flexget.plugin import register_plugin, priority
from flexget.utils.tools import replace_from_entry
log = logging.getLogger('set')
jinja = False
def filter_pathbase(val):
"""Base name of a path."""
return os.path.basename(val or '')
def filter_pathname(val):
"""Base name of a path, without its extension."""
return os.path.splitext(os.path.basename(val or ''))[0]
def filter_pathext(val):
"""Extension of a path (including the '.')."""
return os.path.splitext(val or '')[1]
def filter_pathdir(val):
"""Directory containing the given path."""
return os.path.dirname(val or '')
def filter_pathscrub(val, ascii=False, windows=None):
"""Replace problematic characters in a path."""
if windows is None:
windows = sys.platform.startswith("win")
if ascii:
repl = {'"': '`', "'": '`'}
if windows:
repl.update({':': ';', '?': '_'})
else:
repl = {'"': u'\u201d', "'": u'\u2019'}
if windows:
repl.update({':': u'\u02d0', '?': u'\u061f'})
return re.sub('[%s]' % ''.join(repl), lambda i: repl[i.group(0)], val or '')
def filter_re_replace(val, pattern, repl):
"""Perform a regexp replacement on the given string."""
return re.sub(pattern, repl, unicode(val))
def filter_re_search(val, pattern):
"""Perform a search for given regexp pattern, return the matching portion of the text."""
if not isinstance(val, basestring):
return val
result = re.search(pattern, val)
if result:
i = result.group(0)
return result.group(0)
return ''
def filter_formatdate(val, format):
"""Returns a string representation of a datetime object according to format string."""
if not isinstance(val, (datetime, date, time)):
return val
return val.strftime(format)
def filter_parsedate(val):
"""Attempts to parse a date according to the rules in RFC 2822"""
return datetime.fromtimestamp(mktime(parsedate(val)))
class ModifySet(object):
"""Allows adding information to a feed entry for use later.
Example:
set:
path: ~/download/path/
"""
def __init__(self):
self.keys = {}
try:
from jinja2 import Environment
except ImportError:
self.jinja = False
else:
self.jinja = True
def validator(self):
from flexget import validator
v = validator.factory('dict')
v.accept_any_key('any')
return v
def register_key(self, key, type='text'):
"""
plugins can call this method to register set keys as valid
"""
if key:
if not key in self.keys:
self.keys[key] = type
def register_keys(self, keys):
"""
for easy registration of multiple keys
"""
for key, value in keys.iteritems():
self.register_key(key, value)
def on_feed_start(self, feed, config):
"""Checks that jinja2 is available"""
if not self.jinja:
log.warning("jinja2 module is not available, set plugin will only work with python string replacement.")
# Filter priority is -255 so we run after all filters are finished
@priority(-255)
def on_feed_filter(self, feed, config):
"""
Adds the set dict to all accepted entries. This is not really a filter plugin,
but it needs to be run before feed_download, so it is run last in the filter chain.
"""
for entry in feed.entries:
self.modify(entry, config, False, entry in feed.accepted)
def modify(self, entry, config, validate=False, errors=True):
"""
this can be called from a plugin to add set values to an entry
"""
# Create a new dict so we don't overwrite the set config with string replaced values.
conf = copy(config)
# If jinja2 is available do template replacement
if self.jinja:
from jinja2 import Environment, StrictUndefined, UndefinedError
env = Environment(undefined=StrictUndefined)
env.filters.update((name.split('_', 1)[1], filt)
for name, filt in globals().items()
if name.startswith("filter_"))
for field, template_string in conf.items():
if isinstance(template_string, basestring):
template = env.from_string(template_string)
variables = {'now': datetime.now()}
variables.update(entry)
try:
result = template.render(variables)
except UndefinedError, e:
# If the replacement failed, remove this key from the update dict
log.debug('%s did not have the required fields for jinja2 template: %s' % (entry['title'], e))
del conf[field]
else:
conf[field] = result
# Do string replacement
for field, value in conf.items():
if isinstance(value, basestring):
if value != config[field]:
# If jinja replacement already occurred, skip this field
continue
logger = log.error if errors else log.debug
result = replace_from_entry(value, entry, field, logger, default=None)
if result is None:
# If the replacement failed, remove this key from the update dict
del conf[field]
else:
conf[field] = result
if validate:
from flexget import validator
v = validator.factory('dict')
for key in self.keys:
v.accept(self.keys[key], key=key)
if not v.validate(config):
log.info('set parameters are invalid, error follows')
log.info(v.errors.messages)
return
# If there are valid items in the config, apply to entry.
if conf:
log.debug('adding set: info to entry:\'%s\' %s' % (entry['title'], conf))
entry.update(conf)
register_plugin(ModifySet, 'set', api_ver=2)
|
Python
| 0.000004
|
@@ -4206,16 +4206,22 @@
nedError
+, meta
%0A
@@ -4539,32 +4539,349 @@
g, basestring):%0A
+ # Make sure any LazyFields that are referenced are loaded before rendering%0A for var in meta.find_undeclared_variables(env.parse(template_string)):%0A if entry.is_lazy(var):%0A # Force the load%0A entry%5Bvar%5D%0A
|
efee783cb87fe2015ab719699e80a661aa3b4d4b
|
Create main.py
|
main.py
|
main.py
|
Python
| 0.000001
|
@@ -0,0 +1,979 @@
+import os%0Aimport cv2%0Aimport FocusStack%0A%22%22%22%0A%0A Focus stack driver program%0A%0A This program looks for a series of files of type .jpg, .jpeg, or .png%0A in a subdirectory %22input%22 and then merges them together using the%0A FocusStack module. The output is put in the file merged.png%0A%0A%0A Author: Charles McGuinness (charles@mcguinness.us)%0A Copyright: Copyright 2015 Charles McGuinness%0A License: Apache License 2.0%0A%0A%22%22%22%0A%0Adef stackHDRs(image_files):%0A focusimages = %5B%5D%0A for img in image_files:%0A print %22Reading in file %7B%7D%22.format(img)%0A focusimages.append(cv2.imread(%22input/%7B%7D%22.format(img)))%0A%0A merged = FocusStack.focus_stack(focusimages)%0A cv2.imwrite(%22merged.png%22, merged)%0A%0A%0Aif __name__ == %22__main__%22:%0A image_files = sorted(os.listdir(%22input%22))%0A for img in image_files:%0A if img.split(%22.%22)%5B-1%5D.lower() not in %5B%22jpg%22, %22jpeg%22, %22png%22%5D:%0A image_files.remove(img)%0A%0A%0A stackHDRs(image_files)%0A print %22That's All Folks!%22%0A
|
|
3e0ababfeb0e22d33853d4bad68a29a0249e1a60
|
Add script demonstrating thread deadlock
|
other/iterate_deadlock.py
|
other/iterate_deadlock.py
|
Python
| 0
|
@@ -0,0 +1,495 @@
+%0A%22%22%22%0A Demonstrates deadlock related to attribute iteration.%0A%22%22%22%0A%0Afrom threading import Thread%0A%0Aimport h5py%0A%0AFNAME = %22deadlock.hdf5%22%0A%0Adef make_file():%0A with h5py.File(FNAME,'w') as f:%0A for idx in xrange(1000):%0A f.attrs%5B'%25d'%25idx%5D = 1%0A%0Adef list_attributes():%0A with h5py.File(FNAME, 'r') as f:%0A names = list(f.attrs)%0A%0Aif __name__ == '__main__':%0A%0A make_file()%0A thread = Thread(target=list_attributes)%0A thread.start()%0A list_attributes()%0A thread.join()%0A
|
|
b1517f63c3aa549170d77c6fb3546901fdbe744b
|
Remove the hard-coded extra 'cv' and 'program' fields
|
candidates/migrations/0017_remove_cv_and_program_fields.py
|
candidates/migrations/0017_remove_cv_and_program_fields.py
|
Python
| 0.000636
|
@@ -0,0 +1,475 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('candidates', '0016_migrate_data_to_extra_fields'),%0A %5D%0A%0A operations = %5B%0A migrations.RemoveField(%0A model_name='personextra',%0A name='cv',%0A ),%0A migrations.RemoveField(%0A model_name='personextra',%0A name='program',%0A ),%0A %5D%0A
|
|
5789cc585a69f3c73e63a36d99c02b119f593bc9
|
Create accelerometer.py
|
gadgets/navigators/accelerometer.py
|
gadgets/navigators/accelerometer.py
|
Python
| 0.000074
|
@@ -0,0 +1,624 @@
+from spi.rpi_spi import rpi_spi_dev%0Afrom spi.adc.MCP3208 import MCP3208%0A%0Aclass ACCEL_GY61():%0A%09# use chip ADXL335%0A%09def __init__(self,device=0,x_channel=0,y_channel=1,z_channel=2):%0A%09%09self.spi = rpi_spi_dev(device).spi%0A%09%09self.mcp = None%0A%09%09if self.spi is not None:%0A%09%09%09self.mcp = MCP3208(self.spi)%09%0A%09%09self.vrx_channel = x_channel%0A%09%09self.vry_channel = y_channel%0A%09%09self.vrz_channel = z_channel%09%0A%0A%09def get_data(self):%0A%09%09if self.mcp is None:%0A%09%09%09return (0,0)%0A%09%09xpos = self.mcp.read_adc(self.vrx_channel)%0A %09ypos = self.mcp.read_adc(self.vry_channel)%0A %09zpos = self.mcp.read_adc(self.vrz_channel)%0A%09%09return (xpos,ypos,zpos)%0A
|
|
c0da1aecb6e663d9586238e9d8f2b7a8abb40cf7
|
Add transform module to place ongoing built in transformmers
|
hug/transform.py
|
hug/transform.py
|
Python
| 0
|
@@ -0,0 +1,1727 @@
+%22%22%22hug/transform.py%0A%0ADefines Hug's built-in output transforming functions%0A%0ACopyright (C) 2015 Timothy Edmund Crosley%0A%0APermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated%0Adocumentation files (the %22Software%22), to deal in the Software without restriction, including without limitation%0Athe rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and%0Ato permit persons to whom the Software is furnished to do so, subject to the following conditions:%0A%0AThe above copyright notice and this permission notice shall be included in all copies or%0Asubstantial portions of the Software.%0A%0ATHE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED%0ATO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL%0ATHE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF%0ACONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR%0AOTHER DEALINGS IN THE SOFTWARE.%0A%0A%22%22%22%0A%0A%0Adef content_type(transformers, default=None):%0A '''Returns a different transformer depending on the content type passed in.%0A If none match and no default is given no transformation takes place.%0A%0A should pass in a dict with the following format:%0A%0A %7B'%5Bcontent-type%5D': transformation_action,%0A ...%0A %7D%0A '''%0A def transform(data, request):%0A transformer = transformers.get(request.content_type.split(';')%5B0%5D, default)%0A if not transformer:%0A return data%0A%0A return transformer(data)%0A return transform%0A
|
|
3ed52b0a51ccb18b053ca69984d8072e1ffdec25
|
Add 328
|
Ninja/Leetcode/328_Odd_Even_Linked_List.py
|
Ninja/Leetcode/328_Odd_Even_Linked_List.py
|
Python
| 0.998625
|
@@ -0,0 +1,1318 @@
+%22%22%22%0AGiven a singly linked list, group all odd nodes together followed by the even nodes. Please note here we are talking about the node number and not the value in the nodes.%0A%0AYou should try to do it in place. The program should run in O(1) space complexity and O(nodes) time complexity.%0A%0AExample 1:%0A%0AInput: 1-%3E2-%3E3-%3E4-%3E5-%3ENULL%0AOutput: 1-%3E3-%3E5-%3E2-%3E4-%3ENULL%0AExample 2:%0A%0AInput: 2-%3E1-%3E3-%3E5-%3E6-%3E4-%3E7-%3ENULL%0AOutput: 2-%3E3-%3E6-%3E7-%3E1-%3E5-%3E4-%3ENULL%0ANote:%0A%0AThe relative order inside both the even and odd groups should remain as it was in the input.%0AThe first node is considered odd, the second node even and so on ...%0A%22%22%22%0A%0A# Definition for singly-linked list.%0A# class ListNode:%0A# def __init__(self, x):%0A# self.val = x%0A# self.next = None%0A%0Aclass Solution:%0A def oddEvenList(self, head: ListNode) -%3E ListNode:%0A if not head:%0A return None%0A%0A odd_current = head%0A even_current = head.next%0A even_head = head.next%0A current = head%0A i = 0%0A while current:%0A if i %25 2 == 0:%0A even_current.next = current%0A even_current = even_current.next%0A else:%0A odd_current.next = current%0A odd_current = odd_current.next%0A%0A i += 1%0A current = current.next%0A%0A%0A%0A%0A return head%0A
|
|
bbd43a3af7fdd5eacea13fae9c1670aa5436e7bc
|
add data not sufficient exception that shall be raised when data provided to a feature is not sufficient
|
features/exception_data_not_suffient.py
|
features/exception_data_not_suffient.py
|
Python
| 0
|
@@ -0,0 +1,252 @@
+%22%22%22This exception shall be raised in case the data provided to a feature is not%0Asufficient%22%22%22%0A%0A%0Aclass DataNotSufficientError(Exception):%0A%0A def __init__(self, value):%0A self.value = value%0A%0A def __str__(self):%0A return repr(self.value)%0A
|
|
e41d4fa8a61126495dc5cc42575fa5ce5b89f1b7
|
add spiel on whitespace
|
1_start/whitespace.py
|
1_start/whitespace.py
|
Python
| 0.99847
|
@@ -0,0 +1,2102 @@
+# FUN WITH WHITESPACE IN PYTHON%0A%0A%0A# Whitespace is critical in Python. Unlike some other scripting languages,%0A# which use characters to tell the interpreter where functions and loops%0A# end, Python uses structured indentation for new lines, making %22blocks%22 of%0A# code.%0A%0Amy_string = 'New York'%0A%0Aprint %22Start spreading the news,%22%0A%0Aif my_string == 'New York':%0A print %22I'm leaving today,%22%0A print %22I want to be a part of it,%22%0A %0A for num in range(0,2):%0A print my_string%0A%0Aelse:%0A print %22you clearly don't know how this song goes. %7B%7D?%22.format(my_string)%0A %0A# What do you think the above does? Let's step through it.%0A# (Notice how blank lines between code is A-OK.) %0A%0A%0A# Some other places indentation and whitespace don't matter much:%0A%0A# When assigning items to a list or a string; the below is ugly, but sometimes%0A# it's more readable in a script to define things on different lines.%0A%0Alist_of_cities = %5B%0A%0A 'Buffalo',%0A %0A 'Key West',%0A 'Fort Collins', 'Bakersfield' %5D%0A%0Awordy_string = %22Four score and seven years ago, our fathers brought%22 %5C%0A %22 forth on this continent ... hmm. I%22 %5C%0A %22 am desperately trying to remember what Abraham Lincoln%22 %5C%0A %22 said, because it was one of the most important and%22 %5C%0A %22 and influentual speeches in modern history; I've even%22 %5C%0A %22 been to Gettysburg. Wow, this is pretty embarrasing.%22%0A%0A%0A# Tabs and spaces. Don't mix them. The interpreter will choke on it. Style%0A# dictates that you use four spaces instead of tabs. 
I generally set up my%0A# text editor to replace tabs on the fly or do it after I'm done with my%0A# script, because I much prefer hitting tab once instead of space four times.%0A%0Aprint %22Start spreading the news,%22%0Aif my_string == 'New York':%0A print %22I'm leaving today,%22%0A%09print %22I want to be a part of it,%22%0A for num in range(0,2):%0A print my_string%0Aelse:%0A print %22you clearly don't know how this song goes. %7B%7D?%22.format(my_string)%0A%0A# The above looks fine, right? You will get an IndentationError. Most text%0A# editors have a function%0A
|
|
acf7e2a9eeedfef28f892d4633b9bbeae479390a
|
Set conditional to be against result of get_realm, not input.
|
zerver/management/commands/soft_activate_deactivate_users.py
|
zerver/management/commands/soft_activate_deactivate_users.py
|
from __future__ import absolute_import
from __future__ import print_function
from django.db import connection
from django.conf import settings
from django.utils.timezone import now as timezone_now
from typing import Any, List, Dict
from argparse import ArgumentParser
from six.moves import map
import sys
from zerver.models import UserProfile, UserMessage, Realm, RealmAuditLog
from zerver.lib.soft_deactivation import (
do_soft_deactivate_users, do_soft_activate_users,
get_users_for_soft_deactivation, logger
)
from zerver.lib.management import ZulipBaseCommand
class Command(ZulipBaseCommand):
    help = """Soft activate/deactivate users. Users are recognised by there emails here."""

    def add_arguments(self, parser):
        # type: (ArgumentParser) -> None
        """Register the activate/deactivate flags and the optional realm/user args."""
        self.add_realm_args(parser)
        parser.add_argument('-d', '--deactivate',
                            dest='deactivate',
                            action='store_true',
                            default=False,
                            help='Used to deactivate user/users.')
        parser.add_argument('-a', '--activate',
                            dest='activate',
                            action='store_true',
                            default=False,
                            help='Used to activate user/users.')
        parser.add_argument('--inactive-for',
                            type=int,
                            default=28,
                            help='Specify the number of days of user inactivity that user should be marked soft_deactviated')
        parser.add_argument('users', metavar='<users>', type=str, nargs='*', default=[],
                            help="This option can be used to specify a list of user emails to soft activate/deactivate.")

    def _check_all_emails_found(self, users, user_emails):
        # type: (List[UserProfile], List[str]) -> None
        """Raise if any requested email did not resolve to a user profile."""
        user_emails_found = [user.email for user in users]
        for user in user_emails:
            if user not in user_emails_found:
                raise Exception('User with email %s was not found. Check if the email is correct.' % (user))

    def handle(self, *args, **options):
        # type: (*Any, **str) -> None
        if settings.STAGING:
            print('This is a Staging server. Suppressing management command.')
            sys.exit(0)

        # Resolve the realm once, then branch on the resolved object (not
        # the raw option) everywhere below; the original tested
        # options['realm_id'] again, leaving `realm` potentially unbound
        # and ignoring the result of get_realm().
        realm = None
        if options['realm_id']:
            realm = self.get_realm(options)

        filter_kwargs = {}  # type: Dict[str, Realm]
        if realm is not None:
            filter_kwargs = dict(realm=realm)

        user_emails = options['users']
        activate = options['activate']
        deactivate = options['deactivate']

        if activate:
            if not user_emails:
                print('You need to specify at least one user to use the activate option.')
                self.print_help("./manage.py", "soft_activate_deactivate_users")
                sys.exit(1)
            users_to_activate = list(UserProfile.objects.filter(
                email__in=user_emails,
                **filter_kwargs
            ))
            self._check_all_emails_found(users_to_activate, user_emails)
            users_activated = do_soft_activate_users(users_to_activate)
            logger.info('Soft Reactivated %d user(s)' % (len(users_activated)))
        elif deactivate:
            if user_emails:
                # Explicit user list: deactivate exactly those users.
                users_to_deactivate = list(UserProfile.objects.filter(
                    email__in=user_emails,
                    **filter_kwargs
                ))
                self._check_all_emails_found(users_to_deactivate, user_emails)
                print('Soft deactivating forcefully...')
            else:
                # No explicit list: pick users inactive for --inactive-for days.
                if realm is not None:
                    filter_kwargs = dict(user_profile__realm=realm)
                users_to_deactivate = get_users_for_soft_deactivation(int(options['inactive_for']), filter_kwargs)
            if users_to_deactivate:
                users_deactivated = do_soft_deactivate_users(users_to_deactivate)
                logger.info('Soft Deactivated %d user(s)' % (len(users_deactivated)))
        else:
            self.print_help("./manage.py", "soft_activate_deactivate_users")
            sys.exit(1)
|
Python
| 0
|
@@ -2064,16 +2064,17 @@
ptions)%0A
+%0A
@@ -2129,35 +2129,33 @@
if
-options%5B'realm_id'%5D
+realm is not None
:%0A
@@ -4025,35 +4025,33 @@
if
-options%5B'realm_id'%5D
+realm is not None
:%0A
|
9760f81ce6cc7783f8fb097931e98f8234307a00
|
add nilearn interface for correlations
|
src/nibetaseries/interfaces/nilearn.py
|
src/nibetaseries/interfaces/nilearn.py
|
Python
| 0
|
@@ -0,0 +1,2579 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-%0A# vi: set ft=python sts=4 ts=4 sw=4 et:%0A%0Afrom nipype.interfaces.nilearn import NilearnBaseInterface%0Afrom nipype.interfaces.base import (%0A BaseInterfaceInputSpec, TraitedSpec,%0A File, SimpleInterface%0A )%0A%0A%0Aclass AtlasConnectivityInputSpec(BaseInterfaceInputSpec):%0A timeseries_file = File(exists=True, mandatory=True,%0A desc='The 4d file being used to extract timeseries data')%0A atlas_file = File(exists=True, mandatory=True,%0A desc='The atlas image with each roi given a unique index')%0A atlas_lut = File(exists=True, mandatory=True,%0A desc='The atlas lookup table to match the atlas image')%0A%0A%0Aclass AtlasConnectivityOutputSpec(TraitedSpec):%0A correlation_matrix = File(exists=True, desc='roi-roi fisher z transformed correlation matrix')%0A%0A%0Aclass AtlasConnectivity(NilearnBaseInterface, SimpleInterface):%0A %22%22%22Calculates correlations between regions of interest%22%22%22%0A%0A input_spec = AtlasConnectivityInputSpec%0A output_spec = AtlasConnectivityOutputSpec%0A%0A def _run_interface(self, runtime):%0A from nilearn.input_data import NiftiLabelsMasker%0A from nilearn.connectome import ConnectivityMeasure%0A import numpy as np%0A import pandas as pd%0A import os%0A%0A # extract timeseries from every label%0A masker = NiftiLabelsMasker(labels_img=self.inputs.atlas_file, standardize=True,%0A memory='nilearn_cache', verbose=1)%0A timeseries = masker.fit_transform(self.inputs.timeseries_file)%0A%0A # create correlation matrix%0A correlation_measure = ConnectivityMeasure(kind='correlation')%0A correlation_matrix = correlation_measure.fit_transform(%5Btimeseries%5D)%5B0%5D%0A np.fill_diagonal(correlation_matrix, np.NaN)%0A%0A # add the atlas labels to the matrix%0A atlas_lut_df = pd.read_csv(self.inputs.atlas_lut, sep='%5Ct')%0A regions = atlas_lut_df%5B'regions'%5D%0A correlation_matrix_df = 
pd.DataFrame(correlation_matrix, index=regions, columns=regions)%0A%0A # do a fisher's r -%3E z transform%0A fisher_z_matrix_df = correlation_matrix_df.apply(lambda x: np.log((1+x) / (1-x)) * 0.5)%0A%0A # write out the file.%0A out_file = os.path.join(runtime.cwd, 'fisher_z_correlation.tsv')%0A fisher_z_matrix_df.to_csv(out_file, sep='%5Ct')%0A%0A # save the filename in the outputs%0A self._results%5B'correlation_matrix'%5D = out_file%0A%0A return runtime%0A%0A%0A%0A%0A
|
|
eea6c7477d348e0a12fed89a7d38763c42621977
|
Fix an unicode error on Windows platform.
|
pelican/contents.py
|
pelican/contents.py
|
# -*- coding: utf-8 -*-
from pelican.utils import slugify, truncate_html_words
from pelican.log import *
from pelican.settings import _DEFAULT_CONFIG
from os import getenv
from sys import platform, stdin
class Page(object):
    """Represents a page.

    Given a content string and metadata, build an object whose attributes
    (author, lang, slug, save_as, url, date_format, locale_date, ...) are
    derived from the metadata and the settings.

    :param content: the string to parse, containing the original content.
    """
    mandatory_properties = ('title',)

    def __init__(self, content, metadata=None, settings=None, filename=None):
        # init parameters
        if not metadata:
            metadata = {}
        if not settings:
            settings = _DEFAULT_CONFIG

        self._content = content
        self.translations = []
        self.status = "published"  # default value

        local_metadata = dict(settings.get('DEFAULT_METADATA', ()))
        local_metadata.update(metadata)

        # set metadata as attributes
        for key, value in local_metadata.items():
            setattr(self, key.lower(), value)

        # default author to the one in settings if not defined
        if not hasattr(self, 'author'):
            if 'AUTHOR' in settings:
                self.author = settings['AUTHOR']
            else:
                self.author = getenv('USER', 'John Doe')
                warning("Author of `{0}' unknow, assuming that his name is `{1}'".format(filename or self.title, self.author).decode("utf-8"))

        # manage languages
        self.in_default_lang = True
        if 'DEFAULT_LANG' in settings:
            default_lang = settings['DEFAULT_LANG'].lower()
            if not hasattr(self, 'lang'):
                self.lang = default_lang
            self.in_default_lang = (self.lang == default_lang)

        # create the slug if not existing, from the title
        if not hasattr(self, 'slug') and hasattr(self, 'title'):
            self.slug = slugify(self.title)

        # create save_as from the slug (+lang)
        if not hasattr(self, 'save_as') and hasattr(self, 'slug'):
            if self.in_default_lang:
                self.save_as = '%s.html' % self.slug
                clean_url = '%s/' % self.slug
            else:
                self.save_as = '%s-%s.html' % (self.slug, self.lang)
                clean_url = '%s-%s/' % (self.slug, self.lang)

        # change the save_as regarding the settings
        if settings.get('CLEAN_URLS', False):
            self.url = clean_url
        elif hasattr(self, 'save_as'):
            self.url = self.save_as

        if filename:
            self.filename = filename

        # manage the date format
        if not hasattr(self, 'date_format'):
            if hasattr(self, 'lang') and self.lang in settings['DATE_FORMATS']:
                self.date_format = settings['DATE_FORMATS'][self.lang]
            else:
                self.date_format = settings['DEFAULT_DATE_FORMAT']

        if hasattr(self, 'date'):
            # On win32, strftime() output is encoded with the console
            # encoding rather than UTF-8, so decoding with 'utf' raises
            # UnicodeDecodeError for non-ASCII month/day names; decode
            # with stdin's encoding there instead.
            if platform == 'win32':
                self.locale_date = self.date.strftime(self.date_format.encode('ascii', 'xmlcharrefreplace')).decode(stdin.encoding)
            else:
                self.locale_date = self.date.strftime(self.date_format.encode('ascii', 'xmlcharrefreplace')).decode('utf')

        # manage summary
        if not hasattr(self, 'summary'):
            self.summary = property(lambda self: truncate_html_words(self.content, 50)).__get__(self, Page)

        # manage status
        # NOTE(review): self.status is assigned "published" above, so this
        # hasattr() guard never fires and DEFAULT_STATUS is presumably dead
        # code — confirm intended behavior before removing either line.
        if not hasattr(self, 'status'):
            self.status = settings['DEFAULT_STATUS']

    def check_properties(self):
        """Test that each mandatory property is set; raise NameError otherwise."""
        for prop in self.mandatory_properties:
            if not hasattr(self, prop):
                raise NameError(prop)

    @property
    def content(self):
        """Return the page content, preferring a lazy _get_content() hook."""
        if hasattr(self, "_get_content"):
            content = self._get_content()
        else:
            content = self._content
        return content
class Article(Page):
    # Articles additionally require 'date' and 'category' metadata on top
    # of Page's 'title' (enforced by Page.check_properties()).
    mandatory_properties = ('title', 'date', 'category')
class Quote(Page):
    # NOTE(review): base_properties is not referenced by check_properties()
    # or anything else visible here — presumably informational only; confirm.
    base_properties = ('author', 'date')
def is_valid_content(content, f):
    """Return True when *content* has every mandatory property set.

    The NameError raised by check_properties() is logged and swallowed so
    that a single malformed file *f* is skipped rather than aborting the run.
    """
    try:
        content.check_properties()
    except NameError as e:
        error(u"Skipping %s: impossible to find informations about '%s'" % (f, e))
        return False
    else:
        return True
|
Python
| 0.000002
|
@@ -164,16 +164,48 @@
t getenv
+%0Afrom sys import platform, stdin
%0A%0Aclass
@@ -2911,24 +2911,213 @@
f, 'date'):%0A
+ if platform == 'win32':%0A self.locale_date = self.date.strftime(self.date_format.encode('ascii','xmlcharrefreplace')).decode(stdin.encoding)%0A else:%0A
@@ -3222,16 +3222,24 @@
('utf')%0A
+
%0A
|
eee7862cead703d11405276c1a399466c9f102c5
|
add shell.py
|
scripts/opencontrail-kubelet/opencontrail_kubelet/shell.py
|
scripts/opencontrail-kubelet/opencontrail_kubelet/shell.py
|
Python
| 0.000003
|
@@ -0,0 +1,358 @@
+#%0A# Copyright (c) 2015 Juniper Networks, Inc.%0A#%0A%0Aimport subprocess%0Aimport logging%0A%0Aclass Shell:%0A # Run a shell command. Log the command run and its output.%0A @staticmethod%0A def run(str):%0A logging.debug('sh: %25s' %25 str)%0A cmd = subprocess.check_output(str, shell=True)%0A logging.debug('output: %25s' %25 cmd.rstrip())%0A return cmd%0A
|
|
73369f23bd008331884d5644ba9923aae4809756
|
add offline db comparison tool
|
scripts/DEV/postgresql/compare_counts.py
|
scripts/DEV/postgresql/compare_counts.py
|
Python
| 0
|
@@ -0,0 +1,1060 @@
+import psycopg2%0A%0Aoldpg = psycopg2.connect(database='postgis', host='localhost', port=5555, user='mesonet')%0Acursor = oldpg.cursor()%0A%0Adbs = %5B%5D%0Acursor.execute(%22%22%22SELECT datname FROM pg_database%0AWHERE datistemplate = false ORDER by datname%22%22%22)%0Afor row in cursor:%0A dbs.append(row%5B0%5D)%0A%0Afor db in dbs:%0A if db %3C= 'cscap':%0A continue%0A print(%22running %25s%22 %25 (db,))%0A oldpg = psycopg2.connect(database=db, host='localhost', port=5555, user='mesonet')%0A ocursor = oldpg.cursor()%0A newpg = psycopg2.connect(database=db, host='localhost', port=5556, user='mesonet')%0A ncursor = newpg.cursor()%0A%0A tables = %5B%5D%0A ocursor.execute(%22%22%22SELECT table_name%0AFROM information_schema.tables WHERE table_schema = 'public'%0AORDER BY table_name%22%22%22)%0A for row in ocursor:%0A tables.append(row%5B0%5D)%0A%0A for table in tables:%0A ocursor.execute(%22%22%22SELECT count(*) from %22%22%22+table)%0A ncursor.execute(%22%22%22SELECT count(*) from %22%22%22+table)%0A orow = ocursor.fetchone()%0A nrow = ncursor.fetchone()%0A if orow%5B0%5D != nrow%5B0%5D:%0A print(%22%25s-%3E%25s old:%25s new:%25s%22 %25 (db, table, orow%5B0%5D, nrow%5B0%5D))%0A%0A
|
|
b61a423497c21fa4df818c8b5e5eaea788eb84ea
|
add ia_cdx_checker
|
scripts/ia_cdx_checker/ia_cdx_checker.py
|
scripts/ia_cdx_checker/ia_cdx_checker.py
|
Python
| 0.000216
|
@@ -0,0 +1,1656 @@
+#!/usr/bin/env python%0A%22%22%22%0A$ python ia_cdx_checker.py elxn42-tweets-urls-fixed-uniq-no-count.txt %7C cat %3E elx42_urls_in_ia.txt%0A%22%22%22%0A%0Afrom __future__ import print_function%0A%0Aimport sys%0Aimport json%0Aimport fileinput%0Aimport io%0Afrom urllib2 import Request, urlopen, URLError, HTTPError%0A%0Afor line in fileinput.input():%0A elx42_url = line.rstrip('%5Cn')%0A try:%0A url = 'http://web.archive.org/cdx/search/cdx?url=' + elx42_url + '&output=json&limit=-2'%0A request = Request(url, headers=%7B'User-Agent': %22Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; .NET CLR 1.1.4322; .NET CLR 2.0.50727; .NET CLR 3.0.04506.30)%22%7D)%0A jsonData = urlopen(request)%0A data = json.load(jsonData)%0A first_date = data%5B1%5D%5B1%5D%0A second_date = data%5B2%5D%5B1%5D%0A if first_date.startswith('201508'):%0A print(elx42_url)%0A if first_date.startswith('201509'):%0A print(elx42_url)%0A if first_date.startswith('201510'):%0A print(elx42_url)%0A if first_date.startswith('201511'):%0A print(elx42_url)%0A if first_date.startswith('201512'):%0A print(elx42_url)%0A if second_date.startswith('201508'):%0A print(elx42_url)%0A if second_date.startswith('201509'):%0A print(elx42_url)%0A if second_date.startswith('201510'):%0A print(elx42_url)%0A if second_date.startswith('201511'):%0A print(elx42_url)%0A if second_date.startswith('201512'):%0A print(elx42_url)%0A except HTTPError as e:%0A status_code = e.code%0A except IndexError as d:%0A index_error = d%0A except ValueError as f:%0A value_error = f%0A
|
|
22ba7e7bfce711257f055733ecd260b8e61ced91
|
Add example script to parse CapnProto traces
|
src/Backends/SynchroTraceGen/scripts/stgen_capnp_parser.py
|
src/Backends/SynchroTraceGen/scripts/stgen_capnp_parser.py
|
Python
| 0
|
@@ -0,0 +1,3685 @@
+#!/bin/python%0A%22%22%22%0AThis script demonstrates parsing a CapnProto SynchroTrace event trace.%0AThe 'STEventTrace.capnp' file must exist in the sys.path.%0AAdd its directory to the PYTHONPATH environmental variable or%0Acopy it to the current working directory.%0A%0AThe pycapnp library is required:%0ASee http://jparyani.github.io/pycapnp/install.html for further details.%0A%0AGenerate the *.capnp.bin file with:%0A bin/sigil2 --backend=stgen -l capnp --executable=...%0A%0ARun this script as:%0A ./stgen_capnp_parser.py sigil.events-#.capnp.bin.gz%0A OR%0A gunzip sigil.events-#.capnp.bin.gz%0A ./stgen_capnp_parser.py sigil.events-#.capnp.bin%0A%22%22%22%0A%0Aimport sys%0Aimport os%0Afrom warnings import warn%0Aimport capnp%0Aimport STEventTrace_capnp%0A%0A%0Adef processSTEventTrace(file):%0A for stream in (STEventTrace_capnp.EventStream%0A .read_multiple_packed(file, traversal_limit_in_words=2**63)):%0A for event in stream.events:%0A which = event.which()%0A if which == 'comp':%0A event.comp.iops # IOPs value%0A event.comp.flops # FLOPs value%0A event.comp.writes # writes value%0A event.comp.reads # reads value%0A for write in event.comp.writeAddrs:%0A write.start # start of address range%0A write.end # end of address range%0A for read in event.comp.writeAddrs:%0A read.start # start of address range%0A read.end # end of address range%0A elif which == 'comm':%0A for edge in event.comm.edges:%0A # the thread-event tuple that generated%0A # this communication edge%0A edge.producerThread%0A edge.producerEvent%0A for addr in edge.addrs:%0A addr.start # start of address range%0A addr.end # end of address range%0A elif which == 'sync':%0A if event.sync.type == 'spawn':%0A event.sync.id # spawned thread id%0A elif event.sync.type == 'join':%0A event.sync.id # joined thread id%0A elif event.sync.type == 'barrier':%0A event.sync.id # barrier id%0A elif event.sync.type == 'sync':%0A event.sync.id%0A elif event.sync.type == 'lock':%0A event.sync.id # lock mutex%0A elif 
event.sync.type == 'unlock':%0A event.sync.id # unlock mutex%0A elif event.sync.type == 'condWait':%0A event.sync.id # condition variable%0A elif event.sync.type == 'condSignal':%0A event.sync.id # condition variable%0A elif event.sync.type == 'condBroadcast':%0A event.sync.id # condition variable%0A elif event.sync.type == 'spinLock':%0A event.sync.id # lock id%0A elif event.sync.type == 'spinUnlock':%0A event.sync.id # unlock id%0A else:%0A raise Exception('unhandled sync event')%0A elif which == 'marker':%0A # the number of instructions since the last marker%0A event.marker.count%0A%0Aif __name__ == '__main__':%0A filepath = sys.argv%5B1%5D%0A name, ext = os.path.splitext(filepath)%0A if ext == '.gz':%0A # https://github.com/jparyani/pycapnp/issues/80%0A f = os.popen('cat ' + filepath + ' %7C gzip -d')%0A else:%0A if ext != '.bin':%0A warn('not a .bin file')%0A f = open(filepath, 'r')%0A processSTEventTrace(f)%0A
|
|
a1d2023aa6e8baa89747497e69a0a79fe1a27bdd
|
Drop ProjectPlan table
|
migrations/versions/36d7c98ddfee_drop_projectplan_table.py
|
migrations/versions/36d7c98ddfee_drop_projectplan_table.py
|
Python
| 0
|
@@ -0,0 +1,405 @@
+%22%22%22Drop ProjectPlan table%0A%0ARevision ID: 36d7c98ddfee%0ARevises: 12569fada93%0ACreate Date: 2014-10-14 11:25:48.151275%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '36d7c98ddfee'%0Adown_revision = '12569fada93'%0A%0Afrom alembic import op%0A%0A%0Adef upgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.drop_table('project_plan')%0A%0A%0Adef downgrade():%0A raise NotImplementedError%0A
|
|
730b208b490290c84bde8aa017a8f556d457d729
|
add list to integer
|
resource-4/combinatorics/digits/list-to-integer.py
|
resource-4/combinatorics/digits/list-to-integer.py
|
Python
| 0.002321
|
@@ -0,0 +1,84 @@
+def listToInt(listt,base=2):%0A%09return reduce(lambda x,y:base*x+y,reversed(listt), 0)%0A
|
|
5e3bc841800bb4e92df5871d97559810d67d7660
|
Create __init__.py
|
cmd/__init__.py
|
cmd/__init__.py
|
Python
| 0.000429
|
@@ -0,0 +1,22 @@
+__version__ = '0.0.0'%0A
|
|
2acb5d6da30c9b25eb05ca7a0da77bdaa45499a5
|
Create cod_variable.py
|
cod_variable.py
|
cod_variable.py
|
Python
| 0.000085
|
@@ -0,0 +1,357 @@
+def encode_single_vb(n):%0A byts = %5B%5D%0A while True:%0A byts.append(n %25 128)%0A if n %3C 128:%0A break%0A n //= 128%0A byts = byts%5B::-1%5D%0A byts%5B-1%5D += 128%0A return %5Bbin(n)%5B2:%5D for n in byts%5D%0A%0A%0Adef encode_vb(numbers):%0A bytestream = %5B%5D%0A for n in numbers:%0A bytestream.extend(encode_single_vb(n))%0A return bytestream%0A
|
|
624c133ba1afdb904e31742ac5f00a76859ab5b7
|
Write some docs for the response object
|
buffer/response.py
|
buffer/response.py
|
class ResponseObject(dict):
    """A dict whose keys are also accessible as attributes.

    Nested mappings are converted recursively, so ``obj.a.b`` works for
    ``ResponseObject({'a': {'b': 1}})``.  Because ``__dict__`` is bound to
    the mapping itself, attribute assignment and item assignment stay in
    sync: after ``obj.key = 'value'``, ``obj['key'] == 'value'``.
    """

    def __init__(self, *args, **kwargs):
        super(ResponseObject, self).__init__(*args, **kwargs)
        # Bind the attribute namespace to the dict itself so attribute
        # access/assignment and item access/assignment share one storage.
        self.__dict__ = self._check_for_inception(self)

    def _check_for_inception(self, root_dict):
        """Recursively wrap nested mappings so they expose attributes too."""
        for key in root_dict:
            value = root_dict[key]
            # isinstance (rather than the original exact ``type(...) == dict``
            # check) also converts dict subclasses; values that are already
            # ResponseObjects are left untouched to avoid pointless rewrapping.
            if isinstance(value, dict) and not isinstance(value, ResponseObject):
                root_dict[key] = ResponseObject(value)
        return root_dict

    def set_for(self, cls):
        """Rebind *cls*'s attribute namespace to this object's mapping."""
        cls.__dict__ = self.__dict__
|
Python
| 0.000018
|
@@ -16,24 +16,333 @@
bject(dict):
+%0A '''%0A Simple data structure that convert any dict to an empty object%0A where all the atributes are the keys of the dict, but also preserve a dict%0A behavior%0A e.g:%0A%0A obj = ResponseObject(%7B'a':'b'%7D)%0A obj.key = 'value'%0A%0A obj.a =%3E 'b'%0A obj =%3E %7B'a': 'b', 'key': 'value'%7D%0A '''
%0A%0A def __in
@@ -526,16 +526,81 @@
t_dict):
+%0A '''%0A Used to check if there is a dict in a dict%0A '''
%0A%0A fo
|
d22242bda1a15cf59e395177c44b6d2701a5e246
|
add code to replicate issue #376
|
tests_on_large_datasets/redd_house3_f1_score.py
|
tests_on_large_datasets/redd_house3_f1_score.py
|
Python
| 0
|
@@ -0,0 +1,1214 @@
+from __future__ import print_function, division%0Afrom nilmtk import DataSet, HDFDataStore%0Afrom nilmtk.disaggregate import fhmm_exact%0Afrom nilmtk.metrics import f1_score%0Afrom os.path import join%0Aimport matplotlib.pyplot as plt%0A%0A%22%22%22%0AThis file replicates issue #376 (which should now be fixed)%0Ahttps://github.com/nilmtk/nilmtk/issues/376%0A%22%22%22%0A%0Adata_dir = '/data/REDD'%0Abuilding_number = 3%0Adisag_filename = join(data_dir, 'disag-fhmm' + str(building_number) + '.h5')%0A%0Adata = DataSet(join(data_dir, 'redd.h5'))%0Aprint(%22Loading building %22 + str(building_number))%0Aelec = data.buildings%5Bbuilding_number%5D.elec%0A%0Atop_train_elec = elec.submeters().select_top_k(k=5)%0Afhmm = fhmm_exact.FHMM()%0Afhmm.train(top_train_elec)%0A%0Aoutput = HDFDataStore(disag_filename, 'w')%0Afhmm.disaggregate(elec.mains(), output)%0Aoutput.close()%0A%0A### f1score fhmm%0Adisag = DataSet(disag_filename)%0Adisag_elec = disag.buildings%5Bbuilding_number%5D.elec%0A%0Af1 = f1_score(disag_elec, elec)%0Af1.index = disag_elec.get_labels(f1.index)%0Af1.plot(kind='barh')%0Aplt.ylabel('appliance');%0Aplt.xlabel('f-score');%0Aplt.title(%22FHMM%22);%0Aplt.savefig(join(data_dir, 'f1-fhmm' + str(building_number) + '.png'))%0Adisag.store.close()%0A####%0Aprint(%22Finishing building %22 + str(building_number))%0A
|
|
80bf877306a78a63cf7752975f980a2d435f7d5e
|
Add standard services and lazy service wrapper
|
polyaxon/libs/services.py
|
polyaxon/libs/services.py
|
Python
| 0
|
@@ -0,0 +1,1870 @@
+import inspect%0Aimport itertools%0Aimport logging%0A%0Afrom django.utils.functional import empty, LazyObject%0A%0Afrom libs.imports import import_string%0A%0A%0Alogger = logging.getLogger(__name__)%0A%0A%0Aclass InvalidService(Exception):%0A pass%0A%0A%0Aclass Service(object):%0A __all__ = ()%0A%0A def validate(self):%0A %22%22%22Validate the settings for this backend (i.e. such as proper connection info).%0A%0A Raise %60%60InvalidService%60%60 if there is a configuration error.%0A %22%22%22%0A%0A def setup(self):%0A %22%22%22Initialize this service.%22%22%22%0A%0A%0Aclass LazyServiceWrapper(LazyObject):%0A %22%22%22Lazyily instantiates a Polyaxon standard service class.%0A%0A %3E%3E%3E LazyServiceWrapper(BaseClass, 'path.to.import.Backend', %7B%7D)%0A%0A Provides an %60%60expose%60%60 method for dumping public APIs to a context, such as module locals:%0A%0A %3E%3E%3E service = LazyServiceWrapper(...)%0A %3E%3E%3E service.expose(locals())%0A %22%22%22%0A%0A def __init__(self, backend_base, backend_path, options):%0A super(LazyServiceWrapper, self).__init__()%0A self.__dict__.update(%0A %7B%0A 'backend_base': backend_base,%0A '_backend_path': backend_path,%0A '_options': options,%0A %7D%0A )%0A%0A def __getattr__(self, name):%0A if self._wrapped is empty:%0A self._setup()%0A return getattr(self._wrapped, name)%0A%0A def _setup(self):%0A backend = import_string(self._backend_path)%0A assert issubclass(backend, Service)%0A instance = backend(**self._options)%0A self._wrapped = instance%0A%0A def expose(self, context):%0A base = self.backend_base%0A for key in itertools.chain(base.__all__, ('validate', 'setup')):%0A if inspect.ismethod(getattr(base, key)):%0A context%5Bkey%5D = (lambda f: lambda *a, **k: getattr(self, f)(*a, **k))(key)%0A else:%0A context%5Bkey%5D = getattr(base, key)%0A
|
|
e204dd02b44066b28d09c0143cdeec557ff420fd
|
add a module for effects that can be applied to waveforms
|
potty_oh/effects.py
|
potty_oh/effects.py
|
Python
| 0
|
@@ -0,0 +1,932 @@
+# Copyright 2016 Curtis Sand%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%22%22%22effects.py: a library of effects to apply to waveforms.%22%22%22%0A%0Afrom matplotlib import pyplot%0A%0Afrom .waveform import Waveform%0A%0A%0Adef normalize(waveform):%0A if isinstance(waveform, Waveform):%0A wavedata = waveform.frames%0A else:%0A wavedata = waveform%0A peak = max(wavedata)%0A wavedata *= 1.0 / peak%0A return wavedata%0A
|
|
b9a8d24048a5c5b83a996f9fb1b2b07857a56db0
|
unwind the changes to tracker main
|
heron/tracker/src/python/main.py
|
heron/tracker/src/python/main.py
|
import os
import sys
import tornado.httpserver
import tornado.ioloop
import tornado.web
from tornado.escape import json_encode, utf8
from tornado.options import define, options
from heron.tracker.src.python import handlers
from heron.tracker.src.python import log
from heron.tracker.src.python.log import Log as LOG
from heron.tracker.src.python.tracker import Tracker
define("stateconf", default='filestateconf', help="Yaml config file without extension for state locations")
define("port", default=8888, type=int, help="HTTP port to run the Tracker")
class Application(tornado.web.Application):
def __init__(self):
tracker = Tracker()
self.tracker = tracker
tracker.synch_topologies(options.stateconf)
tornadoHandlers = [
(r"/", handlers.MainHandler),
(r"/topologies", handlers.TopologiesHandler, {"tracker":tracker}),
(r"/topologies/states", handlers.StatesHandler, {"tracker":tracker}),
(r"/topologies/info", handlers.TopologyHandler, {"tracker":tracker}),
(r"/topologies/logicalplan", handlers.LogicalPlanHandler, {"tracker":tracker}),
(r"/topologies/physicalplan", handlers.PhysicalPlanHandler, {"tracker":tracker}),
(r"/topologies/executionstate", handlers.ExecutionStateHandler, {"tracker":tracker}),
(r"/topologies/metrics", handlers.MetricsHandler, {"tracker":tracker}),
(r"/topologies/metricstimeline", handlers.MetricsTimelineHandler, {"tracker":tracker}),
(r"/topologies/metricsquery", handlers.MetricsQueryHandler, {"tracker":tracker}),
(r"/topologies/exceptions", handlers.ExceptionHandler, {"tracker":tracker}),
(r"/topologies/exceptionsummary", handlers.ExceptionSummaryHandler, {"tracker":tracker}),
(r"/machines", handlers.MachinesHandler, {"tracker":tracker}),
(r"/topologies/pid", handlers.PidHandler, {"tracker":tracker}),
(r"/topologies/jstack", handlers.JstackHandler, {"tracker":tracker}),
(r"/topologies/jmap", handlers.JmapHandler, {"tracker":tracker}),
(r"/topologies/histo", handlers.MemoryHistogramHandler, {"tracker":tracker}),
(r"(.*)", handlers.DefaultHandler),
]
settings = dict(
static_path=os.path.dirname(__file__)
)
tornado.web.Application.__init__(self, tornadoHandlers, **settings)
def main():
log.configure(log.logging.DEBUG)
options.parse_command_line()
port = options.port
LOG.info("Running on port: " + str(port))
http_server = tornado.httpserver.HTTPServer(Application())
http_server.listen(port)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
|
Python
| 0.000001
|
@@ -393,20 +393,18 @@
efault='
-file
+zk
statecon
|
451e8f3ea4765051088ea1c84f81e32691591d89
|
Create __init__.py
|
core/__init__.py
|
core/__init__.py
|
Python
| 0.000429
|
@@ -0,0 +1,22 @@
+#!/usr/bin/env python%0A
|
|
65aa1424f7ea8e184180d93e790b1ece6705775d
|
fix missing coma
|
addons/purchase_requisition/__openerp__.py
|
addons/purchase_requisition/__openerp__.py
|
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Purchase Requisitions',
'version': '0.1',
'author': 'OpenERP SA',
'category': 'Purchase Management',
'images': ['images/purchase_requisitions.jpeg'],
'website': 'http://www.openerp.com',
'description': """
This module allows you to manage your Purchase Requisition.
===========================================================
When a purchase order is created, you now have the opportunity to save the
related requisition. This new object will regroup and will allow you to easily
keep track and order all your purchase orders.
""",
'depends' : ['purchase'],
'js': [
'static/src/js/web_addons.js',
],
'demo': ['purchase_requisition_demo.xml'],
'data': ['security/purchase_tender.xml',
'wizard/purchase_requisition_partner_view.xml',
'wizard/bid_line_qty_view.xml',
'purchase_requisition_data.xml',
'purchase_requisition_view.xml',
'purchase_requisition_report.xml',
'purchase_requisition_workflow.xml',
'security/ir.model.access.csv','purchase_requisition_sequence.xml'
'views/report_purchaserequisition.xml',
],
'auto_install': False,
'test': [
'test/purchase_requisition_users.yml',
'test/purchase_requisition_demo.yml',
'test/cancel_purchase_requisition.yml',
'test/purchase_requisition.yml',
],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Python
| 0.000012
|
@@ -2081,16 +2081,17 @@
nce.xml'
+,
%0A
|
72738366fa074b457021faab0c21c3b89070b5ad
|
Add first revision of Nautilus extension.
|
nautilus/wizbit-extension.py
|
nautilus/wizbit-extension.py
|
Python
| 0
|
@@ -0,0 +1,2840 @@
+from urlparse import urlparse%0Afrom os.path import exists, split, isdir%0A%0Aimport nautilus%0Afrom lxml import etree%0A%0AWIZ_CONTROLLED = %22wiz-controlled%22%0AWIZ_CONFLICT = %22wiz-conflict%22%0A%0AYES = %22Yes%22%0ANO = %22No%22%0A%0Aclass WizbitExtension(nautilus.ColumnProvider, nautilus.InfoProvider):%0A def __init__(self):%0A pass%0A%0A def get_columns(self):%0A return %5Bnautilus.Column(%22NautilusWizbit::is_controlled%22,%0A WIZ_CONTROLLED,%0A %22Wizbit Controlled%22,%0A %22File may be syncronized by Wizbit%22),%0A%0A nautilus.Column(%22NautilusWizbit::has_conflict%22,%0A WIZ_CONFLICT,%0A %22Wizbit Conflict%22,%0A %22File may have multiple versions that need to be resolved%22)%5D%0A%0A def update_file_info(self, file):%0A controlled = False%0A conflict = False%0A%0A (scheme, netloc, path, params, query, fragment) = urlparse(file.get_uri())%0A%0A if scheme != 'file':%0A return%0A %0A wizpath = self.get_wizpath(path)%0A if wizpath:%0A if isdir(path):%0A controlled = True%0A else:%0A try:%0A repos = etree.parse (wizpath + %22/.wizbit/repos%22)%0A except IOError:%0A pass%0A else:%0A #Find if file is controlled%0A files = %5Bf.text for f in repos.getroot().xpath(%22/wizbit/repo/file%22)%5D%0A (path, filename) = split(path)%0A if filename in files:%0A controlled = True%0A %0A #Find if file is conflicting%0A repel = repos.getroot().xpath(%22/wizbit/repo%22)%0A for r in repel:%0A if r.get(%22name%22) == filename + %22.git%22:%0A heads = %5Bh for h in r if h.tag == %22head%22%5D%0A if len(heads) %3E 1:%0A conflict = True%0A%0A if controlled:%0A file.add_emblem(%22cvs-controlled%22)%0A file.add_string_attribute(WIZ_CONTROLLED, YES)%0A else:%0A file.add_string_attribute(WIZ_CONTROLLED, NO)%0A%0A if conflict:%0A file.add_emblem(%22cvs-conflict%22)%0A file.add_string_attribute(WIZ_CONFLICT, YES)%0A else:%0A file.add_string_attribute(WIZ_CONFLICT, NO)%0A%0A def get_wizpath(self, path):%0A if exists(path + %22/.wizbit/repos%22):%0A return path%0A else:%0A (head, tail) 
= split(path)%0A if head != '/':%0A return self.get_wizpath(head)%0A else:%0A if exists(%22/.wizbit/repos%22):%0A return head%0A else:%0A return %22%22%0A
|
|
2a9858381a78bd9ff9ff459a23f73630237e6669
|
send vm
|
weibo_data_input.py
|
weibo_data_input.py
|
Python
| 0
|
@@ -0,0 +1,1179 @@
+__author__ = 'heipiao'%0A# -*- coding: utf-8 -*-%0Afrom weibo import APIClient%0Aimport urllib2%0Aimport urllib%0A%0A#APP_KEY%E5%92%8CAPP_SECRET%EF%BC%8C%E9%9C%80%E8%A6%81%E6%96%B0%E5%BB%BA%E4%B8%80%E4%B8%AA%E5%BE%AE%E5%8D%9A%E5%BA%94%E7%94%A8%E6%89%8D%E8%83%BD%E5%BE%97%E5%88%B0%0AAPP_KEY = '3722673574'%0AAPP_SECRET = '7a6de53498caf87e655a98fa2f8912bf'%0A#%E7%AE%A1%E7%90%86%E4%B8%AD%E5%BF%83---%E5%BA%94%E7%94%A8%E4%BF%A1%E6%81%AF---%E9%AB%98%E7%BA%A7%E4%BF%A1%E6%81%AF%EF%BC%8C%E5%B0%86%22%E6%8E%88%E6%9D%83%E5%9B%9E%E8%B0%83%E9%A1%B5%22%E7%9A%84%E5%80%BC%E6%94%B9%E6%88%90https://api.weibo.com/oauth2/default.html%0ACALLBACK_URL = 'https://api.weibo.com/oauth2/default.html'%0AAUTH_URL = 'https://api.weibo.com/oauth2/authorize'%0A%0Adef GetCode(userid,passwd):%0A client = APIClient(app_key = APP_KEY, app_secret=APP_SECRET, redirect_uri=CALLBACK_URL)%0A referer_url = client.get_authorize_url()%0A postdata = %7B%0A %22action%22: %22login%22,%0A %22client_id%22: APP_KEY,%0A %22redirect_uri%22:CALLBACK_URL,%0A %22userId%22: userid,%0A %22passwd%22: passwd,%0A %7D%0A%0A headers = %7B%0A %22User-Agent%22:%22Mozilla/5.0 (Windows NT 6.1; WOW64; rv:25.0) Gecko/20100101 Firefox/25.0%22,%0A %22Referer%22:referer_url,%0A %22Connection%22:%22keep-alive%22%0A %7D%0A req = urllib2.Request(%0A url = AUTH_URL,%0A data = urllib.urlencode(postdata),%0A headers = headers%0A )%0A resp = urllib2.urlopen(req)%0A return resp.geturl()%5B-32:%5D%0Aif __name__ == %22__main__%22:%0A print GetCode(%2215029357121%22,%22liu8315%22)
|
|
7b899fbcf7a661758ab2a9cdca7ade6c461c8e65
|
add c model
|
transiNXOR_modeling/caffe2_tensor_to_c_array.py
|
transiNXOR_modeling/caffe2_tensor_to_c_array.py
|
Python
| 0.000001
|
@@ -0,0 +1,863 @@
+import sys%0Asys.path.append('../')%0Aimport caffe2_paths%0Afrom caffe2.python import workspace%0Afrom pinn import exporter%0Afrom scipy.io import savemat%0Aimport numpy as np%0Aimport pickle%0A%0Amodel_name = 'bise_h216_0'%0A%0Ainit_net = exporter.load_init_net('./transiXOR_Models/'+model_name+'_init')%0Aprint(type(init_net))%0Awith open(%22c_model/c_arrays.txt%22,%22w%22) as f:%0A%09for op in init_net.op:%0A%09%09tensor = workspace.FetchBlob(op.output%5B0%5D)%0A%09%09tensor_name = op.output%5B0%5D.replace('/', '_')%0A%09%09print(tensor_name)%0A%09%09print(tensor.shape)%0A%09%09tensor_str = np.array2string(tensor.flatten(), separator=',')%0A%09%09tensor_str = tensor_str.replace(%22%5B%22, %22%7B%22).replace(%22%5D%22, %22%7D%22)%0A%09%09str = 'float ' + tensor_name + '%5B%5D = ' + tensor_str + ';%5Cn'%0A%09%09f.write(str)%0A%0A%09## Preprocess param%0A%09with open(%22./transiXOR_Models/%22+model_name+%22_preproc_param.p%22, %22rb%22) as f:%0A%09%09preproc_dict = pickle.load(f)%0A%09print(preproc_dict)%0A%0A
|
|
27275d932d0fc8fe3d8079c8c5e07793ab0c0271
|
Update main.py
|
device/src/main.py
|
device/src/main.py
|
#This is the file executing while STM32 MCU bootup, and in this file,
#it will call other functions to fullfill the project.
#Communication module: LoRa.
#Communication method with gateway via LoRa.
#Uart port drive LoRa module.
#Parse JSON between device and gateway via LoRa channel.
#LoRa module: E32-TTL-100
#Pin specification:
#Module MCU
#M0(IN) <--> GPIO(Y3)(OUT) #mode setting, can not hang
#M1(IN) <--> GPIO(Y4)(OUT) #mode setting, can not hang
#RXD(IN) <--> Y1(TX)(OUT) #UART6
#TXD(OUT) <--> Y2(RX)(IN) #UART6
#AUX(OUT) <--> GPIO/INT(IN) #module status detecting
#VCC
#GND
#Communication mode is 0, need to set M0 and M1 to 0.
import pyb
from pyb import Pin
from pyb import Timer
from pyb import UART
import micropython
import irrigate
#Import light intensity needed module
import LightIntensity
import moisture
import rainfall
import security
import time
import json
micropython.alloc_emergency_exception_buf(100)
Pin('Y11',Pin.OUT_PP).low() #GND
Pin('Y9',Pin.OUT_PP).high() #VCC
#Set LoRa module with mode-0.
M0 = Pin('Y3', Pin.OUT_PP)
M1 = Pin('Y4', Pin.OUT_PP)
M0.low()
M1.low()
#Init uart4 for LoRa module.
lora_uart = UART(6,9600)
lora_uart.init(9600, bits=8, parity=None, stop=1)
#Send Online command to gateway while it power on to obtain its status data from gateway's database.
#lora_uart.write('{"ID":"1", "CMD":"Online", "TYPE":"N", "VALUE":"N"}\n')
#time.sleep(1)
#lora_uart.write('{"ID":"1", "CMD":"ENV", "TYPE":"moisture", "VALUE":"1800"}\n')
#LED shining regularly(using timer) to indicate the program is running correctly
tim1 = Timer(1, freq=1)
tim1.callback(lambda t: pyb.LED(1).toggle())
#Read the light intensity value from sensor regularly.
lightVlaue = 0
#time2 callback function, obtain value from light intensity sensor and send it to gateway via LoRa module.
#Warning: interruput function can not execute complex task suck print(), otherwise it will execute only one time and die.
def getLightInten():
global lightValue
# lightValue = LightIntensity.readLight()
# #print(lightValue)
# lora_uart.write('{"ID":"1", "CMD":"ENV", "TYPE":"light", "VALUE":"+lightValue+"}\n')
# lora_uart.write('{"ID":"1", "CMD":"ENV", "TYPE":"moisture", "VALUE":"1800"}\n')
'''
#Get soil moisture and send it to gateway, if the current value is lower than standard, gateway
#will send 'irrigate' command to device, device will control steering engine to open the tap and water the plants.
def moisture():
global moisture
moisture = moisture.readMoisture()
lora_uart.write('{"ID":"1", "CMD":"ENV", "TYPE":"moisture", "VALUE":"+moisture+"}\n')
'''
#tim2 = Timer(2, freq=1)
#tim2.callback(getLightInten())
if __name__=='__main__':
while True:
print('------------------------------------')
print('1. Light Intensity:')
lightValue = LightIntensity.readLight()
print(lightValue)
print('')
if lightValue > 1000:
irrigate.irrigate_start()
else:
irrigate.irrigate_stop()
print('2. Soil Moisture:')
#Python func call need to (), otherwise : function getMoisAo at 0x20003d20
print(moisture.getMoisAo())
print('')
print('3. Security Status:')
print(security.detectMotion())
print('')
print('4. Rainfall:')
print(rainfall.getRainAo())
print('')
time.sleep(3)
'''
#Waiting for the message from UART4 to obtain LoRa data.
len = lora_uart.any()
if(len > 0):
recv = lora_uart.read()
print(recv)
json_lora = json.loads(recv)
#Parse JSON from gateway.
if (json_lora.get("CMD") == 'Online' and json_lora.get("TYPE") == 'Light2' ): #Control the light(led on TPYBoard)
print('light2')
if json_lora.get("VALUE") == 'On':
pyb.LED(2).on()
# lora_uart.write('{"ID":"1", "CMD":"ENV", "TYPE":"moisture", "VALUE":"1800"}\n')
else:
pyb.LED(2).off()
elif json_lora.get("CMD") == 'irrigate': # irrigate the plants
if json_lora.get("VALUE") == 'Open':
irrigate.irrigate_start()
else:
irrigate.irrigate_stop()
'''
|
Python
| 0.000001
|
@@ -869,16 +869,34 @@
ainfall%0A
+import WaterLevel%0A
import s
@@ -3318,24 +3318,106 @@
print('')
+%0A%0A print('5. WaterLevel:')%0A print(WaterLevel.getWaterLevel())%09%0A print('')
%0A%09%0A time.
|
c9cd743151b3bbef3e484613f58e2c8b462ee6f4
|
initialize new_family in ReadDirectory
|
directory_tools.py
|
directory_tools.py
|
#!/usr/bin/env python
"""This module contains the tools to injest a MemberHub directory dump into
a dictionary, and display portions of the dictionary for analysis.
"""
import family
def ReadDirectory():
"""directory_tools.ReadDirectory
INPUTS:
Prompts user for name of comma-separated text file containing MemberHub directory dump.
OUTPUTS:
Dictionary keyed by file line number with values that are dictionaries with the following keys:
This function assumes the directory dump fields are in this order:
1. "person_id"
2. "last_name"
3. "first_name"
4. "middle_name"
5. "suffix"
6. "email"
7. "family_id"
8. "family_relation"
9. "maiden_name"
10. "born_on"
11. "gender"
12. "parents"
13. "street"
14. "city"
15. "state"
16. "zip"
17. "home_number"
18. "work_number"
19. "work_number_ext"
20. "fax_number"
21. "mobile_number":
22. "mobile_provider"
23. "allow_sms"
24. "hubs"
25. "hubs_administered"
26. "person_created"
27. "person_updated"
28. "account_created"
29. "account_updated"
30. "last_login"
ASSUMPTIONS:
1. Each line should contain exactly 30 fields that are separated by just a comma (','),
and none of the fields contain commas.
2. Lines that contain blank first or last names will be flagged, but not added to the
output dictionary.
3. Each line ends in up to 3 new line escape characters, but the last field is last login,
which should not be used.
"""
directory = [] # empty list
lines_read = lines_processed = families_created = 0
# initializing a dictionary
file_name = raw_input('Enter name of directory dump file: ')
try:
open_file = open(file_name)
title_line = open_file.readline()
fields = title_line.split(',')
if not len(fields) == 30:
print "The file %s does not contain 30 fields, and cannot be parsed." % file_name
print "The following fields were found:"
print fields
return []
for line in open_file:
lines_read += 1
fields = line.split(',')
if not len(fields) == 30:
print "Incorrect number of fields found on or near line %d. Line will not be processed." % (count+1)
print "The following fields were read on this line:"
print fields
elif fields[1] == "" or fields[2] == "":
print "Found a blank name on or near line %d. Line will not be procssed." % (count+1)
print "The following fields were read on this line:"
print fields
else:
lines_processed += 1
# according to MemberHub.com, a new family begins each time the family_relation (field 7)
# is not numbered (i.e. just "Adult" or just "Child"), so start a new family when that
# condition is detected
if fields[7].lower() in ("adult", "child"):
# to start processing a new family, append the family previously worked on (if
# it exists), then instantiate a new Family class
if new_family:
directory.append(new_family)
new_family = family.Family()
families_created += 1
if fields[7][:5].lower() == "adult":
new_family.AddAdultFromDirectory(fields)
elif fields[7][:5].lower() == "child":
new_family.AddChildFromDirectory(fields)
else:
print "Found entry in directory that is neither an adult nor a child."
print "The following fields were read on this line:"
print fields
else:
if new_family:
directory.append(new_family)
print "Read %d lines, processed %d lines, and created %d families from directory file" % \
(lines_read, lines_processed, families_created)
finally:
open_file.close()
return directory
def Print(directory):
while True:
end_entry = int(raw_input('Enter entry at which to stop printing (enter 0 to stop): '))
if end_entry == 0:
break
elif end_entry > len(directory):
end_entry = len(directory)
start_entry = int(raw_input('Enter entry from which to start printing: '))
if start_entry < 0:
start_entry += end_entry
for x in directory[start_entry, end_entry]:
x.Print()
def main():
directory = ReadDirectory()
Print(directory)
if __name__ == '__main__':
main()
|
Python
| 0.000007
|
@@ -1760,16 +1760,39 @@
ated = 0
+%0A new_family = False
%0A%0A #
|
f3cab8d72b9a070305f4f2c44922e381ea091205
|
add context manager example
|
examples/context_manager.py
|
examples/context_manager.py
|
Python
| 0.000063
|
@@ -0,0 +1,779 @@
+# -*- coding: utf-8 -*-%0Aimport riprova%0A%0A# Store number of function calls for error simulation%0Acalls = 0%0A%0A%0A# Register retriable operation with custom evaluator%0Adef mul2(x):%0A global calls%0A%0A if calls %3C 4:%0A calls += 1%0A raise RuntimeError('simulated call error')%0A%0A return x * 2%0A%0A%0A# Run task via context manager%0Awith riprova.Retrier() as retry:%0A result = retry.run(mul2, 2)%0A print('Result 1: %7B%7D'.format(result))%0A%0A%0A# Or alternatively create a shared retrier and reuse it across multiple%0A# context managers.%0Aretrier = riprova.Retrier()%0A%0Awith retrier as retry:%0A calls = 0%0A result = retry.run(mul2, 4)%0A print('Result 2: %7B%7D'.format(result))%0A%0Awith retrier as retry:%0A calls = 0%0A result = retry.run(mul2, 8)%0A print('Result 3: %7B%7D'.format(result))%0A
|
|
24cf3c2676e4ea7342e95e6a37857c6fa687865e
|
Remove managers for article obj.
|
src/submission/migrations/0058_auto_20210812_1254.py
|
src/submission/migrations/0058_auto_20210812_1254.py
|
Python
| 0
|
@@ -0,0 +1,411 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.29 on 2021-08-12 12:54%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('submission', '0057_merge_20210811_1506'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterModelManagers(%0A name='article',%0A managers=%5B%0A %5D,%0A ),%0A %5D%0A
|
|
9687ca646dd7ae5a7ff31e5b8657fb1ab88a0f5e
|
add buildbot
|
buildbot.py
|
buildbot.py
|
Python
| 0.000001
|
@@ -0,0 +1,2629 @@
+#!/usr/bin/env python%0A# encoding: utf-8%0A%0Aimport sys%0Aimport json%0Aimport subprocess%0A%0Aproject_name = 'bourne'%0A%0A%0Adef run_command(args):%0A print(%22Running: %7B%7D%22.format(args))%0A # sys.stdout.flush()%0A # subprocess.check_call(args)%0A%0A%0Adef get_tool_options(properties):%0A options = %5B%5D%0A if 'tool_options' in properties:%0A # Make sure that the values are correctly comma separated%0A for key, value in properties%5B'tool_options'%5D.items():%0A if value is None:%0A options += %5B'--%7B0%7D'.format(key)%5D%0A else:%0A options += %5B'--%7B0%7D=%7B1%7D'.format(key, value)%5D%0A%0A return options%0A%0A%0Adef configure(properties):%0A command = %5Bsys.executable, 'waf'%5D%0A%0A if properties.get('build_distclean'):%0A command += %5B'distclean'%5D%0A%0A command += %5B'configure', '--git-protocol=git@'%5D%0A%0A if 'waf_bundle_path' in properties:%0A command += %5B'--bundle-path=' + properties%5B'waf_bundle_path'%5D%5D%0A%0A if 'dependency_project' in properties:%0A command += %5B'--%7B0%7D-use-checkout=%7B1%7D'.format(%0A properties%5B'dependency_project'%5D,%0A properties%5B'dependency_checkout'%5D)%5D%0A%0A command += %5B%22--cxx_mkspec=%7B%7D%22.format(properties%5B'cxx_mkspec'%5D)%5D%0A command += get_tool_options(properties)%0A%0A run_command(command)%0A%0A%0Adef build(properties):%0A command = %5Bsys.executable, 'waf', 'build', '-v'%5D%0A run_command(command)%0A%0A%0Adef run_tests(properties):%0A command = %5Bsys.executable, 'waf', '-v', '--run_tests'%5D%0A run_cmd = '%25s'%0A%0A if properties.get('valgrind_run'):%0A run_cmd = 'valgrind --error-exitcode=1 %25s --profile=embedded'%0A%0A if run_cmd:%0A command += %5B%22--run_cmd=%7B%7D%22.format(run_cmd)%5D%0A%0A command += get_tool_options(properties)%0A%0A run_command(command)%0A%0A%0Adef install(properties):%0A command = %5Bsys.executable, 'waf', '-v', 'install'%5D%0A%0A if 'install_path' in properties:%0A command += 
%5B'--install_path=%7B0%7D'.format(properties%5B'install_path'%5D)%5D%0A if properties.get('install_relative'):%0A command += %5B'--install_relative'%5D%0A%0A run_command(command)%0A%0A%0Adef coverage_settings(options):%0A options%5B'required_line_coverage'%5D = 0.0%0A%0A%0Adef main():%0A argv = sys.argv%0A%0A if len(argv) != 3:%0A print(%22Usage: %7B%7D %3Ccommand%3E %3Cproperties%3E%22.format(argv%5B0%5D))%0A sys.exit(0)%0A%0A cmd = argv%5B1%5D%0A properties = json.loads(argv%5B2%5D)%0A%0A if cmd == 'configure':%0A configure(properties)%0A elif cmd == 'build':%0A build(properties)%0A elif cmd == 'run_tests':%0A run_tests(properties)%0A elif cmd == 'install':%0A install(properties)%0A else:%0A print(%22Unknown command: %7B%7D%22.format(cmd))%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
0f6866a91e4d8af2faedf2af277ad0df573536aa
|
Set win_delay_load_hook to false
|
binding.gyp
|
binding.gyp
|
{
'target_defaults': {
'conditions': [
['OS=="win"', {
'msvs_disabled_warnings': [
4530, # C++ exception handler used, but unwind semantics are not enabled
4506, # no definition for inline function
],
}],
],
},
'targets': [
{
'target_name': 'runas',
'sources': [
'src/main.cc',
],
'include_dirs': [
'<!(node -e "require(\'nan\')")'
],
'conditions': [
['OS=="win"', {
'sources': [
'src/runas_win.cc',
],
'libraries': [
'-lole32.lib',
'-lshell32.lib',
],
}],
['OS=="mac"', {
'sources': [
'src/runas_darwin.cc',
'src/fork.cc',
'src/fork.h',
],
'libraries': [
'$(SDKROOT)/System/Library/Frameworks/Security.framework',
],
}],
['OS not in ["mac", "win"]', {
'sources': [
'src/runas_posix.cc',
'src/fork.cc',
'src/fork.h',
],
}],
],
}
]
}
|
Python
| 0.999875
|
@@ -18,16 +18,52 @@
lts': %7B%0A
+ 'win_delay_load_hook': 'false',%0A
'con
|
0d6da71cb759c3819133baa3d7c043fb92df425e
|
Create weibo.py
|
2/weibo.py
|
2/weibo.py
|
Python
| 0.000001
|
@@ -0,0 +1,427 @@
+from bs4 import BeautifulSoup%0Aimport json%0Aimport ConfigParser%0Aimport urllib2%0A%0Afrom util import get_content%0A%0Alink_id = 18%0A%0Acf = ConfigParser.ConfigParser()%0Acf.read(%22config.ini%22)%0A%0AweiyinUrl = 'http://wb.weiyin.cc/Book/BookView/W440164363452#' + str(link_id)%0A%0Acontent = urllib2.urlopen(weiyinUrl)%0Ahtml = content.read()%0A%0Asoup = BeautifulSoup(html)%0A%0Aweibo = soup.find(%22div%22,attrs=%22head_title%22).find(%22p%22).text%0Aprint html%0Aprint weibo%0A
|
|
666caee40af2dccc30e78d52f8de962110408146
|
Add fan device
|
xknx/devices/fan.py
|
xknx/devices/fan.py
|
Python
| 0
|
@@ -0,0 +1,2918 @@
+%22%22%22%0AModule for managing a fan via KNX.%0A%0AIt provides functionality for%0A%0A* setting fan to specific speed%0A* reading the current speed from KNX bus.%0A%22%22%22%0Aimport asyncio%0Afrom .device import Device%0Afrom .remote_value_scaling import RemoteValueScaling%0A%0Aclass Fan(Device):%0A %22%22%22Class for managing a fan.%22%22%22%0A%0A # pylint: disable=too-many-instance-attributes%0A # pylint: disable=too-many-public-methods%0A%0A def __init__(self,%0A xknx,%0A name,%0A group_address_speed=None,%0A group_address_speed_state=None,%0A device_updated_cb=None):%0A %22%22%22Initialize fan class.%22%22%22%0A # pylint: disable=too-many-arguments%0A Device.__init__(self, xknx, name, device_updated_cb)%0A%0A self.speed = RemoteValueScaling(%0A xknx,%0A group_address_speed,%0A group_address_speed_state,%0A device_name=self.name,%0A after_update_cb=self.after_update,%0A range_from=0,%0A range_to=100)%0A%0A @classmethod%0A def from_config(cls, xknx, name, config):%0A %22%22%22Initialize object from configuration structure.%22%22%22%0A group_address_speed = %5C%0A config.get('group_address_speed')%0A group_address_speed_state = %5C%0A config.get('group_address_speed_state')%0A%0A return cls(%0A xknx,%0A name,%0A group_address_speed=group_address_speed,%0A group_address_speed_state=group_address_speed_state)%0A%0A def has_group_address(self, group_address):%0A %22%22%22Test if device has given group address.%22%22%22%0A return self.speed.has_group_address(group_address)%0A%0A def __str__(self):%0A %22%22%22Return object as readable string.%22%22%22%0A return '%3CFan name=%22%7B0%7D%22 ' %5C%0A 'speed=%22%7B1%7D%22 /%3E' %5C%0A .format(%0A self.name,%0A self.speed.group_addr_str())%0A%0A @asyncio.coroutine%0A def set_speed(self, speed):%0A %22%22%22Sets the fan to a desginated speed.%22%22%22%0A yield from self.speed.set(speed)%0A%0A @asyncio.coroutine%0A def do(self, action):%0A %22%22%22Execute 'do' commands.%22%22%22%0A if action.startswith(%22speed:%22):%0A 
yield from self.set_speed(int(action%5B11:%5D))%0A else:%0A self.xknx.logger.warning(%22Could not understand action %25s for device %25s%22, action, self.get_name())%0A%0A def state_addresses(self):%0A %22%22%22Return group addresses which should be requested to sync state.%22%22%22%0A state_addresses = %5B%5D%0A state_addresses.extend(self.speed.state_addresses())%0A return state_addresses%0A%0A async def process_group_write(self, telegram):%0A %22%22%22Process incoming GROUP WRITE telegram.%22%22%22%0A await self.speed.process(telegram)%0A%0A def current_speed(self):%0A %22%22%22Return current speed of fan.%22%22%22%0A return self.speed.value%0A%0A def __eq__(self, other):%0A %22%22%22Equal operator.%22%22%22%0A return self.__dict__ == other.__dict__%0A
|
|
eefa144b7a01f6beee1fcba30af32a967598d44f
|
add tests
|
tests/test_tabela_fipe.py
|
tests/test_tabela_fipe.py
|
Python
| 0
|
@@ -0,0 +1,1315 @@
+# -*- coding: utf-8 -*-%0D%0A#%0D%0A# Copyright 2015 Alexandre Villela (SleX) %3Chttps://github.com/sxslex/sxtools/%3E%0D%0A#%0D%0A# This program is free software: you can redistribute it and/or modify%0D%0A# it under the terms of the GNU General Public License as published by%0D%0A# the Free Software Foundation, either version 3 of the License, or%0D%0A# (at your option) any later version.%0D%0A#%0D%0A# This program is distributed in the hope that it will be useful,%0D%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0D%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0D%0A# GNU General Public License for more details.%0D%0A#%0D%0A# You should have received a copy of the GNU General Public License%0D%0A# along with this program. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0D%0A#%0D%0A# by sx.slex@gmail.com%0D%0A%0D%0Afrom TabelaFipe import TabelaFipe%0D%0Aimport unittest%0D%0A# import pprint%0D%0A%0D%0A%0D%0Aclass TestTabelaFipe(unittest.TestCase):%0D%0A%0D%0A def test_01_get_by_codefipe(self):%0D%0A tb = TabelaFipe()%0D%0A resp = tb.get_by_codefipe('006008-9')%0D%0A # pprint.pprint(resp)%0D%0A self.assertIsInstance(resp, dict)%0D%0A%0D%0A def test_02_get_by_codefipe_not_exists(self):%0D%0A tb = TabelaFipe()%0D%0A resp = tb.get_by_codefipe('111111-1')%0D%0A # pprint.pprint(resp)%0D%0A self.assertIsNone(resp)%0D%0A
|
|
a467cf8e92a783112bcecc82acf7b33c31282c49
|
Bump to 3.2.0.rc2
|
cms/__init__.py
|
cms/__init__.py
|
# -*- coding: utf-8 -*-
__version__ = '3.2.0.dev4'
default_app_config = 'cms.apps.CMSConfig'
|
Python
| 0.000003
|
@@ -42,12 +42,11 @@
2.0.
-dev4
+rc2
'%0A%0Ad
|
68a7f9faf1933bb224113d9fa5d0ddd362b2e5ea
|
Add script to generate the site documentation containing the sizes of the binary shellcodes.
|
SizeDocGenerator.py
|
SizeDocGenerator.py
|
Python
| 0
|
@@ -0,0 +1,703 @@
+import os, re;%0D%0A# I got the actual size of the binary code wrong on the site once - this script should help prevent that.%0D%0A%0D%0AdsDoc_by_sArch = %7B%22w32%22: %22x86%22, %22w64%22: %22x64%22, %22win%22: %22x86+x64%22%7D;%0D%0Awith open(%22build_info.txt%22, %22rb%22) as oFile:%0D%0A iBuildNumber = int(re.search(r%22build number%5C: (%5Cd+)%22, oFile.read(), re.M).group(1));%0D%0A%0D%0Aprint %22Sizes (build %25d)%22 %25 iBuildNumber;%0D%0A%0D%0Afor sArch in sorted(dsDoc_by_sArch.keys()):%0D%0A sDoc = dsDoc_by_sArch%5BsArch%5D;%0D%0A iBinSize = os.path.getsize(r%22build%5Cbin%5C%25s-exec-calc-shellcode.bin%22 %25 sArch);%0D%0A iBinESPSize = os.path.getsize(r%22build%5Cbin%5C%25s-exec-calc-shellcode-esp.bin%22 %25 sArch);%0D%0A print %22 * %25s: %25d bytes (%25d with stack allignment)%22 %25 (sDoc, iBinSize, iBinESPSize);%0D%0A
|
|
45b77de143a6ffcc46091d7879da4fa3009bc3e0
|
add jm client
|
python/jm_client.py
|
python/jm_client.py
|
Python
| 0
|
@@ -0,0 +1,501 @@
+#!/usr/bin/python%0A#%0A# jm_client.py%0A#%0A# Author: Zex %3Ctop_zlynch@yahoo.com%3E%0A#%0A%0Aimport dbus %0Aimport dbus.service%0Aimport dbus.mainloop.glib%0Aimport gobject%0A%0Afrom basic import * %0A%0Adef start_request():%0A %22%22%22%0A Start sending requests to server%0A %22%22%22%0A connection = dbus.SessionBus()%0A obj = connection.get_object(%0A JM_SERVICE_NAME,%0A JM_CONFIG_PATH)%0A%0A conf_iface = dbus.Interface(obj,%0A JM_CONFIG_IFACE)%0A%0A print obj.Introspect()%0A print conf_iface.list()%0A%0Astart_request()%0A
|
|
4ea6def1bdeb332b1f530f359a333e4f95078b2b
|
Update docstrings and link to docs
|
homeassistant/components/updater.py
|
homeassistant/components/updater.py
|
"""
homeassistant.components.sensor.updater
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Sensor that checks for available updates.
For more details about this platform, please refer to the documentation at
at https://home-assistant.io/components/sensor.updater/
"""
import logging
import requests
from homeassistant.const import __version__ as CURRENT_VERSION
from homeassistant.const import ATTR_FRIENDLY_NAME
from homeassistant.helpers import event
_LOGGER = logging.getLogger(__name__)
PYPI_URL = 'https://pypi.python.org/pypi/homeassistant/json'
DEPENDENCIES = []
DOMAIN = 'updater'
ENTITY_ID = 'updater.updater'
def setup(hass, config):
''' setup the updater component '''
def check_newest_version(_=None):
''' check if a new version is available and report if one is '''
newest = get_newest_version()
if newest != CURRENT_VERSION and newest is not None:
hass.states.set(
ENTITY_ID, newest, {ATTR_FRIENDLY_NAME: 'Update Available'})
event.track_time_change(hass, check_newest_version,
hour=[0, 12], minute=0, second=0)
check_newest_version()
return True
def get_newest_version():
''' Get the newest HA version form PyPI '''
try:
req = requests.get(PYPI_URL)
return req.json()['info']['version']
except requests.RequestException:
_LOGGER.exception('Could not contact PyPI to check for updates')
return
except ValueError:
_LOGGER.exception('Received invalid response from PyPI')
return
except KeyError:
_LOGGER.exception('Response from PyPI did not include version')
return
|
Python
| 0
|
@@ -22,23 +22,16 @@
ponents.
-sensor.
updater%0A
@@ -66,19 +66,18 @@
~~~~
-~~~~%0ASensor
+%0AComponent
tha
@@ -228,15 +228,8 @@
nts/
-sensor.
upda
@@ -626,13 +626,13 @@
-''' s
+%22%22%22 S
etup
@@ -653,20 +653,21 @@
omponent
- '''
+. %22%22%22
%0A%0A de
@@ -710,13 +710,13 @@
-''' c
+%22%22%22 C
heck
@@ -766,20 +766,21 @@
f one is
- '''
+. %22%22%22
%0A
@@ -1174,19 +1174,19 @@
():%0A
-'''
+%22%22%22
Get the
@@ -1194,17 +1194,29 @@
newest H
-A
+ome Assistant
version
@@ -1221,20 +1221,21 @@
on f
-o
r
+o
m PyPI
- '''
+. %22%22%22
%0A
|
7084442bb78098f05acbe3243231243543061bf6
|
Create gloabl_moran.py
|
gloabl_moran.py
|
gloabl_moran.py
|
Python
| 0.000006
|
@@ -0,0 +1,239 @@
+import arcpy%0Aarcpy.env.workspace = r%22C:%5CUsers%5Callenje4%5CDesktop%5CGGRC32 Lab 4 Files%5CGGRC32 Lab 4 Files%5CLocal Statistics Data%22%0Am = %0Aarcpy.SpatialAutocorrelation_stats(%22pop_sci.shp%22, %22PDens2011%22,%22NO_REPORT%22, %22CONTIGUITY_EDGES_CORNERS%22, %22#%22,)%0A%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.