commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13 values | lang stringclasses 23 values |
|---|---|---|---|---|---|---|---|---|
ef8d86704c0930091b8b8f07842e46ffff5bfc34 | Correct empty lines in imported files | benzolius/leo-scripts | correct_empty_lines.py | correct_empty_lines.py | print('\n\n\n\n')
print('Now:\n')
for p in c.all_positions():
try:
# Corrects empty lines around @language python\n@tabwidth -4
if p.h.startswith('@clean') and p.h.endswith('py'):
# Corrects empty lines after @first
if p.h == '@clean manage.py':
splited = p.b.partition('"""\nMain starter script for Django\n"""')
p.b = ''.join((splited[0].rstrip(), '\n', splited[1], splited[2]))
try:
if '@language python\n@tabwidth -4' in p.b:
splited = p.b.partition('@language python\n@tabwidth -4')
p.b = ''.join((splited[0].rstrip(), '\n', splited[1], '\n', splited[2].lstrip()))
print(p.h)
except Exception as exc:
print('---------------------------- Exception: -------------------- {}'.format(exc))
# Corrects empty lines between main blocks
else:
parent = p.parent().h
if parent.startswith('@clean'):
siblings = [pos.gnx for pos in p.self_and_siblings()]
# Corrects empty lines between main blocks
if p.gnx != siblings[-1]:
p.b = '{}\n\n\n'.format(p.b.rstrip())
# Corrects empty lines at end of file
else:
p.b = '{}\n'.format(p.b.rstrip())
# Corrects empty lines between methods
elif parent.startswith('class'):
siblings = [pos.gnx for pos in p.self_and_siblings()]
# Corrects empty lines between main blocks
if p.gnx != siblings[-1]:
p.b = '{}\n\n'.format(p.b.rstrip())
# Corrects empty lines at end of file
else:
p.b = '{}\n'.format(p.b.rstrip())
except Exception as exc:
print('---------------------------- Exception: -------------------- {}'.format(exc)) | mit | Python | |
08e6a7821283da3898d2eeb4418b6e035effb2f0 | Create spelling_bee.py | Kunalpod/codewars,Kunalpod/codewars | spelling_bee.py | spelling_bee.py | #Kunal Gautam
#Codewars : @Kunalpod
#Problem name: Spelling Bee
#Problem level: 6 kyu
def how_many_bees(hive):
    """Count occurrences of 'bee' in the 2-D grid `hive`.

    Matches are counted horizontally and vertically, reading in either
    direction (left/right/down/up); diagonals are not considered.
    Returns 0 for an empty or None hive.  Ragged rows are tolerated:
    out-of-bounds cells simply never match.
    """
    if not hive:
        return 0

    def cell(i, j):
        # Bounds-checked access; '' matches no letter, so out-of-range
        # lookups silently fail instead of relying on bare except clauses.
        if 0 <= i < len(hive) and 0 <= j < len(hive[i]):
            return hive[i][j]
        return ''

    # (di, dj) unit steps: right, left, down, up
    directions = ((0, 1), (0, -1), (1, 0), (-1, 0))
    count = 0
    for i in range(len(hive)):
        for j in range(len(hive[i])):
            if hive[i][j] != 'b':
                continue
            for di, dj in directions:
                if cell(i + di, j + dj) == 'e' and cell(i + 2 * di, j + 2 * dj) == 'e':
                    count += 1
    return count
| mit | Python | |
706f29660c9fe6f21af3544ea7871ffdfd56db4a | Create sprintreport.py | freedom27/jira_sprint_ledstrip_tracker | sprintreport.py | sprintreport.py | from jirawrapper import JIRAWrapper
import sys
import getopt
if __name__ == "__main__":
    argv = sys.argv[1:]

    def print_help():
        """Print CLI usage for this script."""
        print('sprintreport.py [-p project] -c <username:password>')

    try:
        opts, args = getopt.getopt(argv, "p:c:", ["project=", "credentials="])
    except getopt.GetoptError:
        print_help()
        sys.exit(2)

    project = 'HSD'  # default JIRA project key
    username = None
    password = None
    for opt, arg in opts:
        if opt in ('-p', '--project'):
            project = arg
        elif opt in ('-c', '--credentials'):
            # Credentials are passed as a single "username:password" token.
            if ':' in arg:
                username, password = arg.split(':')[:2]
    if username is None or password is None:
        print_help()
        sys.exit(2)

    try:
        jira_wrapper = JIRAWrapper(username, password, project)
        print('Stories in current sprint:')
        for issue in jira_wrapper.current_sprint_user_stories():
            if issue.fields.status.name == 'Completed':
                percent = 100
            else:
                percent = issue.fields.aggregateprogress.percent
            print('{0} - {1}% - {2}'.format(issue.key, str(percent), issue.fields.summary))
        print('Sprint completed at {0}%'.format(str(jira_wrapper.current_sprint_progress())))
    except Exception as exc:
        # Narrowed from a bare 'except:' (which also swallowed SystemExit
        # and KeyboardInterrupt) and include the actual failure reason.
        print('ERROR: An error occurred while contacting JIRA! ({0})'.format(exc))
| mit | Python | |
729ab39844bcf6700ccdeb28955e90220d2bab86 | convert ip v4 | vvvvcp/NeverMore | base/convert_ip.py | base/convert_ip.py | import socket
from binascii import hexlify
def convert_ip4_address():
    """Pack two dotted-quad IPv4 addresses to their 4-byte network form,
    unpack them back, and print the round trip for each.

    Uses the print() call form with a single %-formatted argument, which
    behaves identically on Python 2 and Python 3 (the original used the
    Python-2-only print statement).
    """
    for ip_addr in ['127.0.0.1', '192.168.0.1']:
        packed_ip_addr = socket.inet_aton(ip_addr)        # 4-byte big-endian form
        unpacked_ip_addr = socket.inet_ntoa(packed_ip_addr)
        print("IP Address : %s => Packed: %s, Unpacked: %s"
              % (ip_addr, hexlify(packed_ip_addr), unpacked_ip_addr))
if __name__ == '__main__':
convert_ip4_address() | apache-2.0 | Python | |
d427012df993dcffb77de05502fcc170cff6cdcb | Add Notebook test | danielfrg/datasciencebox,danielfrg/datasciencebox,danielfrg/datasciencebox,danielfrg/datasciencebox | datasciencebox/tests/salt/test_notebook.py | datasciencebox/tests/salt/test_notebook.py | import pytest
import requests
import utils
def setup_module(module):
    """pytest module-level hook: install the 'notebook' component once
    before any test in this module runs."""
    utils.invoke('install', 'notebook')
@utils.vagranttest
def test_salt_formulas():
    """Dry-run the ipython.notebook Salt state on the master and verify
    that every state result is reported as successful."""
    project = utils.get_test_project()
    salt_kwargs = {
        'test': 'true',
        '--out': 'json',
        '--out-indent': '-1',
    }
    result = project.salt(
        'state.sls',
        args=['ipython.notebook'],
        target='master',
        kwargs=salt_kwargs,
    )
    utils.check_all_true(result, none_is_ok=True)
@utils.vagranttest
def test_notebook_ui():
    """The IPython notebook UI on the cluster master must answer HTTP 200.

    (A duplicated `project = utils.get_test_project()` call was removed.)
    """
    project = utils.get_test_project()
    nn_ip = project.cluster.master.ip
    r = requests.get('http://%s:8888/' % nn_ip)
    assert r.status_code == 200
| apache-2.0 | Python | |
1380ee2a9eff29bd1271eb0e920b75752f7346c7 | Add (transitional?) db_oldrotate.py that finds exif rotate in old posts. | drougge/wellpapp-pyclient | db_oldrotate.py | db_oldrotate.py | #!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
from pyexiv2 import Image as ExivImage
from db_add import exif2rotation
from dbclient import dbclient
client = dbclient()
# Fetch all posts tagged "SPFrotate", requesting their "rotate" field.
# NOTE(review): _search_post is a leading-underscore (private) dbclient API.
posts = client._search_post("SPFrotate", ["rotate"])
print len(posts), "posts"  # Python 2 print statement
for post in posts:
	# rotate == -1 presumably marks posts whose rotation is still
	# unknown -- confirm against the tagging convention.
	if post["rotate"] == -1:
		m = post["md5"]
		# Derive the rotation from the image's EXIF metadata and
		# persist it on the post when a valid value was found.
		exif = ExivImage(client.image_path(m))
		exif.readMetadata()
		rot = exif2rotation(exif)
		if rot >= 0:
			client.modify_post(m, rotate=rot)
| mit | Python | |
7da454aafc0821a6692edb9f829d0bd2e841c534 | Add tool support for DIVINE. | sosy-lab/benchexec,IljaZakharov/benchexec,martin-neuhaeusser/benchexec,IljaZakharov/benchexec,ultimate-pa/benchexec,IljaZakharov/benchexec,dbeyer/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,dbeyer/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,martin-neuhaeusser/benchexec,martin-neuhaeusser/benchexec,dbeyer/benchexec,sosy-lab/benchexec,martin-neuhaeusser/benchexec,ultimate-pa/benchexec,IljaZakharov/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec | benchexec/tools/divine.py | benchexec/tools/divine.py | """
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
import os
class Tool(benchexec.tools.template.BaseTool):
    """
    Tool wrapper for the DIVINE model checker.
    """

    # Files that make up a DIVINE installation: the main binary, the
    # wrapper script used to launch runs, the LART tool and the runtime
    # bitcode library.
    BINS = ['divine', 'rundivine', 'lart', 'libdivinert.bc']

    def executable(self):
        """
        Return the path (relative to the current directory) of the main
        'divine' binary.
        """
        return util.find_executable(self.BINS[0])

    def version(self, executable):
        return self._version_from_tool(executable)

    def name(self):
        """Return the tool name, formatted for humans."""
        return 'DIVINE'

    def cmdline(self, executable, options, tasks, propertyfile=None, rlimits={}):
        """
        Compose the command line to execute.

        The run is delegated to the 'rundivine' wrapper script, which gets
        the installation directory as its first argument, followed by the
        user-specified options and the input files.

        @param executable: path to the tool executable (result of executable())
        @param options: options in the same order as given in the XML file
        @param tasks: input files to analyse in one run
        @param propertyfile: ignored -- only reachability is checked
        @param rlimits: resource limits for the run (unused here)
        """
        directory = os.path.dirname(executable)
        return [os.path.join('.', directory, self.BINS[1]), directory] + options + tasks

    def determine_result(self, returncode, returnsignal, output, isTimeout):
        """
        Parse the tool output and map it to one of the
        benchexec.result.RESULT_* strings, or a free-form failure string.
        """
        # Guard against empty output: the previous code indexed output[-1]
        # unconditionally, raising IndexError when the tool produced nothing.
        last = output[-1] if output else None

        if isTimeout:
            return 'TIMEOUT'

        if returncode != 0:
            return 'Pre-run phase failed: ' + '\n'.join(output)

        if last is None:
            return 'ERROR - no output'
        elif 'result: true' in last:
            return result.RESULT_TRUE_PROP
        elif 'result: false' in last:
            return result.RESULT_FALSE_REACH
        else:
            return result.RESULT_UNKNOWN

    def program_files(self, executable):
        """
        Return the list of files that are necessary to run the tool
        (e.g. so a cloud backend can transport them).
        """
        directory = os.path.dirname(executable)
        # Build a real list: on Python 3, map() would return a one-shot
        # iterator, but callers expect a reusable list of paths.
        return [os.path.join('.', directory, x) for x in self.BINS]
| apache-2.0 | Python | |
35eee909fa90d06efa34a39667db6345725bc177 | Add tool _list paging test | lym/allura-git,apache/incubator-allura,heiths/allura,apache/allura,apache/incubator-allura,lym/allura-git,apache/allura,heiths/allura,lym/allura-git,lym/allura-git,lym/allura-git,heiths/allura,apache/incubator-allura,heiths/allura,apache/allura,heiths/allura,apache/incubator-allura,apache/allura,apache/allura | Allura/allura/tests/functional/test_tool_list.py | Allura/allura/tests/functional/test_tool_list.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from allura.tests import TestController
from allura.tests import decorators as td
class TestToolListController(TestController):

    @td.with_wiki
    @td.with_tool('test', 'Wiki', 'wiki2')
    def test_default(self):
        """The list page must link to every installed tool of the type."""
        resp = self.app.get('/p/test/_list/wiki')
        listing = resp.html.find('div', id='content_base')
        assert listing.find('a', dict(href='/p/test/wiki/')), resp
        assert listing.find('a', dict(href='/p/test/wiki2/')), resp

    @td.with_wiki
    @td.with_tool('test', 'Wiki', 'wiki2')
    def test_paging(self):
        """With limit=1, each page must show exactly one tool link."""
        first_page = self.app.get('/p/test/_list/wiki?limit=1&page=0')
        listing = first_page.html.find('div', id='content_base')
        assert listing.find('a', dict(href='/p/test/wiki/')), first_page
        assert not listing.find('a', dict(href='/p/test/wiki2/')), first_page

        second_page = self.app.get('/p/test/_list/wiki?limit=1&page=1')
        listing = second_page.html.find('div', id='content_base')
        assert not listing.find('a', dict(href='/p/test/wiki/')), second_page
        assert listing.find('a', dict(href='/p/test/wiki2/')), second_page
| # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from allura.tests import TestController
from allura.tests import decorators as td
class TestToolListController(TestController):

    @td.with_wiki
    @td.with_tool('test', 'Wiki', 'wiki2')
    def test_default(self):
        """Test that list page contains a link to all tools of that type."""
        r = self.app.get('/p/test/_list/wiki')
        # NOTE(review): this searches the whole page rather than a specific
        # container, and find() returns a single tag (len() then counts its
        # children, not matches) -- presumably intended to assert the link
        # exists exactly once; confirm against the BeautifulSoup API in use.
        assert len(r.html.find('a', dict(href='/p/test/wiki/'))) == 1, r
        assert len(r.html.find('a', dict(href='/p/test/wiki2/'))) == 1, r
| apache-2.0 | Python |
64b03bd53f6f494398818199caabe10138469719 | Create couples-holding-hands.py | tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode | Python/couples-holding-hands.py | Python/couples-holding-hands.py | # Time: O(n)
# Space: O(n)
class Solution(object):
    def minSwapsCouples(self, row):
        """
        :type row: List[int]
        :rtype: int

        Minimum number of seat swaps so that every couple (persons 2k and
        2k+1) shares a couch (adjacent seat pair 2c, 2c+1).

        Treat couches as graph nodes; each couple adds an edge between the
        two couches its partners occupy (a self-loop if already together).
        A connected cycle of k couches needs k - 1 swaps, so the answer is
        N minus the number of cycles.

        Uses range() instead of the Python-2-only xrange() so the code runs
        unchanged on both Python 2 and Python 3.
        """
        N = len(row) // 2

        # couples[c] -> the two couch indices where couple c's partners sit.
        couples = [[] for _ in range(N)]
        for seat, num in enumerate(row):
            couples[num // 2].append(seat // 2)

        # Multigraph adjacency between couches.
        adj = [[] for _ in range(N)]
        for couch1, couch2 in couples:
            adj[couch1].append(couch2)
            adj[couch2].append(couch1)

        result = N
        for couch in range(N):
            if not adj[couch]:
                continue
            result -= 1  # one cycle discovered starting at this couch
            couch1, couch2 = couch, adj[couch].pop()
            while couch2 != couch:
                adj[couch2].remove(couch1)
                couch1, couch2 = couch2, adj[couch2].pop()
        return result
| mit | Python | |
364de0a95b868bba980bfe6445cd80f55b39bb63 | add amber ti estimator test code | alchemistry/alchemlyb | src/alchemlyb/tests/test_ti_estimators_amber.py | src/alchemlyb/tests/test_ti_estimators_amber.py | """Tests for all TI-based estimators in ``alchemlyb``.
"""
import pytest
import pandas as pd
from alchemlyb.parsing import amber
from alchemlyb.estimators import TI
import alchemtest.amber
def amber_simplesolvated_charge_dHdl():
    """Concatenate the per-lambda dH/dl frames of the 'charge' leg of the
    AMBER simplesolvated test dataset into a single DataFrame."""
    dataset = alchemtest.amber.load_simplesolvated()

    dHdl = pd.concat([amber.extract_dHdl(filename)
                      for filename in dataset['data']['charge']])

    return dHdl
def amber_simplesolvated_vdw_dHdl():
    """Concatenate the per-lambda dH/dl frames of the 'vdw' leg of the
    AMBER simplesolvated test dataset into a single DataFrame."""
    dataset = alchemtest.amber.load_simplesolvated()

    dHdl = pd.concat([amber.extract_dHdl(filename)
                      for filename in dataset['data']['vdw']])

    return dHdl
class TIestimatorMixin:
    """Shared test logic for TI-based estimators.

    Subclasses set `cls` to the estimator class under test.  Note that the
    parametrize arguments invoke the dHdl loader functions at import time.
    """

    @pytest.mark.parametrize('X_delta_f', ((amber_simplesolvated_charge_dHdl(), -60.114),
                                           (amber_simplesolvated_vdw_dHdl(), 3.824)))
    def test_get_delta_f(self, X_delta_f):
        """Fit the estimator on the dHdl data and compare the resulting
        end-to-end free-energy difference to the expected reference value."""
        est = self.cls().fit(X_delta_f[0])
        delta_f = est.delta_f_.iloc[0, -1]
        assert X_delta_f[1] == pytest.approx(delta_f, rel=1e-3)
class TestTI(TIestimatorMixin):
    """Tests for the TI (thermodynamic integration) estimator.
    """
    # Estimator class instantiated by TIestimatorMixin.test_get_delta_f.
    cls = TI
| bsd-3-clause | Python | |
a8f4d9252016aa9c656ad6d06558b520af06b489 | Create spyne_webservice.py | gvaduha/homebrew,gvaduha/homebrew,gvaduha/homebrew,gvaduha/homebrew,gvaduha/homebrew,gvaduha/homebrew | PythonTests/spyne_webservice.py | PythonTests/spyne_webservice.py | """
USE: Wizdler Chrome Extension
from suds.client import Client
c = Client('http://localhost:8008/...?wsdl')
c.service.SmokeTest('XXX')
"""
response_file = '$RESP_FILE'
from spyne import Application, rpc, ServiceBase, Iterable, Integer, Unicode
from spyne.protocol.soap import Soap11
from spyne.server.wsgi import WsgiApplication
class TestService(ServiceBase):
    """SOAP test stub exposing a single echo operation."""

    __service_url_path__ = '/TestCenter/UC1/smoke.test'
    #__in_protocol__ = Soap11(validator='lxml')
    #__out_protocol__ = Soap11()

    @rpc(Unicode, _returns=Unicode)
    def SmokeTest(ctx, req):
        """Test stub

        <b>Base test:</b>
        @req input request
        @return the request echoed back
        """
        # The previous body opened `response_file` (leaking the handle) and
        # carried an unreachable `return f.read()` after `return req`.  The
        # dead code is removed; the service simply echoes the request.  To
        # serve canned content instead, read response_file inside a `with`
        # block and return its contents.
        return req
application = Application([TestService], 'TestServiceNamespace',
in_protocol=Soap11(validator='lxml'),
out_protocol=Soap11()
)
wsgi_application = WsgiApplication(application)
if __name__ == '__main__':
    import logging
    from wsgiref.simple_server import make_server

    # Verbose logging so the SOAP XML exchanged by spyne is visible.
    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger('spyne.protocol.xml').setLevel(logging.DEBUG)
    logging.info("Service started")

    # Serve the SOAP application on localhost:8008 until interrupted.
    server = make_server('127.0.0.1', 8008, wsgi_application)
    server.serve_forever()
| mit | Python | |
9a6fe771ba03cd64c4f6d764125457ed808feca2 | Add datacite harvester | erinspace/scrapi,alexgarciac/scrapi,CenterForOpenScience/scrapi,jeffreyliu3230/scrapi,erinspace/scrapi,felliott/scrapi,fabianvf/scrapi,mehanig/scrapi,felliott/scrapi,fabianvf/scrapi,mehanig/scrapi,ostwald/scrapi,CenterForOpenScience/scrapi | scrapi/harvesters/datacite.py | scrapi/harvesters/datacite.py | '''
Harvester for the DataCite MDS for the SHARE project
Example API call: http://oai.datacite.org/oai?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
from scrapi.base.helpers import updated_schema, oai_extract_dois
class DataciteHarvester(OAIHarvester):
    """OAI-PMH harvester for the DataCite Metadata Store (MDS)."""

    # Identifiers and endpoint used by the scrapi framework.
    short_name = 'datacite'
    long_name = 'DataCite MDS'
    url = 'http://oai.datacite.org/oai'
    base_url = 'http://oai.datacite.org/oai'

    # Additional OAI properties to capture verbatim from each record.
    property_list = ['date', 'identifier', 'setSpec', 'description']
    # Presumably enables full-timestamp (rather than date-only) ranges in
    # OAI requests -- confirm against the OAIHarvester base class.
    timezone_granularity = True

    @property
    def schema(self):
        """Base schema with DataCite-specific overrides: pick the preferred
        description entry and derive canonical/object URIs from DOIs."""
        return updated_schema(self._schema, {
            "description": ("//dc:description/node()", get_second_description),
            "uris": {
                "canonicalUri": ('//dc:identifier/node()', oai_extract_dois),
                "objectUris": ('//dc:identifier/node()', oai_extract_dois)
            }
        })
def get_second_description(descriptions):
    """Pick the preferred description from a sequence of descriptions.

    Returns the second entry when two or more exist, the sole entry when
    exactly one exists, and '' for an empty or None input.
    """
    if not descriptions:
        return ''
    return descriptions[1] if len(descriptions) > 1 else descriptions[0]
| apache-2.0 | Python | |
c63f2e2993b0c32d7bc2de617dfb147c0f6a2d89 | patch fix | geekroot/erpnext,indictranstech/erpnext,indictranstech/erpnext,gsnbng/erpnext,indictranstech/erpnext,njmube/erpnext,njmube/erpnext,geekroot/erpnext,Aptitudetech/ERPNext,gsnbng/erpnext,geekroot/erpnext,gsnbng/erpnext,indictranstech/erpnext,gsnbng/erpnext,njmube/erpnext,njmube/erpnext,geekroot/erpnext | erpnext/patches/v7_0/make_is_group_fieldtype_as_check.py | erpnext/patches/v7_0/make_is_group_fieldtype_as_check.py | from __future__ import unicode_literals
import frappe
def execute():
	"""Patch: convert the legacy Yes/No ``is_group`` select field on tree
	doctypes into an int (check) field holding 1/0."""
	for doctype in ["Sales Person", "Customer Group", "Item Group", "Territory"]:
		# convert to 1 or 0
		frappe.db.sql("update `tab{doctype}` set is_group = if(is_group='Yes',1,0) "
			.format(doctype=doctype))

		# Flush the data change before the DDL below (in MySQL, ALTER
		# TABLE commits implicitly anyway).
		frappe.db.commit()

		# alter fields to int
		frappe.db.sql("alter table `tab{doctype}` change is_group is_group int(1) default '0'"
			.format(doctype=doctype))

		frappe.reload_doctype(doctype)
| import frappe
def execute():
for doctype in ["Sales Person", "Customer Group", "Item Group", "Territory"]:
frappe.reload_doctype(doctype)
#In MySQL, you can't modify the same table which you use in the SELECT part.
frappe.db.sql(""" update `tab{doctype}` set is_group = 1
where name in (select parent_{field} from (select distinct parent_{field} from `tab{doctype}`
where parent_{field} != '') as dummy_table)
""".format(doctype=doctype, field=doctype.strip().lower().replace(' ','_')))
| agpl-3.0 | Python |
70b2fcb7ca85878b2012cab8c476b40f2624e7ee | Add migration new_name | makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek | geotrek/trekking/migrations/0003_auto_20181113_1755.py | geotrek/trekking/migrations/0003_auto_20181113_1755.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-11-13 16:55
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: re-declare Trek.pois_excluded with its
    # explicit db_table, related_name and verbose_name.

    dependencies = [
        ('trekking', '0002_trek_pois_excluded'),
    ]

    operations = [
        migrations.AlterField(
            model_name='trek',
            name='pois_excluded',
            field=models.ManyToManyField(db_table=b'l_r_troncon_poi_exclus', related_name='excluded_treks', to='trekking.POI', verbose_name='Excluded POIs'),
        ),
    ]
| bsd-2-clause | Python | |
43fbaa6c109c51a77832d6f09e0543794882b518 | Add scripts printing special:mode.yml from model.npz | marian-nmt/marian-train,marian-nmt/marian-train,emjotde/Marian,marian-nmt/marian-train,emjotde/amunn,emjotde/amunmt,emjotde/Marian,marian-nmt/marian-train,emjotde/amunmt,amunmt/marian,emjotde/amunn,emjotde/amunmt,emjotde/amunn,amunmt/marian,emjotde/amunn,emjotde/amunmt,amunmt/marian,marian-nmt/marian-train | scripts/contrib/model_info.py | scripts/contrib/model_info.py | #!/usr/bin/env python3
import sys
import argparse
import numpy as np
import yaml
DESC = "Prints version and model type from model.npz file."
S2S_SPECIAL_NODE = "special:model.yml"
def main():
    """Load the .npz model, locate the special Marian YAML node, and print
    either the whole YAML text or the value of the requested key."""
    args = parse_args()
    model = np.load(args.model)

    if S2S_SPECIAL_NODE not in model:
        print("No special Marian YAML node found in the model")
        exit(1)

    yaml_text = bytes(model[S2S_SPECIAL_NODE]).decode('ascii')
    if not args.key:
        print(yaml_text)
        exit(0)

    # fix the invalid trailing unicode character '#x0000' added to the YAML
    # string by the C++ cnpy library
    try:
        # safe_load: the node contains plain configuration scalars, and
        # yaml.load without an explicit Loader is deprecated/unsafe.
        yaml_node = yaml.safe_load(yaml_text)
    except yaml.reader.ReaderError:
        yaml_node = yaml.safe_load(yaml_text[:-1])
    print(yaml_node[args.key])
def parse_args():
    """Build the command-line parser and return the parsed arguments."""
    arg_parser = argparse.ArgumentParser(description=DESC)
    arg_parser.add_argument("-m", "--model", required=True, help="model file")
    arg_parser.add_argument("-k", "--key", help="print value for specific key")
    return arg_parser.parse_args()
if __name__ == "__main__":
main()
| mit | Python | |
89aaf8161bb776db672949e9e48f8f9af1b45837 | Delete subscriptions list | GeographicaGS/urbo-pgsql-connector,GeographicaGS/urbo-pgsql-connector,GeographicaGS/urbo-pgsql-connector | scripts/delete_subscr_list.py | scripts/delete_subscr_list.py | # -*- coding: utf-8 -*-
#
# Author: Cayetano Benavent, 2016.
# cayetano.benavent@geographica.gs
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
import requests
import yaml
import json
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
try:
"""
Remove InsecureRequestWarning for unverified HTTPS requests.
For Requests library version < 2.4 an error raise in this import.
"""
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
except ImportError as err:
# raise ImportError("{}\nYou need to upgrade Requests Library".format(err))
pass
class GetAuthTokenException(Exception):
    """Raised when the Keystone token request returns a non-OK response."""
    pass
class DeleteSubscriptionException(Exception):
    """Raised when an unsubscribeContext request returns a non-OK response."""
    pass
def getAuthToken(url_authtk, fl_fw_auth, timeout=10, ssl=False):
    """Request a Keystone v3 auth token.

    POSTs the JSON payload stored in `fl_fw_auth` to `url_authtk` and, on
    success, returns (auth_token, expiration_date) taken from the
    'x-subject-token' response header and the token body.

    NOTE(review): any failure -- including the GetAuthTokenException raised
    below for a non-OK response -- is caught by the blanket except and only
    printed, after which the function implicitly returns None (not a
    tuple); callers must handle that.

    Args:
        url_authtk: Keystone tokens endpoint URL.
        fl_fw_auth: path to a JSON file holding the auth request body.
        timeout: requests timeout in seconds.
        ssl: passed to requests' verify= (False disables cert checks).
    """
    try:
        headers_authtk = {'Content-Type': 'application/json'}

        with open(fl_fw_auth) as fw_json_auth:
            json_data = json.load(fw_json_auth)

        payload = json.dumps(json_data)

        resp = requests.post(url_authtk, headers=headers_authtk,
                             data=payload, verify=ssl, timeout=timeout)

        if resp.ok:
            auth_token = resp.headers.get('x-subject-token')
            resp_json = resp.json().get('token')
            exp_date = resp_json.get('expires_at')
            return(auth_token, exp_date)
        else:
            raise GetAuthTokenException("Error: {}".format(resp.json()))

    except Exception as err:
        print("Error: {}".format(err))
def deleteSubscriptions(subs, url_subs, fiw_serv, fiw_subsserv, authtoken, timeout=10, ssl=False):
    """Unsubscribe a list of Orion context-broker subscriptions.

    POSTs one unsubscribeContext request per subscription id in `subs`,
    carrying the Fiware service/service-path headers and the auth token.

    NOTE(review): a non-OK response raises DeleteSubscriptionException,
    which the enclosing except immediately catches and prints -- so the
    remaining ids in `subs` are NOT processed after the first failure.
    """
    try:
        headers_authtk = {
            'Content-Type': 'application/json',
            'Accept': 'application/json',
            'Fiware-Service': fiw_serv,
            'Fiware-ServicePath': fiw_subsserv,
            'x-auth-token': authtoken
        }

        for subs_id in subs:
            json_data = {
                "subscriptionId": subs_id
            }

            payload = json.dumps(json_data)

            resp = requests.post(url_subs, headers=headers_authtk,
                                 data=payload, verify=ssl, timeout=timeout)

            if resp.ok:
                print("{0}. Deleted subscription: {1}".format(resp, subs_id))
            else:
                print(resp)
                raise DeleteSubscriptionException("Error: {}".format(resp.json()))

    except Exception as err:
        print("Error: {}".format(err))
def main():
    """Authenticate against Keystone, then delete the hard-coded list of
    Orion context-broker subscriptions."""
    fl_fw_auth = "fiware_auth.json"
    url_authtk = 'https://195.235.93.224:15001/v3/auth/tokens'

    token_info = getAuthToken(url_authtk, fl_fw_auth)
    if token_info is None:
        # getAuthToken already printed the failure; previously the code
        # crashed here with a TypeError when unpacking None.
        print("Error: could not obtain auth token")
        return

    auth_token, exp_date = token_info
    print(auth_token)

    url_subs = 'https://195.235.93.224:10027/v1/unsubscribeContext'
    fiw_serv = 'urbo'
    fiw_subsserv = '/geographica'

    # Subscription ids to remove (Orion-assigned ObjectIds).
    subs = ['57eccf87fdc8301538a65932','57eccf8b4e11614a5cb4c3de','57ecd3f64e11614a5cb4c3df']

    deleteSubscriptions(subs, url_subs, fiw_serv, fiw_subsserv, auth_token)
if __name__ == '__main__':
main()
| agpl-3.0 | Python | |
af5e58c2fefdffa8046ecace238a3fd1a2a43387 | Add Naive Bayes base class: | christopherjenness/ML-lib | ML/naivebayes.py | ML/naivebayes.py | """
Naive Bayes Classifier
Includes gaussian, bernoulli and multinomial models
"""
import abc
import numpy as np
class NaiveBayes:
    """
    Base class for Naive Bayes classifiers.

    Assumes features are conditionally independent given the class label;
    concrete subclasses supply the per-feature likelihood model
    (gaussian, bernoulli, multinomial).
    """
    __metaclass__ = abc.ABCMeta  # Python-2 style ABC declaration

    def __init__(self):
        """
        Attributes:
            learned (bool): Keeps track of if classifier has been fit
            class_names (np.ndarray): array of class names. [0, 1] for example.
            class_priors (dict): prior probability of each class.
                determined via fraction of training samples in each class
            class_parameters (dict): dict of parameters for each class
        """
        self.class_names = []
        self.class_priors = {}
        self.class_parameters = {}
        self.learned = False

    @abc.abstractmethod
    def fit(self, X, y):
        """
        Fit the classifier to training data.

        Args:
            X (np.ndarray): Training data of shape[n_samples, n_features]
            y (np.ndarray): Target values of shape[n_samples, 1]

        Returns: an instance of self
        """
        return self

    @abc.abstractmethod
    def predict(self, X):
        """
        Predict the class of a single sample.

        Args:
            X (np.array): Training data of shape[1, n_features]

        Returns: predicted class of sample

        Raises:
            ValueError if model has not been fit
        """
        return self
| mit | Python | |
9d27a2f2791a26c3ff326d16001363ef48650596 | Rename jokes.py to geek.py and separate explicit jokes | ElectronicsGeek/pyjokes,borjaayerdi/pyjokes,trojjer/pyjokes,Wren6991/pyjokes,bennuttall/pyjokes,gmarkall/pyjokes,pyjokes/pyjokes,birdsarah/pyjokes,martinohanlon/pyjokes | pyjokes/geek.py | pyjokes/geek.py | # -*- coding: utf-8 -*-
"""
Jokes from stackoverflow - provided under CC BY-SA 3.0
http://stackoverflow.com/questions/234075/what-is-your-best-programmer-joke?page=4&tab=votes#tab-top
"""
geek_neutral = [
"A SQL query goes into a bar, walks up to two tables and asks, 'Can I join you?'",
"When your hammer is C++, everything begins to look like a thumb.",
"If you put a million monkeys at a million keyboards, one of them will eventually write a Java program. The rest of them will write Perl programs.",
"To understand what recursion is, you must first understand recursion.",
"Friends don't let friends use Python 2.7",
"I suggested holding a 'Python Object Oriented Programming Seminar', but the acronym was unpopular.",
"'Knock, knock.' 'Who’s there?' very long pause.....'Java.'",
"How many programmers does it take to change a light bulb? None, that's a hardware problem.",
"If you're happy and you know it, syntax error!",
"'Whats the object-oriented way to become wealthy?' 'Inheritance'",
"Q: How many prolog programmers does it take to change a lightbulb? A: Yes.",
"So this programmer goes out on a date with a hot chick.",
"Richard Stallman, Linus Torvalds, and Donald Knuth engage in a discussion on whose impact on computer science was the greatest. Stallman: 'God told me I have programmed the best editor in the world!' Torvalds: ''Well, God told me that I have programmed the best operating system in the world!' Knuth: 'Wait, wait, I never said that.'",
"A foo walks into a bar, takes a look around and says 'Hello World!' and meet up his friends Alice, Bob, and Carol.",
"Q: Why don't jokes work in octal? A: Because 7 10 11.",
"A programmer started to cuss Because getting to sleep was a fuss As she lay there in bed Looping 'round in her head was: while(!asleep()) sheep++",
"Keyboard not found ... press F1 to continue",
"Q: how many Microsoft programmers does it take to change a light bulb? A: none, they just make darkness a standard and tell everyone 'It's not a bug, it's a feature!'",
"How long does it take to copy a file in Vista? Yeah, I don't know either, I'm still waiting to find out.",
"Two bytes meet. The first byte asks, 'Are you ill?' The second byte replies, 'No, just feeling a bit off.'",
"Two threads walk into a bar. The barkeeper looks up and yells, 'hey, I want don't any conditions race like time last!'",
"Old C programmers don't die, they're just cast into void.",
"Eight bytes walk into a bar. The bartender asks, 'Can I get you anything?' 'Yeah,' reply the bytes. 'Make us a double.'",
"Q: Why did the programmer quit his job? A: Because he didn't get arrays.",
"XML is like violence. If it doesn't solve your problem, you're not using enough of it",
"Why do java programmers have to wear glasses? Because they don't see sharp.",
"Software developers like to solve problems. If there are no problems handily available, they will create their own problems.",
"I'd like to make the world a better place, but they won't give me the source code.",
".NET is called .NET so that it wouldn't show up in a Unix directory listing.",
"What do you mean, it needs comments!? If it was hard to write, it should be hard to understand--why do you think we call it code???",
"Hardware: The part of a computer that you can kick.",
"The programmer got stuck in the shower because the instructions on the shampoo bottle said, Lather, Rinse, Repeat.",
"Optimist : The glass is half full. Pessimist : The glass is half empty. Coder: The glass is twice as large as necessary.",
"In C we had to code our own bugs. In C++ we can inherit them.",
"Q: How come there is not obfuscated Perl contest? A: Because everyone would win.",
"It's been said that if you play a Windows CD backwards, you'll hear satanic chanting...worse still if you play it forwards, it installs Windows.",
"Q: How many programmers does it take to kill a cockroach? A: Two: one holds, the other installs Windows on it",
"Windows is...a 64 bit rewrite of...a 32 bit extension to...a 16 bit api to...an 8 bit kernel for...a 4 bit microprocessor by...a 2 bit company that can't stand...1 bit of competition.",
"The shortest programmer joke: 'I'm nearly done!'",
"JIT Happens!",
"I � Unicode.",
"What do you call a programmer from Finland? Nerdic.",
"Q: What did the Java code say to the C code? A: You've got no class.",
"Why did Microsoft name their new search engine BING? Because It's Not Google!",
"A classic one from learning finite state machines: 'Kleeneliness is next to Gödeliness'",
"Pirates go arg! Computer pirates go argv!",
"Software salesmen and used-car salesmen differ in that the latter know when they are lying.",
"Child: Dad, why does the sun rise in the east and set in the west? Dad: Son, it's working, don't touch",
]
geek_explicit = [
"Programming is like sex: One mistake and you have to support it for the rest of your life.",
"Software is like sex: It's better when it's free. (Linus Torvalds)",
"Software is like sex: It's never REALLY free.",
"There are 10 types of people in the world. Those who understand binary and those who get laid.",
"Why programmers like UNIX: unzip, strip, touch, finger, grep, mount, fsck, more, yes, fsck, fsck, fsck, umount, sleep",
"If your mom was a collection class, her insert method would be public.",
"Your mommas so fat that not even Dijkstra is able to find a shortest path around her.",
"C++ - where your friends have access to your private members.",
"The only 'intuitive' user interface is the nipple. After that, it's all learned.",
"During a recent password audit, it was found that a blonde was using the following password: MickeyMinniePlutoHueyLouieDeweyDonaldGoofy. When asked why such a big password, she said that it had to be at least 8 characters long.",
"Q: What's the difference between Software Development and Sex? A: In sex, you don't get a bonus for releasing early.",
]
| bsd-3-clause | Python | |
837e9db39b2c9010a5cc43f21821b5dec90a18b1 | add rdds.fileio to setup.py | pearsonlab/thunder,kunallillaney/thunder,pearsonlab/thunder,j-friedrich/thunder,kcompher/thunder,jwittenbach/thunder,thunder-project/thunder,kcompher/thunder,j-friedrich/thunder,kunallillaney/thunder,oliverhuangchao/thunder,broxtronix/thunder,poolio/thunder,poolio/thunder,mikarubi/thunder,broxtronix/thunder,oliverhuangchao/thunder,zhwa/thunder,mikarubi/thunder,zhwa/thunder | python/setup.py | python/setup.py | #!/usr/bin/env python
from setuptools import setup
import thunder
setup(
name='thunder-python',
version=str(thunder.__version__),
description='Large-scale neural data analysis in Spark',
author='The Freeman Lab',
author_email='the.freeman.lab@gmail.com',
url='https://github.com/freeman-lab/thunder',
packages=['thunder',
'thunder.clustering',
'thunder.decoding',
'thunder.factorization',
'thunder.lib',
'thunder.rdds',
'thunder.rdds.fileio',
'thunder.regression',
'thunder.standalone',
'thunder.utils',
'thunder.viz'],
scripts = ['bin/thunder', 'bin/thunder-submit', 'bin/thunder-ec2'],
package_data = {'thunder.utils': ['data/fish.txt', 'data/iris.txt'], 'thunder.lib': ['thunder_2.10-' + str(thunder.__version__) + '.jar']},
long_description=open('README.rst').read(),
install_requires=open('requirements.txt').read().split()
) | #!/usr/bin/env python
from setuptools import setup
import thunder
setup(
name='thunder-python',
version=str(thunder.__version__),
description='Large-scale neural data analysis in Spark',
author='The Freeman Lab',
author_email='the.freeman.lab@gmail.com',
url='https://github.com/freeman-lab/thunder',
packages=['thunder',
'thunder.clustering',
'thunder.decoding',
'thunder.factorization',
'thunder.lib',
'thunder.rdds',
'thunder.regression',
'thunder.standalone',
'thunder.utils',
'thunder.viz'],
scripts = ['bin/thunder', 'bin/thunder-submit', 'bin/thunder-ec2'],
package_data = {'thunder.utils': ['data/fish.txt', 'data/iris.txt'], 'thunder.lib': ['thunder_2.10-' + str(thunder.__version__) + '.jar']},
long_description=open('README.rst').read(),
install_requires=open('requirements.txt').read().split()
) | apache-2.0 | Python |
ee3b5d0d14403bd5964f0609eb48848833bff2c0 | Add a draft for a backup class | vadmium/python-quilt,bjoernricks/python-quilt | quilt/backup.py | quilt/backup.py | # vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <bjoern.ricks@googlemail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from quilt.utils import Directory, File
class Backup(object):
""" Class to backup files
This class should be exented in future to support all functions of quilts
backup-files script.
"""
def __init__(self, filename):
self.filename = filename
def backup(self, dest_dir):
file = File(self.filename)
if file.exists():
if not isinstance(dest_dir, Directory):
dest_dir = Directory(dest_dir)
dest_dir.create()
file.copy(dest_dir)
else:
dest_dir = dest_dir + file.get_directory()
dest_dir.create()
dest_file = dest_dir + file
dest_file.touch()
| mit | Python | |
3ab998b022ff69c21c470de397f12557a1141168 | Add tests for photo.utils | rjhelms/photo,rjhelms/photo | src/photo/tests/test_utils.py | src/photo/tests/test_utils.py | """
Tests for photo.utils
"""
import uuid
from django.test import TestCase
from photo import utils
# pylint: disable=too-few-public-methods
class DummyInstance:
"""
Dummy instance object for passing into UploadToPathAndRename
"""
pk = None # pylint: disable=invalid-name
class UploadToPathAndRenameTestCase(TestCase):
"""
Tests for utils.UploadToPathAndRename
"""
def setUp(self):
self.upload_to_path_and_rename = utils.UploadToPathAndRename('test')
self.instance = DummyInstance()
def test_extension_preserved(self):
"""
Verify that UploadToPathAndRename preserves file extensions.
"""
result = self.upload_to_path_and_rename(self.instance, "filename.jpg")
ext = result.split('.')[-1]
self.assertEqual(ext, 'jpg', "New filename has wrong extension")
def test_path_appended(self):
"""
Verify that UploadToPathAndRename appends specified path.
"""
result = self.upload_to_path_and_rename(self.instance, "filename.jpg")
path = result.split('/')[0]
self.assertEqual(path, 'test', "New filename has wrong path")
def test_instance_with_no_pk(self):
"""
Verify handling when instance does not have a primary key
"""
result = self.upload_to_path_and_rename(self.instance, "filename.jpg")
generated_uuid_string = result.split('/')[1].split('.')[0]
generated_uuid = uuid.UUID(generated_uuid_string, version=4)
self.assertNotEqual(generated_uuid, self.instance.pk,
"New filename did not get a random UUID")
def test_instance_with_uuid_pk(self):
"""
Verify handling when instance has a UUID primary key
"""
self.instance.pk = uuid.uuid4() # pylint: disable=invalid-name
result = self.upload_to_path_and_rename(self.instance, "filename.jpg")
generated_uuid_string = result.split('/')[1].split('.')[0]
generated_uuid = uuid.UUID(generated_uuid_string, version=4)
self.assertEqual(generated_uuid, self.instance.pk,
"New filename does not match UUID of instance")
def test_insance_with_non_uuid_pk(self):
"""
Verify handling when instance has a non-UUID primary key
"""
self.instance.pk = "test"
with self.assertRaises(TypeError):
self.upload_to_path_and_rename(self.instance, "filename.jpg")
| mit | Python | |
dea182f5618f7590ee7e8fb6d2872ac60c6b6069 | Add setup.py to afni. | wanderine/nipype,blakedewey/nipype,mick-d/nipype_source,christianbrodbeck/nipype,sgiavasis/nipype,FredLoney/nipype,gerddie/nipype,grlee77/nipype,mick-d/nipype,dgellis90/nipype,fprados/nipype,FCP-INDI/nipype,satra/NiPypeold,fprados/nipype,arokem/nipype,carolFrohlich/nipype,mick-d/nipype,JohnGriffiths/nipype,JohnGriffiths/nipype,wanderine/nipype,arokem/nipype,wanderine/nipype,gerddie/nipype,carlohamalainen/nipype,JohnGriffiths/nipype,blakedewey/nipype,grlee77/nipype,blakedewey/nipype,mick-d/nipype_source,FredLoney/nipype,rameshvs/nipype,iglpdc/nipype,iglpdc/nipype,pearsonlab/nipype,rameshvs/nipype,glatard/nipype,rameshvs/nipype,Leoniela/nipype,carolFrohlich/nipype,sgiavasis/nipype,grlee77/nipype,FCP-INDI/nipype,dgellis90/nipype,dgellis90/nipype,mick-d/nipype_source,carlohamalainen/nipype,glatard/nipype,glatard/nipype,Leoniela/nipype,pearsonlab/nipype,mick-d/nipype,dmordom/nipype,fprados/nipype,dmordom/nipype,carlohamalainen/nipype,sgiavasis/nipype,FCP-INDI/nipype,Leoniela/nipype,glatard/nipype,JohnGriffiths/nipype,dgellis90/nipype,iglpdc/nipype,dmordom/nipype,christianbrodbeck/nipype,wanderine/nipype,rameshvs/nipype,gerddie/nipype,iglpdc/nipype,arokem/nipype,carolFrohlich/nipype,grlee77/nipype,satra/NiPypeold,pearsonlab/nipype,sgiavasis/nipype,gerddie/nipype,arokem/nipype,FredLoney/nipype,mick-d/nipype,blakedewey/nipype,carolFrohlich/nipype,FCP-INDI/nipype,pearsonlab/nipype | nipype/interfaces/afni/setup.py | nipype/interfaces/afni/setup.py | def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('afni', parent_package, top_path)
config.add_data_dir('tests')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
| bsd-3-clause | Python | |
fdf6dfbb7f82252cfe1e07719e3658fc529f47aa | Create hello-friend.py | CamFlawless/python_projects | baby-steps/hello-friend.py | baby-steps/hello-friend.py | print "hell0-fr1end"
| mit | Python | |
709ec407d333a624ff4476a81bc0f6ebb86f055a | add comments handle python file | LichAmnesia/MBSCM,LichAmnesia/MBSCM,LichAmnesia/MBSCM,LichAmnesia/MBSCM,LichAmnesia/MBSCM | data/Reddit/moderators_subreddit_comments.py | data/Reddit/moderators_subreddit_comments.py | # -*- coding: utf-8 -*-
# @Author: Lich_
# @Date: 2016-11-26 18:14:52
# @Last Modified by: LichAmnesia
# @Last Modified time: 2016-11-26 13:53:35
import json
import os
# generate the moderators from moderators file. the output is the moderators_subreddit file
def getmoderators():
file = open('moderators_subreddit.csv', 'w')
file.write('moderators' + '\t' + 'subreddit' + '\n')
with open('E:\\Workspace\\NetworkData\\moderators') as fileobject:
for line in fileobject:
js = json.loads(line.replace('\n',''))
moderators = js['moderators']
for moderator in moderators:
file.write(moderator['name'] + '\t' + js['subreddit'] + '\n')
# print(moderator['name'], js['subreddit'])
file.close()
# This is to generate the comments
# the line of the RC 2016-09 is 67,000,000 lines
def getComments():
moderators_set = set([])
with open('moderators_subreddit') as fileobject:
for line in fileobject:
moderators_set.add(line.split()[0])
cnt = 0
with open('E:\\Workspace\\NetworkData\\RC_2016-09', 'r') as fileobject:
with open('comments', 'w+') as file:
for line in fileobject:
js = json.loads(line.replace('\n',''))
author = js['author']
if author in moderators_set:
del js['body']
file.write(json.dumps(js) + '\n')
cnt += 1
if cnt % 1000000 == 0:
file.flush()
os.fsync(file)
print(cnt)
file.close()
getmoderators() | mit | Python | |
5c278ec8afe7fd97c0f3a4b45c0acc25706afd1e | add python_solve.py | pymor/dune-burgers-demo,pymor/dune-burgers-demo,pymor/dune-burgers-demo | dune-burgers/pymor-wrapper/python_solve.py | dune-burgers/pymor-wrapper/python_solve.py | import sys
from pymor.tools import mpi
from pymor.discretizations.mpi import mpi_wrap_discretization
from pymor.vectorarrays.mpi import MPIVectorArrayAutoComm
from dune_burgers import discretize_dune_burgers
filename = sys.argv[1]
exponent = float(sys.argv[2])
obj_id = mpi.call(mpi.function_call_manage, discretize_dune_burgers, filename)
d = mpi_wrap_discretization(obj_id, use_with=True, with_apply2=False, array_type=MPIVectorArrayAutoComm)
U = d.solve(exponent)
| bsd-2-clause | Python | |
b4729cfbff3bdf11da73436d4f927f0ffb9d1b40 | Add montage func | Eric89GXL/mnefun,drammock/mnefun,kambysese/mnefun,kambysese/mnefun,drammock/mnefun,LABSN/mnefun,ktavabi/mnefun,ktavabi/mnefun,Eric89GXL/mnefun,LABSN/mnefun | mnefun/misc.py | mnefun/misc.py | # -*- coding: utf-8 -*-
"""Miscellaneous utilities."""
import numpy as np
import mne
def make_montage(info, kind, check=False):
from . import _reorder
assert kind in ('mgh60', 'mgh70', 'uw_70', 'uw_60')
picks = mne.pick_types(info, meg=False, eeg=True, exclude=())
if kind in ('mgh60', 'mgh70'):
ch_names = mne.utils._clean_names(
[info['ch_names'][pick] for pick in picks], remove_whitespace=True)
if kind == 'mgh60':
assert len(ch_names) in (59, 60)
else:
assert len(ch_names) in (70,)
montage = mne.channels.read_montage(kind, ch_names=ch_names)
else:
ch_names = getattr(_reorder, 'ch_names_' + kind)
ch_names = ch_names
montage = mne.channels.read_montage('standard_1020', ch_names=ch_names)
assert len(montage.ch_names) == len(ch_names)
montage.ch_names = ['EEG%03d' % ii for ii in range(1, 61)]
sphere = mne.make_sphere_model('auto', 'auto', info)
montage.pos /= np.linalg.norm(montage.pos, axis=-1, keepdims=True)
montage.pos *= sphere.radius
montage.pos += sphere['r0']
info = mne.pick_info(info, picks)
eeg_pos = np.array([ch['loc'][:3] for ch in info['chs']])
assert len(eeg_pos) == len(montage.pos), (len(eeg_pos), len(montage.pos))
if check:
from mayavi import mlab
mlab.figure(size=(800, 800))
mlab.points3d(*sphere['r0'], scale_factor=2 * sphere.radius,
color=(0., 0., 1.), opacity=0.1, mode='sphere')
mlab.points3d(*montage.pos.T, scale_factor=0.01,
color=(1, 0, 0), mode='sphere', opacity=0.5)
mlab.points3d(*eeg_pos.T, scale_factor=0.005, color=(1, 1, 1),
mode='sphere', opacity=1)
return montage, sphere
| bsd-3-clause | Python | |
ded9c39402ca9cf7adfaaebbf06c196048d48db9 | Add presubmit check for run-bindings-tests | XiaosongWei/blink-crosswalk,PeterWangIntel/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,XiaosongWei/blink-crosswalk,Pluto-tv/blink-crosswalk,Bysmyyr/blink-crosswalk,modulexcite/blink,ondra-novak/blink,nwjs/blink,kurli/blink-crosswalk,PeterWangIntel/blink-crosswalk,Pluto-tv/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,nwjs/blink,PeterWangIntel/blink-crosswalk,kurli/blink-crosswalk,ondra-novak/blink,XiaosongWei/blink-crosswalk,jtg-gg/blink,hgl888/blink-crosswalk-efl,crosswalk-project/blink-crosswalk-efl,XiaosongWei/blink-crosswalk,nwjs/blink,Pluto-tv/blink-crosswalk,kurli/blink-crosswalk,smishenk/blink-crosswalk,PeterWangIntel/blink-crosswalk,Bysmyyr/blink-crosswalk,modulexcite/blink,Bysmyyr/blink-crosswalk,smishenk/blink-crosswalk,jtg-gg/blink,Pluto-tv/blink-crosswalk,modulexcite/blink,smishenk/blink-crosswalk,Pluto-tv/blink-crosswalk,modulexcite/blink,Bysmyyr/blink-crosswalk,jtg-gg/blink,hgl888/blink-crosswalk-efl,Pluto-tv/blink-crosswalk,hgl888/blink-crosswalk-efl,modulexcite/blink,hgl888/blink-crosswalk-efl,jtg-gg/blink,XiaosongWei/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,smishenk/blink-crosswalk,XiaosongWei/blink-crosswalk,modulexcite/blink,ondra-novak/blink,jtg-gg/blink,XiaosongWei/blink-crosswalk,PeterWangIntel/blink-crosswalk,Bysmyyr/blink-crosswalk,ondra-novak/blink,hgl888/blink-crosswalk-efl,Bysmyyr/blink-crosswalk,Bysmyyr/blink-crosswalk,PeterWangIntel/blink-crosswalk,nwjs/blink,nwjs/blink,hgl888/blink-crosswalk-efl,Bysmyyr/blink-crosswalk,Pluto-tv/blink-crosswalk,Bysmyyr/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,Pluto-tv/blink-crosswalk,kurli/blink-crosswalk,modulexcite/blink,hgl888/blink-crosswalk-efl,nwjs/blink,modulexcite/blink,jtg-gg/blink,PeterWangIntel/blink-crosswalk,nwjs/blink,crosswalk-project/blink-crosswalk-efl,hgl888/blink-crosswalk-efl,crosswalk-project/blink-crosswalk-efl,smishenk/blink-crosswalk,kurli/blink-cross
walk,nwjs/blink,ondra-novak/blink,kurli/blink-crosswalk,XiaosongWei/blink-crosswalk,smishenk/blink-crosswalk,PeterWangIntel/blink-crosswalk,kurli/blink-crosswalk,ondra-novak/blink,hgl888/blink-crosswalk-efl,jtg-gg/blink,kurli/blink-crosswalk,ondra-novak/blink,XiaosongWei/blink-crosswalk,kurli/blink-crosswalk,ondra-novak/blink,smishenk/blink-crosswalk,hgl888/blink-crosswalk-efl,PeterWangIntel/blink-crosswalk,PeterWangIntel/blink-crosswalk,Pluto-tv/blink-crosswalk,Pluto-tv/blink-crosswalk,modulexcite/blink,jtg-gg/blink,Bysmyyr/blink-crosswalk,modulexcite/blink,XiaosongWei/blink-crosswalk,jtg-gg/blink,smishenk/blink-crosswalk,nwjs/blink,smishenk/blink-crosswalk,ondra-novak/blink,kurli/blink-crosswalk,jtg-gg/blink,nwjs/blink,smishenk/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,crosswalk-project/blink-crosswalk-efl | Source/bindings/PRESUBMIT.py | Source/bindings/PRESUBMIT.py | # Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Blink bindings presubmit script
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
def _RunBindingsTests(input_api, output_api):
if input_api.is_committing:
message_type = output_api.PresubmitError
else:
message_type = output_api.PresubmitPromptWarning
pardir = input_api.os_path.pardir
run_bindings_tests_path = input_api.os_path.join(input_api.PresubmitLocalPath(), pardir, pardir, 'Tools', 'Scripts', 'run-bindings-tests')
cmd_name = 'run-bindings-tests'
if input_api.platform == 'win32':
# Windows needs some help.
cmd = [input_api.python_executable, run_bindings_tests_path]
else:
cmd = [run_bindings_tests_path]
test_cmd = input_api.Command(
name=cmd_name,
cmd=cmd,
kwargs={},
message=message_type)
if input_api.verbose:
print('Running ' + cmd_name)
return input_api.RunTests([test_cmd])
def CheckChangeOnUpload(input_api, output_api):
return _RunBindingsTests(input_api, output_api)
def CheckChangeOnCommit(input_api, output_api):
return _RunBindingsTests(input_api, output_api)
| bsd-3-clause | Python | |
e94d5f86d983f6b930d41abaed56cba05e5fa030 | test run_async | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/userreports/tests/test_async_indicators.py | corehq/apps/userreports/tests/test_async_indicators.py | from django.test import SimpleTestCase
from corehq.apps.userreports.models import DataSourceConfiguration
from corehq.apps.userreports.util import get_indicator_adapter
class RunAsynchronousTest(SimpleTestCase):
def _create_data_source_config(self, indicators=None):
default_indicator = [{
"type": "expression",
"column_id": "laugh_sound",
"datatype": "string",
"expression": {
'type': 'named',
'name': 'laugh_sound'
}
}]
return DataSourceConfiguration.wrap({
'display_name': 'Mother Indicators',
'doc_type': 'DataSourceConfiguration',
'domain': 'test',
'referenced_doc_type': 'CommCareCase',
'table_id': 'mother_indicators',
'configured_filter': {},
'configured_indicators': indicators or default_indicator
})
def test_async_not_configured(self):
indicator_configuration = self._create_data_source_config()
adapter = get_indicator_adapter(indicator_configuration)
self.assertFalse(adapter.run_asynchronous)
def test_async_configured(self):
indicator_configuration = self._create_data_source_config()
indicator_configuration.asynchronous = True
adapter = get_indicator_adapter(indicator_configuration)
self.assertTrue(adapter.run_asynchronous)
def test_related_doc_expression(self):
indicator_configuration = self._create_data_source_config([{
"datatype": "string",
"type": "expression",
"column_id": "confirmed_referral_target",
"expression": {
"type": "related_doc",
"related_doc_type": "CommCareUser",
"doc_id_expression": {
"type": "property_path",
"property_path": ["form", "meta", "userID"]
},
"value_expression": {
"type": "property_path",
"property_path": [
"user_data",
"confirmed_referral_target"
]
}
}
}])
adapter = get_indicator_adapter(indicator_configuration)
self.assertTrue(adapter.run_asynchronous)
| bsd-3-clause | Python | |
d9d8e68c92fc808e43cf5ffe897541738f90d428 | Add manage.py to support running project scripts | EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list | manage.py | manage.py | from flask_script import Manager
from app import app
manager = Manager(app)
if __name__ == "__main__":
manager.run() | mit | Python | |
52193599f1f6bc5acded2188c9e2016c52d26188 | Add HTML music table generator | GamerSymphonyOrchestra/gen_tools | music_table.py | music_table.py | #!/usr/bin/env python3
# Written by Michael Younkin in 2014. This work is in the public domain.
HELP_TEXT = '''
music_table.py
--------------
SYNOPSIS
python music_table.py -h
python music_table.py URL_PREFIX
DESCRIPTION
This program generates HTML for the GSO website's "listen" page. It will
accept text in CSV format from STDIN and writes the HTML table to STDOUT.
You can copy and paste the output directly onto the GSO website's listen
page.
SAMPLE INPUT
Piece,Game,Performers,Arrangers,Filename
Piece Name,Game Name,Michael,Younkin,my_piece
Piece2,Game2,Younkin,Michael,my_second_piece
OPTIONS
-h, --help print this message
EXAMPLES
All examples assume the bash shell. Commands will differ based on how piping
works in your particular environment.
cat 'music_data.csv' | python music_table.py "audio"> music_table.html
Send the file 'music_data.csv' to music_table.py via STDIN, save the output
of the script to 'music_table.html'. All URLs will be
"audio/<piece name without spaces>.wav" or ".mp3" (no < >).
'''
TABLE_START = '''
<table>
<thead>
<tr><th>Piece</th><th>Game</th><th>Performers</th><th>Arrangers</th><th>Download</th></tr>
</thead>
<tbody>
'''
TABLE_END = '''
</tbody>
</table>
'''
ROW_TEMPLATE = '''
<tr><td>{0}</td><td>{1}</td><td>{2}</td><td>{3}</td><td>{4}</td></tr>
'''
DOWNLOAD_TEMPLATE = '''
<ul>
<li><a href="{0}">High Quality (.wav)</a></li>
<li><a href="{1}">Low Quality (.mp3)</a></li>
</ul>
'''
from sys import argv,exit,stdin,stderr
import csv
def main():
if len(argv) != 2:
print_error('invalid number of arguments')
print_help()
exit(1)
if argv[1] == '-h' or argv[1] == '--help':
print_help()
exit(0)
download_prefix = argv[1]
music_data = csv.reader(stdin)
next(music_data)
print_table_start()
for row in music_data:
print_table_row(row, download_prefix)
print_table_end()
def print_error(msg):
print('Error: {0}\n'.format(msg))
def print_help():
print(HELP_TEXT)
def print_table_start():
print(TABLE_START)
def print_table_row(row, dirname):
download_html = get_download_html(row, dirname)
format_args = row[:-1]
format_args.append(download_html)
row_str = ROW_TEMPLATE.format(*format_args)
print(row_str)
def print_table_end():
print(TABLE_END)
def get_download_html(row, download_prefix):
basename = row[4]
filename = '{0}/{1}'.format(download_prefix, basename)
high_url = filename + '.wav'
low_url = filename + '.mp3'
return DOWNLOAD_TEMPLATE.format(high_url, low_url)
main()
| mit | Python | |
fbd179d9d22a2eef6c2fb24152a441b85133e556 | Add missing component of previous commit | softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat | lowfat/utils.py | lowfat/utils.py | """
This module contains small utility classes and functions which do not clearly belong to one part of the project.
"""
import enum
class ChoicesEnum(enum.Enum):
"""
Abstract Enum class to represent values in a Django CharField choices.
"""
@classmethod
def choices(cls):
"""
Get the list of choices for this class.
The name of the enum field is used as the human readable name.
The value of the enum field is stored in the database.
"""
return tuple((tag.value, tag.name) for tag in cls)
| bsd-3-clause | Python | |
9199563bb21276102ca58dc6b7c99f593db863d4 | Add test for call_actions_during_tree_build | igordejanovic/parglare,igordejanovic/parglare | tests/func/test_build_tree.py | tests/func/test_build_tree.py | import pytest # noqa
from parglare import Grammar, Parser
def test_call_actions_during_tree_build():
grammar = """
Program: "begin" MoveCommand* "end";
MoveCommand: "move" Direction;
Direction: "up" | "down" | "left" | "right";
"""
g = Grammar.from_string(grammar)
code = """
begin
move left
move left
move up
move down
end
"""
left_moves = []
def left_dir_collector(_, nodes):
"""Finds all 'left' moves and adds them into a list."""
term = nodes[0]
if term.value == "left":
left_moves.append(term)
parser = Parser(g, build_tree=True,
actions={"Direction": left_dir_collector})
parser.parse(code)
# call_actions_during_tree_build is False by default, so left_dir_collector
# will not be called.
assert len(left_moves) == 0
parser.call_actions_during_tree_build = True
parser.parse(code)
assert len(left_moves) == 2
| mit | Python | |
9140b3249820d0dd86f7f85270327d9264841b50 | Test for selecting mysql search backend | p/wolis-phpbb,p/wolis-phpbb | tests/search_backend_mysql.py | tests/search_backend_mysql.py | from wolis.test_case import WolisTestCase
from wolis import utils
class SearchBackendMysqlTest(WolisTestCase):
@utils.restrict_database('mysql*')
@utils.restrict_phpbb_version('>=3.1.0')
def test_set_search_backend(self):
self.login('morpheus', 'morpheus')
self.acp_login('morpheus', 'morpheus')
self.change_acp_knob(
link_text='Search settings',
check_page_text='Here you can define what search backend will be used',
name='config[search_type]',
value='phpbb_search_fulltext_mysql',
confirm=True,
)
if __name__ == '__main__':
import unittest
unittest.main()
| bsd-2-clause | Python | |
3c3fe1e1f1884df47157c71584b7b6087bde7f10 | add owner check, code borrowed from RoboDanny | mikevb1/lagbot,mikevb1/discordbot | cogs/utils/checks.py | cogs/utils/checks.py | from discord.ext import commands
def is_owner_check(message):
return message.author.id == '103714384802480128'
def is_owner():
return commands.check(lambda ctx: is_owner_check(ctx.message))
| mit | Python | |
ee88f6927dee820318bc821081138374b8f754f2 | Create data_functions.py | BCCN-Prog/database | data_functions.py | data_functions.py | import pandas as pd
import numpy as np
def load_data(path):
'''
(str) -> (pandas.DataFrame)
Loads the database and cleans the whitespace in STATIONS_ID.
IMPORTANT: This function assumes you have the database stored in a text file in the directory.
'''
data = pd.read_csv(path, index_col = 2)
data["STATIONS_ID"] = data["STATIONS_ID"].str.replace(' ', '')
data["STATIONS_ID"] = data["STATIONS_ID"].convert_objects(convert_numeric=True)
return data
def get_data(data, station_id, category = 3):
"""
(pandas.Dataframe, int, list) -> (pandas.DataFrame)
Returns desired information from the database about requested city and categories. Index is based on and sorted by date.
station_id: The code for the requested city/station
category: Can be an int or a list of desired variable(s). By default gets the air temperature.
The codes for variables:
0: Numerical Index
1: STATIONS_ID
2: QUALITAETS_NIVEAU
3: Air Temperature / LUFTTEMPERATUR
4: DAMPFDRUCK
5: BEDECKUNGSGRAD
6: LUFTDRUCK_STATIONSHOEHE
7: REL_FEUCHTE
8: WINDGESCHWINDIGKEIT
9: Max Air Temperature
10: Min Air Temperature
11: LUFTTEMP_AM_ERDB_MINIMUM (?)
12: Max Wind Speed / WINDSPITZE_MAXIMUM
13: Precipitation Height / NIEDERSCHLAGSHOEHE (?)
14: NIEDERSCHLAGSHOEHE_IND (?)
15: Sunshine Duration
16: Snow Height
"""
rlv_station = data[data.iloc[:, 1] == station_id]
selected = rlv_station.iloc[:, category]
return selected
| bsd-3-clause | Python | |
82906d8dabfd551c997569f2f36ecdfc1ef3057f | Create duplicates.py | creativcoder/AlgorithmicProblems,creativcoder/AlgorithmicProblems,creativcoder/AlgorithmicProblems,creativcoder/AlgorithmicProblems | Python/duplicates.py | Python/duplicates.py | #The rem_dep method removes the duplicate values while maintaining the original order of the list.
def rem_dup(values):
output=[]
seen=set()
for val in values:
if val not in seen:
output.append(val)
seen.add(val)
return output
#sample list to test code.
values=[3,4,5,6,4,3,44,5,32,4,35,4,4,5,5]
print rem_dup(values)
| mit | Python | |
8611ea1e23b8958be98a6d5c15bd66f08e46859f | Handle connection error when sending event to consul. | meerkat-code/meerkat_libs | meerkat_libs/consul_client/__init__.py | meerkat_libs/consul_client/__init__.py | import json
import logging
import jwt
import collections
from os import environ
import backoff as backoff
import requests
from meerkat_libs import authenticate
CONSUL_URL = environ.get("CONSUL_URL", "http://nginx/consul")
SUBMISSIONS_BUFFER_SIZE = environ.get("CONSUL_SUBMISSIONS_BUFFER_SIZE", 1000)
DHIS2_EXPORT_ENABLED = environ.get("DHIS2_EXPORT_ENABLED", False)
events_buffer = collections.defaultdict(list)
def send_dhis2_events(uuid=None, raw_row=None, form_id=None, auth_token=None):
if not DHIS2_EXPORT_ENABLED:
return
if not auth_token:
logging.error("No authentication token provided.")
return
global events_buffer
upload_payload = {'token': '', 'content': 'record', 'formId': form_id, 'formVersion': '',
'data': raw_row,
'uuid': uuid
}
# TODO: Should md5 be generated here?
md5_of_body = ""
events_buffer[form_id].append(
{
'MessageId': uuid,
'ReceiptHandle': 'test-receipt-handle-1',
'MD5OfBody': md5_of_body,
'Body': upload_payload,
'Attributes': {
'test-attribute': 'test-attribute-value'
}
}
)
if len(events_buffer[form_id]) > SUBMISSIONS_BUFFER_SIZE:
logging.info("Sending batch of events to consul.")
__send_events_from_buffer(form_id=form_id, auth_token=auth_token)
def flush_dhis2_events(auth_token=None):
if not DHIS2_EXPORT_ENABLED:
return
if not auth_token:
logging.error("No authentication token provided.")
return
for form_id in events_buffer:
logging.info("Clearing Consul Client event buffer for %s.", form_id)
__send_events_from_buffer(form_id=form_id, auth_token=auth_token)
def __send_events_from_buffer(form_id=None, auth_token=None):
global events_buffer
json_payload = json.dumps(
{"formId": form_id,
"Messages": events_buffer[form_id]}
)
try:
requests.post(CONSUL_URL + "/dhis2/export/submissions", headers=_auth_headers(auth_token), json=json_payload)
events_buffer[form_id] = []
except requests.exceptions.ChunkedEncodingError:
logging.error("Failed to send chunk of events. Count %i", len(events_buffer[form_id]))
def _auth_headers(token):
return {'authorization': f"Bearer {token}"}
| import json
import logging
import jwt
import collections
from os import environ
import backoff as backoff
import requests
from meerkat_libs import authenticate
CONSUL_URL = environ.get("CONSUL_URL", "http://nginx/consul")
SUBMISSIONS_BUFFER_SIZE = environ.get("CONSUL_SUBMISSIONS_BUFFER_SIZE", 1000)
DHIS2_EXPORT_ENABLED = environ.get("DHIS2_EXPORT_ENABLED", False)
events_buffer = collections.defaultdict(list)
def send_dhis2_events(uuid=None, raw_row=None, form_id=None, auth_token=None):
if not DHIS2_EXPORT_ENABLED:
return
if not auth_token:
logging.error("No authentication token provided.")
return
global events_buffer
upload_payload = {'token': '', 'content': 'record', 'formId': form_id, 'formVersion': '',
'data': raw_row,
'uuid': uuid
}
# TODO: Should md5 be generated here?
md5_of_body = ""
events_buffer[form_id].append(
{
'MessageId': uuid,
'ReceiptHandle': 'test-receipt-handle-1',
'MD5OfBody': md5_of_body,
'Body': upload_payload,
'Attributes': {
'test-attribute': 'test-attribute-value'
}
}
)
if len(events_buffer[form_id]) > SUBMISSIONS_BUFFER_SIZE:
logging.info("Sending batch of events to consul.")
__send_events_from_buffer(form_id=form_id, auth_token=auth_token)
def flush_dhis2_events(auth_token=None):
if not DHIS2_EXPORT_ENABLED:
return
if not auth_token:
logging.error("No authentication token provided.")
return
for form_id in events_buffer:
logging.info("Clearing Consul Client event buffer for %s.", form_id)
__send_events_from_buffer(form_id=form_id, auth_token=auth_token)
def __send_events_from_buffer(form_id=None, auth_token=None):
global events_buffer
json_payload = json.dumps(
{"formId": form_id,
"Messages": events_buffer[form_id]}
)
requests.post(CONSUL_URL + "/dhis2/export/submissions", headers=_auth_headers(auth_token), json=json_payload)
events_buffer[form_id] = []
def _auth_headers(token):
return {'authorization': f"Bearer {token}"}
| mit | Python |
f666560f9ffa2323b8a125e3ad3d3faf6bd5b3de | add command to grant sms gateway permissions | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/smsbillables/management/commands/add_sms_gateway_permissions.py | corehq/apps/smsbillables/management/commands/add_sms_gateway_permissions.py | from django.contrib.auth.models import User
from django.core.management import BaseCommand
from django_prbac.models import Grant, Role, UserRole
from corehq import privileges
class Command(BaseCommand):
help = 'Grants the user(s) specified the privilege to access global sms gateways'
def add_arguments(self, parser):
parser.add_argument(
'usernames',
nargs="*",
)
parser.add_argument(
'--remove-user',
action='store_true',
default=False,
help='Remove the users specified from the DEV_SUPPORT_TEAM privilege',
)
def handle(self, usernames, **options):
dev_support_role = Role.objects.get_or_create(
name="Dimagi Dev and Support Team",
slug=privileges.DEV_SUPPORT_TEAM,
)[0]
global_sms_gateway_access = Role.objects.get_or_create(
name="Accounting Admin",
slug=privileges.GLOBAL_SMS_GATEWAY,
)[0]
if not dev_support_role.has_privilege(global_sms_gateway_access):
Grant.objects.create(
from_role=dev_support_role,
to_role=global_sms_gateway_access,
)
remove_user = options['remove_user']
for username in usernames:
try:
user = User.objects.get(username=username)
try:
user_role = UserRole.objects.get(user=user)
except UserRole.DoesNotExist:
user_privs = Role.objects.get_or_create(
name="Privileges for %s" % user.username,
slug="%s_privileges" % user.username,
)[0]
user_role = UserRole.objects.create(
user=user,
role=user_privs,
)
if remove_user:
try:
# remove grant object
grant = Grant.objects.get(
from_role=user_role.role,
to_role=dev_support_role
)
grant.delete()
print("Removed %s from the operations team"
% user.username)
except Grant.DoesNotExist:
print("The user %s was never part of the operations "
"team. Leaving alone." % user.username)
elif not user_role.has_privilege(dev_support_role):
Grant.objects.create(
from_role=user_role.role,
to_role=dev_support_role,
)
print("Added %s to the Dev and Support team" % user.username)
else:
print("User %s is already part of the Dev and Support team"
% user.username)
except User.DoesNotExist:
print("User %s does not exist" % username)
| bsd-3-clause | Python | |
ff82f56b8ea901a30478b11a61f8ca52b23346bd | Add a test case for guessing the BuildDir associated with a subdirectory argument. | azverkan/scons,datalogics-robb/scons,datalogics/scons,datalogics/scons,datalogics-robb/scons,azverkan/scons,azverkan/scons,azverkan/scons,datalogics-robb/scons,azverkan/scons,datalogics/scons,datalogics/scons,datalogics-robb/scons | test/BuildDir/guess-subdir.py | test/BuildDir/guess-subdir.py | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test that the logic that "guesses" the associated BuildDir for a
subdirectory correctly builds targets in the BuildDir subdirectory.
"""
import TestSCons
test = TestSCons.TestSCons()
test.subdir(['work'], ['work', 'src'])
test.write(['work', 'SConstruct'], """
c_builddir = r'%s'
BuildDir(c_builddir, '.', duplicate=0)
SConscript(c_builddir + '/SConscript')
""" % test.workpath('debug'))
test.write(['work', 'SConscript'], """
SConscript('src/SConscript')
""")
test.write(['work', 'src', 'SConscript'], """
env = Environment(OBJSUFFIX='.obj',
PROGSUFFIX='.exe')
env.Program('test.cpp')
""")
test.write(['work', 'src', 'test.cpp'], """\
#include <stdio.h>
#include <stdlib.h>
int
main(int argc, char *argv[])
{
printf("work/src/test.cpp\\n");
}
""")
test.run(chdir = 'work', arguments = '.')
test.must_exist(test.workpath('debug', 'src', 'test.obj'))
test.must_exist(test.workpath('debug', 'src', 'test.exe'))
test.pass_test()
| mit | Python | |
88e5b5117c747f21cc868503d2e5c123ca976585 | Add tests for decorators | indirectlylit/kolibri,learningequality/kolibri,indirectlylit/kolibri,learningequality/kolibri,learningequality/kolibri,indirectlylit/kolibri,learningequality/kolibri,indirectlylit/kolibri | kolibri/core/tasks/test/test_decorators.py | kolibri/core/tasks/test/test_decorators.py | import pytest
from kolibri.core.tasks.decorators import task
from kolibri.core.tasks.exceptions import FunctionNotRegisteredAsJob
from kolibri.core.tasks.job import JobRegistry
from kolibri.core.tasks.job import RegisteredJob
from kolibri.core.tasks.utils import stringify_func
@pytest.fixture
def registered_jobs():
    """Yield the global job registry, cleared both before and after the test."""
    JobRegistry.REGISTERED_JOBS.clear()
    yield JobRegistry.REGISTERED_JOBS
    JobRegistry.REGISTERED_JOBS.clear()
class TestTaskDecorators(object):
    """Exercise the @task.register / @task.config decorators against a
    clean registry (see the ``registered_jobs`` fixture)."""

    def test_task_register_without_args(self, registered_jobs):
        """Both bare ``@task.register`` and ``@task.register()`` register the function."""
        @task.register
        def add(x, y):
            return x + y

        @task.register()
        def subtract(x, y):
            return x - y

        add_funcstr = stringify_func(add)
        subtract_funcstr = stringify_func(subtract)

        assert isinstance(registered_jobs[add_funcstr], RegisteredJob)
        assert isinstance(registered_jobs[subtract_funcstr], RegisteredJob)

    def test_task_register_with_args(self, registered_jobs):
        """Keyword arguments to register are stored on the function's ``task``."""
        @task.register(
            job_id="test", validator=id, permission=id, priority=task.priority.HIGH
        )
        def add(x, y):
            return x + y

        add_funcstr = stringify_func(add)

        assert isinstance(registered_jobs[add_funcstr], RegisteredJob)
        assert add.task.job_id == "test"
        assert add.task.validator == id
        assert add.task.permission == id
        assert add.task.priority == task.priority.HIGH

    def test_task_config_without_args(self, registered_jobs):
        """Bare ``@task.config`` leaves the registered defaults in place."""
        @task.config
        @task.register
        def add(x, y):
            return x + y

        @task.config()
        @task.register
        def subtract(x, y):
            return x - y

        assert add.task.group is None
        assert add.task.track_progress is False
        assert add.task.cancellable is False

        assert subtract.task.group is None
        assert subtract.task.track_progress is False
        assert subtract.task.cancellable is False

    def test_config_with_args(self, registered_jobs):
        """Keyword arguments to config override the registered defaults."""
        @task.config(group="math", cancellable=True, track_progress=True)
        @task.register
        def add(x, y):
            return x + y

        assert add.task.group == "math"
        assert add.task.cancellable is True
        assert add.task.track_progress is True

    def test_task_config_without_register(self, registered_jobs):
        """``@task.config`` on an unregistered function must raise."""
        with pytest.raises(FunctionNotRegisteredAsJob):

            @task.config
            def add(x, y):
                return x + y

    def test_task_register_config_preserves_functionality(self, registered_jobs):
        """The decorated function is still callable as a plain function."""
        @task.config
        @task.register
        def add(x, y):
            return x + y

        assert add(2, 40) == 42
| mit | Python | |
a6be0447e07d388f5dc4942d7f9e391366185c78 | Create solution.py | lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges | leetcode/easy/count_and_say/py/solution.py | leetcode/easy/count_and_say/py/solution.py | class Solution(object):
def countAndSay(self, n):
"""
:type n: int
:rtype: str
"""
s = '1'
while n > 1:
t = ''
cnt = 1
for i in range(len(s)):
if i + 1 < len(s) and s[i] == s[i + 1]:
cnt += 1
else:
t += str(cnt) + s[i]
cnt = 1
s = t
n -= 1
return s
| mit | Python | |
ff187730fa1ebd64984dbb6e91a8f04edae84548 | Introduce module for CLI commands; implement data generating command | shudmi/ngx-task | ngx_task/cli.py | ngx_task/cli.py | import os
from concurrent.futures import ThreadPoolExecutor, as_completed
from ngx_task import settings, utils
def generate_data():
if not os.path.exists(settings.DATA_DIR):
os.mkdir(settings.DATA_DIR, 0o755)
files_to_submit = ['arc-{}.zip'.format(arc_num) for arc_num in range(1, 51)]
with ThreadPoolExecutor() as pool:
futures_to_process = {pool.submit(utils.archive_documents, filename): filename
for filename in files_to_submit}
for fut in as_completed(futures_to_process):
print('Complete {}'.format(futures_to_process[fut]))
print('All data has been generated')
| apache-2.0 | Python | |
4ccf9b6135ca5c6317502ffd663d5de4d180eea3 | Add migration for commit b52f572646f30a8a4f2fc2bec6fc31c8f498f33f | alexandrovteam/curatr,alexandrovteam/curatr,alexandrovteam/curatr,alexandrovteam/curatr,alexandrovteam/curatr | mcf_standard_browser/standards_review/migrations/0032_auto_20160307_1802.py | mcf_standard_browser/standards_review/migrations/0032_auto_20160307_1802.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.3 on 2016-03-07 18:02
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds text/float metadata fields to
    # Dataset, FragmentationSpectrum and Xic, and makes Standard.molecule a
    # required (default=None) ForeignKey.
    # NOTE(review): 'intrument' presumably mirrors a typo in the model
    # field name -- do not correct it here without a matching rename
    # migration.

    dependencies = [
        ('standards_review', '0031_auto_20160209_1255'),
    ]

    operations = [
        migrations.AddField(
            model_name='dataset',
            name='intrument',
            field=models.TextField(default=''),
        ),
        migrations.AddField(
            model_name='fragmentationspectrum',
            name='collision_energy',
            field=models.TextField(default=''),
        ),
        migrations.AddField(
            model_name='fragmentationspectrum',
            name='ms1_intensity',
            field=models.FloatField(default=0.0),
        ),
        migrations.AddField(
            model_name='xic',
            name='collision',
            field=models.TextField(default=''),
        ),
        migrations.AlterField(
            model_name='standard',
            name='molecule',
            field=models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE,
                to='standards_review.Molecule'),
        ),
    ]
| apache-2.0 | Python | |
bc5105d7e8263bcaf0be8cc88bff8438fa1972a4 | add import script for ryedale | chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,andylolz/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_ryedale.py | polling_stations/apps/data_collection/management/commands/import_ryedale.py | """
Imports Ryedale
"""
from django.contrib.gis.geos import Point, GEOSGeometry
from data_collection.management.commands import BaseKamlImporter
class Command(BaseKamlImporter):
    """
    Imports the Polling Station data from Ryedale Council
    """
    council_id = 'E07000167'
    districts_name = 'Thirsk_and_Malton_Wards.kml'
    stations_name = 'Polling Stations Malton and Thirsk 07 05 2015.csv'

    def district_record_to_dict(self, record):
        """Convert one KML district record into the importer's dict format."""
        # this kml has no altitude co-ordinates so the data is ok as it stands
        geojson = record.geom.geojson

        # The SRID for the KML is 4326 but the CSV is 2770 so we
        # set it each time we create the polygon.
        # We could probably do with a more elegant way of doing
        # this longer term.
        # (self.srid is swapped to 4326 for the duration of this call and
        # restored afterwards via self._srid.)
        self._srid = self.srid
        self.srid = 4326
        poly = self.clean_poly(GEOSGeometry(geojson, srid=self.srid))
        self.srid = self._srid

        return {
            'internal_council_id': record['Name'].value,
            'name' : record['Name'].value,
            'area' : poly
        }

    def station_record_to_dict(self, record):
        """Convert one CSV polling-station record into the importer's dict format."""
        # NOTE(review): ``record.northin`` presumably mirrors a truncated
        # "northing" column header in the CSV -- confirm against the file.
        location = Point(float(record.easting), float(record.northin), srid=self.srid)
        address_parts = record.address.split(' ')
        # Drop the last two whitespace-separated tokens of the address
        # field (presumably the postcode -- confirm with the data).
        address = ' '.join(address_parts[:-2])

        return {
            'internal_council_id': record.postcode, # no id supplied, so we'll use the postcode
            'postcode': record.postcode,
            'address': address,
            'location': location
        }
| bsd-3-clause | Python | |
d44bf960aa597e5a38fbc2f8f7dd18fbd704cf7c | add nova start vm to recovery | fs714/drcontroller | drcontroller/recovery/nova_start_vm.py | drcontroller/recovery/nova_start_vm.py | import novaclient.v1_1.client as novaclient
import ConfigParser
def start_vm(server_id):
    """Start the Nova server *server_id* using credentials from the DR config file."""
    config = ConfigParser.ConfigParser()
    config.read("/home/eshufan/projects/drcontroller/drcontroller/conf/set.conf")
    # Credentials for the disaster-recovery OpenStack endpoint, read from
    # the [drc] section of the config file.
    credentials = {
        'auth_url': config.get("drc", "auth_url"),
        'username': config.get("drc", "user"),
        'api_key': config.get("drc", "password"),
        'project_id': config.get("drc", "tenant_name"),
    }
    novaclient.Client(**credentials).servers.start(server_id)
| apache-2.0 | Python | |
cb0a5480a7c198a34069e4e65707c18f0ee6b7b9 | Add catalan_numbers.py (#4455) | TheAlgorithms/Python | dynamic_programming/catalan_numbers.py | dynamic_programming/catalan_numbers.py | """
Print all the Catalan numbers from 0 to n, n being the user input.
* The Catalan numbers are a sequence of positive integers that
* appear in many counting problems in combinatorics [1]. Such
* problems include counting [2]:
* - The number of Dyck words of length 2n
* - The number well-formed expressions with n pairs of parentheses
* (e.g., `()()` is valid but `())(` is not)
* - The number of different ways n + 1 factors can be completely
* parenthesized (e.g., for n = 2, C(n) = 2 and (ab)c and a(bc)
* are the two valid ways to parenthesize.
* - The number of full binary trees with n + 1 leaves
* A Catalan number satisfies the following recurrence relation
* which we will use in this algorithm [1].
* C(0) = C(1) = 1
* C(n) = sum(C(i).C(n-i-1)), from i = 0 to n-1
* In addition, the n-th Catalan number can be calculated using
* the closed form formula below [1]:
* C(n) = (1 / (n + 1)) * (2n choose n)
* Sources:
* [1] https://brilliant.org/wiki/catalan-numbers/
* [2] https://en.wikipedia.org/wiki/Catalan_number
"""
def catalan_numbers(upper_limit: int) -> "list[int]":
    """
    Return a list of the Catalan number sequence from 0 through `upper_limit`.

    >>> catalan_numbers(5)
    [1, 1, 2, 5, 14, 42]
    >>> catalan_numbers(2)
    [1, 1, 2]
    >>> catalan_numbers(-1)
    Traceback (most recent call last):
    ValueError: Limit for the Catalan sequence must be ≥ 0
    """
    if upper_limit < 0:
        raise ValueError("Limit for the Catalan sequence must be ≥ 0")

    # Grow the sequence one term at a time.  Each new term is the discrete
    # convolution of the terms computed so far:
    #   C(0) = 1;  C(i) = sum(C(j) * C(i - j - 1)) for j in 0..i-1
    sequence = [1]
    for i in range(1, upper_limit + 1):
        sequence.append(sum(sequence[j] * sequence[i - j - 1] for j in range(i)))
    return sequence
if __name__ == "__main__":
    # Interactive driver: repeatedly read an upper limit and print the
    # sequence; -1 (or any negative number) exits, non-numeric input aborts.
    print("\n********* Catalan Numbers Using Dynamic Programming ************\n")
    print("\n*** Enter -1 at any time to quit ***")
    print("\nEnter the upper limit (≥ 0) for the Catalan number sequence: ", end="")
    try:
        while True:
            N = int(input().strip())
            if N < 0:
                print("\n********* Goodbye!! ************")
                break
            else:
                print(f"The Catalan numbers from 0 through {N} are:")
                print(catalan_numbers(N))
                print("Try another upper limit for the sequence: ", end="")
    except (NameError, ValueError):
        # NameError is caught for Python 2's input(), where a bare word
        # would be evaluated; ValueError covers non-integer input.
        print("\n********* Invalid input, goodbye! ************\n")

    import doctest

    doctest.testmod()
| mit | Python | |
6d03266160ce95a41b94561d707e399df78aae14 | Add sanity test case comm_wifi_connect | ostroproject/meta-iotqa,wanghongjuan/meta-iotqa-1,daweiwu/meta-iotqa-1,daweiwu/meta-iotqa-1,ostroproject/meta-iotqa,daweiwu/meta-iotqa-1,daweiwu/meta-iotqa-1,wanghongjuan/meta-iotqa-1,wanghongjuan/meta-iotqa-1,ostroproject/meta-iotqa,ostroproject/meta-iotqa,wanghongjuan/meta-iotqa-1,wanghongjuan/meta-iotqa-1,daweiwu/meta-iotqa-1,ostroproject/meta-iotqa | lib/oeqa/runtime/sanity/comm_wifi_connect.py | lib/oeqa/runtime/sanity/comm_wifi_connect.py | import time
from oeqa.oetest import oeRuntimeTest
class CommWiFiTest(oeRuntimeTest):
    '''WiFi test by connmanctl'''
    def test_wifi_connect_nopassword(self):
        '''connmanctl to connect a no-password wifi AP'''
        # un-block software rfkill lock
        self.target.run('rfkill unblock all')
        # Enable WiFi
        (status, output) = self.target.run('connmanctl enable wifi')
        self.assertEqual(status, 0, msg="Error messages: %s" % output)
        # Scan nearby to get service of Guest
        # (assumes an open AP whose name contains "Guest" is in range)
        (status, output) = self.target.run('connmanctl scan wifi')
        self.assertEqual(status, 0, msg="Error messages: %s" % output)
        (status, services) = self.target.run("connmanctl services | grep Guest | awk '{print $NF}'")
        self.assertEqual(status, 0, msg="Not found AP service for Guest")
        # Do connection
        (status, output) = self.target.run('connmanctl disconnect %s' % services)
        time.sleep(2)
        i = 0
        # Retry the connect up to 4 more times until the output reports
        # "Connected "; the assert below then checks the last attempt.
        (status, output) = self.target.run('connmanctl connect %s' % services)
        while ( 'Connected ' not in output ):
            (status, output) = self.target.run('connmanctl connect %s' % services)
            i = i + 1
            if (i == 4):
                break
        self.assertEqual(status, 0, msg="Error messages: %s" % output)
        time.sleep(15)
        # Check ip address by ifconfig command
        # NOTE(review): the interface name wlp2s0 is hard-coded -- confirm
        # it matches the target hardware.
        (status, output) = self.target.run("ifconfig wlp2s0 | grep 'inet addr:'")
        self.assertEqual(status, 0, msg="Error messages: %s" % output)
| mit | Python | |
2cc6edec8295a216261fff09388a35e0805f474c | Add test to validate service names | pplu/botocore,boto/botocore | tests/functional/test_service_names.py | tests/functional/test_service_names.py | # Copyright 2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import re
from nose.tools import assert_true
from botocore.session import get_session
# Service names exempt from the validation rules below.
BLACKLIST = [
    'runtime.sagemaker',
]


# Service names are limited to 50 characters as that seems like a
# reasonable limit in the general case. Services can be added to the
# blacklist above to be given an exception.
VALID_NAME_REGEX = re.compile(
    (
        '[a-z]' # Starts with a letter
        '[a-z0-9]*' # Followed by any number of letters or digits
        '(-[a-z0-9]+)*$' # Dashes are allowed as long as they aren't
                         # consecutive or at the end
    ), re.M)
VALID_NAME_EXPLANATION = (
    'Service names must be made up entirely of lowercase alphanumeric '
    'characters and dashes. The name must start with a letter and may not end '
    'with a dash'
)
MIN_SERVICE_NAME_LENGTH = 2
MAX_SERVICE_NAME_LENGTH = 50
def _assert_name_length(service_name):
    """Check that a non-blacklisted service name is within the length bounds."""
    if service_name in BLACKLIST:
        return
    name_length = len(service_name)
    assert_true(name_length >= MIN_SERVICE_NAME_LENGTH,
                'Service name must be greater than or equal to 2 '
                'characters in length.')
    assert_true(name_length <= MAX_SERVICE_NAME_LENGTH,
                'Service name must be less than or equal to 50 '
                'characters in length.')
def _assert_name_pattern(service_name):
    """Check that a non-blacklisted service name matches the naming pattern."""
    if service_name in BLACKLIST:
        return
    assert_true(VALID_NAME_REGEX.match(service_name) is not None,
                VALID_NAME_EXPLANATION)
def test_service_names_are_valid():
    """Yield a length check and a pattern check for every available service."""
    loader = get_session().get_component('data_loader')
    for name in loader.list_available_services('service-2'):
        yield _assert_name_length, name
        yield _assert_name_pattern, name
| apache-2.0 | Python | |
b63986d2ce2f7ac24cd78c7b5971878d9b1a841a | Add remaining integration tests | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | tests/integration/modules/mac_ports.py | tests/integration/modules/mac_ports.py | # -*- coding: utf-8 -*-
'''
integration tests for mac_ports
'''
# Import python libs
from __future__ import absolute_import, print_function
# Import Salt Testing libs
from salttesting.helpers import ensure_in_syspath, destructiveTest
ensure_in_syspath('../../')
# Import salt libs
import integration
import salt.utils
class MacPortsModuleTest(integration.ModuleCase):
'''
Validate the mac_ports module
'''
def setUp(self):
'''
Get current settings
'''
if not salt.utils.is_darwin():
self.skipTest('Test only available on Mac OS X')
if not salt.utils.which('port'):
self.skipTest('Test requires port binary')
if salt.utils.get_uid(salt.utils.get_user()) != 0:
self.skipTest('Test requires root')
def tearDown(self):
'''
Reset to original settings
'''
self.run_function('pkg.remove', [''], pkgs='["agree","cowsay","chef"]')
def test_list_pkgs(self):
'''
Test pkg.list_pkgs
'''
self.run_function('pkg.install', [''], pkgs='["agree","cowsay","chef"]')
self.assertIsInstance(self.run_function('pkg.list_pkgs'), dict)
self.assertIn('agree', self.run_function('pkg.list_pkgs'))
def test_latest_version(self):
'''
Test pkg.latest_version
'''
self.run_function('pkg.install', [''], pkgs='["agree","cowsay","chef"]')
self.assertIsInstance(
self.run_function('pkg.latest_version', ['agree']), dict)
self.assertIn(
'agree',
self.run_function('pkg.latest_version', ['agree']))
def test_remove(self):
'''
Test pkg.remove
'''
self.run_function('pkg.install', [''], pkgs='["agree","cowsay","chef"]')
removed = self.run_function('pkg.remove', ['agree'])
self.assertIsInstance(removed, dict)
self.assertIn('agree', removed)
def test_install(self):
'''
Test pkg.install
'''
self.run_function('pkg.remove', [''], pkgs='["agree","cowsay","chef"]')
installed = self.run_function('pkg.install', ['agree'])
self.assertIsInstance(installed, dict)
self.assertIn('agree', installed)
def test_list_upgrades(self):
'''
Test pkg.list_upgrades
'''
self.assertIsInstance(self.run_function('pkg.list_upgrades'), dict)
def test_upgrade_available(self):
'''
Test pkg.upgrade_available
'''
self.run_function('pkg.install', ['agree'])
self.assertFalse(self.run_function('pkg.upgrade_available', ['agree']))
def test_refresh_db(self):
'''
Test pkg.refresh_db
'''
self.assertTrue(self.run_function('pkg.refresh_db'))
def test_upgrade(self):
'''
Test pkg.upgrade
'''
results = self.run_function('pkg.upgrade')
self.assertIsInstance(results, dict)
self.assertTrue(results['result'])
if __name__ == '__main__':
from integration import run_tests
run_tests(MacPortsModuleTest)
| apache-2.0 | Python | |
6ad9b8e65562c00607fe0fe9f92cdba3c022ef2b | Add initial version Teach First Oauth2 backend. | proversity-org/edx-platform,proversity-org/edx-platform,proversity-org/edx-platform,proversity-org/edx-platform | lms/djangoapps/student_account/teachfirst.py | lms/djangoapps/student_account/teachfirst.py | from django.conf import settings
from social_core.backends.oauth import BaseOAuth2
import logging
log = logging.getLogger(__name__)
class TeachFirstOAuth2(BaseOAuth2):
"""TeachFirst OAuth2 authentication backend."""
settings_dict = settings.CUSTOM_BACKENDS.get('teachfirst')
name = 'teachfirst-oauth2'
REDIRECT_STATE = False
STATE_PARAMETER = False
AUTHORIZATION_URL = settings_dict.get('AUTH_URL')
ACCESS_TOKEN_URL = settings_dict.get('ACCESS_TOKEN_URL')
USER_DATA_URL = settings_dict.get('USER_DATA_URL')
ACCESS_TOKEN_METHOD = 'POST'
def auth_complete(self, *args, **kwargs):
"""Completes login process, must return user instance"""
self.process_error(self.data)
state = self.validate_state()
response = self.request_access_token(
self.access_token_url(),
data=self.auth_complete_params(state),
headers=self.auth_headers(),
auth=self.auth_complete_credentials(),
method=self.ACCESS_TOKEN_METHOD
)
self.process_error(response)
return self.do_auth(response['access_token'], response=response,
*args, **kwargs)
def auth_complete_params(self, state=None):
client_id, client_secret = self.get_key_and_secret()
return {
'state': state,
'grant_type': 'authorization_code',
'code': self.data.get('code', ''), # server response code
'client_id': client_id,
'client_secret': client_secret,
'redirect_uri': self.get_redirect_uri(state)
}
def get_user_details(self, response):
return {'username': response.get('username'),
'email': response.get('mail'),
'fullname': response.get('fullname')}
def user_data(self, access_token, *args, **kwargs):
response = self.get_json(self.USER_DATA_URL, headers={
'Authorization': 'Bearer {}'.format(access_token)
})
return response[0]
def get_user_id(self, details, response):
return details.get('email')
| agpl-3.0 | Python | |
372469139dc103f75003f96616ac53bce8986274 | Add prediction module | ef-ctx/righter,ef-ctx/righter,ef-ctx/righter,ef-ctx/righter | src/righter/predict.py | src/righter/predict.py | import json
import argparse
import righter
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input-file', help='File with one json per line containing the key text', required=True)
parser.add_argument('-o', '--file-output', help='Save analysis to output file', required=True)
args = parser.parse_args()
with open(args.input_file, 'r') as input_fp:
with open(args.file_output, 'w') as output_fp:
for line in input_fp:
writing = json.loads(line.strip())
writing['changes'] = righter.check(writing['text'])
print(json.dumps(writing), file=output_fp)
| apache-2.0 | Python | |
15daff9a7823ddd7dbc3fb6f141d539d6b636301 | Add description field to Config | Xicnet/radioflow-scheduler,Xicnet/radioflow-scheduler,Xicnet/radioflow-scheduler,Xicnet/radioflow-scheduler,Xicnet/radioflow-scheduler | project/timeslot/migrations/0008_auto_20160622_0937.py | project/timeslot/migrations/0008_auto_20160622_0937.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('timeslot', '0007_auto_20160616_0049'),
]
operations = [
migrations.AlterField(
model_name='config',
name='description',
field=models.TextField(max_length=4000, null=True, blank=True),
),
]
| agpl-3.0 | Python | |
757812e63c42f38ad065c31744427c5902a5d322 | Move some Utils outside of the main script. | BrakeValve/dataflow,BrakeValve/brake-valve-server,BrakeValve/brake-valve-server,BrakeValve/dataflow | data-preprocessor/listFile.py | data-preprocessor/listFile.py | # -*- coding: utf-8 -*-
"""
Created on Sun Sep 11 10:23:38 2016
@author: SISQUAKE
"""
import os
def listFilePath(path):
File = [];
Dir = [];
for (dirpath, dirnames, filenames) in os.walk(path):
for name in filenames:
tmp = os.path.join(dirpath, name);
File.append({'path':tmp,'name':name});
for name in dirnames:
tmp = os.path.join(dirpath, name);
Dir.append({'path':tmp,'name':name});
break
return {'file' : File , 'dir' : Dir}; | mit | Python | |
1bf55df37a12af7984f08eb429e61a2c3a7cd836 | Add initial server states | will-hart/blitz,will-hart/blitz | blitz/io/server_states.py | blitz/io/server_states.py | __author__ = 'Will Hart'
from blitz.constants import *
from blitz.io.client_states import BaseState
def validate_command(tcp, msg, commands):
"""
Helper function which checks to see if a message is in the list of valid commands
and sends an appropriate response over the TCP network
"""
if msg.split(' ')[0] not in commands:
tcp.send("ERROR 2")
else:
tcp.send("ERROR 1")
class ServerIdleState(BaseState):
def enter_state(self, tcp, state):
print "Calling ServerIdleState.enter_state: " + state.__name__
tcp.send("READY")
return self
def process_message(self, tcp, msg):
"""
Handle the various requests from the client including to start and stop logging
"""
print "Calling ServerIdleState.process_message: " + msg
# check if it is a command which causes a change of state
if msg == "START":
tcp.send("ACK")
return self.go_to_state(tcp, ServerLoggingState)
elif msg[0:8] == "DOWNLOAD":
return self.go_to_state(tcp, ServerDownloadingState)
if msg == "STOP" or msg == "STATUS":
# huh? We are not logging!?
tcp.send("NOSESSION")
else:
validate_command(tcp, msg, VALID_SERVER_COMMANDS)
return self
class ServerLoggingState(BaseState):
def enter_state(self, tcp, state):
print "Calling ServerLoggingState.enter_state: " + state.__name__
# TODO raise signal to start logging
print "[SIGNAL] Start logging"
return self
def process_message(self, tcp, msg):
print "Calling ServerLoggingState.process_message: " + msg
if msg == "STOP":
# TODO raise signal to stop logging
print "[SIGNAL] Stop logging"
tcp.send("ACK")
return self.go_to_state(tcp, ServerIdleState)
if msg == "STATUS":
# TODO raise signal to send status
print "[SIGNAL] send status"
else:
validate_command(tcp, msg, VALID_SERVER_COMMANDS)
return self
class ServerDownloadingState(BaseState):
def download_complete(self, tcp):
print "Calling ServerLoggingState.download_complete"
return self.go_to_state(tcp, ServerIdleState)
def process_message(self, tcp, msg):
validate_command(tcp, msg, VALID_SERVER_COMMANDS)
return self
class ServerClosedState(BaseState):
    """Terminal state: any attempt to use the connection after close is a
    programming error and raises."""

    def process_message(self, tcp, msg):
        # FIX: siblings separate the payload with ": "; without it the text
        # fused into e.g. "closed serverSTART".
        print("Calling ServerClosedState.process_message: " + msg)
        raise Exception("Attempted to receive message on closed server: " + msg)

    def send_message(self, tcp, msg):
        print("Calling ServerClosedState.send_message: " + msg)
        raise Exception("Attempted to send message on closed server: " + msg)
| agpl-3.0 | Python | |
8e8cb549251b6914a34a729bb06c02462ed95af9 | convert old wizard into osv memory wizard for configuration | waytai/odoo,tangyiyong/odoo,nexiles/odoo,papouso/odoo,nagyistoce/odoo-dev-odoo,aviciimaxwell/odoo,JCA-Developpement/Odoo,VitalPet/odoo,rgeleta/odoo,bobisme/odoo,tvtsoft/odoo8,doomsterinc/odoo,wangjun/odoo,papouso/odoo,massot/odoo,nuncjo/odoo,leoliujie/odoo,CatsAndDogsbvba/odoo,apanju/GMIO_Odoo,acshan/odoo,fuhongliang/odoo,cdrooom/odoo,VielSoft/odoo,GauravSahu/odoo,jpshort/odoo,dllsf/odootest,datenbetrieb/odoo,nhomar/odoo-mirror,Noviat/odoo,Bachaco-ve/odoo,cedk/odoo,camptocamp/ngo-addons-backport,oasiswork/odoo,shingonoide/odoo,fdvarela/odoo8,jeasoft/odoo,fossoult/odoo,thanhacun/odoo,osvalr/odoo,gdgellatly/OCB1,inspyration/odoo,acshan/odoo,goliveirab/odoo,fuselock/odoo,Danisan/odoo-1,OpenUpgrade/OpenUpgrade,naousse/odoo,savoirfairelinux/OpenUpgrade,mmbtba/odoo,Ernesto99/odoo,OSSESAC/odoopubarquiluz,hanicker/odoo,alhashash/odoo,hoatle/odoo,acshan/odoo,odoo-turkiye/odoo,SerpentCS/odoo,optima-ict/odoo,optima-ict/odoo,ApuliaSoftware/odoo,NL66278/OCB,RafaelTorrealba/odoo,Ichag/odoo,ApuliaSoftware/odoo,tinkhaven-organization/odoo,JonathanStein/odoo,oliverhr/odoo,datenbetrieb/odoo,arthru/OpenUpgrade,nitinitprof/odoo,florentx/OpenUpgrade,odoo-turkiye/odoo,NL66278/OCB,BT-fgarbely/odoo,javierTerry/odoo,doomsterinc/odoo,fevxie/odoo,nuuuboo/odoo,apanju/GMIO_Odoo,lsinfo/odoo,bkirui/odoo,omprakasha/odoo,bobisme/odoo,sve-odoo/odoo,havt/odoo,rowemoore/odoo,dgzurita/odoo,mvaled/OpenUpgrade,bkirui/odoo,oihane/odoo,abstract-open-solutions/OCB,ApuliaSoftware/odoo,oliverhr/odoo,grap/OCB,ShineFan/odoo,credativUK/OCB,juanalfonsopr/odoo,kittiu/odoo,fgesora/odoo,ccomb/OpenUpgrade,frouty/odoogoeen,florian-dacosta/OpenUpgrade,chiragjogi/odoo,OpenPymeMx/OCB,collex100/odoo,colinnewell/odoo,Bachaco-ve/odoo,pedrobaeza/odoo,shaufi10/odoo,sadleader/odoo,ChanduERP/odoo,makinacorpus/odoo,jeasoft/odoo,Drooids/odoo,guerrerocarlos/odoo,Nick-OpusVL/odoo,nhomar/odoo-mirror,sebalix/OpenUpgrade,javierTerr
y/odoo,lgscofield/odoo,cpyou/odoo,spadae22/odoo,tinkerthaler/odoo,nuncjo/odoo,juanalfonsopr/odoo,NL66278/OCB,vrenaville/ngo-addons-backport,cdrooom/odoo,erkrishna9/odoo,MarcosCommunity/odoo,Grirrane/odoo,oliverhr/odoo,steedos/odoo,florentx/OpenUpgrade,hopeall/odoo,mlaitinen/odoo,patmcb/odoo,Nowheresly/odoo,bobisme/odoo,javierTerry/odoo,dgzurita/odoo,SerpentCS/odoo,tinkhaven-organization/odoo,Maspear/odoo,x111ong/odoo,BT-astauder/odoo,nuncjo/odoo,Maspear/odoo,christophlsa/odoo,agrista/odoo-saas,odoousers2014/odoo,mustafat/odoo-1,mkieszek/odoo,florentx/OpenUpgrade,collex100/odoo,glovebx/odoo,wangjun/odoo,feroda/odoo,Daniel-CA/odoo,numerigraphe/odoo,draugiskisprendimai/odoo,Elico-Corp/odoo_OCB,mmbtba/odoo,pedrobaeza/odoo,andreparames/odoo,hip-odoo/odoo,nuncjo/odoo,Endika/OpenUpgrade,mszewczy/odoo,florentx/OpenUpgrade,tvtsoft/odoo8,fdvarela/odoo8,gsmartway/odoo,joariasl/odoo,VitalPet/odoo,Elico-Corp/odoo_OCB,avoinsystems/odoo,fuhongliang/odoo,rschnapka/odoo,srimai/odoo,ovnicraft/odoo,rowemoore/odoo,colinnewell/odoo,arthru/OpenUpgrade,shivam1111/odoo,ramadhane/odoo,Danisan/odoo-1,matrixise/odoo,ApuliaSoftware/odoo,jolevq/odoopub,ygol/odoo,zchking/odoo,tinkhaven-organization/odoo,podemos-info/odoo,0k/odoo,JGarcia-Panach/odoo,colinnewell/odoo,shaufi/odoo,demon-ru/iml-crm,draugiskisprendimai/odoo,savoirfairelinux/odoo,pplatek/odoo,ShineFan/odoo,bakhtout/odoo-educ,hifly/OpenUpgrade,mlaitinen/odoo,tvtsoft/odoo8,NL66278/OCB,hopeall/odoo,nagyistoce/odoo-dev-odoo,fgesora/odoo,Maspear/odoo,Kilhog/odoo,shaufi/odoo,Endika/odoo,Antiun/odoo,NeovaHealth/odoo,alexcuellar/odoo,rahuldhote/odoo,ygol/odoo,0k/OpenUpgrade,dalegregory/odoo,oihane/odoo,hbrunn/OpenUpgrade,storm-computers/odoo,dalegregory/odoo,oliverhr/odoo,dezynetechnologies/odoo,ClearCorp-dev/odoo,thanhacun/odoo,jiangzhixiao/odoo,x111ong/odoo,jaxkodex/odoo,gorjuce/odoo,leorochael/odoo,fdvarela/odoo8,SAM-IT-SA/odoo,jaxkodex/odoo,bobisme/odoo,oliverhr/odoo,lsinfo/odoo,odooindia/odoo,Daniel-CA/odoo,waytai/odoo,rschnapka/odoo,polj
eff/odoo,omprakasha/odoo,charbeljc/OCB,rgeleta/odoo,fjbatresv/odoo,goliveirab/odoo,mustafat/odoo-1,jusdng/odoo,jpshort/odoo,BT-astauder/odoo,jolevq/odoopub,hubsaysnuaa/odoo,nuuuboo/odoo,Daniel-CA/odoo,CatsAndDogsbvba/odoo,cedk/odoo,waytai/odoo,xujb/odoo,dgzurita/odoo,rdeheele/odoo,realsaiko/odoo,mvaled/OpenUpgrade,idncom/odoo,bobisme/odoo,dezynetechnologies/odoo,jaxkodex/odoo,prospwro/odoo,microcom/odoo,charbeljc/OCB,ubic135/odoo-design,fuhongliang/odoo,jiangzhixiao/odoo,nitinitprof/odoo,ThinkOpen-Solutions/odoo,lombritz/odoo,shaufi10/odoo,joariasl/odoo,spadae22/odoo,hoatle/odoo,camptocamp/ngo-addons-backport,ApuliaSoftware/odoo,florentx/OpenUpgrade,rschnapka/odoo,ChanduERP/odoo,mvaled/OpenUpgrade,havt/odoo,gavin-feng/odoo,kittiu/odoo,podemos-info/odoo,andreparames/odoo,gavin-feng/odoo,odoousers2014/odoo,agrista/odoo-saas,hoatle/odoo,stonegithubs/odoo,hmen89/odoo,hifly/OpenUpgrade,abdellatifkarroum/odoo,Ichag/odoo,Bachaco-ve/odoo,n0m4dz/odoo,juanalfonsopr/odoo,xzYue/odoo,Endika/odoo,gsmartway/odoo,kifcaliph/odoo,hifly/OpenUpgrade,florentx/OpenUpgrade,dezynetechnologies/odoo,sve-odoo/odoo,javierTerry/odoo,erkrishna9/odoo,shivam1111/odoo,OpenUpgrade-dev/OpenUpgrade,joariasl/odoo,Gitlab11/odoo,rgeleta/odoo,grap/OCB,alexteodor/odoo,mszewczy/odoo,provaleks/o8,inspyration/odoo,tarzan0820/odoo,Endika/odoo,sinbazhou/odoo,blaggacao/OpenUpgrade,ehirt/odoo,ecosoft-odoo/odoo,xzYue/odoo,omprakasha/odoo,cedk/odoo,addition-it-solutions/project-all,frouty/odoo_oph,CubicERP/odoo,gsmartway/odoo,Nowheresly/odoo,slevenhagen/odoo-npg,tinkerthaler/odoo,Gitlab11/odoo,tinkhaven-organization/odoo,gorjuce/odoo,tinkerthaler/odoo,numerigraphe/odoo,fuhongliang/odoo,JCA-Developpement/Odoo,csrocha/OpenUpgrade,addition-it-solutions/project-all,jusdng/odoo,kybriainfotech/iSocioCRM,nitinitprof/odoo,collex100/odoo,GauravSahu/odoo,kybriainfotech/iSocioCRM,numerigraphe/odoo,prospwro/odoo,realsaiko/odoo,guewen/OpenUpgrade,bwrsandman/OpenUpgrade,CatsAndDogsbvba/odoo,MarcosCommunity/odoo,gvb/odoo,hassoon3
/odoo,datenbetrieb/odoo,BT-fgarbely/odoo,aviciimaxwell/odoo,alexcuellar/odoo,cloud9UG/odoo,Nick-OpusVL/odoo,dariemp/odoo,windedge/odoo,alqfahad/odoo,lgscofield/odoo,grap/OpenUpgrade,virgree/odoo,synconics/odoo,eino-makitalo/odoo,draugiskisprendimai/odoo,Nowheresly/odoo,fossoult/odoo,salaria/odoo,Maspear/odoo,mustafat/odoo-1,shaufi10/odoo,rschnapka/odoo,jaxkodex/odoo,eino-makitalo/odoo,RafaelTorrealba/odoo,sergio-incaser/odoo,Gitlab11/odoo,odoo-turkiye/odoo,CubicERP/odoo,Ichag/odoo,fevxie/odoo,bplancher/odoo,BT-fgarbely/odoo,slevenhagen/odoo,ApuliaSoftware/odoo,storm-computers/odoo,sysadminmatmoz/OCB,optima-ict/odoo,fossoult/odoo,xzYue/odoo,grap/OCB,nagyistoce/odoo-dev-odoo,draugiskisprendimai/odoo,chiragjogi/odoo,microcom/odoo,fdvarela/odoo8,mlaitinen/odoo,colinnewell/odoo,OpenUpgrade/OpenUpgrade,leorochael/odoo,jiangzhixiao/odoo,mkieszek/odoo,rubencabrera/odoo,Grirrane/odoo,janocat/odoo,NeovaHealth/odoo,janocat/odoo,virgree/odoo,christophlsa/odoo,synconics/odoo,gorjuce/odoo,luistorresm/odoo,bakhtout/odoo-educ,TRESCLOUD/odoopub,bguillot/OpenUpgrade,VielSoft/odoo,Nick-OpusVL/odoo,Codefans-fan/odoo,gdgellatly/OCB1,Codefans-fan/odoo,Elico-Corp/odoo_OCB,nuuuboo/odoo,hopeall/odoo,sve-odoo/odoo,gdgellatly/OCB1,provaleks/o8,pedrobaeza/OpenUpgrade,ojengwa/odoo,PongPi/isl-odoo,slevenhagen/odoo,lgscofield/odoo,bealdav/OpenUpgrade,Adel-Magebinary/odoo,luistorresm/odoo,ubic135/odoo-design,fuhongliang/odoo,klunwebale/odoo,lombritz/odoo,kirca/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,datenbetrieb/odoo,shivam1111/odoo,JCA-Developpement/Odoo,frouty/odoogoeen,hubsaysnuaa/odoo,dariemp/odoo,salaria/odoo,OpenPymeMx/OCB,funkring/fdoo,lombritz/odoo,abenzbiria/clients_odoo,minhtuancn/odoo,ehirt/odoo,colinnewell/odoo,zchking/odoo,goliveirab/odoo,OpenUpgrade/OpenUpgrade,cedk/odoo,nagyistoce/odoo-dev-odoo,kittiu/odoo,hopeall/odoo,hanicker/odoo,christophlsa/odoo,pplatek/odoo,ujjwalwahi/odoo,ojengwa/odoo,apanju/odoo,dsfsdgsbngfggb/odoo,bwrsandman/OpenUpgrade,klunwebale/odoo,nuncjo/odoo,hanicker/
odoo,gorjuce/odoo,nhomar/odoo,srsman/odoo,fevxie/odoo,nhomar/odoo-mirror,microcom/odoo,nexiles/odoo,naousse/odoo,guerrerocarlos/odoo,BT-ojossen/odoo,apanju/GMIO_Odoo,gvb/odoo,diagramsoftware/odoo,odoousers2014/odoo,SerpentCS/odoo,aviciimaxwell/odoo,RafaelTorrealba/odoo,srsman/odoo,frouty/odoo_oph,sinbazhou/odoo,bguillot/OpenUpgrade,lightcn/odoo,bobisme/odoo,rowemoore/odoo,ClearCorp-dev/odoo,provaleks/o8,diagramsoftware/odoo,lightcn/odoo,florian-dacosta/OpenUpgrade,brijeshkesariya/odoo,chiragjogi/odoo,addition-it-solutions/project-all,glovebx/odoo,sv-dev1/odoo,tangyiyong/odoo,oasiswork/odoo,stonegithubs/odoo,feroda/odoo,MarcosCommunity/odoo,vrenaville/ngo-addons-backport,Danisan/odoo-1,tvibliani/odoo,deKupini/erp,bplancher/odoo,frouty/odoogoeen,dllsf/odootest,demon-ru/iml-crm,incaser/odoo-odoo,glovebx/odoo,csrocha/OpenUpgrade,goliveirab/odoo,lgscofield/odoo,brijeshkesariya/odoo,apanju/odoo,OpenPymeMx/OCB,rahuldhote/odoo,havt/odoo,virgree/odoo,dezynetechnologies/odoo,jolevq/odoopub,0k/OpenUpgrade,alexcuellar/odoo,camptocamp/ngo-addons-backport,incaser/odoo-odoo,n0m4dz/odoo,FlorianLudwig/odoo,guerrerocarlos/odoo,bealdav/OpenUpgrade,ehirt/odoo,bkirui/odoo,ihsanudin/odoo,OpenUpgrade/OpenUpgrade,fuselock/odoo,OpenUpgrade/OpenUpgrade,osvalr/odoo,savoirfairelinux/OpenUpgrade,takis/odoo,hmen89/odoo,tarzan0820/odoo,OpusVL/odoo,cloud9UG/odoo,cysnake4713/odoo,patmcb/odoo,brijeshkesariya/odoo,storm-computers/odoo,odootr/odoo,ovnicraft/odoo,erkrishna9/odoo,CopeX/odoo,CatsAndDogsbvba/odoo,bwrsandman/OpenUpgrade,ccomb/OpenUpgrade,nitinitprof/odoo,kifcaliph/odoo,xujb/odoo,srimai/odoo,FlorianLudwig/odoo,sebalix/OpenUpgrade,jiachenning/odoo,codekaki/odoo,nexiles/odoo,odoo-turkiye/odoo,poljeff/odoo,bguillot/OpenUpgrade,highco-groupe/odoo,PongPi/isl-odoo,jeasoft/odoo,glovebx/odoo,grap/OCB,fevxie/odoo,cloud9UG/odoo,frouty/odoo_oph,GauravSahu/odoo,pedrobaeza/odoo,tvibliani/odoo,dkubiak789/odoo,0k/odoo,dsfsdgsbngfggb/odoo,Endika/odoo,slevenhagen/odoo,ramadhane/odoo,podemos-info/odoo,BT-rma
rtin/odoo,glovebx/odoo,ShineFan/odoo,grap/OpenUpgrade,ChanduERP/odoo,Ichag/odoo,srimai/odoo,bakhtout/odoo-educ,nitinitprof/odoo,sv-dev1/odoo,SerpentCS/odoo,incaser/odoo-odoo,apanju/odoo,srimai/odoo,KontorConsulting/odoo,joariasl/odoo,Noviat/odoo,elmerdpadilla/iv,OpenPymeMx/OCB,apocalypsebg/odoo,nitinitprof/odoo,n0m4dz/odoo,hip-odoo/odoo,damdam-s/OpenUpgrade,jiachenning/odoo,dezynetechnologies/odoo,bealdav/OpenUpgrade,gsmartway/odoo,Codefans-fan/odoo,arthru/OpenUpgrade,jaxkodex/odoo,hopeall/odoo,hubsaysnuaa/odoo,fgesora/odoo,leoliujie/odoo,ccomb/OpenUpgrade,frouty/odoogoeen,odoo-turkiye/odoo,OpusVL/odoo,bakhtout/odoo-educ,shingonoide/odoo,makinacorpus/odoo,simongoffin/website_version,papouso/odoo,OpenUpgrade-dev/OpenUpgrade,laslabs/odoo,vnsofthe/odoo,apanju/odoo,hoatle/odoo,Drooids/odoo,pedrobaeza/odoo,leoliujie/odoo,BT-rmartin/odoo,tinkerthaler/odoo,shingonoide/odoo,datenbetrieb/odoo,bakhtout/odoo-educ,PongPi/isl-odoo,chiragjogi/odoo,kybriainfotech/iSocioCRM,savoirfairelinux/odoo,omprakasha/odoo,pplatek/odoo,damdam-s/OpenUpgrade,ingadhoc/odoo,jiachenning/odoo,cpyou/odoo,ihsanudin/odoo,factorlibre/OCB,markeTIC/OCB,papouso/odoo,frouty/odoogoeen,sinbazhou/odoo,doomsterinc/odoo,florian-dacosta/OpenUpgrade,x111ong/odoo,synconics/odoo,gsmartway/odoo,numerigraphe/odoo,naousse/odoo,mvaled/OpenUpgrade,hassoon3/odoo,FlorianLudwig/odoo,jpshort/odoo,nuuuboo/odoo,sinbazhou/odoo,makinacorpus/odoo,guewen/OpenUpgrade,goliveirab/odoo,Noviat/odoo,srimai/odoo,odooindia/odoo,aviciimaxwell/odoo,abdellatifkarroum/odoo,syci/OCB,christophlsa/odoo,blaggacao/OpenUpgrade,hoatle/odoo,Kilhog/odoo,jiachenning/odoo,bguillot/OpenUpgrade,BT-astauder/odoo,ojengwa/odoo,NeovaHealth/odoo,sinbazhou/odoo,nitinitprof/odoo,shingonoide/odoo,juanalfonsopr/odoo,fjbatresv/odoo,matrixise/odoo,leorochael/odoo,janocat/odoo,wangjun/odoo,kirca/OpenUpgrade,PongPi/isl-odoo,VielSoft/odoo,codekaki/odoo,avoinsystems/odoo,prospwro/odoo,ThinkOpen-Solutions/odoo,shingonoide/odoo,hassoon3/odoo,cloud9UG/odoo,lightcn/odoo,col
innewell/odoo,charbeljc/OCB,ingadhoc/odoo,nuuuboo/odoo,shingonoide/odoo,avoinsystems/odoo,nexiles/odoo,factorlibre/OCB,prospwro/odoo,abdellatifkarroum/odoo,VitalPet/odoo,takis/odoo,jesramirez/odoo,leorochael/odoo,synconics/odoo,mszewczy/odoo,minhtuancn/odoo,BT-fgarbely/odoo,grap/OpenUpgrade,alexteodor/odoo,Eric-Zhong/odoo,srsman/odoo,ramadhane/odoo,addition-it-solutions/project-all,cpyou/odoo,alhashash/odoo,fjbatresv/odoo,Bachaco-ve/odoo,simongoffin/website_version,OpusVL/odoo,osvalr/odoo,salaria/odoo,stephen144/odoo,jesramirez/odoo,brijeshkesariya/odoo,grap/OCB,JGarcia-Panach/odoo,QianBIG/odoo,thanhacun/odoo,omprakasha/odoo,QianBIG/odoo,pedrobaeza/odoo,QianBIG/odoo,idncom/odoo,jiachenning/odoo,PongPi/isl-odoo,virgree/odoo,jpshort/odoo,sergio-incaser/odoo,Antiun/odoo,Elico-Corp/odoo_OCB,ThinkOpen-Solutions/odoo,markeTIC/OCB,BT-ojossen/odoo,mszewczy/odoo,vnsofthe/odoo,savoirfairelinux/OpenUpgrade,gdgellatly/OCB1,syci/OCB,Eric-Zhong/odoo,nagyistoce/odoo-dev-odoo,VitalPet/odoo,ecosoft-odoo/odoo,patmcb/odoo,glovebx/odoo,SAM-IT-SA/odoo,datenbetrieb/odoo,Adel-Magebinary/odoo,Endika/odoo,poljeff/odoo,simongoffin/website_version,tvibliani/odoo,Drooids/odoo,tinkerthaler/odoo,sv-dev1/odoo,ramitalat/odoo,Codefans-fan/odoo,Bachaco-ve/odoo,slevenhagen/odoo-npg,jfpla/odoo,Endika/OpenUpgrade,odootr/odoo,Antiun/odoo,SerpentCS/odoo,fjbatresv/odoo,abdellatifkarroum/odoo,Nowheresly/odoo,Gitlab11/odoo,KontorConsulting/odoo,oliverhr/odoo,cloud9UG/odoo,hassoon3/odoo,OSSESAC/odoopubarquiluz,MarcosCommunity/odoo,codekaki/odoo,OSSESAC/odoopubarquiluz,lsinfo/odoo,mszewczy/odoo,odootr/odoo,rgeleta/odoo,salaria/odoo,Ernesto99/odoo,damdam-s/OpenUpgrade,diagramsoftware/odoo,jiangzhixiao/odoo,JCA-Developpement/Odoo,ClearCorp-dev/odoo,slevenhagen/odoo-npg,vrenaville/ngo-addons-backport,windedge/odoo,hip-odoo/odoo,jolevq/odoopub,Adel-Magebinary/odoo,alhashash/odoo,Ichag/odoo,minhtuancn/odoo,rubencabrera/odoo,waytai/odoo,abdellatifkarroum/odoo,fuhongliang/odoo,OpenUpgrade-dev/OpenUpgrade,diagramsoft
ware/odoo,microcom/odoo,apanju/odoo,collex100/odoo,Nowheresly/odoo,tangyiyong/odoo,Nick-OpusVL/odoo,avoinsystems/odoo,Daniel-CA/odoo,kittiu/odoo,mkieszek/odoo,hubsaysnuaa/odoo,shivam1111/odoo,jfpla/odoo,ojengwa/odoo,ingadhoc/odoo,jolevq/odoopub,sysadminmatmoz/OCB,apanju/odoo,factorlibre/OCB,thanhacun/odoo,cedk/odoo,lsinfo/odoo,ecosoft-odoo/odoo,funkring/fdoo,jfpla/odoo,ClearCorp-dev/odoo,fossoult/odoo,Gitlab11/odoo,codekaki/odoo,pedrobaeza/OpenUpgrade,slevenhagen/odoo,arthru/OpenUpgrade,papouso/odoo,kittiu/odoo,sergio-incaser/odoo,fuselock/odoo,leoliujie/odoo,vrenaville/ngo-addons-backport,ujjwalwahi/odoo,fevxie/odoo,andreparames/odoo,tangyiyong/odoo,rdeheele/odoo,stonegithubs/odoo,mlaitinen/odoo,dezynetechnologies/odoo,mustafat/odoo-1,x111ong/odoo,steedos/odoo,abstract-open-solutions/OCB,juanalfonsopr/odoo,steedos/odoo,guerrerocarlos/odoo,ojengwa/odoo,bplancher/odoo,odooindia/odoo,nagyistoce/odoo-dev-odoo,sebalix/OpenUpgrade,x111ong/odoo,dalegregory/odoo,highco-groupe/odoo,rubencabrera/odoo,VitalPet/odoo,SAM-IT-SA/odoo,salaria/odoo,abdellatifkarroum/odoo,Noviat/odoo,joariasl/odoo,Nick-OpusVL/odoo,TRESCLOUD/odoopub,alqfahad/odoo,idncom/odoo,TRESCLOUD/odoopub,klunwebale/odoo,gdgellatly/OCB1,podemos-info/odoo,mmbtba/odoo,rschnapka/odoo,ujjwalwahi/odoo,pedrobaeza/OpenUpgrade,CopeX/odoo,dgzurita/odoo,guewen/OpenUpgrade,grap/OCB,omprakasha/odoo,GauravSahu/odoo,sebalix/OpenUpgrade,MarcosCommunity/odoo,nhomar/odoo,bguillot/OpenUpgrade,mszewczy/odoo,nuncjo/odoo,xzYue/odoo,gvb/odoo,KontorConsulting/odoo,ecosoft-odoo/odoo,Danisan/odoo-1,cloud9UG/odoo,cedk/odoo,ShineFan/odoo,Maspear/odoo,lgscofield/odoo,csrocha/OpenUpgrade,slevenhagen/odoo-npg,markeTIC/OCB,OpenPymeMx/OCB,ygol/odoo,Ernesto99/odoo,laslabs/odoo,nuuuboo/odoo,sysadminmatmoz/OCB,wangjun/odoo,optima-ict/odoo,tvtsoft/odoo8,fdvarela/odoo8,xzYue/odoo,Daniel-CA/odoo,aviciimaxwell/odoo,tvibliani/odoo,christophlsa/odoo,ThinkOpen-Solutions/odoo,vnsofthe/odoo,laslabs/odoo,mlaitinen/odoo,dariemp/odoo,blaggacao/OpenUpgrade,and
reparames/odoo,jusdng/odoo,Gitlab11/odoo,pedrobaeza/OpenUpgrade,fevxie/odoo,tarzan0820/odoo,patmcb/odoo,omprakasha/odoo,hubsaysnuaa/odoo,NL66278/OCB,leoliujie/odoo,credativUK/OCB,takis/odoo,alexcuellar/odoo,minhtuancn/odoo,windedge/odoo,ihsanudin/odoo,alhashash/odoo,NeovaHealth/odoo,datenbetrieb/odoo,oihane/odoo,matrixise/odoo,MarcosCommunity/odoo,matrixise/odoo,joshuajan/odoo,blaggacao/OpenUpgrade,jpshort/odoo,incaser/odoo-odoo,jesramirez/odoo,Adel-Magebinary/odoo,blaggacao/OpenUpgrade,tvibliani/odoo,zchking/odoo,BT-fgarbely/odoo,luiseduardohdbackup/odoo,ramitalat/odoo,highco-groupe/odoo,dalegregory/odoo,codekaki/odoo,JCA-Developpement/Odoo,savoirfairelinux/odoo,dsfsdgsbngfggb/odoo,janocat/odoo,tarzan0820/odoo,idncom/odoo,Endika/odoo,poljeff/odoo,damdam-s/OpenUpgrade,BT-rmartin/odoo,sve-odoo/odoo,fuhongliang/odoo,ClearCorp-dev/odoo,shivam1111/odoo,AuyaJackie/odoo,Eric-Zhong/odoo,florian-dacosta/OpenUpgrade,realsaiko/odoo,GauravSahu/odoo,jfpla/odoo,bakhtout/odoo-educ,numerigraphe/odoo,factorlibre/OCB,abenzbiria/clients_odoo,dllsf/odootest,christophlsa/odoo,gsmartway/odoo,odootr/odoo,joariasl/odoo,BT-ojossen/odoo,deKupini/erp,windedge/odoo,camptocamp/ngo-addons-backport,rschnapka/odoo,JGarcia-Panach/odoo,oihane/odoo,Eric-Zhong/odoo,agrista/odoo-saas,diagramsoftware/odoo,damdam-s/OpenUpgrade,idncom/odoo,andreparames/odoo,odooindia/odoo,JGarcia-Panach/odoo,havt/odoo,feroda/odoo,QianBIG/odoo,JonathanStein/odoo,Ichag/odoo,jfpla/odoo,OpenPymeMx/OCB,credativUK/OCB,hip-odoo/odoo,xzYue/odoo,dezynetechnologies/odoo,odooindia/odoo,CubicERP/odoo,wangjun/odoo,vnsofthe/odoo,demon-ru/iml-crm,dfang/odoo,prospwro/odoo,alqfahad/odoo,apanju/GMIO_Odoo,ccomb/OpenUpgrade,kirca/OpenUpgrade,Adel-Magebinary/odoo,jiangzhixiao/odoo,cdrooom/odoo,gvb/odoo,mmbtba/odoo,Endika/OpenUpgrade,chiragjogi/odoo,funkring/fdoo,bguillot/OpenUpgrade,oliverhr/odoo,minhtuancn/odoo,zchking/odoo,rubencabrera/odoo,credativUK/OCB,dkubiak789/odoo,OSSESAC/odoopubarquiluz,apocalypsebg/odoo,Drooids/odoo,abstract-open-
solutions/OCB,aviciimaxwell/odoo,nhomar/odoo,mvaled/OpenUpgrade,CatsAndDogsbvba/odoo,Bachaco-ve/odoo,apocalypsebg/odoo,damdam-s/OpenUpgrade,gvb/odoo,OpenUpgrade-dev/OpenUpgrade,BT-astauder/odoo,0k/odoo,tvtsoft/odoo8,sve-odoo/odoo,doomsterinc/odoo,fjbatresv/odoo,brijeshkesariya/odoo,apanju/odoo,0k/OpenUpgrade,demon-ru/iml-crm,savoirfairelinux/OpenUpgrade,OpusVL/odoo,VielSoft/odoo,salaria/odoo,vnsofthe/odoo,hifly/OpenUpgrade,thanhacun/odoo,kifcaliph/odoo,makinacorpus/odoo,charbeljc/OCB,grap/OpenUpgrade,oihane/odoo,FlorianLudwig/odoo,vrenaville/ngo-addons-backport,VitalPet/odoo,bplancher/odoo,cdrooom/odoo,nuuuboo/odoo,funkring/fdoo,gavin-feng/odoo,dkubiak789/odoo,diagramsoftware/odoo,erkrishna9/odoo,TRESCLOUD/odoopub,makinacorpus/odoo,ojengwa/odoo,dariemp/odoo,Antiun/odoo,eino-makitalo/odoo,spadae22/odoo,virgree/odoo,CubicERP/odoo,OpenUpgrade/OpenUpgrade,savoirfairelinux/odoo,addition-it-solutions/project-all,bakhtout/odoo-educ,oasiswork/odoo,incaser/odoo-odoo,idncom/odoo,JonathanStein/odoo,sadleader/odoo,shingonoide/odoo,Noviat/odoo,brijeshkesariya/odoo,ramadhane/odoo,Noviat/odoo,stephen144/odoo,fuselock/odoo,QianBIG/odoo,havt/odoo,mvaled/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,sv-dev1/odoo,thanhacun/odoo,cysnake4713/odoo,spadae22/odoo,ihsanudin/odoo,nexiles/odoo,dllsf/odootest,mustafat/odoo-1,luistorresm/odoo,fuselock/odoo,BT-ojossen/odoo,Drooids/odoo,rgeleta/odoo,RafaelTorrealba/odoo,sergio-incaser/odoo,leorochael/odoo,ThinkOpen-Solutions/odoo,arthru/OpenUpgrade,joariasl/odoo,pplatek/odoo,Grirrane/odoo,abenzbiria/clients_odoo,dfang/odoo,ramitalat/odoo,ihsanudin/odoo,slevenhagen/odoo,ramitalat/odoo,rahuldhote/odoo,syci/OCB,Codefans-fan/odoo,lsinfo/odoo,salaria/odoo,abstract-open-solutions/OCB,sysadminmatmoz/OCB,mlaitinen/odoo,Daniel-CA/odoo,Kilhog/odoo,abenzbiria/clients_odoo,cysnake4713/odoo,JGarcia-Panach/odoo,lsinfo/odoo,apanju/GMIO_Odoo,sadleader/odoo,oasiswork/odoo,shaufi/odoo,hopeall/odoo,Eric-Zhong/odoo,Grirrane/odoo,luiseduardohdbackup/odoo,klunwebale/odoo,ta
ngyiyong/odoo,ehirt/odoo,Noviat/odoo,alqfahad/odoo,dalegregory/odoo,n0m4dz/odoo,rowemoore/odoo,apocalypsebg/odoo,PongPi/isl-odoo,mkieszek/odoo,prospwro/odoo,hmen89/odoo,Danisan/odoo-1,mmbtba/odoo,hifly/OpenUpgrade,optima-ict/odoo,hbrunn/OpenUpgrade,fuselock/odoo,NeovaHealth/odoo,VielSoft/odoo,rahuldhote/odoo,Endika/OpenUpgrade,guerrerocarlos/odoo,nuncjo/odoo,shaufi/odoo,MarcosCommunity/odoo,GauravSahu/odoo,apocalypsebg/odoo,QianBIG/odoo,steedos/odoo,lombritz/odoo,shaufi10/odoo,ubic135/odoo-design,fevxie/odoo,shaufi/odoo,takis/odoo,gvb/odoo,eino-makitalo/odoo,lgscofield/odoo,dkubiak789/odoo,codekaki/odoo,kybriainfotech/iSocioCRM,cpyou/odoo,virgree/odoo,mkieszek/odoo,guewen/OpenUpgrade,mustafat/odoo-1,ShineFan/odoo,kittiu/odoo,hbrunn/OpenUpgrade,grap/OCB,FlorianLudwig/odoo,ovnicraft/odoo,hbrunn/OpenUpgrade,dfang/odoo,rahuldhote/odoo,matrixise/odoo,slevenhagen/odoo,charbeljc/OCB,sinbazhou/odoo,jeasoft/odoo,draugiskisprendimai/odoo,bobisme/odoo,sysadminmatmoz/OCB,jpshort/odoo,highco-groupe/odoo,Nick-OpusVL/odoo,zchking/odoo,CopeX/odoo,charbeljc/OCB,luistorresm/odoo,brijeshkesariya/odoo,camptocamp/ngo-addons-backport,gavin-feng/odoo,frouty/odoo_oph,patmcb/odoo,doomsterinc/odoo,kittiu/odoo,odoo-turkiye/odoo,acshan/odoo,SAM-IT-SA/odoo,TRESCLOUD/odoopub,agrista/odoo-saas,realsaiko/odoo,juanalfonsopr/odoo,dkubiak789/odoo,goliveirab/odoo,jiachenning/odoo,tvibliani/odoo,havt/odoo,arthru/OpenUpgrade,FlorianLudwig/odoo,ecosoft-odoo/odoo,dsfsdgsbngfggb/odoo,stonegithubs/odoo,rschnapka/odoo,hanicker/odoo,sadleader/odoo,luistorresm/odoo,ChanduERP/odoo,shivam1111/odoo,chiragjogi/odoo,rubencabrera/odoo,abstract-open-solutions/OCB,hubsaysnuaa/odoo,luistorresm/odoo,mkieszek/odoo,funkring/fdoo,oasiswork/odoo,hip-odoo/odoo,Ichag/odoo,ThinkOpen-Solutions/odoo,kybriainfotech/iSocioCRM,ujjwalwahi/odoo,csrocha/OpenUpgrade,shivam1111/odoo,bwrsandman/OpenUpgrade,ChanduERP/odoo,colinnewell/odoo,windedge/odoo,synconics/odoo,lombritz/odoo,xujb/odoo,collex100/odoo,mmbtba/odoo,feroda/odoo,spadae22/
odoo,CubicERP/odoo,ingadhoc/odoo,Grirrane/odoo,fossoult/odoo,alexcuellar/odoo,BT-astauder/odoo,glovebx/odoo,csrocha/OpenUpgrade,hanicker/odoo,nhomar/odoo,klunwebale/odoo,stephen144/odoo,avoinsystems/odoo,windedge/odoo,kirca/OpenUpgrade,pedrobaeza/OpenUpgrade,CopeX/odoo,eino-makitalo/odoo,fgesora/odoo,avoinsystems/odoo,klunwebale/odoo,patmcb/odoo,BT-ojossen/odoo,hoatle/odoo,jesramirez/odoo,bkirui/odoo,OSSESAC/odoopubarquiluz,alhashash/odoo,OSSESAC/odoopubarquiluz,naousse/odoo,CopeX/odoo,Eric-Zhong/odoo,sergio-incaser/odoo,gvb/odoo,joshuajan/odoo,ygol/odoo,lsinfo/odoo,fossoult/odoo,grap/OpenUpgrade,pedrobaeza/OpenUpgrade,gavin-feng/odoo,Kilhog/odoo,dariemp/odoo,collex100/odoo,n0m4dz/odoo,agrista/odoo-saas,leoliujie/odoo,pplatek/odoo,ubic135/odoo-design,ramadhane/odoo,javierTerry/odoo,apocalypsebg/odoo,acshan/odoo,patmcb/odoo,dariemp/odoo,gavin-feng/odoo,stephen144/odoo,ccomb/OpenUpgrade,janocat/odoo,Ernesto99/odoo,addition-it-solutions/project-all,provaleks/o8,osvalr/odoo,makinacorpus/odoo,bealdav/OpenUpgrade,KontorConsulting/odoo,OpenUpgrade/OpenUpgrade,abstract-open-solutions/OCB,charbeljc/OCB,0k/odoo,nexiles/odoo,x111ong/odoo,diagramsoftware/odoo,jusdng/odoo,tvibliani/odoo,Kilhog/odoo,sysadminmatmoz/OCB,podemos-info/odoo,nagyistoce/odoo-dev-odoo,odoousers2014/odoo,MarcosCommunity/odoo,OpenPymeMx/OCB,KontorConsulting/odoo,steedos/odoo,ramadhane/odoo,ovnicraft/odoo,rahuldhote/odoo,Adel-Magebinary/odoo,slevenhagen/odoo-npg,factorlibre/OCB,eino-makitalo/odoo,guewen/OpenUpgrade,simongoffin/website_version,BT-rmartin/odoo,syci/OCB,0k/odoo,Elico-Corp/odoo_OCB,leorochael/odoo,cysnake4713/odoo,nexiles/odoo,dgzurita/odoo,BT-ojossen/odoo,jusdng/odoo,massot/odoo,JonathanStein/odoo,AuyaJackie/odoo,vrenaville/ngo-addons-backport,ojengwa/odoo,spadae22/odoo,Codefans-fan/odoo,rgeleta/odoo,stephen144/odoo,srsman/odoo,massot/odoo,tinkerthaler/odoo,papouso/odoo,guewen/OpenUpgrade,odootr/odoo,poljeff/odoo,CubicERP/odoo,xzYue/odoo,joshuajan/odoo,damdam-s/OpenUpgrade,waytai/odoo,podemos-
info/odoo,alhashash/odoo,CatsAndDogsbvba/odoo,ccomb/OpenUpgrade,massot/odoo,ramitalat/odoo,VitalPet/odoo,n0m4dz/odoo,srsman/odoo,andreparames/odoo,frouty/odoogoeen,doomsterinc/odoo,kirca/OpenUpgrade,acshan/odoo,savoirfairelinux/OpenUpgrade,alexteodor/odoo,deKupini/erp,ovnicraft/odoo,FlorianLudwig/odoo,gorjuce/odoo,lombritz/odoo,synconics/odoo,fossoult/odoo,laslabs/odoo,waytai/odoo,luiseduardohdbackup/odoo,bwrsandman/OpenUpgrade,nhomar/odoo-mirror,rowemoore/odoo,realsaiko/odoo,chiragjogi/odoo,takis/odoo,VielSoft/odoo,ApuliaSoftware/odoo,Ernesto99/odoo,osvalr/odoo,rubencabrera/odoo,hbrunn/OpenUpgrade,elmerdpadilla/iv,ingadhoc/odoo,xujb/odoo,PongPi/isl-odoo,hassoon3/odoo,draugiskisprendimai/odoo,jeasoft/odoo,sv-dev1/odoo,markeTIC/OCB,hip-odoo/odoo,dgzurita/odoo,nhomar/odoo,rschnapka/odoo,frouty/odoo_oph,collex100/odoo,syci/OCB,frouty/odoogoeen,RafaelTorrealba/odoo,frouty/odoogoeen,Bachaco-ve/odoo,luiseduardohdbackup/odoo,stonegithubs/odoo,Drooids/odoo,joshuajan/odoo,jesramirez/odoo,vrenaville/ngo-addons-backport,savoirfairelinux/odoo,tarzan0820/odoo,csrocha/OpenUpgrade,Kilhog/odoo,odoo-turkiye/odoo,takis/odoo,demon-ru/iml-crm,ChanduERP/odoo,CatsAndDogsbvba/odoo,Endika/odoo,ovnicraft/odoo,Nowheresly/odoo,ingadhoc/odoo,odoousers2014/odoo,virgree/odoo,synconics/odoo,oihane/odoo,bkirui/odoo,jeasoft/odoo,provaleks/o8,rdeheele/odoo,hassoon3/odoo,credativUK/OCB,rowemoore/odoo,storm-computers/odoo,JonathanStein/odoo,Endika/OpenUpgrade,CubicERP/odoo,andreparames/odoo,inspyration/odoo,steedos/odoo,dsfsdgsbngfggb/odoo,luiseduardohdbackup/odoo,Endika/OpenUpgrade,provaleks/o8,bkirui/odoo,dalegregory/odoo,AuyaJackie/odoo,oasiswork/odoo,CopeX/odoo,poljeff/odoo,odootr/odoo,janocat/odoo,0k/OpenUpgrade,dariemp/odoo,shaufi10/odoo,x111ong/odoo,jaxkodex/odoo,tinkerthaler/odoo,feroda/odoo,florian-dacosta/OpenUpgrade,hanicker/odoo,rowemoore/odoo,avoinsystems/odoo,spadae22/odoo,bplancher/odoo,ShineFan/odoo,ccomb/OpenUpgrade,csrocha/OpenUpgrade,gorjuce/odoo,feroda/odoo,nhomar/odoo-mirror,BT-fg
arbely/odoo,microcom/odoo,rubencabrera/odoo,tinkhaven-organization/odoo,grap/OpenUpgrade,elmerdpadilla/iv,dllsf/odootest,slevenhagen/odoo,apanju/GMIO_Odoo,xujb/odoo,Endika/OpenUpgrade,guewen/OpenUpgrade,shaufi/odoo,ingadhoc/odoo,tinkhaven-organization/odoo,ujjwalwahi/odoo,Daniel-CA/odoo,Nick-OpusVL/odoo,tvtsoft/odoo8,bealdav/OpenUpgrade,dsfsdgsbngfggb/odoo,acshan/odoo,RafaelTorrealba/odoo,markeTIC/OCB,rahuldhote/odoo,AuyaJackie/odoo,joshuajan/odoo,mvaled/OpenUpgrade,gavin-feng/odoo,JonathanStein/odoo,havt/odoo,fgesora/odoo,florian-dacosta/OpenUpgrade,sinbazhou/odoo,cpyou/odoo,JonathanStein/odoo,srsman/odoo,Eric-Zhong/odoo,osvalr/odoo,apocalypsebg/odoo,ovnicraft/odoo,laslabs/odoo,optima-ict/odoo,abstract-open-solutions/OCB,OpenPymeMx/OCB,GauravSahu/odoo,0k/OpenUpgrade,fjbatresv/odoo,tarzan0820/odoo,alexteodor/odoo,fuselock/odoo,janocat/odoo,ehirt/odoo,dgzurita/odoo,ehirt/odoo,slevenhagen/odoo-npg,camptocamp/ngo-addons-backport,apanju/GMIO_Odoo,elmerdpadilla/iv,jfpla/odoo,gdgellatly/OCB1,sv-dev1/odoo,alqfahad/odoo,leorochael/odoo,factorlibre/OCB,Gitlab11/odoo,BT-rmartin/odoo,ygol/odoo,deKupini/erp,rgeleta/odoo,Adel-Magebinary/odoo,thanhacun/odoo,gdgellatly/OCB1,RafaelTorrealba/odoo,gorjuce/odoo,gdgellatly/OCB1,joshuajan/odoo,storm-computers/odoo,oasiswork/odoo,dkubiak789/odoo,rdeheele/odoo,bplancher/odoo,vnsofthe/odoo,VitalPet/odoo,tangyiyong/odoo,BT-fgarbely/odoo,n0m4dz/odoo,mmbtba/odoo,jeasoft/odoo,credativUK/OCB,numerigraphe/odoo,codekaki/odoo,highco-groupe/odoo,elmerdpadilla/iv,cloud9UG/odoo,SAM-IT-SA/odoo,AuyaJackie/odoo,lombritz/odoo,poljeff/odoo,SerpentCS/odoo,blaggacao/OpenUpgrade,Danisan/odoo-1,codekaki/odoo,syci/OCB,sysadminmatmoz/OCB,fgesora/odoo,massot/odoo,JGarcia-Panach/odoo,AuyaJackie/odoo,podemos-info/odoo,SAM-IT-SA/odoo,jiangzhixiao/odoo,lgscofield/odoo,Antiun/odoo,incaser/odoo-odoo,incaser/odoo-odoo,Grirrane/odoo,NeovaHealth/odoo,takis/odoo,lightcn/odoo,aviciimaxwell/odoo,shaufi10/odoo,savoirfairelinux/odoo,alexcuellar/odoo,storm-computers/odoo,jpsho
rt/odoo,BT-ojossen/odoo,Nowheresly/odoo,ubic135/odoo-design,minhtuancn/odoo,blaggacao/OpenUpgrade,pplatek/odoo,papouso/odoo,wangjun/odoo,oihane/odoo,numerigraphe/odoo,savoirfairelinux/OpenUpgrade,kifcaliph/odoo,javierTerry/odoo,dkubiak789/odoo,simongoffin/website_version,fgesora/odoo,CopeX/odoo,pplatek/odoo,sadleader/odoo,vrenaville/ngo-addons-backport,minhtuancn/odoo,dfang/odoo,fjbatresv/odoo,odootr/odoo,lightcn/odoo,ujjwalwahi/odoo,zchking/odoo,nhomar/odoo,dfang/odoo,lightcn/odoo,laslabs/odoo,bguillot/OpenUpgrade,zchking/odoo,ehirt/odoo,pedrobaeza/OpenUpgrade,dalegregory/odoo,stonegithubs/odoo,srimai/odoo,Ernesto99/odoo,mustafat/odoo-1,credativUK/OCB,feroda/odoo,klunwebale/odoo,AuyaJackie/odoo,tangyiyong/odoo,inspyration/odoo,ThinkOpen-Solutions/odoo,jaxkodex/odoo,shaufi/odoo,abenzbiria/clients_odoo,jusdng/odoo,luistorresm/odoo,tarzan0820/odoo,prospwro/odoo,Ernesto99/odoo,markeTIC/OCB,abdellatifkarroum/odoo,camptocamp/ngo-addons-backport,mszewczy/odoo,ecosoft-odoo/odoo,dfang/odoo,bealdav/OpenUpgrade,sergio-incaser/odoo,bwrsandman/OpenUpgrade,makinacorpus/odoo,jusdng/odoo,jiangzhixiao/odoo,windedge/odoo,luiseduardohdbackup/odoo,dsfsdgsbngfggb/odoo,ramitalat/odoo,naousse/odoo,hanicker/odoo,leoliujie/odoo,camptocamp/ngo-addons-backport,naousse/odoo,kybriainfotech/iSocioCRM,steedos/odoo,stephen144/odoo,SAM-IT-SA/odoo,sebalix/OpenUpgrade,idncom/odoo,Codefans-fan/odoo,ChanduERP/odoo,grap/OpenUpgrade,kybriainfotech/iSocioCRM,factorlibre/OCB,provaleks/o8,Maspear/odoo,guerrerocarlos/odoo,Elico-Corp/odoo_OCB,xujb/odoo,goliveirab/odoo,funkring/fdoo,Kilhog/odoo,shaufi10/odoo,KontorConsulting/odoo,Maspear/odoo,deKupini/erp,luiseduardohdbackup/odoo,hmen89/odoo,alqfahad/odoo,NeovaHealth/odoo,sebalix/OpenUpgrade,rdeheele/odoo,JGarcia-Panach/odoo,ramadhane/odoo,BT-rmartin/odoo,hmen89/odoo,vnsofthe/odoo,frouty/odoo_oph,ihsanudin/odoo,hoatle/odoo,markeTIC/OCB,VielSoft/odoo,Drooids/odoo,alexcuellar/odoo,jeasoft/odoo,ecosoft-odoo/odoo,kifcaliph/odoo,kirca/OpenUpgrade,microcom/odoo,jua
nalfonsopr/odoo,sv-dev1/odoo,cysnake4713/odoo,slevenhagen/odoo-npg,hbrunn/OpenUpgrade,lightcn/odoo,ihsanudin/odoo,funkring/fdoo,odoousers2014/odoo,tinkhaven-organization/odoo,cedk/odoo,mlaitinen/odoo,eino-makitalo/odoo,sebalix/OpenUpgrade,hopeall/odoo,srsman/odoo,naousse/odoo,erkrishna9/odoo,ygol/odoo,KontorConsulting/odoo,wangjun/odoo,guerrerocarlos/odoo,ujjwalwahi/odoo,pedrobaeza/odoo,stonegithubs/odoo,bwrsandman/OpenUpgrade,javierTerry/odoo,srimai/odoo,ShineFan/odoo,waytai/odoo,Antiun/odoo,hifly/OpenUpgrade,BT-rmartin/odoo,alqfahad/odoo,jfpla/odoo,0k/OpenUpgrade,hifly/OpenUpgrade,christophlsa/odoo,kirca/OpenUpgrade,Danisan/odoo-1,ygol/odoo,xujb/odoo,doomsterinc/odoo,gsmartway/odoo,hubsaysnuaa/odoo,credativUK/OCB,SerpentCS/odoo,Antiun/odoo,bkirui/odoo,alexteodor/odoo,draugiskisprendimai/odoo,osvalr/odoo,grap/OCB | bin/addons/base/module/wizard/base_module_configuration.py | bin/addons/base/module/wizard/base_module_configuration.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import pooler
from osv import osv, fields
class base_module_configuration(osv.osv_memory):
_name = "base.module.configuration"
def config(self, cr, uid, data, context=None):
return self.pool.get('res.config').next(cr, uid, [], context=context)
base_module_configuration()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | Python | |
90c27c1444f80b6d746c8f92b6b79e38ae5ce87e | Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/1a04c55547456cffd9b9d250dc8680eb9d89f750. | paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,yongtang/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,paolodedios/t
ensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gautam1858/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,gautam1858/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model | third_party/tf_runtime/workspace.bzl | third_party/tf_runtime/workspace.bzl | """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "1a04c55547456cffd9b9d250dc8680eb9d89f750"
TFRT_SHA256 = "296130004f8b3ce22b46b9f263c9379dd462eff53c2332f2ddd2d5ab26ab87ec"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| """Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "5e36ba4f8e42a4022062a10a75684d5a2dfb1b53"
TFRT_SHA256 = "3bf90326b6dd4f938825dd7ab3424abbee7cc86b370f24e285ff60440c3db360"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = [
"http://mirror.tensorflow.org/github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
"https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT),
],
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
| apache-2.0 | Python |
f099d055279abfea2bd58c8e0b28c2fa162ac8cd | modify zqplant crawler script | colddew/mix-python | crawler/DzwBaikeCrawler.py | crawler/DzwBaikeCrawler.py | import hashlib
'''
curl
-H 'Host: api.dzwbaike.xyz'
-H 'Content-Type: text/html;charset=UTF-8'
-H 'Accept: */*' -H 'Accept-Language: zh-cn'
-H 'token: 05e33d91-dd85-40d5-aa67-a90130270a95'
-H 'User-Agent: Mozilla/5.0 (iPhone; CPU iPhone OS 10_3_3 like Mac OS X) AppleWebKit/603.3.8 (KHTML, like Gecko) Mobile/14G60 MicroMessenger/6.5.15 NetType/WIFI Language/zh_CN'
-H 'sign: b59fc7995e1dae61de5b768f58ea3367'
-H 'timestamp: 1505011126698'
-H 'Referer: https://servicewechat.com/wx56f5e9f2dde583b2/4/page-frame.html'
--compressed
'https://api.dzwbaike.xyz/api/tdouroubaikenew/list?page=6&limit=20'
'''
m2 = hashlib.md5()
sign = 'https://api.dzwbaike.xyz/api/tdouroubaikenew/list?page=6&limit=20'
m2.update(sign)
print m2.hexdigest()
| mit | Python | |
32a569f0f6f33ef5cf11031bb359989379582489 | add script to create cloudtrail table in Athena | 1Strategy/security-fairy | create_cloudtrail_table.py | create_cloudtrail_table.py | import boto3
# Create AWS session
try:
session = boto3.session.Session(profile_name='training')
except Exception as e:
session = boto3.session.Session()
# Connect to Athena
athena = session.client('athena', region_name='us-east-1')
def lambda_handler(event, context):
# You must submit the AWS account number within the event parameter
# Run the create cloudtrail table query
creation = athena.start_query_execution(QueryString=create_table,
ResultConfiguration=config
)
return creation
# Query Configurations
config = {
'OutputLocation': 's3://security-fairy/tables/',
'EncryptionConfiguration': {
'EncryptionOption': 'SSE_S3'
}
}
create_table = """
create external table if not exists aws_logs.cloudtrail (
eventVersion string,
userIdentity
struct<
type: string,
principalId: string,
arn: string,
accountId: string,
userName: string,
invokedBy: string,
accesskeyid:string,
sessioncontext:
struct<
attributes:
struct<
mfaauthenticated:string,
creationdate:string
>,
sessionIssuer:
struct<
type:string,
principalId:string,
arn:string,
accountId:string,
userName:string
>
>
>,
eventTime string,
eventSource string,
eventName string,
awsRegion string,
sourceIPAddress string,
userAgent string,
errorCode string,
errorMessage string,
requestID string,
eventID string,
resources
array<
struct<
ARN:string,
accountId:string,
type:string
>
>,
eventType string,
apiVersion string,
readOnly boolean,
recipientAccountId string,
sharedEventID string,
vpcEndpointId string
)
row format serde 'com.amazon.emr.hive.serde.CloudTrailSerde'
stored as inputformat 'com.amazon.emr.cloudtrail.CloudTrailInputFormat'
outputformat 'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
location 's3://1strategy-training-traillogs/AWSLogs/{account_number}/CloudTrail/'
;
""".format(account_number=event.get(accountId))
| apache-2.0 | Python | |
f693949a21864938991904ed1503ae5303426c90 | Revert "Remove test stub" | willthames/ansible-lint | test/TestLineNumber.py | test/TestLineNumber.py | # Copyright (c) 2020 Albin Vass <albin.vass@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import ansiblelint.skip_utils
from ansiblelint import AnsibleLintRule
MAGIC_NUMBER = 7007
class LinenumberRule(AnsibleLintRule):
id = 'TEST0003'
shortdesc = 'Linenumber is returned'
description = 'This is a rule that return a linenumber'
tags = {'fake', 'dummy', 'test3'}
def matchplay(self, file, play):
return [('Linenumber returned', self.shortdesc, MAGIC_NUMBER)]
def test_rule_linenumber(monkeypatch):
def mock_response(*args, **kwargs):
return [{'skipped_rules': []}]
monkeypatch.setattr(ansiblelint.skip_utils,
"append_skipped_rules",
mock_response)
text = "- debug:\n msg: a"
rule = LinenumberRule()
matches = rule.matchyaml(dict(path="", type='tasklist'), text)
assert matches[0].linenumber == MAGIC_NUMBER
| mit | Python | |
3bb43f31263cce7ceebab943a1eed9e8c83cb90d | Set dbtable for models to use the "celery_" prefix not "djcelery". | planorama/django-celery,digimarc/django-celery,CloudNcodeInc/django-celery,georgewhewell/django-celery,iris-edu-int/django-celery,celery/django-celery,kanemra/django-celery,digimarc/django-celery,nadios/django-celery,nadios/django-celery,ask/django-celery,ask/django-celery,Amanit/django-celery,alexhayes/django-celery,digimarc/django-celery,iris-edu-int/django-celery,CloudNcodeInc/django-celery,iris-edu-int/django-celery,Amanit/django-celery,axiom-data-science/django-celery,celery/django-celery,georgewhewell/django-celery,kanemra/django-celery,alexhayes/django-celery,tkanemoto/django-celery,celery/django-celery,planorama/django-celery,axiom-data-science/django-celery,kanemra/django-celery,CloudNcodeInc/django-celery,tkanemoto/django-celery,georgewhewell/django-celery,axiom-data-science/django-celery,Amanit/django-celery,tkanemoto/django-celery | djcelery/models.py | djcelery/models.py | import django
from django.db import models
from django.utils.translation import ugettext_lazy as _
from picklefield.fields import PickledObjectField
from celery import conf
from celery import states
from djcelery.managers import TaskManager, TaskSetManager
TASK_STATUSES_CHOICES = zip(states.ALL_STATES, states.ALL_STATES)
class TaskMeta(models.Model):
"""Task result/status."""
task_id = models.CharField(_(u"task id"), max_length=255, unique=True)
status = models.CharField(_(u"task status"), max_length=50,
default=states.PENDING, choices=TASK_STATUSES_CHOICES)
result = PickledObjectField(null=True, default=None)
date_done = models.DateTimeField(_(u"done at"), auto_now=True)
traceback = models.TextField(_(u"traceback"), blank=True, null=True)
objects = TaskManager()
class Meta:
"""Model meta-data."""
verbose_name = _(u"task meta")
verbose_name_plural = _(u"task meta")
db_table = "celery_taskmeta"
def to_dict(self):
return {"task_id": self.task_id,
"status": self.status,
"result": self.result,
"date_done": self.date_done,
"traceback": self.traceback}
def __unicode__(self):
return u"<Task: %s state->%s>" % (self.task_id, self.status)
class TaskSetMeta(models.Model):
"""TaskSet result"""
taskset_id = models.CharField(_(u"task id"), max_length=255, unique=True)
result = PickledObjectField()
date_done = models.DateTimeField(_(u"done at"), auto_now=True)
objects = TaskSetManager()
class Meta:
"""Model meta-data."""
verbose_name = _(u"taskset meta")
verbose_name_plural = _(u"taskset meta")
db_table = "celery_tasksetmeta"
def to_dict(self):
return {"taskset_id": self.taskset_id,
"result": self.result,
"date_done": self.date_done}
def __unicode__(self):
return u"<TaskSet: %s>" % (self.taskset_id)
if (django.VERSION[0], django.VERSION[1]) >= (1, 1):
# keep models away from syncdb/reset if database backend is not
# being used.
if conf.RESULT_BACKEND != 'database':
TaskMeta._meta.managed = False
TaskSetMeta._meta.managed = False
| import django
from django.db import models
from django.utils.translation import ugettext_lazy as _
from picklefield.fields import PickledObjectField
from celery import conf
from celery import states
from djcelery.managers import TaskManager, TaskSetManager
TASK_STATUSES_CHOICES = zip(states.ALL_STATES, states.ALL_STATES)
class TaskMeta(models.Model):
"""Task result/status."""
task_id = models.CharField(_(u"task id"), max_length=255, unique=True)
status = models.CharField(_(u"task status"), max_length=50,
default=states.PENDING, choices=TASK_STATUSES_CHOICES)
result = PickledObjectField(null=True, default=None)
date_done = models.DateTimeField(_(u"done at"), auto_now=True)
traceback = models.TextField(_(u"traceback"), blank=True, null=True)
objects = TaskManager()
class Meta:
"""Model meta-data."""
verbose_name = _(u"task meta")
verbose_name_plural = _(u"task meta")
def to_dict(self):
return {"task_id": self.task_id,
"status": self.status,
"result": self.result,
"date_done": self.date_done,
"traceback": self.traceback}
def __unicode__(self):
return u"<Task: %s state->%s>" % (self.task_id, self.status)
class TaskSetMeta(models.Model):
"""TaskSet result"""
taskset_id = models.CharField(_(u"task id"), max_length=255, unique=True)
result = PickledObjectField()
date_done = models.DateTimeField(_(u"done at"), auto_now=True)
objects = TaskSetManager()
class Meta:
"""Model meta-data."""
verbose_name = _(u"taskset meta")
verbose_name_plural = _(u"taskset meta")
def to_dict(self):
return {"taskset_id": self.taskset_id,
"result": self.result,
"date_done": self.date_done}
def __unicode__(self):
return u"<TaskSet: %s>" % (self.taskset_id)
if (django.VERSION[0], django.VERSION[1]) >= (1, 1):
# keep models away from syncdb/reset if database backend is not
# being used.
if conf.RESULT_BACKEND != 'database':
TaskMeta._meta.managed = False
TaskSetMeta._meta.managed = False
| bsd-3-clause | Python |
18f7737f1f187aef7181e6cdd72db774df8eda3d | add transformer test | jubatus/jubakit | jubakit/test/integration/test_model.py | jubakit/test/integration/test_model.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from unittest import TestCase
import os
import json
from jubakit.model import JubaModel
from jubakit.anomaly import Anomaly, Config as AnomalyConfig
from jubakit.classifier import Classifier, Config as ClassifierConfig
from jubakit.recommender import Recommender, Config as RecommenderConfig
from jubakit.weight import Weight, Config as WeightConfig
from jubakit.dumb import Clustering, NearestNeighbor, Regression
class JubaModelTransformationTest(TestCase):
def _get_model(self, service, config):
# Create empty model for the given service.
s = service.run(config)
path = None
try:
s.save('test')
path = s.get_status().popitem()[1]['last_saved_path']
with open(path, 'rb') as f:
return JubaModel.load_binary(f)
finally:
s.stop()
if path and os.path.exists(path):
os.remove(path)
def _assertModelLoadable(self, service, model):
config = json.loads(model.system.config)
s = service.run(config)
path = None
try:
s.save('test')
path = s.get_status().popitem()[1]['last_saved_path']
with open(path, 'wb') as f:
model.dump_binary(f)
s.load('test')
finally:
s.stop()
if path and os.path.exists(path):
os.remove(path)
def test_from_classifier(self):
model = self._get_model(Classifier, ClassifierConfig(method="NN"))
self._assertModelLoadable(Classifier, model)
self._assertModelLoadable(Weight, model.transform('weight'))
self._assertModelLoadable(NearestNeighbor, model.transform('nearest_neighbor'))
def test_from_regression(self):
RegressionConfig = {
"method": "NN",
"parameter": {
"method": "euclid_lsh",
"parameter": {"hash_num": 64},
"nearest_neighbor_num": 128
},
"converter": {
"string_rules": [{"key":"*","type":"str","sample_weight":"bin","global_weight":"bin"}],
"num_rules": [{"key":"*","type":"num"}]
}
}
model = self._get_model(Regression, RegressionConfig)
self._assertModelLoadable(Regression, model)
self._assertModelLoadable(Weight, model.transform('weight'))
self._assertModelLoadable(NearestNeighbor, model.transform('nearest_neighbor'))
def test_from_recommender(self):
model = self._get_model(Recommender, RecommenderConfig(method="nearest_neighbor_recommender"))
self._assertModelLoadable(Recommender, model)
self._assertModelLoadable(Weight, model.transform('weight'))
self._assertModelLoadable(NearestNeighbor, model.transform('nearest_neighbor'))
def test_from_anomaly_nn(self):
model = self._get_model(Anomaly, AnomalyConfig(method="light_lof"))
self._assertModelLoadable(Anomaly, model)
self._assertModelLoadable(Weight, model.transform('weight'))
self._assertModelLoadable(NearestNeighbor, model.transform('nearest_neighbor'))
def test_from_anomaly_recommender(self):
model = self._get_model(Anomaly, AnomalyConfig(method="lof"))
self._assertModelLoadable(Anomaly, model)
self._assertModelLoadable(Weight, model.transform('weight'))
self._assertModelLoadable(Recommender, model.transform('recommender'))
def test_from_clustering(self):
model = self._get_model(Clustering, Clustering.CONFIG)
self._assertModelLoadable(Clustering, model)
self._assertModelLoadable(Weight, model.transform('weight'))
| mit | Python | |
317c74fb6a31aad82f080b3bb8383c4047ae2f63 | Create tests.py | jonathanmarvens/jeeves,jonathanmarvens/jeeves,jonathanmarvens/jeeves,BambooL/jeeves,jonathanmarvens/jeeves,BambooL/jeeves,BambooL/jeeves,jeanqasaur/jeeves,BambooL/jeeves | demo/openmrs/openmrs/tests.py | demo/openmrs/openmrs/tests.py | import unittest
from BaseOpenmrsObject import *
class TestOrderFunctions(unittest.TestCase):
def setUp(self):
self.order = Order()
self.order.setOrderId(9112)
self.order.setOrderNumber('911')
def test_copy_methods(self):
copy1 = self.order.copy()
copy2 = self.order.copyForModification()
self.assertEqual(self.order.OrderAction.ORDER, 'ORDER')
self.assertEqual(self.order.Urgency.ROUTINE, 'ROUTINE')
self.assertIs(copy2, self.order)
self.assertIsNot(copy1, self.order)
self.assertIsNot(copy1, copy2)
self.assertEqual(self.order.hashCode(), self.order.getOrderId())
def test_date_methods(self):
self.assertTrue(self.order.isCurrent())
checkDate = datetime(2013, 12, 25)
autoExpireDate = datetime(2017, 12, 25)
self.order.setAutoExpireDate(autoExpireDate)
self.assertTrue(self.order.isCurrent(checkDate))
discontinuedDate = datetime(2015, 12, 25)
self.order.setDiscontinuedDate(discontinuedDate)
self.assertTrue(self.order.isCurrent(checkDate))
self.assertFalse(self.order.isDiscontinued(checkDate))
startDate = datetime(2014, 12, 25)
self.order.setStartDate(startDate)
self.assertFalse(self.order.isCurrent(checkDate))
self.order.setDiscontinued(True)
self.assertFalse(self.order.isDiscontinued(checkDate))
checkDate2 = datetime(2016, 12, 25)
self.assertTrue(self.order.isDiscontinued(checkDate2))
self.assertTrue(self.order.isFuture(checkDate))
self.assertFalse(self.order.isDrugOrder())
obj = Order(orderId = 9112)
self.assertTrue(self.order.equals(obj))
self.assertIs(self.order.serialVersionUID, 1)
if __name__ == "__main__":
unittest.main()
| mit | Python | |
274d539e0cf08a3417315bd68bc5544dbb21d0ff | Add config test | authmillenon/wakefs | tests/config.py | tests/config.py | import wakefs.config
import os
import unittest
import random
import string
def random_str(N):
''.join(random.choice(string.ascii_uppercase + string.digits + string.ascii_lowercase) for x in range(N))
class TestConfigFileCreate(unittest.TestCase):
def test_file_create(self):
testfile = "test.cfg"
config = wakefs.config.Config(testfile)
config.close()
self.assertTrue(os.path.exists(testfile))
os.remove(testfile)
class TestConfigAttributes(unittest.TestCase):
def setUp(self):
self.testfile = "test.cfg"
self.config = wakefs.config.Config(self.testfile)
def test_get_attribute(self):
self.config.database_uri
def test_get_wrong_attribute(self):
with self.assertRaises(AttributeError):
self.config.detabase_uri
def test_set_attribute(self):
teststr = random_str(random.randint(5,20))
self.config.test = teststr
self.assertTrue(self.config.test == teststr)
def test_del_attribute(self):
teststr = random_str(random.randint(5,20))
self.config.test = teststr
self.assertTrue(self.config.test == teststr)
del self.config.test
with self.assertRaises(AttributeError):
self.config.test
def tearDown(self):
self.config.close()
os.remove(self.testfile)
| mit | Python | |
c1c5f58d12ff9a8e532de971c28e3676915a7117 | Add py-cairocffi package (#12161) | LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/py-cairocffi/package.py | var/spack/repos/builtin/packages/py-cairocffi/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyCairocffi(PythonPackage):
"""cairocffi is a CFFI-based drop-in replacement for Pycairo, a set of
Python bindings and object-oriented API for cairo. Cairo is a 2D vector
graphics library with support for multiple backends including image
buffers, PNG, PostScript, PDF, and SVG file output."""
homepage = "https://github.com/Kozea/cairocffi"
url = "https://pypi.io/packages/source/c/cairocffi/cairocffi-1.0.2.tar.gz"
import_modules = ['cairocffi']
version('1.0.2', sha256='01ac51ae12c4324ca5809ce270f9dd1b67f5166fe63bd3e497e9ea3ca91946ff')
depends_on('python@3.5:', type=('build', 'run'))
depends_on('py-setuptools@39.2.0:', type='build')
depends_on('py-cffi@1.1.0:', type=('build', 'run'))
depends_on('py-pytest-runner', type='test')
depends_on('py-pytest-cov', type='test')
depends_on('py-pytest-flake8', type='test')
depends_on('py-pytest-isort', type='test')
| lgpl-2.1 | Python | |
eda01c6b45629d6c039785ba502dcde47cabd020 | Add python implementation | juruen/montecarlo-minesweeper | probability.py | probability.py | #!/usr/bin/env python
import random
import sets
BOARD_SIZE = 9
MINES = 10
SAMPLES = 10**5
def generate_mines(board_size, num_of_mines):
mines = sets.Set()
while len(mines) != num_of_mines:
mines.add((random.randint(0, board_size - 1),
random.randint(0, board_size - 1)))
return mines
def has_adjacent(pos, mines):
x, y = pos
if (x + 1, y) in mines:
return True
if (x - 1, y) in mines:
return True
if (x, y + 1) in mines:
return True
if (x, y - 1) in mines:
return True
if (x + 1, y + 1) in mines:
return True
if (x + 1, y - 1) in mines:
return True
if (x - 1, y + 1) in mines:
return True
if (x - 1, y - 1) in mines:
return True
return False
def random_var_function(mines):
for mine in mines:
if not (has_adjacent(mine, mines)):
return 0
return 1
print sum([random_var_function(generate_mines(BOARD_SIZE, MINES)) for i in range(0, SAMPLES)]) / float(SAMPLES)
| apache-2.0 | Python | |
3352236ff27dbfd749b71dd152f6809b2019bee4 | add tests | honzajavorek/czech-holidays | test_czech_holidays.py | test_czech_holidays.py | import re
from datetime import date
import requests
import pytest
from czech_holidays import holidays, Holidays, Holiday
WIKIPEDIA_RE = re.compile(r'Rok\s\d{4}</th>\s<td>(?P<date>\w+)')
WIKIPEDIA_DATE_RE = re.compile(r'(?P<day>\d+)\.\s+(?P<month>\w+)\s+(?P<year>\d{4})')
def fetch_easter_dates():
response = requests.get('https://cs.wikipedia.org/wiki/Velikono%C4%8Dn%C3%AD_pond%C4%9Bl%C3%AD')
response.raise_for_status()
return [parse_wikipedia_date(match.group('date'))
for match in WIKIPEDIA_RE.finditer(response.text)]
def parse_wikipedia_date(date_text):
match = WIKIPEDIA_DATE_RE.search(date_text)
groups = match.groupdict()
return date(int(groups['year']),
3 if 'bře' in groups['month'] else 4,
int(groups['day']))
@pytest.mark.parametrize('date_text, expected', [
('5. dubna 2021', date(2021, 4, 5)),
('18. dubna 2022', date(2022, 4, 18)),
('10. dubna 2023', date(2023, 4, 10)),
('31. března 2024', date(2024, 3, 31)),
])
def test_parse_wikipedia_date(date_text, expected):
assert parse_wikipedia_date(date_text) == expected
@pytest.mark.parametrize('easter_date', fetch_easter_dates())
def test_easter(easter_date):
holiday = Holidays(easter_date.year).easter
assert (holiday.year, holiday.month, holiday.day) == (easter_date.year, easter_date.month, easter_date.day)
def test_christmas():
holiday = Holidays(2022).christmas
assert (holiday.year, holiday.month, holiday.day) == (2022, 12, 24)
| mit | Python | |
0415071808b1bfa659a790e50692dc65d479b627 | add config.sample.py | lynxis/testWrt | tests/config.sample.py | tests/config.sample.py | """
This is the sample config.py
create your own config.py which match your testsetup
It will be imported by device tests
"""
from testWrt import testsetup
TestSetup = testsetup.TestSetup()
TestSetup.set_openwrt("192.168.2.1")
| bsd-3-clause | Python | |
08f147052e19c43e89f3548a10ecc847316e6789 | Add a tool for backend synchronization | rensimlab/rensimlab.github.io,rensimlab/rensimlab.github.io,rensimlab/rensimlab.github.io | tools/syncer.py | tools/syncer.py | def sync():
import os
from bson.objectid import ObjectId
from girder import logger
from girder.models.assetstore import Assetstore
from girder.models.collection import Collection
from girder.models.file import File
from girder.models.folder import Folder
from girder.models.item import Item
from girder.models.user import User
from girder.exceptions import ResourcePathNotFound
from girder.utility import path as path_utils
from girder.utility import assetstore_utilities
assetstoreId = ObjectId('59b04a2c38eed90001dcc45c')
assetstore = Assetstore().load(id=assetstoreId)
adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
physicalPath = '/mnt/data/renaissance'
collectionPath = '/collection/Renaissance Simulations'
collectionId = ObjectId('59b04a0e38eed90001dcc45b')
rslCollection = Collection().load(collectionId, force=True)
admin = list(User().getAdmins())[0]
def purge_leaf_folder(path):
folder = path_utils.lookUpPath(path, user=admin)['document']
if Item().find({'folderId': folder['_id']}).count() > 0 or \
list(Folder().childFolders(folder, 'folder', user=admin)):
return
logger.info("Removing empty folder %s" % path)
Folder().remove(folder)
purge_leaf_folder(os.path.dirname(path))
q = {'assetstoreId': assetstoreId, 'imported': True}
fields = ['path', 'size', 'name', 'mtime']
girderFiles = {
fObj.pop('path'): fObj for fObj in File().find(q, fields=fields)
}
toImport = {}
toModify = {}
for (dirpath, dirnames, filenames) in os.walk(physicalPath):
for filename in filenames:
path = os.path.join(dirpath, filename)
stat = os.stat(path)
try:
girderObj = girderFiles.pop(path)
except KeyError:
toImport[path] = {
'mtime': stat.st_mtime, 'size': stat.st_size,
'name': filename
}
if girderObj['mtime'] != stat.st_mtime or \
girderObj['size'] != stat.st_size or \
girderObj['name'] != filename:
girderObj.update({
'mtime': stat.st_mtime, 'size': stat.st_size,
'name': filename, 'path': path
})
toModify[girderObj.pop('_id')] = girderObj
# Remove orphaned files
potentialLeafFolders = set()
for orphan in girderFiles.values():
fileObj = File().load(ObjectId(orphan['_id']), force=True)
itemObj = Item().load(fileObj['itemId'], force=True)
File().remove(fileObj)
girderPath = path_utils.getResourcePath('item', itemObj, force=True)
if not list(Item().childFiles(itemObj)):
Item().remove(itemObj)
potentialLeafFolders.add(os.path.dirname(girderPath))
logger.info('Removed %s' % girderPath)
# Remove empty folders
for path in list(potentialLeafFolders):
purge_leaf_folder(path)
# Import new items
for filePath, newFile in toImport.items():
relpath = os.path.relpath(filePath, physicalPath)
parentType = 'collection'
parent = rslCollection
dirs = os.path.dirname(relpath).split('/')
for directory in dirs:
try:
parent, parentType = \
path_utils.lookUpToken(directory, parentType, parent)
except ResourcePathNotFound:
parent = Folder().createFolder(
parent, directory, parentType=parentType,
public=True, creator=admin)
parentType = 'folder'
adapter._importDataAsItem(
os.path.basename(relpath), admin, parent,
os.path.dirname(filePath), [os.path.basename(filePath)],
reuseExisting=True)
logger.info('Imported %s to %s' %
(filePath, os.path.join(collectionPath, relpath)))
| mit | Python | |
eb091fc81dc374d0eb0800a596d6e0db95a55687 | Create CombinationSumII_001.py | Chasego/codirit,Chasego/codi,cc13ny/algo,Chasego/codi,cc13ny/Allin,Chasego/codirit,cc13ny/algo,Chasego/cod,cc13ny/Allin,cc13ny/algo,Chasego/codi,Chasego/codi,Chasego/cod,Chasego/codi,Chasego/codirit,cc13ny/Allin,Chasego/codirit,Chasego/cod,cc13ny/Allin,Chasego/cod,cc13ny/Allin,cc13ny/algo,cc13ny/algo,Chasego/codirit,Chasego/cod | leetcode/040-Combination-Sum-II/CombinationSumII_001.py | leetcode/040-Combination-Sum-II/CombinationSumII_001.py | #Node simplification, improvement & optimization
#How, it's good because it can be done on the original code of "Combination Sum"
class Solution:
# @param {integer[]} candidates
# @param {integer} target
# @return {integer[][]}
def combinationSum2(self, candidates, target):
candidates.sort()
return self.combsum(candidates, target)
def combsum(self, nums, target):
if target == 0:
return [[]]
if not nums or nums[0] > target or target < 1:
return []
res = []
i = 0
while i < len(nums):
num = nums[i]
cnt = 1
while i + cnt < len(nums) and nums[i + cnt] == num:
cnt += 1
j = i + cnt
pre = [num]
t = target
while t >= num and cnt > 0:
t -= num
cnt -= 1
subs = self.combsum(nums[j:], t)
for sub in subs:
res.append(pre + sub)
pre += [num]
i = j
return res
| mit | Python | |
d3a9a4a300acc204111e19945381a995f0f7cdda | add import script for Dumfries and Galloway | chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_dumfries_and_galloway.py | polling_stations/apps/data_collection/management/commands/import_dumfries_and_galloway.py | from data_collection.management.commands import BaseScotlandSpatialHubImporter
class Command(BaseScotlandSpatialHubImporter):
council_id = 'S12000006'
council_name = 'Dumfries and Galloway'
elections = ['local.dumfries-and-galloway.2017-05-04']
| bsd-3-clause | Python | |
ba5d87ff551df47df2ed4de15058df28ad49fe41 | add error classes. | seikichi/pumblr | pumblr/error.py | pumblr/error.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
class PumblrError(object):
"""Pumblr exception"""
def __init__(self, msg):
self._msg = msg
def __str__(self):
return self._msg
class PumblrAuthError(PumblrError):
"""403 Forbidden exception"""
pass
class PumblrReqestError(PumblrError):
"""400 Bad Request exception"""
pass
| mit | Python | |
86fe554e8cc67ad346d2ecc532cea6e94461a0c6 | Add support for file session | free-free/pyblog,free-free/pyblog,free-free/pyblog,free-free/pyblog | app/tools/session.py | app/tools/session.py | #-*- coding:utf-8 -*-
import uuid
import time
import os
import json
class Session(dict):
def __init__(self,session_id=None,expire=None,*args,**kw):
if session_id==None:
self._session_id=self._generate_session_id()
else:
self._session_id=session_id
self._expire=expire
super(Session,self).__init__(*args,**kw)
def _generate_session_id(self):
return str(uuid.uuid1().hex)
def set(self,sname,svalue):
pass
def get(self,sname):
pass
def __getattr__(self,k):
pass
def __setattr__(self,k,v):
pass
def save(self):
pass
class FileSession(Session):
_sesion_dir='/tmp/session'
def __init__(self,session_id=None,expire=None,*args,**kw):
if not os.path.exists(self._session_dir):
os.mkdir(self._session_dir)
if session_id==None:
self._session_id=str(uuid.uuid1().hex)
self._session_file=os.path.join(self._session_dir,self._session_id)
else:
self._session_id=session_id
self._session_file=os.path.join(self._session_dir,session_id)
if os.path.exists(self._session_file):
with open(self._session_file,'r',errors='ignore',encoding='utf-8') as f:
self[self._session_id]=json.load(self._fp)
else:
self[self._session_id]={}
super(FileSession,self).__init__(self._session_id,expire,*args,**kw)
def set(self,sname,svalue):
self[self._session_id][sname]=svalue
def get(self,sname):
return self[self._session_id].get(sname,None)
def save(self):
with open(self._session_file,'w',errors='ignore',encoding='utf-8') as f:
json.dump(self[self._session_if],f)
def __getattr__(self,k):
if k in self[self._session_id]:
return self[self._session_id].get(k)
return None
def __setattr__(self,k,v):
self[self._session_id][k]=v
class MongoSession(Session):
def __init__(self,session_id=None,expire=None,*args,**kw):
super(MongoSession,self).__init__(session_id,expire,*args,**kw)
class RedisSession(Session):
def __init__(self,session_id=None,expire=None,*args,**kw):
super(RedisSession,self).__init__(session_id,expire,*args,**kw)
class SessionManager(object):
def __init__(self,
| mit | Python | |
8bfd0031c4a93b644cd8f9892a0cc1a8671a9024 | add build/BuildSpawn.py | tianocore/buildtools-BaseTools,tianocore/buildtools-BaseTools,bitcrystal/buildtools-BaseTools,tianocore/buildtools-BaseTools,bitcrystal/buildtools-BaseTools,bitcrystal/buildtools-BaseTools,bitcrystal/buildtools-BaseTools,tianocore/buildtools-BaseTools,bitcrystal/buildtools-BaseTools,tianocore/buildtools-BaseTools | Source/Python/build/BuildSpawn.py | Source/Python/build/BuildSpawn.py | import os
from threading import *
from subprocess import *
class BuildSpawn(Thread):
def __init__(self, Sem=None, Filename=None, Args=None, Num=0):
Thread.__init__(self)
self.sem=Sem
self.filename=Filename
self.args=Args
self.num=Num
def run(self):
self.sem.acquire()
p = Popen(["nmake", "/nologo", "-f", self.filename, self.args], env=os.environ, cwd=os.path.dirname(self.filename))
p.communicate()
if p.returncode != 0:
return p.returncode
self.sem.release() | bsd-2-clause | Python | |
00a00621f005e3db3fd25c4c09fb1540ba165fed | Test the VenvBuilder | ionelmc/virtualenv,ionelmc/virtualenv,ionelmc/virtualenv | tests/unit/builders/test_venv.py | tests/unit/builders/test_venv.py | import subprocess
import pretend
import pytest
import virtualenv.builders.venv
from virtualenv.builders.venv import VenvBuilder, _SCRIPT
def test_venv_builder_check_available_success(monkeypatch):
check_output = pretend.call_recorder(lambda *a, **kw: None)
monkeypatch.setattr(
virtualenv.builders.venv,
"check_output",
check_output,
)
assert VenvBuilder.check_available("wat")
assert check_output.calls == [
pretend.call(["wat", "-c", "import venv"], stderr=subprocess.STDOUT),
]
def test_venv_builder_check_available_fails(monkeypatch):
@pretend.call_recorder
def check_output(*args, **kwargs):
raise subprocess.CalledProcessError(1, "an error!")
monkeypatch.setattr(
virtualenv.builders.venv,
"check_output",
check_output,
)
assert not VenvBuilder.check_available("wat")
assert check_output.calls == [
pretend.call(["wat", "-c", "import venv"], stderr=subprocess.STDOUT),
]
@pytest.mark.parametrize("system_site_packages", [True, False])
def test_venv_builder_create_venv(tmpdir, monkeypatch, system_site_packages):
check_call = pretend.call_recorder(lambda *a, **kw: None)
monkeypatch.setattr(subprocess, "check_call", check_call)
builder = VenvBuilder(
"wat",
None,
system_site_packages=system_site_packages,
)
builder.create_virtual_environment(str(tmpdir))
script = _SCRIPT.format(
system_site_packages=system_site_packages,
destination=str(tmpdir),
)
assert check_call.calls == [
pretend.call(["wat", "-c", script])
]
| mit | Python | |
efc8d3182f79111b3a1b7df445dafd46fef9862a | Add YAML wrapper allowing conf strings to be used in YAML source files | daemotron/controlbeast,daemotron/controlbeast | controlbeast/utils/yaml.py | controlbeast/utils/yaml.py | # -*- coding: utf-8 -*-
"""
controlbeast.utils.yaml
~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2014 by the ControlBeast team, see AUTHORS.
:license: ISC, see LICENSE for details.
"""
import os
import yaml
from controlbeast.conf import CbConf
from controlbeast.utils.dynamic import CbDynamicIterable
from controlbeast.utils.file import CbFile
class CbYaml(CbDynamicIterable, CbFile):
"""
Wrapper class providing access to YAML data sources.
This wrapper allows using Python format strings within YAML source
files, referring to any name defined in :py:mod:`~controlbeast.conf.default`.
"""
#: File name of the YAML file used as data source
_filename = None
def __init__(self, filename=''):
"""
CbYaml constructor
"""
if self._check_file_exists(filename) and self._check_access(filename, os.R_OK):
self._filename = filename
if self._filename:
conf = CbConf.get_instance()
with open(self._filename, 'r') as fp:
content = fp.read()
content = content.format(**conf)
yaml_dict = yaml.safe_load(content)
else:
yaml_dict = None
super(CbYaml, self).__init__(dict=yaml_dict)
@property
def filename(self):
"""
File name of the YAML file to read from.
Expected to be a string representing a valid and accessible YAML file.
"""
return self._filename
| isc | Python | |
79f92d050fbf9ebe4f088aeabb5e832abeefe0d5 | Initialize unit tests for Coursera API module | ueg1990/mooc_aggregator_restful_api | tests/test_coursera.py | tests/test_coursera.py | import unittest
from mooc_aggregator_restful_api import coursera
class CourseraTestCase(unittest.TestCase):
'''
Unit Tests for module udacity
'''
def setUp(self):
self.coursera_test_object = coursera.CourseraAPI()
def test_coursera_api_courses_response(self):
self.assertEqual(self.coursera_test_object.response_courses.status_code, 200)
def test_coursera_api_universities_response(self):
self.assertEqual(self.coursera_test_object.response_universities.status_code, 200)
def test_coursera_api_categories_response(self):
self.assertEqual(self.coursera_test_object.response_categories.status_code, 200)
def test_coursera_api_instructors_response(self):
self.assertEqual(self.coursera_test_object.response_instructors.status_code, 200)
def test_coursera_api_sessions_response(self):
self.assertEqual(self.coursera_test_object.response_sessions.status_code, 200)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
| mit | Python | |
7d65a128ee71bc5c85170b247730ea385ab58d0c | add first handler test | edouard-lopez/rangevoting,guillaumevincent/rangevoting,edouard-lopez/rangevoting,guillaumevincent/rangevoting,edouard-lopez/rangevoting,guillaumevincent/rangevoting | tests/test_handlers.py | tests/test_handlers.py | import unittest
from unittest.mock import Mock
class RangeVotingHandler():
def __init__(self, member_repository):
self.repository = member_repository
def handle(self, command):
self.repository.save()
class RangeVotingRepository():
def save(self, rangevoting):
pass
class RangeVotingHandlerTestCase(unittest.TestCase):
def test_creation(self):
member_mock_repository = RangeVotingRepository()
rangevoting_handler = RangeVotingHandler(member_mock_repository)
self.assertEqual(member_mock_repository, rangevoting_handler.repository)
def test_handle_calls_save_method_from_repository(self):
member_mock_repository = RangeVotingRepository()
member_mock_repository.save = Mock()
rangevoting_handler = RangeVotingHandler(member_mock_repository)
rangevoting_handler.handle({})
self.assertTrue(member_mock_repository.save.called)
if __name__ == '__main__':
unittest.main()
| mit | Python | |
8415decb4fea7cb8ad3a2800ecd9c9a9190fa331 | Add rename script in python | Diego999/Digit-Dataset,Diego999/Digit-Dataset | rename.py | rename.py | import os
SEPARATOR = '_'
EXT = ".png"
digits = range(1, 10)
for d in digits:
i = 1
for file in os.listdir(str(d)):
if file.endswith(EXT):
filepath = str(d) + '/'
old_filename = filepath + file
new_filename = filepath + str(d) + SEPARATOR + str(i) + EXT
os.rename(old_filename, new_filename)
i += 1 | mit | Python | |
96cfe4d55ae6dd34cc30a72f19118aa66c65f7ca | add __main__ file to for python 2.7 entrypoint | petrus-v/selenium-odoo-qunit | selenium_odoo_qunit/__main__.py | selenium_odoo_qunit/__main__.py | if __name__ == '__main__':
from selenium_odoo_qunit import selenium_odoo_qunit as soq
soq.main()
| mpl-2.0 | Python | |
769f350802b78ffa9c74bc5b9a1e912b64ab718d | Add new package: py-asgiref (#16233) | iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack,iulian787/spack | var/spack/repos/builtin/packages/py-asgiref/package.py | var/spack/repos/builtin/packages/py-asgiref/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyAsgiref(PythonPackage):
"""ASGI specification and utilities."""
homepage = "https://asgi.readthedocs.io/en/latest/"
url = "https://github.com/django/asgiref/archive/3.2.7.tar.gz"
version('3.2.7', sha256='8a0b556b9e936418475f6670d59e14592c41d15d00b5ea4ad26f2b46f9f4fb9a')
version('3.2.6', sha256='29788163bdad8d494475a0137eba39b111fd86fbe825534a9376f9f2ab44251a')
version('3.2.5', sha256='eeb01ba02e86859746ee2a7bc8a75c484a006dc9575723563f24642a12b2bba8')
version('3.2.4', sha256='89e47532340338b7eafd717ab28658e8b48f4565d8384628c88d2d41565c8da0')
version('3.2.3', sha256='d38e16141c7189e23bfe03342d9cd3dbfd6baab99217892bfa7bc5646315b6bd')
version('3.2.2', sha256='47edf327aa70f317c9bc810d469ce681f1b35a7f499f68cf2b5da3ba6a651e69')
version('3.2.1', sha256='06a21df1f4456d29079f3c475c09ac31167bcc5f024c637dedf4e00d2dd9020b')
version('3.2.0', sha256='5db8c7a6c1ff54ea04a52f994d8af959427f1cab8e427aa802492a89fb0b635a')
version('3.1.4', sha256='bf01c52111ef7af2adc1e6d90282d2a32c5ebe09e84ae448389ceff7cef53fa9')
version('3.1.3', sha256='5b8bb7b3719b8c12a6c2363784a4d8c0eb5e980d8b4fdb6f38eccb52071dfab5')
depends_on('python@3.5:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
| lgpl-2.1 | Python | |
e6e641e3beb2aad3e6d4eb1a37a7ee029006631b | add download_data.py for downloading data from nrao, it is not working yet | caseyjlaw/aws-vla-frb,caseyjlaw/aws-vla-frb | aws/download_data.py | aws/download_data.py | import sys
import os
import urllib
import urllib2
import webbrowser
from mechanize import ParseResponse, urlopen, urljoin, Browser
url = 'https://archive.nrao.edu/archive/advquery.jsp'
def download_with_mech(email, destination, file):
'''
download data from nrao archive. Now it only works for filling the form. It cannot
submit by clicking "Get my data" buttom
'''
br = Browser()
br.set_handle_robots(False) # ignore robots
br.open(url)
br.select_form(nr=0)
br["PROJECT_CODE"] = "14A-425"
submit_response = br.submit(name = "SUBMIT", label = "Submit Query")
content = submit_response.read()
#print br.read()
'''redirect to the download page'''
br.select_form(name = "Form1")
br["EMAILADDR"] = "jtan0325@berkeley.edu" #replace by email
br["COPYFILEROOT"] = "/lustre/aoc/projects/fasttransients/moving" #replace by destination
br["CONVERT2FORMAT"] = ["SDM"]
achive_files = br.form.find_control(name = "FTPCHECKED")
for v in range(0, len(achive_files.items)):
# file name should be replaced by FILE
if "14A-425_sb29260830_1_000.56825.290659375" in str(achive_files.items[v]):
achive_files.items[v].selected = True
break
print str(br.read())
#submit_response = br.submit(name = "DOWNLOADFTPCHK")
#submit_content = submit_response.read()
#print submit_response
| bsd-3-clause | Python | |
f3ed5d434b83d7531aecd1431645267dedfecb45 | Create mqtt_sender.py | c2theg/srvBuilds,c2theg/srvBuilds,c2theg/srvBuilds,c2theg/srvBuilds | raspi/mqtt_sender.py | raspi/mqtt_sender.py | import paho.mqtt.publish as publish
MQTT_SERVER = "192.168.1.10"
MQTT_PATH = "test_channel"
publish.single(MQTT_PATH, "Hello World!", hostname=MQTT_SERVER)
| mit | Python | |
a51a226dc0a134e01915e514e2146a664671d998 | Update dates for CFP | pyvec/cz.pycon.org-2017,benabraham/cz.pycon.org-2017,benabraham/cz.pycon.org-2017,pyvec/cz.pycon.org-2017,benabraham/cz.pycon.org-2017,pyvec/cz.pycon.org-2017 | pyconcz_2017/proposals/pyconcz2016_config.py | pyconcz_2017/proposals/pyconcz2016_config.py | from datetime import datetime
from django.utils.timezone import get_current_timezone
from pyconcz_2017.proposals.models import Talk, Workshop, FinancialAid
tz = get_current_timezone()
class TalksConfig:
model = Talk
key = 'talks'
title = 'Talks'
cfp_title = 'Submit your talk'
template_about = 'proposals/talks_about.html'
date_start = datetime(year=2017, month=3, day=6, hour=12, minute=0, tzinfo=tz)
date_end = datetime(year=2017, month=4, day=17, hour=23, minute=59, second=59, tzinfo=tz)
class WorkshopsConfig:
model = Workshop
key = 'workshops'
title = 'Workshops'
cfp_title = 'Submit your workshop'
template_about = 'proposals/workshops_about.html'
date_start = datetime(year=2017, month=3, day=6, hour=12, minute=0, tzinfo=tz)
date_end = datetime(year=2017, month=4, day=17, hour=23, minute=59, second=59, tzinfo=tz)
class FinancialAidConfig:
model = FinancialAid
key = 'financial-aid'
title = 'Financial Aid'
cfp_title = 'Financial Aid Programme'
template_about = 'proposals/financial_aid_about.html'
date_start = datetime(year=2016, month=9, day=8, hour=12, minute=0, tzinfo=tz)
date_end = datetime(year=2016, month=10, day=8, hour=12, minute=0, tzinfo=tz)
| from datetime import datetime
from django.utils.timezone import get_current_timezone
from pyconcz_2017.proposals.models import Talk, Workshop, FinancialAid
tz = get_current_timezone()
class TalksConfig:
model = Talk
key = 'talks'
title = 'Talks'
cfp_title = 'Submit your talk'
template_about = 'proposals/talks_about.html'
date_start = datetime(year=2016, month=8, day=1, hour=12, minute=0, tzinfo=tz)
date_end = datetime(year=2016, month=9, day=15, hour=23, minute=59, second=59, tzinfo=tz)
class WorkshopsConfig:
model = Workshop
key = 'workshops'
title = 'Workshops'
cfp_title = 'Submit your workshop'
template_about = 'proposals/workshops_about.html'
date_start = datetime(year=2016, month=8, day=1, hour=12, minute=0, tzinfo=tz)
date_end = datetime(year=2016, month=10, day=5, hour=12, minute=0, tzinfo=tz)
class FinancialAidConfig:
model = FinancialAid
key = 'financial-aid'
title = 'Financial Aid'
cfp_title = 'Financial Aid Programme'
template_about = 'proposals/financial_aid_about.html'
date_start = datetime(year=2016, month=9, day=8, hour=12, minute=0, tzinfo=tz)
date_end = datetime(year=2016, month=10, day=8, hour=12, minute=0, tzinfo=tz)
| mit | Python |
ca842aee42fcb149e72a39334035cba81e969c65 | add clock | erroneousboat/dotfiles,erroneousboat/dotfiles,erroneousboat/dotfiles | files/bin/bin/clock.py | files/bin/bin/clock.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
##############################################################################
#
# clock
# -----
#
# This script prints an icon representation of the time of day.
#
# Dependencies: python3, nerd-fonts
#
# :authors: J.P.H. Bruins Slot
# :date: 07-01-2019
# :version: 0.1.0
#
##############################################################################
import datetime
def clock():
now = datetime.datetime.now().hour % 12
if now == 0:
return ""
elif now == 1:
return ""
elif now == 2:
return ""
elif now == 3:
return ""
elif now == 4:
return ""
elif now == 5:
return ""
elif now == 6:
return ""
elif now == 7:
return ""
elif now == 8:
return ""
elif now == 9:
return ""
elif now == 10:
return ""
elif now == 11:
return ""
else:
return ""
if __name__ == "__main__":
print(clock())
| mit | Python | |
0c09a85ff19a48dd69f44720823e8bb2cb75eef8 | add the visualization of the 1st conv layer kernels | frombeijingwithlove/dlcv_for_beginners,frombeijingwithlove/dlcv_for_beginners | chap9/visualize_conv1_kernels.py | chap9/visualize_conv1_kernels.py | import sys
import numpy as np
import matplotlib.pyplot as plt
import cv2
sys.path.append('/path/to/caffe/python')
import caffe
ZOOM_IN_SIZE = 50
PAD_SIZE = 4
WEIGHTS_FILE = 'freq_regression_iter_10000.caffemodel'
DEPLOY_FILE = 'deploy.prototxt'
net = caffe.Net(DEPLOY_FILE, WEIGHTS_FILE, caffe.TEST)
kernels = net.params['conv1'][0].data
kernels -= kernels.min()
kernels /= kernels.max()
zoomed_in_kernels = []
for kernel in kernels:
zoomed_in_kernels.append(cv2.resize(kernel[0], (ZOOM_IN_SIZE, ZOOM_IN_SIZE), interpolation=cv2.INTER_NEAREST))
# plot 12*8 squares kernels
half_pad = PAD_SIZE / 2
padded_size = ZOOM_IN_SIZE+PAD_SIZE
padding = ((0, 0), (half_pad, half_pad), (half_pad, half_pad))
padded_kernels = np.pad(zoomed_in_kernels, padding, 'constant', constant_values=1)
padded_kernels = padded_kernels.reshape(8, 12, padded_size, padded_size).transpose(0, 2, 1, 3)
kernels_img = padded_kernels.reshape((8*padded_size, 12*padded_size))[half_pad:-half_pad, half_pad: -half_pad]
plt.imshow(kernels_img, cmap='gray', interpolation='nearest')
plt.axis('off')
plt.show()
| bsd-3-clause | Python | |
65546f4cd97331455a3309509d076825f07a078c | Add set_up file to run all tests | igemsoftware2016/USTC-Software-2016,igemsoftware2016/USTC-Software-2016,igemsoftware2016/USTC-Software-2016 | set_up.py | set_up.py | import os
import unittest2 as unittest
def suite():
return unittest.TestLoader().discover('tests','test_*.py')
if __name__ == '__main__':
unittest.main(defaultTest = 'suite') | agpl-3.0 | Python | |
abea1f4598928fddf750358efcedbfaade019bf4 | Add migration to fix Attachment cache. | kou/zulip,kou/zulip,zulip/zulip,kou/zulip,kou/zulip,zulip/zulip,andersk/zulip,zulip/zulip,andersk/zulip,rht/zulip,rht/zulip,zulip/zulip,rht/zulip,andersk/zulip,rht/zulip,kou/zulip,zulip/zulip,kou/zulip,andersk/zulip,andersk/zulip,zulip/zulip,andersk/zulip,andersk/zulip,rht/zulip,zulip/zulip,rht/zulip,rht/zulip,kou/zulip | zerver/migrations/0386_fix_attachment_caches.py | zerver/migrations/0386_fix_attachment_caches.py | # Generated by Django 3.2.12 on 2022-03-23 04:32
from django.db import migrations, models
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models import Exists, Model, OuterRef
def fix_attachment_caches(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
Attachment = apps.get_model("zerver", "Attachment")
ArchivedAttachment = apps.get_model("zerver", "ArchivedAttachment")
Message = apps.get_model("zerver", "Message")
ArchivedMessage = apps.get_model("zerver", "ArchivedMessage")
BATCH_SIZE = 10000
def update_batch(attachment_model: Model, message_model: Model, lower_bound: int) -> None:
attachment_model.objects.filter(
id__gt=lower_bound, id__lte=lower_bound + BATCH_SIZE
).update(
is_web_public=Exists(
message_model.objects.filter(
attachment=OuterRef("id"),
recipient__stream__invite_only=False,
recipient__stream__is_web_public=True,
),
),
is_realm_public=Exists(
message_model.objects.filter(
attachment=OuterRef("id"),
recipient__stream__invite_only=False,
)
),
)
max_id = Attachment.objects.aggregate(models.Max("id"))["id__max"]
if max_id is not None:
lower_bound = 0
while lower_bound < max_id:
print(f"Processed {lower_bound}/{max_id} attachments.")
update_batch(Attachment, Message, lower_bound)
lower_bound += BATCH_SIZE
max_id = ArchivedAttachment.objects.aggregate(models.Max("id"))["id__max"]
if max_id is not None:
lower_bound = 0
while lower_bound < max_id:
print(f"Processed {lower_bound}/{max_id} archived attachments.")
update_batch(ArchivedAttachment, ArchivedMessage, lower_bound)
lower_bound += BATCH_SIZE
class Migration(migrations.Migration):
atomic = False
dependencies = [
("zerver", "0385_attachment_flags_cache"),
]
operations = [
migrations.RunPython(fix_attachment_caches, reverse_code=migrations.RunPython.noop),
]
| apache-2.0 | Python | |
e7232c4050b4cae1302d2c638ed20f3ac69bf22c | comment out ui tests temporarily | miurahr/seahub,madflow/seahub,madflow/seahub,cloudcopy/seahub,miurahr/seahub,madflow/seahub,Chilledheart/seahub,madflow/seahub,miurahr/seahub,cloudcopy/seahub,Chilledheart/seahub,Chilledheart/seahub,cloudcopy/seahub,cloudcopy/seahub,miurahr/seahub,Chilledheart/seahub,madflow/seahub,Chilledheart/seahub | tests/ui/test_login.py | tests/ui/test_login.py | # import unittest
# from tests.common.common import BASE_URL, USERNAME, PASSWORD
# from selenium import webdriver
# from selenium.webdriver.common.keys import Keys
# LOGIN_URL = BASE_URL + u'/accounts/login/'
# HOME_URL = BASE_URL + u'/home/my/'
# LOGOUT_URL = BASE_URL + u'/accounts/logout/'
# def get_logged_instance():
# browser = webdriver.PhantomJS()
# browser.get(LOGIN_URL)
# username_input = browser.find_element_by_name('username')
# password_input = browser.find_element_by_name('password')
# username_input.send_keys(USERNAME)
# password_input.send_keys(PASSWORD)
# password_input.send_keys(Keys.RETURN)
# if browser.current_url != HOME_URL:
# browser.quit()
# return None
# return browser
# class LoginTestCase(unittest.TestCase):
# def setUp(self):
# self.browser = get_logged_instance()
# self.assertIsNotNone(self.browser)
# self.addCleanup(self.browser.quit)
# def test_login(self):
# self.assertRegexpMatches(self.browser.current_url, HOME_URL)
# def test_logout(self):
# myinfo_bar = self.browser.find_element_by_css_selector('#my-info')
# logout_input = self.browser.find_element_by_css_selector('a#logout')
# myinfo_bar.click()
# logout_input.click()
# self.assertRegexpMatches(self.browser.current_url, LOGOUT_URL)
| import unittest
from tests.common.common import BASE_URL, USERNAME, PASSWORD
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
LOGIN_URL = BASE_URL + u'/accounts/login/'
HOME_URL = BASE_URL + u'/home/my/'
LOGOUT_URL = BASE_URL + u'/accounts/logout/'
def get_logged_instance():
browser = webdriver.PhantomJS()
browser.get(LOGIN_URL)
username_input = browser.find_element_by_name('username')
password_input = browser.find_element_by_name('password')
username_input.send_keys(USERNAME)
password_input.send_keys(PASSWORD)
password_input.send_keys(Keys.RETURN)
if browser.current_url != HOME_URL:
browser.quit()
return None
return browser
class LoginTestCase(unittest.TestCase):
def setUp(self):
self.browser = get_logged_instance()
self.assertIsNotNone(self.browser)
self.addCleanup(self.browser.quit)
def test_login(self):
self.assertRegexpMatches(self.browser.current_url, HOME_URL)
def test_logout(self):
myinfo_bar = self.browser.find_element_by_css_selector('#my-info')
logout_input = self.browser.find_element_by_css_selector('a#logout')
myinfo_bar.click()
logout_input.click()
self.assertRegexpMatches(self.browser.current_url, LOGOUT_URL)
| apache-2.0 | Python |
7b063825ca3fbd8a638d56a477d3b2380b7901be | Add tests for list_max_two | lukin155/skola-programiranja | domaci-zadaci/06/test_list_max_two.py | domaci-zadaci/06/test_list_max_two.py | from solutions import list_max_two
import unittest
import random
class TestListMax(unittest.TestCase):
def test_two_equal_elements(self):
in_list = [random.randint(0, 1000)] * 2
expected = (in_list[0], in_list[1])
actual = list_max_two(in_list)
self.assertEqual(expected, actual)
def test_two_different_elements(self):
num = random.randint(0, 1000)
expected = (num, num - 1)
in_list = [num, num - 1]
actual = list_max_two(in_list)
self.assertEqual(expected, actual)
in_list = [num - 1, num]
actual = list_max_two(in_list)
self.assertEqual(expected, actual)
def test_random(self):
in_list = [1e6 * random.random() for _ in range(100000)]
sorted_list = in_list[:]
sorted_list.sort()
expected = (sorted_list[-1], sorted_list[-2])
actual = list_max_two(in_list)
print(len(in_list))
print(in_list[:100])
self.assertEqual(expected, actual)
if __name__ == "__main__":
unittest.main()
| mit | Python | |
bcb3948eea1903dc0127c8aee1b1decf11040496 | Add tolower simproc. | tyb0807/angr,f-prettyland/angr,iamahuman/angr,schieb/angr,angr/angr,schieb/angr,f-prettyland/angr,iamahuman/angr,angr/angr,schieb/angr,angr/angr,tyb0807/angr,f-prettyland/angr,tyb0807/angr,iamahuman/angr | angr/procedures/libc/tolower.py | angr/procedures/libc/tolower.py | import angr
from angr.sim_type import SimTypeInt
import logging
l = logging.getLogger("angr.procedures.libc.tolower")
class tolower(angr.SimProcedure):
def run(self, c):
self.argument_types = {0: SimTypeInt(self.state.arch, True)}
self.return_type = SimTypeInt(self.state.arch, True)
if not self.state.solver.symbolic(c):
try:
ret_expr = chr(self.state.solver.eval(c)).lower()
except ValueError: # not in range(256)
ret_expr = c
return ret_expr
else:
return self.state.solver.If(
self.state.solver.And(c >= 65, c <= 90), # A - Z
c + 32, c)
| bsd-2-clause | Python | |
405bef33c1c68029b31ec6cb8f88b1edc28e2a6e | Create extract_wavelength tests module | danforthcenter/plantcv,danforthcenter/plantcv,danforthcenter/plantcv | tests/plantcv/hyperspectral/test_extract_wavelength.py | tests/plantcv/hyperspectral/test_extract_wavelength.py | import numpy as np
from plantcv.plantcv.hyperspectral import extract_wavelength
def test_extract_wavelength(hyperspectral_test_data):
new = extract_wavelength(spectral_data=hyperspectral_test_data.load_hsi(), wavelength=500)
assert np.shape(new.array_data) == (1, 1600)
| mit | Python | |
d1abba72b79262c0b1462d7f7e42c798dc30003e | Create twinkle-status | Xi-Plus/Xiplus-Wikipedia-Bot,Xi-Plus/Xiplus-Wikipedia-Bot | twinkle-status/edit.py | twinkle-status/edit.py | # -*- coding: utf-8 -*-
import os
import re
os.environ['PYWIKIBOT_DIR'] = os.path.dirname(os.path.realpath(__file__))
import pywikibot
os.environ['TZ'] = 'UTC'
site = pywikibot.Site()
site.login()
with open('list.txt', 'r') as f:
for user in f:
user = user.strip()
page = pywikibot.Page(site, 'User:{}/common.js'.format(user))
if not page.exists():
print('{}\t{}'.format(user, 'gadget'))
continue
text = page.text
if 'Xiplus/Twinkle.js' in text:
print('{}\t{}'.format(user, 'Xiplus'))
continue
m = re.search(r'User:(.+?)/Twinkle.js', text)
if m:
print('{}\t{}'.format(user, m.group(1)))
continue
print('{}\t{}'.format(user, 'gadget'))
continue
| mit | Python | |
af062396637c86e5f12fcbf2a8250d6189ac207b | Create flask_fysql_example.py | Fy-Network/fysql | flask_fysql_example.py | flask_fysql_example.py | # -*- coding: utf-8 -*-
from fysql.databases import MySQLDatabase
from flask import current_app as app
class FySQL(object):
config = {}
name = ""
engine = MySQLDatabase
def __init__(self, app=None):
self.app = None
if app is not None:
self.init_app(app)
def init_app(self, app):
self.config = app.config.get('DATABASE', {})
self.name = self.config['db']
self.conn_kwargs = {}
self.engine = MySQLDatabase
for key, value in self.config.items():
if key not in ['engine', 'db']:
self.conn_kwargs[key] = value
if hasattr(app, 'teardown_appcontext'):
app.teardown_appcontext(self.teardown)
else:
app.teardown_request(self.teardown)
self.connect()
def connect(self):
self.db = self.engine(self.name, **self.conn_kwargs)
def teardown(self, exception):
self.db.close()
| mit | Python | |
8b7c32f2058ce3c24ef3c19eb7d2d3f8d3154037 | Disable large profile startup benchmarks. | Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,ltilve/chromium,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,Just-D/chromium-1,chuan9/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,ltilve/chromium,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ltilve/chromium,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,Chilledheart/chromium,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,ltilve/chromium,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,Fireblend/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk,Pluto-tv/chr
omium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,TheTypoMaster/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ltilve/chromium,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk | tools/perf/benchmarks/startup.py | tools/perf/benchmarks/startup.py | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import benchmark
from measurements import startup
import page_sets
class _StartupCold(benchmark.Benchmark):
"""Measures cold startup time with a clean profile."""
options = {'pageset_repeat': 5}
@classmethod
def Name(cls):
return 'startup'
def CreatePageTest(self, options):
return startup.Startup(cold=True)
class _StartupWarm(benchmark.Benchmark):
"""Measures warm startup time with a clean profile."""
options = {'pageset_repeat': 20}
@classmethod
def Name(cls):
return 'startup'
@classmethod
def ValueCanBeAddedPredicate(cls, _, is_first_result):
return not is_first_result
def CreatePageTest(self, options):
return startup.Startup(cold=False)
@benchmark.Enabled('has tabs')
@benchmark.Disabled('snowleopard') # crbug.com/336913
class StartupColdBlankPage(_StartupCold):
"""Measures cold startup time with a clean profile."""
tag = 'cold'
page_set = page_sets.BlankPageSet
@classmethod
def Name(cls):
return 'startup.cold.blank_page'
@benchmark.Enabled('has tabs')
class StartupWarmBlankPage(_StartupWarm):
"""Measures warm startup time with a clean profile."""
tag = 'warm'
page_set = page_sets.BlankPageSet
@classmethod
def Name(cls):
return 'startup.warm.blank_page'
@benchmark.Enabled('has tabs')
@benchmark.Enabled('win', 'linux', 'mac')
# TODO(erikchen): Investigate source of 30s browser hang on startup.
# http://crbug.com/473827
@benchmark.Disabled
class StartupLargeProfileColdBlankPage(_StartupCold):
    """Measures cold startup time with a large profile."""
    tag = 'cold'
    page_set = page_sets.BlankPageSet
    # Large-profile runs are expensive; a single repeat (overriding the
    # base class's 5) keeps total runtime bounded.
    options = {'pageset_repeat': 1}

    def __init__(self, max_failures=None):
        super(StartupLargeProfileColdBlankPage, self).__init__(max_failures)
        # Seed the run with a pre-generated large browsing profile.
        self.generated_profile_archive = "large_profile.zip"

    @classmethod
    def Name(cls):
        return 'startup.large_profile.cold.blank_page'
@benchmark.Enabled('has tabs')
@benchmark.Enabled('win', 'linux', 'mac')
# TODO(erikchen): Investigate source of 30s browser hang on startup.
# http://crbug.com/473827
@benchmark.Disabled
class StartupLargeProfileWarmBlankPage(_StartupWarm):
    """Measures warm startup time with a large profile."""
    tag = 'warm'
    page_set = page_sets.BlankPageSet
    # Large-profile runs are expensive; a single repeat (overriding the
    # base class's 20) keeps total runtime bounded.
    options = {'pageset_repeat': 1}

    def __init__(self, max_failures=None):
        super(StartupLargeProfileWarmBlankPage, self).__init__(max_failures)
        # Seed the run with a pre-generated large browsing profile.
        self.generated_profile_archive = "large_profile.zip"

    @classmethod
    def Name(cls):
        return 'startup.large_profile.warm.blank_page'
| # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import benchmark
from measurements import startup
import page_sets
# NOTE(review): this is the pre-change revision of the classes above (the
# dataset row's old_contents column). It is identical except that the
# large-profile benchmarks had not yet been disabled. Kept verbatim;
# comments only.
class _StartupCold(benchmark.Benchmark):
    """Measures cold startup time with a clean profile."""
    options = {'pageset_repeat': 5}

    @classmethod
    def Name(cls):
        return 'startup'

    def CreatePageTest(self, options):
        return startup.Startup(cold=True)


class _StartupWarm(benchmark.Benchmark):
    """Measures warm startup time with a clean profile."""
    options = {'pageset_repeat': 20}

    @classmethod
    def Name(cls):
        return 'startup'

    @classmethod
    def ValueCanBeAddedPredicate(cls, _, is_first_result):
        # Discard the first (not-yet-warm) result.
        return not is_first_result

    def CreatePageTest(self, options):
        return startup.Startup(cold=False)


@benchmark.Enabled('has tabs')
@benchmark.Disabled('snowleopard') # crbug.com/336913
class StartupColdBlankPage(_StartupCold):
    """Measures cold startup time with a clean profile."""
    tag = 'cold'
    page_set = page_sets.BlankPageSet

    @classmethod
    def Name(cls):
        return 'startup.cold.blank_page'


@benchmark.Enabled('has tabs')
class StartupWarmBlankPage(_StartupWarm):
    """Measures warm startup time with a clean profile."""
    tag = 'warm'
    page_set = page_sets.BlankPageSet

    @classmethod
    def Name(cls):
        return 'startup.warm.blank_page'


@benchmark.Enabled('has tabs')
@benchmark.Enabled('win', 'linux', 'mac')
class StartupLargeProfileColdBlankPage(_StartupCold):
    """Measures cold startup time with a large profile."""
    tag = 'cold'
    page_set = page_sets.BlankPageSet
    options = {'pageset_repeat': 1}

    def __init__(self, max_failures=None):
        super(StartupLargeProfileColdBlankPage, self).__init__(max_failures)
        self.generated_profile_archive = "large_profile.zip"

    @classmethod
    def Name(cls):
        return 'startup.large_profile.cold.blank_page'


@benchmark.Enabled('has tabs')
@benchmark.Enabled('win', 'linux', 'mac')
class StartupLargeProfileWarmBlankPage(_StartupWarm):
    """Measures warm startup time with a large profile."""
    tag = 'warm'
    page_set = page_sets.BlankPageSet
    options = {'pageset_repeat': 1}

    def __init__(self, max_failures=None):
        super(StartupLargeProfileWarmBlankPage, self).__init__(max_failures)
        self.generated_profile_archive = "large_profile.zip"

    @classmethod
    def Name(cls):
        return 'startup.large_profile.warm.blank_page'
| bsd-3-clause | Python |
e13e714fa179544bef895274baf4f8ddb52ddd4a | add script to separate source columns to 6 columns with boolean values | BD2KGenomics/brca-website,BD2KGenomics/brca-website,BD2KGenomics/brca-website | python_scripts/seperating_source_column.py | python_scripts/seperating_source_column.py | """
seperating the Variant_Source column merged_v4.tsv into six columns:
Variant_in_ENIGMA
Variant_in_ClinVar
Variant_in_1000_Genomes
Variant_in_ExAC
Variant_in_LOVD
Variant_in_BIC
"""
# New boolean column names, in output order.
COLUMNS = ["Variant_in_ENIGMA",
           "Variant_in_ClinVar",
           "Variant_in_1000_Genomes",
           "Variant_in_ExAC",
           "Variant_in_LOVD",
           "Variant_in_BIC"]
# Source labels as they appear (pipe-delimited) in the merged column;
# order matches COLUMNS.
SOURCES = ["ENIGMA", "ClinVar", "1000_Genomes", "ExAC", "LOVD", "BIC"]


def main():
    """Rewrite merged_v4.tsv as merged_v5.tsv, replacing the first
    (Variant_Source) column with six per-source boolean columns.
    """
    in_path = '/Users/Molly/Desktop/BRCA Research/data/merged_v4.tsv'
    out_path = '/Users/Molly/Desktop/BRCA Research/data/merged_v5.tsv'
    # Bug fix: the original never closed either file, so buffered output
    # could be lost; 'with' closes both even on error.
    with open(in_path, "r") as f_in, open(out_path, "w") as f_out:
        for line_num, line in enumerate(f_in, start=1):
            items = line.strip().split("\t")
            source = items.pop(0)  # drop the merged Variant_Source column
            if line_num == 1:
                # Header row: prepend the new column names instead.
                items = COLUMNS + items
            else:
                items = separate_source(source) + items
            f_out.write("\t".join(items) + "\n")


def separate_source(source):
    """Return a list of 'True'/'False' strings, one per SOURCES entry,
    indicating whether that source appears in the pipe-delimited
    *source* field.
    """
    present = source.split("|")
    return ["True" if name in present else "False" for name in SOURCES]


if __name__ == "__main__":
    main()
e0ab65f3877da992ac3705475ea0bdc520677cbe | Test CommandObjectMultiword functionality | llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb | packages/Python/lldbsuite/test/functionalities/multiword-commands/TestMultiWordCommands.py | packages/Python/lldbsuite/test/functionalities/multiword-commands/TestMultiWordCommands.py | """
Test multiword commands ('platform' in this case).
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
class MultiwordCommandsTestCase(TestBase):
    """Checks lldb's multiword-command resolution using 'platform'."""

    mydir = TestBase.compute_mydir(__file__)

    @no_debug_info_test
    def test_ambiguous_subcommand(self):
        # An ambiguous subcommand prefix must fail and list every
        # matching completion.
        completions = ["ambiguous command 'platform s'. Possible completions:",
                       "\tselect\n",
                       "\tshell\n",
                       "\tsettings\n"]
        self.expect("platform s", error=True, substrs=completions)

    @no_debug_info_test
    def test_empty_subcommand(self):
        # FIXME: This has no error message.
        self.expect("platform \"\"", error=True)

    @no_debug_info_test
    def test_help(self):
        # <multiword> help brings up help.
        fragments = ["Commands to manage and create platforms.",
                     "Syntax: platform [",
                     "The following subcommands are supported:",
                     "connect",
                     "Select the current platform"]
        self.expect("platform help", substrs=fragments)
| apache-2.0 | Python | |
a23e45a65221ec076059bd32cdb1d5bb787e123b | add less filter | amol-/dukpy,amol-/dukpy,amol-/dukpy | dukpy/webassets/lessfilter.py | dukpy/webassets/lessfilter.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function
import os
from webassets.filter import Filter
import dukpy
__all__ = ('CompileLess', )
class CompileLess(Filter):
    """webassets filter that compiles LESS source to CSS in-process
    via dukpy.
    """

    name = 'lessc'
    max_debug_level = None

    def setup(self):
        # NOTE(review): the config key says LIBSASS even though this is
        # the LESS filter -- looks copied from a sass filter; confirm
        # before renaming, since existing configs may rely on it.
        self.less_includes = self.get_config('LIBSASS_INCLUDES', require=False) or None

    def input(self, _in, out, **kw):
        # Build the @import search path: configured include dirs first,
        # then the directory of the file being compiled.
        search_paths = []
        if self.less_includes:
            search_paths.extend(self.less_includes)
        if 'source_path' in kw:
            search_paths.append(os.path.dirname(kw['source_path']))
        compiled = dukpy.less_compile(_in.read(), options={'paths': search_paths})
        out.write(compiled)
| mit | Python | |
0f251e6c8620e19fc5e16e88b1ffbd5d51f7a7be | Add initial HDF storage class | rnelsonchem/gcmstools | gcmstools/datastore.py | gcmstools/datastore.py | import numpy as np
import pandas as pd
import tables as tb
class HDFStore(object):
    """HDF5-backed store for GCMS data files.

    Wraps a blosc-compressed pandas HDFStore, keeping a 'files'
    DataFrame that indexes stored runs plus a '/data' group holding one
    subgroup per data file.
    """

    def __init__(self, hdfname):
        # Open (or create) the store in append mode with blosc compression.
        self.pdh5 = pd.HDFStore(hdfname, mode='a', complevel=9,
                complib='blosc')
        # Underlying PyTables File handle for direct group/array access.
        self.h5 = self.pdh5._handle
        self._filters = tb.Filters(complevel=9, complib='blosc')
        self._files_df_columns = ('name', 'filenames')

        # First use of this file: create the empty 'files' index table.
        if not hasattr(self.h5.root, 'files'):
            df = pd.DataFrame(columns=self._files_df_columns).set_index('name')
            self.pdh5['files'] = df
        self.files = self.pdh5['files']

        # First use of this file: create the group that holds payloads.
        if not hasattr(self.h5.root, 'data'):
            self.h5.create_group('/', 'data', filters=self._filters)

    def append_files(self, datafiles):
        """Add one or several data objects to the store.

        Each object must expose a ``filename`` attribute. Numpy-array
        attributes are stored as compressed arrays; everything else is
        stored as HDF5 node attributes (see _append).
        """
        if not isinstance(datafiles, (tuple, list)):
            datafiles = [datafiles, ]

        names = []
        for data in datafiles:
            filename = data.filename
            name = self._name_fix(filename)
            names.append((name, filename))
            self._append(name, data)

        temp_df = pd.DataFrame(names, columns=self._files_df_columns)\
                .set_index('name')
        self.pdh5['files'] = pd.concat((self.pdh5['files'], temp_df))
        # Consistency fix: keep the cached attribute in sync with what
        # was just written (the original left self.files stale).
        self.files = self.pdh5['files']

    def _append(self, name, gcmsobj):
        # One subgroup per file: ndarrays become compressed CArrays,
        # every other attribute is stored on the group's attribute set.
        group = self.h5.create_group('/data', name)
        for key, val in gcmsobj.__dict__.items():
            if isinstance(val, np.ndarray):
                self.h5.create_carray(group, key, obj=val,)
            else:
                setattr(group._v_attrs, key, val)

    def _name_fix(self, badname):
        """Turn a file name into a valid HDF5 node identifier: drop the
        extension, turn spaces and interior dots into underscores, and
        prefix names that start with a digit with 'num'.
        """
        if '.' in badname:
            stem = '_'.join(badname.split('.')[:-1])
        else:
            # Robustness fix: extension-less names used to hit an empty
            # string and raise IndexError below.
            stem = badname
        nospace = stem.replace(' ', '_')
        if nospace and nospace[0].isdigit():
            # Bug fix: the original referenced the misspelled variable
            # 'nospcace' here, raising NameError for any name that
            # starts with a digit.
            return 'num' + nospace
        return nospace

    def close(self, ):
        """Flush and close the underlying HDF5 file."""
        self.pdh5.close()

    def recompress(self, ):
        # TODO: copy the file (e.g. via ptrepack) to reclaim space;
        # still an unimplemented no-op stub.
        pass
| bsd-3-clause | Python | |
585a5fa27321134623dcf431ebf80ba1dcd708de | add example script test equality of coefficients in two regression (basic example for onewaygls) | jseabold/statsmodels,rgommers/statsmodels,ChadFulton/statsmodels,kiyoto/statsmodels,wkfwkf/statsmodels,wwf5067/statsmodels,josef-pkt/statsmodels,cbmoore/statsmodels,yarikoptic/pystatsmodels,wdurhamh/statsmodels,kiyoto/statsmodels,adammenges/statsmodels,wzbozon/statsmodels,adammenges/statsmodels,bashtage/statsmodels,bert9bert/statsmodels,bert9bert/statsmodels,gef756/statsmodels,bavardage/statsmodels,yarikoptic/pystatsmodels,hlin117/statsmodels,jstoxrocky/statsmodels,ChadFulton/statsmodels,YihaoLu/statsmodels,wdurhamh/statsmodels,wkfwkf/statsmodels,wzbozon/statsmodels,DonBeo/statsmodels,ChadFulton/statsmodels,yl565/statsmodels,yl565/statsmodels,bashtage/statsmodels,musically-ut/statsmodels,jstoxrocky/statsmodels,nguyentu1602/statsmodels,wwf5067/statsmodels,YihaoLu/statsmodels,wwf5067/statsmodels,hainm/statsmodels,bashtage/statsmodels,saketkc/statsmodels,bzero/statsmodels,DonBeo/statsmodels,phobson/statsmodels,hlin117/statsmodels,pprett/statsmodels,nvoron23/statsmodels,bashtage/statsmodels,bsipocz/statsmodels,wdurhamh/statsmodels,bert9bert/statsmodels,wesm/statsmodels,josef-pkt/statsmodels,hainm/statsmodels,nvoron23/statsmodels,detrout/debian-statsmodels,huongttlan/statsmodels,edhuckle/statsmodels,josef-pkt/statsmodels,nvoron23/statsmodels,astocko/statsmodels,wwf5067/statsmodels,wesm/statsmodels,Averroes/statsmodels,wzbozon/statsmodels,hainm/statsmodels,phobson/statsmodels,bzero/statsmodels,jstoxrocky/statsmodels,Averroes/statsmodels,bzero/statsmodels,cbmoore/statsmodels,nguyentu1602/statsmodels,gef756/statsmodels,wkfwkf/statsmodels,wdurhamh/statsmodels,alekz112/statsmodels,phobson/statsmodels,saketkc/statsmodels,jstoxrocky/statsmodels,wzbozon/statsmodels,bavardage/statsmodels,pprett/statsmodels,josef-pkt/statsmodels,gef756/statsmodels,astocko/statsmodels,kiyoto/statsmodels,waynenilsen/statsmodels,DonBeo/statsmodels,bsipocz/stats
models,edhuckle/statsmodels,bashtage/statsmodels,DonBeo/statsmodels,waynenilsen/statsmodels,statsmodels/statsmodels,waynenilsen/statsmodels,pprett/statsmodels,alekz112/statsmodels,bavardage/statsmodels,josef-pkt/statsmodels,yl565/statsmodels,Averroes/statsmodels,edhuckle/statsmodels,nvoron23/statsmodels,cbmoore/statsmodels,edhuckle/statsmodels,wkfwkf/statsmodels,edhuckle/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,kiyoto/statsmodels,detrout/debian-statsmodels,DonBeo/statsmodels,huongttlan/statsmodels,hlin117/statsmodels,hlin117/statsmodels,nvoron23/statsmodels,saketkc/statsmodels,nguyentu1602/statsmodels,cbmoore/statsmodels,rgommers/statsmodels,YihaoLu/statsmodels,jseabold/statsmodels,alekz112/statsmodels,huongttlan/statsmodels,alekz112/statsmodels,wdurhamh/statsmodels,pprett/statsmodels,yl565/statsmodels,wzbozon/statsmodels,musically-ut/statsmodels,musically-ut/statsmodels,rgommers/statsmodels,statsmodels/statsmodels,detrout/debian-statsmodels,bavardage/statsmodels,bsipocz/statsmodels,adammenges/statsmodels,hainm/statsmodels,saketkc/statsmodels,rgommers/statsmodels,phobson/statsmodels,nguyentu1602/statsmodels,bashtage/statsmodels,bsipocz/statsmodels,ChadFulton/statsmodels,bzero/statsmodels,adammenges/statsmodels,phobson/statsmodels,ChadFulton/statsmodels,wkfwkf/statsmodels,josef-pkt/statsmodels,bert9bert/statsmodels,YihaoLu/statsmodels,gef756/statsmodels,saketkc/statsmodels,statsmodels/statsmodels,astocko/statsmodels,statsmodels/statsmodels,wesm/statsmodels,rgommers/statsmodels,detrout/debian-statsmodels,ChadFulton/statsmodels,yl565/statsmodels,bert9bert/statsmodels,kiyoto/statsmodels,musically-ut/statsmodels,YihaoLu/statsmodels,bzero/statsmodels,astocko/statsmodels,bavardage/statsmodels,gef756/statsmodels,huongttlan/statsmodels,jseabold/statsmodels,cbmoore/statsmodels,jseabold/statsmodels,Averroes/statsmodels,waynenilsen/statsmodels,yarikoptic/pystatsmodels,statsmodels/statsmodels | scikits/statsmodels/examples/try_2regress.py | 
scikits/statsmodels/examples/try_2regress.py | # -*- coding: utf-8 -*-
"""F test for null hypothesis that coefficients in two regressions are the same

see discussion in http://mail.scipy.org/pipermail/scipy-user/2010-March/024851.html

Created on Thu Mar 25 22:56:45 2010
Author: josef-pktd
"""
import numpy as np
from numpy.testing import assert_almost_equal
import scikits.statsmodels as sm  # NOTE: legacy pre-1.0 'scikits' namespace

np.random.seed(87654589)  # fixed seed so the printed output is reproducible

# --- regression 1: simulate y1 = 10 + 15*x1 + N(0, 2) noise ---------------
nobs = 10 #100
x1 = np.random.randn(nobs)
y1 = 10 + 15*x1 + 2*np.random.randn(nobs)
x1 = sm.add_constant(x1) #, prepend=True)
# design matrix [x, 1] equals the degree-1 Vandermonde matrix of x
assert_almost_equal(x1, np.vander(x1[:,0],2), 16)
res1 = sm.OLS(y1, x1).fit()
print res1.params
print np.polyfit(x1[:,0], y1, 1)
# sanity check: OLS estimates match numpy's least-squares polynomial fit
assert_almost_equal(res1.params, np.polyfit(x1[:,0], y1, 1), 14)
print res1.summary(xname=['x1','const1'])

# --- regression 2: different true coefficients, so H0 is false ------------
x2 = np.random.randn(nobs)
y2 = 19 + 17*x2 + 2*np.random.randn(nobs)
#y2 = 10 + 15*x2 + 2*np.random.randn(nobs) # if H0 is true
x2 = sm.add_constant(x2) #, prepend=True)
assert_almost_equal(x2, np.vander(x2[:,0],2), 16)
res2 = sm.OLS(y2, x2).fit()
print res2.params
print np.polyfit(x2[:,0], y2, 1)
assert_almost_equal(res2.params, np.polyfit(x2[:,0], y2, 1), 14)
print res2.summary(xname=['x2','const2'])

# --- joint (stacked) regression with dummy interactions -------------------
x = np.concatenate((x1,x2),0)
y = np.concatenate((y1,y2))
dummy = np.arange(2*nobs)>nobs-1  # True for observations from sample 2
# columns are [x, const, x*dummy, const*dummy]: the interaction
# coefficients are the *differences* between the two regressions
x = np.column_stack((x,x*dummy[:,None]))
res = sm.OLS(y, x).fit()
print res.summary(xname=['x','const','x2','const2'])

print '\nF test for equal coefficients in 2 regression equations'
#effect of dummy times second regression is zero
#is equivalent to 3rd and 4th coefficient are both zero
print res.f_test([[0,0,1,0],[0,0,0,1]])

print '\nchecking coefficients individual versus joint'
print res1.params, res2.params
print res.params[:2], res.params[:2]+res.params[2:]
# base coefficients of the joint model reproduce regression 1; base plus
# interaction coefficients reproduce regression 2
assert_almost_equal(res1.params, res.params[:2], 13)
assert_almost_equal(res2.params, res.params[:2]+res.params[2:], 13)
| bsd-3-clause | Python | |
06945ae360bdab9726ea78757d8e63b10ea252fe | Create cbalusek_02.py | GT-IDEaS/SkillsWorkshop2017,GT-IDEaS/SkillsWorkshop2017,GT-IDEaS/SkillsWorkshop2017 | Week01/Problem02/cbalusek_02.py | Week01/Problem02/cbalusek_02.py | """
Created on Fri Jul 21 10:17:21 2017
This short program will sum all of the even numbers in the fibonnacci
sequence less than 4 million.
@author: cbalusek3
"""
i1 = 1
i2 = 2
cum = 0
while i2 < 4000000:
itemp = i2
i2 += i1
i1 = itemp
if i2%2 == 0:
cum += i2
print(cum)
| bsd-3-clause | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.