commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13 values | lang stringclasses 23 values |
|---|---|---|---|---|---|---|---|---|
5e42ed2908f6186db4da92f283ca9b9b30ac9e31 | Add executable file | XENON1T/pax,XENON1T/pax | bin/paxit.py | bin/paxit.py | #!/usr/bin/env python
from pax import pax
if __name__ == '__main__':
    # Wire the dumb-example plugins into a pax processing chain:
    # input -> transform -> output.
    plugin_config = {
        'input': 'DumbExample.DumbExampleInput',
        'transform': 'DumbExample.DumbExampleTransform',
        'output': 'DumbExample.DumbExampleOutput',
    }
    pax.Processor(**plugin_config)
| bsd-3-clause | Python | |
74c2563f55c71ff8d3d2cbb7ff2b69a4be8767ec | Create client.py | dissembledstar/covertcomms | client.py | client.py | #!/usr/bin/env python
# TODO take user choice for c2 method
# TODO start with pipe delimited HTTP POST data to test
# TODO loop and output to STDOUT
| mit | Python | |
6693c02315ba16c8a849c7a1892397bf819a2efd | Add meshDisplay commands | minoue/miExecutor | app/module/polygon/meshDisplay.py | app/module/polygon/meshDisplay.py | import maya.cmds as cmds
import maya.mel as mel
# class name must be 'Commands'
class Commands(object):
    """Vertex-colour / colour-set commands for the miExecutor launcher.

    The class name must be 'Commands' -- the loader looks it up by name.
    ``commandDict`` maps each command name (the ``_method`` without its
    leading underscore) to its toolbar icon file.
    """

    def _applyColor(self):
        # Paint a flat 50% grey vertex colour and turn on its display.
        mel.eval("polyColorPerVertex -r 0.5 -g 0.5 -b 0.5 -a 1 -cdo;")

    def _paintVertexColorTool(self):
        cmds.PaintVertexColorTool()

    def _createEmptyColorSet(self):
        mel.eval("colorSetEditCmdNew new none 1 RGB 0")

    def _deleteCurrentColorSet(self):
        mel.eval("colorSetEditCmd delete none")

    def _renameCurrentColorSet(self):
        mel.eval("colorSetEditCmd rename none")

    def _modifyCurrentColorSet(self):
        mel.eval("colorSetEditCmd modify none")

    def _colorSetEditor(self):
        mel.eval("colorSetEditor")

    # Command-name -> icon mapping (same entries the original built up
    # one assignment at a time between the method definitions).
    commandDict = {
        'applyColor': "polyApplyColor.png",
        'paintVertexColorTool': "paintVertexColour.png",
        'createEmptyColorSet': "polyColorSetCreateEmpty.png",
        'deleteCurrentColorSet': "polyColorSetDelete.png",
        'renameCurrentColorSet': "polyColorSetRename.png",
        'modifyCurrentColorSet': "polyColorSetModify.png",
        'colorSetEditor': "polyColorSetEditor.png",
    }
| mit | Python | |
65df19a82df1432f72674eb35765937aa6c889eb | Add migration removing database columns following removal of FileUpload fields (#16035) | mozilla/olympia,mozilla/addons-server,wagnerand/addons-server,mozilla/olympia,mozilla/addons-server,wagnerand/addons-server,mozilla/addons-server,diox/olympia,bqbn/addons-server,wagnerand/addons-server,mozilla/olympia,bqbn/addons-server,diox/olympia,diox/olympia,mozilla/olympia,bqbn/addons-server,wagnerand/addons-server,mozilla/addons-server,bqbn/addons-server,diox/olympia | src/olympia/files/migrations/0005_auto_20201120_0926.py | src/olympia/files/migrations/0005_auto_20201120_0926.py | # Generated by Django 2.2.17 on 2020-11-20 09:26
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the FileUpload database index and columns left behind after
    the `compat_with_app` / `compat_with_appver` model fields were
    removed (see #16035)."""

    dependencies = [
        ('files', '0004_auto_20200923_1808'),
    ]

    operations = [
        # The index must be removed before the column it covers is dropped.
        migrations.RemoveIndex(
            model_name='fileupload',
            name='file_uploads_afe99c5e',
        ),
        migrations.RemoveField(
            model_name='fileupload',
            name='compat_with_app',
        ),
        migrations.RemoveField(
            model_name='fileupload',
            name='compat_with_appver',
        ),
    ]
| bsd-3-clause | Python | |
48d7fc87c51cb15829e9cf161b38dc004affee9e | ADD example_sequential | automl/auto-sklearn,automl/auto-sklearn | example/example_sequential.py | example/example_sequential.py | import sklearn.cross_validation
import sklearn.datasets
import sklearn.metrics
import autosklearn.classification
def main():
    """Run auto-sklearn sequentially on the digits dataset.

    Model search runs single-core (ensemble_size=0 disables parallel
    ensemble building); the ensemble is then built afterwards from the
    fitted models and used for prediction.
    """
    digits = sklearn.datasets.load_digits()
    X = digits.data
    y = digits.target
    X_train, X_test, y_train, y_test = \
        sklearn.cross_validation.train_test_split(X, y, random_state=1)
    automl = autosklearn.classification.AutoSklearnClassifier(
        time_left_for_this_task=1200, per_run_time_limit=30,
        # NOTE(review): 'autoslearn' typo is part of the original tmp path;
        # fixing it would relocate the scratch directory.
        tmp_folder='/tmp/autoslearn_sequential_example_tmp',
        output_folder='/tmp/autosklearn_sequential_example_out',
        # Do not construct ensembles in parallel to avoid using more than one
        # core at a time. The ensemble will be constructed after auto-sklearn
        # finished fitting all machine learning models.
        ensemble_size=0, delete_tmp_folder_after_terminate=False)
    automl.fit(X_train, y_train, dataset_name='digits')
    # This call to fit_ensemble uses all models trained in the previous call
    # to fit to build an ensemble which can be used with automl.predict()
    automl.fit_ensemble(y_train, ensemble_size=50)
    print(automl.show_models())
    predictions = automl.predict(X_test)
    print(automl.sprint_statistics())
    print("Accuracy score", sklearn.metrics.accuracy_score(y_test, predictions))


if __name__ == '__main__':
    main()
| bsd-3-clause | Python | |
3246873aad0482b9087663f452c2fe32c37daa36 | Add new migration | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | custom/icds_reports/migrations/0101_auto_20190227_1801.py | custom/icds_reports/migrations/0101_auto_20190227_1801.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.20 on 2019-02-27 18:01
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Relax `supervisor_id` on the ICDS aggregate form models so the
    column is nullable (TextField, max_length=40) on every model that
    gained it in migration 0100."""

    dependencies = [
        ('icds_reports', '0100_add_supervisor_id'),
    ]

    operations = [
        migrations.AlterField(
            model_name='aggregatebirthpreparednesforms',
            name='supervisor_id',
            field=models.TextField(max_length=40, null=True),
        ),
        migrations.AlterField(
            model_name='aggregateccsrecorddeliveryforms',
            name='supervisor_id',
            field=models.TextField(max_length=40, null=True),
        ),
        migrations.AlterField(
            model_name='aggregateccsrecordpostnatalcareforms',
            name='supervisor_id',
            field=models.TextField(max_length=40, null=True),
        ),
        migrations.AlterField(
            model_name='aggregateccsrecordthrforms',
            name='supervisor_id',
            field=models.TextField(max_length=40, null=True),
        ),
        migrations.AlterField(
            model_name='aggregatecomplementaryfeedingforms',
            name='supervisor_id',
            field=models.TextField(max_length=40, null=True),
        ),
        migrations.AlterField(
            model_name='awwincentivereport',
            name='supervisor_id',
            field=models.TextField(max_length=40, null=True),
        ),
    ]
| bsd-3-clause | Python | |
d7cd9951581df833ac3e6eecd099035f2d30df0e | Add initial Starling mapping | reubano/csv2ofx,reubano/csv2ofx | csv2ofx/mappings/starling.py | csv2ofx/mappings/starling.py | from __future__ import (
absolute_import, division, print_function, unicode_literals)
from operator import itemgetter
def fixdate(ds):
    """Reorder a '/'-separated date string: 'a/b/c' -> 'b.a.c'."""
    # NOTE(review): this swap was copied from ubs-ch-fr.py without a
    # stated rationale -- presumably it turns dd/mm/yyyy into the
    # month-first dotted order the downstream parser expects; confirm.
    parts = ds.split('/')
    return "%s.%s.%s" % (parts[1], parts[0], parts[2])
# csv2ofx column mapping for Starling Bank CSV exports.
mapping = {
    # The export includes a header row.
    'has_header': True,
    # Presumably dates arrive as dd/mm/yyyy; fixdate reorders the first
    # two components -- TODO confirm against a real export.
    'date': lambda tr: fixdate(tr['Date']),
    'amount': itemgetter('Amount (GBP)'),
    'desc': itemgetter('Reference'),
    'payee': itemgetter('Counter Party')
}
| mit | Python | |
67a5e2797e362bb5a38b8af5b05cf72d1e28a262 | add import script for Wokingham (closes #795) | chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_wokingham.py | polling_stations/apps/data_collection/management/commands/import_wokingham.py | from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
    """Import polling station data for Wokingham (GSS code E06000041)
    for the 2017-06-08 parliamentary election."""
    council_id = 'E06000041'
    # Addresses and stations come from the same merged export file.
    addresses_name = 'parl.2017-06-08/Version 1/merged.tsv'
    stations_name = 'parl.2017-06-08/Version 1/merged.tsv'
    elections = ['parl.2017-06-08']
    # The source file is tab-separated, not comma-separated.
    csv_delimiter = '\t'
| bsd-3-clause | Python | |
6972c0c7050bc68c10361dce02fb227df6426fe9 | convert LUNA16 to jpg and png | ALISCIFP/tensorflow-resnet-segmentation,ALISCIFP/tensorflow-resnet-segmentation | convert_mhd2jpg_png_LUNA16.py | convert_mhd2jpg_png_LUNA16.py | #!/usr/bin/env python
# This script belongs to https://github.com/
import os,glob
import argparse
import numpy as np
import SimpleITK as sitk
from PIL import Image
import cv2
import scipy.misc
DATA_DIRECTORY = '/home/zack/Data/LUNA16/'
OUT_DIRECTORY = "/home/zack/Data/LUNA16/"
def mhd2ndarray(data_file):
    """Load an .mhd volume and return it as a numpy array with the slice
    axis moved last, i.e. shape (H, W, slices)."""
    volume = sitk.GetArrayFromImage(sitk.ReadImage(data_file))
    # SimpleITK yields (slices, H, W); put the slice axis at the end.
    return np.transpose(volume, (1, 2, 0))
def ndarry2jpg_png(data_file, out_dir, subsetIndex, flist):
    """Convert one LUNA16 .mhd volume into per-slice 3-channel JPEGs plus
    per-slice PNG segmentation masks, appending an "image mask" line for
    each slice to the dataset list file.

    data_file   -- path to the CT volume (.mhd) inside subset<N>
    out_dir     -- output root containing JPEGImages/ and PNGImages/
    subsetIndex -- LUNA16 subset number the volume belongs to
    flist       -- open file handle the "jpg png" pairs are written to
    """
    data_path, fn = os.path.split(data_file)
    # img_gt_file= data_path+"output/yes_lesion_no_rescale/seg/"+fn
    # The segmentation ground truth lives in a parallel directory tree.
    img_gt_file = data_file.replace("subset"+str(subsetIndex), "output/yes_lesion_no_rescale/subset"+str(subsetIndex)+"/seg")
    img = mhd2ndarray(data_file)
    img_gt = mhd2ndarray(img_gt_file)
    # Pad one slice of the background value (-3024) at each end so every
    # slice has a previous/current/next 3-channel neighbourhood.
    img_pad = np.lib.pad(img, ((0, 0), (0, 0), (1, 1)), 'constant', constant_values=(-3024, -3024))
    # img_pos = img_pad-img_pad.min()
    # img_pad = img_pos*(255.0/img_pos.max())
    for i in xrange(0, img.shape[2]):
        img3c = img_pad[:, :, i:i+3]
        scipy.misc.imsave(os.path.join(out_dir+"JPEGImages/subset"+str(subsetIndex), fn+"_"+str(i)+".jpg"), img3c)
        # im = Image.fromarray(img3c)
        # im.save(os.path.join(out_dir+"JPEGImages/subset"+str(subsetIndex),fn+"_"+str(i)+"_"+".jpg"))
        cv2.imwrite(os.path.join(out_dir+"PNGImages/subset"+str(subsetIndex), fn+"_"+str(i)+".png"), img_gt[:, :, i])
        flist.write("/JPEGImages/subset"+str(subsetIndex)+"/"+fn+"_"+str(i)+".jpg "+"/PNGImages/subset"+str(subsetIndex)+"/"+fn+"_"+str(i)+".png\n")
def convert(data_dir,out_dir):
ftrain = open(data_dir + "dataset/train.txt", 'w')
fval = open(data_dir + "dataset/val.txt", 'w')
for i in xrange(10):
print "converting subset "+str(i)
os.chdir(data_dir + "subset" + str(i))
if not os.path.exists(data_dir + "JPEGImages/subset" + str(i)):
os.mkdir(data_dir + "JPEGImages/subset" + str(i))
if not os.path.exists(data_dir + "PNGImages/subset" + str(i)):
os.mkdir(data_dir + "PNGImages/subset" + str(i))
for file in glob.glob("*.mhd"):
if i<8:
ndarry2jpg_png(os.path.join(data_dir + "subset" + str(i),file), out_dir, i,ftrain)
else:
ndarry2jpg_png(os.path.join(data_dir + "subset" + str(i),file), out_dir, i,fval)
ftrain.close()
fval.close()
print "converting"
print "done."
def main():
    """Parse command line arguments and run the LUNA16 conversion."""
    # The original created a bare ArgumentParser and immediately threw it
    # away; build the configured parser once.
    parser = argparse.ArgumentParser(description="mdh to jpg-png file converter")
    parser.add_argument("--data-dir", type=str, default=DATA_DIRECTORY,
                        help="Path to the directory containing the ILD dataset.")
    parser.add_argument("--out-dir", type=str, default=OUT_DIRECTORY,
                        help="Path to the directory containing the ILD dataset in jpg and png format.")
    args = parser.parse_args()
    convert(args.data_dir, args.out_dir)


if __name__ == '__main__':
    main()
| mit | Python | |
c86b61a23ad9b3a152bb6644cb5dde5a4b42fbc9 | Add Management Command to Dump Project and RemoteRepository Relationship in JSON format | rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org | readthedocs/projects/management/commands/dump_project_remote_repo_relation.py | readthedocs/projects/management/commands/dump_project_remote_repo_relation.py | import json
from django.core.management.base import BaseCommand
from readthedocs.projects.models import Project
class Command(BaseCommand):
    help = "Dump Project and RemoteRepository Relationship in JSON format"

    def handle(self, *args, **options):
        """Collect a (remote_id, project_id) pair for every Project that
        is linked to a RemoteRepository and write the list to
        project-remote-repo-dump.json; rows with missing or invalid JSON
        are reported on stdout and skipped."""
        data = []
        queryset = Project.objects.filter(
            remote_repository__isnull=False,
        ).values_list('id', 'remote_repository__json').distinct()
        for project_id, remote_repository__json in queryset:
            try:
                json_data = json.loads(remote_repository__json)
                # GitHub and GitLab uses `id` and Bitbucket uses `uuid`
                # for the repository id
                remote_id = json_data.get('id') or json_data.get('uuid')
                if remote_id:
                    data.append({
                        'remote_id': remote_id,
                        'project_id': project_id
                    })
                else:
                    self.stdout.write(
                        self.style.ERROR(
                            f'Project {project_id} does not have a remote_repository remote_id'
                        )
                    )
            except json.decoder.JSONDecodeError:
                self.stdout.write(
                    self.style.ERROR(
                        f'Project {project_id} does not have a valid remote_repository__json'
                    )
                )
        # Dump the data to a json file
        with open('project-remote-repo-dump.json', 'w') as f:
            f.write(json.dumps(data))
| mit | Python | |
03733ada2ec1ea7b9aeca76d9dc1a0ebfc1c0db7 | Add script to mask out a region of interest in an image | aguijarro/SelfDrivingCar | FindingLaneLines/ColorRegionCombined.py | FindingLaneLines/ColorRegionCombined.py | # Code uses to mask out a region of interest in an image
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np

# Read in the image and print out some stats
# Note: in the previous example we were reading a .jpg
# Here we read a .png and convert to 0,255 bytescale
image = (mpimg.imread('test.png') * 255).astype('uint8')

# Grab the x and y size and make a copy of the image
ysize = image.shape[0]
xsize = image.shape[1]
color_select = np.copy(image)
line_image = np.copy(image)

# Define color selection criteria
# MODIFY THESE VARIABLES TO MAKE YOUR COLOR SELECTION
red_threshold = 200
green_threshold = 200
blue_threshold = 200
rgb_threshold = [red_threshold, green_threshold, blue_threshold]

# Define the vertices of a triangular mask.
# Keep in mind the origin (x=0, y=0) is in the upper left
# MODIFY THESE VALUES TO ISOLATE THE REGION
# WHERE THE LANE LINES ARE IN THE IMAGE
left_bottom = [0, 539]
right_bottom = [900, 539]
apex = [475, 320]

# Perform a linear fit (y=Ax+B) to each of the three sides of the triangle
# np.polyfit returns the coefficients [A, B] of the fit
fit_left = np.polyfit((left_bottom[0], apex[0]), (left_bottom[1], apex[1]), 1)
fit_right = np.polyfit((right_bottom[0], apex[0]), (right_bottom[1], apex[1]), 1)
fit_bottom = np.polyfit((left_bottom[0], right_bottom[0]), (left_bottom[1], right_bottom[1]), 1)

# Perform a "bitwise or" to mask pixels below the threshold
# (True marks pixels that fail the colour criterion in any channel.)
color_thresholds = (image[:,:,0] < rgb_threshold[0]) | \
    (image[:,:,1] < rgb_threshold[1]) | \
    (image[:,:,2] < rgb_threshold[2])

# Find the region inside the lines
XX, YY = np.meshgrid(np.arange(0, xsize), np.arange(0, ysize))
region_thresholds = (YY > (XX*fit_left[0] + fit_left[1])) & \
    (YY > (XX*fit_right[0] + fit_right[1])) & \
    (YY < (XX*fit_bottom[0] + fit_bottom[1]))

# Mask color and region selection
color_select[color_thresholds | ~region_thresholds] = [0, 0, 0]
# Color pixels red where both color and region selections met
line_image[~color_thresholds & region_thresholds] = [255, 0, 0]

# Display the image and show region and color selections
plt.imshow(image)
x = [left_bottom[0], right_bottom[0], apex[0], left_bottom[0]]
y = [left_bottom[1], right_bottom[1], apex[1], left_bottom[1]]
plt.plot(x, y, 'b--', lw=4)
plt.imshow(color_select)
# locally and wish to save the image
mpimg.imsave("test-color_select.png", color_select)
plt.imshow(line_image)
# locally and wish to save the image
mpimg.imsave("test-line-image.png", line_image)
| mit | Python | |
64c7bb3d9e924c5651e85229ea5451c8a808bf04 | add show_graph.py | rb-roomba/music | goldberg/show_graph.py | goldberg/show_graph.py | #! /usr/bin/python
# -*- coding: utf-8 -*-
import cPickle as pickle
import matplotlib.pyplot as plt
import numpy as np
import seaborn
import pandas as pd
import matplotlib.cm as cm
def pitch(height):
    """Map an integer scale degree to its natural pitch letter (c..b)."""
    letters = ("c", "d", "e", "f", "g", "a", "b")
    return letters[height % 7]
def doremi(cde):
    """Translate a pitch letter ('c'..'b', any case) to solfege.

    Raises ValueError for anything that is not one of the seven natural
    pitch letters.  (The original printed an error and then crashed with
    an uninformative ValueError from list.index() anyway; the exception
    type callers see is unchanged, it just carries the message now.)
    """
    cde_list = ["c", "d", "e", "f", "g", "a", "b"]
    doremi_list = ["do", "re", "mi", "fa", "sol", "la", "si"]
    key = cde.lower()
    if key not in cde_list:
        raise ValueError("Error: " + key + " not in cde list. ")
    return doremi_list[cde_list.index(key)]
def plot_var(times, pitches, ends, var_n):
    """ Show time series graph of variation [var_n]. """
    # var_n: 0 to 30 (0: Aria)
    # Keep only notes whose onset lies in (end of previous variation,
    # end of this variation].
    n_data = filter(lambda x: (ends[var_n] < x[0] <= ends[var_n+1]),
                    zip(times, pitches))
    # seaborn
    df = pd.DataFrame(n_data)
    df.columns = ["time", "height"]
    seaborn.jointplot('time', 'height', data=df)
    plt.show()
def make_mat(times, pitches, full=False):
    """Build a note-transition count matrix from parallel times/pitches.

    For each consecutive pair of time steps t -> t+1, increment the cell
    (p, q) for every note p sounding at t and q at t+1.

    full=True  -- one row/column per distinct pitch value
    full=False -- 7x7 matrix over the natural pitch letters (via pitch())
    """
    tset = sorted(set(times))
    # Materialise the (time, pitch) pairs: under Python 3 zip() returns a
    # one-shot iterator, and the original code exhausted it on the first
    # pass of the grouping comprehension, leaving every later time step
    # empty.  list() is a no-op change on Python 2.
    if full:
        labels = sorted(set(pitches))
        pairs = list(zip(times, pitches))
    else:
        labels = ["c", "d", "e", "f", "g", "a", "b"]
        pairs = list(zip(times, [pitch(i) for i in pitches]))
    grouped = [[p for t0, p in pairs if t0 == t] for t in tset]
    ret_mat = np.zeros([len(labels), len(labels)])
    for t in range(len(grouped) - 1):
        for p in grouped[t]:
            for q in grouped[t + 1]:
                ret_mat[labels.index(p)][labels.index(q)] += 1
    return ret_mat
def show_heatmap(mat, pitches, full=False):
    """ Show heatmap of matrix. """
    if full:
        # One row/column per distinct pitch value, labelled in solfege.
        i_list = [doremi(pitch(i)) for i in sorted(list(set(pitches)))]
        df = pd.DataFrame(mat)
        df.columns = i_list
        df.index = i_list
    else:
        # 7x7 matrix over the natural pitch letters.
        doremi_list = ["do", "re", "mi", "fa", "sol", "la", "si"]
        df = pd.DataFrame(mat)
        df.columns = doremi_list
        df.index = doremi_list
    seaborn.heatmap(df, cmap='Blues')
    plt.show()
if __name__ == "__main__":
    # load pickle
    pickle_file = 'goldberg_full.pickle'
    with open(pickle_file, 'rb') as f:
        g_pickle = pickle.load(f)
    # extract data from pickle
    times = g_pickle["times"]
    pitches = g_pickle["pitches"]
    end_times = g_pickle["end_times"]
    # Show times series graph (variation 30, the last one)
    plot_var(times, pitches, end_times, 30)
    # t -> t+1 Matrix: letter-level and full pitch-level transition counts
    mat = make_mat(times, pitches)
    full_mat = make_mat(times, pitches, True)
    show_heatmap(mat, pitches)
    show_heatmap(full_mat, pitches, True)
| mit | Python | |
0f92843ec20007dc4379ff88d2e711bdad5e9125 | Add ftp module, the backend for salt-ftp | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/cli/ftp.py | salt/cli/ftp.py | '''
The ftp module is used to execute the logic used by the salt-ftp command
line application, salt-ftp is NOT intended to broadcast large files, it is
intended to handle text files.
Salt-ftp can be used to distribute configuration files
'''
# Import python modules
import os
import stat
import sys

# Import third party modules
import yaml

# Import salt modules
import salt.client
class SaltFTP(object):
    '''
    Create a salt ftp object, used to distribute simple files with salt
    '''
    def __init__(self, opts):
        # opts: parsed salt-ftp command line configuration.
        self.opts = opts

    def _file_dict(self, fn_):
        '''
        Take a path and return the contents of the file as a string
        '''
        if not os.path.isfile(fn_):
            err = 'The referenced file, ' + fn_ + ' is not available.'
            sys.stderr.write(err + '\n')
            sys.exit(42)
        # Use a context manager so the handle is closed deterministically
        # instead of leaking until garbage collection.
        with open(fn_, 'r') as fp_:
            return {fn_: fp_.read()}

    def _recurse_dir(self, fn_, files=None):
        '''
        Recursively pull files from a directory
        '''
        # The original used a mutable default ({}), which is shared
        # between calls and silently accumulates files across
        # invocations; create a fresh dict per call instead.
        if files is None:
            files = {}
        for base in os.listdir(fn_):
            path = os.path.join(fn_, base)
            if os.path.isdir(path):
                files.update(self._recurse_dir(path))
            else:
                files.update(self._file_dict(path))
        return files

    def _load_files(self):
        '''
        Parse the files indicated in opts['src'] and load them into a python
        object for transport
        '''
        files = {}
        for fn_ in self.opts['src']:
            if os.path.isfile(fn_):
                files.update(self._file_dict(fn_))
            elif os.path.isdir(fn_):
                files.update(self._recurse_dir(fn_))
        return files

    def run(self):
        '''
        Make the salt client call
        '''
        arg = [self._load_files(), self.opts['dest']]
        local = salt.client.LocalClient(self.opts['conf_file'])
        args = [self.opts['tgt'],
                'ftp.recv',
                arg,
                self.opts['timeout'],
                ]
        # Optional target-matching mode flags are mutually exclusive.
        if self.opts['pcre']:
            args.append('pcre')
        elif self.opts['list']:
            args.append('list')
        elif self.opts['facter']:
            args.append('facter')
        ret = local.cmd(*args)
        # yaml was referenced but never imported in the original,
        # raising NameError here; the import now lives at module level.
        print(yaml.dump(ret))
| apache-2.0 | Python | |
b6e0daa9ee06961743ad5a5f728f3385bfdb0b4c | Add a helper script to apply changes in zuul | EntropyWorks/bifrost,bcornec/bifrost,juliakreger/bifrost,devananda/bifrost,openstack/bifrost,openstack/bifrost,EntropyWorks/bifrost,devananda/bifrost,bcornec/bifrost,juliakreger/bifrost | playbooks/roles/ironic-install/files/parse_zuul_changes.py | playbooks/roles/ironic-install/files/parse_zuul_changes.py | #!/usr/bin/env python
# (c) 2015, Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
import re
import subprocess
import sys
# NOTE: the original compared small ints with "is" (identity), which only
# works by accident of CPython's int caching; use "==" for value equality.
if len(sys.argv) == 1:
    print("ERROR: This script requires arguments!\n"
          "%s repository_path review_url repository_name "
          "zuul_changes" % sys.argv[0])
    sys.exit(1)

repo_path = sys.argv[1]
review_url = sys.argv[2]
repo_name = sys.argv[3]
# ZUUL_CHANGES is a '^'-separated list of project:branch:ref triplets.
change_list = str(sys.argv[4]).split('^')
applicable_changes = [x for x in change_list if repo_name in x]

try:
    for change in applicable_changes:
        (project, branch, ref) = change.split(':')
        if re.search(repo_name, project):
            # Check out the target branch first if we are not on it.
            if not re.search(branch, subprocess.check_output(
                    ['git', '-C', repo_path, 'status', '-s', '-b'])):
                command = ['git', '-C', repo_path, 'checkout', branch]
                subprocess.call(command, stdout=True)
            command = ['git', '-C', repo_path, 'fetch',
                       review_url + "/" + repo_name, ref]
            # NOTE(review): stdout=True is passed where a file/fd is
            # expected (True == 1, i.e. stdout's fd); confirm intent.
            if subprocess.call(command, stdout=True) == 0:
                if subprocess.call(
                        ['git', '-C', repo_path, 'cherry-pick', 'FETCH_HEAD'],
                        stdout=True) == 0:
                    print("Applied %s" % ref)
                else:
                    print("Failed to cherry pick %s on to %s branch %s"
                          % (ref, repo_name, branch))
                    sys.exit(1)
            else:
                print("Failed to download %s on to %s branch %s"
                      % (ref, repo_name, branch))
                sys.exit(1)
except Exception as e:
    print("Failed to process change: %s" % e)
| apache-2.0 | Python | |
855fe9e95651626c2737c65424ad1067f253e28a | Implement invisible user mode | Heufneutje/txircd,ElementalAlchemist/txircd | txircd/modules/rfc/umode_i.py | txircd/modules/rfc/umode_i.py | from twisted.plugin import IPlugin
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
class InvisibleMode(ModuleData, Mode):
    implements(IPlugin, IModuleData, IMode)

    name = "InvisibleMode"
    core = True
    # Actions this mode participates in.
    affectedActions = [ "showchanneluser" ]

    def actions(self):
        # Register isInvisible as the check that decides whether user
        # mode +i applies to a "showchanneluser" action.
        return [ ("modeactioncheck-user-i-showchanneluser", 1, self.isInvisible) ]

    def userModes(self):
        # Register +i as a parameterless user mode handled by this class.
        return [ ("i", ModeType.NoParam, self) ]

    def isInvisible(self, user, channel, fromUser, userSeeing):
        # True  -> the mode is set, so apply() should run.
        # None  -> mode not set; no opinion.
        if "i" in user.modes:
            return True
        return None

    def apply(self, actionName, user, param, channel, fromUser, sameUser):
        # NOTE(review): tri-state result -- None appears to defer to other
        # modules while False blocks showing the user; confirm against the
        # txircd mode-action API before changing any return value.
        if user != sameUser:
            return None
        # Hide the invisible user unless the viewer shares the channel.
        if not channel or fromUser not in channel.users:
            return False
        return None
invisibleMode = InvisibleMode() | bsd-3-clause | Python | |
cfec967be4602dff636adb951b582d1db114f578 | Add single dataset doc gen | tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets,tensorflow/datasets | tensorflow_datasets/scripts/document_dataset_version.py | tensorflow_datasets/scripts/document_dataset_version.py | # coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
r"""Dump the list of all registered datasets/config/version in a `.txt` file.
Instructions:
```
python tensorflow_datasets/scripts/freeze_dataset_version.py
```
"""
import os
from absl import app
from absl import flags
import tensorflow.compat.v2 as tf
import tensorflow_datasets as tfds
from tensorflow_datasets.scripts.document_datasets import document_single_builder
FLAGS = flags.FLAGS

flags.DEFINE_string('tfds_dir', tfds.core.utils.tfds_dir(),
                    'Path to tensorflow_datasets directory')

# Datasets whose catalog page should be (re)generated.
DATASET_TO_TESTS = ['waymo_open_dataset']
def version_doc(ds_name):
    """Generate the catalog markdown page for one dataset.

    Renders the builder's documentation and writes it to
    docs/catalog/<ds_name>.md.
    """
    builder = tfds.builder(ds_name)
    # (The original also computed an unused 'version_path'; dropped.)
    catalog_dir = tfds.core.get_tfds_path('../docs/catalog/')
    with tf.io.gfile.GFile(os.path.join(catalog_dir, ds_name + ".md"), 'w') as f:
        f.write(document_single_builder(builder))
def main(_):
    # Regenerate the catalog page for every dataset in DATASET_TO_TESTS.
    for name in DATASET_TO_TESTS:
        version_doc(name)


if __name__ == '__main__':
    app.run(main)
ba5a251896e51bab70fa6b664e6974d3c42ff7b4 | add asciidoctor gem (#9905) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack | var/spack/repos/builtin/packages/asciidoctor/package.py | var/spack/repos/builtin/packages/asciidoctor/package.py | # Copyright 2013-2018 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Asciidoctor(Package):
    """Modern asciidoc tool based on ruby"""

    homepage = "https://asciidoctor.org/"
    url = "https://rubygems.org/downloads/asciidoctor-1.5.8.gem"

    # The artifact is a .gem, so do not try to unpack it.
    version('1.5.8', '5f55200cab8d1cfcf561e66d3f477159', expand=False)

    extends('ruby')

    def install(self, spec, prefix):
        # Install the downloaded gem into the ruby extension prefix.
        gem('install', 'asciidoctor-{0}.gem'.format(self.version))
| lgpl-2.1 | Python | |
a189203bf2dc7ddd925ed8cfbeb191ee52d43711 | Add Python script to investigate s3 speeds | AcidLeroy/OpticalFlow,AcidLeroy/OpticalFlow,AcidLeroy/OpticalFlow,AcidLeroy/OpticalFlow | src/aws_scripts/benchmark_s3.py | src/aws_scripts/benchmark_s3.py | from __future__ import print_function
import boto3
import subprocess
import os
import pandas as pd
import time
def DownloadFile(local_file, bucket, bucket_path):
    """Fetch s3://<bucket>/<bucket_path> into local_file."""
    print("Downloading file...", end="")
    boto3.resource('s3').Bucket(bucket).download_file(bucket_path, local_file)
    print("Done!")
def UploadFile(local_file, bucket, bucket_path):
    """Push local_file to s3://<bucket>/<bucket_path>."""
    print("Uploading file...", end="")
    boto3.resource('s3').Bucket(bucket).upload_file(local_file, bucket_path)
    print("Done!")
def CreateFile(file_size):
    """Create a zero-filled file of `file_size` megabytes (10^6 bytes)
    named file.bin in the current directory and return its absolute path.

    The original shelled out to `dd`, which is non-portable (no Windows)
    and spawns a process for no benefit; write the zeros directly,
    chunked so a 1 GB file does not need 1 GB of memory.
    """
    n_bytes = int(file_size * 1e6)
    file_name = 'file.bin'
    chunk = b'\x00' * (1024 * 1024)
    with open(file_name, 'wb') as handle:
        remaining = n_bytes
        while remaining > 0:
            step = min(len(chunk), remaining)
            handle.write(chunk[:step])
            remaining -= step
    return os.path.abspath(file_name)
def main():
    """Benchmark S3 upload/download times across file sizes.

    For each size a zero-filled file is created and transferred
    num_iterations times; all per-iteration timings are written to
    s3_timing_results.txt as CSV.
    """
    file_sizes_mb = [1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024]
    #file_sizes_mb = [1, 2, 4, 8]
    num_iterations = 10
    bucket = 'aolme'
    bucket_path = 'dummy/file.bin'
    times = {}
    for sz in file_sizes_mb:
        local_file = CreateFile(sz)
        upload_times = []
        download_times = []
        print("Testing size: " + str(sz))
        # NOTE: 'iter' shadows the builtin; harmless here but easy to trip on.
        for iter in range(1, num_iterations+1):
            start = time.time()
            UploadFile(local_file, bucket, bucket_path)
            elapsed_time = time.time() - start
            upload_times.append(elapsed_time)
            start = time.time()
            DownloadFile(local_file, bucket, bucket_path)
            elapsed_time = time.time() - start
            download_times.append(elapsed_time)
        times['Download ' + str(sz) + 'MB (s)'] = download_times
        times['Upload ' + str(sz) + 'MB (s)'] = upload_times
    df = pd.DataFrame(times)
    print(df.head())
    df.to_csv("s3_timing_results.txt")


if __name__=='__main__':
    main()
| mit | Python | |
a78e2566d26b229192e94fecb3c89801afbdce02 | Create MQTT.py | sstocker46/pyrobotlab,mecax/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,mecax/pyrobotlab,sstocker46/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,sstocker46/pyrobotlab,MyRobotLab/pyrobotlab | service/MQTT.py | service/MQTT.py | from time import sleep
topic = "mrl"
qos = 2
broker = "tcp://iot.eclipse.org:1883" //if you have your own just change the hostname/IP
clientID = "MRL MQTT python"
mqtt1 = Runtime.createAndStart("mqtt", "MQTT")
mqtt1.startService()
print mqtt1.getDescription()
mqtt1.startClient(topic, qos, broker, clientID)
sleep(1)
mqtt1.publish("Greetings from MRL python");
| apache-2.0 | Python | |
40907535fcae6771dad5f4ee5825fef56bfe7fa0 | Add test for teuthology.suite.build_email_body() | dreamhost/teuthology,dreamhost/teuthology,michaelsevilla/teuthology,SUSE/teuthology,robbat2/teuthology,ktdreyer/teuthology,zhouyuan/teuthology,dmick/teuthology,tchaikov/teuthology,ceph/teuthology,ivotron/teuthology,zhouyuan/teuthology,ktdreyer/teuthology,t-miyamae/teuthology,caibo2014/teuthology,caibo2014/teuthology,t-miyamae/teuthology,dmick/teuthology,yghannam/teuthology,robbat2/teuthology,michaelsevilla/teuthology,SUSE/teuthology,SUSE/teuthology,dmick/teuthology,ivotron/teuthology,yghannam/teuthology,tchaikov/teuthology,ceph/teuthology | teuthology/test/test_suite.py | teuthology/test/test_suite.py | #!/usr/bin/env python
from nose.tools import (
eq_ as eq,
assert_equal,
assert_raises,
with_setup,
)
import os
import shutil
import random
import yaml
from .. import suite
archive_base_dir = 'test_archive'
def get_random_metadata(name, hung=False):
    """
    Generate a random info dict for a fake job. If 'hung' is not True, also
    generate a summary dict.

    :param name: test name e.g. 'test_foo'
    :param hung: simulate a hung job e.g. don't return a summary.yaml
    :return: a dict with keys 'job_id', 'info' and possibly 'summary', with
             corresponding values
    """
    rng = random.Random()
    description = 'description for job with name %s' % name
    owner = 'job@owner'
    job_id = rng.randint(1, 99999)
    metadata = {
        'job_id': job_id,
        'info': {
            'description': description,
            'job_id': job_id,
            'name': name,
            'owner': owner,
            'pid': rng.randint(1000, 99999),
        },
    }
    if not hung:
        succeeded = rng.randint(0, 1) != 1
        summary = {
            'description': description,
            'duration': rng.randint(1, 36000),
            'owner': owner,
            'success': succeeded,
        }
        if not succeeded:
            summary['failure_reason'] = 'Failure reason!'
        metadata['summary'] = summary
    return metadata
def make_archive_subdir():
    # Create the fake archive root used by the test below.
    os.mkdir(archive_base_dir)
def populate_archive(jobs):
    """Write each fake job's info.yaml (and summary.yaml, when present)
    into <archive_base_dir>/<job_id>/."""
    for job in jobs:
        archive_dir = os.path.join(archive_base_dir, str(job['job_id']))
        os.mkdir(archive_dir)
        with file(os.path.join(archive_dir, 'info.yaml'), 'w') as yfile:
            yaml.safe_dump(job['info'], yfile)
        # Hung jobs deliberately have no summary.yaml.
        if 'summary' in job:
            with file(os.path.join(archive_dir, 'summary.yaml'), 'w') as yfile:
                yaml.safe_dump(job['summary'], yfile)
def teardown_fake_archive():
    # Remove the fake archive tree created by make_archive_subdir().
    shutil.rmtree(archive_base_dir)
# Golden fixture: three fake jobs (one failed, one hung, one passed) and
# the exact email subject/body build_email_body() is expected to produce.
reference = {
    'name': 'test_name',
    'jobs': [
        {'info': {'description': 'description for job with name test_name',
                  'job_id': 30481, 'name': 'test_name', 'owner': 'job@owner',
                  'pid': 80399},
         'job_id': 30481},
        {'info': {'description': 'description for job with name test_name',
                  'job_id': 88979, 'name': 'test_name', 'owner': 'job@owner',
                  'pid': 3903},
         'job_id': 88979,
         'summary': {
             'description': 'description for job with name test_name',
             'duration': 35190, 'failure_reason': 'Failure reason!',
             'owner': 'job@owner', 'success': False}},
        {'info': {'description': 'description for job with name test_name',
                  'job_id': 68369, 'name': 'test_name', 'owner': 'job@owner',
                  'pid': 38524},
         'job_id': 68369,
         'summary': {'description': 'description for job with name test_name',
                     'duration': 33771, 'owner': 'job@owner', 'success':
                     True}},
    ],
    'subject': '1 failed, 1 hung, 1 passed in test_name',
    'body': """
Test Run: test_name
=================================================================
logs: http://qa-proxy.ceph.com/teuthology/test_archive/
failed: 1
hung: 1
passed: 1
Failed
=================================================================
[88979] description for job with name test_name
-----------------------------------------------------------------
time: 35190s
log: http://qa-proxy.ceph.com/teuthology/test_archive/88979/
Failure reason!
Hung
=================================================================
[30481] description for job with name test_name
Passed
=================================================================
[68369] description for job with name test_name
time: 33771s
""".strip(),
}
@with_setup(make_archive_subdir, teardown_fake_archive)
def test_build_email_body():
    # Build the fake archive from the fixture jobs, then compare the
    # generated email against the golden subject/body above.
    populate_archive(reference['jobs'])
    (subject, body) = suite.build_email_body(reference['name'], archive_base_dir, 36000)
    assert_equal(subject, reference['subject'])
    assert_equal(body, reference['body'])
| mit | Python | |
2f0be29357a8b649b59cd685eb6dea646813feac | Add discovery example (#361) | balloob/pychromecast,dominikkarall/pychromecast,balloob/pychromecast | examples/discovery_example.py | examples/discovery_example.py | """
Example that shows how to receive updates on discovered chromecasts.
"""
import argparse
import logging
import time
import pychromecast
parser = argparse.ArgumentParser(description="Example on how to receive updates on discovered chromecasts.")
parser.add_argument("--show-debug", help="Enable debug log", action="store_true")
args = parser.parse_args()
if args.show_debug:
logging.basicConfig(level=logging.DEBUG)
def list_devices():
    """Print every cast device currently known to the discovery listener."""
    print("Currently known cast devices:")
    for name, service in listener.services.items():
        print("  {} {}".format(name, service))
def add_callback(name):
    """Discovery callback: invoked when a new cast device appears."""
    print("Found cast device {}".format(name))
    list_devices()
def remove_callback(name, service):
    """Discovery callback: invoked when a known cast device disappears."""
    print("Lost cast device {} {}".format(name, service))
    list_devices()
listener, browser = pychromecast.discovery.start_discovery(add_callback, remove_callback)
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
pass
pychromecast.stop_discovery(browser)
| mit | Python | |
182d4229f6293eec791a5eba716917ace3bcfaca | Make looking for outliers easier | e-koch/canfar_scripts,e-koch/canfar_scripts | cal_pipe/manual_outlier_flag.py | cal_pipe/manual_outlier_flag.py |
import sys
'''
Check for garbage points in a MS by SPW.
'''
try:
vis_name = sys.argv[1]
corrstring = sys.argv[2]
starting_spw = int(sys.argv[3])
except IndexError:
vis_name = raw_input("MS Name? : ")
corrstring = raw_input("Corrstring? : ")
starting_spw = int(raw_input("SPW to start at? : "))
tb.open(vis_name + '/SPECTRAL_WINDOW')
freqs = tb.getcol('REF_FREQUENCY')
nchans = tb.getcol('NUM_CHAN')
tb.close()
spws = range(starting_spw, len(freqs))
for spw_num in spws:
print "On " + str(spw_num+1) + " of " + str(len(freqs))
default('plotms')
vis = vis_name
xaxis = 'time'
yaxis = 'amp'
ydatacolumn = 'corrected'
selectdata = True
field = ''
spw = str(spw_num)
scan = bp_scan
correlation = corrstring
averagedata = False
avgscan = False
transform = False
extendflag = False
iteraxis = ''
coloraxis = 'antenna2'
plotrange = []
xlabel = ''
ylabel = ''
showmajorgrid = False
showminorgrid = False
plotms()
| mit | Python | |
0c6ce9d5752dfe1f4306c87c2b25a529175ff106 | Add comicpull. A basic comic pull-list manager. | xchewtoyx/comicmgt,xchewtoyx/comicmgt | comicpull.py | comicpull.py | #!/usr/bin/python
# Copyright 2013 Russell Heilling
# pylint: disable=C0103
'''Comic pull-list management.
Manage titles on pull-list and add new titles to toread list.
'''
import logging
import os
import sqlite3
import sys
import args
from calibredb import CalibreDB, set_log_level
ARGS = args.ARGS
args.add_argument('--add_volume', '-a', action='append',
help='comicvine volume to add to pull-list.')
args.add_argument('--nopull', '-n', action='store_true',
help='Don\'t check for new issues')
args.add_argument('--todo_file', help='Location of todo.txt file',
default=os.path.join(os.environ['HOME'],
'Dropbox/todo/todo.txt'))
args.add_argument('--pulldb', '-p', help='Location of pull database',
default=os.path.join(os.environ['HOME'], '.pull.db'))
args.add_argument('--verbose', '-v', action='count',
help='Enable verbose logging.')
class PullList(object):
    """Pull-list state stored in a local sqlite database.

    Two tables are kept: pull_volumes (volumes the user subscribes to) and
    seen_issues (issues already added to the reading list).
    """

    def __init__(self, pulldb):
        # Path of the sqlite database file.
        self.pulldb = pulldb
        self._check_tables()

    def _check_tables(self):
        """Create any missing tables on first use."""
        with sqlite3.connect(self.pulldb) as conn:
            tables = [table for (table,) in conn.execute(
                "SELECT tbl_name FROM SQLITE_MASTER WHERE type = 'table'")]
        if 'pull_volumes' not in tables:
            self._create_pull_volumes()
        if 'seen_issues' not in tables:
            self._create_seen_issues()

    def _create_pull_volumes(self):
        logging.info('Creating pull_volumes table')
        with sqlite3.connect(self.pulldb) as conn:
            conn.execute("CREATE TABLE pull_volumes (volume INTEGER PRIMARY KEY)")

    def _create_seen_issues(self):
        logging.info('Creating seen_issues table')
        with sqlite3.connect(self.pulldb) as conn:
            conn.execute("CREATE TABLE seen_issues (issue INTEGER PRIMARY KEY)")

    def add_issue(self, issueid):
        """Record that an issue has been handled."""
        logging.debug('Adding %d to issue list.', issueid)
        with sqlite3.connect(self.pulldb) as conn:
            # Bug fix: DB-API parameters must be a sequence, not a bare int.
            conn.execute('INSERT INTO seen_issues (issue) VALUES (?)',
                         (issueid,))

    def add_volume(self, volumeid):
        """Subscribe to a comicvine volume."""
        logging.debug('Adding %d to volume list.', volumeid)
        with sqlite3.connect(self.pulldb) as conn:
            # Bug fix: parameters wrapped in a tuple (was a bare int).
            conn.execute('INSERT INTO pull_volumes (volume) VALUES (?)',
                         (volumeid,))

    def pull_volume(self, volumeid):
        """Return True if volumeid is on the pull-list."""
        logging.debug('Looking up volume id %d', volumeid)
        with sqlite3.connect(self.pulldb) as conn:
            # Bug fixes: the column in pull_volumes is 'volume' (the original
            # queried a nonexistent 'issue' column), parameters must be a
            # tuple, and fetchone() returns None on a miss, so indexing the
            # result crashed when the volume was absent.
            cursor = conn.execute(
                'SELECT volume FROM pull_volumes WHERE volume=?', (volumeid,))
            return cursor.fetchone() is not None

    def seen_issue(self, issueid):
        """Return True if issueid has already been seen."""
        logging.debug('Looking up issue id %d', issueid)
        with sqlite3.connect(self.pulldb) as conn:
            # Bug fix: tuple parameters + None-safe miss handling (see above).
            cursor = conn.execute(
                'SELECT issue FROM seen_issues WHERE issue=?', (issueid,))
            return cursor.fetchone() is not None

    def volumes(self):
        """Yield each subscribed volume id as an int."""
        with sqlite3.connect(self.pulldb) as conn:
            for (volume,) in conn.execute('SELECT volume FROM pull_volumes'):
                # Bug fix: the row is already unpacked; 'volume' is an int,
                # so the original 'volume[0]' raised TypeError.
                yield volume
class ReadingList(object):
    """Append-only reading list backed by a todo.txt-style file."""

    def __init__(self, readinglist):
        # Path of the file that new entries are appended to.
        self.readinglist = readinglist

    def add_issues(self, issues):
        """Append (id, title) pairs to the list, one '<id> <title>' per line."""
        lines = ['%d %s\n' % entry for entry in issues]
        with open(self.readinglist, 'a') as reading_file:
            reading_file.writelines(lines)
def set_logging():
    """Configure log verbosity from the -v/--verbose count.

    Bug fixes: the logging *module* has no setLevel() (the original raised
    AttributeError), and the second branch was a plain 'if', so the 'else'
    clobbered DEBUG back down to WARN whenever verbose > 1.
    """
    if ARGS.verbose > 1:
        level = logging.DEBUG
    elif ARGS.verbose == 1:
        level = logging.INFO
    else:
        level = logging.WARN
    logging.getLogger().setLevel(level)
    set_log_level(level)
def main():
    """Check subscribed volumes for unseen issues and update the todo list."""
    calibredb = CalibreDB()
    pull_list = PullList(ARGS.pulldb)
    # Add new volumes requested on the command line.
    # Bug fix: the option is '--add_volume', so argparse stores it as
    # ARGS.add_volume; the original read the nonexistent ARGS.addvolume.
    if ARGS.add_volume:
        for volume in ARGS.add_volume:
            pull_list.add_volume(int(volume))
    # Check database for new issues for pull volumes.
    if not ARGS.nopull:
        new_issues = []
        for volume in pull_list.volumes():
            logging.info('Found volume %d', volume)
            calibredb.search(query='identifiers:comicvine-volume:%d' % volume)
            for issue in calibredb.get_data_as_dict():
                if not pull_list.seen_issue(issue['id']):
                    logging.debug('Found unseen issue %d', issue['id'])
                    new_issues.append((issue['id'], issue['title']))
                    # Bug fix: mark the issue as seen so it is not appended
                    # to the reading list again on every subsequent run.
                    pull_list.add_issue(issue['id'])
        # Update the toread list with anything new.
        if new_issues:
            toread = ReadingList(ARGS.todo_file)
            toread.add_issues(new_issues)
if __name__ == '__main__':
args.parse_args()
set_logging()
try:
main()
except KeyboardInterrupt:
sys.exit(1)
| mit | Python | |
9f6c1306e443f74dbf108def2a25c44c33f2bec1 | Add a serial test | simotek/tanko-bot,simotek/tanko-bot,simotek/tanko-bot | src/test-serial.py | src/test-serial.py | # RobotMain - Simon Lees simon@simotek.net
# Copyright (C) 2015 Simon Lees
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import threading
from .threadedserial import ThreadedSerial
from .util import CallbackHelper
from .constants import *
class SerialTestParsingException(Exception):
pass
class SerialTestInterfaceCallbacks:
def __init__(self):
self.annMessage = CallbackHelper()
class SerialTestInterface:
def __init__(self, callbacks):
self.__dataLock = threading.RLock()
self.__messageQueue = []
self.__callbacks = callbacks
self.__serial = ThreadedSerial("/dev/ttyS1", 4800)
self.__serial.setSerialRecieveFunction(self.onMessage)
self.__serial.create()
def getCallbacks(self):
return self.__callbacks
def setCallbacks(self, callbacks):
self.__callbacks = callbacks
# Adds message to queue for processing
def onMessage(self, message):
self.__dataLock.acquire()
self.__messageQueue.append(message)
self.__dataLock.release()
# Processes all messages on queue and fires there callbacks
def processMessages(self):
self.__dataLock.acquire()
while self.__messageQueue:
message = self.__messageQueue.pop(0)
#Unlock mutex to avoid holding while signals are triggered
self.__dataLock.release()
self.decodeMessage(message[0])
#relock mutex for next check of the queue
self.__dataLock.acquire()
# Release mutex once finished
self.__dataLock.release()
def decodeMessage(self, message):
data = message
self.__callbacks.annMessage.invoke(data)
def sendMessage(self, message):
print ("Sending: " + message)
self.__serial.write(message)
def sendDriveMotorSpeed(self, args):
message = str(CONST_SERVER_COMMAND_MOTOR_DRIVE+":"+args[0]+","+args[1]+"\n")
print (message)
self.__serial.write(message)
def printMessage(self, args):
print ("MSG:"+args[0])
if __name__ == '__main__':
    # NOTE(review): 'argparse' and 'time' are not imported at the top of this
    # file (only threading and package-local modules are) -- both lines below
    # would raise NameError at runtime; confirm intended imports.
    parser = argparse.ArgumentParser("Main Robot control app")
    #parser.add_argument("-s", "--no-serial", type=str, required=False, help="Stub out serial")
    parser.add_argument('--no-serial', dest='noserial', action='store_true')
    args = parser.parse_args()

    serialCallbacks = SerialTestInterfaceCallbacks()
    # NOTE(review): printMessage is defined as a *method* of
    # SerialTestInterface, not a module-level function, so this reference
    # raises NameError -- presumably it should be a bound method or a
    # standalone function; verify intent.
    serialCallbacks.annMessage.register(printMessage)

    serialInterface = SerialTestInterface(serialCallbacks)

    # Main app event loop: alternately drain the receive queue and send
    # test messages over the serial link.
    while True:
        serialInterface.processMessages()
        time.sleep(1)
        serialInterface.sendMessage("Foo\n")
        serialInterface.processMessages()
        time.sleep(1)
        serialInterface.sendMessage("Baa\n")
| lgpl-2.1 | Python | |
8bf7b5ae2464721be8270b159b99cd728109f000 | Create __init__.py | jrbadiabo/Coursera-Stanford-ML-Class | Python_Version/Ex7.K-Means_PCA_-_Clustering/__init__.py | Python_Version/Ex7.K-Means_PCA_-_Clustering/__init__.py | mit | Python | ||
13056d362fceb0aa20487a440c7347ad4231ea97 | Add tiltoc.py | raycon/til | tiltoc.py | tiltoc.py | import os
from datetime import datetime
# Directory
root = os.getcwd()
# Exclude
excludes = (root, "drafts")
def relative(root, path):
    """Return *path* relative to *root*, joined with forward slashes."""
    trimmed = path.replace(root, '')
    return '/'.join(trimmed.split(os.path.sep)[1:])
def tils(root):
    """Yield (relative_dir, markdown_paths) for each directory below root."""
    for (path, dirs, files) in os.walk(root):
        # Prune excluded and hidden directories in place so os.walk skips them.
        dirs[:] = [d for d in dirs if d not in excludes and not d.startswith(".")]
        paths = [os.path.join(path, f) for f in files if f.endswith(".md")]
        # Skip the root itself; only subdirectories are TIL categories.
        if path != root:
            yield relative(root, path), paths
def flat(tils):
    """Flatten (category, paths) pairs into individual (category, path) pairs."""
    for category, file_paths in tils:
        yield from ((category, file_path) for file_path in file_paths)
def recent(tils, limit):
    """Return the *limit* most recently modified (mtime, filename) pairs."""
    stamped = [(os.path.getmtime(filename), filename)
               for _category, filename in tils]
    stamped.sort(key=lambda entry: entry[0], reverse=True)
    return stamped[:limit]
def link(root, path):
    """Return a markdown link whose title is derived from the file name,
    e.g. "notes/my-note.md" -> "[My Note](notes/my-note.md)"."""
    path = relative(root, path)
    filename = path.split('/')[-1]
    # Drop the extension, split on dashes, and capitalize each word.
    title = ' '.join(n.capitalize() for n in os.path.splitext(filename)[0].split('-'))
    return f"[{title}]({path})"
def total(root):
return len(list(flat(tils(root))))
def readme():
lines = []
lines.append("# TIL\n")
lines.append("> Today I Learned\n")
# Recents
lines.append("## Recently Modified\n")
for date, filename in recent(flat(tils(root)), 15):
date = datetime.utcfromtimestamp(date).strftime("%Y-%m-%d")
l = link(root, filename)
lines.append(f"- *{date}* : {l}")
# Categories
lines.append("\n## Categories\n")
lines.append("Total `%s` TILs\n" % total(root))
for relative, paths in tils(root):
count = len(paths)
lines.append(f"- [{relative}](#{relative}) *({count})*")
# Links
for relative, paths in tils(root):
lines.append(f"\n### {relative}\n")
for path in paths:
l = link(root, path)
lines.append(f"- {l}")
return lines
output = open(os.path.join(root, "README.md"), 'w')
for line in readme():
output.write(line)
output.write('\n')
output.close()
| mit | Python | |
7fa62ea8374c839f8f188f34505de4d3114e6b56 | Create app.py | Fillll/reddit2telegram,Fillll/reddit2telegram,nsiregar/reddit2telegram,nsiregar/reddit2telegram | channels/r_BetterEveryLoop/app.py | channels/r_BetterEveryLoop/app.py | #encoding:utf-8
from utils import get_url, weighted_random_subreddit
# Subreddit that will be a source of content
subreddit = weighted_random_subreddit({
'BetterEveryLoop': 1.0,
# If we want get content from several subreddits
# please provide here 'subreddit': probability
# 'any_other_subreddit': 0.02
})
# Telegram channel with @reddit2telegram_bot as an admin
t_channel = '@r_BetterEveryLoop'
def send_post(submission, r2t):
what, url, ext = get_url(submission)
# If this func returns:
# False – it means that we will not send
# this submission, let's move to the next.
# True – everything is ok, we send the submission
# None – we do not want to send anything this time,
# let's just sleep.
# Get all data from submission that we need
title = submission.title
link = submission.shortlink
text = '{}\n{}'.format(title, link)
if what == 'text':
# If it is text submission, it is not really funny.
# return r2t.send_text(submission.selftext)
return False
elif what == 'other':
# Also we are not interesting in any other content.
return False
elif what == 'album':
# It is ok if it is an album.
base_url = submission.url
text = '{}\n{}\n\n{}'.format(title, base_url, link)
r2t.send_text(text)
r2t.send_album(url)
return True
elif what in ('gif', 'img'):
# Also it is ok if it is gif or any kind of image.
# Check if content has already appeared in
# out telegram channel.
if r2t.dup_check_and_mark(url) is True:
return False
return r2t.send_gif_img(what, url, ext, text)
else:
return False
| mit | Python | |
7a8c6520beb7dfb086d1c9b5254e7083d694f35c | check for globals in the lib that are not prefixed with toku. addresses #74 | percona/PerconaFT,ollie314/server,davidl-zend/zenddbi,davidl-zend/zenddbi,ottok/PerconaFT,flynn1973/mariadb-aix,percona/PerconaFT,ollie314/server,BohuTANG/ft-index,kuszmaul/PerconaFT-tmp,kuszmaul/PerconaFT,ollie314/server,flynn1973/mariadb-aix,kuszmaul/PerconaFT,ollie314/server,ottok/PerconaFT,davidl-zend/zenddbi,davidl-zend/zenddbi,kuszmaul/PerconaFT-tmp,ollie314/server,flynn1973/mariadb-aix,ollie314/server,flynn1973/mariadb-aix,natsys/mariadb_10.2,percona/PerconaFT,ottok/PerconaFT,natsys/mariadb_10.2,davidl-zend/zenddbi,ollie314/server,ottok/PerconaFT,kuszmaul/PerconaFT-tmp,davidl-zend/zenddbi,ollie314/server,davidl-zend/zenddbi,BohuTANG/ft-index,davidl-zend/zenddbi,kuszmaul/PerconaFT,natsys/mariadb_10.2,davidl-zend/zenddbi,flynn1973/mariadb-aix,BohuTANG/ft-index,natsys/mariadb_10.2,natsys/mariadb_10.2,natsys/mariadb_10.2,ollie314/server,BohuTANG/ft-index,natsys/mariadb_10.2,flynn1973/mariadb-aix,ollie314/server,kuszmaul/PerconaFT,percona/PerconaFT,ollie314/server,flynn1973/mariadb-aix,flynn1973/mariadb-aix,natsys/mariadb_10.2,natsys/mariadb_10.2,kuszmaul/PerconaFT-tmp,slanterns/server,davidl-zend/zenddbi,davidl-zend/zenddbi,flynn1973/mariadb-aix,flynn1973/mariadb-aix,natsys/mariadb_10.2,flynn1973/mariadb-aix,natsys/mariadb_10.2 | src/tokuglobals.py | src/tokuglobals.py | #!/usr/bin/python
import sys
import os
import re
def checkglobals(libname, exceptsymbols, verbose):
badglobals = 0
nmcmd = "nm -g " + libname
f = os.popen(nmcmd)
b = f.readline()
while b != "":
match = re.match("^([0-9a-f]+)\s(.?)\s(.*)$", b)
if match == None:
match = re.match("^\s+(.*)$", b)
if match == None:
print "unknown", b
badglobals = 1
else:
type = match.group(2)
symbol = match.group(3)
if verbose: print type, symbol
match = re.match("^toku_", symbol)
if match == None and not exceptsymbols.has_key(symbol):
print "non toku symbol=", symbol
badglobals = 1
b = f.readline()
f.close()
return badglobals
def main():
verbose = 0
for arg in sys.argv[1:]:
if arg == "-v":
verbose += 1
exceptsymbols = {}
for n in [ "_init", "_fini", "_end", "_edata", "__bss_start" ]:
exceptsymbols[n] = 1
for n in [ "db_env_create", "db_create", "db_strerror", "db_version", "log_compare" ]:
exceptsymbols[n] = 1
return checkglobals("libdb.so", exceptsymbols, verbose)
sys.exit(main())
| agpl-3.0 | Python | |
f709f00e5c4e112774cebbc7ab3f546354290da8 | Create human_readable_time.py | Kunalpod/codewars,Kunalpod/codewars | human_readable_time.py | human_readable_time.py | #Kunal Gautam
#Codewars : @Kunalpod
#Problem name: Human Readable Time
#Problem level: 5 kyu
def make_readable(seconds):
    """Format a duration in seconds as a zero-padded HH:MM:SS string.

    Hours are not capped: values past 99 simply widen the first field,
    matching the original zero-pad-to-two behaviour.
    """
    total_minutes, secs = divmod(seconds, 60)
    hours, minutes = divmod(total_minutes, 60)
    return '{:02d}:{:02d}:{:02d}'.format(hours, minutes, secs)
| mit | Python | |
56e51e48de854c83870549c1cab2893c24116734 | add tests | brennerm/check-mk-web-api | check_mk_web_api/test_WebApi.py | check_mk_web_api/test_WebApi.py | import os
import pytest
from check_mk_web_api import WebApi, CheckMkWebApiException
api = WebApi(
os.environ['CHECK_MK_URL'],
os.environ['CHECK_MK_USER'],
os.environ['CHECK_MK_SECRET']
)
def setup():
api.delete_all_hosts()
def test_add_host():
api.add_host('host00')
assert 'host00' in api.get_all_hosts()
def test_add_duplicate_host():
with pytest.raises(CheckMkWebApiException):
api.add_host('host00')
api.add_host('host00')
def test_edit_host():
api.add_host('host00', ipaddress='192.168.0.100')
assert api.get_host('host00')['attributes']['ipaddress'] == '192.168.0.100'
api.edit_host('host00', ipaddress='192.168.0.101')
assert api.get_host('host00')['attributes']['ipaddress'] == '192.168.0.101'
def test_unset_attribute():
api.add_host('host00', ipaddress='192.168.0.100')
assert api.get_host('host00')['attributes']['ipaddress'] == '192.168.0.100'
api.edit_host('host00', unset_attributes=['ipaddress'])
assert 'ipaddress' not in api.get_host('host00')['attributes']
def test_edit_nonexistent_host():
with pytest.raises(CheckMkWebApiException):
api.edit_host('host00', ipaddress='192.168.0.101')
def test_get_host():
api.add_host('host00')
assert api.get_host('host00')['hostname'] == 'host00'
def test_get_nonexistent_host():
with pytest.raises(CheckMkWebApiException):
api.get_host('host00')
def test_get_all_hosts():
api.add_host('host00')
api.add_host('host01')
all_hosts = api.get_all_hosts()
assert len(all_hosts) == 2
assert 'host00' in all_hosts
assert 'host01' in all_hosts
def test_delete_host():
api.add_host('host00')
assert len(api.get_all_hosts()) == 1
api.delete_host('host00')
assert len(api.get_all_hosts()) == 0
def test_delete_nonexistent_host():
with pytest.raises(CheckMkWebApiException):
api.delete_host('host00')
def test_delete_all_hosts():
api.add_host('host00')
api.add_host('host01')
assert len(api.get_all_hosts()) == 2
api.delete_all_hosts()
assert len(api.get_all_hosts()) == 0
def test_discover_services():
api.add_host('localhost')
api.discover_services('localhost')
def test_discover_services_for_nonexistent_host():
with pytest.raises(CheckMkWebApiException):
api.discover_services('localhost')
| mit | Python | |
bb638c49f0a73289392e63dff7bec6e9b49b8199 | Add a simple wrapper around losetup | robertbreker/ffs,xapi-project/ffs,djs55/ffs,franciozzy/ffs | datapath/raw+file/losetup.py | datapath/raw+file/losetup.py | #!/usr/bin/env python
import xapi
import commands
def log(txt):
print >>sys.stderr, txt
# [run dbg cmd] executes [cmd], throwing a BackendError if exits with
# a non-zero exit code.
def run(dbg, cmd):
code, output = commands.getstatusoutput(cmd)
if code <> 0:
log("%s: %s exitted with code %d: %s" % (dbg, cmd, code, output))
raise (xapi.InternalError("%s exitted with non-zero code %d: %s" % (cmd, code, output)))
return output
# Use Linux "losetup" to create block devices from files
class Losetup:
# [_find dbg path] returns the loop device associated with [path]
def _find(self, dbg, path):
for line in run(dbg, "losetup -a").split("\n"):
line = line.strip()
if line <> "":
bits = line.split()
loop = bits[0][0:-1]
this_path = bits[2][1:-1]
if this_path == path:
return loop
return None
# [add dbg path] creates a new loop device for [path] and returns it
def add(self, dbg, path):
run(dbg, "losetup -f %s" % path)
return self._find(dbg, path)
# [remove dbg path] removes the loop device associated with [path]
def remove(self, dbg, path):
loop = self._find(dbg, path)
run(dbg, "losetup -d %s" % loop)
| lgpl-2.1 | Python | |
79f7a947d9fbf1903957aea0ef459bee53d2ba82 | use subnet_service_types extension from neutron-lib | mahak/neutron,openstack/neutron,mahak/neutron,openstack/neutron,openstack/neutron,mahak/neutron | neutron/extensions/subnet_service_types.py | neutron/extensions/subnet_service_types.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import subnet_service_types as apidef
from neutron_lib.api import extensions
class Subnet_service_types(extensions.APIExtensionDescriptor):
"""Extension class supporting subnet service types."""
api_definition = apidef
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import subnet as subnet_def
from neutron_lib.api import extensions
from neutron_lib.api import validators
from neutron_lib import constants
from neutron_lib import exceptions
import six
import webob.exc
from neutron._i18n import _
# List for service plugins to register their own prefixes
valid_prefixes = []
class InvalidSubnetServiceType(exceptions.InvalidInput):
message = _("Subnet service type %(service_type)s does not correspond "
"to a valid device owner.")
class InvalidInputSubnetServiceType(exceptions.InvalidInput):
message = _("Subnet service type %(service_type)s is not a string.")
def _validate_subnet_service_types(service_types, valid_values=None):
    """Validate that each service type starts with a known device-owner prefix.

    Raises HTTPBadRequest if service_types is not a list,
    InvalidInputSubnetServiceType for non-string entries, and
    InvalidSubnetServiceType for entries with an unrecognized prefix.
    """
    if service_types:
        if not isinstance(service_types, list):
            raise webob.exc.HTTPBadRequest(
                _("Subnet service types must be a list."))

        # Bug fix: build a *new* list.  The original aliased the module-level
        # 'valid_prefixes' registry and then extended it with '+=', mutating
        # (and growing) the global list on every validation call.  It also
        # '+='-ed the DEVICE_OWNER_COMPUTE_PREFIX string directly onto the
        # list, which appends the string one character at a time.
        prefixes = valid_prefixes + list(constants.DEVICE_OWNER_PREFIXES)
        prefixes.append(constants.DEVICE_OWNER_COMPUTE_PREFIX)

        for service_type in service_types:
            if not isinstance(service_type, six.text_type):
                raise InvalidInputSubnetServiceType(service_type=service_type)
            elif not service_type.startswith(tuple(prefixes)):
                raise InvalidSubnetServiceType(service_type=service_type)
validators.add_validator('type:validate_subnet_service_types',
_validate_subnet_service_types)
EXTENDED_ATTRIBUTES_2_0 = {
subnet_def.COLLECTION_NAME: {
'service_types': {
'allow_post': True,
'allow_put': True,
'default': constants.ATTR_NOT_SPECIFIED,
'validate': {'type:validate_subnet_service_types': None},
'is_visible': True,
},
},
}
class Subnet_service_types(extensions.ExtensionDescriptor):
"""Extension class supporting subnet service types."""
@classmethod
def get_name(cls):
return "Subnet service types"
@classmethod
def get_alias(cls):
return "subnet-service-types"
@classmethod
def get_description(cls):
return "Provides ability to set the subnet service_types field"
@classmethod
def get_updated(cls):
return "2016-03-15T18:00:00-00:00"
def get_extended_resources(self, version):
if version == "2.0":
return EXTENDED_ATTRIBUTES_2_0
else:
return {}
| apache-2.0 | Python |
cd8110b0ca4d53477b8331af2317e267da057df6 | add example 'audit' where chat is replicated in a secondary space | bernard357/shellbot | examples/audit.py | examples/audit.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Audit interactions in real-time
In this example we create a shell with one simple command: audit
- command: audit
- provides clear status if this room is currently audited or not
- command: audit on
- starts auditing
- command: audit off
- ensure private interactions
To run this script you have to provide a custom configuration, or set
environment variables instead::
- ``CHAT_ROOM_MODERATORS`` - You have at least your e-mail address
- ``CHAT_TOKEN`` - Received from Cisco Spark when you register your bot
- ``SERVER_URL`` - Public link used by Cisco Spark to reach your server
The token is specific to your run-time, please visit Cisco Spark for
Developers to get more details:
https://developer.ciscospark.com/
For example, if you run this script under Linux or macOs with support from
ngrok for exposing services to the Internet::
export CHAT_ROOM_MODERATORS="alice@acme.com"
export CHAT_TOKEN="<token id from Cisco Spark for Developers>"
export SERVER_URL="http://1a107f21.ngrok.io"
python hello.py
"""
import logging
from multiprocessing import Process, Queue
import os
from shellbot import ShellBot, Context, Command, Speaker
from shellbot.commands import Audit
from shellbot.spaces import SparkSpace
from shellbot.updaters import SpaceUpdater
Context.set_logger()
# create an audit command
#
audit = Audit()
# create a bot and load command
#
bot = ShellBot(command=audit)
# load configuration
#
os.environ['CHAT_ROOM_TITLE'] = 'Audit tutorial'
bot.configure()
# create a chat room
#
bot.bond(reset=True)
# create a mirror chat room
#
mirror = SparkSpace(bot=bot)
mirror.connect()
title = u"{} - {}".format(
mirror.configured_title(), u"Audited content")
mirror.bond(title=title)
# enable auditing
#
audit.arm(updater=SpaceUpdater(space=mirror))
# run the bot
#
bot.run()
# delete chat rooms when the bot is stopped
#
mirror.delete_space()
bot.dispose()
| apache-2.0 | Python | |
0229c868a8f204759c76ecae92cecc4e7a6312b5 | Add link module + Link class stub | thisissoon/Flask-HAL,thisissoon/Flask-HAL | flask_hal/link.py | flask_hal/link.py | #!/usr/bin/env python
# encoding: utf-8
"""
flask_hal.link
==============
Implements the ``HAL`` Link specification.
"""
class Link(object):
    """A HAL link: an 'href' plus the optional attributes defined by the
    HAL specification (name, title, type, deprecation, profile, templated,
    hreflang)."""

    def __init__(
            self,
            href,
            name=None,
            title=None,
            link_type=None,
            deprecation=None,
            profile=None,
            templated=None,
            hreflang=None):
        """Store the link target and its optional HAL attributes.

        Args:
            href: Target URI (or URI template) of the link.
            name: Secondary key for selecting among links of the same relation.
            title: Human-readable label for the link.
            link_type: Media type hint for the target resource.
            deprecation: URL with information about the link's deprecation.
            profile: URI of the target's profile.
            templated: Whether href is a URI template.
            hreflang: Language of the target resource.
        """
        self.href = href
        self.name = name
        self.title = title
        # Bug fix: the original had a stray trailing comma here
        # ('self.link_type = link_type,') which stored a 1-tuple
        # instead of the value itself.
        self.link_type = link_type
        self.deprecation = deprecation
        self.profile = profile
        self.templated = templated
        self.hreflang = hreflang
| unlicense | Python | |
34206ab96b5546e617b1478f59357a6a3f7ed8b6 | Add a custom gunicorn Sync worker implementation which works around some issues which occur when eventlet monkey patching is used with sync worker. | nzlosh/st2,emedvedev/st2,StackStorm/st2,peak6/st2,tonybaloney/st2,lakshmi-kannan/st2,lakshmi-kannan/st2,punalpatel/st2,Plexxi/st2,nzlosh/st2,Plexxi/st2,peak6/st2,lakshmi-kannan/st2,Plexxi/st2,StackStorm/st2,tonybaloney/st2,pixelrebel/st2,StackStorm/st2,punalpatel/st2,nzlosh/st2,punalpatel/st2,pixelrebel/st2,pixelrebel/st2,emedvedev/st2,tonybaloney/st2,nzlosh/st2,emedvedev/st2,peak6/st2,StackStorm/st2,Plexxi/st2 | st2common/st2common/util/gunicorn_workers.py | st2common/st2common/util/gunicorn_workers.py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from gunicorn.workers.sync import SyncWorker
__all__ = [
'EventletSyncWorker'
]
class EventletSyncWorker(SyncWorker):
    """
    Custom sync worker for gunicorn which works with eventlet monkey patching.

    This worker class fixes "AssertionError: do not call blocking functions from
    the mainloop" and some other issues on SIGINT / SIGTERM.
    """

    def handle_quit(self, sig, frame):
        # Delegate to the stock SyncWorker; only intervene when eventlet's
        # hub objects to the shutdown path.
        try:
            return super(EventletSyncWorker, self).handle_quit(sig=sig, frame=frame)
        except AssertionError as e:
            msg = str(e)

            if 'do not call blocking functions from the mainloop' in msg:
                # Workaround for "do not call blocking functions from the mainloop" issue:
                # exit directly, which is the outcome handle_quit was driving at anyway.
                sys.exit(0)

            # Any other AssertionError is unexpected -- propagate it.
            # NOTE(review): 'raise e' resets the traceback; a bare 'raise'
            # would preserve it.
            raise e
| apache-2.0 | Python | |
1650824ef5886a12715bd0004e95ab3bf4dc5dfd | Add new package: phoenix (#18143) | LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack | var/spack/repos/builtin/packages/phoenix/package.py | var/spack/repos/builtin/packages/phoenix/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Phoenix(Package):
    """Apache Phoenix is a SQL skin over HBase delivered as a client-embedded
    JDBC driver targeting low latency queries over HBase data."""

    homepage = "https://github.com"
    git      = "https://github.com/apache/phoenix.git"

    # Only the development branch is packaged; no released versions pinned.
    version('master', branch='master')

    # Phoenix is a Java project built with Maven.
    depends_on('java@8:', type=('build', 'run'))
    depends_on('maven', type='build')

    def install(self, spec, prefix):
        # Build with Maven (skipping the test suite) and copy the whole
        # build tree into the install prefix.
        mvn = which('mvn')
        mvn('package', '-DskipTests')
        install_tree('.', prefix)
| lgpl-2.1 | Python | |
7b688747a02f9ac29d4f119bf142f7ef0ad805e9 | Test for COM collections; IEnumVariant so far only. | denfromufa/comtypes,denfromufa/comtypes,denfromufa/comtypes,denfromufa/comtypes,denfromufa/comtypes | comtypes/test/test_collections.py | comtypes/test/test_collections.py | import unittest
from comtypes.client import CreateObject, GetModule #, Constants
from find_memleak import find_memleak
from ctypes import ArgumentError
class Test(unittest.TestCase):
    def test_IEnumVARIANT(self):
        """Exercise the IEnumVARIANT-based iterator exposed by a COM
        collection: iteration, Reset, Skip, Next(celt), indexing, and the
        (unsupported) slicing case."""
        # The XP firewall manager.
        fwmgr = CreateObject('HNetCfg.FwMgr')
        # apps has a _NewEnum property that implements IEnumVARIANT
        apps = fwmgr.LocalPolicy.CurrentProfile.AuthorizedApplications
        self.failUnlessEqual(apps.Count, len(apps))
        cv = iter(apps)
        names = [p.ProcessImageFileName for p in cv]
        self.failUnlessEqual(len(apps), len(names))
        # The iterator is consumed now:
        self.failUnlessEqual([p.ProcessImageFileName for p in cv], [])
        # But we can reset it:
        cv.Reset()
        self.failUnlessEqual([p.ProcessImageFileName for p in cv], names)
        # Reset, then skip:
        cv.Reset()
        cv.Skip(3)
        self.failUnlessEqual([p.ProcessImageFileName for p in cv], names[3:])
        # Reset, then skip past the end (Skip beyond the collection size):
        cv.Reset()
        cv.Skip(300)
        self.failUnlessEqual([p.ProcessImageFileName for p in cv], names[300:])
        # Hm, do we want to allow random access to the iterator?
        # Should the iterator support __getitem__ ???
        # Repeated reads of the same index must not advance the iterator.
        self.failUnlessEqual(cv[0].ProcessImageFileName, names[0])
        self.failUnlessEqual(cv[0].ProcessImageFileName, names[0])
        self.failUnlessEqual(cv[0].ProcessImageFileName, names[0])
        if len(names) > 1:
            self.failUnlessEqual(cv[1].ProcessImageFileName, names[1])
            self.failUnlessEqual(cv[1].ProcessImageFileName, names[1])
            self.failUnlessEqual(cv[1].ProcessImageFileName, names[1])
        # We can now call Next(celt) with celt != 1, the call always returns a list:
        cv.Reset()
        self.failUnlessEqual(names[:3], [p.ProcessImageFileName for p in cv.Next(3)])
        # calling Next(0) makes no sense, but should work anyway:
        self.failUnlessEqual(cv.Next(0), [])
        cv.Reset()
        # Asking for more items than exist returns only what is available.
        self.failUnlessEqual(len(cv.Next(len(names) * 2)), len(names))
        # slicing is not (yet?) supported
        cv.Reset()
        self.failUnlessRaises(ArgumentError, lambda: cv[:])
if __name__ == "__main__":
unittest.main()
| mit | Python | |
f45848477043d933119180e1229b841372b86240 | Add sensor for Dweet.io | Zyell/home-assistant,oandrew/home-assistant,jaharkes/home-assistant,fbradyirl/home-assistant,tboyce1/home-assistant,happyleavesaoc/home-assistant,miniconfig/home-assistant,ewandor/home-assistant,Zac-HD/home-assistant,leoc/home-assistant,robbiet480/home-assistant,ma314smith/home-assistant,philipbl/home-assistant,morphis/home-assistant,tboyce1/home-assistant,emilhetty/home-assistant,tinloaf/home-assistant,MartinHjelmare/home-assistant,varunr047/homefile,emilhetty/home-assistant,jawilson/home-assistant,w1ll1am23/home-assistant,coteyr/home-assistant,kennedyshead/home-assistant,soldag/home-assistant,open-homeautomation/home-assistant,betrisey/home-assistant,sffjunkie/home-assistant,jnewland/home-assistant,lukas-hetzenecker/home-assistant,MungoRae/home-assistant,betrisey/home-assistant,hmronline/home-assistant,open-homeautomation/home-assistant,instantchow/home-assistant,Zac-HD/home-assistant,leoc/home-assistant,JshWright/home-assistant,sander76/home-assistant,mezz64/home-assistant,srcLurker/home-assistant,MartinHjelmare/home-assistant,titilambert/home-assistant,partofthething/home-assistant,leppa/home-assistant,ma314smith/home-assistant,Smart-Torvy/torvy-home-assistant,deisi/home-assistant,robbiet480/home-assistant,auduny/home-assistant,Julian/home-assistant,dmeulen/home-assistant,alexmogavero/home-assistant,tinloaf/home-assistant,jamespcole/home-assistant,jnewland/home-assistant,JshWright/home-assistant,devdelay/home-assistant,Julian/home-assistant,molobrakos/home-assistant,srcLurker/home-assistant,Teagan42/home-assistant,dmeulen/home-assistant,Theb-1/home-assistant,eagleamon/home-assistant,soldag/home-assistant,hexxter/home-assistant,Zac-HD/home-assistant,molobrakos/home-assistant,home-assistant/home-assistant,luxus/home-assistant,philipbl/home-assistant,varunr047/homefile,joopert/home-assistant,auduny/home-assistant,hexxter/home-assistant,caiuspb/home-assistant,nugget/home-assistant,ewandor/home-assi
stant,ct-23/home-assistant,LinuxChristian/home-assistant,emilhetty/home-assistant,Zyell/home-assistant,morphis/home-assistant,philipbl/home-assistant,nugget/home-assistant,auduny/home-assistant,tboyce021/home-assistant,nnic/home-assistant,florianholzapfel/home-assistant,bdfoster/blumate,rohitranjan1991/home-assistant,sffjunkie/home-assistant,qedi-r/home-assistant,toddeye/home-assistant,Cinntax/home-assistant,DavidLP/home-assistant,aequitas/home-assistant,Cinntax/home-assistant,justyns/home-assistant,ct-23/home-assistant,DavidLP/home-assistant,fbradyirl/home-assistant,leppa/home-assistant,xifle/home-assistant,jamespcole/home-assistant,FreekingDean/home-assistant,eagleamon/home-assistant,dmeulen/home-assistant,persandstrom/home-assistant,instantchow/home-assistant,shaftoe/home-assistant,robjohnson189/home-assistant,postlund/home-assistant,JshWright/home-assistant,aequitas/home-assistant,HydrelioxGitHub/home-assistant,eagleamon/home-assistant,bdfoster/blumate,sfam/home-assistant,sdague/home-assistant,dmeulen/home-assistant,jnewland/home-assistant,JshWright/home-assistant,alexmogavero/home-assistant,ewandor/home-assistant,nnic/home-assistant,Zyell/home-assistant,Duoxilian/home-assistant,morphis/home-assistant,partofthething/home-assistant,jawilson/home-assistant,nkgilley/home-assistant,keerts/home-assistant,ct-23/home-assistant,kyvinh/home-assistant,kyvinh/home-assistant,ct-23/home-assistant,jabesq/home-assistant,happyleavesaoc/home-assistant,florianholzapfel/home-assistant,devdelay/home-assistant,ma314smith/home-assistant,deisi/home-assistant,nnic/home-assistant,adrienbrault/home-assistant,pschmitt/home-assistant,kennedyshead/home-assistant,balloob/home-assistant,mKeRix/home-assistant,GenericStudent/home-assistant,Smart-Torvy/torvy-home-assistant,fbradyirl/home-assistant,turbokongen/home-assistant,xifle/home-assistant,florianholzapfel/home-assistant,DavidLP/home-assistant,Smart-Torvy/torvy-home-assistant,shaftoe/home-assistant,nkgilley/home-assistant,PetePriority/home-
assistant,instantchow/home-assistant,LinuxChristian/home-assistant,emilhetty/home-assistant,FreekingDean/home-assistant,srcLurker/home-assistant,deisi/home-assistant,MartinHjelmare/home-assistant,Zac-HD/home-assistant,adrienbrault/home-assistant,tchellomello/home-assistant,PetePriority/home-assistant,oandrew/home-assistant,oandrew/home-assistant,MungoRae/home-assistant,jaharkes/home-assistant,alexmogavero/home-assistant,hmronline/home-assistant,varunr047/homefile,Danielhiversen/home-assistant,keerts/home-assistant,aoakeson/home-assistant,hmronline/home-assistant,robjohnson189/home-assistant,MungoRae/home-assistant,varunr047/homefile,Julian/home-assistant,rohitranjan1991/home-assistant,aronsky/home-assistant,jabesq/home-assistant,pschmitt/home-assistant,Theb-1/home-assistant,titilambert/home-assistant,mikaelboman/home-assistant,mikaelboman/home-assistant,open-homeautomation/home-assistant,oandrew/home-assistant,sdague/home-assistant,Danielhiversen/home-assistant,turbokongen/home-assistant,tboyce1/home-assistant,nevercast/home-assistant,miniconfig/home-assistant,ma314smith/home-assistant,leoc/home-assistant,leoc/home-assistant,mKeRix/home-assistant,robjohnson189/home-assistant,tinloaf/home-assistant,stefan-jonasson/home-assistant,Duoxilian/home-assistant,LinuxChristian/home-assistant,justyns/home-assistant,nevercast/home-assistant,betrisey/home-assistant,florianholzapfel/home-assistant,sffjunkie/home-assistant,varunr047/homefile,luxus/home-assistant,hexxter/home-assistant,w1ll1am23/home-assistant,aoakeson/home-assistant,tboyce021/home-assistant,tboyce1/home-assistant,mikaelboman/home-assistant,home-assistant/home-assistant,open-homeautomation/home-assistant,deisi/home-assistant,mezz64/home-assistant,mKeRix/home-assistant,aequitas/home-assistant,sffjunkie/home-assistant,joopert/home-assistant,aronsky/home-assistant,GenericStudent/home-assistant,bdfoster/blumate,hmronline/home-assistant,qedi-r/home-assistant,philipbl/home-assistant,hmronline/home-assistant,bdfoster/blum
ate,shaftoe/home-assistant,happyleavesaoc/home-assistant,morphis/home-assistant,hexxter/home-assistant,justyns/home-assistant,Theb-1/home-assistant,keerts/home-assistant,miniconfig/home-assistant,betrisey/home-assistant,balloob/home-assistant,alexmogavero/home-assistant,rohitranjan1991/home-assistant,caiuspb/home-assistant,persandstrom/home-assistant,HydrelioxGitHub/home-assistant,Duoxilian/home-assistant,jaharkes/home-assistant,stefan-jonasson/home-assistant,kyvinh/home-assistant,coteyr/home-assistant,Duoxilian/home-assistant,caiuspb/home-assistant,mikaelboman/home-assistant,sfam/home-assistant,xifle/home-assistant,mKeRix/home-assistant,stefan-jonasson/home-assistant,kyvinh/home-assistant,jaharkes/home-assistant,miniconfig/home-assistant,postlund/home-assistant,coteyr/home-assistant,PetePriority/home-assistant,MungoRae/home-assistant,eagleamon/home-assistant,srcLurker/home-assistant,mikaelboman/home-assistant,HydrelioxGitHub/home-assistant,bdfoster/blumate,jabesq/home-assistant,devdelay/home-assistant,deisi/home-assistant,nugget/home-assistant,ct-23/home-assistant,tchellomello/home-assistant,xifle/home-assistant,sfam/home-assistant,Smart-Torvy/torvy-home-assistant,toddeye/home-assistant,persandstrom/home-assistant,LinuxChristian/home-assistant,stefan-jonasson/home-assistant,shaftoe/home-assistant,devdelay/home-assistant,Teagan42/home-assistant,LinuxChristian/home-assistant,emilhetty/home-assistant,robjohnson189/home-assistant,molobrakos/home-assistant,balloob/home-assistant,sander76/home-assistant,luxus/home-assistant,nevercast/home-assistant,jamespcole/home-assistant,keerts/home-assistant,aoakeson/home-assistant,happyleavesaoc/home-assistant,MungoRae/home-assistant,lukas-hetzenecker/home-assistant,Julian/home-assistant,sffjunkie/home-assistant | homeassistant/components/sensor/dweet.py | homeassistant/components/sensor/dweet.py | """
homeassistant.components.sensor.dweet
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Displays values from Dweet.io..
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.dweet/
"""
from datetime import timedelta
import logging
import homeassistant.util as util
from homeassistant.util import Throttle
from homeassistant.helpers.entity import Entity
from homeassistant.const import STATE_UNKNOWN
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = 'Dweet.io Sensor'
CONF_DEVICE = 'device'
# Return cached results if last scan was less then this time ago
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
# pylint: disable=unused-variable
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Dweet.io sensor platform.

    Requires a 'device' key in the platform config naming the Dweet.io
    thing; verifies the thing exists before adding the entity. Returns
    False on any configuration/lookup failure.
    """
    import dweepy

    device = config.get('device')

    if device is None:
        # CONF_DEVICE is a single key string, not a list; the previous
        # ', '.join(CONF_DEVICE) logged it one character at a time
        # ("d, e, v, i, c, e").
        _LOGGER.error('Not all required config keys present: %s', CONF_DEVICE)
        return False

    try:
        # Probe Dweet.io so a bad thing name fails fast at setup time.
        dweepy.get_latest_dweet_for(device)
    except dweepy.DweepyError:
        _LOGGER.error("Device/thing '%s' could not be found", device)
        return False

    dweet = DweetData(device)

    add_devices([DweetSensor(dweet,
                             config.get('name', DEFAULT_NAME),
                             config.get('variable'),
                             config.get('unit_of_measurement'),
                             config.get('correction_factor', None),
                             config.get('decimal_places', None))])
class DweetSensor(Entity):
    """ Implements a Dweet sensor. """

    def __init__(self, dweet, name, variable, unit_of_measurement, corr_factor,
                 decimal_places):
        # dweet: DweetData instance shared with this entity.
        # variable: key path to extract from the dweet content.
        # corr_factor/decimal_places: optional post-processing of the value.
        self.dweet = dweet
        self._name = name
        self._variable = variable
        self._state = STATE_UNKNOWN
        self._unit_of_measurement = unit_of_measurement
        self._corr_factor = corr_factor
        self._decimal_places = decimal_places
        # Populate data immediately so the first state read has something.
        self.update()

    @property
    def name(self):
        """ The name of the sensor. """
        return self._name

    @property
    def unit_of_measurement(self):
        """ Unit the value is expressed in. """
        return self._unit_of_measurement

    @property
    def state(self):
        """ Returns the state. """
        # NOTE(review): values[0]['content'] assumes dweepy returns a
        # non-empty list of dweets; DweetData.data starts life as an empty
        # dict, which would raise KeyError here if update() never ran —
        # confirm against dweepy's return shape.
        values = self.dweet.data

        if values is not None:
            value = util.extract_value_json(values[0]['content'],
                                            self._variable)
            if self._corr_factor is not None:
                value = float(value) * float(self._corr_factor)
            if self._decimal_places is not None:
                value = round(value, self._decimal_places)
            if self._decimal_places == 0:
                value = int(value)
            return value
        else:
            return STATE_UNKNOWN

    def update(self):
        """ Gets the latest data from REST API. """
        self.dweet.update()
# pylint: disable=too-few-public-methods
class DweetData(object):
    """ Class for handling the data retrieval. """

    def __init__(self, device):
        # device: name of the Dweet.io thing to poll.
        self._device = device
        # Holds the latest dweepy result; None after a failed fetch.
        self.data = dict()

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """ Gets the latest data from Dweet.io. """
        import dweepy

        try:
            self.data = dweepy.get_latest_dweet_for(self._device)
        except dweepy.DweepyError:
            _LOGGER.error("Device '%s' could not be found", self._device)
            self.data = None
| mit | Python | |
c0cc954686c7ababe2d3fba6e563e955e85d9caf | add individual migration script | macarthur-lab/xbrowse,macarthur-lab/xbrowse,ssadedin/seqr,ssadedin/seqr,macarthur-lab/seqr,macarthur-lab/seqr,macarthur-lab/seqr,ssadedin/seqr,macarthur-lab/xbrowse,ssadedin/seqr,macarthur-lab/xbrowse,macarthur-lab/xbrowse,macarthur-lab/seqr,ssadedin/seqr,macarthur-lab/seqr,macarthur-lab/xbrowse | xbrowse_server/base/management/commands/transfer_missing_family_individuals_to_different_project.py | xbrowse_server/base/management/commands/transfer_missing_family_individuals_to_different_project.py | from django.core.management.base import BaseCommand
from seqr.views.apis.pedigree_image_api import update_pedigree_images
from xbrowse_server.base.models import Project as BaseProject, Family as BaseFamily, Individual as BaseIndividual
from xbrowse_server.base.model_utils import create_xbrowse_model, update_xbrowse_model
from collections import defaultdict
"""
This was created to fix a one time issue with transfering families in which all the parents were dropped
(https://github.com/macarthur-lab/seqr-private/issues/335)
If transfers are done using the transfer_families_to_different_project command this problem shouldn't happen, so
this probably shouldn't be used again.
"""
INDIVIDUAL_FIELDS = [
'indiv_id',
'maternal_id',
'paternal_id',
'gender',
'affected',
'nickname',
'other_notes',
'case_review_status',
'case_review_status_last_modified_date',
'case_review_status_last_modified_by',
'case_review_discussion',
'phenotips_data',
]
class Command(BaseCommand):
    """Copy individuals that exist in a family in --from-project but are
    missing from the same-named family in --to-project, then copy over the
    source family's pedigree image."""

    def add_arguments(self, parser):
        parser.add_argument('--from-project', required=True)
        parser.add_argument('--to-project', required=True)

    def handle(self, *args, **options):
        to_project = BaseProject.objects.get(project_id=options['to_project'])
        from_project = BaseProject.objects.get(project_id=options['from_project'])

        # family_id -> {family, individual_ids} for the destination project.
        to_families = {
            f.family_id: {'family': f, 'individual_ids': [i.indiv_id for i in f.individual_set.only('indiv_id').all()]}
            for f in BaseFamily.objects.filter(project=to_project).only('family_id').prefetch_related('individual_set')
        }
        # Only source families whose family_id also exists in the destination.
        from_families = {
            f.family_id: {'family': f, 'individuals': f.individual_set.all()}
            for f in BaseFamily.objects.filter(
                project=from_project, family_id__in=to_families.keys()
            ).only('family_id').prefetch_related('individual_set')
        }

        # Per family: source individuals absent from the destination family.
        missing_to_family_individuals = {
            family_id: [i for i in from_families[family_id]['individuals'] if i.indiv_id not in family_dict['individual_ids']]
            for family_id, family_dict in to_families.items()
        }

        # Histogram of how many individuals are missing per family, plus the
        # flat list to create and the destination families to touch.
        missing_individual_counts = defaultdict(int)
        missing_individuals = []
        updated_families = set()
        for family_id, individuals in missing_to_family_individuals.items():
            missing_individual_counts[len(individuals)] += 1
            missing_individuals += individuals
            updated_families.add(to_families[family_id]['family'])

        print('Transferring individuals from {} to {}:'.format(from_project.project_name, to_project.project_name))
        for individual in missing_individuals:
            # Clone only the whitelisted fields into the destination family.
            create_xbrowse_model(
                BaseIndividual,
                project=to_project,
                family=to_families[individual.family.family_id]['family'],
                **{field: getattr(individual, field) for field in INDIVIDUAL_FIELDS}
            )

        # Reuse the source family's pedigree image rather than regenerating.
        for family in updated_families:
            update_xbrowse_model(family, pedigree_image=from_families[family.family_id]['family'].pedigree_image)

        for num_individuals, num_families in missing_individual_counts.items():
            print('Added {} individuals to {} families'.format(num_individuals, num_families))

        print("Done.")
| agpl-3.0 | Python | |
e055cc963eb5eb05c2685ae75d1614c81ef3c59a | load qld data into elasticsearch | uq-eresearch/eheritage,uq-eresearch/eheritage,uq-eresearch/eheritage | injest/search_index.py | injest/search_index.py | # -*- coding: utf-8 -*-
"""
This module contains the code for putting heritage places into a search index.
"""
from elasticsearch import Elasticsearch
ES_HOST = "192.168.10.200"
es = Elasticsearch(ES_HOST)
def add_heritage_place(place):
    """Add a heritage place to the search index

    :param place: Dictionary defining a heritage place.
    :returns: True on success, False if indexing raised AttributeError.
    """
    try:
        # assumes place carries 'state' and 'id' keys — TODO confirm the
        # upstream parser always supplies both.
        id = "%s-%s" % (place['state'], place['id'])
        result = es.index(index="eheritage", doc_type='heritage_place', id=id, body=place)
        print result
    except AttributeError as e:
        # Best-effort ingest: log the failing document and keep going.
        print e
        print place
        return False
    return True
if __name__ == "__main__":
from qld import parse_ahpi_xml
hp_filename = "/mnt/groups/maenad/activities/e-Heritage/QLD/heritage_list.xml"
result = parse_ahpi_xml(hp_filename, add_heritage_place)
print result | bsd-3-clause | Python | |
7cc0f1cc461d6ccd4c191ec243184deab7f1028f | Create strings.py | delimitry/python_utils | strings/strings.py | strings/strings.py | mit | Python | ||
6917e3b8689ace77bda912f82163f0f91a1a881b | Solve conflict in DB schema for circle CI | sonicyang/chiphub,sonicyang/chiphub,sonicyang/chiphub | digikey/migrations/0022_merge.py | digikey/migrations/0022_merge.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Merge migration: joins two diverged branches of the digikey app's
    # migration graph. It declares both tips as dependencies and performs
    # no schema operations of its own.

    dependencies = [
        ('digikey', '0019_merge'),
        ('digikey', '0021_auto_20151125_1353'),
    ]

    operations = [
    ]
| mit | Python | |
2067c07cf1dc9029bf3cea372d7cc70297c7743e | Add jsonable_boundaries module | sprin/jsonable-boundaries | jsonable_boundaries.py | jsonable_boundaries.py | """
`jsonable` is just a cute shortening of JSON-serializable,
but it also means that the object is pure data, and does not
drag any (publicly-accessible) behavior with it.
Anything that can be serialized by the default simplejson
http://simplejson.readthedocs.org/en/latest/#encoders-and-decoders
serializer would count. But the slightly more flexible serializer
given in `jsonable_handler` allows datetimes as well as objects that
implement a `to_json()` method.
Functions which can consume a single argument that can be turned into JSON
are `jsonable consumers`. JSONable consumers will
declare the expected format of the jsonable they can consume
using JSON Schema.
JSONable boundaries are functions or methods signatures which serve as the
interface between different sub-systems in an application. If we seek to have
two components loosely-coupled, then a good guarantee of this is if
they communicate only with values, and not with complex objects. In other words,
if we can take the argument to a function, serialize and then immediately
deserialize it before handing it back to to the function, then it is said
to have a JSONable signature if it still works.
One important practical advantage of this is that it enables large
systems to be more easily decomposed into components that communicate with JSON
messages through a queue.
"""
import functools
import itertools
import collections
import jsonschema
import simplejson as json
VALIDATE = True
def jsonable_handler(obj):
    """``default=`` hook for json.dumps: datetimes/dates via isoformat(),
    objects exposing to_json() via that method, other iterables as lists.
    Anything else is rejected with TypeError."""
    if hasattr(obj, 'isoformat'):
        return obj.isoformat()
    if hasattr(obj, 'to_json'):
        return obj.to_json()
    if not isinstance(obj, collections.Iterable):
        raise TypeError(
            'Object of type {0} with value of {1} is not JSON serializable'
            .format(type(obj), repr(obj))
        )
    return list(obj)
def schema(json_schema):
    """Decorator factory: attach *json_schema* to the decorated consumer
    as its ``schema`` attribute, leaving the function otherwise untouched."""
    def attach(func):
        func.schema = json_schema
        return func
    return attach
def serial_deserial(arg):
    """Round-trip *arg* through JSON (serialize with `jsonable_handler`,
    then deserialize), yielding its plain-data equivalent."""
    return json.loads(json.dumps(arg, default=jsonable_handler))
def validate(do_validation):
    """Decorator factory: when *do_validation* is true, round-trip the
    argument through JSON and check it against the consumer's declared
    ``schema`` before invoking the consumer."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(arg):
            if do_validation:
                jsonschema.validate(serial_deserial(arg), func.schema)
            return func(arg)
        return wrapper
    return decorator
## EXAMPLE VALID CONSUMERS
@validate(VALIDATE)
@schema({"type" : "number"})
def number_consumer(jsonable):
    # Valid consumer: doubles a schema-validated number.
    return jsonable * 2

@validate(VALIDATE)
@schema({"type" : "number"})
def number_consumer_no_return(jsonable):
    # Valid consumer that only performs side effects (returns None).
    # do side effects
    pass

@validate(VALIDATE)
@schema({
    "type": "array",
    "items": { "type": "number" },
})
def seq_consumer(jsonable):
    # Valid consumer: doubles each element of a numeric array.
    return [x*2 for x in jsonable]

## EXAMPLE INVALID CONSUMERS

@validate(VALIDATE)
@schema({"type" : "number"})
def number_consumer_bad_return(jsonable):
    # Return a non-JSON-serializable, in this case the int function.
    return int
## TESTS
from nose.tools import raises
def assert_ijsonable(f, input, expected_output):
    """Assert that the JSONable interface is respected: serialization
    of input is effectively idempotent with respect to expected output.
    Additionally, the output must be a JSONable object.
    """
    assert f(input) == expected_output
    assert f(serial_deserial(input)) == expected_output
    if expected_output:
        # Will raise if the output is not itself JSON-serializable.
        json.dumps(expected_output)

def test_jsonable_handler_nested_iterables():
    # Python 2 only: itertools.imap/xrange. Lazy nested iterables must be
    # materialized recursively by the handler.
    nested_iterables = itertools.imap(lambda x: xrange(x+1), xrange(3))
    json_string = json.dumps(nested_iterables, default=jsonable_handler)
    deserialized = json.loads(json_string)
    assert deserialized == [[0], [0, 1], [0, 1, 2]]

def test_valid_number():
    assert_ijsonable(number_consumer, 2, 4)

@raises(jsonschema.ValidationError)
def test_invalid_string():
    number_consumer('foo')

@raises(AssertionError)
def test_non_idempotent_serialization():
    # fake_number serializes as an int but multiplies differently, so the
    # pre- and post-round-trip results diverge.
    class fake_number(int):
        def __mul__(self, x):
            return 2
    obj = fake_number(2)
    # This lack of idempotent serialization/deserialization should violate the
    # JSONable interface, and therefore raise an AssertionError.
    # This is how you guarantee your objects that use custom serialization
    # still satisfy the JSONable requirement with respect to the consumer.
    assert_ijsonable(number_consumer, obj, 4)

def test_no_return():
    assert_ijsonable(number_consumer_no_return, 2, None)

@raises(TypeError)
def test_bad_return():
    assert_ijsonable(number_consumer_bad_return, 2, int)

def test_valid_array():
    assert_ijsonable(seq_consumer, [1, 2], [2, 4])

@raises(jsonschema.ValidationError)
def test_invalid_array():
    seq_consumer(['foo'])

def test_valid_iterable():
    # Iterables of numbers must validate against the array schema.
    assert_ijsonable(seq_consumer, xrange(1,3), [2, 4])

@raises(jsonschema.ValidationError)
def test_invalid_iterable():
    seq_consumer(itertools.imap(lambda x: 'foo', xrange(1,3)))
| mit | Python | |
875ca73c4cab4c3d036f824c3942589f0ced935f | Test password update | m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps | tests/services/authentication/test_update_password_hash.py | tests/services/authentication/test_update_password_hash.py | """
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.authentication.password.models import Credential
from byceps.services.authentication.password import service as password_service
from byceps.services.user import event_service
from tests.base import AbstractAppTestCase
class UpdatePasswordHashTest(AbstractAppTestCase):

    def test_update_password_hash(self):
        """Updating a user's password must replace the stored hash and
        record a single 'password-updated' event naming the initiator."""
        orga_id = self.create_user('Party_Orga').id

        user = self.create_user('Party_User')
        user_id = user.id

        password_service.create_password_hash(user_id, 'InitialPassw0rd')
        password_hash_before = get_password_hash(user_id)
        assert password_hash_before is not None

        events_before = event_service.get_events_for_user(user_id)
        assert len(events_before) == 0

        # -------------------------------- #

        password_service.update_password_hash(user_id, 'ReplacementPassw0rd', orga_id)

        # -------------------------------- #

        password_hash_after = get_password_hash(user_id)
        assert password_hash_after is not None
        assert password_hash_after != password_hash_before

        events_after = event_service.get_events_for_user(user_id)
        assert len(events_after) == 1

        password_updated_event = events_after[0]
        assert password_updated_event.event_type == 'password-updated'
        assert password_updated_event.data == {
            'initiator_id': str(orga_id),
        }
def get_password_hash(user_id):
    """Return the password hash stored for *user_id*."""
    return Credential.query.get(user_id).password_hash
| bsd-3-clause | Python | |
4b040fd365c2a8609b2943525b87c2e27924eb19 | add novaclient sample | li-ma/homework,li-ma/homework,li-ma/homework | test-novaclient.py | test-novaclient.py | from novaclient.v1_1.client import Client
# Python 2 smoke-test script: talk to a nova endpoint with a pre-issued
# token, bypassing the keystone service catalog. Credentials/IDs below are
# hardcoded for a local test environment.
context = {
    'user': 'admin',
    'auth_token': '22b07d939fa544769bf625753ecaec18',
    'tenant': '7dfd3b6a98664f7cb78808f57b7984da',
}
# nova_compute_url
url = 'http://192.168.242.10:8774/v2/7dfd3b6a98664f7cb78808f57b7984da'
PROXY_AUTH_URL = 'http://192.168.242.10:5000/v2.0'
# bypass_url makes the client hit this endpoint directly; the token is
# passed in the api_key slot instead of a password.
client = Client(username=context['user'],
                api_key=context['auth_token'],
                project_id=context['tenant'],
                bypass_url=url,
                auth_url=PROXY_AUTH_URL)
# Force reuse of the existing token/endpoint so no re-authentication occurs.
client.client.auth_token = context['auth_token']
client.client.management_url = url
print dir(client)
print client.flavors.list()
| apache-2.0 | Python | |
1d355343700bd74ebc2944053e5086a22c1c0b5c | add post-commit.py | arssivka/rrc,arssivka/rrc,arssivka/rrc,arssivka/rrc | git/post-commit.py | git/post-commit.py | #!/usr/bin/python
import os
import re
import subprocess as sp
def up_version(ver, idx):
    """Increment component *idx* of the 4-part version list *ver* (in
    place) and return a new list with all more-minor components zeroed."""
    ver[idx] += 1
    kept = ver[:idx + 1]
    return kept + [0] * (3 - idx)
# Commit-message directives mapped to which version component they bump
# (index 0 = major ... index 3 = tweak).
commands = {
    '#vtwe++': lambda ver: up_version(ver, 3),
    '#vpat++': lambda ver: up_version(ver, 2),
    '#vmin++': lambda ver: up_version(ver, 1),
    '#vmaj++': lambda ver: up_version(ver, 0),
}
# Subject line of the commit that just landed.
message = sp.check_output(['git', 'log', '-1', 'HEAD', '--pretty=format:%s'])
# Pick the bump function for the first directive found in the message.
get_new_version = None
for cmd, func in commands.items():
    if message.find(cmd) != -1:
        get_new_version = func
        break
# No directive in the message -> nothing to do.
if get_new_version is None:
    exit()
root_dir = sp.check_output(['git', 'rev-parse', '--show-toplevel']).strip()
cmake_path = os.path.join(root_dir, 'CMakeLists.txt')
with open(cmake_path) as cmake_file:
    cmake_lines = cmake_file.readlines()
# Find the project(... VERSION a.b.c.d ...) line and pull out the version.
version_str = ''
project_line_idx = 0
for idx, line in enumerate(cmake_lines):
    project_found = re.search('^project\(.*\)', line)
    if project_found is None:
        continue
    project_line = project_found.group(0)
    version_found = re.search('VERSION \d+.\d+.\d+.\d+', project_line)
    if version_found is None:
        continue
    version_str = version_found.group(0).replace('VERSION ', '')
    project_line_idx = idx
    break
if version_str:
    # Bump the detected version and rewrite CMakeLists.txt.
    version = [int(v) for v in version_str.split('.')]
    new_version = [str(v) for v in get_new_version(version)]
    # str.replace returns a new string; the original code discarded the
    # result, so the file was rewritten without the version change.
    cmake_lines[project_line_idx] = cmake_lines[project_line_idx].replace(
        version_str, '.'.join(new_version))
    with open(cmake_path, 'w') as cmake_file:
        cmake_file.write(''.join(cmake_lines))
2b5500154b8be9ee7760bb1cb883775a604a2d8f | Create appleproduct_checker.py | darrenfu/bigdata | tools/scripts/python/appleproduct_checker.py | tools/scripts/python/appleproduct_checker.py | #!/usr/bin/python
import sys
import os
class IParser:
    """Base parser: stores the input document and output file handle for
    subclasses to use via self._doc / self._out_file."""

    def __init__(self):
        # Original was `def __init__():` (missing self), which made
        # IParser() raise TypeError on instantiation.
        pass

    def parse(self, doc, out_file):
        """Remember the document and output sink for later use."""
        self._doc = doc
        self._out_file = out_file
class JSONParser(IParser):
    """Parses Apple's pickup-availability JSON response (Python 2 code)."""

    def __init__(self):
        pass

    def parse(self, doc, out_file, model):
        """Extract nearby stores (< 100 distance units) with the given
        *model* available; append each as a CSV-ish line to *out_file* and
        return {'productTitle': ..., 'availableStores': [...]}."""
        import json
        out_arr = []
        IParser.parse(self, doc, out_file)
        jobj = json.load(doc)
        nodes = jobj['body']['stores']
        model_txt = None
        for node in nodes:
            # Capture the product title once, from the first store entry.
            if model_txt == None:
                model_txt = node['partsAvailability'][model]['storePickupProductTitle']
            # NOTE(review): 'storedistance' is lowercase while
            # 'storeDistanceWithUnit' is camelCase — confirm the key name
            # against the actual API payload.
            if node['partsAvailability'][model]['pickupDisplay'] == 'available' and node['storedistance'] < 100:
                line = "%s, %s, %s, %s, %s, %s" % (node['address']['address'], node['address']['address2'], node['city'], node['state'], node['address']['postalCode'], node['storeDistanceWithUnit'])
                out_arr.append(line)
                print line
                self._out_file.write(line)
                self._out_file.write("\n")
        return {"productTitle":model_txt, "availableStores":out_arr}
def send_request(url):
    """GET *url* and return the open urllib2 response object (Python 2).
    Caller is responsible for closing the response."""
    import urllib2
    print "Downloading %s" % url
    res = urllib2.urlopen(url)
    return res
# main flow starts from here
def check_iphone(model, *args):
    """Check in-store pickup availability for Apple part number *model*
    near a hardcoded zipcode; log available stores to a file and send an
    SMS via Twilio when any store has stock (Python 2)."""
    zipcode = '98033'
    uri = 'https://www.apple.com/shop/retail/pickup-message?pl=true&cppart=UNLOCKED/US&parts.0=%s&location=%s'
    url = uri % (model, zipcode)
    res = send_request(url)
    # NOTE(review): len(args) is never negative, so this usage branch is
    # dead code — condition probably meant `== 0` or similar.
    if len(args) < 0:
        print('Usage: .py [zkhost:port=localhost:2181]')
        sys.exit(1)
    outfile = "/tmp/applestore_list.dat"
    print "Generating available appstore list to: " + outfile
    out = open(outfile, 'a')
    # Download interesting grid batch ids via QMS service
    parser = JSONParser()
    jresult = parser.parse(res, out, model)
    out.close()
    res.close()
    if len(jresult['availableStores']) > 0:
        print("Detect an available iphone! SMS ...")
        from twilio.rest import Client
        # Twilio credentials/numbers are placeholders ('...'); fill in
        # before use — do not commit real secrets.
        account_sid='ACfdc83d84cb014c7a7acb1a5fbed5dfbf'
        auth_token='...'
        recipient_mobile='+1...'
        sender_mobile='+1...'
        msg_body = "%s\n\n%s" % (jresult['productTitle'], ";\n".join(jresult['availableStores']))
        client = Client(account_sid, auth_token)
        client.api.account.messages.create(to=recipient_mobile, from_=sender_mobile, body=msg_body)
if __name__ == "__main__":
product_code_dict = {
'iphone8p 64gb gray': 'MQ8D2LL/A',
'iphone8p 64gb silver': 'MQ8E2LL/A',
'iphone8p 64gb gold': 'MQ8F2LL/A',
'iphone8p 256gb gray': 'MQ8G2LL/A',
'iphone8p 256gb silver': 'MQ8H2LL/A',
'iphone8p 256gb gold': 'MQ8J2LL/A',
'iphone8 64gb gray': 'MQ6K2LL/A',
'iphone8 64gb silver': 'MQ6L2LL/A',
'iphone8 64gb gold': 'MQ6M2LL/A',
'iphone8 256gb gray': 'MQ7F2LL/A',
'iphone8 256gb gilver': 'MQ7G2LL/A',
'iphone8 256gb gold': 'MQ7H2LL/A',
}
check_iphone(product_code_dict['iphone8p 64gb gray'], *sys.argv)
#check_iphone(product_code_dict['iphone8p 64gb silver'], *sys.argv)
| apache-2.0 | Python | |
b778e3438909d290ce88bbb0d187aa793652ff5e | Create euler2.py | Dovydas-Kr/dt211c-cloud-repo | euler2.py | euler2.py | mit | Python | ||
fe7e82257703e3c5773d3e321435cb4443eed46d | Add gunicorn configuration file | hombit/olgart,hombit/olgart,hombit/olgart,hombit/olgart | gunicorn_config.py | gunicorn_config.py | import multiprocessing, os.path
# Gunicorn configuration (each module-level name is a gunicorn setting).
command = '/usr/local/bin/gunicorn'
# Make the directory containing this file importable by the workers.
pythonpath = os.path.dirname( os.path.abspath(__file__) )
# Loopback only — a reverse proxy is expected in front.
bind = '127.0.0.1:9000'
# Common sizing heuristic: 2 * CPU cores + 1 workers.
workers = multiprocessing.cpu_count() * 2 + 1
# Drop privileges to an unprivileged user.
user = 'nobody'
| mit | Python | |
91370b54e4a974fe51f563edda75e2738d3fb00c | Add noise sensor | pvizeli/ha-ffmpeg | haffmpeg/sensor.py | haffmpeg/sensor.py | """For HA camera components."""
from .core import HAFFmpegQue
class SensorNoise(HAFFmpegQue):
    """Implement a noise detection on an audio stream."""

    def __init__(self, ffmpeg_bin):
        """Initialize the noise sensor with default thresholds."""
        HAFFmpegQue.__init__(self, ffmpeg_bin=ffmpeg_bin)
        self._peak = -30
        self._time_period = 2

    # The original applied @property to two-argument setter bodies, which
    # made any access to .peak / .time_period raise TypeError. Fixed to
    # proper getter/setter pairs.
    @property
    def peak(self):
        """Peak volume threshold in dB."""
        return self._peak

    @peak.setter
    def peak(self, val):
        """Set the peak volume threshold in dB."""
        self._peak = val

    @property
    def time_period(self):
        """Detection time window in seconds."""
        return self._time_period

    @time_period.setter
    def time_period(self, val):
        """Set the detection time window in seconds."""
        self._time_period = val

    def open_sensor(self, input_source, output_dest=None, extra_cmd=None):
        """Open the FFmpeg process for the stream.

        NOTE(review): the command below emits an mjpeg/mpjpeg *video*
        stream and ignores _peak/_time_period — it looks copied from the
        camera component; confirm the intended audio filter chain.
        """
        command = [
            "-i",
            input_source,
            "-vn",
            "-c:v",
            "mjpeg",
            "-f",
            "mpjpeg"
        ]

        self.open(cmd=command, output=output_dest, extra_cmd=extra_cmd)
| bsd-3-clause | Python | |
65f05e93edc2e9a7033edb8d54bd25b04c32d084 | test script.. | pymad/cpymad,pymad/jpymad,pymad/jpymad,pymad/jpymad,pymad/cpymad | test_elemaccess.py | test_elemaccess.py | from cern import cpymad
# Python 2 smoke-test: load the LHC model via cpymad and exercise
# sequence/element access.
lhc=cpymad.model('lhc')
print lhc.get_sequences()
all_elements=lhc.get_element_list('lhcb1')
# Print details for an arbitrary element (the fourth) of the lhcb1 sequence.
print lhc.get_element('lhcb1',all_elements[3])
| apache-2.0 | Python | |
10ac7ea7a67b7a15146ae7c9c0ba9ba74876df81 | Add a testcase for RubyGems | lilydjwg/nvchecker | tests/test_gems.py | tests/test_gems.py | from tests.helper import ExternalVersionTestCase
class RubyGemsTest(ExternalVersionTestCase):
    def test_gems(self):
        """The 'gems' source should resolve the 'example' gem's latest
        version (pinned fixture value 1.0.2)."""
        self.assertEqual(self.sync_get_version("example", {"gems": None}), "1.0.2")
| mit | Python | |
e9115cb3c52386dc7b74b4d06070b44697725811 | Add manage.py | glegoux/django-settings-startup | tests/manage.py | tests/manage.py | import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| mit | Python | |
9eaf735bcdba9f3fd8a51219d0eebb61f8b2f166 | add 20.6.7 | LLNL/spack,tmerrick1/spack,matthiasdiener/spack,tmerrick1/spack,EmreAtes/spack,lgarren/spack,EmreAtes/spack,iulian787/spack,lgarren/spack,TheTimmy/spack,LLNL/spack,mfherbst/spack,mfherbst/spack,TheTimmy/spack,iulian787/spack,tmerrick1/spack,LLNL/spack,matthiasdiener/spack,matthiasdiener/spack,krafczyk/spack,mfherbst/spack,skosukhin/spack,skosukhin/spack,LLNL/spack,LLNL/spack,iulian787/spack,matthiasdiener/spack,lgarren/spack,krafczyk/spack,krafczyk/spack,EmreAtes/spack,matthiasdiener/spack,skosukhin/spack,iulian787/spack,tmerrick1/spack,tmerrick1/spack,skosukhin/spack,lgarren/spack,iulian787/spack,mfherbst/spack,mfherbst/spack,lgarren/spack,krafczyk/spack,krafczyk/spack,skosukhin/spack,EmreAtes/spack,TheTimmy/spack,TheTimmy/spack,EmreAtes/spack,TheTimmy/spack | var/spack/repos/builtin/packages/py-setuptools/package.py | var/spack/repos/builtin/packages/py-setuptools/package.py | from spack import *
class PySetuptools(Package):
"""Easily download, build, install, upgrade, and uninstall Python packages."""
homepage = "https://pypi.python.org/pypi/setuptools"
url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz"
version('20.6.7', '45d6110f3ec14924e44c33411db64fe6')
version('20.5', 'fadc1e1123ddbe31006e5e43e927362b')
version('19.2', '78353b1f80375ca5e088f4b4627ffe03')
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
version('16.0', '0ace0b96233516fc5f7c857d086aa3ad')
version('11.3.1', '01f69212e019a2420c1693fb43593930')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)
| from spack import *
class PySetuptools(Package):
"""Easily download, build, install, upgrade, and uninstall Python packages."""
homepage = "https://pypi.python.org/pypi/setuptools"
url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz"
version('20.5', 'fadc1e1123ddbe31006e5e43e927362b')
version('19.2', '78353b1f80375ca5e088f4b4627ffe03')
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
version('16.0', '0ace0b96233516fc5f7c857d086aa3ad')
version('11.3.1', '01f69212e019a2420c1693fb43593930')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)
| lgpl-2.1 | Python |
bcc48ecba38450bd77b5dcce7ae60d2966cbd7d2 | Migrate object_sections to relationships | kr41/ggrc-core,uskudnik/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,uskudnik/ggrc-core,hasanalom/ggrc-core,uskudnik/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,hyperNURb/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,hasanalom/ggrc-core,prasannav7/ggrc-core,josthkko/ggrc-core,jmakov/ggrc-core,NejcZupec/ggrc-core,uskudnik/ggrc-core,VinnieJohns/ggrc-core,uskudnik/ggrc-core,edofic/ggrc-core,edofic/ggrc-core,kr41/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,edofic/ggrc-core,kr41/ggrc-core,hyperNURb/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,hyperNURb/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,hasanalom/ggrc-core,prasannav7/ggrc-core,hyperNURb/ggrc-core,plamut/ggrc-core,jmakov/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,hyperNURb/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,j0gurt/ggrc-core,hasanalom/ggrc-core | src/ggrc/migrations/versions/20150521150652_23880aa43323_migrate_object_sections_to_relationships.py | src/ggrc/migrations/versions/20150521150652_23880aa43323_migrate_object_sections_to_relationships.py | # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
"""Migrate object_sections to relationships
Revision ID: 23880aa43323
Revises: 324d461206
Create Date: 2015-05-21 15:06:52.172183
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '23880aa43323'
down_revision = '324d461206'
def upgrade():
sql = """
REPLACE INTO relationships (
modified_by_id, created_at, updated_at, source_id,
source_type, destination_id, destination_type, context_id
)
SELECT os.modified_by_id, os.created_at, os.updated_at,
os.section_id as source_id, s.type as source_type,
os.sectionable_id as destination_id,
os.sectionable_type as destination_type, os.context_id
FROM object_sections as os JOIN sections as s ON os.section_id = s.id;
"""
op.execute(sql)
op.drop_constraint(
'object_sections_ibfk_1', 'object_sections', type_='foreignkey')
op.drop_constraint(
'object_sections_ibfk_2', 'object_sections', type_='foreignkey')
def downgrade():
op.create_foreign_key(
'object_sections_ibfk_1',
'object_sections',
'contexts',
['context_id'],
['id']
)
op.create_foreign_key(
'object_sections_ibfk_2',
'object_sections',
'sections',
['section_id'],
['id']
)
| apache-2.0 | Python | |
2f0bf45ec747778d38801892e97d5a902443841d | Define "Assessment updated" notification type | VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core | src/ggrc/migrations/versions/20170207134238_562ec606ff7c_add_assessment_updated_notification_type.py | src/ggrc/migrations/versions/20170207134238_562ec606ff7c_add_assessment_updated_notification_type.py | # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Add Assessment updated notification type
Create Date: 2017-02-07 13:42:38.921370
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from datetime import datetime
from alembic import op
# revision identifiers, used by Alembic.
revision = '562ec606ff7c'
down_revision = '6e9a3ed063d2'
def upgrade():
"""Add new notification type: Assessment updated."""
description = (
"Send an Assessment updated notification to "
"Assessors, Creators and Verifiers."
)
now = datetime.utcnow().strftime("%Y-%m-%d %H-%M-%S")
sql = """
INSERT INTO notification_types (
name,
description,
template,
advance_notice,
instant,
created_at,
updated_at
)
VALUES (
"assessment_updated",
"{description}",
"assessment_updated",
0,
FALSE,
'{now}',
'{now}'
)
""".format(description=description, now=now)
op.execute(sql)
def downgrade():
"""Remove the "Assessment updated" notification type.
Also delete all notifications of that type.
"""
sql = """
DELETE n
FROM notifications AS n
LEFT JOIN notification_types AS nt ON
n.notification_type_id = nt.id
WHERE
nt.name = "assessment_updated"
"""
op.execute(sql)
sql = """
DELETE
FROM notification_types
WHERE name = "assessment_updated"
"""
op.execute(sql)
| apache-2.0 | Python | |
3ec80d1a0de750af7831cca5a29294558600e88f | Add python_bootstrap.py | GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground,GoogleCloudPlatform/repo-automation-playground | xunit-autolabeler-v2/ast_parser/python/python_bootstrap.py | xunit-autolabeler-v2/ast_parser/python/python_bootstrap.py | # Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from python import invoker
if len(sys.argv) != 2:
raise ValueError('Please specify exactly one [root] directory.')
root_dir = sys.argv[1]
output_file = os.path.join(root_dir, 'repo.json')
json_out = invoker.get_json_for_dir(root_dir)
with open(output_file, 'w') as f:
f.write(json_out + '\n')
print(f'JSON written to: {output_file}')
print('Do not move this file!')
| apache-2.0 | Python | |
3ccc8357d5d5466acc97f1b065f500e9b096eeb7 | add exception for cursor | havt/odoo,xujb/odoo,shaufi10/odoo,omprakasha/odoo,lgscofield/odoo,dariemp/odoo,colinnewell/odoo,alqfahad/odoo,Eric-Zhong/odoo,Gitlab11/odoo,fdvarela/odoo8,Gitlab11/odoo,ShineFan/odoo,hanicker/odoo,abdellatifkarroum/odoo,papouso/odoo,doomsterinc/odoo,dgzurita/odoo,nagyistoce/odoo-dev-odoo,SerpentCS/odoo,idncom/odoo,mmbtba/odoo,odoousers2014/odoo,ccomb/OpenUpgrade,xujb/odoo,xujb/odoo,osvalr/odoo,codekaki/odoo,gdgellatly/OCB1,colinnewell/odoo,abdellatifkarroum/odoo,shaufi/odoo,lightcn/odoo,xzYue/odoo,ramitalat/odoo,sadleader/odoo,nhomar/odoo,lombritz/odoo,shaufi10/odoo,Nowheresly/odoo,ujjwalwahi/odoo,Drooids/odoo,savoirfairelinux/odoo,Kilhog/odoo,fgesora/odoo,odoo-turkiye/odoo,frouty/odoogoeen,ujjwalwahi/odoo,lgscofield/odoo,vrenaville/ngo-addons-backport,fdvarela/odoo8,srsman/odoo,JonathanStein/odoo,aviciimaxwell/odoo,arthru/OpenUpgrade,ygol/odoo,leorochael/odoo,jeasoft/odoo,abstract-open-solutions/OCB,xzYue/odoo,podemos-info/odoo,lombritz/odoo,pedrobaeza/odoo,numerigraphe/odoo,mszewczy/odoo,Eric-Zhong/odoo,mszewczy/odoo,ramadhane/odoo,lsinfo/odoo,charbeljc/OCB,ojengwa/odoo,feroda/odoo,incaser/odoo-odoo,MarcosCommunity/odoo,shingonoide/odoo,Elico-Corp/odoo_OCB,florian-dacosta/OpenUpgrade,minhtuancn/odoo,andreparames/odoo,0k/OpenUpgrade,ihsanudin/odoo,VitalPet/odoo,rahuldhote/odoo,Adel-Magebinary/odoo,tinkerthaler/odoo,fevxie/odoo,odoousers2014/odoo,slevenhagen/odoo-npg,fossoult/odoo,Codefans-fan/odoo,sinbazhou/odoo,hopeall/odoo,jusdng/odoo,shaufi/odoo,ApuliaSoftware/odoo,n0m4dz/odoo,SAM-IT-SA/odoo,thanhacun/odoo,OpenUpgrade/OpenUpgrade,slevenhagen/odoo,christophlsa/odoo,lgscofield/odoo,Endika/OpenUpgrade,NeovaHealth/odoo,patmcb/odoo,apanju/GMIO_Odoo,janocat/odoo,sysadminmatmoz/OCB,dalegregory/odoo,osvalr/odoo,odooindia/odoo,rowemoore/odoo,hbrunn/OpenUpgrade,glovebx/odoo,nitinitprof/odoo,OpenPymeMx/OCB,ramitalat/odoo,lgscofield/odoo,OpenUpgrade-dev/OpenUpgrade,Danisan/odoo-1,papouso/odoo,slevenhagen/
odoo-npg,cysnake4713/odoo,SAM-IT-SA/odoo,OpusVL/odoo,JGarcia-Panach/odoo,sysadminmatmoz/OCB,Ernesto99/odoo,chiragjogi/odoo,pplatek/odoo,bkirui/odoo,abdellatifkarroum/odoo,vnsofthe/odoo,Grirrane/odoo,stonegithubs/odoo,BT-ojossen/odoo,leoliujie/odoo,lsinfo/odoo,makinacorpus/odoo,mvaled/OpenUpgrade,eino-makitalo/odoo,provaleks/o8,stephen144/odoo,minhtuancn/odoo,tvibliani/odoo,massot/odoo,blaggacao/OpenUpgrade,janocat/odoo,nuuuboo/odoo,rowemoore/odoo,Bachaco-ve/odoo,vrenaville/ngo-addons-backport,collex100/odoo,hifly/OpenUpgrade,sebalix/OpenUpgrade,oliverhr/odoo,funkring/fdoo,guewen/OpenUpgrade,ujjwalwahi/odoo,odooindia/odoo,optima-ict/odoo,nhomar/odoo,fuhongliang/odoo,mkieszek/odoo,kifcaliph/odoo,addition-it-solutions/project-all,ThinkOpen-Solutions/odoo,Noviat/odoo,Codefans-fan/odoo,slevenhagen/odoo,florian-dacosta/OpenUpgrade,andreparames/odoo,ShineFan/odoo,CopeX/odoo,jfpla/odoo,shingonoide/odoo,tarzan0820/odoo,arthru/OpenUpgrade,tvibliani/odoo,ChanduERP/odoo,thanhacun/odoo,nitinitprof/odoo,Daniel-CA/odoo,credativUK/OCB,lgscofield/odoo,leoliujie/odoo,florentx/OpenUpgrade,highco-groupe/odoo,andreparames/odoo,Ichag/odoo,guewen/OpenUpgrade,abdellatifkarroum/odoo,fossoult/odoo,slevenhagen/odoo,goliveirab/odoo,fevxie/odoo,apanju/GMIO_Odoo,kybriainfotech/iSocioCRM,QianBIG/odoo,Kilhog/odoo,bplancher/odoo,shivam1111/odoo,apanju/odoo,alhashash/odoo,dllsf/odootest,AuyaJackie/odoo,OSSESAC/odoopubarquiluz,ujjwalwahi/odoo,tvibliani/odoo,credativUK/OCB,ramadhane/odoo,frouty/odoo_oph,guewen/OpenUpgrade,rgeleta/odoo,Ernesto99/odoo,odootr/odoo,VielSoft/odoo,spadae22/odoo,GauravSahu/odoo,wangjun/odoo,guerrerocarlos/odoo,BT-rmartin/odoo,Danisan/odoo-1,prospwro/odoo,GauravSahu/odoo,virgree/odoo,prospwro/odoo,grap/OpenUpgrade,savoirfairelinux/OpenUpgrade,gvb/odoo,syci/OCB,oihane/odoo,tinkerthaler/odoo,pedrobaeza/odoo,guewen/OpenUpgrade,NeovaHealth/odoo,sebalix/OpenUpgrade,Eric-Zhong/odoo,janocat/odoo,stonegithubs/odoo,jolevq/odoopub,Eric-Zhong/odoo,gorjuce/odoo,ChanduERP/odoo,salaria/odo
o,Maspear/odoo,hifly/OpenUpgrade,vnsofthe/odoo,zchking/odoo,fevxie/odoo,sve-odoo/odoo,sebalix/OpenUpgrade,grap/OpenUpgrade,florian-dacosta/OpenUpgrade,poljeff/odoo,dllsf/odootest,agrista/odoo-saas,OpenUpgrade-dev/OpenUpgrade,Grirrane/odoo,tangyiyong/odoo,dsfsdgsbngfggb/odoo,fuselock/odoo,leorochael/odoo,makinacorpus/odoo,juanalfonsopr/odoo,shaufi/odoo,alexteodor/odoo,tvtsoft/odoo8,VielSoft/odoo,papouso/odoo,CubicERP/odoo,x111ong/odoo,gavin-feng/odoo,gavin-feng/odoo,QianBIG/odoo,cysnake4713/odoo,jpshort/odoo,bplancher/odoo,hanicker/odoo,cpyou/odoo,simongoffin/website_version,Daniel-CA/odoo,podemos-info/odoo,bobisme/odoo,hubsaysnuaa/odoo,rschnapka/odoo,dgzurita/odoo,hopeall/odoo,gavin-feng/odoo,Drooids/odoo,doomsterinc/odoo,microcom/odoo,srimai/odoo,nhomar/odoo-mirror,mkieszek/odoo,kybriainfotech/iSocioCRM,kifcaliph/odoo,VitalPet/odoo,Elico-Corp/odoo_OCB,kirca/OpenUpgrade,inspyration/odoo,numerigraphe/odoo,fossoult/odoo,apocalypsebg/odoo,rahuldhote/odoo,grap/OCB,eino-makitalo/odoo,goliveirab/odoo,collex100/odoo,jiangzhixiao/odoo,OpusVL/odoo,TRESCLOUD/odoopub,steedos/odoo,steedos/odoo,bobisme/odoo,grap/OCB,bplancher/odoo,idncom/odoo,virgree/odoo,fdvarela/odoo8,javierTerry/odoo,salaria/odoo,ShineFan/odoo,abstract-open-solutions/OCB,mkieszek/odoo,shingonoide/odoo,ccomb/OpenUpgrade,ThinkOpen-Solutions/odoo,Ichag/odoo,hmen89/odoo,inspyration/odoo,x111ong/odoo,odoousers2014/odoo,nuuuboo/odoo,numerigraphe/odoo,xujb/odoo,rdeheele/odoo,javierTerry/odoo,OpenUpgrade/OpenUpgrade,CubicERP/odoo,frouty/odoogoeen,OSSESAC/odoopubarquiluz,ovnicraft/odoo,glovebx/odoo,prospwro/odoo,rahuldhote/odoo,fevxie/odoo,makinacorpus/odoo,apocalypsebg/odoo,RafaelTorrealba/odoo,kirca/OpenUpgrade,datenbetrieb/odoo,pedrobaeza/OpenUpgrade,savoirfairelinux/odoo,hbrunn/OpenUpgrade,patmcb/odoo,apanju/GMIO_Odoo,erkrishna9/odoo,fjbatresv/odoo,jaxkodex/odoo,juanalfonsopr/odoo,GauravSahu/odoo,abenzbiria/clients_odoo,podemos-info/odoo,srimai/odoo,oihane/odoo,ramadhane/odoo,JCA-Developpement/Odoo,fjbatresv/odoo,
alexcuellar/odoo,klunwebale/odoo,feroda/odoo,laslabs/odoo,Daniel-CA/odoo,gsmartway/odoo,sergio-incaser/odoo,CopeX/odoo,nagyistoce/odoo-dev-odoo,GauravSahu/odoo,osvalr/odoo,jesramirez/odoo,demon-ru/iml-crm,RafaelTorrealba/odoo,ihsanudin/odoo,ShineFan/odoo,gsmartway/odoo,agrista/odoo-saas,charbeljc/OCB,lombritz/odoo,Bachaco-ve/odoo,BT-astauder/odoo,sergio-incaser/odoo,NL66278/OCB,frouty/odoogoeen,jiachenning/odoo,bplancher/odoo,odoo-turkiye/odoo,x111ong/odoo,Grirrane/odoo,csrocha/OpenUpgrade,luiseduardohdbackup/odoo,camptocamp/ngo-addons-backport,fgesora/odoo,ecosoft-odoo/odoo,deKupini/erp,jfpla/odoo,lsinfo/odoo,acshan/odoo,lgscofield/odoo,ubic135/odoo-design,dfang/odoo,luiseduardohdbackup/odoo,nuuuboo/odoo,synconics/odoo,microcom/odoo,draugiskisprendimai/odoo,nuncjo/odoo,mlaitinen/odoo,xzYue/odoo,cedk/odoo,Antiun/odoo,mkieszek/odoo,AuyaJackie/odoo,Noviat/odoo,MarcosCommunity/odoo,ramitalat/odoo,damdam-s/OpenUpgrade,fuselock/odoo,juanalfonsopr/odoo,incaser/odoo-odoo,sebalix/OpenUpgrade,nuncjo/odoo,ehirt/odoo,charbeljc/OCB,fossoult/odoo,ecosoft-odoo/odoo,codekaki/odoo,SAM-IT-SA/odoo,datenbetrieb/odoo,datenbetrieb/odoo,markeTIC/OCB,jeasoft/odoo,gorjuce/odoo,rowemoore/odoo,oihane/odoo,Maspear/odoo,VielSoft/odoo,elmerdpadilla/iv,dkubiak789/odoo,sv-dev1/odoo,odootr/odoo,patmcb/odoo,camptocamp/ngo-addons-backport,Adel-Magebinary/odoo,hubsaysnuaa/odoo,nexiles/odoo,hanicker/odoo,lightcn/odoo,codekaki/odoo,mvaled/OpenUpgrade,mkieszek/odoo,realsaiko/odoo,nitinitprof/odoo,0k/OpenUpgrade,ThinkOpen-Solutions/odoo,ingadhoc/odoo,papouso/odoo,ovnicraft/odoo,Kilhog/odoo,havt/odoo,wangjun/odoo,CopeX/odoo,abstract-open-solutions/OCB,shivam1111/odoo,Eric-Zhong/odoo,luistorresm/odoo,JGarcia-Panach/odoo,QianBIG/odoo,FlorianLudwig/odoo,hassoon3/odoo,bakhtout/odoo-educ,hassoon3/odoo,bwrsandman/OpenUpgrade,oliverhr/odoo,ramadhane/odoo,demon-ru/iml-crm,erkrishna9/odoo,cloud9UG/odoo,simongoffin/website_version,MarcosCommunity/odoo,damdam-s/OpenUpgrade,0k/odoo,srimai/odoo,NL66278/OCB,xzYue/odoo,
charbeljc/OCB,KontorConsulting/odoo,avoinsystems/odoo,sysadminmatmoz/OCB,mszewczy/odoo,FlorianLudwig/odoo,cedk/odoo,stephen144/odoo,savoirfairelinux/odoo,ThinkOpen-Solutions/odoo,shivam1111/odoo,stephen144/odoo,PongPi/isl-odoo,odooindia/odoo,pplatek/odoo,fuselock/odoo,camptocamp/ngo-addons-backport,jaxkodex/odoo,tinkhaven-organization/odoo,lsinfo/odoo,KontorConsulting/odoo,nexiles/odoo,glovebx/odoo,provaleks/o8,camptocamp/ngo-addons-backport,joariasl/odoo,ChanduERP/odoo,hifly/OpenUpgrade,MarcosCommunity/odoo,Bachaco-ve/odoo,christophlsa/odoo,PongPi/isl-odoo,frouty/odoogoeen,mlaitinen/odoo,apanju/odoo,sv-dev1/odoo,jiangzhixiao/odoo,poljeff/odoo,Endika/odoo,nuuuboo/odoo,hopeall/odoo,luiseduardohdbackup/odoo,Endika/odoo,draugiskisprendimai/odoo,naousse/odoo,OSSESAC/odoopubarquiluz,spadae22/odoo,collex100/odoo,pplatek/odoo,RafaelTorrealba/odoo,feroda/odoo,agrista/odoo-saas,havt/odoo,damdam-s/OpenUpgrade,fjbatresv/odoo,JGarcia-Panach/odoo,camptocamp/ngo-addons-backport,KontorConsulting/odoo,jusdng/odoo,dfang/odoo,rahuldhote/odoo,kybriainfotech/iSocioCRM,matrixise/odoo,fuselock/odoo,rowemoore/odoo,jolevq/odoopub,osvalr/odoo,christophlsa/odoo,Danisan/odoo-1,waytai/odoo,bakhtout/odoo-educ,kittiu/odoo,feroda/odoo,klunwebale/odoo,ecosoft-odoo/odoo,bakhtout/odoo-educ,rgeleta/odoo,jpshort/odoo,Daniel-CA/odoo,guerrerocarlos/odoo,stonegithubs/odoo,alhashash/odoo,javierTerry/odoo,highco-groupe/odoo,leorochael/odoo,grap/OpenUpgrade,jolevq/odoopub,bguillot/OpenUpgrade,blaggacao/OpenUpgrade,hbrunn/OpenUpgrade,credativUK/OCB,pedrobaeza/OpenUpgrade,nagyistoce/odoo-dev-odoo,AuyaJackie/odoo,vnsofthe/odoo,microcom/odoo,florentx/OpenUpgrade,CatsAndDogsbvba/odoo,alqfahad/odoo,FlorianLudwig/odoo,shaufi10/odoo,OpenPymeMx/OCB,incaser/odoo-odoo,RafaelTorrealba/odoo,JGarcia-Panach/odoo,bwrsandman/OpenUpgrade,VitalPet/odoo,savoirfairelinux/OpenUpgrade,mlaitinen/odoo,laslabs/odoo,sysadminmatmoz/OCB,codekaki/odoo,cpyou/odoo,odoo-turkiye/odoo,dkubiak789/odoo,jfpla/odoo,sinbazhou/odoo,brijeshkesariya
/odoo,Codefans-fan/odoo,bguillot/OpenUpgrade,charbeljc/OCB,dezynetechnologies/odoo,nhomar/odoo-mirror,omprakasha/odoo,ovnicraft/odoo,juanalfonsopr/odoo,JGarcia-Panach/odoo,xujb/odoo,bealdav/OpenUpgrade,slevenhagen/odoo,n0m4dz/odoo,rschnapka/odoo,grap/OpenUpgrade,Ichag/odoo,naousse/odoo,JonathanStein/odoo,alexcuellar/odoo,mustafat/odoo-1,doomsterinc/odoo,NL66278/OCB,dsfsdgsbngfggb/odoo,collex100/odoo,bguillot/OpenUpgrade,luistorresm/odoo,highco-groupe/odoo,bakhtout/odoo-educ,acshan/odoo,hassoon3/odoo,bobisme/odoo,laslabs/odoo,bobisme/odoo,ovnicraft/odoo,ubic135/odoo-design,ramitalat/odoo,ovnicraft/odoo,GauravSahu/odoo,steedos/odoo,alqfahad/odoo,realsaiko/odoo,MarcosCommunity/odoo,Endika/odoo,dkubiak789/odoo,srsman/odoo,windedge/odoo,sinbazhou/odoo,fgesora/odoo,mlaitinen/odoo,provaleks/o8,OpenUpgrade/OpenUpgrade,rgeleta/odoo,bwrsandman/OpenUpgrade,mustafat/odoo-1,ShineFan/odoo,optima-ict/odoo,CopeX/odoo,VielSoft/odoo,kybriainfotech/iSocioCRM,fdvarela/odoo8,ingadhoc/odoo,pedrobaeza/OpenUpgrade,blaggacao/OpenUpgrade,gvb/odoo,alexcuellar/odoo,cysnake4713/odoo,sadleader/odoo,incaser/odoo-odoo,dllsf/odootest,hanicker/odoo,jusdng/odoo,AuyaJackie/odoo,jesramirez/odoo,dalegregory/odoo,zchking/odoo,tinkhaven-organization/odoo,shaufi/odoo,dariemp/odoo,odootr/odoo,shivam1111/odoo,stonegithubs/odoo,synconics/odoo,VitalPet/odoo,diagramsoftware/odoo,cdrooom/odoo,rgeleta/odoo,lightcn/odoo,deKupini/erp,savoirfairelinux/OpenUpgrade,vnsofthe/odoo,shaufi10/odoo,Antiun/odoo,prospwro/odoo,abenzbiria/clients_odoo,abstract-open-solutions/OCB,avoinsystems/odoo,nuuuboo/odoo,factorlibre/OCB,slevenhagen/odoo-npg,mvaled/OpenUpgrade,rahuldhote/odoo,dariemp/odoo,frouty/odoogoeen,joariasl/odoo,aviciimaxwell/odoo,sinbazhou/odoo,hip-odoo/odoo,Maspear/odoo,joariasl/odoo,mustafat/odoo-1,odoo-turkiye/odoo,mustafat/odoo-1,sv-dev1/odoo,JonathanStein/odoo,gvb/odoo,pplatek/odoo,gvb/odoo,damdam-s/OpenUpgrade,rowemoore/odoo,credativUK/OCB,tvtsoft/odoo8,hopeall/odoo,shingonoide/odoo,SAM-IT-SA/odoo,jiachenning/
odoo,joshuajan/odoo,zchking/odoo,codekaki/odoo,bealdav/OpenUpgrade,stonegithubs/odoo,jiangzhixiao/odoo,tinkhaven-organization/odoo,jpshort/odoo,pedrobaeza/odoo,ccomb/OpenUpgrade,Antiun/odoo,kybriainfotech/iSocioCRM,joshuajan/odoo,BT-fgarbely/odoo,nuncjo/odoo,markeTIC/OCB,klunwebale/odoo,Daniel-CA/odoo,JGarcia-Panach/odoo,hubsaysnuaa/odoo,draugiskisprendimai/odoo,sinbazhou/odoo,pedrobaeza/odoo,shivam1111/odoo,apanju/GMIO_Odoo,alexcuellar/odoo,fgesora/odoo,abenzbiria/clients_odoo,dalegregory/odoo,gorjuce/odoo,nhomar/odoo,naousse/odoo,bkirui/odoo,ShineFan/odoo,factorlibre/OCB,avoinsystems/odoo,collex100/odoo,BT-rmartin/odoo,Kilhog/odoo,RafaelTorrealba/odoo,laslabs/odoo,tvibliani/odoo,tinkerthaler/odoo,demon-ru/iml-crm,srsman/odoo,hbrunn/OpenUpgrade,oliverhr/odoo,ingadhoc/odoo,thanhacun/odoo,apanju/odoo,cloud9UG/odoo,alqfahad/odoo,lombritz/odoo,Adel-Magebinary/odoo,BT-rmartin/odoo,bguillot/OpenUpgrade,Drooids/odoo,avoinsystems/odoo,BT-fgarbely/odoo,OpenUpgrade/OpenUpgrade,wangjun/odoo,Adel-Magebinary/odoo,addition-it-solutions/project-all,optima-ict/odoo,sadleader/odoo,ApuliaSoftware/odoo,slevenhagen/odoo,incaser/odoo-odoo,janocat/odoo,vnsofthe/odoo,arthru/OpenUpgrade,fuhongliang/odoo,savoirfairelinux/odoo,matrixise/odoo,JCA-Developpement/Odoo,apocalypsebg/odoo,ChanduERP/odoo,n0m4dz/odoo,rgeleta/odoo,camptocamp/ngo-addons-backport,ClearCorp-dev/odoo,OpenPymeMx/OCB,steedos/odoo,frouty/odoo_oph,joariasl/odoo,xzYue/odoo,mvaled/OpenUpgrade,nuncjo/odoo,ihsanudin/odoo,VielSoft/odoo,brijeshkesariya/odoo,bwrsandman/OpenUpgrade,windedge/odoo,gdgellatly/OCB1,jolevq/odoopub,SerpentCS/odoo,jusdng/odoo,sebalix/OpenUpgrade,oasiswork/odoo,oihane/odoo,funkring/fdoo,CubicERP/odoo,srimai/odoo,agrista/odoo-saas,cdrooom/odoo,markeTIC/OCB,lombritz/odoo,Gitlab11/odoo,fjbatresv/odoo,OpenUpgrade-dev/OpenUpgrade,Daniel-CA/odoo,blaggacao/OpenUpgrade,Endika/odoo,ApuliaSoftware/odoo,Maspear/odoo,ecosoft-odoo/odoo,ubic135/odoo-design,osvalr/odoo,waytai/odoo,steedos/odoo,AuyaJackie/odoo,ClearCorp-de
v/odoo,nuuuboo/odoo,odoousers2014/odoo,rahuldhote/odoo,csrocha/OpenUpgrade,rschnapka/odoo,abstract-open-solutions/OCB,massot/odoo,tangyiyong/odoo,CatsAndDogsbvba/odoo,draugiskisprendimai/odoo,idncom/odoo,bwrsandman/OpenUpgrade,sebalix/OpenUpgrade,mmbtba/odoo,optima-ict/odoo,Grirrane/odoo,eino-makitalo/odoo,odootr/odoo,FlorianLudwig/odoo,damdam-s/OpenUpgrade,mlaitinen/odoo,realsaiko/odoo,slevenhagen/odoo-npg,jeasoft/odoo,factorlibre/OCB,joshuajan/odoo,Adel-Magebinary/odoo,frouty/odoo_oph,collex100/odoo,hip-odoo/odoo,abenzbiria/clients_odoo,gorjuce/odoo,alexcuellar/odoo,rdeheele/odoo,SerpentCS/odoo,ClearCorp-dev/odoo,SerpentCS/odoo,QianBIG/odoo,mmbtba/odoo,hassoon3/odoo,gorjuce/odoo,ygol/odoo,joariasl/odoo,tinkerthaler/odoo,oasiswork/odoo,florian-dacosta/OpenUpgrade,OSSESAC/odoopubarquiluz,JonathanStein/odoo,florian-dacosta/OpenUpgrade,jaxkodex/odoo,omprakasha/odoo,hip-odoo/odoo,Maspear/odoo,BT-ojossen/odoo,andreparames/odoo,demon-ru/iml-crm,mustafat/odoo-1,dgzurita/odoo,factorlibre/OCB,christophlsa/odoo,hifly/OpenUpgrade,brijeshkesariya/odoo,massot/odoo,storm-computers/odoo,storm-computers/odoo,BT-ojossen/odoo,xzYue/odoo,bakhtout/odoo-educ,rschnapka/odoo,tvibliani/odoo,bkirui/odoo,markeTIC/OCB,BT-astauder/odoo,VielSoft/odoo,provaleks/o8,MarcosCommunity/odoo,FlorianLudwig/odoo,srsman/odoo,Noviat/odoo,arthru/OpenUpgrade,sve-odoo/odoo,OpenUpgrade/OpenUpgrade,rdeheele/odoo,hmen89/odoo,bplancher/odoo,ojengwa/odoo,elmerdpadilla/iv,SerpentCS/odoo,klunwebale/odoo,dsfsdgsbngfggb/odoo,OSSESAC/odoopubarquiluz,tvtsoft/odoo8,VitalPet/odoo,PongPi/isl-odoo,0k/OpenUpgrade,alexteodor/odoo,ehirt/odoo,CatsAndDogsbvba/odoo,dkubiak789/odoo,oasiswork/odoo,alhashash/odoo,jfpla/odoo,odoo-turkiye/odoo,sergio-incaser/odoo,leoliujie/odoo,shaufi/odoo,prospwro/odoo,jiachenning/odoo,charbeljc/OCB,jiangzhixiao/odoo,ujjwalwahi/odoo,hbrunn/OpenUpgrade,JCA-Developpement/Odoo,Bachaco-ve/odoo,rgeleta/odoo,Kilhog/odoo,joariasl/odoo,odoousers2014/odoo,simongoffin/website_version,hmen89/odoo,datenbetrieb/
odoo,shaufi10/odoo,0k/odoo,credativUK/OCB,joariasl/odoo,kirca/OpenUpgrade,jaxkodex/odoo,leorochael/odoo,omprakasha/odoo,vnsofthe/odoo,bguillot/OpenUpgrade,sadleader/odoo,leorochael/odoo,windedge/odoo,blaggacao/OpenUpgrade,sv-dev1/odoo,n0m4dz/odoo,colinnewell/odoo,salaria/odoo,tinkhaven-organization/odoo,klunwebale/odoo,Bachaco-ve/odoo,fuhongliang/odoo,abdellatifkarroum/odoo,CubicERP/odoo,pedrobaeza/OpenUpgrade,Endika/OpenUpgrade,zchking/odoo,synconics/odoo,lgscofield/odoo,nhomar/odoo,BT-rmartin/odoo,oihane/odoo,draugiskisprendimai/odoo,oasiswork/odoo,jpshort/odoo,xujb/odoo,havt/odoo,Drooids/odoo,mkieszek/odoo,prospwro/odoo,agrista/odoo-saas,luistorresm/odoo,slevenhagen/odoo-npg,ygol/odoo,VitalPet/odoo,FlorianLudwig/odoo,rowemoore/odoo,ThinkOpen-Solutions/odoo,BT-ojossen/odoo,vrenaville/ngo-addons-backport,BT-fgarbely/odoo,microcom/odoo,ccomb/OpenUpgrade,grap/OpenUpgrade,guerrerocarlos/odoo,avoinsystems/odoo,fuhongliang/odoo,dgzurita/odoo,guerrerocarlos/odoo,virgree/odoo,synconics/odoo,erkrishna9/odoo,dezynetechnologies/odoo,xzYue/odoo,funkring/fdoo,glovebx/odoo,thanhacun/odoo,GauravSahu/odoo,cedk/odoo,nexiles/odoo,srimai/odoo,jiachenning/odoo,hoatle/odoo,hip-odoo/odoo,Nick-OpusVL/odoo,microcom/odoo,hanicker/odoo,alhashash/odoo,codekaki/odoo,feroda/odoo,colinnewell/odoo,Danisan/odoo-1,Nowheresly/odoo,odooindia/odoo,0k/OpenUpgrade,ubic135/odoo-design,oihane/odoo,nagyistoce/odoo-dev-odoo,SAM-IT-SA/odoo,joshuajan/odoo,cpyou/odoo,ClearCorp-dev/odoo,Maspear/odoo,brijeshkesariya/odoo,rubencabrera/odoo,salaria/odoo,sysadminmatmoz/OCB,pedrobaeza/odoo,grap/OCB,stephen144/odoo,tarzan0820/odoo,numerigraphe/odoo,patmcb/odoo,dezynetechnologies/odoo,Nowheresly/odoo,jaxkodex/odoo,luistorresm/odoo,OpenPymeMx/OCB,sve-odoo/odoo,n0m4dz/odoo,oliverhr/odoo,spadae22/odoo,apanju/odoo,nhomar/odoo,hifly/OpenUpgrade,fuhongliang/odoo,Noviat/odoo,lightcn/odoo,diagramsoftware/odoo,leoliujie/odoo,leorochael/odoo,nitinitprof/odoo,incaser/odoo-odoo,funkring/fdoo,tvtsoft/odoo8,hifly/OpenUpgrade,vren
aville/ngo-addons-backport,doomsterinc/odoo,luistorresm/odoo,nagyistoce/odoo-dev-odoo,gavin-feng/odoo,NeovaHealth/odoo,JonathanStein/odoo,mlaitinen/odoo,realsaiko/odoo,zchking/odoo,takis/odoo,synconics/odoo,Endika/OpenUpgrade,ClearCorp-dev/odoo,Nick-OpusVL/odoo,makinacorpus/odoo,alqfahad/odoo,alqfahad/odoo,matrixise/odoo,draugiskisprendimai/odoo,glovebx/odoo,alqfahad/odoo,andreparames/odoo,takis/odoo,nuncjo/odoo,goliveirab/odoo,jiachenning/odoo,factorlibre/OCB,arthru/OpenUpgrade,minhtuancn/odoo,apocalypsebg/odoo,dariemp/odoo,colinnewell/odoo,jiangzhixiao/odoo,avoinsystems/odoo,dariemp/odoo,klunwebale/odoo,andreparames/odoo,fuselock/odoo,doomsterinc/odoo,naousse/odoo,windedge/odoo,pplatek/odoo,NeovaHealth/odoo,ApuliaSoftware/odoo,omprakasha/odoo,minhtuancn/odoo,nexiles/odoo,naousse/odoo,Adel-Magebinary/odoo,leoliujie/odoo,incaser/odoo-odoo,sebalix/OpenUpgrade,shingonoide/odoo,rubencabrera/odoo,tangyiyong/odoo,OSSESAC/odoopubarquiluz,rschnapka/odoo,ChanduERP/odoo,codekaki/odoo,gavin-feng/odoo,fjbatresv/odoo,KontorConsulting/odoo,dkubiak789/odoo,havt/odoo,dezynetechnologies/odoo,rschnapka/odoo,hmen89/odoo,nitinitprof/odoo,cpyou/odoo,Ichag/odoo,rubencabrera/odoo,feroda/odoo,Drooids/odoo,Danisan/odoo-1,JGarcia-Panach/odoo,fevxie/odoo,Ichag/odoo,BT-astauder/odoo,blaggacao/OpenUpgrade,JCA-Developpement/Odoo,funkring/fdoo,numerigraphe/odoo,florentx/OpenUpgrade,sysadminmatmoz/OCB,draugiskisprendimai/odoo,ehirt/odoo,nuncjo/odoo,optima-ict/odoo,charbeljc/OCB,apanju/GMIO_Odoo,oihane/odoo,dfang/odoo,Codefans-fan/odoo,vrenaville/ngo-addons-backport,wangjun/odoo,rdeheele/odoo,matrixise/odoo,steedos/odoo,TRESCLOUD/odoopub,fjbatresv/odoo,OpenPymeMx/OCB,hoatle/odoo,hmen89/odoo,fdvarela/odoo8,grap/OCB,CatsAndDogsbvba/odoo,waytai/odoo,GauravSahu/odoo,waytai/odoo,highco-groupe/odoo,Gitlab11/odoo,gdgellatly/OCB1,Drooids/odoo,mvaled/OpenUpgrade,jusdng/odoo,ccomb/OpenUpgrade,ygol/odoo,frouty/odoo_oph,podemos-info/odoo,javierTerry/odoo,0k/OpenUpgrade,mszewczy/odoo,markeTIC/OCB,jpshort/odoo,
BT-astauder/odoo,goliveirab/odoo,apocalypsebg/odoo,nhomar/odoo-mirror,nhomar/odoo-mirror,KontorConsulting/odoo,bakhtout/odoo-educ,srimai/odoo,MarcosCommunity/odoo,tinkhaven-organization/odoo,ehirt/odoo,mlaitinen/odoo,srsman/odoo,janocat/odoo,syci/OCB,dalegregory/odoo,ecosoft-odoo/odoo,dfang/odoo,papouso/odoo,waytai/odoo,apocalypsebg/odoo,ojengwa/odoo,bealdav/OpenUpgrade,rubencabrera/odoo,ChanduERP/odoo,jaxkodex/odoo,poljeff/odoo,javierTerry/odoo,Ichag/odoo,bguillot/OpenUpgrade,vrenaville/ngo-addons-backport,gvb/odoo,chiragjogi/odoo,podemos-info/odoo,Nick-OpusVL/odoo,PongPi/isl-odoo,sv-dev1/odoo,BT-ojossen/odoo,Adel-Magebinary/odoo,synconics/odoo,cedk/odoo,virgree/odoo,podemos-info/odoo,hoatle/odoo,PongPi/isl-odoo,shingonoide/odoo,xujb/odoo,poljeff/odoo,gsmartway/odoo,nexiles/odoo,AuyaJackie/odoo,gdgellatly/OCB1,stephen144/odoo,odoo-turkiye/odoo,luiseduardohdbackup/odoo,deKupini/erp,jesramirez/odoo,ojengwa/odoo,simongoffin/website_version,markeTIC/OCB,pedrobaeza/OpenUpgrade,KontorConsulting/odoo,Elico-Corp/odoo_OCB,sv-dev1/odoo,numerigraphe/odoo,Codefans-fan/odoo,addition-it-solutions/project-all,tvtsoft/odoo8,tarzan0820/odoo,shivam1111/odoo,nitinitprof/odoo,arthru/OpenUpgrade,mszewczy/odoo,csrocha/OpenUpgrade,klunwebale/odoo,OpenUpgrade-dev/OpenUpgrade,grap/OpenUpgrade,aviciimaxwell/odoo,Eric-Zhong/odoo,ehirt/odoo,ovnicraft/odoo,ojengwa/odoo,spadae22/odoo,dgzurita/odoo,gdgellatly/OCB1,goliveirab/odoo,wangjun/odoo,joshuajan/odoo,slevenhagen/odoo,NL66278/OCB,thanhacun/odoo,frouty/odoogoeen,patmcb/odoo,glovebx/odoo,bakhtout/odoo-educ,grap/OCB,ingadhoc/odoo,minhtuancn/odoo,alexteodor/odoo,doomsterinc/odoo,BT-fgarbely/odoo,RafaelTorrealba/odoo,Ernesto99/odoo,oliverhr/odoo,syci/OCB,dalegregory/odoo,ihsanudin/odoo,colinnewell/odoo,funkring/fdoo,sadleader/odoo,cysnake4713/odoo,stephen144/odoo,jfpla/odoo,naousse/odoo,gvb/odoo,prospwro/odoo,jusdng/odoo,elmerdpadilla/iv,datenbetrieb/odoo,jesramirez/odoo,VielSoft/odoo,cloud9UG/odoo,ramitalat/odoo,fuhongliang/odoo,brijeshkesariy
a/odoo,OpenUpgrade-dev/OpenUpgrade,OpenPymeMx/OCB,guerrerocarlos/odoo,slevenhagen/odoo,windedge/odoo,vrenaville/ngo-addons-backport,NL66278/OCB,Elico-Corp/odoo_OCB,colinnewell/odoo,CopeX/odoo,leoliujie/odoo,abenzbiria/clients_odoo,lsinfo/odoo,spadae22/odoo,dsfsdgsbngfggb/odoo,jeasoft/odoo,kittiu/odoo,frouty/odoo_oph,cloud9UG/odoo,shaufi10/odoo,BT-astauder/odoo,deKupini/erp,shaufi/odoo,CubicERP/odoo,salaria/odoo,damdam-s/OpenUpgrade,hoatle/odoo,janocat/odoo,grap/OpenUpgrade,camptocamp/ngo-addons-backport,slevenhagen/odoo-npg,sergio-incaser/odoo,minhtuancn/odoo,feroda/odoo,ApuliaSoftware/odoo,gorjuce/odoo,BT-ojossen/odoo,PongPi/isl-odoo,gavin-feng/odoo,x111ong/odoo,rschnapka/odoo,christophlsa/odoo,eino-makitalo/odoo,Kilhog/odoo,dezynetechnologies/odoo,jeasoft/odoo,fgesora/odoo,bobisme/odoo,KontorConsulting/odoo,SerpentCS/odoo,eino-makitalo/odoo,Endika/odoo,aviciimaxwell/odoo,massot/odoo,Ichag/odoo,grap/OCB,kittiu/odoo,dalegregory/odoo,spadae22/odoo,fossoult/odoo,tinkhaven-organization/odoo,tvtsoft/odoo8,nexiles/odoo,syci/OCB,mustafat/odoo-1,TRESCLOUD/odoopub,nhomar/odoo-mirror,makinacorpus/odoo,CatsAndDogsbvba/odoo,Danisan/odoo-1,Gitlab11/odoo,takis/odoo,rahuldhote/odoo,storm-computers/odoo,abstract-open-solutions/OCB,idncom/odoo,stonegithubs/odoo,Codefans-fan/odoo,oasiswork/odoo,gsmartway/odoo,cedk/odoo,mszewczy/odoo,juanalfonsopr/odoo,diagramsoftware/odoo,Elico-Corp/odoo_OCB,florentx/OpenUpgrade,osvalr/odoo,fossoult/odoo,syci/OCB,erkrishna9/odoo,csrocha/OpenUpgrade,guerrerocarlos/odoo,CatsAndDogsbvba/odoo,sysadminmatmoz/OCB,MarcosCommunity/odoo,Ernesto99/odoo,rubencabrera/odoo,savoirfairelinux/odoo,cysnake4713/odoo,florentx/OpenUpgrade,NeovaHealth/odoo,numerigraphe/odoo,ShineFan/odoo,tinkhaven-organization/odoo,nagyistoce/odoo-dev-odoo,ehirt/odoo,shivam1111/odoo,naousse/odoo,cedk/odoo,oasiswork/odoo,OpusVL/odoo,dezynetechnologies/odoo,lightcn/odoo,frouty/odoogoeen,bplancher/odoo,Eric-Zhong/odoo,kittiu/odoo,kittiu/odoo,NeovaHealth/odoo,aviciimaxwell/odoo,alexteodor/o
doo,fossoult/odoo,ojengwa/odoo,mustafat/odoo-1,wangjun/odoo,abstract-open-solutions/OCB,provaleks/o8,srimai/odoo,podemos-info/odoo,pplatek/odoo,bwrsandman/OpenUpgrade,alexteodor/odoo,gsmartway/odoo,alexcuellar/odoo,TRESCLOUD/odoopub,0k/odoo,patmcb/odoo,goliveirab/odoo,provaleks/o8,hifly/OpenUpgrade,bobisme/odoo,inspyration/odoo,takis/odoo,csrocha/OpenUpgrade,florian-dacosta/OpenUpgrade,idncom/odoo,Maspear/odoo,leorochael/odoo,papouso/odoo,n0m4dz/odoo,OpenPymeMx/OCB,sve-odoo/odoo,BT-rmartin/odoo,pedrobaeza/OpenUpgrade,havt/odoo,ApuliaSoftware/odoo,Endika/OpenUpgrade,acshan/odoo,Nick-OpusVL/odoo,n0m4dz/odoo,leoliujie/odoo,CubicERP/odoo,Antiun/odoo,bkirui/odoo,odootr/odoo,bealdav/OpenUpgrade,hoatle/odoo,salaria/odoo,factorlibre/OCB,VitalPet/odoo,ThinkOpen-Solutions/odoo,rubencabrera/odoo,ubic135/odoo-design,kifcaliph/odoo,codekaki/odoo,Codefans-fan/odoo,goliveirab/odoo,apanju/GMIO_Odoo,OpenUpgrade/OpenUpgrade,nitinitprof/odoo,ehirt/odoo,avoinsystems/odoo,addition-it-solutions/project-all,cedk/odoo,Kilhog/odoo,gvb/odoo,Endika/OpenUpgrade,oasiswork/odoo,tarzan0820/odoo,ChanduERP/odoo,abdellatifkarroum/odoo,Gitlab11/odoo,tangyiyong/odoo,salaria/odoo,apanju/GMIO_Odoo,gsmartway/odoo,jfpla/odoo,rowemoore/odoo,collex100/odoo,storm-computers/odoo,shingonoide/odoo,brijeshkesariya/odoo,x111ong/odoo,hopeall/odoo,odootr/odoo,erkrishna9/odoo,CatsAndDogsbvba/odoo,diagramsoftware/odoo,cdrooom/odoo,rgeleta/odoo,hopeall/odoo,odootr/odoo,luistorresm/odoo,bealdav/OpenUpgrade,SerpentCS/odoo,credativUK/OCB,dgzurita/odoo,Nick-OpusVL/odoo,hoatle/odoo,CopeX/odoo,elmerdpadilla/iv,SAM-IT-SA/odoo,tvibliani/odoo,apanju/odoo,dariemp/odoo,VitalPet/odoo,QianBIG/odoo,florentx/OpenUpgrade,windedge/odoo,bkirui/odoo,takis/odoo,slevenhagen/odoo-npg,addition-it-solutions/project-all,acshan/odoo,ihsanudin/odoo,0k/odoo,tarzan0820/odoo,Noviat/odoo,mvaled/OpenUpgrade,matrixise/odoo,provaleks/o8,csrocha/OpenUpgrade,joshuajan/odoo,demon-ru/iml-crm,Nick-OpusVL/odoo,Grirrane/odoo,dllsf/odootest,hassoon3/odoo,apoc
alypsebg/odoo,optima-ict/odoo,kittiu/odoo,takis/odoo,dsfsdgsbngfggb/odoo,grap/OCB,laslabs/odoo,hubsaysnuaa/odoo,dfang/odoo,nhomar/odoo,brijeshkesariya/odoo,windedge/odoo,Endika/OpenUpgrade,christophlsa/odoo,tangyiyong/odoo,alhashash/odoo,Ernesto99/odoo,realsaiko/odoo,hubsaysnuaa/odoo,gdgellatly/OCB1,cloud9UG/odoo,rubencabrera/odoo,diagramsoftware/odoo,fevxie/odoo,acshan/odoo,fuselock/odoo,nagyistoce/odoo-dev-odoo,damdam-s/OpenUpgrade,0k/OpenUpgrade,ramadhane/odoo,dariemp/odoo,ccomb/OpenUpgrade,ingadhoc/odoo,savoirfairelinux/OpenUpgrade,diagramsoftware/odoo,funkring/fdoo,highco-groupe/odoo,hoatle/odoo,pedrobaeza/odoo,havt/odoo,massot/odoo,QianBIG/odoo,OpenUpgrade-dev/OpenUpgrade,BT-rmartin/odoo,fgesora/odoo,glovebx/odoo,Antiun/odoo,Endika/odoo,BT-rmartin/odoo,credativUK/OCB,Noviat/odoo,kittiu/odoo,virgree/odoo,guewen/OpenUpgrade,savoirfairelinux/OpenUpgrade,microcom/odoo,luiseduardohdbackup/odoo,bkirui/odoo,osvalr/odoo,ingadhoc/odoo,mmbtba/odoo,cdrooom/odoo,hbrunn/OpenUpgrade,chiragjogi/odoo,laslabs/odoo,sergio-incaser/odoo,Antiun/odoo,tangyiyong/odoo,thanhacun/odoo,nexiles/odoo,apanju/odoo,juanalfonsopr/odoo,dfang/odoo,jolevq/odoopub,BT-ojossen/odoo,SAM-IT-SA/odoo,BT-fgarbely/odoo,Gitlab11/odoo,OpusVL/odoo,ApuliaSoftware/odoo,jiangzhixiao/odoo,syci/OCB,thanhacun/odoo,takis/odoo,andreparames/odoo,hip-odoo/odoo,ecosoft-odoo/odoo,rdeheele/odoo,deKupini/erp,eino-makitalo/odoo,Endika/odoo,bobisme/odoo,eino-makitalo/odoo,tarzan0820/odoo,odoo-turkiye/odoo,frouty/odoo_oph,storm-computers/odoo,tarzan0820/odoo,hanicker/odoo,grap/OCB,dezynetechnologies/odoo,rschnapka/odoo,datenbetrieb/odoo,Nowheresly/odoo,ujjwalwahi/odoo,gavin-feng/odoo,poljeff/odoo,lsinfo/odoo,blaggacao/OpenUpgrade,lsinfo/odoo,ramadhane/odoo,gorjuce/odoo,AuyaJackie/odoo,aviciimaxwell/odoo,ygol/odoo,Nick-OpusVL/odoo,factorlibre/OCB,javierTerry/odoo,dkubiak789/odoo,CubicERP/odoo,sinbazhou/odoo,ingadhoc/odoo,simongoffin/website_version,tinkerthaler/odoo,guewen/OpenUpgrade,Bachaco-ve/odoo,alhashash/odoo,ramadhane
/odoo,hubsaysnuaa/odoo,patmcb/odoo,ramitalat/odoo,waytai/odoo,fjbatresv/odoo,jpshort/odoo,kirca/OpenUpgrade,markeTIC/OCB,mvaled/OpenUpgrade,jaxkodex/odoo,chiragjogi/odoo,ecosoft-odoo/odoo,dsfsdgsbngfggb/odoo,JCA-Developpement/Odoo,steedos/odoo,doomsterinc/odoo,credativUK/OCB,odoousers2014/odoo,shaufi10/odoo,sve-odoo/odoo,vrenaville/ngo-addons-backport,waytai/odoo,kybriainfotech/iSocioCRM,jpshort/odoo,mszewczy/odoo,minhtuancn/odoo,lombritz/odoo,zchking/odoo,Endika/OpenUpgrade,elmerdpadilla/iv,hubsaysnuaa/odoo,apanju/odoo,ovnicraft/odoo,luiseduardohdbackup/odoo,jiangzhixiao/odoo,tvibliani/odoo,acshan/odoo,sergio-incaser/odoo,storm-computers/odoo,jfpla/odoo,Nowheresly/odoo,ihsanudin/odoo,chiragjogi/odoo,nuuuboo/odoo,sv-dev1/odoo,ygol/odoo,hanicker/odoo,wangjun/odoo,cloud9UG/odoo,savoirfairelinux/odoo,Bachaco-ve/odoo,CopeX/odoo,jeasoft/odoo,addition-it-solutions/project-all,pedrobaeza/OpenUpgrade,savoirfairelinux/OpenUpgrade,jusdng/odoo,hip-odoo/odoo,cpyou/odoo,lightcn/odoo,guewen/OpenUpgrade,jesramirez/odoo,OpenUpgrade/OpenUpgrade,srsman/odoo,x111ong/odoo,Danisan/odoo-1,luistorresm/odoo,zchking/odoo,BT-fgarbely/odoo,papouso/odoo,dsfsdgsbngfggb/odoo,jeasoft/odoo,Grirrane/odoo,synconics/odoo,dllsf/odootest,pplatek/odoo,oliverhr/odoo,Antiun/odoo,lightcn/odoo,omprakasha/odoo,makinacorpus/odoo,javierTerry/odoo,camptocamp/ngo-addons-backport,bwrsandman/OpenUpgrade,virgree/odoo,JonathanStein/odoo,bkirui/odoo,kirca/OpenUpgrade,fuhongliang/odoo,idncom/odoo,virgree/odoo,acshan/odoo,cloud9UG/odoo,christophlsa/odoo,TRESCLOUD/odoopub,hopeall/odoo,poljeff/odoo,dalegregory/odoo,RafaelTorrealba/odoo,chiragjogi/odoo,tinkerthaler/odoo,tangyiyong/odoo,kybriainfotech/iSocioCRM,Elico-Corp/odoo_OCB,Daniel-CA/odoo,inspyration/odoo,vnsofthe/odoo,BT-fgarbely/odoo,luiseduardohdbackup/odoo,ThinkOpen-Solutions/odoo,srsman/odoo,fuselock/odoo,alexcuellar/odoo,0k/odoo,ujjwalwahi/odoo,juanalfonsopr/odoo,lombritz/odoo,Drooids/odoo,bealdav/OpenUpgrade,shaufi/odoo,Noviat/odoo,mmbtba/odoo,ccomb/OpenUpgra
de,stonegithubs/odoo,ihsanudin/odoo,makinacorpus/odoo,omprakasha/odoo,mmbtba/odoo,kirca/OpenUpgrade,oliverhr/odoo,bguillot/OpenUpgrade,jeasoft/odoo,idncom/odoo,janocat/odoo,chiragjogi/odoo,Nowheresly/odoo,aviciimaxwell/odoo,gdgellatly/OCB1,kifcaliph/odoo,ygol/odoo,guerrerocarlos/odoo,dgzurita/odoo,ojengwa/odoo,hassoon3/odoo,kirca/OpenUpgrade,JonathanStein/odoo,Nowheresly/odoo,csrocha/OpenUpgrade,spadae22/odoo,Ernesto99/odoo,gsmartway/odoo,Ernesto99/odoo,PongPi/isl-odoo,dkubiak789/odoo,tinkerthaler/odoo,sinbazhou/odoo,x111ong/odoo,diagramsoftware/odoo,kifcaliph/odoo,FlorianLudwig/odoo,mmbtba/odoo,abdellatifkarroum/odoo,poljeff/odoo,odooindia/odoo,jiachenning/odoo,fgesora/odoo,frouty/odoogoeen,OpenPymeMx/OCB,nuncjo/odoo,datenbetrieb/odoo,gdgellatly/OCB1,fevxie/odoo,NeovaHealth/odoo | addons/mrp/wizard/wizard_procurement.py | addons/mrp/wizard/wizard_procurement.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import wizard
import threading
import pooler
parameter_form = '''<?xml version="1.0"?>
<form string="Parameters" colspan="4">
<label string="This wizard will schedule procurements." colspan="4" align="0.0"/>
</form>'''
parameter_fields = {
}
def _procure_calculation_procure(self, db_name, uid, data, context):
    """Run the MRP procurement confirmation on a dedicated DB cursor.

    Executed in a background thread (see _procure_calculation); opens its
    own cursor from the pool and always closes it, even on failure.
    """
    database, registry = pooler.get_db_and_pool(db_name)
    cursor = database.cursor()
    try:
        registry.get('mrp.procurement')._procure_confirm(
            cursor, uid, use_new_cursor=cursor.dbname, context=context)
    finally:
        # Release the cursor back to the pool whatever happens above.
        cursor.close()
    return {}
def _procure_calculation(self, cr, uid, data, context):
    """Start the procurement computation in a daemonless worker thread and
    return immediately so the wizard UI does not block."""
    worker = threading.Thread(
        target=_procure_calculation_procure,
        args=(self, cr.dbname, uid, data, context),
    )
    worker.start()
    return {}
class procurement_compute(wizard.interface):
    """Wizard that schedules all confirmed MRP procurements in the background."""
    # 'init' shows the informational form; 'compute' runs _procure_calculation
    # (which spawns the worker thread) and immediately ends the wizard.
    states = {
        'init': {
            'actions': [],
            'result': {'type': 'form', 'arch':parameter_form, 'fields': parameter_fields, 'state':[('end','Cancel'),('compute','Compute Procurements') ]}
        },
        'compute': {
            'actions': [_procure_calculation],
            'result': {'type': 'state', 'state':'end'}
        },
    }
procurement_compute('mrp.procurement.compute')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import wizard
import threading
import pooler
parameter_form = '''<?xml version="1.0"?>
<form string="Parameters" colspan="4">
<label string="This wizard will schedule procurements." colspan="4" align="0.0"/>
</form>'''
parameter_fields = {
}
def _procure_calculation_procure(self, db_name, uid, data, context):
    """Run the MRP procurement confirmation on a dedicated DB cursor.

    Runs in a background thread (see _procure_calculation), so it opens its
    own cursor from the pool and must close it itself.
    """
    db, pool = pooler.get_db_and_pool(db_name)
    cr = db.cursor()
    try:
        proc_obj = pool.get('mrp.procurement')
        proc_obj._procure_confirm(cr, uid, use_new_cursor=cr.dbname, context=context)
    finally:
        # The original never closed the cursor (and leaked it on exceptions),
        # which exhausts the connection pool over repeated runs.
        cr.close()
    return {}
def _procure_calculation(self, cr, uid, data, context):
threaded_calculation = threading.Thread(target=_procure_calculation_procure, args=(self, cr.dbname, uid, data, context))
threaded_calculation.start()
return {}
class procurement_compute(wizard.interface):
states = {
'init': {
'actions': [],
'result': {'type': 'form', 'arch':parameter_form, 'fields': parameter_fields, 'state':[('end','Cancel'),('compute','Compute Procurements') ]}
},
'compute': {
'actions': [_procure_calculation],
'result': {'type': 'state', 'state':'end'}
},
}
procurement_compute('mrp.procurement.compute')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | Python |
f1145fc98f825b20452dd924d287349b4a8ae628 | Update templates.py | googlefonts/oss-fuzz,google/oss-fuzz,ssbr/oss-fuzz,FeliciaLim/oss-fuzz,skia-dev/oss-fuzz,skia-dev/oss-fuzz,robertswiecki/oss-fuzz,googlefonts/oss-fuzz,skia-dev/oss-fuzz,ssbr/oss-fuzz,oliverchang/oss-fuzz,robertswiecki/oss-fuzz,google/oss-fuzz,vitalybuka/oss-fuzz,kcc/oss-fuzz,ssbr/oss-fuzz,ssbr/oss-fuzz,google/oss-fuzz,kcc/oss-fuzz,google/oss-fuzz,googlefonts/oss-fuzz,kcc/oss-fuzz,googlefonts/oss-fuzz,FeliciaLim/oss-fuzz,FeliciaLim/oss-fuzz,googlefonts/oss-fuzz,googlefonts/oss-fuzz,skia-dev/oss-fuzz,robertswiecki/oss-fuzz,oliverchang/oss-fuzz,skia-dev/oss-fuzz,FeliciaLim/oss-fuzz,robertswiecki/oss-fuzz,oliverchang/oss-fuzz,FeliciaLim/oss-fuzz,oliverchang/oss-fuzz,robertswiecki/oss-fuzz,google/oss-fuzz,google/oss-fuzz,skia-dev/oss-fuzz,skia-dev/oss-fuzz,ssbr/oss-fuzz,vitalybuka/oss-fuzz,oliverchang/oss-fuzz,skia-dev/oss-fuzz,googlefonts/oss-fuzz,vitalybuka/oss-fuzz,kcc/oss-fuzz,google/oss-fuzz,vitalybuka/oss-fuzz,robertswiecki/oss-fuzz,google/oss-fuzz,googlefonts/oss-fuzz,skia-dev/oss-fuzz,kcc/oss-fuzz,googlefonts/oss-fuzz,google/oss-fuzz,robertswiecki/oss-fuzz,skia-dev/oss-fuzz,google/oss-fuzz,google/oss-fuzz,googlefonts/oss-fuzz,robertswiecki/oss-fuzz,skia-dev/oss-fuzz,robertswiecki/oss-fuzz,robertswiecki/oss-fuzz,robertswiecki/oss-fuzz,vitalybuka/oss-fuzz | infra/templates.py | infra/templates.py | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
JENKINS_TEMPLATE = """\
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
def libfuzzerBuild = fileLoader.fromGit('infra/libfuzzer-pipeline.groovy',
'https://github.com/google/oss-fuzz.git')
libfuzzerBuild {
git = "put git url here"
}
"""
# Dockerfile skeleton emitted for a new OSS-Fuzz project; the maintainer
# fills in the contact e-mail and the project's repository URL.
DOCKER_TEMPLATE = """\
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
FROM ossfuzz/base-libfuzzer
MAINTAINER your@email.com
RUN apt-get install -y make autoconf automake libtool
RUN git clone <git_url> # or use other version control
COPY build.sh /src/
"""
BUILD_TEMPLATE = """\
#!/bin/bash -eu
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
cd /src/%s
# build the target.
# e.g.
#
# ./autogen.sh
# ./configure
# make clean all
# build your fuzzer(s)
# e.g.
# $CXX $CXXFLAGS -std=c++11 -Iinclude \\
# /path/to/name_of_fuzzer.cc -o /out/name_of_fuzzer \\
# -lfuzzer /path/to/library.a $FUZZER_LDFLAGS
"""
| # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
JENKINS_TEMPLATE = """\
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
def libfuzzerBuild = fileLoader.fromGit('infra/libfuzzer-pipeline.groovy',
'https://github.com/google/oss-fuzz.git')
libfuzzerBuild {
git = "put git url here"
}
"""
# Dockerfile skeleton emitted for a new OSS-Fuzz project; the maintainer
# fills in the contact e-mail and the project's repository URL.
DOCKER_TEMPLATE = """\
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
FROM ossfuzz/base-libfuzzer
MAINTAINER your@email.com
RUN apt-get install -y make autoconf automake libtool
RUN git clone <git_url> # or use other version control
COPY build.sh /src/
"""
BUILD_TEMPLATE = """\
#!/bin/bash -eu
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
cd /src/%s
# build the target.
# e.g.
#
# ./autogen.sh
# ./configure
# make clean all
# build your fuzzer(s)
# e.g.
# $CXX $CXXFLAGS -std=c++11 -Iinclude \\
# /path/to/name_of_fuzzer.cc -o /out/name_of_fuzzer \\
# -lfuzzer /path/to/library.a $FUZZER_LDFLAGS
"""
| apache-2.0 | Python |
4efc039389b9aab3813a3815a58281c898f1acfd | Create user_exceptions.py | ahhh/SPSE,thedarkcoder/SPSE | user_exceptions.py | user_exceptions.py | #/bin/python
# Python program for playing around with user defined exceptions
class TooSmallError(Exception):
    """Raised when the guessed number is 4 or less."""
    # Deliberately shadows BaseException.message (deprecated since 2.6).
    message = "Too small! Try again ;)"
class TooBigError(Exception):
    """Raised when the guessed number is 7 or more."""
    message = "Too big! Try again ;)"
class ExactError(Exception):
    """Raised for the trap value 5; prints its taunt on construction."""
    def __init__(self):
        print "HAHAHA You hit the trap"
class unhandledError(Exception):pass
def checkNumber(num):
    """Return num if it is the magic number (6); otherwise raise.

    Raises TooSmallError for num <= 4, TooBigError for num >= 7 and
    ExactError for the trap value 5, so only 6 falls through to return.
    """
    if(num <= 4):
        raise TooSmallError
    elif(num >= 7):
        raise TooBigError
    elif(num == 5):
        raise ExactError
    return num
# Prompt repeatedly until checkNumber() accepts the input (only 6 succeeds).
while 1:
    try:
        usrInpt = int(raw_input("Enter the magic number: "))
        print checkNumber(usrInpt)
    except TooSmallError, e:
        print e.message
    except TooBigError, e:
        print e.message
    except ExactError, e:
        # ExactError defines no message of its own, so this prints the
        # inherited (empty) BaseException.message; the taunt itself is
        # printed by ExactError.__init__ — presumably intentional.
        print e.message
    else:
        # No exception was raised: the guess was correct, stop looping.
        break
| mit | Python | |
fe11a1b638b1779e51da87eaa30f1f12b2d0911c | Add a module for known data models: CDS, ECMWF, etc. | ecmwf/cfgrib | cf2cdm/datamodels.py | cf2cdm/datamodels.py | #
# Copyright 2017-2018 European Centre for Medium-Range Weather Forecasts (ECMWF).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors:
# Alessandro Amici - B-Open - https://bopen.eu
#
from __future__ import absolute_import, division, print_function, unicode_literals
# Per-coordinate translation table targeting the Climate Data Store (CDS)
# data model: for each GRIB/cfgrib coordinate name, the output name, the
# target units (where conversion applies) and the stored sort direction.
CDS = {
    'latitude': {
        'out_name': 'lat',
        'stored_direction': 'increasing',
    },
    'longitude': {
        'out_name': 'lon',
        'stored_direction': 'increasing',
    },
    'isobaricInhPa': {
        'out_name': 'plev',
        'units': 'Pa',
        'stored_direction': 'decreasing',
    },
    'isobaricInPa': {
        'out_name': 'plev',
        'units': 'Pa',
        'stored_direction': 'decreasing',
    },
    'number': {
        'out_name': 'realization',
        'stored_direction': 'increasing',
    },
    'time': {
        'out_name': 'forecast_reference_time',
        'stored_direction': 'increasing',
    },
    'valid_time': {
        'out_name': 'time',
        'stored_direction': 'increasing',
    },
    'leadtime': {
        'out_name': 'forecast_period',
        'stored_direction': 'increasing',
    },
}
# Same translation table targeting ECMWF conventions: pressure levels keep
# hPa units and all vertical coordinates are exposed as 'level'.
ECMWF = {
    'isobaricInhPa': {
        'out_name': 'level',
        'units': 'hPa',
        'stored_direction': 'decreasing',
    },
    'isobaricInPa': {
        'out_name': 'level',
        'units': 'hPa',
        'stored_direction': 'decreasing',
    },
    'hybrid': {
        'out_name': 'level',
        'stored_direction': 'increasing',
    },
}
b6303a18051f5bc050c141b72935b9a87c752a59 | move into separate module | hdm-dt-fb/rvt_model_services,hdm-dt-fb/rvt_model_services | utils/rms_paths.py | utils/rms_paths.py | import pathlib
from collections import namedtuple
# Immutable record of the rvt_model_services directory layout; defined once
# at module level so the class is not rebuilt on every get_paths() call.
RMSPaths = namedtuple(
    "RMSPaths",
    "root logs warnings commands journals com_warnings com_qc db xml_exp xml_imp",
)


def get_paths(prj_root):
    """Map the project directory structure into an RMSPaths namedtuple.

    :param prj_root: path-like pointing inside the project; the layout is
        resolved relative to the absolute parent directory of this path.
    :return: RMSPaths namedtuple of pathlib.Path entries (the original
        docstring wrongly advertised a dict).
    """
    root_dir = pathlib.Path(prj_root).absolute().parent
    return RMSPaths(root=root_dir,
                    logs=root_dir / "logs",
                    warnings=root_dir / "warnings",
                    commands=root_dir / "commands",
                    journals=root_dir / "journals",
                    com_warnings=root_dir / "commands" / "warnings",
                    com_qc=root_dir / "commands" / "qc",
                    db=root_dir / "db",
                    xml_exp=root_dir / "db" / "xml_export",
                    xml_imp=root_dir / "db" / "xml_import",
                    )
| mit | Python | |
1a1ee9eff1f04d6e40c9288e15dc3fad7515c2b8 | Make a unittest for CArray class | rezoo/chainer,keisuke-umezawa/chainer,ronekko/chainer,ktnyt/chainer,chainer/chainer,wkentaro/chainer,wkentaro/chainer,okuta/chainer,ktnyt/chainer,chainer/chainer,wkentaro/chainer,aonotas/chainer,pfnet/chainer,kashif/chainer,jnishi/chainer,hvy/chainer,okuta/chainer,niboshi/chainer,keisuke-umezawa/chainer,niboshi/chainer,tkerola/chainer,ktnyt/chainer,ysekky/chainer,niboshi/chainer,chainer/chainer,hvy/chainer,cupy/cupy,niboshi/chainer,chainer/chainer,hvy/chainer,keisuke-umezawa/chainer,ktnyt/chainer,okuta/chainer,jnishi/chainer,cupy/cupy,okuta/chainer,jnishi/chainer,delta2323/chainer,kiyukuta/chainer,wkentaro/chainer,anaruse/chainer,cupy/cupy,keisuke-umezawa/chainer,jnishi/chainer,cupy/cupy,hvy/chainer | tests/cupy_tests/core_tests/test_carray.py | tests/cupy_tests/core_tests/test_carray.py | import unittest
import cupy
from cupy import testing
class TestCArray(unittest.TestCase):
    """Exercises the device-side CArray accessors (size/shape/strides and
    flat/multi-dimensional element access) via raw ElementwiseKernels."""
    def test_size(self):
        # size() on the device must equal the host-side element count (3).
        x = cupy.arange(3).astype('i')
        y = cupy.ElementwiseKernel(
            'raw int32 x', 'int32 y', 'y = x.size()', 'test_carray_size',
        )(x, size=1)
        self.assertEqual(int(y[0]), 3)
    def test_shape(self):
        # shape()[i] must match the host-side shape (2, 3).
        x = cupy.arange(6).reshape((2, 3)).astype('i')
        y = cupy.ElementwiseKernel(
            'raw int32 x', 'int32 y', 'y = x.shape()[i]', 'test_carray_shape',
        )(x, size=2)
        testing.assert_array_equal(y, (2, 3))
    def test_strides(self):
        # Strides are expressed in bytes: a C-order (2, 3) int32 array
        # has strides (12, 4).
        x = cupy.arange(6).reshape((2, 3)).astype('i')
        y = cupy.ElementwiseKernel(
            'raw int32 x', 'int32 y', 'y = x.strides()[i]',
            'test_carray_strides',
        )(x, size=2)
        testing.assert_array_equal(y, (12, 4))
    def test_getitem_int(self):
        # x[i] with a flat index must reproduce the array element-for-element.
        x = cupy.arange(24).reshape((2, 3, 4)).astype('i')
        y = cupy.empty_like(x)
        y = cupy.ElementwiseKernel(
            'raw T x', 'int32 y', 'y = x[i]', 'test_carray_getitem_int',
        )(x, y)
        testing.assert_array_equal(y, x)
    def test_getitem_idx(self):
        # x[idx] with a 3-component index (decomposed from the flat index i)
        # must also round-trip the array unchanged.
        x = cupy.arange(24).reshape((2, 3, 4)).astype('i')
        y = cupy.empty_like(x)
        y = cupy.ElementwiseKernel(
            'raw T x', 'int32 y',
            'int idx[] = {i / 12, i / 4 % 3, i % 4}; y = x[idx]',
            'test_carray_getitem_idx',
        )(x, y)
        testing.assert_array_equal(y, x)
| mit | Python | |
2cf5f7baf115511c9dd8a8a0333a9b579455b9a3 | Add file for rule's symbol tests | PatrikValkovic/grammpy | tests/rules_tests/FromSymbolComputeTest.py | tests/rules_tests/FromSymbolComputeTest.py | #!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import main, TestCase
from grammpy import Rule
class FromSymbolComputeTest(TestCase):
pass
if __name__ == '__main__':
main()
| mit | Python | |
3f63b647d9f1e3c0ea8c83a869db8fc0028127c5 | Create 05.FibonacciNumbers.py | stoyanov7/SoftwareUniversity,stoyanov7/SoftwareUniversity,stoyanov7/SoftwareUniversity,stoyanov7/SoftwareUniversity | TechnologiesFundamentals/ProgrammingFundamentals/MethodsAndDebugging-Excercises/05.FibonacciNumbers.py | TechnologiesFundamentals/ProgrammingFundamentals/MethodsAndDebugging-Excercises/05.FibonacciNumbers.py | def fibonacci(n):
if n == 0 or n == 1:
return 1
else:
return fibonacci(n - 1) + fibonacci(n - 2)
if __name__ == '__main__':
    # Read an index from stdin and print the corresponding Fibonacci number.
    number = int(input())
    print(fibonacci(number))
| mit | Python | |
f5a0227042b64c6e8a8d85b9e0bc9cf437dc11b8 | resolve migration conflict | OpenVolunteeringPlatform/django-ovp-users,OpenVolunteeringPlatform/django-ovp-users | ovp_users/migrations/0012_merge_20170112_2144.py | ovp_users/migrations/0012_merge_20170112_2144.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-01-12 21:44
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Empty merge migration: reconciles the two parallel 0011_* leaf
    # migrations so the ovp_users migration graph has a single head again.
    dependencies = [
        ('ovp_users', '0011_auto_20170111_1633'),
        ('ovp_users', '0011_auto_20170112_1417'),
    ]
    operations = [
    ]
| agpl-3.0 | Python | |
0145c8e9c3c528fb3870e6367ee630d58786203b | Add files via upload | samkit-jain/Handwriting-Recognition,samkit-jain/Handwriting-Recognition | extract_font.py | extract_font.py | import Image, ImageFont, ImageDraw
import time
import os
from scipy import ndimage
import Image, ImageDraw
import matplotlib.image as mpimg
import numpy as np
import math
def convert_im(code, image1):
    """Normalise a glyph image to MNIST-style 28x28 and save it to disk.

    The glyph is cropped to its bounding box, shrunk to fit 20x20, pasted
    onto a white 28x28 canvas, then re-pasted so its centre of mass lands
    on the canvas centre (14, 14). Writes "<char><timestamp>.png" (plus the
    intermediate "step4.png"); returns None without writing the final file
    when the glyph is blank and has no centre of mass.
    """
    image1 = image1.crop(image1.getbbox())
    image2 = Image.new("RGB", (28, 28), (255, 255, 255))
    image1.thumbnail((20, 20), Image.ANTIALIAS)
    image2.paste(image1, (0, 0))
    image2.save("step4.png")
    # Re-read the intermediate file and build an inverted grayscale
    # (ink == high values) so the centre of mass tracks the glyph rather
    # than the white background.
    digit_image = mpimg.imread("step4.png")
    gray_digit = np.dot(digit_image[..., :3], [0.299, 0.587, 0.114]).flatten()
    gray_digit = np.round(1.0 - gray_digit, 8)
    # NaN centre of mass means the image is completely blank: skip it.
    x, y = ndimage.measurements.center_of_mass(gray_digit.reshape(28, 28))
    if math.isnan(x) or math.isnan(y):
        return
    # NOTE: the original also computed image2.transform(...) here and then
    # immediately overwrote image2, so that call was dead code and is gone,
    # along with the unused w1/h1 and datas/newdata locals.
    image2 = Image.new("RGB", (28, 28), (255, 255, 255))
    image2.paste(image1, (14 - int(round(y, 0)), 14 - int(round(x, 0))))
    image2.save(chr(code) + str(time.time()) + ".png")
def extractFunction(font_loc):
    """Render every ASCII letter of a TrueType font and hand each glyph
    image to convert_im() for MNIST-style normalisation.

    :param font_loc: filesystem path to a .ttf font file.
    """
    import string
    white = (255, 255, 255)
    font = ImageFont.truetype(font_loc, 280)
    # Scratch draw object used only to measure glyph sizes.
    draw = ImageDraw.Draw(Image.new("RGB", (280, 280), white))
    # string.ascii_letters is a-z then A-Z, i.e. the same order as the two
    # duplicated loops this replaces.
    for letter in string.ascii_letters:
        w, h = draw.textsize(letter, font=font)
        im = Image.new("RGB", (w, h), white)
        ImageDraw.Draw(im).text((0, 0), letter, font=font, fill="#000000")
        convert_im(ord(letter), im)
# Walk the local fonts directory and extract glyphs from each font file.
cur_dir = '/home/samkit/Desktop/Handwriting Recognition/fonts/'
c = 1
for name in os.listdir(cur_dir):
    # Resume point: only fonts after index 5538 are processed — presumably
    # where a previous run stopped; verify before reusing this script.
    if c > 5538:
        extractFunction(cur_dir + '/' + name)
    print c
    c += 1
| apache-2.0 | Python | |
3247ff26cbe63d875a493382c3f567f112de8b58 | Add maintenance script to fix missing menu links | DirkHoffmann/indico,mic4ael/indico,mic4ael/indico,indico/indico,mvidalgarcia/indico,ThiefMaster/indico,OmeGak/indico,DirkHoffmann/indico,mvidalgarcia/indico,pferreir/indico,OmeGak/indico,pferreir/indico,indico/indico,mvidalgarcia/indico,ThiefMaster/indico,mvidalgarcia/indico,ThiefMaster/indico,mic4ael/indico,ThiefMaster/indico,indico/indico,DirkHoffmann/indico,pferreir/indico,OmeGak/indico,mic4ael/indico,DirkHoffmann/indico,OmeGak/indico,pferreir/indico,indico/indico | bin/maintenance/update_menus.py | bin/maintenance/update_menus.py | from __future__ import division
from collections import Counter
from indico.core.db import DBMgr
from indico.util.console import conferenceHolderIterator, success
from MaKaC.conference import ConferenceHolder
from MaKaC.webinterface.displayMgr import ConfDisplayMgrRegistery
def update_menus(dbi):
links = ('collaboration', 'downloadETicket')
ch = ConferenceHolder()
cdmr = ConfDisplayMgrRegistery()
counter = Counter()
for __, event in conferenceHolderIterator(ch, deepness='event'):
menu = cdmr.getDisplayMgr(event).getMenu()
must_update = False
for linkname in links:
if menu.getLinkByName(linkname) is None:
counter[linkname] += 1
must_update = True
if must_update:
menu.updateSystemLink()
counter['updated'] += 1
if counter['updated'] % 100:
dbi.commit()
for linkname in links:
print "{} links missing: {}".format(linkname, counter[linkname])
success("Event menus updated: {}".format(counter['updated']))
if __name__ == '__main__':
    # One-off maintenance entry point: run the fix inside a single ZODB
    # request; endRequest() finalises the transaction.
    dbi = DBMgr.getInstance()
    dbi.startRequest()
    update_menus(dbi)
    dbi.endRequest()
| mit | Python | |
1324ae9a6ba9d57841df3f7b729036120eee3d47 | delete easyui | wufengwhu/my_blog,wufengwhu/my_blog,wufengwhu/my_blog | exercise/file/somecript.py | exercise/file/somecript.py | import sys
text = sys.stdin.read()
words = text.split()
wordcount = len(words)
print() | apache-2.0 | Python | |
b333deae9db90a193d274bf235dca8d6dddff5c2 | Add script for getting milestone contributors (#4396) | nishantmonu51/druid,implydata/druid,knoguchi/druid,solimant/druid,winval/druid,implydata/druid,monetate/druid,michaelschiff/druid,pjain1/druid,implydata/druid,pjain1/druid,deltaprojects/druid,praveev/druid,andy256/druid,metamx/druid,dkhwangbo/druid,Fokko/druid,michaelschiff/druid,lizhanhui/data_druid,noddi/druid,michaelschiff/druid,knoguchi/druid,deltaprojects/druid,mghosh4/druid,implydata/druid,gianm/druid,dkhwangbo/druid,noddi/druid,mghosh4/druid,praveev/druid,winval/druid,leventov/druid,gianm/druid,nishantmonu51/druid,implydata/druid,monetate/druid,knoguchi/druid,mghosh4/druid,Fokko/druid,druid-io/druid,liquidm/druid,pjain1/druid,solimant/druid,himanshug/druid,winval/druid,pjain1/druid,metamx/druid,himanshug/druid,b-slim/druid,gianm/druid,jon-wei/druid,liquidm/druid,nishantmonu51/druid,michaelschiff/druid,michaelschiff/druid,dkhwangbo/druid,jon-wei/druid,gianm/druid,taochaoqiang/druid,andy256/druid,himanshug/druid,b-slim/druid,noddi/druid,taochaoqiang/druid,solimant/druid,himanshug/druid,michaelschiff/druid,druid-io/druid,michaelschiff/druid,mghosh4/druid,jon-wei/druid,pjain1/druid,liquidm/druid,monetate/druid,jon-wei/druid,nishantmonu51/druid,nishantmonu51/druid,dkhwangbo/druid,deltaprojects/druid,gianm/druid,leventov/druid,praveev/druid,liquidm/druid,andy256/druid,metamx/druid,noddi/druid,deltaprojects/druid,knoguchi/druid,noddi/druid,winval/druid,Fokko/druid,metamx/druid,Fokko/druid,andy256/druid,Fokko/druid,druid-io/druid,Fokko/druid,b-slim/druid,liquidm/druid,nishantmonu51/druid,nishantmonu51/druid,Fokko/druid,lizhanhui/data_druid,leventov/druid,druid-io/druid,leventov/druid,monetate/druid,mghosh4/druid,praveev/druid,pjain1/druid,monetate/druid,b-slim/druid,gianm/druid,mghosh4/druid,gianm/druid,pjain1/druid,jon-wei/druid,praveev/druid,solimant/druid,lizhanhui/data_druid,winval/druid,solimant/druid,taochaoqiang/druid,lizhanhui/data_druid,mghosh4/druid,liquidm/dr
uid,dkhwangbo/druid,taochaoqiang/druid,taochaoqiang/druid,deltaprojects/druid,deltaprojects/druid,himanshug/druid,jon-wei/druid,monetate/druid,andy256/druid,implydata/druid,druid-io/druid,deltaprojects/druid,b-slim/druid,leventov/druid,metamx/druid,jon-wei/druid,monetate/druid,knoguchi/druid,lizhanhui/data_druid | docs/_bin/get-milestone-contributors.py | docs/_bin/get-milestone-contributors.py | #!/usr/bin/env python3
import json
import sys
import requests
# tested with python 3.6 and requests 2.13.0
# Fetch, de-duplicate, and print the GitHub handles of everyone who filed a
# closed issue or merged PR in the given Druid milestone.
if len(sys.argv) != 2:
    sys.stderr.write('usage: program <milestone-number>\n')
    sys.stderr.write('Provide the github milestone number, not name. (e.g., 19 instead of 0.10.1)\n')
    sys.exit(1)

milestone_num = sys.argv[1]
done = False
page_counter = 1
contributors = set()

# Get all users who created a closed issue or merged PR for a given milestone
while not done:
    resp = requests.get("https://api.github.com/repos/druid-io/druid/issues?milestone=%s&state=closed&page=%s" % (milestone_num, page_counter))
    resp.raise_for_status()  # fail loudly on a bad milestone number or API rate limiting
    # GitHub omits the Link header entirely when all results fit on one page,
    # so use .get() with a default instead of indexing to avoid a KeyError.
    pagination_link = resp.headers.get("Link", "")
    # last page doesn't have a "next"
    if "rel=\"next\"" not in pagination_link:
        done = True
    else:
        page_counter += 1
    issues = json.loads(resp.text)
    for issue in issues:
        contributor_name = issue["user"]["login"]
        contributors.add(contributor_name)

# doesn't work as-is for python2, the contributor names are "unicode" instead of "str" in python2
contributors = sorted(contributors, key=str.lower)
for contributor_name in contributors:
    print("@%s" % contributor_name)
| apache-2.0 | Python | |
28377ff5ac680cb5c97997f01b6300debe1abd80 | add missing migration. fix #118 | mgaitan/waliki,OlegGirko/waliki,mgaitan/waliki,OlegGirko/waliki,mgaitan/waliki,OlegGirko/waliki | waliki/migrations/0006_auto_20170326_2008.py | waliki/migrations/0006_auto_20170326_2008.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-26 20:08
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('waliki', '0005_auto_20141124_0020'),
]
operations = [
migrations.AlterField(
model_name='redirect',
name='status_code',
field=models.IntegerField(choices=[(302, '302 Found'), (301, '301 Moved Permanently')], default=302),
),
]
| bsd-3-clause | Python | |
648c51c41b51fe7919798de755ab1f04f40502a7 | Add Simple File Example | CybOXProject/python-cybox | examples/simple_file_instance.py | examples/simple_file_instance.py | #!/usr/bin/env python
"""Creates the CybOX content for CybOX_Simple_File_Instance.xml
"""
from cybox.common import Hash
from cybox.core import Observable, Observables
from cybox.objects.file_object import File
def main():
    """Build and print the CybOX Observables XML for a single observed file.

    Mirrors the canonical CybOX_Simple_File_Instance.xml example: one File
    object carrying name, path, extension, size and a 32-hex-char hash digest.
    """
    h = Hash("a7a0390e99406f8975a1895860f55f2f")  # 32-hex-char digest
    f = File()
    f.file_name = "bad_file24.exe"
    f.file_path = "AppData\Mozilla"
    f.file_extension = ".exe"
    f.size_in_bytes = 3282
    f.add_hash(h)

    o = Observable(f)
    o.description = "This observable specifies a specific file observation."

    # NOTE(review): Python 2 print statement; this module is not Python 3 compatible.
    print Observables(o).to_xml()

if __name__ == "__main__":
    main()
| bsd-3-clause | Python | |
47e7d9fc28214e69ed9b0a36104feed80422a5b7 | Create molecule.py | Shirui816/FTinMS | molecule.py | molecule.py | import numpy as np
def grab_iter_dual(i, bond_hash, mol_used, body_hash=None):
    """Collect all particles connected to particle *i* (iterative DFS).

    Traverses the bond graph with an explicit stack (no recursion), and
    optionally hops across rigid-body membership as well.

    :param i: index of the seed particle
    :param bond_hash: dict mapping particle index -> list of bonded neighbors
    :param mol_used: dict mapping particle index -> bool; updated in place to
        mark particles already assigned to a connected component
    :param body_hash: optional dict mapping particle index -> list of other
        particles in the same rigid body
    :return: list of particle indices in the connected component of *i*
    """
    stack = [i]
    component = []
    while stack:
        v = stack.pop()
        if mol_used[v]:
            continue
        component.append(v)
        mol_used[v] = True
        stack.extend(bond_hash[v])
        if not body_hash:
            continue
        # Particles sharing a rigid body are connected even without a bond.
        # Bug fix: default to an empty list so particles that are absent from
        # body_hash no longer raise TypeError (``.get(v)`` returned None).
        for w in body_hash.get(v, []):
            stack.append(w)
            stack.extend(bond_hash[w])
    return component
def bond_hash_dualdirect(bond, natoms):
    """
    :param bond: bond data in hoomdxml format (name, id1, id2)
    :param natoms: total number of particles
    :return: dict mapping every particle index to the list of its bonded
        neighbors, with each bond recorded in both directions; {} when the
        bond data is not a numpy array
    """
    print('Building bond hash...')
    if not isinstance(bond, np.ndarray):
        return {}
    # Start every particle with an empty neighbor list so isolated particles
    # still appear in the hash.
    neighbors = {idx: [] for idx in range(natoms)}
    for record in bond:
        first, second = record[1], record[2]
        neighbors[first].append(second)
        neighbors[second].append(first)
    print('Done.')
    return neighbors
def molecules(bond, natoms):
    """Group bonded particles into molecules.

    :param bond: bond data in hoomdxml format (name, id1, id2)
    :param natoms: total number of particles
    :return: tuple of (index matrix padded with -1, per-molecule sizes,
        bond hash); only components with more than one particle count as
        molecules
    """
    bond_hash = bond_hash_dualdirect(bond, natoms)
    mol_used = {i: False for i in range(natoms)}
    found = []
    sizes = []
    for seed in range(natoms):
        members = grab_iter_dual(seed, bond_hash, mol_used)
        if len(members) > 1:  # singleton particles are not molecules
            found.append(members)
            sizes.append(len(members))
    # Pad every row out to the largest molecule, filling with -1.
    padded = np.full((len(found), max(sizes)), -1, dtype=np.int64)
    for row, members in enumerate(found):
        padded[row][:sizes[row]] = members
    return padded, np.array(sizes, dtype=np.int64), bond_hash
| bsd-3-clause | Python | |
7cdeb30d5beefbed8d44e3b8eb5384ff05a1f09e | change logg buff, but no save | daveg999/automation_class | class4/class4_ex2.py | class4/class4_ex2.py | #!/usr/bin/env python
import paramiko
import time
from getpass import getpass
def prevent_paging(remote_conn):
    ''' stop pagination '''
    # "terminal length 0" makes the device stream output without --More-- prompts.
    remote_conn.send("\n")
    remote_conn.send("term len 0\n")
    time.sleep(1)  # crude wait for the device to process the command
    ''' clear output buffer '''
    output = remote_conn.recv(1000)
    return output
def close_connection(remote_conn):
    ''' close SSH connection '''
    # NOTE(review): this closes the shell channel passed in; the underlying
    # paramiko SSHClient is not closed here.
    remote_conn.close()
def start_config_mode(remote_conn):
    ''' get into configuration mode on Cisco gear '''
    remote_conn.send("\n")
    remote_conn.send("config t\n")
    time.sleep(1)  # give the device time to enter config mode
def exit_config_mode(remote_conn):
    ''' leave config mode '''
    remote_conn.send("\n")
    remote_conn.send("end\n")
if __name__ == '__main__':
    # Connect to a lab router, raise the logging buffer size via config mode,
    # then read the setting back and print it. Python 2 script (print statement).
    ''' set static variables '''
    device = '184.105.247.71'
    username = 'pyclass'
    password = getpass()

    ''' initialize variables '''
    remote_conn_pre = paramiko.SSHClient()
    # NOTE(review): AutoAddPolicy blindly trusts unknown host keys -- fine for
    # a lab exercise, not for production.
    remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())

    ''' connect to device '''
    remote_conn_pre.connect(device, username=username, password=password, look_for_keys=False, allow_agent=False)
    remote_conn = remote_conn_pre.invoke_shell()

    ''' go into configuration mode '''
    start_config_mode(remote_conn)

    ''' send config change commands to device '''
    remote_conn.send("\n")
    remote_conn.send("logging buffered 99999\n")
    time.sleep(1)  # sleep-based synchronization; assumes the device keeps up

    ''' exit configuration mode '''
    exit_config_mode(remote_conn)

    ''' disable paging using function '''
    prevent_paging(remote_conn)

    ''' send command to device and print results '''
    remote_conn.send("\n")
    remote_conn.send("sho run | inc buffered\n")
    time.sleep(1)
    output = remote_conn.recv(50000)
    print output

    ''' close connection using function '''
    close_connection(remote_conn)
| apache-2.0 | Python | |
dfb4b3ab679a5c8767bd7571da0fdd40850d2d84 | Add __main__.py | gustavla/crox | crox/__main__.py | crox/__main__.py | #!/usr/bin/env python
from __future__ import division, print_function, absolute_import

import os
import sys
# Put the package's parent directory at the front of sys.path so that
# "python -m crox" works from a source checkout without installation.
sys.path = [os.path.join(os.path.abspath(os.path.dirname(__file__)), "..")] + sys.path

from crox.core import main

if __name__ == '__main__':
    main()
| bsd-3-clause | Python | |
2a81e39a843e31181af455a89ad2b200b7d2f024 | Add migrations for session changes | stencila/hub,stencila/hub,stencila/hub,stencila/hub,stencila/hub | director/sessions_/migrations/0005_auto_20160316_2124.py | director/sessions_/migrations/0005_auto_20160316_2124.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-03-16 21:24
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds Session.network plus the SessionType
    lifetime/number/rank fields, and updates help text and defaults on the
    existing SessionType network and timeout fields."""

    dependencies = [
        ('sessions_', '0004_auto_20160124_0931'),
    ]

    operations = [
        migrations.AddField(
            model_name='session',
            name='network',
            field=models.IntegerField(blank=True, default=-1, help_text='Network limit for this session. -1 = unlimited', null=True),
        ),
        migrations.AddField(
            model_name='sessiontype',
            name='lifetime',
            field=models.IntegerField(default=-1, help_text='Minutes before the session is terminated. -1 = infinite.'),
        ),
        migrations.AddField(
            model_name='sessiontype',
            name='number',
            field=models.IntegerField(default=-1, help_text='Maximum number of this session type that can be running at one time. -1 = infinite.'),
        ),
        migrations.AddField(
            model_name='sessiontype',
            name='rank',
            field=models.IntegerField(default=1, help_text='Nominal rank of session type (higher ~ more powerful)'),
        ),
        migrations.AlterField(
            model_name='sessiontype',
            name='network',
            field=models.FloatField(default=0, help_text='Gigabytes (GB) of network transfer allocated to the session.'),
        ),
        migrations.AlterField(
            model_name='sessiontype',
            name='timeout',
            field=models.IntegerField(default=60, help_text='Minutes of inactivity before the session is terminated'),
        ),
    ]
| apache-2.0 | Python | |
6a5efced71110c49f8866a09b94e2a4c335fed13 | Copy ompt test into its own file. | geopm/geopm,geopm/geopm,cmcantalupo/geopm,geopm/geopm,geopm/geopm,cmcantalupo/geopm,geopm/geopm,cmcantalupo/geopm,cmcantalupo/geopm,cmcantalupo/geopm | integration/test/test_ompt.py | integration/test/test_ompt.py | #!/usr/bin/env python
#
# Copyright (c) 2015, 2016, 2017, 2018, 2019, 2020, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY LOG OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import sys
import unittest
import os
import geopm_context
import geopmpy.io
import util
class TestIntegration_ompt(unittest.TestCase):
    """Integration test for GEOPM's OMPT-based automatic region detection.

    NOTE(review): ``geopm_test_launcher`` is used below but never imported in
    this file, and ``self._agent`` / ``self._options`` / ``self._tmp_files``
    are referenced without being initialized (no setUp here) -- as written
    this test fails with NameError/AttributeError before launching anything.
    """

    @util.skip_unless_config_enable('ompt')
    def test_unmarked_ompt(self):
        name = 'test_unmarked_ompt'
        report_path = name + '.report'
        num_node = 4
        num_rank = 16
        # Run three *unmarked* benchmark regions; OMPT instrumentation should
        # still detect and report them.
        app_conf = geopmpy.io.BenchConf(name + '_app.config')
        self._tmp_files.append(app_conf.get_path())
        app_conf.append_region('stream-unmarked', 1.0)
        app_conf.append_region('dgemm-unmarked', 1.0)
        app_conf.append_region('all2all-unmarked', 1.0)
        agent_conf = geopmpy.io.AgentConf(name + '_agent.config', self._agent, self._options)
        self._tmp_files.append(agent_conf.get_path())
        launcher = geopm_test_launcher.TestLauncher(app_conf, agent_conf, report_path)
        launcher.set_num_node(num_node)
        launcher.set_num_rank(num_rank)
        launcher.run(name)

        self._output = geopmpy.io.AppOutput(report_path)
        node_names = self._output.get_node_names()
        self.assertEqual(len(node_names), num_node)

        stream_id = None
        region_names = self._output.get_region_names()
        stream_name = [key for key in region_names if key.lower().find('stream') != -1][0]
        for nn in node_names:
            stream_data = self._output.get_report_data(node_name=nn, region=stream_name)
            found = False
            for name in region_names:
                if stream_name in name: # account for numbers at end of OMPT region names
                    found = True
            self.assertTrue(found)
            # The stream region should have run exactly once per node, with
            # the same region id everywhere.
            self.assertEqual(1, stream_data['count'].item())
            if stream_id:
                self.assertEqual(stream_id, stream_data['id'].item())
            else:
                stream_id = stream_data['id'].item()
        # OMPT should have synthesized at least two regions of its own.
        ompt_regions = [key for key in region_names if key.startswith('[OMPT]')]
        self.assertLessEqual(2, len(ompt_regions))
        self.assertTrue(('MPI_Alltoall' in region_names))
        gemm_region = [key for key in region_names if key.lower().find('gemm') != -1]
        self.assertLessEqual(1, len(gemm_region))


if __name__ == '__main__':
    unittest.main()
| bsd-3-clause | Python | |
48eb8de9a10b870aa3d0f26386a67c9d8f5e47c1 | Add basic extruder implementation | ynotstartups/Wanhao,hmflash/Cura,senttech/Cura,totalretribution/Cura,Curahelper/Cura,fieldOfView/Cura,hmflash/Cura,fieldOfView/Cura,totalretribution/Cura,ynotstartups/Wanhao,Curahelper/Cura,senttech/Cura | cura/Extruder.py | cura/Extruder.py | # Copyright (c) 2016 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import re #To parse container registry names to increment the duplicates-resolving number.
import UM.Settings.ContainerRegistry #To search for nozzles, materials, etc.
import UM.Settings.ContainerStack #To create a container stack for this extruder.
class Extruder:
    ## Creates a new extruder from the specified definition container.
    #
    #  Builds a dedicated container stack for the extruder train and fills it
    #  with the definition, a nozzle, a material and a quality profile. Each
    #  is chosen from the definition's "preferred_*" metadata when that
    #  preference resolves, otherwise the first candidate found is used, and
    #  the empty container when there are no candidates at all.
    #
    #  \param definition The definition container defining this extruder.
    def __init__(self, definition):
        self._definition = definition
        container_registry = UM.Settings.ContainerRegistry.getInstance()

        # Find the nozzles that fit on this extruder. The "definitions"
        # metadata entry is a comma-separated list, so the extruder id may
        # appear first, last, in the middle, or be the only entry.
        self._nozzles = container_registry.findInstanceContainers(type = "nozzle", definitions = "*," + definition.getId() + ",*")
        self._nozzles += container_registry.findInstanceContainers(type = "nozzle", definitions = "*," + definition.getId())
        self._nozzles += container_registry.findInstanceContainers(type = "nozzle", definitions = definition.getId() + ",*")
        self._nozzles += container_registry.findInstanceContainers(type = "nozzle", definitions = definition.getId())

        # Create a container stack for this extruder.
        # Bug fix: _uniqueName() expects a container (it calls getName()),
        # but was being passed the id string, which would raise AttributeError.
        self._container_stack = UM.Settings.ContainerStack(self._uniqueName(self._definition))
        self._container_stack.addMetaDataEntry("type", "extruder_train")
        self._container_stack.addContainer(self._definition)

        # Find the nozzle to use for this extruder: first add any extruder,
        # then overwrite with the preference if the preference is valid.
        self._nozzle = container_registry.getEmptyInstanceContainer()
        if len(self._nozzles) >= 1:
            self._nozzle = self._nozzles[0]
        preferred_nozzle_id = self._definition.getMetaDataEntry("preferred_nozzle")
        if preferred_nozzle_id:
            for nozzle in self._nozzles:
                if nozzle.getId() == preferred_nozzle_id:
                    self._nozzle = nozzle
                    break
        self._container_stack.addContainer(self._nozzle)

        # Find a material to use for this nozzle.
        self._material = container_registry.getEmptyInstanceContainer()
        all_materials = container_registry.findInstanceContainers(type = "material")
        if len(all_materials) >= 1:
            self._material = all_materials[0]
        preferred_material_id = self._definition.getMetaDataEntry("preferred_material")
        if preferred_material_id:
            preferred_material = container_registry.findInstanceContainers(type = "material", id = preferred_material_id.lower())
            if len(preferred_material) >= 1:
                self._material = preferred_material[0]
        self._container_stack.addContainer(self._material)

        # Find a quality to use for this extruder.
        self._quality = container_registry.getEmptyInstanceContainer()
        all_qualities = container_registry.findInstanceContainers(type = "quality")
        if len(all_qualities) >= 1:
            self._quality = all_qualities[0]
        preferred_quality_id = self._definition.getMetaDataEntry("preferred_quality")
        if preferred_quality_id:
            preferred_quality = container_registry.findInstanceContainers(type = "quality", id = preferred_quality_id.lower())
            if len(preferred_quality) >= 1:
                self._quality = preferred_quality[0]
        self._container_stack.addContainer(self._quality)

    ## Finds a unique name for an extruder stack.
    #
    #  \param extruder An extruder definition to design a name for.
    #  \return A name for an extruder stack that is unique and reasonably
    #          human-readable.
    def _uniqueName(self, extruder):
        container_registry = UM.Settings.ContainerRegistry.getInstance()

        name = extruder.getName().strip()
        # Strip an existing " #<number>" suffix so suffixes don't stack up.
        # Bug fix: accept multi-digit numbers ("#\d$" could not strip the
        # "#10" and higher suffixes generated by the loop below).
        num_check = re.compile(r"(.*?)\s*#\d+$").match(name)
        if num_check:  # There is a number in the name.
            name = num_check.group(1)  # Filter out the number.
        if name == "":  # Wait, that deleted everything!
            name = "Extruder"

        unique_name = name
        i = 1
        while container_registry.findContainers(id = unique_name) or container_registry.findContainers(name = unique_name):  # A container already has this name.
            i += 1  # Try next numbering.
            unique_name = "%s #%d" % (name, i)  # Fill name like this: "Extruder #2".
        return unique_name
43d2045611320bbe78c1167e6505135425bf9499 | Add customfilters.py to theme | osuosl/dougfir-pelican-theme,osuosl/dougfir-pelican-theme,osuosl/dougfir-pelican-theme,osuosl/dougfir-pelican-theme | customfilters.py | customfilters.py | #!/usr/bin/env python
import bs4
def menu_filter(pelican_pages, direct_templates):
    """
    Jinja filter for Pelican page object list

    Structures pages into a three-level menu that can be parsed by Jinja2
    templating. Reads page metadata of the form:

    :menu: <parent>, <name>, <weight>; <parent2>, <name2>, <weight2>; ...

    where the top-level menu items have a parent name 'top'.
    """
    entries = []

    # Collect one flat entry per menu location declared on a page.
    for page in pelican_pages:
        if not hasattr(page, 'menu'):
            continue
        for spec in page.menu.split(';'):
            parts = spec.split(',')
            entries.append({
                'parent': parts[0].strip(),
                'name': parts[1].strip(),
                'weight': int(parts[2]),
                'link': "/{0}".format(page.slug),
                'children': [],
            })

    # Direct templates arrive as ready-made entry dicts; copy them in as-is.
    for template in direct_templates:
        entries.append(template.copy())

    # Stable sort by weight keeps siblings in a deterministic order.
    entries.sort(key=lambda entry: entry['weight'])

    # Level 1: entries whose parent is the special name 'top'.
    menu = [entry.copy() for entry in entries if entry['parent'] == 'top']

    # Level 2: attach children to each top-level item.
    for top_item in menu:
        for entry in entries:
            if entry['parent'] == top_item['name']:
                top_item['children'].append(entry.copy())

    # Level 3: attach grandchildren to each second-level item.
    for top_item in menu:
        for child in top_item['children']:
            for entry in entries:
                if entry['parent'] == child['name']:
                    child['children'].append(entry.copy())

    return menu
def close_html_tags(html_string):
    """Closes any html tags in html_string that have been opened but have not
    been closed.

    Note: returns a BeautifulSoup object (not a str); rendering it yields the
    re-serialized, balanced markup.
    """
    soup = bs4.BeautifulSoup(html_string, "html.parser")
    return soup
| apache-2.0 | Python | |
9968aad924fee0c80b895bc24e1452f5c525ee9e | add invite sender | webrecorder/webrecorder,webrecorder/webrecorder,webrecorder/webrecorder,webrecorder/webrecorder | invite.py | invite.py | from auth import init_manager_for_invite
import sys
def main():
m = init_manager_for_invite()
res = m.send_invite(sys.argv[1],
email_template='templates/emailinvite.html',
host='http://localhost:8088')
if res:
print('Success')
else:
print('Fail')
if __name__ == "__main__":
main()
| apache-2.0 | Python | |
1780ea557d8b01804bf716887d820b340eff3931 | Create split-array-into-fibonacci-sequence.py | kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode | Python/split-array-into-fibonacci-sequence.py | Python/split-array-into-fibonacci-sequence.py | # Time: O(n^3)
# Space: O(n)
# Given a string S of digits, such as S = "123456579",
# we can split it into a Fibonacci-like sequence [123, 456, 579].
#
# Formally, a Fibonacci-like sequence is a list F of non-negative
# integers such that:
#
# 0 <= F[i] <= 2^31 - 1,
# (that is, each integer fits a 32-bit signed integer type);
# F.length >= 3;
# and F[i] + F[i+1] = F[i+2] for all 0 <= i < F.length - 2.
# Also, note that when splitting the string into pieces,
# each piece must not have extra leading zeroes,
# except if the piece is the number 0 itself.
#
# Return any Fibonacci-like sequence split from S,
# or return [] if it cannot be done.
#
# Example 1:
#
# Input: "123456579"
# Output: [123,456,579]
# Example 2:
#
# Input: "11235813"
# Output: [1,1,2,3,5,8,13]
# Example 3:
#
# Input: "112358130"
# Output: []
# Explanation: The task is impossible.
# Example 4:
#
# Input: "0123"
# Output: []
# Explanation: Leading zeroes are not allowed, so "01", "2", "3" is not valid.
# Example 5:
#
# Input: "1101111"
# Output: [110, 1, 111]
# Explanation: The output [11, 0, 11, 11] would also be accepted.
#
# Note:
# - 1 <= S.length <= 200
# - S contains only digits.
try:
    xrange          # Python 2
except NameError:
    xrange = range  # Python 3


class Solution(object):
    def splitIntoFibonacci(self, S):
        """
        :type S: str
        :rtype: List[int]

        Try every pair of first two terms; the rest of the sequence is then
        forced, so greedily match each successive sum against the remainder
        of S. Time O(n^3), space O(n).
        """
        # Length of the decimal rendering of x if S[k:] starts with it.
        # Bug fix: enforce the no-leading-zero rule -- a term starting with
        # '0' is only valid when the term is exactly "0" (the original
        # accepted splits like "1102" -> [1, 1, "02"]).
        def startswith(S, k, x):
            if S[k] == '0':
                return 1 if x == 0 else 0
            y = 0
            for i in xrange(k, len(S)):
                y = 10*y + int(S[i])
                if y == x:
                    return i-k+1
                elif y > x:
                    break
            return 0

        MAX_INT = 2**31-1
        a = 0
        for i in xrange(len(S)-2):
            a = 10*a + int(S[i])
            if a > MAX_INT:  # every term must fit a 32-bit signed int
                break
            b = 0
            for j in xrange(i+1, len(S)-1):
                b = 10*b + int(S[j])
                if b > MAX_INT:
                    break
                fib = [a, b]
                k = j+1
                while k < len(S):
                    if fib[-2] > MAX_INT-fib[-1]:
                        break  # next term would overflow 32 bits
                    c = fib[-2]+fib[-1]
                    length = startswith(S, k, c)
                    if length == 0:
                        break  # S does not continue with a+b: dead end
                    fib.append(c)
                    k += length
                else:
                    # Consumed all of S; the while body ran at least once,
                    # so fib has the required >= 3 terms.
                    return fib
                if b == 0:
                    break  # "0" alone is fine, "0x" would be a leading zero
            if a == 0:
                break  # same leading-zero rule for the first term
        return []
| mit | Python | |
5d789bc0829f46fd3bf86c923e2f53280c7ee530 | add a library for dealing fastq files | jason-weirather/Au-public,jason-weirather/Au-public,jason-weirather/Au-public,jason-weirather/Au-public | iron/pythonlib/FASTQBasics.py | iron/pythonlib/FASTQBasics.py | import re, sys
import subprocess
import FileBasics
import math
class QualityFormatConverter:
    """Map observed FASTQ quality characters to Q scores and error probabilities.

    Only Sanger ('S', Phred+33, Q in 0..40, ord 33..73) is supported; any
    other type terminates the program.
    """

    def __init__(self, type):
        # 'type' kept as the parameter name for caller compatibility.
        self.observed_to_Q = {}
        self.observed_to_probability = {}
        self.type = type
        if type != 'S':
            sys.stderr.write("Error: unsupported quality type "+type+"\n")
            sys.exit()
        for q in range(0, 41):
            code = q + 33
            self.observed_to_Q[code] = q
            # P(error) = 10^(-Q/10)
            self.observed_to_probability[code] = math.pow(10, float(q)/-10)

    def call_observed_ascii_probability(self, ascii_char):
        """Return the error probability for one quality character, or exit
        if the character is outside the supported range."""
        code = ord(ascii_char)
        if code not in self.observed_to_probability:
            sys.stderr.write("Error: Looking for a character: '" + ascii_char + "' not present in type: "+self.type+"\n")
            sys.exit()
        return self.observed_to_probability[code]
class QualityFormatDetector:
    """Sniff the quality-score encoding of a FASTQ file from observed characters."""

    def __init__(self):
        self.type = 'unknown'
        self.about = 'unknown'
        self.max_read_count = 10000    # cap on reads sampled per file
        self.observed_qualities = {}   # ord(quality char) -> occurrence count

    # returns a type that can be used in a quality format converter
    def call_type(self):
        """Decide the encoding from the tallied characters.

        Ranges of ord() values discriminate the known FASTQ encodings; a
        range only "counts" once more than 2 characters fall inside it, so
        a couple of stray characters cannot flip the decision. Unsupported
        encodings terminate the program via sys.exit().
        """
        truecount_76_104 = 0
        for i in range(76, 105):
            if i in self.observed_qualities:
                truecount_76_104 += self.observed_qualities[i]
        truecount_59_63 = 0
        for i in range(59, 64):
            if i in self.observed_qualities:
                truecount_59_63 += self.observed_qualities[i]
        truecount_64_66 = 0
        for i in range(64, 67):
            if i in self.observed_qualities:
                truecount_64_66 += self.observed_qualities[i]
        truecount_67_72 = 0
        for i in range(67, 73):
            if i in self.observed_qualities:
                truecount_67_72 += self.observed_qualities[i]
        truecount_33_58 = 0
        for i in range(33, 59):
            if i in self.observed_qualities:
                truecount_33_58 += self.observed_qualities[i]
        # Bug fix: the original tested "truecount_74 in self.observed_qualities"
        # (i.e. looked up key 0), so the count for ord 74 ('J') was never
        # picked up and the Illumina 1.8+ check below could never trigger.
        truecount_74 = 0
        if 74 in self.observed_qualities:
            truecount_74 = self.observed_qualities[74]
        if truecount_74 > 2 and truecount_33_58 > 2:
            sys.stderr.write("Error: Unprogrammed 'L' Illumina 1.8+ Phred+33, (0,41) ranges ord 33 to 74\n")
            sys.exit()
            return
        if truecount_33_58 > 2:
            self.about = "'S' Sanger Phred+33, (0,40) ranges ord 33 to 73"
            self.type = 'S'
            return self.type
        if truecount_59_63 > 2 and truecount_76_104 > 2:
            sys.stderr.write("Error: Unprogrammed 'X' Solexa Solexa+64, (-5,40) ranges ord 59 to 104\n")
            sys.exit()
            return
        if truecount_64_66 > 2 and truecount_76_104 > 2:
            sys.stderr.write("Error: Unprogrammed 'I' Illumina 1.3+ Phred+64, (0,40) ranges ord 64 to 104\n")
            sys.exit()
            return
        if truecount_67_72 > 2 and truecount_76_104 > 2:
            # Bug fix: removed a stray Python 2 debug statement (print 'J')
            # that also broke importing this module under Python 3.
            sys.stderr.write("Error: Unprogrammed 'J' Illumina 1.5+ Phred+64, (3,40) ranges ord 67 to 104\n")
            sys.exit()
            return
        sys.stderr.write("Error: unable to choose fastq type\n")
        sys.exit()

    def set_max_read_count(self, read_count):
        """Limit how many reads read_fastq_file() samples."""
        self.max_read_count = read_count

    def read_fastq_file(self, filename):
        """Tally quality characters from up to max_read_count reads of a
        FASTQ file (4 lines per read; line 4 holds the quality string).

        NOTE(review): relies on the project-local FileBasics module.
        """
        gfr = FileBasics.GenericFileReader(filename)
        linecount = 0
        while True and linecount < self.max_read_count:
            line1 = gfr.readline().rstrip()
            if not line1: break
            line2 = gfr.readline().rstrip()
            if not line2: break
            line3 = gfr.readline().rstrip()
            if not line3: break
            line4 = gfr.readline().rstrip()
            if not line4: break
            self.record_observation(line4)
            linecount += 1
        gfr.close()

    def record_observation(self, line):
        """Add every character of *line* (a quality string) to the tally."""
        chars = list(line)
        for c in chars:
            deci = ord(c)
            if deci not in self.observed_qualities:
                self.observed_qualities[deci] = 0
            self.observed_qualities[deci] += 1
| apache-2.0 | Python | |
b7fd9a4bbb78e99a939322fc8cd373a6b16dd9a4 | Update and rename Python to Python/Applications/TSP/tsp.py | krzyszti/my_projects,krzyszti/my_projects,krzyszti/my_projects,krzyszti/my_projects | Python/Applications/TSP/tsp.py | Python/Applications/TSP/tsp.py | """
This application will solve TSP problem with simple evolution algorithm.
The result will be shown in pyglet window.
"""
import pyglet
import random
import sys
import math
class Evolution(object):
    """Evolutionary TSP solver operating on lists of (x, y) city coordinates.

    self.best is a two-element list: [best_tour, best_tour_length].
    """

    def __init__(self):
        self.file_path = 'example.dat'
        self.base = []           # city list as read from the data file
        self.iterations = 100    # generations per calculate() call
        self.best = []           # [tour, length] of the best tour seen so far

    def open_file(self):
        """Read "x y" city coordinates from file_path and seed base/best."""
        cities = []
        with open(self.file_path) as file_handle:
            for line in file_handle:
                city = line.split(' ')
                cities.append([int(city[0]), int(city[1])])
        self.base = cities
        self.best.append(cities)
        self.best.append(self.path(cities))

    def cities_permutations(self):
        """Return a random shuffle of the base cities (base is untouched)."""
        cities = []
        cities.extend(self.base)
        random.shuffle(cities)
        return cities

    @staticmethod
    def mutation(lst):
        """Swap two distinct random cities in every tour (in place)."""
        result = lst
        for element in result:
            (a, b) = (1, 1)
            while a == b:  # keep drawing until two different positions
                (a, b) = (random.randint(0, len(element) - 1), random.randint(0, len(element) - 1))
            (element[a], element[b]) = (element[b], element[a])
        return result

    @staticmethod
    def crossover(lst):
        """Single-point crossover on consecutive pairs of tours.

        NOTE: with an odd population the last tour is dropped, and swapped
        tails may duplicate/lose cities -- kept as-is from the original design.
        """
        result = []
        for i in range(0, len(lst) - 1, 2):
            one = lst[i]
            two = lst[i + 1]
            point = random.randint(0, len(one) - 1)
            (one[point:], two[point:]) = (two[point:], one[point:])
            result.append(one)
            result.append(two)
        return result

    def selection(self, lst):
        """Binary tournament selection favouring SHORTER tours, plus elitism.

        Bug fix: the original kept the tour with the *larger* path length,
        i.e. it selected for the worst tours, while TSP minimises length.
        The current best tour is re-injected at the end (elitism).
        """
        result = []
        for i in range(0, len(lst)):
            (a, b) = (random.randint(0, len(lst) - 1), random.randint(0, len(lst) - 1))
            if self.path(lst[a]) < self.path(lst[b]):
                result.append(lst[a])
            else:
                result.append(lst[b])
        result.append(self.best[0])
        return result

    def calculate(self):
        """Run the evolutionary loop, updating self.best each generation.

        Bug fix: the original never called choose_best, so self.best (and
        therefore the tour shown in the window) never improved.
        """
        pop = []
        population_limit = random.randint(200, 1000)
        for i in range(0, population_limit):
            pop.append(self.cities_permutations())
        for i in range(0, self.iterations):
            pop = self.mutation(pop)
            pop = self.crossover(pop)
            pop = self.selection(pop)
            self.choose_best(pop)

    def choose_best(self, lst):
        """Remember the shortest tour in *lst* if it beats the current best."""
        for element in lst:
            if self.best[1] > self.path(element):
                self.best = [element, self.path(element)]

    def path(self, lst):
        """Total closed-tour length: consecutive legs plus the return leg."""
        result = 0
        i = 0
        while i < len(lst) - 1:
            result += self.line(lst[i], lst[i + 1])
            i += 1
        result += self.line(lst[-1], lst[0])
        return result

    @staticmethod
    def line(xy1, xy2):
        """Distance between two points (exact integer fast paths when the
        segment is axis-aligned, Euclidean otherwise)."""
        (x1, y1) = xy1
        (x2, y2) = xy2
        if x1 == x2:
            return abs(y1 - y2)
        elif y1 == y2:
            return abs(x1 - x2)
        else:
            return math.sqrt(pow((x2 - x1), 2) + pow((y2 - y1), 2))
class Window(pyglet.window.Window):
    """Pyglet window that draws the current best tour.

    Keys: C runs the evolutionary search, Q quits.
    """

    def __init__(self):
        super(Window, self).__init__()
        self.tsp = Evolution()
        self.tsp.open_file()

    def on_draw(self):
        self.clear()
        self.print_path()

    def on_key_press(self, symbol, modifiers):
        if symbol == pyglet.window.key.C:
            # NOTE(review): calculate() blocks the pyglet event loop until done.
            print("Please wait, calculation in progress")
            self.tsp.calculate()
            print("Calculation is finished")
        if symbol == pyglet.window.key.Q:
            self.close()

    @staticmethod
    def line(xy1, xy2):
        # Draw one green line segment between two integer points.
        (x1, y1) = xy1
        (x2, y2) = xy2
        pyglet.graphics.draw(2, pyglet.gl.GL_LINES, ('v2i', (x1, y1, x2, y2)), ('c3B', (0, 255, 0, 0, 255, 0)))

    def print_path(self):
        # Draw the best tour as a closed polyline (last city back to first).
        path = self.tsp.best[0]
        for i in range(0, len(path)-1):
            self.line(path[i], path[i+1])
        self.line(path[-1], path[0])
def main():
    """Create the window and hand control to pyglet's event loop."""
    Window()
    sys.exit(pyglet.app.run())

if __name__ == '__main__':
    main()
| mit | Python | |
e16043547bb43476f56195d5652881ebeb684e57 | Add new create_realm management command. | kou/zulip,zulip/zulip,kou/zulip,andersk/zulip,rht/zulip,rht/zulip,kou/zulip,zulip/zulip,zulip/zulip,zulip/zulip,kou/zulip,rht/zulip,andersk/zulip,andersk/zulip,zulip/zulip,rht/zulip,andersk/zulip,andersk/zulip,kou/zulip,rht/zulip,zulip/zulip,rht/zulip,rht/zulip,zulip/zulip,kou/zulip,kou/zulip,andersk/zulip,andersk/zulip | zerver/management/commands/create_realm.py | zerver/management/commands/create_realm.py | import argparse
from typing import Any
from django.core.management.base import CommandError
from zerver.lib.actions import do_create_realm, do_create_user
from zerver.lib.management import ZulipBaseCommand
from zerver.models import UserProfile
class Command(ZulipBaseCommand):
    help = """\
Create a new Zulip organization (realm) via the command line.
We recommend `./manage.py generate_realm_creation_link` for most
users, for several reasons:
* Has a more user-friendly web flow for account creation.
* Manages passwords in a more natural way.
* Automatically logs the user in during account creation.
This management command is available as an alternative for situations
where one wants to script the realm creation process.
Since every Zulip realm must have an owner, this command creates the
initial organization owner user for the new realm, using the same
workflow as `./manage.py create_user`.
"""

    def add_arguments(self, parser: argparse.ArgumentParser) -> None:
        """Register realm-specific arguments plus the shared create-user
        arguments (email/password/full name) from ZulipBaseCommand."""
        parser.add_argument("realm_name", help="Name for the new organization")
        parser.add_argument(
            "--string-id",
            help="Subdomain for the new organization. Empty if root domain.",
            default="",
        )
        self.add_create_user_args(parser)

    def handle(self, *args: Any, **options: str) -> None:
        """Create the realm, then its initial owner user."""
        realm_name = options["realm_name"]
        string_id = options["string_id"]
        create_user_params = self.get_create_user_params(options)

        try:
            realm = do_create_realm(string_id=string_id, name=realm_name)
        except AssertionError as e:
            # do_create_realm validates its inputs via assertions; surface
            # those as a clean management-command error.
            raise CommandError(str(e))

        do_create_user(
            create_user_params.email,
            create_user_params.password,
            realm,
            create_user_params.full_name,
            # Explicitly set tos_version=None. For servers that
            # have configured Terms of Service, this means that
            # users created via this mechanism will be prompted to
            # accept the Terms of Service on first login.
            role=UserProfile.ROLE_REALM_OWNER,
            realm_creation=True,
            tos_version=None,
            acting_user=None,
        )
| apache-2.0 | Python | |
059a29aede4fb1b4db914131b0aad9ca581201fe | Create followers.py | ppaulojr/atp-followers | followers.py | followers.py | from twython import Twython
import datetime
print datetime.datetime.now()
players = ["fabiofogna",
"richardgasquet1",
"JohnIsner",
"keinishikori",
"andy_murray",
"milosraonic",
"GrigorDimitrov",
"delpotrojuan",
"DavidFerrer87",
"tomasberdych",
"stanwawrinka",
"RafaelNadal",
"rogerfederer",
"DjokerNole"]
APIKEY = "REGISTER_AN_API_KEY"
APISEC = "GRAB_YOUR_API_SECRET"
twitter = Twython(APIKEY,APISEC)
for p in players:
followers = twitter.show_user(screen_name = p)
print (p,followers['followers_count'])
| mit | Python | |
0c775a1bb685ff5a77f7f4fb3bbde58d0f1f4334 | add missing catalogo app migration | ibamacsr/indicar_process,ibamacsr/indicar-process,ibamacsr/indicar-process,ibamacsr/indicar_process,ibamacsr/indicar_process | indicarprocess/catalogo/migrations/0002_catalogorapideye.py | indicarprocess/catalogo/migrations/0002_catalogorapideye.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.contrib.gis.db.models.fields
class Migration(migrations.Migration):
dependencies = [
('catalogo', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='CatalogoRapidEye',
fields=[
('gid', models.AutoField(serialize=False, primary_key=True)),
('image', models.CharField(max_length=80, unique=True)),
('path', models.CharField(max_length=120)),
('tms', models.CharField(max_length=254)),
('quicklook', models.CharField(max_length=150)),
('data', models.DateField()),
('geom', django.contrib.gis.db.models.fields.MultiPolygonField(srid=4674, null=True, blank=True)),
('nuvens', models.FloatField()),
],
options={
'db_table': 'catalogo_rapideye',
},
),
]
| agpl-3.0 | Python | |
c994cfda927353c195048b7c5ac42ed557d8ac0a | Create setup.py | bestupefy/openstack-plugin | swift/setup.py | swift/setup.py | apache-2.0 | Python | ||
ffa99e9ae77a2b651e29dcdbf6abf7a2c9c142a6 | Create DP_Coin_Change_Problem_ctci.py | jcchuks/Hackerrank,jcchuks/Hackerrank,jcchuks/MiscCodes,jcchuks/MiscCodes,jcchuks/MiscCodes | DP_Coin_Change_Problem_ctci.py | DP_Coin_Change_Problem_ctci.py | #!/bin/python
'''Given a number of dollars, N, and a list of dollar values for C = {C0,C1,C2,...CM} distinct coins, M, find
and print the number of different ways you can make change for N dollars if each coin is available in an infinite quantity.'''
import sys
def make_change(coins, n):
    """Return how many distinct ways the amount n can be formed from coins."""
    return dfs(coins, n)
def dfs(coins, n):
    """Count the combinations of coin values summing to n (bottom-up DP).

    Equivalent to the classic 2-D coin-change table, but kept in a single
    1-D array: ways[j] is the number of ways to form amount j using the
    coins processed so far, giving O(n) space instead of O(len(coins)*n).
    """
    ways = [0] * (n + 1)
    ways[0] = 1  # exactly one way to make 0: take no coins
    for coin in coins:
        # Sweep amounts upward so each coin may be reused unlimited times.
        for j in range(coin, n + 1):
            ways[j] += ways[j - coin]
    return ways[n]
# Read "N M" from stdin: target amount and number of distinct coins
# (Python 2 I/O: raw_input / print statement).
n,m = raw_input().strip().split(' ')
n,m = [int(n),int(m)]
# Read the m coin values; m itself is only implied by the input format.
coins = map(int,raw_input().strip().split(' '))
print make_change(coins, n)
| mit | Python | |
67ee018391df9682e5fe96dd0beba687517f1bf1 | Create autoclearbuf.py | TingPing/plugins,TingPing/plugins | HexChat/autoclearbuf.py | HexChat/autoclearbuf.py | import hexchat
__module_name__ = 'autoclearbuf'
__module_author__ = 'fladd & TingPing'
__module_version__ = '1.0'
__module_description__ = 'Auto clear buffer of closed queries with znc'
# TODO:
# Don't run on non-znc networks
# Actually check for channel type (currently crashes)
# Query-tab names we've asked ZNC to clear; privmsg_cb consumes an entry
# to suppress ZNC's confirmation echo exactly once per request.
recently_cleared = []
def privmsg_cb(word, word_eol, userdata):
    """Swallow ZNC's '*status' confirmation for clearbuffer commands we sent.

    word/word_eol are HexChat's split raw server line; word[0] is the
    prefix and word_eol[4] the start of the message body here -- the exact
    offsets assume ZNC's 'buffers matching ... [nick] ...' reply format.
    """
    # ZNC helpfully tells us what we just did.. so lets hide that spam
    if word[0] == ':*status!znc@znc.in' and word_eol[4].startswith('buffers matching'):
        cleared = word[6][1:-1] # [nick]  -- strip the surrounding brackets
        if cleared in recently_cleared:
            recently_cleared.remove(cleared)
            # EAT_ALL: hide the confirmation from both HexChat and plugins.
            return hexchat.EAT_ALL
def close_cb(word, word_eol, userdata):
    """When a tab is closed, ask ZNC to clear that query's playback buffer."""
    target = hexchat.get_info('channel')
    # Skip ZNC control windows ('*status' etc.) and regular channels.
    if target[0] in ('*', '#'):
        return
    recently_cleared.append(target)
    hexchat.command('znc clearbuffer {}'.format(target))
# Wire up the callbacks: closing a tab triggers the clear; incoming
# PRIVMSGs are filtered for ZNC's confirmation echo.
hexchat.hook_print('Close Context', close_cb)
hexchat.hook_server('PRIVMSG', privmsg_cb)
| mit | Python | |
fdc1145b91175673552d21abbfb7ba41c034c426 | Add blaze css | miyakogi/wdom,miyakogi/wdom,miyakogi/wdom | wdom/themes/blaze.py | wdom/themes/blaze.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from wdom.tag import NewTagClass as NewTag
from wdom.tag import *
# Stylesheet loaded by wdom for this theme (Blaze CSS from a CDN).
css_files = [
    '//cdn.jsdelivr.net/blazecss/latest/blaze.min.css',
]

# Rebind wdom's generic tags to subclasses that carry Blaze CSS class
# names, so the standard tag API renders Blaze-styled elements.

# Buttons
Button = NewTag('Button', bases=Button, class_='button')
DefaultButton = NewTag('DefaultButton', 'button', Button, class_='button--default')
PrimaryButton = NewTag('PrimaryButton', 'button', Button, class_='button--primary')
SuccessButton = NewTag('SuccessButton', 'button', Button, class_='button--success')
# NOTE(review): Info reuses 'primary' and Warning reuses 'secondary' --
# Blaze has no dedicated info/warning variants; confirm this is intended.
InfoButton = NewTag('InfoButton', 'button', Button, class_='button--primary')
WarningButton = NewTag('WarningButton', 'button', Button, class_='button--secondary')
DangerButton = NewTag('DangerButton', 'button', Button, class_='button--error')
LinkButton = NewTag('LinkButton', 'button', Button)

# Form fields
Input = NewTag('Input', 'input', Input, class_='field')
TextInput = NewTag('TextInput', 'input', TextInput, class_='field')
Textarea = NewTag('Textarea', 'textarea', Textarea, class_='field')
Select = NewTag('Select', 'select', Select, class_='choice')

# Lists
Ul = NewTag('Ul', 'ul', Ul, class_='list')
Ol = NewTag('Ol', 'ol', Ol, class_='list--ordered')
Li = NewTag('Li', 'li', Li, class_='list__item')

# Tables
Table = NewTag('Table', 'table', Table, class_='table')
Tr = NewTag('Tr', 'tr', Tr, class_='table__row')
Th = NewTag('Th', 'th', Th, class_='table__cell')
Td = NewTag('Td', 'td', Td, class_='table__cell')

# Headings rendered as styled divs
H1 = NewTag('H1', 'div', H1, class_='heading heading--super')
H2 = NewTag('H2', 'div', H2, class_='heading heading--xlarge')
H3 = NewTag('H3', 'div', H3, class_='heading heading--large')
H4 = NewTag('H4', 'div', H4, class_='heading heading--medium')
H5 = NewTag('H5', 'div', H5, class_='heading heading--small')
H6 = NewTag('H6', 'div', H6, class_='heading heading--xsmall')
| mit | Python | |
3caa3f617e493ec0e6226e2553ff785f53aa849d | Add monitor scripts to check pods status on compute node | blrm/openshift-tools,blrm/openshift-tools,blrm/openshift-tools,blrm/openshift-tools,blrm/openshift-tools,blrm/openshift-tools | scripts/monitoring/cron-send-node-pods-status.py | scripts/monitoring/cron-send-node-pods-status.py | #!/usr/bin/env python
""" Check all the customer pods status on every compute node, send status code "1" if all pods on a compute node are not running status """
# Adding the ignore because it does not like the naming of the script
# to be different than the class name
# pylint: disable=invalid-name
# pylint: disable=wrong-import-position
# pylint: disable=broad-except
# pylint: disable=line-too-long
import argparse
import time
import logging
logging.basicConfig(
format='%(asctime)s - %(relativeCreated)6d - %(levelname)-8s - %(message)s',
)
logger = logging.getLogger()
logger.setLevel(logging.WARN)
# Our jenkins server does not include these rpms.
# In the future we might move this to a container where these
# libs might exist
#pylint: disable=import-error
from openshift_tools.monitoring.ocutil import OCUtil
from openshift_tools.monitoring.metric_sender import MetricSender
# Module-level client shared by every oc invocation in this script.
ocutil = OCUtil()
def runOCcmd_yaml(cmd, base_cmd='oc'):
    """Run an oc command through ocutil, log its duration, return parsed YAML."""
    logger.info("%s %s", base_cmd, cmd)
    started = time.time()
    parsed = ocutil.run_user_cmd_yaml(cmd, base_cmd=base_cmd)
    logger.info("oc command took %s seconds", str(time.time() - started))
    return parsed
def parse_args():
    """Parse CLI arguments and raise the logger level for -v / -vv."""
    parser = argparse.ArgumentParser(description='Check all the nodes pods Status')
    parser.add_argument('-s', '--skip_namespaces', nargs='+', required=True,
                        help='namespaces exception list that we should avoid to check')
    parser.add_argument('-v', '--verbose', action='count', default=0,
                        help='verbosity level, specify multiple')
    args = parser.parse_args()
    # One -v => INFO, two or more => DEBUG; default stays at WARN.
    if args.verbose > 1:
        logger.setLevel(logging.DEBUG)
    elif args.verbose > 0:
        logger.setLevel(logging.INFO)
    return args
def check_node_pods_status(nsList):
    """Return 1 if any node has *no* running pods (skipping nsList), else 0.

    nsList -- namespaces excluded from both pod queries via field selectors.
    """
    result_status = 0
    # Field-selector fragment excluding every skipped namespace;
    # starts with ',' so it can be appended to an existing selector.
    nsFilter = ""
    for ns in nsList:
        nsFilter += ",metadata.namespace!="+ns
    node_info = runOCcmd_yaml("get node ")
    for item in node_info['items']:
        nodeName = item['metadata']['name']
        logger.info("Checking node: %s", item['metadata']['name'])
        # Pods on this node that are NOT Running vs. all pods on this node.
        node_not_running_pods = runOCcmd_yaml("get pods --all-namespaces --field-selector='spec.nodeName="+nodeName+",status.phase!=Running"+nsFilter+"'")
        node_pods = runOCcmd_yaml("get pods --all-namespaces --field-selector='spec.nodeName="+nodeName+nsFilter+"'")
        # Equal counts => every non-skipped pod on the node is not running.
        # NOTE(review): a node with zero matching pods also satisfies
        # 0 == 0 and gets flagged -- confirm that is intended.
        if len(node_not_running_pods['items']) == len(node_pods['items']):
            result_status = 1
            logger.warn("Node: %s, all pods are not running", item['metadata']['name'])
    return result_status
def main():
    """ check all the node pods status and report whether any node has a problem """
    args = parse_args()
    logger.debug("args: ")
    logger.debug(args)
    nsList = args.skip_namespaces
    pods_status = check_node_pods_status(nsList)
    # send the value to zabbix (1 == at least one node with no running pods)
    mts = MetricSender(verbose=args.verbose)
    mts.add_metric({'openshift.nodes.pods.status': pods_status})
    mts.send_metrics()
# Script entry point (invoked by cron).
if __name__ == "__main__":
    main()
| apache-2.0 | Python | |
0c44e2527004bbb3000c2f3cbe06648ff04f0c92 | Fix python 2.6 'zero length field name in format' error | stripe/stripe-python,uploadcare/stripe-python,alexmic/stripe-python,NahomAgidew/stripe-python,koobs/stripe-python,woodb/stripe-python,zenmeso/stripe-python | stripe/error.py | stripe/error.py | # Exceptions
class StripeError(Exception):
    """Base class for all Stripe API errors.

    Keeps the raw HTTP details (body, status, parsed JSON, headers)
    alongside the message, and exposes the server-assigned request id.
    """

    def __init__(self, message=None, http_body=None, http_status=None,
                 json_body=None, headers=None):
        super(StripeError, self).__init__(message)

        # Bodies arrive as bytes from the HTTP layer; normalize to text.
        if http_body and hasattr(http_body, 'decode'):
            try:
                http_body = http_body.decode('utf-8')
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt /
                # SystemExit are no longer swallowed here.
                http_body = ('<Could not decode body as utf-8. '
                             'Please report to support@stripe.com>')

        self.http_body = http_body
        self.http_status = http_status
        self.json_body = json_body
        self.headers = headers or {}
        self.request_id = self.headers.get('request-id', None)

    def __str__(self):
        msg = super(StripeError, self).__str__()
        # Explicit indices keep this compatible with Python 2.6, where
        # auto-numbered "{}" fields are a ValueError.
        if self.request_id is not None:
            return "Request {0}: {1}".format(self.request_id, msg)
        else:
            return msg
class APIError(StripeError):
    """Generic, unexpected server-side API error."""
    pass
class APIConnectionError(StripeError):
    """Raised when the network connection to the API cannot be established."""
    pass
class CardError(StripeError):
    """Error raised when a card cannot be charged.

    Records the offending request parameter and the decline code.
    """

    def __init__(self, message, param, code, http_body=None,
                 http_status=None, json_body=None, headers=None):
        self.param = param
        self.code = code
        super(CardError, self).__init__(message, http_body, http_status,
                                        json_body, headers)
class InvalidRequestError(StripeError):
    """Error for requests the API rejects as malformed or invalid."""

    def __init__(self, message, param, http_body=None,
                 http_status=None, json_body=None, headers=None):
        self.param = param
        super(InvalidRequestError, self).__init__(message, http_body,
                                                  http_status, json_body,
                                                  headers)
class AuthenticationError(StripeError):
    """Raised when the API key is missing or invalid."""
    pass
| # Exceptions
class StripeError(Exception):
    """Base class for all Stripe API errors.

    Keeps the raw HTTP details (body, status, parsed JSON, headers)
    alongside the message, and exposes the server-assigned request id.
    """

    def __init__(self, message=None, http_body=None, http_status=None,
                 json_body=None, headers=None):
        super(StripeError, self).__init__(message)

        # Bodies arrive as bytes from the HTTP layer; normalize to text.
        if http_body and hasattr(http_body, 'decode'):
            try:
                http_body = http_body.decode('utf-8')
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt /
                # SystemExit are no longer swallowed here.
                http_body = ('<Could not decode body as utf-8. '
                             'Please report to support@stripe.com>')

        self.http_body = http_body
        self.http_status = http_status
        self.json_body = json_body
        self.headers = headers or {}
        self.request_id = self.headers.get('request-id', None)

    def __str__(self):
        msg = super(StripeError, self).__str__()
        if self.request_id is not None:
            # Fixed for Python 2.6: auto-numbered "{}" fields raise
            # ValueError ("zero length field name") there, so use
            # explicit positional indices.
            return "Request {0}: {1}".format(self.request_id, msg)
        else:
            return msg
class APIError(StripeError):
    """Generic, unexpected server-side API error."""
    pass
class APIConnectionError(StripeError):
    """Raised when the network connection to the API cannot be established."""
    pass
class CardError(StripeError):
    """Error raised when a card cannot be charged.

    Records the offending request parameter and the decline code.
    """

    def __init__(self, message, param, code, http_body=None,
                 http_status=None, json_body=None, headers=None):
        self.param = param
        self.code = code
        super(CardError, self).__init__(message, http_body, http_status,
                                        json_body, headers)
class InvalidRequestError(StripeError):
    """Error for requests the API rejects as malformed or invalid."""

    def __init__(self, message, param, http_body=None,
                 http_status=None, json_body=None, headers=None):
        self.param = param
        super(InvalidRequestError, self).__init__(message, http_body,
                                                  http_status, json_body,
                                                  headers)
class AuthenticationError(StripeError):
    """Raised when the API key is missing or invalid."""
    pass
| mit | Python |
ece35f891ffd976a7ecfd191e9fbad1e416650d2 | TEST added for phantom | samuelstjean/dipy,nilgoyyou/dipy,maurozucchelli/dipy,matthieudumont/dipy,FrancoisRheaultUS/dipy,beni55/dipy,mdesco/dipy,Messaoud-Boudjada/dipy,JohnGriffiths/dipy,oesteban/dipy,JohnGriffiths/dipy,matthieudumont/dipy,sinkpoint/dipy,mdesco/dipy,samuelstjean/dipy,jyeatman/dipy,rfdougherty/dipy,samuelstjean/dipy,beni55/dipy,demianw/dipy,oesteban/dipy,maurozucchelli/dipy,maurozucchelli/dipy,villalonreina/dipy,sinkpoint/dipy,Messaoud-Boudjada/dipy,jyeatman/dipy,StongeEtienne/dipy,rfdougherty/dipy,villalonreina/dipy,StongeEtienne/dipy,FrancoisRheaultUS/dipy,nilgoyyou/dipy,demianw/dipy | dipy/sims/tests/test_phantom.py | dipy/sims/tests/test_phantom.py | import numpy as np
import nose
import nibabel as nib
from nose.tools import assert_true, assert_false, assert_equal, assert_almost_equal
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.core.geometry import vec2vec_rotmat
from dipy.data import get_data
from dipy.viz import fvtk
from dipy.reconst.dti import Tensor
from dipy.sims.phantom import orbital_phantom
def test_phantom():
    """Smoke-test orbital_phantom: fit tensors and check the peak FA value."""

    def f(t):
        # Parametric curve traced by the simulated fiber: a circle in x/y
        # with z sweeping linearly from -1 to 1 (a single helix-like turn).
        x=np.sin(t)
        y=np.cos(t)
        z=np.linspace(-1,1,len(x))
        return x,y,z

    # Load the bundled 64-direction gradient table (NaNs in bvecs -> 0).
    fimg,fbvals,fbvecs=get_data('small_64D')
    bvals=np.load(fbvals)
    bvecs=np.load(fbvecs)
    bvecs[np.isnan(bvecs)]=0

    N=50 #timepoints

    vol=orbital_phantom(bvals=bvals,
                        bvecs=bvecs,
                        func=f,
                        t=np.linspace(0,2*np.pi,N),
                        datashape=(10,10,10,len(bvals)),
                        origin=(5,5,5),
                        scale=(3,3,3),
                        angles=np.linspace(0,2*np.pi,16),
                        radii=np.linspace(0.2,2,6))

    ten=Tensor(vol,bvals,bvecs)
    FA=ten.fa()
    FA[np.isnan(FA)]=0
    # 707 ~ 1000/sqrt(2): presumably the expected max FA for this
    # single-fiber phantom -- confirm against the phantom's design.
    assert_equal(np.round(FA.max()*1000),707)
# Allow running this test module directly, outside a test runner.
if __name__ == "__main__":
    test_phantom()
| bsd-3-clause | Python | |
1b0207786bdc5488790d971c3772ff4825d49a00 | add kmeans | ntduong/ML | kmeans.py | kmeans.py | '''
Simple k-means with random initial starting points (ver 0)
(c) Duong Nguyen nguyen@sg.cs.titech.ac.jp
'''
import math
import random
from collections import defaultdict
import matplotlib.pyplot as plt
def edist(p, q):
    """
    Compute the Euclidean distance between two points p, q.

    Raises ValueError if p and q have different dimensionality.
    """
    if len(p) != len(q):
        # Call-style raise works on both Python 2 and 3 (the original
        # `raise ValueError, "..."` form is Python-2-only syntax).
        raise ValueError("lengths must match!")
    sqSum = sum(map(lambda a, b: (a - b) ** 2, p, q))
    return math.sqrt(sqSum)
def pearson(x, y):
    """
    Compute the Pearson correlation between two points x, y.
    This can be used a "distance" between x, y too.

    Returns 0 when either vector has zero variance (no correlation).
    Raises ValueError if x and y have different lengths.
    """
    if len(x) != len(y):
        # Call-style raise works on both Python 2 and 3 (the original
        # `raise ValueError, "..."` form is Python-2-only syntax).
        raise ValueError("lengths must match!")
    n = len(x)
    sumx = sum(x)
    sumy = sum(y)
    sumxy = sum(map(lambda a, b: a * b, x, y))
    sqSumx = sum(map(lambda a: a ** 2, x))
    sqSumy = sum(map(lambda a: a ** 2, y))
    # Numerator: covariance-like term; denominator: product of std devs.
    nu = sumxy - float(sumx) * sumy/n
    de = math.sqrt((float(sqSumx) - sumx**2/n) * (float(sqSumy) - sumy**2/n))
    if de == 0: return 0 # no correlation
    else: return nu/de
def kmeans(points, distance=pearson, k=5, iter=1000):
"""
k-means algorithm of clustering data points into k clusters using distance metric.
"""
pdim = len(points[0])
# Compute the [min, max] ranges for each coordinate of point. This can be used to choose initial random means.
vrange = zip(map(min, points), map(max, points))
# Select initial k random means
means = []
for i in range(k):
means.append([random.random()*(vrange[i][1]-vrange[i][0]) + vrange[i][0] for j in range(pdim)])
#print means
clusters = None
for t in range(iter):
print 'Iteration %d' %t
# First, assign each data point to a cluster specified by its nearest mean.
tmpClusters = defaultdict(list)
for p in points:
pdist, cid = min([(distance(p, means[id]), id) for id in range(len(means))])
tmpClusters[cid].append(p)
# Stop if convergence
if tmpClusters == clusters: break
# Update clusters
clusters = tmpClusters
# Compute new mean for each cluster.
for i in range(k):
# Get the list of points that belong to i-th cluster
cPoints = tmpClusters[i]
# Get the size of i-th cluster
cSize = len(cPoints)
# Compute new mean for i-th cluster by simply compute its average
if cSize > 0:
total = map(sum, zip(*cPoints))
avg = map(lambda x: x/cSize, total)
means[i] = avg # new mean of i-th cluster
clusters = tmpClusters
return clusters, means
def genPoints(n=100, d=2):
    """Generate n random d-dimensional points in two well-separated clumps.

    Half the points lie in [50, 150) per coordinate, half in [-50, 50).
    Floor division keeps the original Python 2 `n/2` behavior for odd n.
    """
    points = []
    for i in range(n // 2):
        points.append([random.random() * 100 + 50 for j in range(d)])
    for i in range(n // 2):
        points.append([random.random() * 100 - 50 for j in range(d)])
    # randomly shuffle data points
    random.shuffle(points)
    return points
def plot2d(points, color='b', marker='o'):
    """Scatter-plot the first two coordinates of each point (no plt.show())."""
    temp = zip(*points)  # transpose: temp[0]=xs, temp[1]=ys (Python 2 zip -> list)
    assert len(temp[0]) == len(temp[1]), "lengths mismatched!"
    plt.scatter(temp[0], temp[1], c=color, marker=marker)
    #plt.show()
def clusterStat(clusters):
    """Print each cluster's id, member points and size (Python 2 prints)."""
    # NOTE: the loop variable shadows the builtin `id` (pre-existing style).
    for id in clusters:
        print 'Cluster %d: ' %id
        print clusters[id]
        print 'Size: %d' % len(clusters[id])
        print '-'*30
def plotClusters(clusters, means):
    """Plot each 2-D cluster and its mean with a distinct color/marker.

    Colors and markers cycle with modulo indexing, so more than five
    clusters no longer raises IndexError (the lists used to be a hard
    5-cluster cap).  Behavior is unchanged for up to five clusters.
    """
    nc = len(means)
    clist = ['r', 'g', 'b', 'm', 'k']
    mlist = ['*', '+', 's', 'H', 'D']
    # Plot each cluster's mean as a large marker.
    for i in range(nc):
        style = clist[i % len(clist)] + mlist[i % len(mlist)]
        plt.plot(means[i][0], means[i][1], style, markersize=10)
    # Plot the member points of each cluster in the matching color.
    for i in range(nc):
        plot2d(clusters[i], color=clist[i % len(clist)])
    plt.show()
if __name__ == '__main__':
    # Demo: two well-separated clumps, clustered with Euclidean k-means (k=2).
    points = genPoints()
    #plot2d(points)
    clusters, means = kmeans(points, distance=edist, k=2)
    plotClusters(clusters, means)
| mit | Python | |
4f2cd5d857c00c5b2f4d4aed5fff42a00a003cac | add directory | Mellcap/MellPlayer | MellPlayer/directory.py | MellPlayer/directory.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Netease Music Player
Created on 2017-02-23
@author: Mellcap
'''
import os
# Application data directory.  Note: '~' is NOT expanded here; consumers
# presumably run os.path.expanduser on it -- TODO confirm at call sites.
DIRECTORY = '~/.MellPlayer'
def create_directory(directory):
    """Create *directory* (including parents) if it does not already exist.

    Uses makedirs(exist_ok=True) instead of an exists() pre-check, which
    removes the race where the directory appears between the check and
    the create.  Note: unlike the old exists() guard, this raises
    FileExistsError if the path exists but is a regular file, surfacing
    the misconfiguration instead of silently ignoring it.
    """
    os.makedirs(directory, exist_ok=True)
| mit | Python | |
6bf066dff1cd88a56d228e73b5eabcaee55e534f | Add unit tests for deterministic annealing gradients | jluttine/bayespy,fivejjs/bayespy,SalemAmeen/bayespy,bayespy/bayespy | bayespy/inference/vmp/tests/test_annealing.py | bayespy/inference/vmp/tests/test_annealing.py | ######################################################################
# Copyright (C) 2015 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Unit tests for `vmp` module.
"""
import numpy as np
from scipy import special
from numpy import testing
from bayespy.nodes import (Gaussian,
GaussianARD,
GaussianGammaISO,
Gamma,
Wishart)
from ..vmp import VB
from bayespy.utils import misc
from bayespy.utils import linalg
from bayespy.utils import random
from bayespy.utils.misc import TestCase
class TestVB(TestCase):
    """Checks of VB gradients under deterministic annealing."""

    def test_annealing(self):
        """Verify annealed gradients against finite differences.

        Three checks on a single annealed GaussianARD node:
        1) the analytic gradient matches a numerical one,
        2) the gradient is ~zero right after a VB update (optimum),
        3) away from the optimum, the Riemannian gradient equals the
           parameter step taken by one VB-EM update.
        """

        X = GaussianARD(3, 4)
        X.initialize_from_parameters(-1, 6)
        Q = VB(X)
        # Annealing temperature 0.1 scales the lower bound / gradients.
        Q.set_annealing(0.1)

        #
        # Check that the gradient is correct
        #

        # Initial parameters
        phi0 = X.phi
        # Gradient
        rg = X.get_riemannian_gradient()
        g = X.get_gradient(rg)
        # Numerical gradient of the first parameter
        eps = 1e-6
        p0 = X.get_parameters()
        l0 = Q.compute_lowerbound(ignore_masked=False)

        g_num = [(), ()]

        # Forward finite difference: perturb each parameter by eps and
        # compare the lower-bound change against the analytic gradient.
        e = eps
        p1 = p0[0] + e
        X.set_parameters([p1, p0[1]])
        l1 = Q.compute_lowerbound(ignore_masked=False)
        g_num[0] = (l1 - l0) / eps

        # Numerical gradient of the second parameter
        p1 = p0[1] + e
        X.set_parameters([p0[0], p1])
        l1 = Q.compute_lowerbound(ignore_masked=False)
        g_num[1] = (l1 - l0) / (eps)

        # Check
        self.assertAllClose(g[0],
                            g_num[0])
        self.assertAllClose(g[1],
                            g_num[1])

        #
        # Gradient should be zero after updating
        #
        X.update()
        # Initial parameters
        phi0 = X.phi
        # Numerical gradient of the first parameter
        eps = 1e-8
        p0 = X.get_parameters()
        l0 = Q.compute_lowerbound(ignore_masked=False)

        g_num = [(), ()]

        e = eps
        p1 = p0[0] + e
        X.set_parameters([p1, p0[1]])
        l1 = Q.compute_lowerbound(ignore_masked=False)
        g_num[0] = (l1 - l0) / eps

        # Numerical gradient of the second parameter
        p1 = p0[1] + e
        X.set_parameters([p0[0], p1])
        l1 = Q.compute_lowerbound(ignore_masked=False)
        g_num[1] = (l1 - l0) / (eps)

        # Check: at the optimum both numerical gradients should vanish.
        self.assertAllClose(0,
                            g_num[0],
                            atol=1e-5)
        self.assertAllClose(0,
                            g_num[1],
                            atol=1e-5)

        # Not at the optimum
        X.initialize_from_parameters(-1, 6)
        # Initial parameters
        phi0 = X.phi
        # Gradient
        g = X.get_riemannian_gradient()
        # Parameters after VB-EM update
        X.update()
        phi1 = X.phi
        # Check: the Riemannian gradient equals the VB-EM parameter step.
        self.assertAllClose(g[0],
                            phi1[0] - phi0[0])
        self.assertAllClose(g[1],
                            phi1[1] - phi0[1])

        pass
| mit | Python | |
2d9122adc0e12b00f29bca321979dc2ecc428ddc | Create proxy_scan.py | kelath/Burp-Extensions | proxy_scan.py | proxy_scan.py | from burp import IBurpExtender
from burp import IHttpListener
from burp import IScannerListener
from java.net import URL
from java.io import File
import datetime
import time
class BurpExtender(IBurpExtender, IHttpListener, IScannerListener):
    """Burp extension that replays every proxy-history item through the active scanner."""

    def registerExtenderCallbacks(self, callbacks):
        """Entry point invoked by Burp when the extension loads (Jython/Python 2)."""
        self._callbacks = callbacks
        self._callbacks.setExtensionName("Proxy History Tester")
        httpReqResp = callbacks.getProxyHistory()
        print "There are %d items in the list" % httpReqResp.__len__()
        for item in httpReqResp:
            #print item.getRequest().tostring()
            print item.getHttpService().getHost()
            print item.getHttpService().getPort()
            # Third argument 0 is presumably the 'use HTTPS' flag --
            # NOTE(review): if so, HTTPS services are scanned over plain
            # HTTP; confirm against the Burp extender API.
            self._callbacks.doActiveScan(item.getHttpService().getHost(),item.getHttpService().getPort(),0,item.getRequest())
| mit | Python | |
19c0183e6e23195aef479553c018718565925585 | Add new regex based features | Rostlab/nalaf | nala/features/regex.py | nala/features/regex.py | import re
from nala.features import FeatureGenerator
class RegexNLFeatureGenerator(FeatureGenerator):
    """Tags tokens covered by regex matches for natural-language mutation mentions.

    For each pattern i, every token gets a feature 'regex_nl_i' with value
    B (a match starts at the token), I (token strictly inside a match),
    E (a match ends at the token) or O (no overlap).
    """
    def __init__(self):
        # Hand-crafted patterns for deletions, protein termini and exon
        # references.  Order matters: the feature name encodes the index.
        self.patterns = [
            re.compile('[g]\.[0-9]+_[0-9]+(del)[0-9]+'),
            re.compile('deletion of( (the|a))?.* region'),
            re.compile('deletion of( (the|a))?( \d+(bp|base pairs?|a\.a\.|amino acids?|nucleotides?)?)? [\w\-\.]+'),
            re.compile('\d+(-| )?(bp|base pairs?|a\.a\.|amino acids?|nucleotides?).*deletion'),
            re.compile('[\w\-\.]+ deletion'),
            re.compile('(c|carboxyl?|cooh|n|amino|nh2|amine)(-| )(terminus|terminal)( (tail|end))?'),
            re.compile('exons? \d+(( ?(and|or|-) ?\d+))?')
        ]

    def generate(self, dataset):
        """
        :type dataset: nala.structures.data.Dataset
        """
        for part in dataset.parts():
            # Per pattern, collect (start, end) offsets of every match
            # against the part's full text.
            matches = {}
            for index, pattern in enumerate(self.patterns):
                matches[index] = []
                for match in pattern.finditer(part.text):
                    matches[index].append((match.start(), match.end()))

            for sentence in part.sentences:
                for token in sentence:
                    # token.start/token.end are assumed to be offsets into
                    # part.text, comparable with the regex spans -- TODO
                    # confirm against nala's Token definition.
                    for match_index, match in matches.items():
                        name = 'regex_nl_{}'.format(match_index)
                        value = 'O'
                        # First matching span wins; branch order gives
                        # precedence B over I over E when spans touch.
                        for start, end in match:
                            if start == token.start:
                                value = 'B'
                                break
                            elif start < token.start < token.end < end:
                                value = 'I'
                                break
                            elif token.end == end:
                                value = 'E'
                                break
                        token.features[name] = value
| apache-2.0 | Python | |
0259d9a361fa49966977f958b8222f977616713f | Add SRIM energy histogram vacancy analysis script (#31) | idaholab/mytrim,idaholab/mytrim,idaholab/mytrim,idaholab/mytrim,idaholab/mytrim | analysis/SRIMVacEnergyCount.py | analysis/SRIMVacEnergyCount.py | #!/usr/bin/python
#
# Tool to parse a SRIM COLLISION.txt file and produce an output comparable
# to TrimVacEnergyCount (output.type = 'vaccount')
#
import fileinput
import math
import re
recoil = re.compile('^\xdb')
# read file header
header = [''] * 4
for i in range(4) :
header[i] = fileinput.input()
# parse rest of the file
vac = []
for line in fileinput.input() :
# detect recoils
if recoil.match(line) :
field = line.split()
# vacancy
if field[7] == '1' :
x = int(float(field[4]))
E = int(math.log10(float(field[3])))
E = max(0, E);
if E not in vac :
vac += [[0]] * (1 + E - len(vac))
try:
vac[E][x] += 1
except IndexError:
vac[E] += [0] * (1 + x - len(vac[E]))
vac[E][x] = 1
# output histogram
for E in range(len(vac)) :
for x in range(len(vac[E])) :
print "%d %d %d" % (E, x, vac[E][x])
print
| lgpl-2.1 | Python | |
620e568b59d8e811a6457be251dfa7d5bf0f8a3d | implement stack using the linked list | enlighter/algorithms | utils/stack.py | utils/stack.py | try:
from .linkedlist import LinkedList, Element
except ModuleNotFoundError as e:
from linkedlist import LinkedList, Element
class Stack(object):
    """LIFO stack backed by a singly linked list.

    The head of the underlying LinkedList serves as the top of the
    stack, so push and pop both touch only the list head.
    """

    def __init__(self, top=None):
        # Delegate all storage to a LinkedList seeded with the initial top.
        self.ll = LinkedList(top)

    def push(self, new_element):
        """Place new_element on top of the stack."""
        self.ll.insert_first(new_element)

    def pop(self):
        """Remove the element on top of the stack and return it."""
        return self.ll.delete_first()
# Debug leftover: reports the module name on every import; harmless but noisy.
print(__name__)

if __name__ == '__main__':
    # Test cases
    # Set up some Elements
    e1 = Element(1)
    e2 = Element(2)
    e3 = Element(3)
    e4 = Element(4)

    # Start setting up a Stack
    stack = Stack(e1)

    # Test stack functionality: expect LIFO order 3, 2, 1, then an empty
    # pop (presumably None -- depends on LinkedList.delete_first), then 4.
    stack.push(e2)
    stack.push(e3)
    print(stack.pop().value)
    print(stack.pop().value)
    print(stack.pop().value)
    print(stack.pop())
    stack.push(e4)
    print(stack.pop().value)
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.