commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
2a02fa394430b724d414631927bdd506e3ca082c
|
Update tasks.py
|
logstash_plugin/tasks.py
|
logstash_plugin/tasks.py
|
########
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# Built in Imports
import subprocess
# Cloudify Imports
from cloudify import ctx
from cloudify.decorators import operation
from cloudify import exceptions
from constants import (
WHICH_YUM,
WHICH_APT,
YUM_RPM_URL,
YUM_REPO_PATH,
YUM_REPO_CONTENT,
APT_KEY_URL,
APT_DEB_STR
)
@operation
def start(command, **_):
    """Start the log transport (logstash) service by running *command*.

    :param command: shell command that starts the daemon.
    :raises NonRecoverableError: when the command exits non-zero.
    """
    ctx.logger.info('Attempting to start log transport service.')
    exit_code = _run(command)
    if exit_code == 0:
        return
    raise exceptions.NonRecoverableError(
        'Unable to start log transport service: {0}'.format(exit_code))
@operation
def stop(command, **_):
    """Stop the log transport (logstash) service by running *command*.

    :param command: shell command that stops the daemon.
    :raises NonRecoverableError: when the command exits non-zero.
    """
    ctx.logger.info('Attempting to stop log transport service.')
    exit_code = _run(command)
    if exit_code == 0:
        return
    raise exceptions.NonRecoverableError(
        'Unable to stop log transport service: {0}'.format(exit_code))
@operation
def install(**_):
    """Install Logstash on the host, picking yum or apt automatically."""
    ctx.logger.info('Attempting to install log transport service.')
    _install_log_stash()
def _install_log_stash():
    """Dispatch to the distro-specific installer.

    yum is probed first; apt is only probed when yum is absent.
    Raises NonRecoverableError when neither package manager exists.
    """
    if _run(WHICH_YUM) == 0:
        _install_on_centos()
        return
    if _run(WHICH_APT) == 0:
        _install_on_ubuntu()
        return
    raise exceptions.NonRecoverableError(
        'Unable to install, because host is '
        'neither a Ubuntu, nor a CentOS host.')
def _install_on_centos():
    # Install Logstash from the official yum repository.
    ctx.logger.info(
        'Host is a CentOS host. Installing Logstash via yum.')
    # Import the repository signing key before adding the repo definition.
    _run('rpm --import {0}'.format(YUM_RPM_URL))
    # NOTE(review): the heredoc ('<<-EOM') and output redirection ('>')
    # require a shell, but _run() splits the command into an argv list —
    # verify this actually writes the repo file as intended.
    _run('sudo /bin/cat > {0} <<-EOM '
         '{1} EOM'.format(YUM_REPO_PATH, YUM_REPO_CONTENT))
    _run('sudo /usr/bin/yum -y install logstash')
def _install_on_ubuntu():
    # Install Logstash from the official apt repository.
    ctx.logger.info(
        'Host is an Ubuntu host. Installing Logstash via apt.')
    # NOTE(review): the pipes ('|') below require a shell, but _run()
    # splits the command into an argv list — verify these pipelines
    # execute as intended.
    _run('/usr/bin/wget -qO - {0} | sudo apt-key add -'.format(APT_KEY_URL))
    _run('/bin/echo "deb {0}" | '
         'sudo /usr/bin/tee -a /etc/apt/sources.list'.format(APT_DEB_STR))
    _run('sudo /usr/bin/apt-get update')
    _run('sudo /usr/bin/apt-get -y install logstash')
def _run(command):
    """Execute *command* and return its exit code.

    The command string is split on whitespace and executed without a
    shell; stdout and stderr are captured and logged.

    :param command: full command line as a single string.
    :returns: the process exit code (int).
    :raises NonRecoverableError: if the process cannot be spawned or
        communicated with.
    """
    command_as_list = command.split()
    ctx.logger.info('Running: {0}.'.format(command))
    ctx.logger.info('Sending: {0}.'.format(command_as_list))
    try:
        # Fix: the original passed shell=True together with an argv list;
        # on POSIX that makes the shell execute only the first token and
        # silently drop the rest. Also capture stderr so the log line
        # below has something to report instead of always None.
        p = subprocess.Popen(
            command_as_list,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
    except Exception as e:
        raise exceptions.NonRecoverableError(
            'Failed: {0}.'.format(str(e)))
    # Pre-bind so the finally-block logging cannot raise NameError when
    # communicate() itself fails (the original left out/err unbound).
    out = None
    err = None
    try:
        out, err = p.communicate()
    except Exception as e:
        raise exceptions.NonRecoverableError(
            'Failed: {0}.'.format(str(e)))
    finally:
        ctx.logger.info(
            'RAN: {0}. OUT: {1}. ERR: {2}. Code: {3}.'.format(
                command, out, err, p.returncode))
    return p.returncode
|
Python
| 0.000001
|
@@ -2187,21 +2187,16 @@
n('sudo
-/bin/
cat %3E %7B0
@@ -2206,16 +2206,16 @@
%3C-EOM '%0A
+
@@ -2281,25 +2281,16 @@
n('sudo
-/usr/bin/
yum -y i
@@ -2432,25 +2432,16 @@
_run('
-/usr/bin/
wget -qO
@@ -2500,21 +2500,16 @@
_run('
-/bin/
echo %22de
@@ -2534,25 +2534,16 @@
'sudo
-/usr/bin/
tee -a /
@@ -2596,33 +2596,24 @@
_run('sudo
-/usr/bin/
apt-get upda
@@ -2636,17 +2636,8 @@
udo
-/usr/bin/
apt-
|
3204e36721a245277abcc2f647e374adc08f4b63
|
Database name must be uppercase
|
luigi/gtrnadb/parsers.py
|
luigi/gtrnadb/parsers.py
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import json
import logging
import itertools as it
import operator as op
from time import sleep
from databases.data import Exon
from databases.data import Entry
from databases.data import SecondaryStructure
from databases import helpers
from gtrnadb.helpers import url
from gtrnadb.helpers import anticodon
from gtrnadb.helpers import note_data
from gtrnadb.helpers import chromosome
from gtrnadb.helpers import common_name
from gtrnadb.helpers import lineage
from gtrnadb.helpers import species
from gtrnadb.helpers import description
from gtrnadb.helpers import product
from gtrnadb.helpers import primary_id
from gtrnadb.helpers import dot_bracket
from gtrnadb.helpers import accession
from gtrnadb.helpers import parent_accession
LOGGER = logging.getLogger(__name__)
def gtrnadb_secondary_structure(data):
    """Build a SecondaryStructure from GtRNAdb's raw structure string.

    The raw angle-bracket string is first normalised into a reasonable
    dot-bracket string, which then backs the SecondaryStructure object.
    """
    structure = dot_bracket(data)
    return SecondaryStructure(dot_bracket=structure)
def gtrnadb_exons(locations):
    """Build the Exon objects for each exon entry in *locations*.

    The strand marker '+' maps to complement=False and '-' to
    complement=True; any other strand raises ValueError.
    """
    strand_to_complement = {'+': False, '-': True}
    exons = []
    for exon in locations['exons']:
        strand = exon['strand']
        if strand not in strand_to_complement:
            raise ValueError("Invalid strand %s" % exon)
        exons.append(Exon(
            primary_start=int(exon['start']),
            primary_end=int(exon['stop']),
            complement=strand_to_complement[strand],
        ))
    return exons
def gtrnadb_entries(data):
    """
    Take an entry from GtRNAdb and produce the RNAcentrals that it
    represents. A single entry may represent more than one Entry because it
    may occur in more than one location. As we provide an accession for
    each location this ends up representing more than one RNAcentral Entry.
    """
    # Pseudogenes are deliberately skipped: yield nothing for them.
    if data['metadata']['pseudogene']:
        return

    # The secondary structure is the same for every genomic location, so
    # compute it once up front.
    two_d = gtrnadb_secondary_structure(data)
    for location in data['genome_locations']:
        try:
            yield Entry(
                primary_id=primary_id(data, location),
                accession=accession(data, location),
                ncbi_tax_id=int(data['ncbi_tax_id']),
                database='GtRNAdb',
                sequence=data['sequence'],
                exons=gtrnadb_exons(location),
                rna_type='tRNA',
                url=url(data),
                note_data=note_data(data),
                secondary_structure=two_d,
                chromosome=chromosome(location),
                species=species(data),
                common_name=common_name(data),
                anticodon=anticodon(data),
                lineage=lineage(data),
                gene=data['gene'],
                optional_id=data['gene'],
                product=product(data),
                parent_accession=parent_accession(location),
                description=description(data),
                mol_type='genomic DNA',
                # The feature spans the whole sequence (1-based).
                feature_location_start=1,
                feature_location_end=len(data['sequence']),
                gene_synonyms=data.get('synonyms', []),
            )
        except helpers.UnknownTaxonId:
            # NOTE(review): an unknown taxon id abandons *all* remaining
            # locations of this entry (break), not only the current one —
            # confirm that is intended.
            print("Unknown taxon id in %s" % data)
            break
def parse(filename):
    """
    This will parse a JSON file produced by GtRNAdb and yield the RNAcentral
    entries that it represents.
    """
    with open(filename, 'rb') as raw:
        data = json.load(raw)
    # Flatten the per-datum generators into a single entry stream.
    for entry in it.chain.from_iterable(gtrnadb_entries(d) for d in data):
        yield entry
|
Python
| 0.999999
|
@@ -2960,22 +2960,22 @@
abase='G
-tRNAdb
+TRNADB
',%0A
|
e0c3a9dbff86a0f311357bfab161d110bfe3a8df
|
Create separate function for writing to logfile
|
TwircBot.py
|
TwircBot.py
|
import socket
import sys
import re
from datetime import datetime as dt
class TwircBot(object):
    """
    Basic Bot class that reads in a config file, connects to chat rooms,
    and logs the results.
    """

    def __init__(self, config_file_name):
        """Parse the configuration file to retrieve the config parameters """
        self.irc = socket.socket()
        self.host = 'irc.twitch.tv'
        self.port = 6667
        # Maximum number of bytes read from the socket per recv() call.
        self.block_size = 4096
        self.readConfigFile(config_file_name)

    def connect(self):
        """Connect to twitch chat and run the receive/log loop."""
        user_string = 'USER ' + self.nick
        nick_string = 'NICK ' + self.nick
        oauth_string = 'PASS oauth:' + self.oauth
        cap_req_string = 'CAP REQ :twitch.tv/membership'
        self.irc.connect((self.host, self.port))
        # IRC login handshake, then request membership events.
        self.send(user_string)
        self.send(oauth_string)
        self.send(nick_string)
        self.send(cap_req_string)
        for channel in self.channel_list:
            self.join(channel)
        # NOTE(review): this loop never terminates; the inline logging
        # here duplicates the responsibility of logData() below.
        while True:
            data = self.receive()
            if data:
                print(data)
                # Timestamp the raw chunk and append it to the log file.
                log_file = open(self.log_file_name,"a")
                current_time = dt.strftime(dt.utcnow(), self.time_format)
                log_file.write(current_time + "\n" + data)
                log_file.close()
                self.processData(data)

    def print_config(self):
        """
        Prints a string that contains all the configuration variables
        for a given TwircBot instance.
        """
        config_string = "\n***** TwircBot config *****\n"
        config_string += "Connecting to " + self.nick + "@"
        config_string += self.host + ":" + str(self.port) + "\n"
        config_string += "Channels: "
        for channels in self.channel_list:
            config_string += str(channels) + ", "
        config_string = config_string[:-2] #Remove last comma and space
        config_string += "\nLog file: " + self.log_file_name
        config_string += "\nTime format: " + self.time_format
        config_string += "\n***** TwircBot config *****\n"
        print(config_string)

    def send(self, message_string):
        """Accept a string, convert it to bytes, and send it."""
        message_bytes = bytes(message_string + '\r\n', 'utf-8')
        self.irc.send(message_bytes)

    def receive(self):
        """Accept some bytes from the socket and return them as a string."""
        message_bytes = self.irc.recv(self.block_size)
        message_string = message_bytes.decode('utf-8')
        return message_string

    def pong(self):
        """Send a PONG."""
        self.send('PONG :tmi.twitch.tv\r\n')

    def privmsg(self, channel, message):
        """ Send a private message to a particular channel. """
        self.send('PRIVMSG #' + channel + ' :' + message)

    def join(self, channel):
        """ Join a channel. """
        self.send('JOIN #' + channel)

    def part(self, channel):
        """ Leave a channel. """
        self.send('PART #' + channel)

    def processData(self, data):
        """ Break up the datastream into lines and decide what to do with them. """
        for line in data.splitlines():
            words = line.split()
            # NOTE(review): words[0] raises IndexError on an empty line —
            # confirm the stream never delivers blank lines.
            if words[0] == 'PING':
                self.pong()
            if re.search('smart', line):
                self.privmsg(self.nick, 'You are smart!')

    def readConfigFile(self, config_file_name):
        """ Read a configuration file and load all the values. """
        config_file = open(config_file_name,"r")
        for line in config_file:
            words = line.split()
            if words[0] == "oauth:":
                self.oauth = line.split()[1]
            elif words[0] == "nick:":
                self.nick = line.split()[1]
            elif words[0] == "channels:":
                self.channel_list = line.split()[1:]
            elif words[0] == "log:":
                self.log_file_name = line.split()[1]
            elif words[0] == "timeFormat:":
                # The time format is the bracketed pattern on the line,
                # e.g. "[%Y-%m-%d %H:%M]".
                self.time_format = re.search('\[.*\]',line).group(0)
        config_file.close()

    def logData(self, data):
        # NOTE(review): stub — prints a placeholder instead of writing to
        # the log file; connect() still logs inline.
        print("Blarg")
|
Python
| 0.000001
|
@@ -1117,230 +1117,8 @@
ta)%0A
- log_file = open(self.log_file_name,%22a%22)%0A current_time = dt.strftime(dt.utcnow(), self.time_format)%0A log_file.write(current_time + %22%5Cn%22 + data)%0A log_file.close()%0A
@@ -2993,16 +2993,47 @@
ines():%0A
+ self.logData(line)%0A
@@ -3989,21 +3989,265 @@
-print(%22Blarg%22
+%22%22%22 Timestamps a line of output and send it to the logfile %22%22%22%0A current_time = dt.strftime(dt.utcnow(), self.time_format)%0A log_file = open(self.log_file_name,%22a%22)%0A log_file.write(current_time + %22 %22 + data + %22%5Cn%22)%0A log_file.close(
)%0A
|
e01e292cb387fe7006c25907899b3f24070b4e77
|
Check existence of resulting path inside `icongetter.get_iconpath()`, since the underlying function is not doing this for absolute path names. As those names are now "officially" supported, this feature was added to the function's docstring.
|
launchit/icongetter.py
|
launchit/icongetter.py
|
"""
Detect suitable command icons.
"""
# Stdlib
import glob
import os
import warnings
# 3rd party
import xdg.IconTheme
import xdg.Menu
# launchit package
from . import settings
from ._stringutils import convert, keep_string_type
from .core import is_command, is_executable_file, parse_commandline
# Directory that contains the desktop environment's `.menu`-files
MENU_DIR = settings.config['menu-dir']
# Icon name constants (following XDG icon spec)
ICON_RUN = 'system-run'
ICON_EXECUTABLE = 'application-x-executable'
@keep_string_type
def get_iconpath_for_commandline(cmdline, size, theme):
    """Resolve a full icon path for a whole command line.

    The command line is parsed and its first argument is used to guess
    a suitable icon name, which is then resolved with respect to the
    desired `size` and `theme`. May return `None` when no icon path
    could be obtained.
    """
    args = parse_commandline(cmdline)
    command = args[0] if args else ''
    return get_iconpath(guess_icon_name(command), size, theme)
@keep_string_type
def get_iconpath(icon_name, size, theme):
    """
    Return a path, which refers to an icon file with the given name
    regarding to given `size` and `theme`. Return `None` if no icon
    path could be obtained.

    Note that `icon_name` may also be given as an absolute path name.
    It is then returned unchanged, if it exists, ignoring the values
    for `size` and `theme`.
    """
    path = xdg.IconTheme.getIconPath(icon_name, size, theme)
    # getIconPath() passes absolute path names through without checking
    # that they exist, which broke the documented "None if no icon path
    # could be obtained" contract; enforce it here (None-safe).
    if path is None or not os.path.exists(path):
        return None
    return path
@keep_string_type
def guess_icon_name(path, fallback=ICON_RUN):
    """Return a suitable icon name for *path*.

    A starter icon from the user's menu entries wins; otherwise commands
    and executable files get the generic executable icon; anything else
    gets *fallback*.
    """
    starter_icon = get_starter_icon(path)
    if starter_icon:
        return starter_icon
    if is_command(path) or is_executable_file(path):
        return ICON_EXECUTABLE
    return fallback
# Maps a command name to its icon name; filled lazily by
# get_starter_icon() via init_icon_cache().
icon_cache = {}

@keep_string_type
def get_starter_icon(command, use_cache=True):
    """
    Return the associated icon for a given command. This is done by
    analyzing the starter entries of the user's menu files: If the
    command appears inside an entry, its icon's file name is returned,
    otherwise None is returned. The command may be given as a name
    (`firefox`) or as a path (`/usr/bin/firefox`).

    Note that this function may take a while to analyze all menu
    entries. To speed this up, the icon names are cached, unless
    `use_cache` is False. In fact, using the cache means that the
    function will look for contents inside that cache when invoked.
    If it sees an empty cache (e.g. right after the module has been
    initialized) the cache will be filled using the results of
    `iter_command_icons()`. These cached results are used for any
    later call. The cache may be rebuilt via `init_icon_cache()`,
    if needed.
    """
    # NOTE(review): `basestring` only exists on Python 2 — this module
    # apparently targets Python 2; confirm before porting.
    if not isinstance(command, basestring):
        raise TypeError('command must be a string')
    if use_cache:
        # Lazily populate the shared cache on first use.
        if not icon_cache:
            init_icon_cache()
        icons = icon_cache
    else:
        # Bypass the cache: rebuild the mapping for this call only.
        icons = dict(iter_command_icons())
    # The cache keys commands by their base name (see iter_command_icons).
    if is_command(command):
        command = os.path.basename(command)
    try:
        icon = icons[command]
    except KeyError:
        return None
    return icon
def init_icon_cache():
    """(Re-)Initialize the cache used to guess the icon for a command.

    Clears the module-level `icon_cache` and refills it from the user's
    menu entries.
    """
    icon_cache.clear()
    icon_cache.update(iter_command_icons())
def iter_command_icons():
    """
    Analyze the user's menu entries and return an iterator, which
    contains the associated command and its icon file for each entry
    as tuples in the form `(command, icon)`. Note that if a command
    refers to an existing file inside one of the directories defined
    by the environment variable PATH and if that command is given as
    an absolute path, it will be shortened to its file name (e.g.
    `/usr/bin/firefox` => `firefox`).
    """
    for menu in iter_menu_files():
        for entry in iter_desktop_entries(menu):
            # Normalize the Exec value to `str` (entries may be unicode).
            exec_ = convert(entry.getExec(), str)
            # Only the executable (first token) identifies the command.
            cmd = parse_commandline(exec_)[0]
            if is_command(cmd):
                cmd = os.path.basename(cmd)
            icon = convert(entry.getIcon(), str)
            yield (cmd, icon)
# PyXDG-related helper functions

def iter_menu_files():
    """
    Iterate through the `.menu`-files found in the globally defined
    MENU_DIR and yield a `xdg.Menu.Menu` object for each file. Each
    of those objects will then contain the `.menu`-file's entries in
    a parsed structure.
    """
    menu_files = os.path.join(MENU_DIR, '*.menu')
    for menu_file in glob.glob(menu_files):
        with warnings.catch_warnings():
            # Suppress a warning that may occur, when parsing KDE entries
            warnings.filterwarnings('ignore', 'os.popen3() is deprecated')
            yield xdg.Menu.parse(menu_file)
def iter_desktop_entries(menu):
    """Recursively yield every desktop entry (program starter) in *menu*.

    Submenus are expanded in place: a submenu's entries are yielded
    before the entries that follow it in the parent menu. Entry types
    other than menus and menu entries are skipped.
    """
    for item in menu.getEntries():
        if isinstance(item, xdg.Menu.Menu):
            # Submenu: walk into it first.
            for desktop_entry in iter_desktop_entries(item):
                yield desktop_entry
        elif isinstance(item, xdg.Menu.MenuEntry):
            yield item.DesktopEntry
        # (Other types are ignored)
|
Python
| 0
|
@@ -1312,76 +1312,314 @@
ed.%0A
+%0A
-%22%22%22%0A return xdg.IconTheme.getIconPath(icon_name, size, theme)
+Note that %60icon_name%60 may also be given as an absolute path name. %0A It is then returned unchanged, if it exists, ignoring the values%0A for %60size%60 and %60theme%60.%0A %22%22%22%0A path = xdg.IconTheme.getIconPath(icon_name, size, theme)%0A if not os.path.exists(path):%0A return None%0A return path
%0A%0A@k
|
52569c1a662a84d0e56ca276853714d272f7c10b
|
Update env.tomcat in fab script with latest tomcat 7 version
|
manage/fabfile/target.py
|
manage/fabfile/target.py
|
"""
Target Environments
"""
from fabric.api import *
# Dotted task names of all registered target environments; consumed by
# _needs_targetenv() to tell the user which tasks provide the settings.
targetenvs = []

def targetenv(f):
    """Register *f* as a target-environment task.

    Records the task's dotted name in the module-level ``targetenvs``
    list and wraps the function with Fabric's ``task`` decorator.
    """
    targetenvs.append("target.%s" % f.__name__)
    return task(f)
def _needs_targetenv():
    """
    Makes sure that the env dictionary contains a certain set of keys. These
    keys are provided by one of the targetenv functions (decorated with
    @targetenv). Targets calling this function require a target to have been
    invoked.
    """
    # Fabric aborts with a message naming the provider tasks when any of
    # these keys is missing from env.
    require('target', 'roledefs', 'dist_dir', 'tomcat', provided_by=targetenvs)
@targetenv
def dev_unix():
    """Set target env to: dev-unix"""
    # Name env:
    env.target = "dev-unix"
    # Machines: all roles run on the local machine.
    env.roledefs = {
        'admin': ['localhost'],
        'main': ['localhost'],
        'service': ['localhost'],
        'examples': ['localhost'],
    }
    # Filesystem paths
    env.rinfo_main_store = "/opt/work/rinfo/depots/rinfo"
    env.examples_store = "/opt/work/rinfo/depots"
    env.dist_dir = '/opt/work/rinfo/rinfo_dist'
    env.rinfo_dir = '/opt/work/rinfo'
    env.rinfo_rdf_repo_dir = '/opt/work/rinfo/aduna'
    env.admin_webroot = "/opt/work/rinfo/admin"
    # Tomcat: a local standalone install driven via catalina.sh.
    # env.tomcat must be set before the "%(tomcat)s" interpolations below.
    env.tomcat = "/opt/tomcat"
    env.tomcat_webapps = "%(tomcat)s/webapps"%env
    env.tomcat_start = "%(tomcat)s/bin/catalina.sh start"%env
    env.tomcat_stop = "%(tomcat)s/bin/catalina.sh stop"%env
    env.tomcat_user = "tomcat"
    env.tomcat_group = 'tomcat'
@targetenv
def demo():
    """Set target env to: demo"""
    # Name env:
    env.target = "demo"
    # Machines: every role lives on the single demo host.
    env.user = 'rinfo'
    env.roledefs = {
        'main': ['demo.lagrummet.se'],
        'service': ['demo.lagrummet.se'],
        'checker': ['demo.lagrummet.se'],
        'admin': ['demo.lagrummet.se'],
        'demosource': ['demo.lagrummet.se'],
    }
    # Manage
    env.mgr_workdir = "/home/%(user)s/mgr_work" % env
    env.dist_dir = 'rinfo_dist'
    # Filesystem paths
    env.rinfo_dir = '/opt/rinfo'
    env.rinfo_main_store = "/opt/rinfo/store"
    env.rinfo_rdf_repo_dir = '/opt/rinfo/sesame-repo'
    env.demo_data_root = "/opt/rinfo/demo-depots"
    # Apache
    env.admin_webroot = "/var/www/admin"
    env.docs_webroot = "/var/www/dokumentation"
    env.apache_sites = {
        'main': ['rinfo-main', 'admin'],
        'service': ['service'],
        'demosource': ['sfs', 'dv', 'prop', 'sou', 'ds'],
        'checker': ['checker'],
    }
    # Tomcat settings shared by all server targets (init.d-managed).
    _tomcat_env()
# Integration is a virtual environment that you could setup on your own computer
# See README.txt for more information
@targetenv
def integration():
    """Set target env to: integration"""
    # Name env:
    env.target = "integration"
    # Machines: one virtual host per role (see README.txt).
    env.user = 'rinfo'
    env.roledefs = {
        'main': ['rinfo-main'],
        'service': ['rinfo-service'],
        'checker': ['rinfo-checker'],
        'doc': ['rinfo-integration'],
        'admin': ['rinfo-integration'],
        'demosource': ['rinfo-integration'],
    }
    # Manage
    env.mgr_workdir = "/home/%(user)s/mgr_work" % env
    env.dist_dir = 'rinfo_dist'
    # Filesystem paths
    env.rinfo_main_store = "/opt/rinfo/store"
    env.rinfo_dir = '/opt/rinfo'
    env.rinfo_rdf_repo_dir = '/opt/rinfo/sesame-repo'
    env.demo_data_root = "/opt/rinfo/demo-depots"
    # Apache
    env.admin_webroot = "/var/www/admin"
    env.docs_webroot = "/var/www/dokumentation"
    env.apache_sites = {
        'demosource': ['sfs', 'dv', 'prop', 'sou', 'ds'],
        'main': ['rinfo-main', 'admin'],
        'service': ['service'],
        'checker': ['checker'],
    }
    # Tomcat settings shared by all server targets (init.d-managed).
    _tomcat_env()
@targetenv
def prod():
    """Set target env to: prod"""
    # Name env:
    env.target = "prod"
    # Machines: the production hosts, one per role.
    env.user = 'rinfo'
    env.roledefs = {
        'main': ['rinfo.lagrummet.se'],
        'service': ['service.lagrummet.se'],
        'checker': ['checker.lagrummet.se'],
        'doc': ['dev.lagrummet.se'],
        'admin': ['admin.lagrummet.se'],
    }
    # Manage
    env.mgr_workdir = "/home/%(user)s/mgr_work" % env
    env.dist_dir = 'rinfo_dist'
    # Filesystem paths
    env.rinfo_main_store = "/opt/rinfo/store"
    env.rinfo_dir = '/opt/rinfo'
    env.rinfo_rdf_repo_dir = '/opt/rinfo/sesame-repo'
    # Apache
    env.admin_webroot = "/var/www/admin"
    env.docs_webroot = "/var/www/dokumentation"
    env.apache_sites = {
        'main': ['default', 'admin'],
        'service': ['service'],
        'checker': ['checker'],
    }
    # Tomcat settings shared by all server targets (init.d-managed).
    _tomcat_env()
def _tomcat_env():
    """Shared Tomcat settings for server targets (init.d-managed)."""
    env.apache_jk_tomcat = True
    env.tomcat_version = "7.0.37"
    # env.tomcat must be set before the "%(tomcat)s" interpolation below.
    env.tomcat = "/opt/tomcat"
    env.tomcat_webapps = "%(tomcat)s/webapps"%env
    env.tomcat_start = '/etc/init.d/tomcat start'
    env.tomcat_stop = '/etc/init.d/tomcat stop'
    env.tomcat_user = 'tomcat'
    env.tomcat_group = 'tomcat'
|
Python
| 0
|
@@ -4787,9 +4787,9 @@
7.0.
-3
+4
7%22%0A
|
5479c0e9dcf92213640c0f07ab8f14d3702e0738
|
Fix pdf parsing
|
modules/leclercmobile/pages/history.py
|
modules/leclercmobile/pages/history.py
|
# -*- coding: utf-8 -*-
# Copyright(C) 2012 Florent Fourcot
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import re
import os
import subprocess
import tempfile
import shutil
from datetime import datetime, date, time
from decimal import Decimal
from weboob.tools.browser import BasePage
from weboob.capabilities.bill import Detail, Bill
__all__ = ['HistoryPage', 'PdfPage']
def _get_date(detail):
    # Sort key used by PdfPage.get_calls() to order details by timestamp.
    return detail.datetime
class PdfPage():
    """Parses a Leclerc Mobile invoice PDF (details, calls, balance)."""

    def __init__(self, file):
        # `file` is a file-like object holding the raw PDF bytes.
        self.pdf = file

    def _parse_pdf(self):
        """Convert the PDF to plain text and return that text.

        The PDF is copied into a temporary file and converted with
        calibre's `ebook-convert` (see the comment above get_calls()
        for why that tool is required).
        """
        pdffile = tempfile.NamedTemporaryFile(bufsize=100000, mode='w', suffix='.pdf')
        # The output path mirrors the input name with a .txt suffix.
        temptxt = pdffile.name.replace('.pdf', '.txt')
        cmd = "ebook-convert"
        stdout = open("/dev/null", "w")
        shutil.copyfileobj(self.pdf, pdffile)
        pdffile.flush()
        subprocess.call([cmd, pdffile.name, temptxt], stdout=stdout)
        pdffile.close()
        txtfile = open(temptxt, 'r')
        txt = txtfile.read()
        txtfile.close()
        os.remove(temptxt)
        return txt

    def get_details(self):
        """Parse the consumption summary section into Detail objects."""
        txt = self._parse_pdf()
        # The summary sits after the second 'CONSOMMATION' marker and
        # before 'ACTIVITE DETAILLEE'.
        page = txt.split('CONSOMMATION')[2].split('ACTIVITE DETAILLEE')[0]
        lines = page.split('\n')
        lines = [x for x in lines if len(x) > 0] # Remove empty lines
        details = []
        detail = None
        lines.pop(-1) # line for picture informations
        for line in lines:
            if "Votre consommation" in line:
                line = line.split(": ", 1)[1]
            # A line starting with a letter opens a new element.
            if re.match('[A-Za-z]', line[0]):
                # We have a new element, return the other one
                if detail is not None:
                    details.append(detail)
                detail = Detail()
                # Split the textual label from the first digit onwards.
                split = re.split("(\d)", line, maxsplit=1)
                detail.price = Decimal(0)
                detail.infos = split[1] + split[2]
                if '€' in line:
                    specialprice = split[1] + split[2]
                    detail.price = Decimal(specialprice.replace('€', ''))
                detail.label = unicode(split[0], encoding='utf-8')
            elif '€' in line:
                # Continuation line carrying only the price.
                detail.price = Decimal(line.replace('€', ''))
            else:
                # Continuation line carrying extra information.
                detail.infos = unicode(line, encoding='utf-8')
        details.append(detail)
        return details

    def get_balance(self):
        """Return the account balance as a Detail.

        NOTE(review): implicitly returns None when no 'Votre solde'
        entry is found among the calls — callers must handle that.
        """
        for calls in self.get_calls():
            if "Votre solde" in calls.label:
                detail = Detail()
                detail.price = calls.price
                detail.label = u"Balance"
                return detail

    # Standard pdf text extractor take text line by line
    # But the position in the file is not always the "real" position to display...
    # It produce some unsorted and unparsable data
    # Example of bad software: pdfminer and others python tools
    # This is why we have to use "ebook-convert" from calibre software,
    # it is the only one to 'reflow" text and give some relevant results
    # The bad new is that ebook-convert doesn't support simple use with stdin/stdout
    def get_calls(self):
        """Parse the detailed activity pages into Detail objects,
        sorted by timestamp, newest first."""
        txt = self._parse_pdf()
        pages = txt.split("DEBIT")
        pages.pop(0) # remove headers
        details = []
        for page in pages:
            page = page.split('RÉGLO MOBILE')[0].split('N.B. Prévoir')[0] # remove footers
            lines = page.split('\n')
            lines = [x for x in lines if len(x) > 0] # Remove empty lines
            # Each record normally spans four lines: nature/date,
            # correspondent, duration, price.
            # NOTE(review): relies on Python 2 integer division.
            numitems = (len(lines) + 1) / 4
            lines.pop(0)
            # `modif` accumulates line-offset corrections for records
            # that do not follow the regular four-line layout.
            modif = 0
            i = 0
            while i < numitems:
                if modif != 0:
                    numitems = ((len(lines) + 1 + modif) / 4)
                nature = i * 4 - modif
                dateop = nature
                corres = nature + 1
                duree = corres + 1
                price = duree + 1
                # Plan-change rows carry no call data: skip them.
                if "Changement vers le Forfait" in lines[nature]:
                    modif += 1
                    i += 1
                    continue
                # Balance rows miss the leading cell; re-align the indexes.
                if "Votre solde" in lines[nature]:
                    lines[nature + 1] = "Votre solde " + lines[nature + 1]
                    dateop = nature + 1
                    corres = dateop + 1
                    duree = corres + 1
                    price = duree + 1
                    modif -= 1
                # A correspondent cell that is neither digits nor '-'
                # means the record is one line short.
                if not lines[corres][0:3].isdigit() and not lines[corres][0:3] == "-":
                    modif += 1
                detail = Detail()
                # splits: [label, 'dd/mm/yyyy', 'HH:MM[:SS]...'].
                splits = re.split("(\d+\/\d+\/\d+)", lines[dateop])
                mydate = date(*reversed([int(x) for x in splits[1].split("/")]))
                mytime = time(*[int(x) for x in splits[2].split(":")])
                detail.datetime = datetime.combine(mydate, mytime)
                if lines[corres] == '-':
                    lines[corres] = ""
                if lines[duree] == '-':
                    lines[duree] = ''
                detail.label = unicode(splits[0], encoding='utf-8', errors='replace') + u" " + lines[corres] + u" " + lines[duree]
                # Special case with only 3 columns, we insert a price
                if "Activation de votre ligne" in detail.label:
                    lines.insert(price, '0')
                try:
                    detail.price = Decimal(lines[price].replace(',', '.'))
                except:
                    # NOTE(review): bare except — unparsable prices fall
                    # back to 0; consider narrowing the exception type.
                    detail.price = Decimal(0)
                details.append(detail)
                i += 1
        return sorted(details, key=_get_date, reverse=True)
class HistoryPage(BasePage):
    """Page listing the monthly invoices, one <li id="liMoisN"> each."""

    def on_loaded(self):
        # Nothing to prepare when the page loads.
        pass

    def getmaxid(self):
        """Return how many monthly entries ("liMoisN" items) exist."""
        month_index = 1
        while len(self.document.xpath('//li[@id="liMois%s"]' % month_index)) > 0:
            month_index += 1
        return month_index - 1

    def date_bills(self, parentid):
        """Yield a Bill for every monthly entry found on the page."""
        month_index = 1
        while True:
            items = self.document.xpath('//li[@id="liMois%s"]' % month_index)
            if not items:
                break
            month_index += 1
            link = items[0].xpath('a')[0]
            bill = Bill()
            bill._url = link.attrib['href']
            bill.label = link.text
            bill.format = u"pdf"
            bill.id = parentid + bill.label.replace(' ', '')
            yield bill
|
Python
| 0.000001
|
@@ -4515,17 +4515,16 @@
ree + 1%0A
-%0A
@@ -4978,24 +4978,286 @@
modif -= 1%0A
+ elif len(re.split(%22(%5Cd+%5C/%5Cd+%5C/%5Cd+)%22, lines%5Bdateop%5D)) %3C 2:%0A dateop = nature + 1%0A corres = dateop + 1%0A duree = corres + 1%0A price = duree + 1%0A modif -= 1%0A
|
e6e1ae536b3a09e36d6fada64b0064003afdd639
|
drop UserAdmin.parents_link and UserAdmin.participants_link
|
leprikon/admin/user.py
|
leprikon/admin/user.py
|
from __future__ import unicode_literals
from django import forms
from django.conf.urls import url as urls_url
from django.contrib import admin
from django.contrib.auth import get_user_model, login
from django.contrib.auth.admin import UserAdmin as _UserAdmin
from django.contrib.auth.decorators import user_passes_test
from django.contrib.messages import ERROR
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404, render
from django.utils.encoding import smart_text
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from ..forms.user import UserAdminCreateForm
from ..utils import merge_users
from .messages import SendMessageAdminMixin
class UserAdmin(SendMessageAdminMixin, _UserAdmin):
actions = ('merge',)
add_form = UserAdminCreateForm
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('username', 'first_name', 'last_name', 'email'),
}),
)
def merge(self, request, queryset):
class MergeForm(forms.Form):
target = forms.ModelChoiceField(
label=_('Target user'),
help_text=_('All information will be merged into selected account.'),
queryset=queryset,
)
if request.POST.get('post', 'no') == 'yes':
form = MergeForm(request.POST)
if form.is_valid():
target = form.cleaned_data['target']
for user in queryset.all():
if user != target:
try:
merge_users(user, target)
self.message_user(request, _('User {} was merged into user {}.').format(user, target))
except:
self.message_user(
request, _('Can not merge user {} into user {}.').format(user, target),
level=ERROR
)
return
else:
form = MergeForm()
return render(request, 'leprikon/admin/merge.html', {
'title': _('Select target user for merge'),
'question': _('Are you sure you want to merge selected users into one? '
'All participants, parents, registrations and other related information '
'will be added to the target user account and the remaining users will be deleted.'),
'queryset': queryset,
'objects_title': _('Users'),
'form_title': _('Select target account for merge'),
'opts': self.model._meta,
'form': form,
'action_checkbox_name': admin.helpers.ACTION_CHECKBOX_NAME,
})
merge.short_description = _('Merge selected user accounts')
def get_list_display(self, request):
return ['id'] \
+ list(super(UserAdmin, self).get_list_display(request)) \
+ ['parents_link', 'participants_link', 'login_as_link']
def get_search_fields(self, request):
return list(super(UserAdmin, self).get_search_fields(request)) + [
'leprikon_parents__first_name',
'leprikon_parents__last_name',
'leprikon_parents__email',
'leprikon_participants__first_name',
'leprikon_participants__last_name',
'leprikon_participants__email',
]
def get_urls(self):
urls = super(UserAdmin, self).get_urls()
login_as_view = self.admin_site.admin_view(user_passes_test(lambda u: u.is_superuser)(self.login_as))
return [
urls_url(r'(?P<user_id>\d+)/login-as/$', login_as_view, name='auth_user_login_as'),
] + urls
def login_as(self, request, user_id):
user = get_object_or_404(get_user_model(), id=user_id)
user.backend = 'django.contrib.auth.backends.ModelBackend'
login(request, user)
return HttpResponseRedirect(reverse('leprikon:summary'))
def login_as_link(self, obj):
return '<a href="{url}">{text}</a>'.format(
url = reverse('admin:auth_user_login_as', args=[obj.id]),
text = _('login')
)
login_as_link.allow_tags = True
login_as_link.short_description = _('login')
@cached_property
def parents_url(self):
return reverse('admin:leprikon_parent_changelist')
def parents_link(self, obj):
return '<a href="{url}?user__id={user}">{names}</a>'.format(
url = self.parents_url,
user = obj.id,
names = ', '.join(smart_text(parent) for parent in obj.leprikon_parents.all()),
)
parents_link.allow_tags = True
parents_link.short_description = _('parents')
@cached_property
def participants_url(self):
return reverse('admin:leprikon_participant_changelist')
def participants_link(self, obj):
return '<a href="{url}?user__id={user}">{names}</a>'.format(
url = self.participants_url,
user = obj.id,
names = ', '.join(smart_text(participant) for participant in obj.leprikon_participants.all()),
)
participants_link.allow_tags = True
participants_link.short_description = _('participants')
    def get_message_recipients(self, request, queryset):
        # Admin messages go to every selected user; no additional filtering.
        return queryset.all()
|
Python
| 0
|
@@ -504,105 +504,8 @@
der%0A
-from django.utils.encoding import smart_text%0Afrom django.utils.functional import cached_property%0A
from
@@ -2972,45 +2972,8 @@
+ %5B
-'parents_link', 'participants_link',
'log
@@ -4255,998 +4255,8 @@
')%0A%0A
- @cached_property%0A def parents_url(self):%0A return reverse('admin:leprikon_parent_changelist')%0A%0A def parents_link(self, obj):%0A return '%3Ca href=%22%7Burl%7D?user__id=%7Buser%7D%22%3E%7Bnames%7D%3C/a%3E'.format(%0A url = self.parents_url,%0A user = obj.id,%0A names = ', '.join(smart_text(parent) for parent in obj.leprikon_parents.all()),%0A )%0A parents_link.allow_tags = True%0A parents_link.short_description = _('parents')%0A%0A @cached_property%0A def participants_url(self):%0A return reverse('admin:leprikon_participant_changelist')%0A%0A def participants_link(self, obj):%0A return '%3Ca href=%22%7Burl%7D?user__id=%7Buser%7D%22%3E%7Bnames%7D%3C/a%3E'.format(%0A url = self.participants_url,%0A user = obj.id,%0A names = ', '.join(smart_text(participant) for participant in obj.leprikon_participants.all()),%0A )%0A participants_link.allow_tags = True%0A participants_link.short_description = _('participants')%0A%0A
|
0e43293a808d315b1cc5ea86c599511bb8cedd0a
|
exclude wikidata
|
mapreduce/dbpedia_dbm.py
|
mapreduce/dbpedia_dbm.py
|
"""
Creates DBPedia labels-types Shelve file of the following format:
{ LABEL: [Type1, Type2, ...], ...}
For example:
Tramore: Town, Settlement, PopulatedPlace, Place
Tramore,_Ireland: Town, Settlement, PopulatedPlace, Place
"""
import subprocess
import urllib
from collections import defaultdict
import shelve
import rdflib
# NOTE: this script is Python 2 (dict.iteritems, urllib.unquote, implicit str lines).
# parse dbpedia ontology
# Build a child map of the DBPedia class hierarchy: for every rdfs:subClassOf
# triple, record the subclass under its parent class URI.
dbpedia_types = defaultdict(list)
g = rdflib.Graph()
g.parse("dbpedia_2014.owl", format="xml")
for subject, predicate, obj in g:
    if str(predicate) == 'http://www.w3.org/2000/01/rdf-schema#subClassOf':
        dbpedia_types[str(obj)].append(str(subject))
#BFS traversal
# Assign every ontology class its depth below owl:Thing (depth 0); deeper
# classes are more specific and are listed first when types are sorted below.
dbpedia_types_order = {}
nodes = [('http://www.w3.org/2002/07/owl#Thing', 0)]
while nodes:
    node, order = nodes.pop(0)
    dbpedia_types_order[node.replace('http://dbpedia.org/ontology/', '')] = order
    for child in dbpedia_types[node]:
        nodes.append((child, order+1))
del dbpedia_types  # the child map is no longer needed; keep only the depth table
TYPES_FILE = 'instance_types_en.nt.bz2'
# Types considered too generic to be useful for labelling.
EXCLUDES = {'Agent', 'TimePeriod', 'PersonFunction', 'Year'}
dbpediadb_types = defaultdict(list)
# BZ2File module cannot process multi-stream files, so use subprocess
p = subprocess.Popen('bzcat -q ' + TYPES_FILE, shell=True, stdout=subprocess.PIPE)
for line in p.stdout:
    if '<BAD URI: Illegal character' in line:
        continue
    try:
        uri, predicate, type_uri = line.split(' ', 2)
    except:
        continue
    if 'http://dbpedia.org/ontology/' not in type_uri:
        continue
    if 'http://dbpedia.org/ontology/Wikidata' in type_uri:
        continue
    uri = urllib.unquote(uri.replace('<http://dbpedia.org/resource/', '')[:-1])
    type_uri = type_uri.replace('<http://dbpedia.org/ontology/', '')[:-4]
    if type_uri in EXCLUDES:
        continue
    dbpediadb_types[uri].append(type_uri)
dbpediadb = shelve.open('dbpedia_types.dbm')
dbpediadb_lower = shelve.open('dbpedia_lowercase2labels.dbm', writeback=True)
# sort types: most specific (deepest in the ontology) first
for uri, types in dbpediadb_types.iteritems():
    dbpediadb_types[uri] = sorted(types, key=lambda x: dbpedia_types_order[x], reverse=True)
# write canonical labels first
for uri, types in dbpediadb_types.iteritems():
    dbpediadb[uri] = types
    dbpediadb_lower[uri.lower()] = [uri]
REDIRECTS_FILE = 'redirects_transitive_en.nt.bz2'
# BZ2File module cannot process multi-stream files, so use subprocess
p = subprocess.Popen('bzcat -q ' + REDIRECTS_FILE, shell=True, stdout=subprocess.PIPE)
for line in p.stdout:
    try:
        uri_redirect, predicate, uri_canon = line.split(' ', 2)
    except:
        continue
    name_redirect = urllib.unquote(uri_redirect.replace('<http://dbpedia.org/resource/', '')[:-1])
    name_canon = urllib.unquote(uri_canon.replace('<http://dbpedia.org/resource/', '')[:-4])
    if '(disambiguation)' in name_redirect:
        continue
    # skip entities that have no types
    if name_canon not in dbpediadb_types:
        continue
    dbpediadb[name_redirect] = dbpediadb_types[name_canon]
    if name_redirect.lower() in dbpediadb_lower:
        dbpediadb_lower[name_redirect.lower()].append(name_redirect)
    else:
        dbpediadb_lower[name_redirect.lower()] = [name_redirect]
dbpediadb.close()
# BUG FIX: the lowercase shelf was never closed. With writeback=True the
# in-memory cache is only flushed on sync()/close(), so entries could be
# silently lost.
dbpediadb_lower.close()
|
Python
| 0.002561
|
@@ -1474,24 +1474,100 @@
continue%0A
+ if 'http://dbpedia.org/ontology/Wikidata' in type_uri:%0A continue%0A
uri = ur
|
bcac244f676c494b28d81d79b7c661bdbc5cee39
|
remove some whitespace I don't like
|
helpers.py
|
helpers.py
|
"""
Imposter - Another weblog app
Copyright (c) 2010 by Jochem Kossen <jochem.kossen@gmail.com>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
utility function library
"""
from flask import g
from hashlib import sha256
from docutils import core
from docutils.writers.html4css1 import Writer,HTMLTranslator
import markdown
import re
from unicodedata import normalize
def hashify(seed, text):
    """Return the hex SHA-256 digest of *seed* concatenated with *text*.

    Both arguments are interpolated as strings; the result is a 64-character
    lowercase hexadecimal string.
    """
    # Encode explicitly: hashlib requires bytes on Python 3, and relying on
    # the implicit ASCII encode on Python 2 breaks for non-ASCII input.
    return sha256(('%s%s' % (seed, text)).encode('utf-8')).hexdigest()
class HTMLFragmentTranslator(HTMLTranslator):
    """docutils HTML translator that emits only an embeddable body fragment.

    Blanks out the head prefix, body wrapper and stylesheet that the stock
    ``HTMLTranslator`` would add, so ``astext`` returns bare HTML.
    """
    def __init__(self, document):
        HTMLTranslator.__init__(self, document)
        # Suppress the boilerplate the base class would otherwise emit.
        self.head_prefix = ['','','','','']
        self.body_prefix = []
        self.body_suffix = []
        self.stylesheet = []
    def astext(self):
        # Only the accumulated body parts make up the fragment.
        return ''.join(self.body)
# Module-level docutils writer configured to produce bare HTML fragments.
html_fragment_writer = Writer()
html_fragment_writer.translator_class = HTMLFragmentTranslator

def rest_to_html(s):
    """Convert ReST input to HTML output (body fragment only)."""
    return core.publish_string(s, writer = html_fragment_writer)
def markup_to_html(format, text):
    """Render *text* to HTML according to ``format.value``.

    Supported values are 'rest' and 'markdown'; any other format returns
    the text unchanged.
    """
    kind = format.value
    if kind == 'markdown':
        return markdown.markdown(text)
    if kind == 'rest':
        return rest_to_html(text)
    return text
def slugify(text, delim=u'-', maxlen=128):
    """Generates an ASCII-only slug usable in paths and URLs.

    Based on http://flask.pocoo.org/snippets/5/
    """
    # NOTE(review): Python 2 only -- relies on the ``unicode`` builtin.
    # Split on whitespace and punctuation characters.
    punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
    result = []
    for word in punct_re.split(text.lower()):
        # Decompose accented characters (NFKD) and drop anything non-ASCII.
        word = normalize('NFKD', unicode(word)).encode('ascii', 'ignore')
        if word:
            result.append(word)
    # Join the surviving words and truncate to *maxlen* characters.
    return unicode(delim.join(result)[0:maxlen])
def summarize(content, length=250, suffix='...'):
    """Truncate *content* at the last word boundary within *length*
    characters and append *suffix*; content at or under the limit is
    returned verbatim.

    Based on http://stackoverflow.com/questions/250357/smart-truncate-in-python
    """
    if len(content) > length:
        truncated = content[:length + 1].rsplit(' ', 1)[0]
        return truncated + suffix
    return content
|
Python
| 0.999982
|
@@ -2157,19 +2157,17 @@
, writer
- =
+=
html_fra
|
d5c319ae224804ed74f2f0bd7716d827b20ab09a
|
use frameworks only for swift pods
|
measure_cocoapod_size.py
|
measure_cocoapod_size.py
|
#!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/usr/bin/python
"""measure_cocoapod_size.py provides size impact of a given set of cocoapods.
Usage: ./measure_cocoapod_size.py -cocoapods $POD_NAME:$POD_VERSION
"""
import argparse
import os
import tempfile
from xcode_project_diff import GenerateSizeDifference
# Bundled empty sample apps used as the measurement baseline for each language.
OBJC_APP_DIR = 'sizetestproject'
OBJC_APP_NAME = 'SizeTest'
SWIFT_APP_DIR = 'SwiftApp'
SWIFT_APP_NAME = 'SwiftApp'

# Values accepted by the --mode flag.
MODE_SWIFT = 'swift'
MODE_OBJC = 'objc'

# Spec repositories used when --spec_repos is not given.
DEFAULT_SPEC_REPOS = ['https://github.com/CocoaPods/Specs.git']

# Short aliases accepted by --spec_repos, mapped to full repo URLs.
SPEC_REPO_DICT = {
    'cpdc-internal': 'sso://cpdc-internal/spec',
    'cpdc-eap': 'sso://cpdc-eap/spec',
    'master': 'https://github.com/CocoaPods/Specs.git'
}
def GetSampleApp(mode):
  """Return the (directory, target name) pair of the sample app for *mode*."""
  if mode != MODE_SWIFT:
    return OBJC_APP_DIR, OBJC_APP_NAME
  return SWIFT_APP_DIR, SWIFT_APP_NAME
def InstallPods(cocoapods, target_dir, spec_repos, target_name, mode):
  """InstallPods installs the pods.

  Args:
    cocoapods: Mapping from pod names to pod versions.
    target_dir: The target directory.
    spec_repos: The set of spec repos.
    target_name: The name of the target.
    mode: The type of cocoapods.

  Returns:
    The path to the workspace.
  """
  # pod must be invoked from inside the project directory.
  cwd = os.getcwd()
  os.chdir(target_dir)
  os.system('pod init')
  os.system('touch Podfile')
  with open('Podfile', 'w') as podfile:
    for repo in spec_repos:
      podfile.write('source "{}"\n'.format(repo))
    podfile.write('\n')
    # NOTE(review): 'mode' is currently unused — use_frameworks! is always
    # written, even for pure Objective-C pods; confirm this is intended.
    podfile.write('use_frameworks!\n')
    podfile.write('target \'{}\' do\n'.format(target_name))
    # Pods with an empty version string get no version pin (latest).
    for pod, version in cocoapods.items():
      if version:
        podfile.write('  pod \'{}\', \'{}\'\n'.format(pod, version))
      else:
        podfile.write('  pod \'{}\'\n'.format(pod))
    podfile.write('end')
  os.system('pod install')
  os.chdir(cwd)
  return os.path.join(target_dir, '{}.xcworkspace'.format(target_name))
def CopyProject(source_dir, target_dir):
  """CopyProject copies the project from the source to the target.

  Args:
    source_dir: The path to the source directory.
    target_dir: The path to the target directory.
  """
  # NOTE(review): the paths are interpolated into a shell command unquoted;
  # spaces or shell metacharacters in either path would break the copy.
  os.system('cp -r {} {}'.format(source_dir, target_dir))
def GetPodSizeImpact(parsed_args):
  """GetPodSizeImpact gets the size impact of the set of pods.

  Builds two copies of the sample app -- a baseline without pods and a
  target with the requested pods installed -- and prints the size delta.

  Args:
    parsed_args: The set of arguments passed to the program.
  """
  sample_app_dir, sample_app_name = GetSampleApp(parsed_args.mode)
  cocoapods = {}
  # Resolve spec repo aliases to full URLs; fall back to the defaults.
  if parsed_args.spec_repos:
    spec_repos = []
    for repo in parsed_args.spec_repos:
      if repo in SPEC_REPO_DICT:
        spec_repos.append(SPEC_REPO_DICT[repo])
      else:
        spec_repos.append(repo)
  else:
    spec_repos = DEFAULT_SPEC_REPOS
  # Each pod is given as NAME or NAME:VERSION; empty version means latest.
  for pod in parsed_args.cocoapods:
    pod_info = pod.split(':')
    pod_name = pod_info[0].strip()
    if len(pod_info) > 1:
      pod_version = pod_info[1].strip()
    else:
      pod_version = ''
    cocoapods[pod_name] = pod_version
  base_project = tempfile.mkdtemp()
  target_project = tempfile.mkdtemp()
  CopyProject(sample_app_dir, base_project)
  CopyProject(sample_app_dir, target_project)
  target_project = InstallPods(cocoapods,
                               os.path.join(target_project, sample_app_dir),
                               spec_repos, sample_app_name, parsed_args.mode)
  source_project = os.path.join(base_project,
                                '{}/{}.xcodeproj'.format(sample_app_dir, sample_app_name))
  source_size, target_size = GenerateSizeDifference(
      source_project, sample_app_name, target_project, sample_app_name)
  # NOTE: Python 2 print statement.
  print 'The pods combined add an extra size of {} bytes'.format(
      target_size - source_size)
def Main():
  """Main generates the PodSize impact.

  Parses command-line arguments (--cocoapods, --mode, --spec_repos) and
  prints the combined size impact of the requested pods.
  """
  parser = argparse.ArgumentParser(description='The size impact of a cocoapod')
  parser.add_argument(
      '--cocoapods',
      metavar='N',
      type=str,
      nargs='+',
      required=True,
      help='The set of cocoapods')
  parser.add_argument(
      '--mode',
      type=str,
      choices=[MODE_SWIFT, MODE_OBJC],
      default=MODE_OBJC,
      help='Type of cocoapod'
  )
  parser.add_argument(
      '--spec_repos',
      metavar='N',
      type=str,
      nargs='+',
      required=False,
      help='The set of spec_repos')
  args = parser.parse_args()
  GetPodSizeImpact(args)
|
Python
| 0
|
@@ -1991,24 +1991,53 @@
write('%5Cn')%0A
+ if mode == MODE_SWIFT:%0A
podfile.
|
988f2cfa9d7dfb643a1efe26e6fd9dc8985c5483
|
decode utf8 coming from subprocess
|
modules/validation/export_validator.py
|
modules/validation/export_validator.py
|
from validator import Validator
from ..ether import exec_via_temp, ExportConfig, ether_to_sgml
import re
# TODO: would have been ideal to write this without any filesystem operations
class ExportValidator(Validator):
    """Validates a document's export against an XML schema via xmllint."""

    def __init__(self, rule):
        # rule is a sequence: [corpus_regex, doc_regex, _, export_config, _, schema]
        self.corpus = rule[0]
        self.doc = rule[1]
        self.config = rule[3]
        self.schema = rule[5]

    def validate(self, socialcalc, doc_id, doc_name, doc_corpus):
        """Export *socialcalc* with this rule's config and validate the result.

        Returns ``(report, applied)``: *applied* is False when the rule's
        corpus/doc filters do not match this document; *report* is an empty
        string on successful validation, otherwise an HTML error summary.
        """
        report = ""
        # The rule only applies to matching corpus/document names.
        if self.corpus is not None:
            if re.search(self.corpus, doc_corpus) is None:
                return report, False
        if self.doc is not None:
            if re.search(self.doc, doc_name) is None:
                return report, False

        export_data = ether_to_sgml(socialcalc, doc_id, config=self.config)
        schema = self.schema
        command = "xmllint --schema schemas/" + schema + " tempfilename"
        out, err = exec_via_temp(export_data, command)
        # Normalize xmllint's output: strip markup and replace the temp file
        # path with a readable label.
        err = err.strip()
        err = err.replace("<br>","").replace("\n","").replace('<h1 align="center">xmllint output</h1>',"")
        err = re.sub(r'/tmp/[A-Za-z0-9_]+:','XML schema: <br>',err)
        # BUG FIX: the replacement used to be 'XML schema ' (trailing space),
        # producing "XML schema  validates", which never equalled the success
        # string below, so every valid document was reported as a problem.
        err = re.sub(r'/tmp/[A-Za-z0-9_]+','XML schema',err)
        err = re.sub(r'\n','<br/>',err)
        if err == "XML schema validates":
            report = ""
        else:
            # NOTE(review): if exec_via_temp returns bytes (Python 3), err
            # needs .decode("utf8") before this concatenation — confirm.
            report = "Problems with exporting with " + self.config \
                     + " and validating with " + self.schema + ":<br>" + err + "<br>"
        return report, True
|
Python
| 0.998153
|
@@ -1270,17 +1270,16 @@
schema
-
validate
@@ -1465,16 +1465,31 @@
%3E%22 + err
+.decode(%22utf8%22)
+ %22%3Cbr%3E
|
ea84da1709fac734ec869f2cbfe3a4dbcf7f9be2
|
Remove unneeded import
|
BlockServer/epics/procserv_utils.py
|
BlockServer/epics/procserv_utils.py
|
# This file is part of the ISIS IBEX application.
# Copyright (C) 2012-2016 Science & Technology Facilities Council.
# All rights reserved.
#
# This program is distributed in the hope that it will be useful.
# This program and the accompanying materials are made available under the
# terms of the Eclipse Public License v1.0 which accompanies this distribution.
# EXCEPT AS EXPRESSLY SET FORTH IN THE ECLIPSE PUBLIC LICENSE V1.0, THE PROGRAM
# AND ACCOMPANYING MATERIALS ARE PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND. See the Eclipse Public License v1.0 for more details.
#
# You should have received a copy of the Eclipse Public License v1.0
# along with this program; if not, you can obtain a copy from
# https://www.eclipse.org/org/documents/epl-v10.php or
# http://opensource.org/licenses/eclipse-1.0.php
import time
from server_common.channel_access import ChannelAccess
from server_common.utilities import print_and_log, ioc_restart_pending, retry
class ProcServWrapper(object):
"""A wrapper for accessing some of the functionality of ProcServ."""
@staticmethod
def generate_prefix(prefix, ioc):
"""Generate the PV prefix for an IOCs ProcServ.
Args:
prefix (string): The prefix for the instrument
ioc (string): The name of the IOC
Returns:
string : The PV prefix
"""
return "%sCS:PS:%s" % (prefix, ioc)
def start_ioc(self, prefix, ioc):
"""Starts the specified IOC.
Args:
prefix (string): The prefix for the instrument
ioc (string): The name of the IOC
"""
print_and_log("Starting IOC %s" % ioc)
ChannelAccess.caput(self.generate_prefix(prefix, ioc) + ":START", 1)
def stop_ioc(self, prefix, ioc):
"""Stops the specified IOC.
Args:
prefix (string): The prefix for the instrument
ioc (string): The name of the IOC
"""
print_and_log("Stopping IOC %s" % ioc)
ChannelAccess.caput(self.generate_prefix(prefix, ioc) + ":STOP", 1)
def restart_ioc(self, prefix, ioc):
"""Restarts the specified IOC.
Args:
prefix (string): The prefix for the instrument
ioc (string): The name of the IOC
"""
print_and_log("Restarting IOC %s" % ioc)
ChannelAccess.caput(self.generate_prefix(prefix, ioc) + ":RESTART", 1)
def ioc_restart_pending(self, prefix, ioc):
"""Tests to see if an IOC restart is pending
Args:
prefix (string): The prefix for the instrument
ioc (string): The name of the IOC
Returns:
bool: Whether a restart is pending
"""
return ioc_restart_pending(self.generate_prefix(prefix, ioc), ChannelAccess)
def get_ioc_status(self, prefix, ioc):
"""Gets the status of the specified IOC.
Args:
prefix (string): The prefix for the instrument
ioc (string): The name of the IOC
Returns:
string : The status
"""
ans = ChannelAccess.caget(self.generate_prefix(prefix, ioc) + ":STATUS", as_string=True)
if ans is None:
raise Exception("Could not find IOC ({})".format(self.generate_prefix(prefix, ioc)))
return ans.upper()
def toggle_autorestart(self, prefix, ioc):
"""Toggles the auto-restart property.
Args:
prefix (string): The prefix for the instrument
ioc (string): The name of the IOC
"""
# Check IOC is running, otherwise command is ignored
print_and_log("Toggling auto-restart for IOC {}".format(ioc))
ChannelAccess.caput(self.generate_prefix(prefix, ioc) + ":TOGGLE", 1)
@retry(50, 0.1, ValueError) # Retry for 5 seconds to get a valid value on failure
def get_autorestart(self, prefix, ioc):
"""Gets the current auto-restart setting of the specified IOC.
Args:
prefix (string): The prefix for the instrument
ioc (string): The name of the IOC
Returns:
bool : Whether auto-restart is enabled
"""
ioc_prefix = self.generate_prefix(prefix, ioc)
ans = ChannelAccess.caget("{}:AUTORESTART".format(ioc_prefix), as_string=True)
if ans not in ["On", "Off"]:
raise ValueError("Could not get auto-restart property for IOC {}, got '{}'".format(ioc_prefix, ans))
return ans == "On"
|
Python
| 0.000002
|
@@ -843,21 +843,8 @@
php%0A
-import time%0A%0A
from
|
b469e0b953b3be57ea69889fa1a4ff447b433afe
|
Fix for gradient change in numpy 1.12
|
metpy/calc/kinematics.py
|
metpy/calc/kinematics.py
|
# Copyright (c) 2008-2015 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
from __future__ import division
import numpy as np
from ..package_tools import Exporter
from ..constants import g
from ..units import atleast_2d, concatenate, units
exporter = Exporter(globals())
def _gradient(f, *args, **kwargs):
    """Wrapper around :func:`numpy.gradient` that handles unit-carrying arrays.

    Missing grid spacings are padded with dimensionless unity, and the units
    ``f.units / spacing.units`` are re-attached to each returned derivative.
    """
    if len(args) < f.ndim:
        args = list(args)
        args.extend([units.Quantity(1., 'dimensionless')] * (f.ndim - len(args)))
    # BUG FIX for numpy >= 1.12: np.gradient validates its spacing arguments
    # and rejects Quantity objects, so pass plain magnitudes and restore the
    # units on the results below.
    grad = np.gradient(f, *(a.magnitude for a in args), **kwargs)
    if f.ndim == 1:
        # np.gradient returns a single array (not a list) for 1-D input.
        return units.Quantity(grad, f.units / args[0].units)
    return [units.Quantity(g, f.units / dx.units) for dx, g in zip(args, grad)]
def _stack(arrs):
    """Join the given arrays along a new leading axis."""
    expanded = [a[np.newaxis] for a in arrs]
    return concatenate(expanded, axis=0)
def _get_gradients(u, v, dx, dy):
    """Return (du/dx, du/dy, dv/dx, dv/dy) for 2D wind component arrays."""
    du = _gradient(u, dx, dy)
    dv = _gradient(v, dx, dy)
    return du[0], du[1], dv[0], dv[1]
@exporter.export
def v_vorticity(u, v, dx, dy):
    r'''Calculate the vertical vorticity of the horizontal wind.

    The grid spacing must be constant in each direction.

    Parameters
    ----------
    u : (X, Y) ndarray
        x component of the wind
    v : (X, Y) ndarray
        y component of the wind
    dx : float
        The grid spacing in the x-direction
    dy : float
        The grid spacing in the y-direction

    Returns
    -------
    (X, Y) ndarray
        vertical vorticity

    See Also
    --------
    h_convergence, convergence_vorticity
    '''
    grads = _get_gradients(u, v, dx, dy)
    # vorticity = dv/dx - du/dy
    return grads[2] - grads[1]
@exporter.export
def h_convergence(u, v, dx, dy):
    r'''Calculate the horizontal convergence of the horizontal wind.

    The grid spacing must be constant in each direction.

    Parameters
    ----------
    u : (X, Y) ndarray
        x component of the wind
    v : (X, Y) ndarray
        y component of the wind
    dx : float
        The grid spacing in the x-direction
    dy : float
        The grid spacing in the y-direction

    Returns
    -------
    (X, Y) ndarray
        The horizontal convergence

    See Also
    --------
    v_vorticity, convergence_vorticity
    '''
    grads = _get_gradients(u, v, dx, dy)
    # convergence = du/dx + dv/dy
    return grads[0] + grads[3]
@exporter.export
def convergence_vorticity(u, v, dx, dy):
    r'''Calculate both the horizontal convergence and the vertical vorticity
    of the horizontal wind in one pass.

    The grid spacing must be constant in each direction.

    Parameters
    ----------
    u : (X, Y) ndarray
        x component of the wind
    v : (X, Y) ndarray
        y component of the wind
    dx : float
        The grid spacing in the x-direction
    dy : float
        The grid spacing in the y-direction

    Returns
    -------
    convergence, vorticity : tuple of (X, Y) ndarrays
        The horizontal convergence and vertical vorticity, respectively

    See Also
    --------
    v_vorticity, h_convergence

    Notes
    -----
    This is a convenience function: the gradients are computed once and
    reused, which is cheaper than calling the two functions separately.
    '''
    grads = _get_gradients(u, v, dx, dy)
    return grads[0] + grads[3], grads[2] - grads[1]
@exporter.export
def advection(scalar, wind, deltas):
    r'''Calculate the advection of a scalar field by the wind.

    The order of the dimensions of the arrays must match the order in which
    the wind components are given.  For example, if the winds are given [u, v],
    then the scalar and wind arrays must be indexed as x,y (which puts x as the
    rows, not columns).

    Parameters
    ----------
    scalar : N-dimensional array
        Array (with N-dimensions) with the quantity to be advected.
    wind : sequence of arrays
        Length N sequence of N-dimensional arrays.  Represents the flow,
        with a component of the wind in each dimension.  For example, for
        horizontal advection, this could be a list: [u, v], where u and v
        are each a 2-dimensional array.
    deltas : sequence
        A (length N) sequence containing the grid spacing in each dimension.

    Returns
    -------
    N-dimensional array
        An N-dimensional array containing the advection at all grid points.
    '''
    # This allows passing in a list of wind components or an array
    wind = _stack(wind)

    # Gradient returns a list of derivatives along each dimension.  We convert
    # this to an array with dimension as the first index
    grad = _stack(_gradient(scalar, *deltas))

    # Make them be at least 2D (handling the 1D case) so that we can do the
    # multiply and sum below
    grad, wind = atleast_2d(grad, wind)

    # Advection is -V . del(scalar); summing over the leading (component)
    # axis performs the dot product at every grid point.
    return (-grad * wind).sum(axis=0)
@exporter.export
def geostrophic_wind(heights, f, dx, dy):
    r'''Calculate the geostrophic wind given from the heights or geopotential.

    Parameters
    ----------
    heights : (x,y) ndarray
        The height field, given with leading dimensions of x by y.  There
        can be trailing dimensions on the array.
    f : array_like
        The coriolis parameter.  This can be a scalar to be applied
        everywhere or an array of values.
    dx : scalar
        The grid spacing in the x-direction
    dy : scalar
        The grid spacing in the y-direction

    Returns
    -------
    A 2-item tuple of arrays
        A tuple of the u-component and v-component of the geostrophic wind.
    '''
    # [length]**2 means heights is geopotential (m^2 s^-2): no extra factor
    # of g is needed.  Plain heights (m) must be multiplied by g.
    if heights.dimensionality['[length]'] == 2.0:
        norm_factor = 1. / f
    else:
        norm_factor = g / f

    # If heights is has more than 2 dimensions, we need to pass in some dummy
    # grid deltas so that we can still use np.gradient.  It may be better to
    # to loop in this case, but that remains to be done.
    deltas = [dx, dy]
    if heights.ndim > 2:
        deltas = deltas + [units.Quantity(1., units.m)] * (heights.ndim - 2)

    grad = _gradient(heights, *deltas)
    dx, dy = grad[0], grad[1]  # This throws away unused gradient components
    # u_g = -(1/f) dZ/dy,  v_g = (1/f) dZ/dx (with g folded into norm_factor)
    return -norm_factor * dy, norm_factor * dx
|
Python
| 0
|
@@ -525,36 +525,59 @@
np.gradient(f, *
+(a.magnitude for a in
args
+)
, **kwargs)%0A
|
717fae72c0ee212e6b02f23c79af99aa76a34a5a
|
Update hmc.py
|
hmc/hmc.py
|
hmc/hmc.py
|
"""
pyhmc: Hamiltonian Monte Carlo Sampling in Python
=================================================
This package is a straight-forward port of the functions `hmc2.m` and
hmc2_opt.m from the [MCMCstuff](http://www.lce.hut.fi/research/mm/mcmcstuff/)
matlab toolbox written by Aki Vehtari. The code is originally based on the
functions `hmc.m` from the [netlab toolbox](http://www.ncrg.aston.ac.uk/netlab/index.php)
written by Ian T Nabney. The portion of algorithm involving "windows" is
derived from the C code for this function included in the
[Software for Flexible Bayesian Modeling](http://www.cs.toronto.edu/~radford/fbm.software.html)
written by Radford Neal.
The original Python [port](https://github.com/koepsell/pyhmc) was made
by Kilian Koepsell, and subsequently modernized by Robert T. McGibbon.
Authors
-------
- Kilian Koepsell <kilian@berkeley.edu>
- Robert T. McGibbon <rmcgibbo@gmail.com>
"""
from __future__ import print_function, division
import numbers
import numpy as np
from ._hmc import hmc_main_loop
__all__ = ['hmc']
def hmc(fun, x0, args=(), display=False, steps=1, n_samples=1, n_burn=0,
        persistence=False, decay=0.9, epsilon=0.2, window=1,
        return_energies=False, return_diagnostics=False, random_state=None):
    """Hamiltonian Monte Carlo sampler.

    Uses a Hamiltonian / Hybrid Monte Carlo algorithm to sample from the
    distribution P ~ exp(f). The Markov chain starts at the point x0. The
    callable ``fun`` should return the log probability and gradient of the
    log probability.

    Parameters
    ----------
    fun : callable
        A callable which takes a vector in the parameter spaces as input
        and returns the natural logarithm of the posterior probability
        for that position, and gradient of the posterior probability with
        respect to the parameter vector, ``logp, grad = func(x, *args)``.
    x0 : 1-d array
        Starting point for the sampling Markov chain.

    Optional Parameters
    -------------------
    args : tuple
        additional arguments to be passed to fun().
    display : bool
        If True, enables verbose display output. Default: False
    steps : int
        Defines the trajectory length (i.e. the number of leapfrog
        steps at each iteration).
    n_samples : int
        The number of samples retained from the Markov chain.
    n_burn : int
        The number of samples omitted from the start of the chain as 'burn in'.
    persistence : bool
        If True, momentum persistence is used (i.e. the momentum
        variables decay rather than being replaced). Default: False
    decay : float, default=0.9
        Defines the decay used when a persistent update of (leap-frog)
        momentum is used. Bounded to the interval [0, 1.).
    epsilon : float, default=0.2.
        The step adjustment used in leap-frogs
    window : int, default=1
        The size of the acceptance window.
    return_energies : bool, default=False
        If True, the energy values for all samples are returned.
    return_diagnostics : bool, default=False
        If True, diagnostic information is returned (see below).

    Returns
    -------
    samples : array
        Array with data samples in rows.
    energies : array
        If return_energies is True, also returns an array of the energy values
        (i.e. negative log probabilities) for all samples.
    diagn : dict
        If return_diagnostics is True, also returns a dictionary with diagnostic
        information (position, momentum and acceptance threshold) for each step
        of the chain in diagn.pos, diagn.mom and diagn.acc respectively.
        All candidate states (including rejected ones) are stored in
        diagn['pos']. The diagn dictionary contains the following items:

        ``pos`` : array
            the position vectors of the dynamic process
        ``mom`` : array
            the momentum vectors of the dynamic process
        ``acc`` : array
            the acceptance thresholds
        ``rej`` : float
            the rejection rate
        ``stps`` : float
            the step size vectors
    """
    # check some options
    # NOTE(review): the message says "step size" but this checks the number
    # of leapfrog steps.
    assert steps >= 1, 'step size has to be >= 1'
    assert n_samples >= 1, 'n_samples has to be >= 1'
    assert n_burn >= 0, 'n_burn has to be >= 0'
    assert decay >= 0, 'decay has to be >= 0'
    assert decay <= 1, 'decay has to be <= 1'
    assert window >= 0, 'window has to be >= 0'
    # The acceptance window cannot exceed the trajectory length.
    if window > steps:
        window = steps
        if display:
            print("setting window size to step size %d" % window)

    # Persistent momentum: decay the old momentum by alpha and refresh it
    # with salpha-scaled Gaussian noise inside the main loop.
    if persistence:
        alpha = decay
        salpha = np.sqrt(1-alpha**2)
    else:
        alpha = salpha = 0.

    n_params = len(x0)

    # Initialize matrix of returned samples
    samples = np.zeros((n_samples, n_params))

    # Return energies?
    # A zero-length array signals "do not record" to the compiled main loop.
    if return_energies:
        energies = np.zeros(n_samples)
    else:
        energies = np.zeros(0)

    # Return diagnostics?
    if return_diagnostics:
        diagn_pos = np.zeros((n_samples, n_params))
        diagn_mom = np.zeros((n_samples, n_params))
        diagn_acc = np.zeros(n_samples)
    else:
        diagn_pos = None
        diagn_mom = None
        diagn_acc = None

    random = _check_random_state(random_state)
    # Initial momentum drawn from a standard normal.
    p = random.randn(n_params)

    # Main loop.
    # The sampler itself is implemented in the compiled extension; the output
    # arrays above are filled in place.
    all_args = [
        fun, x0, args, p, samples, energies,
        diagn_pos, diagn_mom, diagn_acc,
        n_samples, n_burn, window,
        steps, display, persistence,
        return_energies, return_diagnostics,
        alpha, salpha, epsilon, random,]
    n_reject = hmc_main_loop(*all_args)

    if display:
        print('\nFraction of samples rejected: %g\n'%(n_reject / n_samples))
    # Store diagnostics
    if return_diagnostics:
        diagn = dict()
        diagn['pos'] = diagn_pos   # positions matrix
        diagn['mom'] = diagn_mom   # momentum matrix
        diagn['acc'] = diagn_acc   # acceptance threshold matrix
        diagn['rej'] = n_reject / n_samples   # rejection rate
        diagn['stps'] = epsilon   # stepsize vector

    # Build the return value: samples alone, or a tuple with the optional
    # energies/diagnostics appended in that order.
    if return_energies or return_diagnostics:
        out = (samples,)
    else:
        return samples
    if return_energies:
        out += (energies,)
    if return_diagnostics:
        out += (diagn,)
    return out
def _check_random_state(seed):
"""Turn seed into a np.random.RandomState instance
If seed is None, return the RandomState singleton used by np.random.
If seed is an int, return a new RandomState instance seeded with seed.
If seed is already a RandomState instance, return it.
Otherwise raise ValueError.
"""
if seed is None or seed is np.random:
return np.random.mtrand._rand
if isinstance(seed, (numbers.Integral, np.integer)):
return np.random.RandomState(seed)
if isinstance(seed, np.random.RandomState):
return seed
raise ValueError('%r cannot be used to seed a numpy.random.RandomState'
' instance' % seed)
|
Python
| 0.000001
|
@@ -168,16 +168,17 @@
.m%60 and%0A
+%60
hmc2_opt
@@ -179,16 +179,17 @@
c2_opt.m
+%60
from th
|
47077fd978866acefb127d3ca3b72182a468a020
|
Support prelim.csv files in sort_sam.py script.
|
micall/utils/sort_sam.py
|
micall/utils/sort_sam.py
|
#! /usr/bin/env python3
import os
from argparse import ArgumentParser, FileType
import subprocess
def parse_args():
    """Build the command-line parser for this script and parse sys.argv."""
    parser = ArgumentParser(description='Sort SAM file before viewing.')
    parser.add_argument('sam', help='SAM file to sort')
    args = parser.parse_args()
    return args
def main():
    """Convert a SAM file to a coordinate-sorted SAM via samtools.

    Pipeline: SAM -> BAM (view -Sb) -> sorted BAM (sort) -> sorted SAM
    (view -h). Intermediate files are written next to the input.
    """
    args = parse_args()
    # samtools view -Sb example.sam -o example.bam
    sam_name = args.sam
    sam_root, _ = os.path.splitext(sam_name)
    subprocess.check_call(
        ['samtools', 'view', '-Sb', sam_name, '-o', sam_root + '.bam'])

    # samtools sort example.bam -o example.sorted.bam
    subprocess.check_call(
        ['samtools', 'sort', sam_root + '.bam', '-o', sam_root + '.sorted.bam'])

    # samtools view -h -o example.sorted.sam example.sorted.bam
    subprocess.check_call(['samtools',
                           'view',
                           '-h',
                           '-o',
                           sam_root + '.sorted.sam',
                           sam_root + '.sorted.bam'])
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -17,16 +17,39 @@
python3%0A
+import csv%0Aimport json%0A
import o
@@ -99,297 +99,1712 @@
Type
-%0A%0Aimport subprocess%0A%0A%0Adef parse_args():%0A parser = ArgumentParser(description='Sort SAM file before viewing.')%0A parser.add_argument('sam', help='SAM file to sort')%0A%0A return parser.parse_args()%0A%0A%0Adef main():%0A args = parse_args()%0A # samtools view -Sb example.sam -o example.bam
+, ArgumentDefaultsHelpFormatter%0A%0Aimport subprocess%0Afrom pathlib import Path%0A%0Aimport typing%0A%0A%0Adef parse_args():%0A # noinspection PyTypeChecker%0A parser = ArgumentParser(description='Sort SAM file before viewing.',%0A formatter_class=ArgumentDefaultsHelpFormatter)%0A parser.add_argument('sam', help='SAM file to sort, or prelim.csv')%0A default_projects = Path(__file__).parent.parent / 'projects.json'%0A parser.add_argument('--projects',%0A type=FileType(),%0A help='JSON file with project definitions',%0A default=str(default_projects))%0A%0A return parser.parse_args()%0A%0A%0Adef convert_from_csv(csv_name: str,%0A sam_name: str,%0A projects_file: typing.TextIO):%0A with open(csv_name) as csv_file, open(sam_name, 'w') as sam_file:%0A writer = csv.writer(sam_file, delimiter='%5Ct', lineterminator=os.linesep)%0A writer.writerow(%5B'@HD', 'VN:1.0', 'SO:unsorted'%5D)%0A%0A reader = csv.reader(csv_file)%0A header = next(reader)%0A region_index = 2%0A assert header%5Bregion_index%5D == 'rname'%0A%0A regions = %7Brow%5Bregion_index%5D for row in reader%7D%0A%0A projects = json.load(projects_file)%0A for region, info in projects%5B'regions'%5D.items():%0A if region in regions:%0A reference = ''.join(info%5B'reference'%5D)%0A reference_length = len(reference)%0A row = %5B'@SQ', f'SN:%7Bregion%7D', f'LN:%7Breference_length%7D'%5D%0A writer.writerow(row)%0A%0A csv_file.seek(0)%0A reader = csv.reader(csv_file)%0A next(reader)%0A writer.writerows(reader)%0A%0A%0Adef main():%0A args = parse_args()
%0A
@@ -1838,17 +1838,23 @@
m_root,
-_
+sam_ext
= os.pa
@@ -1875,16 +1875,218 @@
m_name)%0A
+ if sam_ext == '.csv':%0A csv_name = sam_name%0A sam_name = sam_root + '.sam'%0A convert_from_csv(csv_name, sam_name, args.projects)%0A # samtools view -Sb example.sam -o example.bam%0A
subp
@@ -2650,16 +2650,17 @@
bam'%5D)%0A%0A
+%0A
if __nam
|
f2c800c5c9f88b4b583181e9cf49eb3cd8d538f0
|
Improve open_in_browser base tag injection (#5319)
|
scrapy/utils/response.py
|
scrapy/utils/response.py
|
"""
This module provides some useful functions for working with
scrapy.http.Response objects
"""
import os
import webbrowser
import tempfile
from typing import Any, Callable, Iterable, Optional, Tuple, Union
from weakref import WeakKeyDictionary
import scrapy
from scrapy.http.response import Response
from twisted.web import http
from scrapy.utils.python import to_bytes, to_unicode
from w3lib import html
_baseurl_cache: "WeakKeyDictionary[Response, str]" = WeakKeyDictionary()
def get_base_url(response: "scrapy.http.response.text.TextResponse") -> str:
"""Return the base url of the given response, joined with the response url"""
if response not in _baseurl_cache:
text = response.text[0:4096]
_baseurl_cache[response] = html.get_base_url(text, response.url, response.encoding)
return _baseurl_cache[response]
_metaref_cache: "WeakKeyDictionary[Response, Union[Tuple[None, None], Tuple[float, str]]]" = WeakKeyDictionary()
def get_meta_refresh(
response: "scrapy.http.response.text.TextResponse",
ignore_tags: Optional[Iterable[str]] = ('script', 'noscript'),
) -> Union[Tuple[None, None], Tuple[float, str]]:
"""Parse the http-equiv refrsh parameter from the given response"""
if response not in _metaref_cache:
text = response.text[0:4096]
_metaref_cache[response] = html.get_meta_refresh(
text, response.url, response.encoding, ignore_tags=ignore_tags)
return _metaref_cache[response]
def response_status_message(status: Union[bytes, float, int, str]) -> str:
"""Return status code plus status text descriptive message
"""
status_int = int(status)
message = http.RESPONSES.get(status_int, "Unknown Status")
return f'{status_int} {to_unicode(message)}'
def response_httprepr(response: Response) -> bytes:
"""Return raw HTTP representation (as bytes) of the given response. This
is provided only for reference, since it's not the exact stream of bytes
that was received (that's not exposed by Twisted).
"""
values = [
b"HTTP/1.1 ",
to_bytes(str(response.status)),
b" ",
to_bytes(http.RESPONSES.get(response.status, b'')),
b"\r\n",
]
if response.headers:
values.extend([response.headers.to_string(), b"\r\n"])
values.extend([b"\r\n", response.body])
return b"".join(values)
def open_in_browser(
response: Union["scrapy.http.response.html.HtmlResponse", "scrapy.http.response.text.TextResponse"],
_openfunc: Callable[[str], Any] = webbrowser.open,
) -> Any:
"""Open the given response in a local web browser, populating the <base>
tag for external links to work
"""
from scrapy.http import HtmlResponse, TextResponse
# XXX: this implementation is a bit dirty and could be improved
body = response.body
if isinstance(response, HtmlResponse):
if b'<base' not in body:
repl = f'<head><base href="{response.url}">'
body = body.replace(b'<head>', to_bytes(repl))
ext = '.html'
elif isinstance(response, TextResponse):
ext = '.txt'
else:
raise TypeError("Unsupported response type: "
f"{response.__class__.__name__}")
fd, fname = tempfile.mkstemp(ext)
os.write(fd, body)
os.close(fd)
return _openfunc(f"file://{fname}")
|
Python
| 0
|
@@ -107,26 +107,18 @@
%0Aimport
-webbrowse
r
+e
%0Aimport
@@ -126,16 +126,34 @@
empfile%0A
+import webbrowser%0A
from typ
@@ -2933,23 +2933,20 @@
repl = f
-'%3Chead%3E
+r'%5C1
%3Cbase hr
@@ -2990,30 +2990,107 @@
y =
-body.replace(b'%3Chead%3E'
+re.sub(b%22%3C!--.*?--%3E%22, b%22%22, body, flags=re.DOTALL)%0A body = re.sub(rb%22(%3Chead(?:%3E%7C%5Cs.*?%3E))%22
, to
@@ -3101,16 +3101,22 @@
es(repl)
+, body
)%0A
|
19f26c267b4cc7a31e6fe2aeaa470d975cf3c3ae
|
Remove set and update docstring
|
motobot/core_plugins/bot_management.py
|
motobot/core_plugins/bot_management.py
|
from motobot import command, hook, Notice, IRCLevel, Command, Target, Action, Priority
@command('command', level=IRCLevel.master, priority=Priority.max)
def command_command(bot, context, message, args):
""" Command to manage the basic functions of the bot.
The 'join' and 'part' argument both require a channel argument.
The 'join' command has an optional channel password argument.
The 'quit', 'part', and 'reconnect' argument have an optional quit/part message.
The 'show' argument will return a list of currently joined channels.
The 'set' argument will set an attribute of the bot. Use with caution.
The 'reload' command will reload the plugins in the loaded packages.
"""
try:
arg = args[1].lower()
if arg == 'join':
channel = ' '.join(args[2:])
response = join_channel(context.database, channel)
elif arg == 'part':
channel = args[2]
message = ' '.join(args[3:])
response = part_channel(context.database, channel, message)
elif arg == 'quit':
bot.running = False
message = ' '.join(args[2:])
response = quit(message)
elif arg == 'reconnect':
message = ' '.join(args[2:])
response = quit(message)
elif arg == 'show':
response = show_channels(context.database)
elif arg == 'set':
name = args[2]
value = args[3:]
response = set_attribute(bot, name, value)
elif arg == 'reload':
error = bot.reload_plugins()
response = "Plugins have been reloaded." + \
(" There were some errors." if error else "")
else:
response = "Error: Invalid argument."
except IndexError:
response = "Error: Too few arguments supplied."
return response, Notice(context.nick)
@command('say', level=IRCLevel.master)
def say_command(bot, context, message, args):
""" Send a message to a given target.
Usage: say <TARGET> [MESSAGE]
"""
try:
target = args[1]
message = ' '.join(args[2:])
return say(target, message)
except IndexError:
return ("Error: Too few arguments supplied.", Notice(context.nick))
def join_channel(database, channel):
response = None
channels = database.get(set())
if channel.lower() in channels:
response = "I'm already in {}.".format(channel)
else:
channels.add(channel.lower())
database.set(channels)
response = (
[Command('JOIN', channel)],
"I have joined {}.".format(channel)
)
return response
def part_channel(database, channel, message):
response = None
channels = database.get(set())
if channel.lower() not in channels:
response = "I'm not in {}.".format(channel)
else:
channels.discard(channel.lower())
database.set(channels)
response = [
(message, Command('PART', channel)),
"I have left {}.".format(channel)
]
return response
def quit(message):
return [
"Goodbye!",
(message, Command('QUIT', []))
]
def show_channels(database):
channels = map(lambda x: x.split(' ', 1)[0], database.get(set()))
return "I am currently in: {}.".format(', '.join(channels))
def say(target, message):
target_modifier = Target(target)
if message.startswith('/me '):
return (message[4:], target_modifier, Action)
else:
return (message, target_modifier)
def set_attribute(bot, name, value):
return "This function has not yet been implemeneted."
@hook('KICK')
def handle_kick(bot, context, message):
if message.params[1] == bot.nick:
channel = message.params[0]
part_channel(context.database, channel, None)
@hook('004')
def handling_joining_channels(bot, context, message):
channels = context.database.get(set())
channels |= set(map(lambda x: x.lower(), bot.channels))
context.database.set(channels)
for channel in channels:
bot.send('JOIN ' + channel)
|
Python
| 0
|
@@ -253,24 +253,107 @@
f the bot.%0A%0A
+ Valid arguments are: 'join', 'part', 'quit', 'show', 'reconnect' and 'reload'.%0A
The 'joi
@@ -636,83 +636,8 @@
ls.%0A
- The 'set' argument will set an attribute of the bot. Use with caution.%0A
@@ -1390,146 +1390,8 @@
se)%0A
- elif arg == 'set':%0A name = args%5B2%5D%0A value = args%5B3:%5D%0A response = set_attribute(bot, name, value)%0A
@@ -3446,105 +3446,8 @@
)%0A%0A%0A
-def set_attribute(bot, name, value):%0A return %22This function has not yet been implemeneted.%22%0A%0A%0A
@hoo
|
db33d02b22ece1f081e4ca2171a76d9e1eb784d9
|
Add latest field to form
|
Instanssi/admin_kompomaatti/forms.py
|
Instanssi/admin_kompomaatti/forms.py
|
# -*- coding: utf-8 -*-
from django import forms
from django.shortcuts import get_object_or_404
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit, Layout, Fieldset, ButtonHolder
from datetime import datetime
from django.core.exceptions import ValidationError
from django.contrib.auth.models import User
from Instanssi.kompomaatti.models import Compo, Entry, VoteCode, VoteCodeRequest, Event, Competition, CompetitionParticipation
import urlparse
class AdminCompetitionScoreForm(forms.Form):
def __init__(self, *args, **kwargs):
self.competition = kwargs.pop('competition', None)
# Init
super(AdminCompetitionScoreForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout()
# Create a fieldset for everything
fs = Fieldset(u'Pisteytys')
# Set fields
participants = CompetitionParticipation.objects.filter(competition=self.competition)
for p in participants:
name = str(p.id)
self.fields[name] = forms.FloatField()
self.fields[name].label = p.participant_name
self.fields[name].help_text = u'Osallistujan '+p.participant_name+u' saavuttama tulos.'
self.fields[name].initial = p.score
fs.fields.append(name)
# Add buttonholder
bh = ButtonHolder (
Submit('submit', u'Tallenna')
)
fs.fields.append(bh)
# Add fieldset to layout
self.helper.layout.fields.append(fs)
def save(self):
for k,v in self.cleaned_data.iteritems():
p = get_object_or_404(CompetitionParticipation, pk=int(k))
p.score = v
p.save()
class AdminParticipationEditForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(AdminParticipationEditForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'Osallistuja',
'participant_name',
'score',
'disqualified',
'disqualified_reason',
ButtonHolder (
Submit('submit', u'Tallenna')
)
)
)
class Meta:
model = CompetitionParticipation
fields = ('participant_name','score','disqualified','disqualified_reason')
class AdminCompetitionForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(AdminCompetitionForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'Kilpailu',
'name',
'description',
'participation_end',
'start',
'end',
'score_type',
'score_sort',
'show_results',
'hide_from_archive',
'active',
ButtonHolder (
Submit('submit', u'Tallenna')
)
)
)
class Meta:
model = Competition
exclude = ('event',)
class AdminCompoForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(AdminCompoForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'Kompo',
'name',
'description',
'adding_end',
'editing_end',
'compo_start',
'voting_start',
'voting_end',
'entry_sizelimit',
'source_sizelimit',
'formats',
'source_formats',
'active',
'show_voting_results',
'entry_view_type',
'hide_from_archive',
ButtonHolder (
Submit('submit', u'Tallenna')
)
)
)
class Meta:
model = Compo
exclude = ('event',)
class AdminEntryAddForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
# Initialize
self.event = kwargs.pop('event', None)
super(AdminEntryAddForm, self).__init__(*args, **kwargs)
# Set choices
if self.event:
compos = []
for compo in Compo.objects.filter(event=self.event):
compos.append((compo.id, compo.name))
self.fields['compo'].choices = compos
# Set form
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'Tuotos',
'user',
'compo',
'name',
'description',
'creator',
'entryfile',
'sourcefile',
'imagefile_original',
'youtube_url',
ButtonHolder (
Submit('submit', u'Lisää')
)
)
)
class Meta:
model = Entry
exclude = ('disqualified','disqualified_reason','imagefile_thumbnail','imagefile_medium','archive_score','archive_rank')
class AdminEntryEditForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
# Initialize
self.event = kwargs.pop('event', None)
super(AdminEntryEditForm, self).__init__(*args, **kwargs)
# Set choices for Compo field
if self.event:
compos = []
for compo in Compo.objects.filter(event=self.event):
compos.append((compo.id, compo.name))
self.fields['compo'].choices = compos
# Set form
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'Tuotos',
'compo',
'user',
'name',
'description',
'creator',
'entryfile',
'sourcefile',
'imagefile_original',
'youtube_url',
'disqualified',
'disqualified_reason',
ButtonHolder (
Submit('submit', u'Tallenna')
)
)
)
def clean_youtube_url(self):
# Make sure field has content
if not self.cleaned_data['youtube_url']:
return self.cleaned_data['youtube_url']
# Check if we already have a valid embed url
url = self.cleaned_data['youtube_url']
if url.find('http://www.youtube.com/v/') == 0:
return url
# Parse querystring to find video ID
parsed = urlparse.urlparse(url)
qs = urlparse.parse_qs(parsed.query)
# Check if the video id exists in query string
if 'v' not in qs:
raise ValidationError(u'Osoitteesta ei löytynyt videotunnusta.')
# All done. Return valid url
return 'http://www.youtube.com/v/'+qs['v'][0]+'/'
class Meta:
model = Entry
exclude = ('imagefile_thumbnail','imagefile_medium','archive_score','archive_rank')
class CreateTokensForm(forms.Form):
amount = forms.IntegerField(min_value=1, max_value=100, label=u"Määrä", help_text=u"Montako tokenia luodaan.")
def __init__(self, *args, **kwargs):
super(CreateTokensForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.layout = Layout(
Fieldset(
u'Generoi tokeneita',
'amount',
ButtonHolder (
Submit('submit', u'Generoi')
)
)
)
|
Python
| 0
|
@@ -3977,32 +3977,71 @@
_from_archive',%0A
+ 'hide_from_frontpage',%0A
|
1b3b3edac1b01a59519690c86647c70a67c4d90b
|
Add support for relative paths in mac os gen_snapshot. (#35324)
|
sky/tools/create_macos_gen_snapshots.py
|
sky/tools/create_macos_gen_snapshots.py
|
#!/usr/bin/env python3
#
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import subprocess
import sys
import os
def main():
parser = argparse.ArgumentParser(
description='Copies architecture-dependent gen_snapshot binaries to output dir'
)
parser.add_argument('--dst', type=str, required=True)
parser.add_argument('--clang-dir', type=str, default='clang_x64')
parser.add_argument('--x64-out-dir', type=str)
parser.add_argument('--arm64-out-dir', type=str)
parser.add_argument('--armv7-out-dir', type=str)
args = parser.parse_args()
if args.x64_out_dir:
generate_gen_snapshot(
args.x64_out_dir, os.path.join(args.dst, 'gen_snapshot_x64')
)
if args.arm64_out_dir:
generate_gen_snapshot(
os.path.join(args.arm64_out_dir, args.clang_dir),
os.path.join(args.dst, 'gen_snapshot_arm64')
)
if args.armv7_out_dir:
generate_gen_snapshot(
os.path.join(args.armv7_out_dir, args.clang_dir),
os.path.join(args.dst, 'gen_snapshot_armv7')
)
def generate_gen_snapshot(directory, destination):
gen_snapshot_dir = os.path.join(directory, 'gen_snapshot')
if not os.path.isfile(gen_snapshot_dir):
print('Cannot find gen_snapshot at %s' % gen_snapshot_dir)
sys.exit(1)
subprocess.check_call([
'xcrun', 'bitcode_strip', '-r', gen_snapshot_dir, '-o', destination
])
if __name__ == '__main__':
sys.exit(main())
|
Python
| 0
|
@@ -236,16 +236,121 @@
ort os%0A%0A
+buildroot_dir = os.path.abspath(%0A os.path.join(os.path.realpath(__file__), '..', '..', '..', '..')%0A)%0A%0A
%0Adef mai
@@ -709,94 +709,288 @@
tr)%0A
+%0A
-parser.add_argument('--armv7-out-
+args = parser.parse_args()%0A%0A dst = (%0A args.dst%0A if os.path.isabs(args.dst) else os.path.join(buildroot_
dir
-'
,
-type=str
+args.dst)%0A
)%0A%0A
+if
args
- = parser.parse_args()%0A%0A if
+.x64_out_dir:%0A x64_out_dir = (%0A args.x64_out_dir if os.path.isabs(args.x64_out_dir) else%0A os.path.join(buildroot_dir,
arg
@@ -994,33 +994,39 @@
args.x64_out_dir
-:
+)%0A )
%0A generate_ge
@@ -1036,30 +1036,16 @@
napshot(
-%0A args.
x64_out_
@@ -1054,37 +1054,32 @@
r, os.path.join(
-args.
dst, 'gen_snapsh
@@ -1082,29 +1082,24 @@
apshot_x64')
-%0A
)%0A%0A if args
@@ -1122,38 +1122,55 @@
-generate_gen_snapshot(%0A
+arm64_out_dir = (%0A args.arm64_out_dir if
os.
@@ -1166,36 +1166,37 @@
_dir if os.path.
-join
+isabs
(args.arm64_out_
@@ -1198,34 +1198,22 @@
_out_dir
-, args.clang_dir),
+) else
%0A
@@ -1230,51 +1230,22 @@
oin(
-args.dst, 'gen_snapshot_arm64')%0A )%0A%0A if
+buildroot_dir,
arg
@@ -1249,26 +1249,26 @@
args.arm
-v7
+64
_out_dir
:%0A ge
@@ -1255,25 +1255,31 @@
rm64_out_dir
-:
+)%0A )
%0A generat
@@ -1319,18 +1319,13 @@
oin(
-args.
arm
-v7
+64
_out
@@ -1368,21 +1368,16 @@
th.join(
-args.
dst, 'ge
@@ -1394,10 +1394,10 @@
_arm
-v7
+64
')%0A
|
151fe948e07de094e37b8f5b5b00ab526c59e03e
|
Fix server error when trying to open Compo page when not logged in
|
Instanssi/kompomaatti/views/compo.py
|
Instanssi/kompomaatti/views/compo.py
|
# -*- coding: utf-8 -*-
from django.http import Http404, HttpResponseRedirect, HttpResponse
from datetime import datetime
from Instanssi.kompomaatti.misc.custom_render import custom_render
from Instanssi.kompomaatti.misc.time_formatting import compo_times_formatter
from Instanssi.kompomaatti.models import Entry, Compo, Vote, VoteCode
from Instanssi.settings import ACTIVE_EVENT_ID
from Instanssi.kompomaatti.misc import entrysort
def compo(request, compo_id):
# Get compo information
try:
c = Compo.objects.get(id=compo_id, event=ACTIVE_EVENT_ID, active=True)
except Compo.DoesNotExist:
raise Http404
# Format times and stuff
c = compo_times_formatter(c)
# The following is only relevant, if the user is logged in and valid.
has_voted = False
voting_open = False
votes = {}
if request.user.is_authenticated():
# Check if user has already voted
votes = Vote.objects.filter(user=request.user, compo=c).order_by('rank')
if votes.count() > 0:
has_voted = True
# Check if voting is open
now = datetime.now()
if c.voting_start <= now and now < c.voting_end:
voting_open = True
# Check if we can show the entries
show_entries = False
if c.voting_start <= now:
show_entries = True
# Check if we want to do something with forms and stuff.
if request.method == 'POST':
if voting_open:
# Make sure the user has a valid votecode
try:
votecode = VoteCode.objects.get(associated_to=request.user)
except:
return HttpResponse("Ei äänestysoikeutta!")
# Get entries in compo that are not disqualified
compo_entries = Entry.objects.filter(compo=c, disqualified=False)
# Get the input data, and format it so that we can handle it.
# HTML mode and JS mode voting systems give out different kind
# of data
order = []
tmp = {}
if request.POST['action'] == 'vote_html':
for entry in compo_entries:
check_for = "ventry_"+str(entry.id)
if not request.POST.has_key(check_for):
return HttpResponse("Virhe syötteen käsittelyssä!")
try:
tmp[entry.id] = int(request.POST[check_for])
except:
return HttpResponse("Virhe syötteen käsittelyssä!")
order = sorted(tmp, key=tmp.get)
else:
order_raw = request.POST.getlist('order[]')
for id in order_raw:
try:
order.append(int(id))
except:
return HttpResponse("Virhe syötteen käsittelyssä!")
# Remove old votes by this user, on this compo
if has_voted:
Vote.objects.filter(user=request.user, compo=c).delete()
# Check voting input for cheating :P
# See if all entries have a rank.
for entry in compo_entries:
if entry.id not in order:
return HttpResponse("Virhe syötteen käsittelyssä!")
# See that we have the right amount of entries
if len(order) != len(compo_entries):
return HttpResponse("Virhe syötteen käsittelyssä!")
# Make sure that no entry is in the list twice
checked_ids = []
for entryid in order:
if entryid not in checked_ids:
checked_ids.append(entryid)
else:
return HttpResponse("Virhe syötteen käsittelyssä!")
# Add new votes, if there were no errors
number = 1
for entry_id in order:
vote = Vote()
vote.user = request.user
vote.compo = c
vote.entry = Entry.objects.get(id=entry_id)
vote.rank = number
vote.save()
number += 1
# Select response mode according to input
if request.POST['action'] == 'vote_html':
return HttpResponseRedirect('/kompomaatti/compo/'+compo_id+'/')
else:
return HttpResponse("0") # 0 = Success.
else: # If voting is closed, just show 404. This shouldn't really happen ...
raise Http404
# Get entries.
# If voting is open, and user has already voted, get the order of entries by previous voting
# If voting is open, and user has NOT voted yet, get the entries in random order
# Otherwise just get entries sorted by name
# Make sure that no disqualified entries are included if voting is open. No need to vote for those ...
if voting_open and has_voted:
e = []
# First go through the entries that have been voted for and add them to list.
for vote in votes:
if not vote.entry.disqualified:
e.append(vote.entry)
# Then, make sure to also show entries that have NOT been voted previously by the user
# (if entry has been added late)
entries_tmp = Entry.objects.filter(compo=c,disqualified=False).order_by('?')
for entry in entries_tmp:
if entry not in e:
e.append(entry)
elif voting_open:
e = Entry.objects.filter(compo=c,disqualified=False).order_by('?')
else:
if c.show_voting_results:
e = entrysort.sort_by_score(Entry.objects.filter(compo=c))
elif show_entries:
e = Entry.objects.filter(compo=c).order_by('name')
else:
e = []
# Render the page. Ya, rly.
return custom_render(request, 'kompomaatti/compo.html', {
'compo': c,
'entries': e,
'voting_open': voting_open,
'has_voted': has_voted,
'show_entries': show_entries,
})
|
Python
| 0
|
@@ -694,24 +694,181 @@
ter(c)%0A %0A
+ now = datetime.now()%0A %0A # Check if we can show the entries%0A show_entries = False%0A if c.voting_start %3C= now:%0A show_entries = True%0A %0A
# The fo
@@ -1259,37 +1259,8 @@
pen%0A
- now = datetime.now()%0A
@@ -1356,151 +1356,8 @@
%0A
- # Check if we can show the entries%0A show_entries = False%0A if c.voting_start %3C= now:%0A show_entries = True%0A %0A
|
16b92aac9a7f82a3c15bd2e50d02e4af482a7cf0
|
Fix crestHandler auth callback.
|
mint/web/cresthandler.py
|
mint/web/cresthandler.py
|
from conary.repository.netrepos import proxy
import restlib.http.modpython
from restlib import response
import crest.root
import crest.webhooks
from mint.rest.db import database as restDatabase
from mint.db import database
from mint.rest.middleware import auth
def handleCrest(uri, cfg, db, repos, req):
handler, callback = getCrestHandler(cfg, db)
if isinstance(repos, proxy.SimpleRepositoryFilter):
callback.repos = repos.repos
else:
callback.repos = repos
return handler.handle(req, uri)
def getCrestHandler(cfg, db):
assert(cfg)
assert(db)
crestController = crest.root.Controller(None, '/rest')
crestHandler = restlib.http.modpython.ModPythonHttpHandler(crestController)
crestCallback = CrestRepositoryCallback(db)
crestHandler.addCallback(crestCallback)
db = database.Database(cfg, db)
db = restDatabase.Database(cfg, db)
crestHandler.addCallback(auth.AuthenticationCallback(cfg, db))
return crestHandler, crestCallback
class AuthChecker(auth.AuthenticationCallback(cfg, db):
def processMethod(self, request, viewMethod, args, kw):
return self.checkDisablement(request, viewMethod)
class CrestRepositoryCallback(crest.webhooks.ReposCallback):
def __init__(self, db):
self.db = db
crest.webhooks.ReposCallback.__init__(self, None)
def makeUrl(self, request, *args, **kwargs):
if 'host' in kwargs:
cu = self.db.cursor()
fqdn = kwargs['host']
hostname = fqdn.split('.', 1)[0]
cu.execute('''SELECT COUNT(*) FROM Projects
WHERE hostname=?''', hostname)
if not cu.fetchall():
return 'http://%s/%s' % (kwargs['host'], '/'.join(args))
baseUrl = request.getHostWithProtocol() + '/repos/%s/api' % hostname
return request.url(baseUrl=baseUrl, *args)
return request.url(*args)
|
Python
| 0
|
@@ -919,21 +919,21 @@
allback(
-auth.
+Crest
Authenti
@@ -1008,19 +1008,35 @@
ass
-AuthChecker
+CrestAuthenticationCallback
(aut
@@ -1059,24 +1059,16 @@
Callback
-(cfg, db
):%0A d
|
c022cc1d1fd22ac14cbc01574a0ff2f9fcb26cc2
|
Fix bug. tree formatter.
|
Controller/hone_aggTreeFormation.py
|
Controller/hone_aggTreeFormation.py
|
"""
Author: Peng Sun
hone_aggTreeFormation.py
Define factory and class to compose the aggregation tree
"""
import logging
from hone_job import *
from hone_hostEntry import *
DefaultBranchFactor = 4
class TreeFormatterBase:
''' base class of tree formatter '''
def addLeaf(self, hostEntry):
raise NotImplementedError('Derived class should implement the method')
def removeLeaf(self, hostEntry):
raise NotImplementedError('Derived class should implement the method')
def displayTree(self):
raise NotImplementedError('Derived class should implement the method')
class SimpleNode:
def __init__(self, hostId, hostAddress, nodeLevel):
self.hostId = hostId
self.hostAddress = hostAddress
self.nodeLevel = nodeLevel
self.parent = None
self.children = []
def getHostId(self):
return self.hostId
def getHostAddress(self):
return self.hostAddress
def setParent(self, node):
assert isinstance(node, SimpleNode)
self.parent = node
def getParent(self):
return self.parent
def addChild(self, node):
assert isinstance(node, SimpleNode)
if node not in self.children:
self.children.append(node)
def removeChild(self, node):
assert isinstance(node, SimpleNode)
if node in self.children:
self.children.remove(node)
def getChildren(self):
return self.children
def resetChildren(self):
self.children = []
def setNodeLevel(self, nodeLevel):
self.nodeLevel = nodeLevel
def getNodeLevel(self):
return self.nodeLevel
class SimpleTreeFormatter(TreeFormatterBase):
''' basic implementation of formatting aggregation tree by the sequence of hosts' entering the system '''
def __init__(self, job, branchFactor=DefaultBranchFactor):
self.controllerNode = SimpleNode('controller', 'controller IP', 1)
self.aggTree = [[], [self.controllerNode]]
self.branchFactor = branchFactor
self.job = job
def addLeaf(self, hostEntry):
n = len(self.aggTree)
node = SimpleNode(hostEntry.hostId, hostEntry.hostAddress, 0)
foundSpot = False
searchNodeLevel = 1
while (searchNodeLevel < n) and (not foundSpot):
for nodeToCheck in self.aggTree[searchNodeLevel]:
if len(nodeToCheck.getChildren) < self.branchFactor:
nodeToCheck.addChild(node)
node.setParent(nodeToCheck)
self.aggTree[searchNodeLevel - 1].append(node)
self.job.addAggLink(searchNodeLevel - 1, node.getHostId(), nodeToCheck.getHostId())
foundSpot = True
break
if not foundSpot:
selfPromoteNode = SimpleNode(node.getHostId(), node.getHostAddress(), node.getNodeLevel() + 1)
selfPromoteNode.addChild(node)
node.setParent(selfPromoteNode)
self.aggTree[searchNodeLevel - 1].append(node)
self.job.addAggLink(searchNodeLevel - 1, node.getHostId(), selfPromoteNode.getHostId())
node = selfPromoteNode
searchNodeLevel += 1
if not foundSpot:
self.controllerNode.setNodeLevel(self.controllerNode.getNodeLevel() + 1)
self.aggTree[n - 1].remove(self.controllerNode)
self.aggTree.append([self.controllerNode])
nodeToPromote = self.controllerNode.getChildren()[0]
newNode = SimpleNode(nodeToPromote.getHostId(), nodeToPromote.getHostAddress(), nodeToPromote.getNodeLevel() + 1)
for child in self.controllerNode.getChildren():
newNode.addChild(child)
self.job.removeAggLink(n - 2, child.getHostId(), self.controllerNode.getHostId())
self.job.addAggLink(n - 2, child.getHostId(), newNode.getHostId())
self.controllerNode.resetChildren()
newNode.setParent(self.controllerNode)
self.controllerNode.addChild(newNode)
self.aggTree[n - 1].append(newNode)
self.job.addAggLink(n - 1, newNode.getHostId(), self.controllerNode.getHostId())
node.setParent(self.controllerNode)
self.controllerNode.addChild(node)
self.aggTree[n - 1].append(node)
self.job.addAggLink(n - 1, node.getHostId(), self.controllerNode.getHostId())
self.job.addAggLink(n - 1, node.getHostId(), self.controllerNode.getHostId())
def removeLeaf(self, hostEntry):
# TODO add handler for deletion of nodes
logging.warning('Node deletion in aggregation tree is not implemented yet!')
def displayTree(self):
message = ''
for i in reversed(range(1, len(self.aggTree))):
for node in self.aggTree[i]:
message += 'Level {0:2} node. ID: {1}. IP: {2}.\n'.format(i, node.getHostId(), node.getHostAddress())
message += ' Children nodes: \n'
for child in node.getChildren():
message += ' Child ID: {1}. IP: {2}.\n'.format(child.getHostId(), child.getHostAddress())
message += '\n'
return message
class TreeFormatterFactory:
''' factory method for creating concrete TreeFormatter '''
@staticmethod
def GetNewFormatter(job, description='simple'):
if description == 'simple':
return SimpleTreeFormatter(job)
else:
return SimpleTreeFormatter(job)
|
Python
| 0
|
@@ -2417,16 +2417,18 @@
Children
+()
) %3C self
|
abc07dcd0b08add083ccabbc99656edb73589f5b
|
Fix edit scheme command for filenames
|
plugins_/color_scheme_dev.py
|
plugins_/color_scheme_dev.py
|
import functools
import logging
import re
import sublime
import sublime_plugin
from sublime_lib import ResourcePath
from .lib.scope_data import completions_from_prefix
from .lib import syntax_paths
__all__ = (
'ColorSchemeCompletionsListener',
'PackagedevEditSchemeCommand',
)
SCHEME_TEMPLATE = """\
{
// http://www.sublimetext.com/docs/3/color_schemes.html
"variables": {
// "green": "#FF0000",
},
"globals": {
// "foreground": "var(green)",
},
"rules": [
{
// "scope": "string",
// "foreground": "#00FF00",
},
],
}""".replace(" ", "\t")
VARIABLES = [
("--background\tbuiltin color", "--background"),
("--foreground\tbuiltin color", "--foreground"),
("--accent\tbuiltin color", "--accent"),
("--bluish\tbuiltin color", "--bluish"),
("--cyanish\tbuiltin color", "--cyanish"),
("--greenish\tbuiltin color", "--greenish"),
("--orangish\tbuiltin color", "--orangish"),
("--pinkish\tbuiltin color", "--pinkish"),
("--purplish\tbuiltin color", "--purplish"),
("--redish\tbuiltin color", "--redish"),
("--yellowish\tbuiltin color", "--yellowish"),
]
l = logging.getLogger(__name__)
def _inhibit_word_completions(func):
"""Decorator that inhibits ST's word completions if non-None value is returned."""
@functools.wraps(func)
def wrapper(*args, **kwargs):
ret = func(*args, **kwargs)
if ret is not None:
return (ret, sublime.INHIBIT_WORD_COMPLETIONS)
return wrapper
class ColorSchemeCompletionsListener(sublime_plugin.ViewEventListener):
    """Provide variable and scope name completions for color schemes.

    Extract completions from defined variables in the current file
    and determine scope completions based on our scope_data module.
    Also provide variable completions for themes.
    """
    @classmethod
    def applies_to_primary_view_only(cls):
        # Completions should also work in clones of a file, not just the
        # primary view.
        return False
    @classmethod
    def is_applicable(cls, settings):
        # Attach only to color scheme and theme files.
        return settings.get('syntax') in (syntax_paths.COLOR_SCHEME, syntax_paths.THEME)
    def _line_prefix(self, point):
        # Return the text on point's line, up to (excluding) point itself.
        _, col = self.view.rowcol(point)
        line = self.view.substr(self.view.line(point))
        return line[:col]
    def variable_completions(self, prefix, locations):
        # Collect every variable name defined anywhere in this file, for
        # both color scheme and theme syntaxes.
        variable_regions = self.view.find_by_selector("entity.name.variable.sublime-color-scheme, "
                                                      "entity.name.variable.sublime-theme")
        variables = set(self.view.substr(r) for r in variable_regions)
        l.debug("Found %d variables to complete: %r", len(variables), sorted(variables))
        # Built-in completions first, then the file's own variables.
        return VARIABLES + sorted(("{}\tvariable".format(var), var) for var in variations) if False else VARIABLES + sorted(("{}\tvariable".format(var), var) for var in variables)
    def _scope_prefix(self, locations):
        # Determine entire prefix
        prefixes = set()
        for point in locations:
            text = self._line_prefix(point)
            real_prefix = re.search(r'[\w.-]*$', text).group(0)  # may be zero-length
            prefixes.add(real_prefix)
        # With multiple cursors, only complete when all carets share the
        # same prefix; otherwise offering completions would be ambiguous.
        if len(prefixes) > 1:
            return None
        else:
            return next(iter(prefixes))
    def scope_completions(self, prefix, locations):
        # Scope completions are driven by the full dotted prefix, not just
        # the word ST hands us in `prefix`.
        real_prefix = self._scope_prefix(locations)
        l.debug("Full prefix: %r", real_prefix)
        if real_prefix is None:
            return None
        else:
            return completions_from_prefix(real_prefix)
    @_inhibit_word_completions
    def on_query_completions(self, prefix, locations):
        def verify_scope(selector, offset=0):
            """Verify scope for each location."""
            return all(self.view.match_selector(point + offset, selector)
                       for point in locations)
        # Inside a var(...) call -- or directly after the closing quote of
        # its string argument -- offer variable completions.
        if (
            verify_scope("meta.function-call.var.sublime-color-scheme")
            or (verify_scope("meta.function-call.var.sublime-color-scheme", -1)
                and verify_scope("punctuation.definition.string.end.json"))
        ):
            return self.variable_completions(prefix, locations)
        elif verify_scope("meta.scope-selector.sublime"):
            return self.scope_completions(prefix, locations)
        else:
            return None
class PackagedevEditSchemeCommand(sublime_plugin.WindowCommand):
    """Like syntax-specific settings but for the currently used color scheme."""
    def run(self):
        view = self.window.active_view()
        if not view:
            return
        # The 'color_scheme' setting may be a full resource path
        # ("Packages/.../X.sublime-color-scheme") or just a bare file name
        # that ST resolves by globbing all packages.  A bare name has no
        # '/', so resolve it ourselves; be lazy here and don't consider
        # invalid values.
        scheme_setting = view.settings().get('color_scheme')
        if '/' not in scheme_setting:
            scheme_path = ResourcePath.glob_resources(scheme_setting)[0]
        else:
            scheme_path = ResourcePath(scheme_setting)
        self.window.run_command(
            'edit_settings',
            {
                "base_file": '/'.join(("${packages}",) + scheme_path.parts[1:]),
                "user_file": "${packages}/User/" + scheme_path.stem + '.sublime-color-scheme',
                "default": SCHEME_TEMPLATE,
            },
        )
|
Python
| 0
|
@@ -4443,24 +4443,25 @@
return%0A
+%0A
scheme_p
@@ -4456,71 +4456,298 @@
-scheme_path = ResourcePath(view.settings().get('color_scheme'))
+# Be lazy here and don't consider invalid values%0A scheme_setting = view.settings().get('color_scheme')%0A if '/' not in scheme_setting:%0A scheme_path = ResourcePath.glob_resources(scheme_setting)%5B0%5D%0A else:%0A scheme_path = ResourcePath(scheme_setting)%0A
%0A
|
34e2f4ab9d7258763c4cf024f3f9e48ffec66a66
|
Fix abbrs plugin
|
mistune3/plugins/abbr.py
|
mistune3/plugins/abbr.py
|
import re
import types
from ..util import escape
from ..helpers import PREVENT_BACKSLASH
__all__ = ['abbr']
# https://michelf.ca/projects/php-markdown/extra/#abbr
# Block-level pattern for an abbreviation definition, e.g.
#   *[HTML]: Hyper Text Markup Language
# Groups:
#   abbr_key  -- the text inside the square brackets
#   abbr_text -- the definition, either on the same line or on one
#                indented continuation line
# NOTE(review): PREVENT_BACKSLASH presumably stops an escaped "\]" from
# terminating the key -- confirm against ..helpers.
REF_ABBR = (
    r'^ {0,3}\*\[(?P<abbr_key>[^\]]+)'+ PREVENT_BACKSLASH + r'\]:'
    r'(?P<abbr_text>(?:[ \t]*\n(?: {3,}|\t)[^\n]+)|(?:[^\n]*))$'
)
def parse_ref_abbr(block, m, state):
    """Record an abbreviation definition in ``state.env['ref_abbrs']``.

    The stripped definition text is stored under its key, and a
    ``blank_line`` token is emitted because an abbreviation definition
    can split a paragraph.  Returns the parse position after the match.
    """
    registry = state.env.get('ref_abbrs') or {}
    registry[m.group('abbr_key')] = m.group('abbr_text').strip()
    state.env['ref_abbrs'] = registry
    # abbr definition can split paragraph
    state.append_token({'type': 'blank_line'})
    return m.end() + 1
def process_text(self, text, state):
    """Split *text* into plain ``text`` tokens and ``abbr`` tokens.

    Every occurrence of a key registered via ``*[key]: description`` is
    emitted as an ``abbr`` token carrying its description as the title;
    the surrounding text is emitted as plain text tokens.  When no
    abbreviations are defined the text is appended unchanged.
    """
    ref = state.env.get('ref_abbrs')
    if not ref:
        return state.append_token({'type': 'text', 'raw': text})

    # Merge with an immediately preceding text token; otherwise an
    # abbreviation that the tokenizer split across two adjacent text
    # tokens would never match.
    if state.tokens:
        last = state.tokens[-1]
        if last['type'] == 'text':
            state.tokens.pop()
            text = last['raw'] + text

    # Compile the alternation of all known keys once and cache it on the
    # parse state instead of recompiling for every text chunk.
    abbrs_re = state.env.get('abbrs_re')
    if not abbrs_re:
        abbrs_re = re.compile(r'|'.join(re.escape(k) for k in ref.keys()))
        state.env['abbrs_re'] = abbrs_re

    pos = 0
    while pos < len(text):
        m = abbrs_re.search(text, pos)
        if not m:
            break
        end_pos = m.start()
        if end_pos > pos:
            hole = text[pos:end_pos]
            state.append_token({'type': 'text', 'raw': hole})
        label = m.group(0)
        state.append_token({
            'type': 'abbr',
            'children': self.render_tokens([{'type': 'text', 'raw': label}]),
            'attrs': {'title': ref[label]}
        })
        pos = m.end()
    if pos == 0:
        # special case, just pure text
        state.append_token({'type': 'text', 'raw': text})
    elif pos < len(text):
        state.append_token({'type': 'text', 'raw': text[pos:]})
def render_abbr(renderer, text, title):
    """Render an ``abbr`` token as an HTML ``<abbr>`` element."""
    if title:
        # Title text is attribute content and must be escaped.
        return '<abbr title="' + escape(title) + '">' + text + '</abbr>'
    return '<abbr>' + text + '</abbr>'
def abbr(md):
    """Enable the abbreviation plugin on *md*.

    Registers the ``*[key]: description`` block rule before paragraph
    parsing, swaps the inline ``process_text`` for the abbreviation-aware
    version above, and registers the HTML renderer for ``abbr`` tokens.
    """
    md.block.register('ref_abbr', REF_ABBR, parse_ref_abbr, before='paragraph')
    # replace process_text so inline text is scanned for abbreviations
    md.inline.process_text = types.MethodType(process_text, md.inline)
    if md.renderer and md.renderer.NAME == 'html':
        md.renderer.register('abbr', render_abbr)
|
Python
| 0
|
@@ -807,23 +807,248 @@
%7D)%0A%0A
-pattern
+if state.tokens:%0A last = state.tokens%5B-1%5D%0A if last%5B'type'%5D == 'text':%0A state.tokens.pop()%0A text = last%5B'raw'%5D + text%0A%0A abbrs_re = state.env.get('abbrs_re')%0A if not abbrs_re:%0A abbrs_re
= re.co
@@ -1098,16 +1098,58 @@
eys()))%0A
+ state.env%5B'abbrs_re'%5D = abbrs_re%0A%0A
pos
@@ -1195,15 +1195,16 @@
m =
-pattern
+abbrs_re
.sea
|
3de70716cc8865196847682444c766507ec42776
|
Add missing import, rm init. cap constants
|
plugin/smispy/eseries.py
|
plugin/smispy/eseries.py
|
# Copyright (C) 2011-2014 Red Hat, Inc.
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Author: tasleson
from smis import Smis
import pywbem
import time
from lsm.plugin.smispy.smis import handle_cim_errors
from lsm import LsmError, ErrorNumber, Capabilities, uri_parse
class ESeries(Smis):
    """SMI-S plugin variant with NetApp E-Series specific workarounds."""

    def plugin_register(self, uri, password, timeout, flags=0):
        """
        The only difference here compared to the super method:
        we force fallback mode.
        NetApp-E supports the 'Masking and Mapping' profile but does not
        expose it via CIM_RegisteredProfile.
        """
        protocol = 'http'
        port = Smis.IAAN_WBEM_HTTP_PORT
        u = uri_parse(uri, ['scheme', 'netloc', 'host'], None)
        if u['scheme'].lower() == 'smispy+ssl':
            protocol = 'https'
            port = Smis.IAAN_WBEM_HTTPS_PORT
        if 'port' in u:
            port = u['port']
        # smisproxy.py already made sure the namespace is defined.
        namespace = u['parameters']['namespace']
        self.all_vendor_namespaces = [namespace]
        url = "%s://%s:%s" % (protocol, u['host'], port)
        self.system_list = None
        if 'systems' in u['parameters']:
            # BUG FIX: `split(...)` was previously called as a free
            # function, which is neither defined nor imported here and
            # raised NameError at runtime; use the str method instead.
            self.system_list = u['parameters']["systems"].split(":")
        self._c = pywbem.WBEMConnection(url, (u['username'], password),
                                        namespace)
        if "no_ssl_verify" in u["parameters"] \
                and u["parameters"]["no_ssl_verify"] == 'yes':
            try:
                self._c = pywbem.WBEMConnection(
                    url,
                    (u['username'], password),
                    namespace,
                    no_verification=True)
            except TypeError:
                # pywbem is not holding fix from
                # https://bugzilla.redhat.com/show_bug.cgi?id=1039801
                pass
        self.tmo = timeout
        self.fallback_mode = True

    def _deal_volume_associations(self, vol, lun):
        """
        Check a volume to see if it has any associations with other
        volumes; detach any found.  Returns True when at least one
        association was detached.
        """
        rc = False
        lun_path = lun.path
        ss = self._c.References(lun_path,
                                ResultClass='CIM_StorageSynchronized')
        if len(ss):
            for s in ss:
                if 'SyncedElement' in s:
                    item = s['SyncedElement']
                    if Smis._cim_name_match(item, lun_path):
                        self._detach(vol, s)
                        rc = True
                if 'SystemElement' in s:
                    item = s['SystemElement']
                    if Smis._cim_name_match(item, lun_path):
                        self._detach(vol, s)
                        rc = True
        return rc

    def _is_access_group(self, s):
        # NOTE(review): every candidate is treated as an access group --
        # presumably E-Series only exposes relevant protocol controllers;
        # confirm before tightening.
        return True

    @handle_cim_errors
    def access_group_delete(self, group, flags=0):
        """Delete the protocol controller backing *group*."""
        ccs = self._get_class_instance('CIM_ControllerConfigurationService')
        pc = self._get_cim_instance_by_id('AccessGroup', group.id)
        in_params = {'ProtocolController': pc.path}
        return self._pi("access_group_delete", Smis.JOB_RETRIEVE_NONE,
                        *(self._c.InvokeMethod('DeleteProtocolController',
                                               ccs.path, **in_params)))[0]

    @handle_cim_errors
    def capabilities(self, system, flags=0):
        """Capabilities for *system*, with forced initiator grant/revoke."""
        cap = self._common_capabilities(system)
        # We will explicitly set initiator grant/revoke
        cap.set(Capabilities.VOLUME_INITIATOR_GRANT)
        cap.set(Capabilities.VOLUME_INITIATOR_REVOKE)
        # TODO: We need to investigate why our interrogation code doesn't
        # work.  The array is telling us one thing, but when we try to use
        # it, it doesn't work.
        return cap

    def _detach(self, vol, sync):
        """Detach one CIM_StorageSynchronized association from *vol*."""
        # Get the Configuration service for the system we are interested in.
        scs = self._get_class_instance('CIM_StorageConfigurationService',
                                       'SystemName', vol.system_id)
        # Operation 2 == detach (per the ModifySynchronization call made
        # below).
        in_params = {'Operation': pywbem.Uint16(2),
                     'Synchronization': sync.path}
        job_id = self._pi("_detach", Smis.JOB_RETRIEVE_NONE,
                          *(self._c.InvokeMethod(
                              'ModifySynchronization', scs.path,
                              **in_params)))[0]
        self._poll("ModifySynchronization, detach", job_id)

    @handle_cim_errors
    def volume_delete(self, volume, flags=0):
        """Delete *volume*, detaching its associations first if needed."""
        scs = self._get_class_instance('CIM_StorageConfigurationService',
                                       'SystemName', volume.system_id)
        lun = self._get_cim_instance_by_id('Volume', volume.id)
        # If we actually have an association to delete, the volume will be
        # deleted with the association, no need to call ReturnToStoragePool.
        if not self._deal_volume_associations(volume, lun):
            in_params = {'TheElement': lun.path}
            # Delete returns None or Job number
            return self._pi("volume_delete", Smis.JOB_RETRIEVE_NONE,
                            *(self._c.InvokeMethod('ReturnToStoragePool',
                                                   scs.path, **in_params)))[0]
        # Loop to check to see if volume is actually gone yet!
        try:
            lun = self._get_cim_instance_by_id('Volume', volume.id)
            while lun is not None:
                lun = self._get_cim_instance_by_id('Volume', volume.id)
                time.sleep(0.125)
        except LsmError:
            # A lookup failure here presumably means the volume is gone --
            # treat it as successful deletion.
            pass

    @handle_cim_errors
    def access_group_initiator_delete(self, access_group, init_id, flags=0):
        """
        When using HidePaths to remove initiator, the whole SPC will be
        removed. Before we find a workaround for this, I would like to have
        this method disabled as NO_SUPPORT.
        """
        raise LsmError(ErrorNumber.NO_SUPPORT,
                       "SMI-S plugin does not support "
                       "access_group_initiator_delete() against NetApp-E")
|
Python
| 0
|
@@ -787,16 +787,41 @@
rt Smis%0A
+from string import split%0A
import p
@@ -933,22 +933,8 @@
ber,
- Capabilities,
uri
@@ -4125,171 +4125,8 @@
m)%0A%0A
- #We will explicitly set initiator grant/revoke%0A cap.set(Capabilities.VOLUME_INITIATOR_GRANT)%0A cap.set(Capabilities.VOLUME_INITIATOR_REVOKE)%0A%0A
|
e3fb065c5985027cbcbe01dbfbc3414a96fff961
|
Remove setting class variable
|
keystone/common/validation/validators.py
|
keystone/common/validation/validators.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Internal implementation of request body validating middleware."""
import jsonschema
from keystone import exception
from keystone.i18n import _
class SchemaValidator(object):
    """Resource reference validator class."""

    # Base validator class to extend; the usable validator instance itself
    # is per-object state created in __init__.  (A redundant
    # ``validator = None`` class attribute, always shadowed by the
    # instance attribute, has been removed.)
    validator_org = jsonschema.Draft4Validator

    def __init__(self, schema):
        """Build a format-checking Draft 4 validator for *schema*."""
        # NOTE(lbragstad): If at some point in the future we want to extend
        # our validators to include something specific we need to check for,
        # we can do it here. Nova's V3 API validators extend the validator to
        # include `self._validate_minimum` and `self._validate_maximum`. This
        # would be handy if we needed to check for something the jsonschema
        # didn't by default. See the Nova V3 validator for details on how this
        # is done.
        validators = {}
        validator_cls = jsonschema.validators.extend(self.validator_org,
                                                     validators)
        fc = jsonschema.FormatChecker()
        self.validator = validator_cls(schema, format_checker=fc)

    def validate(self, *args, **kwargs):
        """Validate the arguments, raising SchemaValidationError on failure."""
        try:
            self.validator.validate(*args, **kwargs)
        except jsonschema.ValidationError as ex:
            # NOTE: For whole OpenStack message consistency, this error
            # message has been written in a format consistent with WSME.
            if len(ex.path) > 0:
                # NOTE(lbragstad): Here we could think about using iter_errors
                # as a method of providing invalid parameters back to the
                # user.
                # TODO(lbragstad): If the value of a field is confidential or
                # too long, then we should build the masking in here so that
                # we don't expose sensitive user information in the event it
                # fails validation.
                detail = _("Invalid input for field '%(path)s'. The value is "
                           "'%(value)s'.") % {'path': ex.path.pop(),
                                              'value': ex.instance}
            else:
                detail = ex.message
            raise exception.SchemaValidationError(detail=detail)
|
Python
| 0.999429
|
@@ -765,29 +765,8 @@
%22%22%0A%0A
- validator = None%0A
|
f20c911285cc83f2cfe2b4650ba85f4b82eae43c
|
Improve description about the api
|
plyer/facades/temperature.py
|
plyer/facades/temperature.py
|
class Temperature(object):
    '''Temperature facade.

    The temperature sensor measures the ambient (room) air temperature
    in degrees Celsius.

    With the method `enable` you can turn on the temperature sensor and
    the `disable` method stops it.

    Use the property `temperature` to get the ambient air temperature
    in degrees Celsius.
    '''
    @property
    def temperature(self):
        '''Current ambient air temperature in degrees Celsius.'''
        return self._get_temperature()
    def enable(self):
        '''Enable the temperature sensor.'''
        self._enable()
    def disable(self):
        '''Disable the temperature sensor.'''
        self._disable()
    # private -- hooks that platform-specific implementations are expected
    # to override; the facade itself only raises NotImplementedError.
    def _get_temperature(self, **kwargs):
        raise NotImplementedError()
    def _enable(self, **kwargs):
        raise NotImplementedError()
    def _disable(self, **kwargs):
        raise NotImplementedError()
|
Python
| 0.000004
|
@@ -116,24 +116,28 @@
mperature in
+%0A
degrees Cel
@@ -140,16 +140,21 @@
Celsius
+ (%C2%B0C)
%0A Wit
|
bec24879cafaa4e17dd7cd56bcdaa3b04cb378b9
|
remove test_dpdk_vf.py from run_tests
|
plugin_test/run_tests.py
|
plugin_test/run_tests.py
|
"""Copyright 2015 Mirantis, Inc.
Licensed under the Apache License, Version 2.0 (the "License"); you may
not use this file except in compliance with the License. You may obtain
copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations
under the License.
"""
import sys
import os
import re
from nose.plugins import Plugin
from paramiko.transport import _join_lingering_threads
class CloseSSHConnectionsPlugin(Plugin):
    """Closes all paramiko's ssh connections after each test case.

    Plugin fixes proboscis disability to run cleanup of any kind.
    'afterTest' calls _join_lingering_threads function from paramiko,
    which stops all threads (set the state to inactive and joins for 10s)
    """
    # Name under which nose registers this plugin.
    name = 'closesshconnections'
    def options(self, parser, env=os.environ):
        """Options."""
        super(CloseSSHConnectionsPlugin, self).options(parser, env=env)
    def configure(self, options, conf):
        """Configure env."""
        super(CloseSSHConnectionsPlugin, self).configure(options, conf)
        # Force-enable: the cleanup below must always run.
        self.enabled = True
    def afterTest(self, *args, **kwargs):
        """After_Test.

        After_Test calls _join_lingering_threads function from paramiko,
        which stops all threads (set the state to inactive and joins for 10s).
        """
        _join_lingering_threads()
def import_tests():
    """Import test suite of project.

    Importing a module registers its proboscis test groups as a side
    effect; the modules themselves are otherwise unused here.
    """
    from tests import test_smoke_bvt  # noqa
    from tests import test_integration  # noqa
    from tests import test_functional  # noqa
    from tests import test_failover  # noqa
    from tests import test_system  # noqa
    from tests import test_dpdk  # noqa
    from tests import test_sriov  # noqa
    # test_dpdk_vf is intentionally not imported: the module was removed
    # in favor of test_dpdk_on_vf, and importing it fails the whole run.
    from tests import test_dpdk_on_vf  # noqa
    from tests import test_contrail_multiple_networks  # noqa
def run_tests():
    """Run test cases."""
    from proboscis import TestProgram  # noqa
    # Register all test groups with proboscis before starting.
    import_tests()
    # Run Proboscis and exit.
    TestProgram(
        addplugins=[CloseSSHConnectionsPlugin()]
    ).run_and_exit()
if __name__ == '__main__':
    # Make the bundled fuel-qa checkout importable.
    sys.path.append(sys.path[0] + "/fuel-qa")
    import_tests()
    from fuelweb_test.helpers.patching import map_test
    # Patching groups need the test tree remapped before proboscis runs.
    if any(re.search(r'--group=patching_master_tests', arg)
           for arg in sys.argv):
        map_test('master')
    elif any(re.search(r'--group=patching.*', arg) for arg in sys.argv):
        map_test('environment')
    run_tests()
|
Python
| 0.000002
|
@@ -1981,51 +1981,8 @@
oqa%0A
- from tests import test_dpdk_vf # noqa%0A
|
6560d0e29fb5c86188d770958aed2a34921e811a
|
Remove unused import
|
socorro/scripts/add_crashid_to_queue.py
|
socorro/scripts/add_crashid_to_queue.py
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import argparse
import os
import os.path
import sys
import pika
from socorro.lib.ooid import is_crash_id_valid
from socorro.scripts import get_envvar, WrappedTextHelpFormatter
# Help text appended to the argparse --help output (see main()).
EPILOG = """
To use in a docker-based local dev environment:
$ socorro-cmd add_crashid_to_queue socorro.normal <CRASHID>
Queues:
* socorro.normal - normal processing
* socorro.priority - priority processing
"""
def build_pika_connection(host, port, virtual_host, user, password):
    """Build a pika (rabbitmq) connection"""
    credentials = pika.credentials.PlainCredentials(user, password)
    parameters = pika.ConnectionParameters(
        host=host,
        port=port,
        virtual_host=virtual_host,
        connection_attempts=10,
        socket_timeout=10,
        retry_delay=1,
        credentials=credentials,
    )
    return pika.BlockingConnection(parameters)
def main(argv=None):
    """Send one or more crash ids to a RabbitMQ queue for processing.

    Returns 0 on success, 1 when any crash id is invalid.
    """
    parser = argparse.ArgumentParser(
        formatter_class=WrappedTextHelpFormatter,
        description='Send crash id to rabbitmq queue for processing',
        epilog=EPILOG.strip(),
    )
    parser.add_argument('queue', help='the queue to add the crash id to')
    parser.add_argument('crashid', nargs='*', help='one or more crash ids to add')
    if argv is None:
        args = parser.parse_args()
    else:
        args = parser.parse_args(argv)
    # This will pull crash ids from the command line if specified, or stdin
    crashids_iterable = args.crashid or sys.stdin
    crashids = [crashid.strip() for crashid in crashids_iterable if crashid.strip()]
    # Verify crash ids first
    for crashid in crashids:
        if not is_crash_id_valid(crashid):
            print('Crash id "%s" is not valid. Exiting.' % crashid)
            return 1
    # NOTE(willkg): This matches what's in socorro.external.rabbitmq classes without us having to
    # use configman and ConnectionContext and deal with switching between configured queues
    host = get_envvar('resource.rabbitmq.host')
    port = int(get_envvar('resource.rabbitmq.port', '5672'))
    user = get_envvar('secrets.rabbitmq.rabbitmq_user')
    password = get_envvar('secrets.rabbitmq.rabbitmq_password')
    virtual_host = get_envvar('resource.rabbitmq.virtual_host', '/')
    # Echo the effective configuration (password elided) for the operator.
    print('Configuration:')
    print('host: %s' % host)
    print('port: %s' % port)
    print('user: %s' % user)
    print('password: ********')
    print('virtual_host: %s' % virtual_host)
    print('queue: %s' % args.queue)
    print('# crashids: %s' % len(crashids))
    print('')
    conn = build_pika_connection(host, port, virtual_host, user, password)
    # delivery_mode=2 makes the messages persistent (AMQP), so they survive
    # a broker restart.
    props = pika.BasicProperties(delivery_mode=2)
    channel = conn.channel()
    for crashid in crashids:
        print('Sending %s to %s....' % (crashid, args.queue))
        channel.basic_publish(
            exchange='',
            routing_key=args.queue,
            body=crashid,
            properties=props
        )
    print('Done!')
    return 0
if __name__ == '__main__':
    # Exit with main()'s return code.
    sys.exit(main())
|
Python
| 0.000001
|
@@ -246,23 +246,8 @@
os%0A
-import os.path%0A
impo
|
658abc2bb8dd9fe896b6f1145b6a9f669320ce3b
|
Debug log the raw Zaqar message
|
tripleoclient/plugin.py
|
tripleoclient/plugin.py
|
# Copyright 2013 Nebula Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""OpenStackClient Plugin interface"""
import json
import logging
import socket
import uuid
from osc_lib import utils
from swiftclient import client as swift_client
import websocket
from tripleoclient import exceptions
LOG = logging.getLogger(__name__)
# Fallback when OS_TRIPLEOCLIENT_API_VERSION is not set in the environment.
DEFAULT_TRIPLEOCLIENT_API_VERSION = '1'
# Required by the OSC plugin interface
API_NAME = 'tripleoclient'
API_VERSION_OPTION = 'os_tripleoclient_api_version'
API_VERSIONS = {
    '1': 'tripleoclient.plugin'
}
def make_client(instance):
    # Required by the OSC plugin interface: build the client wrapper for
    # this OpenStackShell instance.
    return ClientWrapper(instance)
# Required by the OSC plugin interface
def build_option_parser(parser):
    """Hook to add global options

    Called from openstackclient.shell.OpenStackShell.__init__()
    after the builtin parser has been initialized. This is
    where a plugin can add global options such as an API version setting.

    :param argparse.ArgumentParser parser: The parser object that has been
        initialized by OpenStackShell.
    """
    parser.add_argument(
        '--os-tripleoclient-api-version',
        metavar='<tripleoclient-api-version>',
        default=utils.env(
            'OS_TRIPLEOCLIENT_API_VERSION',
            default=DEFAULT_TRIPLEOCLIENT_API_VERSION),
        help='TripleO Client API version, default=' +
             DEFAULT_TRIPLEOCLIENT_API_VERSION +
             ' (Env: OS_TRIPLEOCLIENT_API_VERSION)')
    return parser
class WebsocketClient(object):
    """Thin client for the Zaqar messaging websocket."""

    def __init__(self, instance, queue_name="tripleo"):
        self._project_id = None
        self._ws = None
        self._websocket_client_id = None
        self._queue_name = queue_name
        endpoint = instance.get_endpoint_for_service_type(
            'messaging-websocket')
        token = instance.auth.get_token(instance.session)
        self._project_id = instance.auth_ref.project_id
        self._websocket_client_id = str(uuid.uuid4())
        LOG.debug('Instantiating messaging websocket client: %s', endpoint)
        try:
            self._ws = websocket.create_connection(endpoint)
        except socket.error:
            LOG.error("Could not establish a connection to the Zaqar "
                      "websocket. The command was sent but the answer "
                      "could not be read.")
            raise
        self.send('authenticate', extra_headers={'X-Auth-Token': token})
        # create and subscribe to a queue
        # NOTE: if the queue exists it will 204
        self.send('queue_create', {'queue_name': queue_name})
        self.send('subscription_create', {
            'queue_name': queue_name,
            'ttl': 10000
        })

    def cleanup(self):
        """Close the underlying websocket."""
        self._ws.close()

    def send(self, action, body=None, extra_headers=None):
        """Send *action* (with optional body/headers), return the reply.

        Raises RuntimeError when Zaqar replies with a non-2xx status.
        """
        headers = {
            'Client-ID': self._websocket_client_id,
            'X-Project-ID': self._project_id
        }
        if extra_headers is not None:
            headers.update(extra_headers)
        msg = {'action': action, 'headers': headers}
        if body:
            msg['body'] = body
        self._ws.send(json.dumps(msg))
        data = self.recv()
        if data['headers']['status'] not in (200, 201, 204):
            raise RuntimeError(data)
        return data

    def recv(self):
        """Receive one frame and JSON-decode it."""
        return json.loads(self._ws.recv())

    def wait_for_messages(self, timeout=None):
        """Wait for messages on a Zaqar queue

        A timeout can be provided in seconds, if no timeout is provided it
        will block forever until a message is received. If no message is
        received (for example, Zaqar is down) then it will block until manually
        killed.

        If no timeout is provided this method will never stop waiting for new
        messages. It is the responsibility of the consumer to stop consuming
        messages.
        """
        if timeout is None:
            LOG.warning("Waiting for messages on queue '{}' with no timeout."
                        .format(self._queue_name))
        self._ws.settimeout(timeout)
        while True:
            try:
                # Log the raw Zaqar message before unwrapping it, so a
                # message with an unexpected structure can be diagnosed
                # instead of failing on a bare KeyError.
                message = self.recv()
                LOG.debug(message)
                yield message['body']['payload']
            except websocket.WebSocketTimeoutException:
                raise exceptions.WebSocketTimeout()

    def __enter__(self):
        """Return self to allow usage as a context manager"""
        return self

    def __exit__(self, *exc):
        """Call cleanup when exiting the context manager"""
        self.cleanup()
class ClientWrapper(object):
    """Lazily-built holder for the tripleoclient service clients."""
    def __init__(self, instance):
        self._instance = instance
        self._object_store = None
        self._local_orchestration = None
    def local_orchestration(self, api_port):
        """Returns an local_orchestration service client"""
        # Cached: build the heat client only on first use.
        if self._local_orchestration is not None:
            return self._local_orchestration
        API_VERSIONS = {
            '1': 'heatclient.v1.client.Client',
        }
        heat_client = utils.get_client_class(
            API_NAME,
            '1',
            API_VERSIONS)
        LOG.debug('Instantiating local_orchestration client: %s', heat_client)
        # NOTE(review): credentials/token are placeholders for a heat API
        # listening on localhost -- presumably the undercloud installer's
        # heat; confirm before reusing elsewhere.
        client = heat_client(
            endpoint='http://127.0.0.1:%s/v1/admin' % api_port,
            username='admin',
            password='fake',
            region_name='regionOne',
            token='fake',
        )
        self._local_orchestration = client
        return self._local_orchestration
    def messaging_websocket(self, queue_name='tripleo'):
        """Returns a websocket for the messaging service"""
        return WebsocketClient(self._instance, queue_name)
    @property
    def object_store(self):
        """Returns an object_store service client

        The Swift/Object client returned by python-openstack client isn't an
        instance of python-swiftclient, and had far less functionality.
        """
        if self._object_store is not None:
            return self._object_store
        endpoint = self._instance.get_endpoint_for_service_type(
            "object-store",
            region_name=self._instance._region_name,
        )
        token = self._instance.auth.get_token(self._instance.session)
        kwargs = {
            'preauthurl': endpoint,
            'preauthtoken': token
        }
        self._object_store = swift_client.Connection(**kwargs)
        return self._object_store
|
Python
| 0.000001
|
@@ -4635,13 +4635,17 @@
-yield
+message =
sel
@@ -4652,16 +4652,81 @@
f.recv()
+%0A LOG.debug(message)%0A yield message
%5B'body'%5D
|
c20c76e2cb4dbbe37f6a294ead050f1b3883d5a2
|
FIX use the model_selection
|
sklearn/ensemble/tests/test_iforest.py
|
sklearn/ensemble/tests/test_iforest.py
|
"""
Testing for Isolation Forest algorithm (sklearn.ensemble.iforest).
"""
# Authors: Nicolas Goix <nicolas.goix@telecom-paristech.fr>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# License: BSD 3 clause
import numpy as np
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_warns_message
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_no_warnings
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import ignore_warnings
from sklearn.grid_search import ParameterGrid
from sklearn.ensemble import IsolationForest
from sklearn.cross_validation import train_test_split
from sklearn.datasets import load_boston, load_iris
from sklearn.utils import check_random_state
from sklearn.metrics import roc_auc_score
from scipy.sparse import csc_matrix, csr_matrix
# Shared RNG so the dataset shuffling below is reproducible.
rng = check_random_state(0)
# load the iris dataset
# and randomly permute it
iris = load_iris()
perm = rng.permutation(iris.target.size)
iris.data = iris.data[perm]
iris.target = iris.target[perm]
# also load the boston dataset
# and randomly permute it
boston = load_boston()
perm = rng.permutation(boston.target.size)
boston.data = boston.data[perm]
boston.target = boston.target[perm]
def test_iforest():
    """Check Isolation Forest for various parameter settings."""
    X_train = np.array([[0, 1], [1, 2]])
    X_test = np.array([[2, 1], [1, 1]])
    # max_samples covers a fraction, the full set, and an int > n_samples.
    grid = ParameterGrid({"n_estimators": [3],
                          "max_samples": [0.5, 1.0, 3],
                          "bootstrap": [True, False]})
    # Smoke test: fit/predict must not raise for any combination.
    with ignore_warnings():
        for params in grid:
            IsolationForest(random_state=rng,
                            **params).fit(X_train).predict(X_test)
def test_iforest_sparse():
    """Check IForest for various parameter settings on sparse input.

    For every parameter combination, a forest trained/evaluated on sparse
    matrices must predict exactly what the dense-trained forest predicts.
    """
    rng = check_random_state(0)
    X_train, X_test, y_train, y_test = train_test_split(boston.data[:50],
                                                        boston.target[:50],
                                                        random_state=rng)
    grid = ParameterGrid({"max_samples": [0.5, 1.0],
                          "bootstrap": [True, False]})
    for sparse_format in [csc_matrix, csr_matrix]:
        X_train_sparse = sparse_format(X_train)
        X_test_sparse = sparse_format(X_test)
        for params in grid:
            # Trained on sparse format
            sparse_classifier = IsolationForest(
                n_estimators=10, random_state=1, **params).fit(X_train_sparse)
            sparse_results = sparse_classifier.predict(X_test_sparse)
            # Trained on dense format
            dense_classifier = IsolationForest(
                n_estimators=10, random_state=1, **params).fit(X_train)
            dense_results = dense_classifier.predict(X_test)
            # (The same assertion previously appeared a second time after
            # the loop, re-checking only the last iteration via leaked
            # loop variables -- removed as redundant.)
            assert_array_equal(sparse_results, dense_results)
def test_iforest_error():
    """Test that it gives proper exception on deficient input."""
    X = iris.data
    # Test max_samples
    # Negative, zero, and a fraction above 1.0 are all invalid.
    assert_raises(ValueError,
                  IsolationForest(max_samples=-1).fit, X)
    assert_raises(ValueError,
                  IsolationForest(max_samples=0.0).fit, X)
    assert_raises(ValueError,
                  IsolationForest(max_samples=2.0).fit, X)
    # The dataset has less than 256 samples, explicitly setting max_samples > n_samples
    # should result in a warning. If not set explicitly there should be no warning
    assert_warns_message(UserWarning,
                         "max_samples will be set to n_samples for estimation",
                         IsolationForest(max_samples=1000).fit, X)
    assert_no_warnings(IsolationForest(max_samples='auto').fit, X)
    # Strings other than 'auto' are rejected.
    assert_raises(ValueError,
                  IsolationForest(max_samples='foobar').fit, X)
def test_recalculate_max_depth():
    """Check that max_depth is recalculated when max_samples is reset to n_samples"""
    X = iris.data
    clf = IsolationForest().fit(X)
    for est in clf.estimators_:
        # Every sub-estimator's depth limit must be ceil(log2(n_samples)).
        assert_equal(est.max_depth, int(np.ceil(np.log2(X.shape[0]))))
def test_max_samples_attribute():
    # max_samples_ must reflect the effective sample count after fitting.
    X = iris.data
    clf = IsolationForest().fit(X)
    assert_equal(clf.max_samples_, X.shape[0])
    # An int larger than n_samples warns and is clipped to n_samples.
    clf = IsolationForest(max_samples=500)
    assert_warns_message(UserWarning,
                         "max_samples will be set to n_samples for estimation",
                         clf.fit, X)
    assert_equal(clf.max_samples_, X.shape[0])
    # A float is interpreted as a fraction of n_samples.
    clf = IsolationForest(max_samples=0.4).fit(X)
    assert_equal(clf.max_samples_, 0.4*X.shape[0])
def test_iforest_parallel_regression():
    """Check parallel regression."""
    rng = check_random_state(0)
    X_train, X_test, y_train, y_test = train_test_split(boston.data,
                                                        boston.target,
                                                        random_state=rng)
    ensemble = IsolationForest(n_jobs=3,
                               random_state=0).fit(X_train)
    # Changing n_jobs after fitting must not change the predictions.
    ensemble.set_params(n_jobs=1)
    y1 = ensemble.predict(X_test)
    ensemble.set_params(n_jobs=2)
    y2 = ensemble.predict(X_test)
    assert_array_almost_equal(y1, y2)
    # A forest fit serially with the same seed must also agree.
    ensemble = IsolationForest(n_jobs=1,
                               random_state=0).fit(X_train)
    y3 = ensemble.predict(X_test)
    assert_array_almost_equal(y1, y3)
def test_iforest_performance():
    """Test Isolation Forest performs well.

    Train on 100 inlier points and check that the scores predicted for a
    held-out mix of 20 inliers and 20 outliers achieve a near-perfect
    ROC AUC.
    """
    # Generate train/test data
    rng = check_random_state(2)
    X = 0.3 * rng.randn(120, 2)
    # (A dead `X_train = np.r_[X + 2, X - 2]` assignment, immediately
    # overwritten by the next line, has been removed.)
    X_train = X[:100]
    # Generate some abnormal novel observations
    X_outliers = rng.uniform(low=-4, high=4, size=(20, 2))
    X_test = np.r_[X[100:], X_outliers]
    y_test = np.array([0] * 20 + [1] * 20)
    # fit the model
    clf = IsolationForest(max_samples=100, random_state=rng).fit(X_train)
    # predict scores (the lower, the more normal)
    y_pred = clf.predict(X_test)
    # the scores must rank outliers above inliers almost perfectly
    assert_greater(roc_auc_score(y_test, y_pred), 0.98)
def test_iforest_works():
    # toy sample (the last two samples are outliers)
    X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1], [6, 3], [-4, 7]]
    # Test LOF
    clf = IsolationForest(random_state=rng)
    clf.fit(X)
    pred = clf.predict(X)
    # assert detect outliers:
    # both outliers must score strictly higher than every inlier
    assert_greater(np.min(pred[-2:]), np.max(pred[:-2]))
|
Python
| 0.000001
|
@@ -1,13 +1,12 @@
-%0A
%22%22%22%0ATesting
@@ -770,20 +770,19 @@
arn.
-cross_valida
+model_selec
tion
|
f956159efe43f14618c1b2baed8abaddbab42488
|
Fix export_models.py to work with new structure
|
scripts/export_models.py
|
scripts/export_models.py
|
import os
import utils
def process_file(filename):
    """Return the model-relevant lines of a TypeScript source file.

    Carriage returns are stripped and the ``default`` keyword is dropped;
    decorator lines, ``import`` lines, blank lines and one-line
    ``export ...;`` statements are filtered out.
    """
    with open(filename, "r") as handle:
        lines = handle.read().replace("\r", "").split("\n")
    kept = []
    for line in lines:
        line = line.replace(" default ", " ")
        stripped = line.strip()
        if stripped.startswith("@") or stripped.startswith("import ") or stripped == "":
            continue
        if stripped.startswith("export ") and stripped.endswith(";"):
            continue
        kept.append(line)
    return kept
def process_dir(dirname):
    """Concatenate the processed contents of every ``*.ts`` file in *dirname*.

    Files whose names start with ``I`` are processed first so interface
    definitions precede the classes that use them; ``index.ts`` and
    non-TypeScript files are skipped.  *dirname* must end with a path
    separator, as file names are appended to it directly.
    """
    # BUG FIX: the original removed/inserted entries of the list while
    # iterating over it, which skips elements; partition instead.
    entries = os.listdir(dirname)
    interface_files = [f for f in entries if f.startswith("I")]
    other_files = [f for f in entries if not f.startswith("I")]
    contents = []
    for filename in interface_files + other_files:
        if filename == "index.ts" or not filename.endswith(".ts"):
            continue
        contents += process_file(dirname + filename)
    return contents
def main():
    """Regenerate static/js/lib/Models.ts from the server-side models."""
    server_dir = utils.get_server_dir()
    lines = ['import * as linq from "linq";']
    # Interfaces first, then the concrete models that reference them.
    lines += process_dir(server_dir + "db/interfaces/")
    lines += process_dir(server_dir + "db/models/")
    comment = "// This file is automatically generated by /scripts/export_models.py.\n"
    raw = comment + "\n".join(lines)
    out_file = server_dir + "static/js/lib/Models.ts"
    handle = open(out_file, "w")
    handle.write(raw)
    handle.close()
    print("Wrote output to {}. Compiling client-side Typescript...".format(out_file))
    # Compile from inside static/js, then restore the working directory.
    os.chdir(server_dir + "static/js")
    utils.compile_ts()
    os.chdir(server_dir)
# Script entry point.
if __name__ == "__main__":
    main()
|
Python
| 0.000001
|
@@ -1,12 +1,24 @@
+import json%0A
import os%0Aim
@@ -576,16 +576,21 @@
+real_
files =
os.l
@@ -589,27 +589,10 @@
s =
-os.listdir(dirname)
+%5B%5D
%0A
@@ -602,24 +602,72 @@
ntents = %5B%5D%0A
+ for root, _, files in os.walk(dirname):%0A
for file
@@ -724,32 +724,36 @@
t first%0A
+
+
if filename.star
@@ -781,58 +781,120 @@
-files.remove(filename)%0A files.insert(0,
+ real_files.insert(0, root + %22/%22 + filename)%0A else:%0A real_files.append(root + %22/%22 +
fil
@@ -916,24 +916,29 @@
filename in
+real_
files:%0A
@@ -943,24 +943,41 @@
if
+os.path.basename(
filename
== %22ind
@@ -968,16 +968,17 @@
filename
+)
== %22ind
@@ -1075,18 +1075,8 @@
ile(
-dirname +
file
@@ -1118,22 +1118,23 @@
():%0A
-server
+content
_dir = u
@@ -1154,16 +1154,147 @@
er_dir()
+ + %22content/%22%0A handle = open(content_dir + %22config.json%22, %22r%22)%0A content_config = json.loads(handle.read())%0A handle.close()
%0A lin
@@ -1340,60 +1340,120 @@
-line
+model_dir
s
-+
=
-process_dir(server_dir + %22db/interfaces/%22)%0A
+content_config%5B%22modelDirs%22%5D%0A model_dirs.insert(0, %22enums%22)%0A for model_dir in model_dirs:%0A
@@ -1477,31 +1477,35 @@
dir(
-server
+content
_dir +
-%22db/
model
-s
+_dir + %22
/%22)%0A
@@ -1587,17 +1587,16 @@
odels.py
-.
%5Cn%22%0A
@@ -1643,22 +1643,23 @@
_file =
-server
+content
_dir + %22
@@ -1852,30 +1852,31 @@
os.chdir(
-server
+content
_dir + %22stat
@@ -1919,16 +1919,26 @@
s.chdir(
+utils.get_
server_d
@@ -1939,16 +1939,18 @@
rver_dir
+()
)%0A%0Aif __
|
532c7db03b90b54b8859d764084f79cd3ee7e50b
|
Update example to use RAX_CREDS_FILE env var
|
plugins/inventory/rax.py
|
plugins/inventory/rax.py
|
#!/usr/bin/env python
# (c) 2013, Jesse Keating <jesse.keating@rackspace.com>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
inventory: rax
short_description: Rackspace Public Cloud external inventory script
description:
- Generates inventory that Ansible can understand by making API request to Rackspace Public Cloud API
- |
When run against a specific host, this script returns the following variables:
rax_os-ext-sts_task_state
rax_addresses
rax_links
rax_image
rax_os-ext-sts_vm_state
rax_flavor
rax_id
rax_rax-bandwidth_bandwidth
rax_user_id
rax_os-dcf_diskconfig
rax_accessipv4
rax_accessipv6
rax_progress
rax_os-ext-sts_power_state
rax_metadata
rax_status
rax_updated
rax_hostid
rax_name
rax_created
rax_tenant_id
rax__loaded
where some item can have nested structure.
- credentials are set in a credentials file
version_added: None
options:
creds_file:
description:
- File to find the Rackspace Public Cloud credentials in
required: true
default: null
region_name:
description:
- Region name to use in request
required: false
default: DFW
author: Jesse Keating
notes:
- Two environment variables need to be set, RAX_CREDS and RAX_REGION.
- RAX_CREDS points to a credentials file appropriate for pyrax
- RAX_REGION defines a Rackspace Public Cloud region (DFW, ORD, LON, ...)
requirements: [ "pyrax" ]
examples:
- description: List server instances
code: RAX_CREDS=~/.raxpub RAX_REGION=ORD rax.py --list
- description: List server instance properties
code: RAX_CREDS=~/.raxpub RAX_REGION=ORD rax.py --host <HOST_IP>
'''
import sys
import re
import os
import argparse
try:
import json
except:
import simplejson as json
try:
import pyrax
except ImportError:
print('pyrax required for this module')
sys.exit(1)
# Setup the parser
parser = argparse.ArgumentParser(description='List active instances',
epilog='List by itself will list all the active \
instances. Listing a specific instance will show \
all the details about the instance.')
parser.add_argument('--list', action='store_true', default=True,
help='List active servers')
parser.add_argument('--host',
help='List details about the specific host (IP address)')
args = parser.parse_args()
# setup the auth
try:
creds_file = os.environ['RAX_CREDS_FILE']
region = os.environ['RAX_REGION']
except KeyError, e:
sys.stderr.write('Unable to load %s\n' % e.message)
sys.exit(1)
pyrax.set_setting('identity_type', 'rackspace')
try:
pyrax.set_credential_file(os.path.expanduser(creds_file),
region=region)
except Exception, e:
sys.stderr.write("%s: %s\n" % (e, e.message))
sys.exit(1)
# Execute the right stuff
if not args.host:
groups = {}
# Cycle on servers
for server in pyrax.cloudservers.list():
# Define group (or set to empty string)
try:
group = server.metadata['group']
except KeyError:
group = 'undefined'
# Create group if not exist and add the server
groups.setdefault(group, []).append(server.accessIPv4)
# Return server list
print(json.dumps(groups))
sys.exit(0)
# Get the deets for the instance asked for
results = {}
# This should be only one, but loop anyway
for server in pyrax.cloudservers.list():
if server.accessIPv4 == args.host:
for key in [key for key in vars(server) if
key not in ('manager', '_info')]:
# Extract value
value = getattr(server, key)
# Generate sanitized key
key = 'rax_' + re.sub("[^A-Za-z0-9\-]", "_", key).lower()
results[key] = value
print(json.dumps(results))
sys.exit(0)
|
Python
| 0
|
@@ -2237,32 +2237,37 @@
code: RAX_CREDS
+_FILE
=~/.raxpub RAX_R
@@ -2362,16 +2362,21 @@
AX_CREDS
+_FILE
=~/.raxp
|
7f876881267da77efeb8c3f5bb585502e33e76fc
|
add imagefile funcs to namespace
|
ts_charting/__init__.py
|
ts_charting/__init__.py
|
from ts_charting.figure import Figure, Grapher
from ts_charting.charting import *
import ts_charting.ohlc
import ts_charting.boxplot
import ts_charting.span
from ts_charting.styler import styler, marker_styler, level_styler
from ts_charting.ipython import figsize, IN_NOTEBOOK
from ts_charting.plot_3d import plot_wireframe
|
Python
| 0.000003
|
@@ -319,8 +319,64 @@
reframe%0A
+from ts_charting.imagefile import plot_pdf, save_images%0A
|
cb47cd2fbd37b9fce12abbf1c0ccff38d863f838
|
Add some refactorings
|
scripts/generate_code.py
|
scripts/generate_code.py
|
from collections import namedtuple
from itertools import dropwhile, groupby
VARIABLE_SECTION_START = 'Pos. = '
VARIABLE_NAME_FIELD = 'Variable = '
VARIABLE_LABEL_FIELD = 'Variable label = '
VALUE_FIELD = 'Value = '
VALUE_LABEL_FIELD = 'Label = '
Variable = namedtuple('Variable', ['id', 'name', 'label', 'values'])
class DataDictionaryParser():
def __init__(self, path_to_file):
with path_to_file.open('r') as txt_file:
lines = txt_file.readlines()
lines = filter(lambda line: line is not '\n', lines)
lines = dropwhile(lambda line: not line.startswith(VARIABLE_SECTION_START), lines)
lines = (line.rstrip('\n') for line in lines)
variable_sections = self._variable_section_generator(lines)
self.variables = [self._parse_variable(variable_section)
for variable_section in variable_sections]
self.number_variables = len(list(variable_sections))
@classmethod
def _parse_variable(cls, variable_section):
variable_section = list(variable_section)
position, name, label = variable_section[0].split('\t')
value_lines = filter(lambda line: line.startswith(VALUE_FIELD), variable_section)
return Variable(
id=int(position.split(VARIABLE_SECTION_START)[1]),
name=name.split(VARIABLE_NAME_FIELD)[1],
label=label.split(VARIABLE_LABEL_FIELD)[1],
values=cls._parse_variable_values(value_lines)
)
@staticmethod
def _parse_variable_values(value_lines):
value_lines_split = (line.split('\t') for line in value_lines)
values = {
value.split(VALUE_FIELD)[1]: label.split(VALUE_LABEL_FIELD)[1] for value, label in value_lines_split
}
if len(values) == 0:
return None
else:
return values
@staticmethod
def _variable_section_generator(lines):
variable_section = []
for line in lines:
if line.startswith(VARIABLE_SECTION_START) and len(variable_section) > 0:
yield variable_section
variable_section = []
variable_section.append(line)
yield variable_section
|
Python
| 0.000243
|
@@ -1544,79 +1544,8 @@
s):%0A
- value_lines_split = (line.split('%5Ct') for line in value_lines)%0A
@@ -1633,16 +1633,28 @@
IELD)%5B1%5D
+%0A
for val
@@ -1670,69 +1670,61 @@
in
-value_
+(
line
-s_
+.
split
-%0A %7D%0A if len(values) == 0:%0A
+('%5Ct') for line in value_lines)%0A
+%7D%0A
@@ -1738,52 +1738,43 @@
urn
-None%0A else:%0A return values
+values if len(values) %3E 0 else None
%0A%0A
|
0c825167e814f6a62d7c50decc6877e401ce4e26
|
Fix launch_master.py
|
scripts/launch_master.py
|
scripts/launch_master.py
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Repeatedly launch the build master in an infinite loop, updating the source
between launches. This script is intended to be run at boot time. """
import os
import subprocess
import sys
import time
# File where the PID of the running master is stored
PID_FILE = 'twistd.pid'
# Maximum time (in seconds) to wait for PID_FILE to be written after the master
# is launched. If PID_FILE is not written by then, we assume an error occurred.
PID_TIMEOUT = 10.0
def _SyncSources():
""" Run 'gclient sync' on the buildbot sources. """
cmd = ['gclient', 'sync']
if not subprocess.call(cmd) == 0:
# Don't throw an exception or quit, since we want to keep the master running
print 'WARNING: Failed to update sources.'
def _LaunchMaster(private=False):
""" Launch the build master and return its PID.
private: boolean designating whether or not to set the master as private.
"""
# Make sure the master is stopped.
cmd = ['make', 'stop']
kill_proc = subprocess.Popen(cmd)
kill_proc.wait()
# Launch the master
cmd = ['make', 'start']
env = dict(os.environ)
env['PRIVATE_MASTER'] = 'True' if private else 'False'
launch_proc = subprocess.Popen(cmd, env=env)
launch_proc.wait()
# Wait for the pid file to be written, then use it to obtain the master's pid
pid_file = None
start_time = time.clock()
while not pid_file:
try:
pid_file = open(PID_FILE)
except:
if time.clock() - start_time > PID_TIMEOUT:
raise Exception('Failed to launch master.')
time.sleep(1)
pid = str(pid_file.read()).rstrip()
pid_file.close
return pid
# TODO(borenet): Right now, this only works on Linux. Make it cross-platform.
def _BlockUntilFinished(pid):
""" Blocks until the given process has finished.
pid: PID of the process to wait for
"""
running = True
while running:
time.sleep(1)
poll_proc = subprocess.Popen(['cat', '/proc/%s/stat' % pid],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
poll_proc.wait()
running = poll_proc.poll() == 0
def _UpdateAndRunMaster(private=False):
""" Update the buildbot sources and run the build master, blocking until it
finishes.
private: boolean designating whether or not to set the master as private.
"""
_SyncSources()
pid = _LaunchMaster(private=private)
print 'Launched build master with PID: %s' % pid
_BlockUntilFinished(pid)
print 'Master process has finished.'
def main():
""" Alternately sync the buildbot source and launch the build master. """
private = '--private' in sys.argv
loop = '--loop' in sys.argv
master_path = os.path.join(os.path.split(os.path.abspath(__file__))[0],
os.pardir, 'master')
os.chdir(master_path)
_UpdateAndRunMaster(private=private)
while loop:
print 'Restarting the build master.'
_UpdateAndRunMaster(private=private)
if '__main__' == __name__:
sys.exit(main())
|
Python
| 0.000002
|
@@ -724,24 +724,157 @@
%22%0A
-cmd = %5B'
+path_to_gclient = os.path.join(os.pardir, os.pardir, 'depot_tools',%0A 'gclient.py')%0A cmd = %5B'python', path_to_
gclient
-'
, 's
|
328c521e49659cb9b7d0e89cfcb1c7a22b4dbd21
|
Make more flexible
|
scripts/matchToNative.py
|
scripts/matchToNative.py
|
#!/usr/bin/env ccp4-python
import os
import sys
root = os.sep.join( os.path.abspath(__file__).split( os.sep )[:-2] )
sys.path.insert( 0, os.path.join( root, "python" ) )
sys.path.insert( 0, os.path.join( root, "scripts" ) )
# We use MRBUMP's MTZ_parse
sys.path.append(os.path.join(os.environ["CCP4"], "share", "mrbump", "include", "file_info")) # For MTZ_parse
import ample_util
import csymmatch
import phenixer
import MTZ_parse
def run( nativePdb, nativeMtz, mrPdbs ):
# Get the labels from the MTZ file
print "Parsing MTZ file {0} to determine column labels".format( nativeMtz )
mtzp = MTZ_parse.MTZ_parse()
mtzp.run_mtzdmp( nativeMtz )
# Generate map from
print "Generating map from: {0} {1}".format( nativeMtz,
nativePdb )
mtzMap = phenixer.generateMap( nativeMtz,
nativePdb,
FP= mtzp.F,
SIGFP=mtzp.SIGF,
FREE=mtzp.FreeR_flag,
)
for mrPdb in mrPdbs:
print "Searching for origin shift using: {0} {1}".format( mtzMap, mrPdb )
origin = phenixer.ccmtzOrigin( mtzMap, mrPdb )
# offset.pdb is the mrPdb moved onto the new origin
offsetPdb = "offset.pdb"
print "Found origin: {0}\nOffset pdb is: {1}".format( origin, offsetPdb )
# Run csymmatch to map offsetted pdb onto native
csymmPdb = ample_util.filename_append( filename=mrPdb, astr="csymmatch", directory=os.getcwd() )
print "Running csymmatch to wrap {0} onto native {1}".format( offsetPdb, nativePdb )
csymmatch.Csymmatch().run( refPdb=nativePdb, inPdb=offsetPdb, outPdb=csymmPdb, originHand=False )
print "Matched PDB is: {0}".format( csymmPdb )
return
if __name__ == "__main__":
assert len(sys.argv) >= 4,"Usage: {0} native.pdb native.mtz molecular_replacement.pdb[s]".format( sys.argv[0] )
nativePdb = sys.argv[1]
nativeMtz = sys.argv[2]
mrPdbs = sys.argv[3:]
run( nativePdb, nativeMtz, mrPdbs )
|
Python
| 0.000049
|
@@ -409,17 +409,16 @@
henixer%0A
-%0A
import M
@@ -460,279 +460,225 @@
eMtz
-, mrPdbs ):%0A %0A # Get the labels from the MTZ file%0A print %22Parsing MTZ file %7B0%7D to determine column labels%22.format( nativeMtz )%0A mtzp = MTZ_parse.MTZ_parse()%0A mtzp.run_mtzdmp( nativeMtz )%0A %0A # Generate map from %0A print %22Generating map from
+=None, nativeMap=None, mrPdbs, outDir=None ):%0A%0A # Find out where we're running from%0A if outDir is not None:%0A if not os.path.isdir(outDir):%0A raise RuntimeError,%22Cannot find output directory
: %7B0%7D
- %7B1%7D
%22.fo
@@ -686,54 +686,100 @@
mat(
- nativeMtz,%0A
+outDir)%0A outDir=os.path.abspath(outDir)%0A else:%0A outDir=os.getcwd()%0A
@@ -778,57 +778,55 @@
+%0A
- nativePdb )%0A mtzMap = phenixer.
+if nativeMap is None:%0A nativeMap =
gene
@@ -833,17 +833,26 @@
rateMap(
-
+nativePdb,
nativeMt
@@ -856,55 +856,52 @@
eMtz
-,
+)
%0A
-
+%0A if not os.path.isfile(
native
-Pdb,
+Map):
%0A
@@ -909,191 +909,82 @@
- FP= mtzp.F,%0A SIGFP=mtzp.SIGF,%0A FREE=mtzp.FreeR_flag,%0A )
+raise RuntimeError,%22Cannot find nativeMap: %7B0%7D%22.format(nativeMap)%0A
%0A
@@ -1076,19 +1076,22 @@
format(
-mtz
+native
Map, mrP
@@ -1135,19 +1135,22 @@
Origin(
-mtz
+native
Map, mrP
@@ -1496,26 +1496,21 @@
ectory=o
-s.getcwd()
+utDir
)%0A
@@ -1782,17 +1782,673 @@
return
-
+%0A%0Adef generateMap(nativePdb,nativeMtz):%0A %0A # Get the labels from the MTZ file%0A print %22Parsing MTZ file %7B0%7D to determine column labels%22.format( nativeMtz )%0A mtzp = MTZ_parse.MTZ_parse()%0A mtzp.run_mtzdmp( nativeMtz )%0A # Generate map from %0A print %22Generating map from: %7B0%7D %7B1%7D%22.format( nativeMtz,%0A nativePdb )%0A return phenixer.generateMap( nativeMtz,%0A nativePdb,%0A FP= mtzp.F,%0A SIGFP=mtzp.SIGF,%0A FREE=mtzp.FreeR_flag,%0A )
%0A%0Aif __n
|
90d42e80690a80a7099142b6b024c8d3b0f78075
|
Fix DelayedCall cancellation in remind plugin on reload
|
plugins/remind/plugin.py
|
plugins/remind/plugin.py
|
from twisted.internet import error, reactor
from cardinal.decorators import command, help
class RemindPlugin:
def __init__(self):
self.call_ids = []
@command('remind')
@help("Sends a reminder after a set time.")
@help("Syntax: .remind <minutes> <message>")
def remind(self, cardinal, user, channel, msg):
message = msg.split(None, 2)
if len(message) < 3:
cardinal.sendMsg(channel, "Syntax: .remind <minutes> <message>")
return
self.call_ids.append(reactor.callLater(60 * int(message[1]),
cardinal.sendMsg, user.nick, message[2]))
cardinal.sendMsg(channel,
"%s: You will be reminded in %d minutes." %
(user.nick, int(message[1])))
def close(self):
for call_id in call_ids:
try:
call_id.cancel()
except error.AlreadyCancelled:
pass
entrypoint = RemindPlugin
|
Python
| 0
|
@@ -583,35 +583,32 @@
-
cardinal.sendMsg
@@ -837,16 +837,21 @@
l_id in
+self.
call_ids
|
e6fb45d2e4b28db8e7d638f88e71ab3bc2720f57
|
Fix imports for django 1.8
|
src/django_babel_underscore/__init__.py
|
src/django_babel_underscore/__init__.py
|
# -*- coding: utf-8 -*-
from django.template import Lexer, TOKEN_TEXT
from django.utils.encoding import force_text
from django_babel.extract import extract_django
from django.utils import six
from markey import underscore
from markey.tools import TokenStream
from markey.machine import tokenize, parse_arguments
def extract(fileobj, keywords, comment_tags, options):
"""Extracts translation messages from underscore template files.
This method does also extract django templates. If a template does not
contain any django translation tags we always fallback to underscore extraction.
This is a plugin to Babel, written according to
http://babel.pocoo.org/docs/messages/#writing-extraction-methods
:param fileobj: the file-like object the messages should be extracted
from
:param keywords: a list of keywords (i.e. function names) that should
be recognized as translation functions
:param comment_tags: a list of translator tags to search for and
include in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)``
tuples
:rtype: ``iterator``
"""
encoding = options.get('encoding', 'utf-8')
original_position = fileobj.tell()
text = fileobj.read().decode(encoding)
# TODO: There must be another way. Find a way to fix the ordering
# in babel directly!
vars = [token.token_type != TOKEN_TEXT for token in Lexer(text, None).tokenize()]
could_be_django = any(list(vars))
if could_be_django:
fileobj.seek(original_position)
iterator = extract_django(fileobj, keywords, comment_tags, options)
for lineno, funcname, message, comments in iterator:
yield lineno, funcname, message, comments
else:
# Underscore template extraction
comments = []
fileobj.seek(original_position)
for lineno, line in enumerate(fileobj, 1):
funcname = None
stream = TokenStream.from_tuple_iter(tokenize(line, underscore.rules))
while not stream.eof:
if stream.current.type == 'gettext_begin':
stream.expect('gettext_begin')
funcname = stream.expect('func_name').value
args, kwargs = parse_arguments(stream, 'gettext_end')
strings = []
for arg in args:
try:
arg = int(arg)
except ValueError:
pass
if isinstance(arg, six.string_types):
strings.append(force_text(arg))
else:
strings.append(None)
for arg in kwargs:
strings.append(None)
if len(strings) == 1:
strings = strings[0]
else:
strings = tuple(strings)
yield lineno, funcname, strings, []
stream.next()
|
Python
| 0.000009
|
@@ -17,16 +17,129 @@
f-8 -*-%0A
+import django%0A%0Aif django.VERSION%5B:2%5D %3E= (1, 8):%0A from django.template.base import Lexer, TOKEN_TEXT%0Aelse:%0A
from dja
@@ -176,16 +176,17 @@
EN_TEXT%0A
+%0A
from dja
|
67b12cfb4bac0f9fa8665f6cb82b9527fd821b52
|
add slave lag time
|
newrelic_plugin_agent/plugins/redis.py
|
newrelic_plugin_agent/plugins/redis.py
|
"""
Redis plugin polls Redis for stats
"""
import logging
from newrelic_plugin_agent.plugins import base
LOGGER = logging.getLogger(__name__)
class Redis(base.SocketStatsPlugin):
GUID = 'com.meetme.newrelic_redis_agent'
DEFAULT_PORT = 6379
def add_datapoints(self, stats):
"""Add all of the data points for a node
:param dict stats: all of the nodes
"""
self.add_gauge_value('Clients/Blocked', '',
stats.get('blocked_clients', 0))
self.add_gauge_value('Clients/Connected', '',
stats.get('connected_clients', 0))
self.add_gauge_value('Slaves/Connected', '',
stats.get('connected_slaves', 0))
# must happen before saving the new values
# but only if we have the previous values
if 'Keys/Hit' in self.derive_last_interval.keys() and 'Keys/Missed' in self.derive_last_interval.keys():
prev_hits = self.derive_last_interval['Keys/Hit']
prev_misses = self.derive_last_interval['Keys/Missed']
# hits and misses since the last measure
hits = stats.get('keyspace_hits', 0) - prev_hits
misses = stats.get('keyspace_misses', 0) - prev_misses
# total queries since the last measure
total = hits + misses
if total > 0:
self.add_gauge_value('Hits Ratio', '', 100 * hits / total)
self.add_derive_value('Keys/Evicted', '',
stats.get('evicted_keys', 0))
self.add_derive_value('Keys/Expired', '',
stats.get('expired_keys', 0))
self.add_derive_value('Keys/Hit', '',
stats.get('keyspace_hits', 0))
self.add_derive_value('Keys/Missed', '',
stats.get('keyspace_misses', 0))
self.add_derive_value('Commands Processed', '',
stats.get('total_commands_processed', 0))
self.add_derive_value('Connections', '',
stats.get('total_connections_received', 0))
self.add_derive_value('Changes Since Last Save', '',
stats.get('changes_since_last_save', 0))
self.add_gauge_value('Pubsub/Commands', '',
stats.get('pubsub_commands', 0))
self.add_gauge_value('Pubsub/Patterns', '',
stats.get('pubsub_patterns', 0))
self.add_derive_value('CPU/User/Self', 'sec',
stats.get('used_cpu_user', 0))
self.add_derive_value('CPU/System/Self', 'sec',
stats.get('used_cpu_sys', 0))
self.add_derive_value('CPU/User/Children', 'sec',
stats.get('used_cpu_user_childrens', 0))
self.add_derive_value('CPU/System/Children', 'sec',
stats.get('used_cpu_sys_childrens', 0))
self.add_gauge_value('Memory Use', 'MB',
stats.get('used_memory', 0) / 1048576,
max_val=stats.get('used_memory_peak',
0) / 1048576)
self.add_gauge_value('Memory Fragmentation', 'ratio',
stats.get('mem_fragmentation_ratio', 0))
keys, expires = 0, 0
for db in range(0, self.config.get('db_count', 16)):
db_stats = stats.get('db%i' % db, dict())
self.add_gauge_value('DB/%s/Expires' % db, '',
db_stats.get('expires', 0))
self.add_gauge_value('DB/%s/Keys' % db, '',
db_stats.get('keys', 0))
keys += db_stats.get('keys', 0)
expires += db_stats.get('expires', 0)
self.add_gauge_value('Keys/Total', '', keys)
self.add_gauge_value('Keys/Will Expire', '', expires)
def connect(self):
"""Top level interface to create a socket and connect it to the
memcached daemon.
:rtype: socket
"""
connection = super(Redis, self).connect()
if self.config.get('password'):
connection.send("*2\r\n$4\r\nAUTH\r\n$%i\r\n%s\r\n" %
(len(self.config['password']),
self.config['password']))
buffer_value = connection.recv(self.SOCKET_RECV_MAX)
if buffer_value == '+OK\r\n':
return connection
LOGGER.error('Authentication error: %s', buffer_value[4:].strip())
return None
return connection
def fetch_data(self, connection):
"""Loop in and read in all the data until we have received it all.
:param socket connection: The connection
:rtype: dict
"""
connection.send("*0\r\ninfo\r\n")
# Read in the first line $1437
buffer_value = connection.recv(self.SOCKET_RECV_MAX)
lines = buffer_value.split('\r\n')
if lines[0][0] == '$':
byte_size = int(lines[0][1:].strip())
else:
return None
while len(buffer_value) < byte_size:
buffer_value += connection.recv(self.SOCKET_RECV_MAX)
lines = buffer_value.split('\r\n')
values = dict()
for line in lines:
if ':' in line:
key, value = line.strip().split(':')
if key[:2] == 'db':
values[key] = dict()
subvalues = value.split(',')
for temp in subvalues:
subvalue = temp.split('=')
value = subvalue[-1]
try:
values[key][subvalue[0]] = int(value)
except ValueError:
try:
values[key][subvalue[0]] = float(value)
except ValueError:
values[key][subvalue[0]] = value
continue
try:
values[key] = int(value)
except ValueError:
try:
values[key] = float(value)
except ValueError:
values[key] = value
return values
|
Python
| 0
|
@@ -732,32 +732,172 @@
ted_slaves', 0))
+%0A self.add_gauge_value('Last master IO sync (lag time)', '',%0A stats.get('master_last_io_seconds_ago', 0))
%0A%0A # must
|
a7d261b9049eb2daa79d4e7fc40cc665650f014e
|
Test print.
|
ninja_shogun/scripts/shogun_bugbase.py
|
ninja_shogun/scripts/shogun_bugbase.py
|
#!/usr/bin/env python
import click
import os
from collections import Counter, defaultdict
import csv
import pandas as pd
import pickle
from ninja_utils.utils import verify_make_dir
from ninja_shogun.wrappers import utree_search
def build_img_map(infile: str):
gg2img_oid = defaultdict(int)
df = pd.DataFrame.from_csv(infile)
for row in df.iterrows():
gg2img_oid[row] = row
return gg2img_oid
@click.command()
@click.option('-i', '--input', type=click.Path(), default=os.getcwd(), help='Directory containing the input FASTA files with ".fna" extensions (default=cwd)')
@click.option('-o', '--output', type=click.Path(), default=os.path.join(os.getcwd(), 'shogun_bugbase'), help='Output directory for the results')
@click.option('-u', '--img_database_folder', type=click.Path(), help='Location of the BugBase Database folder.')
def shogun_bugbase(input, output, img_database_folder):
verify_make_dir(output)
utree_indx = os.path.join(img_database_folder, 'img.genes.ctr')
with open(os.path.join(img_database_folder, 'img_map.pkl'), 'rb') as inf:
gg2img_oid = pickle.load(inf)
basenames = [os.path.basename(filename)[:-4] for filename in os.listdir(input) if filename.endswith('.fna')]
for basename in basenames:
fna_file = os.path.join(input, basename + '.fna')
tsv_outf = os.path.join(output, basename + '.utree.tsv')
if not os.path.isfile(tsv_outf):
print(utree_search(utree_indx, fna_file, tsv_outf))
else:
print("Found the output file \"%s\". Skipping the alignment phase for this file." % tsv_outf)
counts = []
utree_outf = os.path.join(output, 'taxon_counts.txt')
# Indexing for emblalmer
if not os.path.isfile(utree_outf):
for basename in basenames:
lcas = []
utree_tsv = os.path.join(output, basename + '.utree.tsv')
with open(utree_tsv) as inf:
tsv_parser = csv.reader(inf, delimiter='\t')
for line in tsv_parser:
if line[1]:
taxon = line[1].replace('; ', ';')
if taxon in gg2img_oid:
lcas.append(gg2img_oid[taxon])
counts.append(Counter(filter(None, lcas)))
df = pd.DataFrame(counts, index=basenames)
df.T.to_csv(os.path.join(output, 'taxa_counts.txt'), sep='\t')
if __name__ == '__main__':
shogun_bugbase()
|
Python
| 0
|
@@ -2114,16 +2114,53 @@
', ';')%0A
+ print(taxon)%0A
|
90e01a0e8ef2ea25456e49ad8f2cfa6e7d79b6b9
|
add credit
|
DataSources/raw/mastodon/Scraper.py
|
DataSources/raw/mastodon/Scraper.py
|
import sys
sys.path.append('c:/program files/anaconda3/lib/site-packages')
import codecs
import datetime
import json
from mastodon.Mastodon import Mastodon
from mastodon.streaming import StreamListener, MalformedEventError
__all__ = ['Mastodon', 'StreamListener', 'MalformedEventError']
## you need to create an app
# Mastodon.create_app( 'softwerxpy', to_file = 'pytooter_clientcred.secret' )
## you need to create a secret key
# mastodon = Mastodon(client_id = 'pytooter_clientcred.secret')
# mastodon.log_in('<login>','<password>', to_file = 'pytooter_usercred.secret' )
mastodon = Mastodon(
client_id = 'pytooter_clientcred.secret',
access_token = 'pytooter_usercred.secret'
)
#mastodon.toot('my first Toot!')
class Listener(StreamListener):
def __init__(self):
self.updates = []
self.notifications = []
self.deletes = []
self.heartbeats = 0
print("_init_")
def on_update(self, status):
self.updates.append(status)
print("on_update")
filename = 'json_{:%Y%m%d%H%M%S%f}'.format(datetime.datetime.now())
with open('data/' + filename + '.json', 'w') as outfile:
json.dump(status, outfile)
def on_notification(self, notification):
self.notifications.append(notification)
print("on_notification")
def on_delete(self, status_id):
self.deletes.append(status_id)
print("on_delete")
def handle_heartbeat(self):
self.heartbeats += 1
print("handle_heartbeat")
def handle_stream_(self, lines):
'''Test helper to avoid littering all tests with six.b().'''
print("handle_stream_")
return self.handle_stream(map(six.b, lines))
def test_heartbeat():
listener = Listener()
listener.handle_stream_([':one', ':two'])
assert listener.heartbeats == 2
def test_status():
listener = Listener()
listener.handle_stream_([
'event: update',
'data: {"foo": "bar"}',
'',
])
assert listener.updates == [{"foo": "bar"}]
def test_notification():
listener = Listener()
listener.handle_stream_([
'event: notification',
'data: {"foo": "bar"}',
'',
])
assert listener.notifications == [{"foo": "bar"}]
def test_delete():
listener = Listener()
listener.handle_stream_([
'event: delete',
'data: 123',
'',
])
assert listener.deletes == [123]
l = Listener()
mastodon.public_stream(l)
input("Press Enter to continue...")
|
Python
| 0
|
@@ -1,12 +1,102 @@
+# uses https://github.com/halcy/Mastodon.py%0A# install by typing pip install Mastodon.py %0A%0A
import sys%0As
|
7b8683f1798659c7fb7d5aa14a762518c60f69ad
|
fix test cases
|
learntools/intro_to_programming/ex3.py
|
learntools/intro_to_programming/ex3.py
|
from learntools.core import *
def get_expected_cost(beds, baths, has_basement):
value = 80000 + 30000 * beds + 10000 * baths + 40000 * has_basement
return value
class FloatToInt(ThoughtExperiment):
_solution = ("Negative floats are always rounded UP to the closest integer (for instance, "
"both -1.1 and -1.9 are rounded up to -1). Positive floats are rounded either "
"UP or DOWN, depending on whether the preceding or following integer is closer "
"(for instance, 1.1 is rounded down to 1, and 1.9 is rounded up to 2). In the "
"case that the float is equidistant from both integers, it is rounded up "
"(for instance, 1.5 is rounded up to 2).")
class MultiplyBooleans(ThoughtExperiment):
_solution = ("When you multiple an integer or float by a boolean with value `True`, it just returns "
"that same integer or float (and is equivalent to multiplying by 1). If you "
"multiply an integer or float by a boolean with value `False`, it always returns 0. This "
"is true for both positive and negative numbers. If you multiply a string by a boolean with "
"value `True`, it just returns that same string. And if you multiply a string by a boolean "
"with value `False`, it returns an empty string (or a string with length zero).")
class EstimateHouseValueBool(FunctionProblem):
_var = 'get_expected_cost'
_test_cases = [
((1, 1, False), 120000),
((2, 1, True), 190000),
((3, 2, True), 230000),
((4, 5, False), 250000),
]
_hint = ("The variable `has_basement` is either `True` or `False`. What happens when you "
"multiply it by 40000 (the value of a basement)? Refer to the previous question "
"if you are unsure.")
_solution = CS(
"""def get_expected_cost(beds, baths, has_basement):
value = 80000 + 30000 * beds + 10000 * baths + 40000 * has_basement
return value
""")
class AddingBooleans(ThoughtExperiment):
_solution = "When you add booleans, adding `False` is equivalent to adding 0, and adding `True` is equivalent to adding 1."
class CustomEngravings(FunctionProblem):
_var = 'cost_of_project'
_test_cases = [
(("Charlie+Denver", True), 240),
(("08/10/2000", False), 120),
((3, 2, True), 230000),
((4, 5, False), 250000),
]
_hint = ("There are two options - either the project uses solid gold or does not. With this in mind, you can structure your solution like this: `cost = solid_gold * ____ + (not_solid_gold) * ____`. You need to figure out how to fill in the blanks.")
_solution = CS(
"""def cost_of_project(engraving, solid_gold):
cost = solid_gold * (100 + 10 * len(engraving)) + (not solid_gold) * (50 + 7 * len(engraving))
return cost
""")
qvars = bind_exercises(globals(), [
FloatToInt,
MultiplyBooleans,
EstimateHouseValueBool,
AddingBooleans,
CustomEngravings
],
var_format='q{n}',
)
__all__ = list(qvars)
|
Python
| 0.000013
|
@@ -2402,36 +2402,40 @@
120),%0A ((
-3, 2
+%22Adrian%22
, True), 230000)
@@ -2419,37 +2419,34 @@
Adrian%22, True),
-23000
+16
0),%0A ((4,
@@ -2435,36 +2435,37 @@
160),%0A ((
-4, 5
+%22Ana%22
, False), 250000
@@ -2450,38 +2450,34 @@
(%22Ana%22, False),
-250000
+71
),%0A %5D%0A _hi
|
d6c81135077867283738bcf9cceb0ce8198808d6
|
Enable SSL verify for prod
|
unicornclient/config.py
|
unicornclient/config.py
|
import os
import logging
ENV = os.getenv('PYTHONENV', 'prod')
LOG_LEVEL = logging.DEBUG
LOG_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
HOST = 'localhost'
PORT = 8080
SSL_VERIFY = False
DEFAULT_ROUTINES = ['auth', 'ping', 'status', 'system']
if ENV == 'prod':
LOG_LEVEL = logging.INFO
HOST = 'unicorn.ahst.fr'
#SSL_VERIFY = True
|
Python
| 0
|
@@ -334,9 +334,8 @@
-#
SSL_
|
9775ca470e423636880027a39f826452b7ce8d7a
|
Add the RoutePoint class. It didn't inherit from Django models
|
moveon/models.py
|
moveon/models.py
|
from django.db import models
class Company(models.Model):
name = models.TextField()
code = models.TextField()
url = models.URLField()
logo = models.TextField()
def __str__(self):
return self.name
class Transport(models.Model):
name = models.TextField()
def __str__(self):
return self.name
class Station(models.Model):
osmid = models.IntegerField(primary_key=True, unique=True)
latitude = models.DecimalField(max_digits=10, decimal_places=7)
longitude = models.DecimalField(max_digits=10, decimal_places=7)
code = models.TextField()
name = models.TextField()
available = models.BooleanField()
adapted = models.BooleanField()
shelter = models.BooleanField()
bench = models.BooleanField()
def __str__(self):
return self.name
class Line(models.Model):
osmid = models.IntegerField(primary_key=True, unique=True)
company = models.ForeignKey(Company)
transport = models.ForeignKey(Transport)
code = models.TextField()
name = models.TextField()
colour = models.CharField(max_length=7)
stations = models.ManyToManyField(Station)
def __str__(self):
return self.code + ' ' + self.name
class Route(models.Model):
osmid = models.IntegerField(primary_key=True, unique=True)
line = models.ForeignKey(Line)
name = models.TextField()
station_from = models.TextField()
station_to = models.TextField()
def __str__(self):
return self.name
class Time(models.Model):
moment = models.BigIntegerField(primary_key=True, unique=True)
def __str__(self):
return self.moment
class TimeTable(models.Model):
monday = models.BooleanField()
tuesday = models.BooleanField()
wednesday = models.BooleanField()
thursday = models.BooleanField()
friday = models.BooleanField()
saturday = models.BooleanField()
sunday = models.BooleanField()
holiday = models.BooleanField()
start = models.DateField()
end = models.DateField()
time_table = models.ManyToManyField(Time)
class Node(models.Model):
osmid = models.IntegerField(primary_key=True, unique=True)
latitude = models.DecimalField(max_digits=10, decimal_places=7)
longitude = models.DecimalField(max_digits=10, decimal_places=7)
near_station = models.ForeignKey(Station, null=True)
def __str__(self):
return self.name
class Stretch(models.Model):
route = models.ForeignKey(Route)
time_table = models.ManyToManyField(TimeTable)
class RoutePoint():
node = models.ForeignKey(Node)
stretch = models.ForeignKey(Stretch)
order = models.IntegerField()
time_from_beggining = models.BigIntegerField()
|
Python
| 0
|
@@ -2571,16 +2571,28 @@
tePoint(
+models.Model
):%0A n
|
5741d373ce42e7fbf7f888e4c220b033d21567fb
|
move default iembot listen ports
|
iembot.tac
|
iembot.tac
|
# Twisted Bits
from twisted.application import service, internet
from twisted.web import server
from twisted.enterprise import adbapi
# Base Python
import json
# Local Import
import iemchatbot
dbconfig = json.load(open('settings.json'))
application = service.Application("Public IEMBOT")
serviceCollection = service.IServiceCollection(application)
# This provides DictCursors!
dbpool = adbapi.ConnectionPool("pyiem.twistedpg", cp_reconnect=True,
database=dbconfig.get('databaserw').get('openfire'),
host=dbconfig.get('databaserw').get('host'),
password=dbconfig.get('databaserw').get('password'),
user=dbconfig.get('databaserw').get('user') )
jabber = iemchatbot.JabberClient(dbpool)
defer = dbpool.runQuery("select propname, propvalue from properties")
defer.addCallback(jabber.fire_client_with_config, serviceCollection)
# 2. JSON channel requests
json = server.Site( iemchatbot.JSONResource(jabber), logPath='/dev/null' )
x = internet.TCPServer(8003, json)
x.setServiceParent(serviceCollection)
# 3. Answer requests for RSS feeds of the bot logs
rss = server.Site( iemchatbot.RootResource(), logPath="/dev/null" )
r = internet.TCPServer(8004, rss)
r.setServiceParent(serviceCollection)
# END
|
Python
| 0
|
@@ -1062,17 +1062,17 @@
PServer(
-8
+9
003, jso
@@ -1259,9 +1259,9 @@
ver(
-8
+9
004,
|
f32affa563735a64466015ed543cc384531efa85
|
Fix missing trim of output string
|
modules/contrib/shell.py
|
modules/contrib/shell.py
|
# pylint: disable=C0111,R0903,W1401
""" Execute command in shell and print result
Few command examples:
'ping -c 1 1.1.1.1 | grep -Po '(?<=time=)\d+(\.\d+)? ms''
'echo 'BTC=$(curl -s rate.sx/1BTC | grep -Po \'^\d+\')USD''
'curl -s https://wttr.in/London?format=%l+%t+%h+%w'
'pip3 freeze | wc -l'
'any_custom_script.sh | grep arguments'
Parameters:
* shell.command: Command to execute
Use single parentheses if evaluating anything inside (sh-style)
For example shell.command='echo $(date +'%H:%M:%S')'
But NOT shell.command='echo $(date +'%H:%M:%S')'
Second one will be evaluated only once at startup
* shell.interval: Update interval in seconds
(defaults to 1s == every bumblebee-status update)
* shell.async: Run update in async mode. Won't run next thread if
previous one didn't finished yet. Useful for long
running scripts to avoid bumblebee-status freezes
(defaults to False)
"""
import os
import subprocess
import threading
import core.module
import core.widget
import core.input
import util.format
import util.cli
class Module(core.module.Module):
def __init__(self, config):
super().__init__(config, core.widget.Widget(self.get_output))
self.__command = self.parameter('command')
self.__async = util.format.asbool(self.parameter('async'))
if self.__async:
self.__output = 'please wait...'
self.__current_thread = threading.Thread()
# LMB and RMB will update output regardless of timer
core.input.register(self, button=core.input.LEFT_MOUSE, cmd=self.update)
core.input.register(self, button=core.input.RIGHT_MOUSE, cmd=self.update)
def set_output(self, value):
self.__output = value
def get_output(self, _):
return self.__output
def update(self):
# if requested then run not async version and just execute command in this thread
if not self.__async:
self.__output = util.cli.execute(self.__command, ignore_errors=True)
return
# if previous thread didn't end yet then don't do anything
if self.__current_thread.is_alive():
return
# spawn new thread to execute command and pass callback method to get output from it
self.__current_thread = threading.Thread(
target=lambda obj, cmd: obj.set_output(util.cli.execute(cmd, ignore_errors=True)),
args=(self, self.__command)
)
self.__current_thread.start()
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
|
Python
| 0.0005
|
@@ -1418,16 +1418,48 @@
command'
+, 'echo %22no command configured%22'
)%0A
@@ -2215,16 +2215,24 @@
rs=True)
+.strip()
%0A
@@ -2703,16 +2703,123 @@
tart()%0A%0A
+ def state(self, _):%0A if self.__output == 'no command configured':%0A return 'warning'%0A%0A
# vim: t
|
a2f8e21ed7ed5d1df756129f3b87fcdb29628bf3
|
Fix typo in filename function name
|
ifs/lib.py
|
ifs/lib.py
|
import glob
import importlib
import os
import re
import subprocess
import sys
import urllib
import click
class Cmd(object):
returncode = None
cmd = None
output = None
def __init__(self, cmd):
cmd = "set -e\n%s" % cmd
self.cmd = cmd
def execute(self):
try:
self.output = subprocess.check_output(self.cmd, stderr=subprocess.STDOUT, shell=True)
self.returncode = 0
return True
except subprocess.CalledProcessError as e:
self.output = e.output
self.returncode = e.returncode
return False
@classmethod
def run(cls, cmd):
cmdo = cls(cmd)
cmdo.execute()
return cmdo
def get_download_url(app, version=None):
if not version:
version = app.version
if hasattr(app, 'download_url') and app.download_url:
return app.download_url.replace('VERSION', version)
else:
return None
def get_download_fileame(url, target):
if os.path.isdir(target):
target += '/' + url.split('/')[-1]
def download(url, target):
"""
Target should be an absolute path to the location you want to download the
file.
"""
# Add progress bar via:
# http://stackoverflow.com/a/22776/317916
if not url:
return None
urllib.urlretrieve(url, target)
return target
def list_apps():
path = os.path.dirname(os.path.realpath(__file__))
files = glob.glob(path + '/source/*.py')
results = []
for f in files:
i = os.path.basename(f).split('.')[0]
if i not in ['__init__']:
results.append(i)
return results
def load_app(app_name):
try:
mod = importlib.import_module('ifs.source.%s' % app_name, '..source')
except ImportError as e:
mod = None
return mod
def match_semver(string):
return
def check_version(app):
# Call app.version_cmd to check which version is currently installed
cmd = Cmd.run(app.version_cmd)
if hasattr(app, 'version_re'):
r = re.compile(app.version_re)
else:
r = re.compile('(\d+\.\d+\.\d+)')
matches = r.search(cmd.output)
if matches is None:
return None
else:
return matches.group(1)
def app_info(app):
info = {
"default_version": app.version,
"current_version": check_version(app) or 'Not Installed',
"dependencies": app.depends,
"download_url": app.download_url,
}
return info
def cmd_install_deps(app):
if hasattr(app, 'depends') and app.depends:
return 'apt-get install -y %s' % ' '.join(app.depends)
else:
return None
def cmd_install_app(app, version=None):
cmd = app.install_script
if not version:
version = app.version
return cmd.replace('VERSION', version)
def install(app, version=None, force=False):
if not version:
version = app.version
# Create temp directory
target='/tmp/ifs-%s-%s' % (app.__name__[11:], app.version)
if not os.path.exists(target):
os.mkdir(target)
os.chdir(target)
# Download source
dl_url = get_download_url(app, version)
dl_file = get_download_fileame(dl_url, target)
if dl_file and not os.path.exists(dl_file):
click.echo('Downloading %s' % dl_url)
if download(dl_url, dl_file):
click.echo('Downloaded %s' % dl_url)
# Install dependencies
depc = cmd_install_deps(app)
if depc:
click.echo('Installing dependencies: %s' % depc)
deps = Cmd.run(depc)
if deps.returncode > 0:
return deps
click.echo('Installing from source')
install = Cmd.run(cmd_install_app(app, version))
return install
|
Python
| 0.998527
|
@@ -973,24 +973,25 @@
ownload_file
+n
ame(url, tar
@@ -1003,16 +1003,24 @@
%0A if
+url and
os.path.
@@ -1076,16 +1076,32 @@
'/')%5B-1%5D
+%0A return None
%0A%0Adef do
@@ -3206,16 +3206,17 @@
oad_file
+n
ame(dl_u
|
9b3cd3eb39ac3d3e8d0e91de3860f21996ab51aa
|
fix inv
|
Cogs/utils.py
|
Cogs/utils.py
|
from discord.ext import commands
import discord
import os
import asyncio
import inspect
import textwrap
import tokage
class Utilities:
def __init__(self, bot):
self.bot = bot
@commands.command(hidden=True, enabled=False)
async def setavatar(self, ctx, picture):
path = os.path.join("Bot Pics", picture)
try:
with open('%s' % path, 'rb') as f:
await ctx.bot.user.edit(avatar=f.read())
await ctx.send(":ok_hand: Avatar changed to %s" % picture.split(".")[0])
except Exception:
await ctx.send(":exclamation: File not found!")
@commands.command(hidden=True, aliases=["eval", "evaluate"])
@commands.is_owner()
async def debug(self, ctx, *, code: str):
"""Evaluates code."""
code = code.strip('`')
python = '```py\n{}\n```'
result = None
env = {
'bot': self.bot,
'ctx': ctx,
'message': ctx.message,
'server': ctx.guild,
'guild': ctx.guild,
'channel': ctx.channel,
'author': ctx.author,
'history': await ctx.channel.history().flatten(),
't_client': tokage.Client()
}
env.update(globals())
try:
result = eval(code, env)
if inspect.isawaitable(result):
result = await result
except Exception as e:
await ctx.send(python.format(type(e).__name__ + ': ' + str(e)))
return
await ctx.send(python.format(result))
@commands.command(name="exec", hidden=True)
@commands.is_owner()
async def execute(self, ctx, *, code):
if code.startswith("```") and code.endswith("```"):
code = code.strip("```")
if code.startswith("py\n"):
code = code[3:]
env = {
'bot': self.bot,
'ctx': ctx,
'message': ctx.message,
'server': ctx.guild,
'guild': ctx.guild,
'channel': ctx.channel,
'author': ctx.author,
'history': await ctx.channel.history().flatten(),
't_client': tokage.Client()
}
env.update(globals())
wrapped = 'async def func():\n%s' % textwrap.indent(code, ' ')
try:
result = exec(wrapped, env)
func = env['func']
await func()
if result:
await ctx.send(f"```{result}```")
except Exception as e:
await ctx.send(f"```{type(e).__name__ + ': ' + str(e)}```")
@commands.command(aliased=["join", "inv"])
async def invite(self, ctx):
await ctx.send(f"<{discord.utils.oauth_url(bot.user.id)}>")
def setup(bot):
bot.add_cog(Utilities(bot))
|
Python
| 0.000004
|
@@ -2665,16 +2665,63 @@
, ctx):%0A
+ %22%22%22Sends an invite link for the bot%22%22%22%0A
@@ -2763,16 +2763,21 @@
uth_url(
+self.
bot.user
|
4312dcee00eabe97040a7a1da58f25d714a9dfee
|
Remove debug statement and prevent nsfw for images
|
scripts/python/reddit.py
|
scripts/python/reddit.py
|
#!/usr/bin/env python3
# Copyright 2012-2013 Jake Basile and Kyle Varga
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Shows a random image from /r/aww'''
import urllib.request
import urllib.parse
import random
import sys
import json
import re
if len(sys.argv) < 2:
sys.exit(1)
subreddit = sys.argv[1]
timeframe = sys.argv[2] if len(sys.argv) > 2 else 'day'
sort = sys.argv[3] if len(sys.argv) > 3 else 'top'
filetypes = sys.argv[4] if len(sys.argv) > 4 else 'jpg|jpeg|gif|png'
regex = '^.*\.(%s)$' % filetypes
results = urllib.request.urlopen('http://reddit.com/r/%s.json?limit=100&t=%s&sort=%s' % (subreddit, timeframe, sort))
if results.status != 200:
print(results.url)
sys.exit(0)
jsn = json.loads(results.read().decode('utf-8'))
images = [
str(c['data']['url'])
for c in jsn['data']['children']
if re.match(regex, c['data']['url'])
]
print(random.choice(images))
|
Python
| 0
|
@@ -1165,28 +1165,8 @@
00:%0A
-%09print(results.url)%0A
%09sys
@@ -1171,17 +1171,17 @@
ys.exit(
-0
+1
)%0A%0Ajsn =
@@ -1339,16 +1339,50 @@
%5B'url'%5D)
+ and c%5B'data'%5D%5B'over_18'%5D == False
%0A%5D%0Aprint
|
f6cf19966651e8c1e21fa3bde777c5bad6285c9f
|
add print
|
scripts/relay_control.py
|
scripts/relay_control.py
|
#!/usr/bin/python
import RPi.GPIO as GPIO
import argparse
GPIO.setmode(GPIO.BOARD)
# GPIO/BOARD | Relay IN | Rotors | Zone
# 22/15 | R2 IN2 | 1 | B
# 18/12 | R1 IN2 | 2 | A
# 24/18 | R1 IN3 | 3 | D
# 17/11 | R1 IN4 | 4 | C
# 27/13 | R2 IN1 | 5 | E
relayIO = { "1": 15, "2": 12, "3": 18, "4": 11, "5": 13}
def setState(relay, state):
GPIO.output(int(relayIO[relay]), bool(state))
if getState(relay) != state:
print("relay: " + relay + "is not set to " + state)
print("relay: " + relay + "is set to " + getState(relay))
def getState(relay):
return GPIO.input(int(relayIO[relay]))
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--relay', help='Set relay 1/2/3/4/5', required=True)
parser.add_argument('--state',help='Set state high=1 or low=0', required=True)
args = parser.parse_args()
setState(args.relay, args.state)
if __name__ == '__main__':
main()
|
Python
| 0.000085
|
@@ -390,16 +390,100 @@
state):%0A
+%09print(%22Trying to set relay: %22 + int(relayIO%5Brelay%5D) + %22 to state: %22 + bool(state))%0A
%09GPIO.ou
|
a594f546bdd000dbb0ac6c3e9d33460f71ec77c0
|
Comment out 'Terrain' field in both jobs
|
scripts/runsimulation.py
|
scripts/runsimulation.py
|
import sys
import os
import json
import csv
import shutil
import subprocess
def createSimulationDirectory(simulationid):
# YZ: This directory is created by making a full copy of the 'Facade' folder in the jEPlus repository
directory = 'simulations/{0}'.format(simulationid)
if os.path.exists(directory):
shutil.rmtree (directory)
shutil.copytree("../jEPlus/Box", directory) # YZ: check if it should "../../jEPlus/Facade"
return os.path.abspath(directory)
def convertDataToCSV(jsondata):
# This would take the json object passed in and convert it to CSV data.
# The format of the data is as follows:
'''
{
"JobID": "<string>", <-- can be any text that identifies this case
"WeatherFile": 0, <-- should always be 0
"ModelFile": 0, <-- should always be 0
"Terrain": "<string>",
"Orientation" : "<number>",
"Width": "<number>",
"Height": "<number>",
"Depth": "<number>",
"OccupancyType": "<string>",
"Window" : "null or <number>",
"CoolingSP" : "<number>",
"HeatingSP" : "<number>"
"InsulationLevel" : "<number>",
"InfiltrationRate" : "<number>",
"Mvalue" : "<number>",
"Qvalue" : "<number>",
"WindowType": "<string>",
"WallType": "<string>",
"FinLeft" : "<number>",
"FinRight" : "<number>",
"Overhang" : "<number>",
}
'''
# "ventilationRate" : "<number>" jsondata["ventilationRate"]])
# This is a kludge for now. It just writes out the parameters on 3 lines to test that we received them correctly
# TODO: When it is implemented, it should be one line with values appearing in the same order as the parameters in the project.
csvdata = [[ 'job1', # jsondata["JobID"],
0,
0,
#Row 0 BioPCM case
#Geometry tab (P1,P2,P3,P4,P5,P6,P7,P8)
jsondata["Height"],
jsondata["Depth"],
jsondata["Width"],
jsondata["WinGR"],
jsondata["Overhang"],
jsondata["LFin"],
jsondata["RFin"],
jsondata["Orientation"],
#Facade tab (P9,P10,P11,P12,P13)
jsondata["WallType"],
jsondata["WindowType"],
jsondata["InfiltrationRate"],
jsondata["InsulationLevel"],
"M" + jsondata["Mvalue"] + "Q" + jsondata["Qvalue"],
#Activity tab (P14,P15,P16)
jsondata["OccupancyType"],
jsondata["CoolingSP"],
jsondata["HeatingSP"],
#Site tab (P17)
jsondata["Terrain"]
],
# Row 2 NoPCM This is a kludge for now used to add a noPCM run.
['job0', # jsondata["JobID"],
0,
0,
#Geometry tab (P1,P2,P3,P4,P5,P6,P7,P8)
jsondata["Height"],
jsondata["Depth"],
jsondata["Width"],
jsondata["WinGR"],
jsondata["Overhang"],
jsondata["LFin"],
jsondata["RFin"],
jsondata["Orientation"],
#Facade tab (P9,P10,P11,P12,P13)
jsondata["WallType"],
jsondata["WindowType"],
jsondata["InfiltrationRate"],
jsondata["InsulationLevel"],
#This is a kludge for now used to add a noPCM run.
"WallAirGap",
#Activity tab (P14,P15,P16)
jsondata["OccupancyType"],
jsondata["CoolingSP"],
jsondata["HeatingSP"],
#Site tab (P17)
# jsondata["Terrain"]
]
]
return csvdata
def createJobListFile(directory, jsondata):
outputfile = directory + '/joblist.csv'
with open(outputfile, 'w', newline='') as csvfile:
csvfile = csv.writer(csvfile, delimiter=',', quotechar='\'', quoting=csv.QUOTE_MINIMAL)
csvfile.writerows(convertDataToCSV(jsondata))
# Main purpose of this function is to copy the selected weather file to in.epw in the simulation folder
def copySupportingFiles(simulationDirectory, jsondata):
# YZ: is the value in jsondata.weatherFile containing the full path to the weather file to copy?
shutil.copyfile(jsondata['WeatherFile'], simulationDirectory + os.sep + 'in.epw')
# YZ: no need to copy the geometry file unless it has been altered for this case.
#TODO: I think in version 1 we will always use the same idf file but it will get it's data from the parameter file.
# We need to replace the one that is currently there with the correct one that takes a parameter file.
# shutil.copyfile('../idf/Geometry.idf', simulationDirectory + os.sep + 'in.idf') <-- No need to copy in.idf. The project folder contains the correct model file.
def executeSimulation(simulationDirectory, resultsDirectory):
olddir = os.getcwd()
os.chdir('../jess_client')
# Call JESS client to run a single case defined in the joblist.csv file
subprocess.call(['java', '-jar', '../jess_client/JESS_Client.jar',
'-cfg', '../jess_client/client.cfg',
'-log', '../jess_client/log4j.cfg',
'-job', simulationDirectory,
'-type', 'JEPLUS_PROJECT',
'-subset', 'LIST_FILE',
'-subset_param', 'joblist.csv',
'-output', resultsDirectory])
os.chdir(olddir)
# This is the main entry to execute simulation. Simulationid is used to name the working directory, and
# jsondata should contain all parameter values for the cases
def runSimulation(simulationid, jsondata):
# create simulation directory "simulations/[simulationid]" by copying from the template folder ("jEPlus/Facade/")
directory = createSimulationDirectory(simulationid)
resultsDirectory = directory + os.sep + 'output'
resultsFile = resultsDirectory + os.sep + 'AllDerivedResults.csv'
# copy selected weather file to the simulation directory
copySupportingFiles(directory, jsondata)
# create job list file from jsondata
createJobListFile(directory, jsondata)
# run simulation
executeSimulation(directory, resultsDirectory)
# Send the directory that the simulation results are in and the file to stream back to the client to the caller.
print(resultsDirectory)
print(resultsFile)
runSimulation(sys.argv[1], json.JSONDecoder().decode(sys.argv[2]))
|
Python
| 0
|
@@ -2448,33 +2448,32 @@
ata%5B%22HeatingSP%22%5D
-,
%0A#Site tab (P17)
@@ -2478,32 +2478,34 @@
7)%0A
+ #
jsondata%5B%22Terra
@@ -3422,17 +3422,16 @@
tingSP%22%5D
-,
%0A#Site t
|
ba7792cbede153a1cc4daea92dded29d9f7cf959
|
Use start and end anchors when matching book name regex (Fixes Issue #3 in differentiating between Philippians and Philemon)
|
scriptures/references.py
|
scriptures/references.py
|
import re
from .bible_re import testaments, book_re, scripture_re
class InvalidReferenceException(Exception):
"""
Invalid Reference Exception
"""
pass
def get_book(name):
"""
Get a book from its name or None if not found
"""
for books in testaments.values():
for book in books:
if re.match(book[2], name, re.IGNORECASE):
return book
return None
def extract(text):
"""
Extract a list of tupled scripture references from a block of text
"""
references = []
for r in re.finditer(scripture_re, text):
try:
references.append(normalize_reference(*r.groups()))
except InvalidReferenceException:
pass
return references
def is_valid_reference(bookname, chapter, verse=None,
end_chapter=None, end_verse=None):
"""
Check to see if a scripture reference is valid
"""
try:
return normalize_reference(bookname, chapter, verse,
end_chapter, end_verse) is not None
except InvalidReferenceException:
return False
def reference_to_string(bookname, chapter, verse=None,
end_chapter=None, end_verse=None):
"""
Get a display friendly string from a scripture reference
"""
book=None
normalized = normalize_reference(bookname, chapter, verse,
end_chapter, end_verse)
# if start and end chapters are the same
if normalized[1] == normalized[3]:
book = get_book(normalized[0])
if len(book[3]) == 1: # single chapter book
# If start and end verses are the same
if normalized[2] == normalized[4]:
return '{0} {1}'.format(*normalized[0::2])
else:
return '{0} {1}-{2}'.format(*normalized[0::2])
else: # multichapter book
# If the start verse is one and the end verse is the last verse in
# the chapter
if normalized[2] == 1 and normalized[4] == book[3][normalized[1]-1]:
return '{0} {1}'.format(*normalized[:2])
# If start and end verses are the same
elif normalized[2] == normalized[4]:
return '{0} {1}:{2}'.format(*normalized[:3])
else:
return '{0} {1}:{2}-{3}'.format(
*(normalized[:3] + normalized[-1:]))
else: # start and end chapters are different
return '{0} {1}:{2}-{3}:{4}'.format(*normalized)
def normalize_reference(bookname, chapter, verse=None,
end_chapter=None, end_verse=None):
"""
Get a complete five value tuple scripture reference with full book name
from partial data
"""
book = get_book(bookname)
# SPECIAL CASE FOR BOOKS WITH ONE CHAPTER:
# If there is only one chapter in this book, set the chapter to one and
# treat the incoming chapter argument as though it were the verse.
# This normalizes references such as:
# Jude 2 and Jude 2-4
if len(book[3]) == 1:
if verse is None and end_chapter is None:
verse=chapter
chapter=1
else:
# This is not a single chapter book.
# If a start verse was NOT provided, but an end_verse was- we have a
# reference such as John 3-4 which is invalid.
if verse is None and end_verse:
raise InvalidReferenceException()
# Convert to integers or leave as None
chapter = int(chapter) if chapter else None
verse = int(verse) if verse else None
end_chapter = int(end_chapter) if end_chapter else chapter
end_verse = int(end_verse) if end_verse else None
if not book \
or (chapter is None or chapter < 1 or chapter > len(book[3])) \
or (verse is not None and (verse < 1 or verse > book[3][chapter-1])) \
or (end_chapter is not None and (
end_chapter < 1
or end_chapter < chapter
or end_chapter > len(book[3]))) \
or (end_verse is not None and(
end_verse < 1
or (end_chapter and end_verse > book[3][end_chapter-1])
or (chapter == end_chapter and end_verse < verse))):
raise InvalidReferenceException()
if not verse:
return (book[0], chapter, 1, chapter, book[3][chapter-1])
if not end_verse:
if end_chapter and end_chapter != chapter:
end_verse = book[3][end_chapter-1]
else:
end_verse = verse
if not end_chapter:
end_chapter = chapter
return (book[0], chapter, verse, end_chapter, end_verse)
|
Python
| 0
|
@@ -338,16 +338,25 @@
e.match(
+'%5E%25s$' %25
book%5B2%5D,
|
62957cca1251084751c78e2b9b5821342d1a9095
|
Add properties to CohortsBase model
|
scuole/cohorts/models.py
|
scuole/cohorts/models.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.db import models
from .managers import CohortQuerySet
class CohortsBase(models.Model):
FEMALE = 'Female'
MALE = 'Male'
GENDER_CHOICES = (
(FEMALE, 'Female'),
(MALE, 'Male'),
)
WHITE = 'White'
HISPANIC = 'Hispanic'
AFRICAN_AMERICAN = 'African American'
OTHERS = 'Others'
ETHNICITY_CHOICES = (
(WHITE, 'White'),
(HISPANIC, 'Hispanic'),
(AFRICAN_AMERICAN, 'African American'),
(OTHERS, 'Others'),
)
ECONOMICALLY_DISADVANTAGED = 'Economically Disadvantaged'
NOT_ECONOMICALLY_DISADVANTAGED = 'Not Economically Disadvantaged'
ECON_CHOICES = (
(ECONOMICALLY_DISADVANTAGED, 'Economically Disadvantaged'),
(NOT_ECONOMICALLY_DISADVANTAGED, 'Not Economically Disadvantaged')
)
gender = models.CharField(
'Gender', max_length=30, choices=GENDER_CHOICES, blank=True)
ethnicity = models.CharField(
'Ethnicity', max_length=30, choices=ETHNICITY_CHOICES, blank=True)
economic_status = models.CharField(
'Economic status', max_length=30, choices=ECON_CHOICES, blank=True)
enrolled_8th = models.IntegerField(null=True)
enrolled_9th = models.IntegerField(null=True)
enrolled_9th_percent = models.FloatField(null=True)
enrolled_10th = models.IntegerField(null=True)
enrolled_10th_percent = models.FloatField(null=True)
lessthan_10th_enrolled = models.IntegerField(null=True)
lessthan_10th_enrolled_percent = models.FloatField(null=True)
graduated = models.IntegerField(null=True)
graduated_percent = models.FloatField(null=True)
enrolled_4yr = models.IntegerField(null=True)
enrolled_4yr_percent = models.FloatField(null=True)
enrolled_2yr = models.IntegerField(null=True)
enrolled_2yr_percent = models.FloatField(null=True)
enrolled_out_of_state = models.IntegerField(null=True)
enrolled_out_of_state_percent = models.FloatField(null=True)
total_enrolled = models.IntegerField(null=True)
total_enrolled_percent = models.FloatField(null=True)
enrolled_wo_record = models.IntegerField(null=True)
enrolled_wo_record_percent = models.FloatField(null=True)
total_degrees = models.IntegerField(null=True)
total_degrees_percent = models.FloatField(null=True)
bacc = models.IntegerField(null=True)
bacc_acc = models.IntegerField(null=True)
bacc_cert = models.IntegerField(null=True)
assoc = models.IntegerField(null=True)
accoc_cert = models.IntegerField(null=True)
cert = models.IntegerField(null=True)
objects = CohortQuerySet.as_manager()
class Meta:
abstract = True
|
Python
| 0
|
@@ -103,16 +103,68 @@
t models
+%0Afrom django.utils.functional import cached_property
%0A%0Afrom .
@@ -2737,16 +2737,446 @@
ager()%0A%0A
+ @cached_property%0A def percent_enrolled_higher_education(self):%0A try:%0A return self.total_enrolled / self.enrolled_8th%0A except (TypeError, ZeroDivisionError):%0A return 'N/A'%0A%0A @cached_property%0A def percent_completed_higher_education(self):%0A try:%0A return self.total_degrees / self.enrolled_8th%0A except (TypeError, ZeroDivisionError):%0A return 'N/A'%0A%0A
clas
|
b58dbb8c6d8410a8e745b8e1db318a3e52834c3f
|
Version bump
|
articleappkit/__init__.py
|
articleappkit/__init__.py
|
"""
A set of classes to make creating text- or article-based Django apps easier.
"""
__version_info__ = {
'major': 0,
'minor': 3,
'micro': 0,
'releaselevel': 'beta',
'serial': 3
}
def get_version(short=False):
assert __version_info__['releaselevel'] in ('alpha', 'beta', 'final')
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final' and not short:
vers.append('%s%i' % (__version_info__['releaselevel'][0], __version_info__['serial']))
return ''.join(vers)
__version__ = get_version()
|
Python
| 0
|
@@ -168,20 +168,21 @@
evel': '
-beta
+final
',%0A '
@@ -190,17 +190,17 @@
erial':
-3
+1
%0A%7D%0A%0A%0Adef
|
781bbdf8e2fb6499804dd52f66a8f7c75bbea2a1
|
Print working directory for logs
|
src/py/rpmostreecompose/imagefactory.py
|
src/py/rpmostreecompose/imagefactory.py
|
#!/usr/bin/env python
# Copyright (C) 2014 Colin Walters <walters@verbum.org>, Andy Grimm <agrimm@redhat.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import json
import os
import sys
import tempfile
import argparse
import shutil
import subprocess
import distutils.spawn
from gi.repository import Gio, OSTree, GLib
import iniparse
# For ImageFactory builds
from imgfac.BuildDispatcher import BuildDispatcher
from imgfac.PluginManager import PluginManager
from imgfac.ApplicationConfiguration import ApplicationConfiguration
import logging
from .taskbase import TaskBase
from .utils import run_sync, fail_msg
class ImgBuilder(object):
'''
Abstract class from which specific builder inherit.
This is mostly because we want to allow for direct calls to imagefactory,
but also calls to koji. In one case, we need to generate a TDL; in the
other, we provide the parameters and let the system construct it.
'''
def __init__(self, *args, **kwargs):
pass
def build(self):
'''
Trigger a build. Return something useful like a build id, status, etc.
'''
raise NotImplementedError
def download(self, dest):
'''
Copy/download artifacts to a destination
'''
raise NotImplementedError
class ImgFacBuilder(ImgBuilder):
    def __init__(self, *args, **kwargs):
        """Configure ImageFactory and send its logging to <workdir>/imgfac.log.

        Requires kwargs['workdir']. Reads /etc/imagefactory/imagefactory.conf
        and loads plugins from /etc/imagefactory/plugins.d.
        """
        config = json.loads(open('/etc/imagefactory/imagefactory.conf').read())
        config['plugins'] = '/etc/imagefactory/plugins.d'
        config['timeout'] = 3600
        # Constructed for its side effect (return value unused) -- presumably
        # registers the configuration with imagefactory; confirm.
        ApplicationConfiguration(configuration=config)
        plugin_mgr = PluginManager('/etc/imagefactory/plugins.d')
        plugin_mgr.load()
        logfile = os.path.join(kwargs['workdir'], 'imgfac.log')
        # Route the root logger (DEBUG level) into the per-workdir log file.
        self.fhandler = logging.FileHandler(logfile)
        self.tlog = logging.getLogger()
        self.tlog.setLevel(logging.DEBUG)
        self.tlog.addHandler(self.fhandler)
        pass
    def build(self, template=None, parameters=None):
        """Build a base image from TDL *template*; return image.data.

        Blocks on the builder thread; calls fail_msg() when the final
        status is not COMPLETE.
        """
        bd = BuildDispatcher()
        builder = bd.builder_for_base_image(template=template,
                                            parameters=parameters)
        print json.dumps(builder.app_config)
        image = builder.base_image
        thread = builder.base_thread
        # Dump all image metadata attributes for the build log.
        for key in image.metadata():
            print "%s %s" % (key, getattr(image, key, None))
        # Wait for the asynchronous build to finish before checking status.
        thread.join()
        if image.status != "COMPLETE":
            fail_msg("Failed image status: " + image.status)
        return image.data
    def download(self):
        # Nothing to download: the image is produced on the local filesystem.
        pass
class KojiBuilder(ImgBuilder):
    """Placeholder builder that will eventually submit image builds to Koji.

    Every operation is currently a stub kept only so the ImgBuilder
    interface is satisfied.
    """
    def __init__(self, **kwargs):
        # Eventually something like:
        #   server = kwargs.pop('server')
        #   self.session = koji.ClientSession(server, kwargs)
        pass

    def build(self):
        # TODO: populate buildinfo, then self.session.createImageBuild(buildinfo)
        pass

    def download(self):
        pass
class ImageFactoryTask(TaskBase):
    def create_disks(self):
        """Build disk images for self.ref and move them under outputdir/images/<rev8>/."""
        [res,rev] = self.repo.resolve_rev(self.ref, False)
        [res,commit] = self.repo.load_variant(OSTree.ObjectType.COMMIT, rev)
        commitdate = GLib.DateTime.new_from_unix_utc(OSTree.commit_get_timestamp(commit)).format("%c")
        print commitdate
        # XXX - Define this somewhere?
        imageoutputdir=os.path.join(self.outputdir, 'images')
        # Final artifacts live in a directory named after the short commit rev.
        imagedir = os.path.join(imageoutputdir, rev[:8])
        if not os.path.exists(imagedir):
            os.makedirs(imagedir)
        imagestmpdir = os.path.join(self.workdir, 'images')
        if not os.path.exists(imagestmpdir):
            os.mkdir(imagestmpdir)
        generated = []
        imgtargetcloud=os.path.join(imagestmpdir, self._name, '%s.qcow2' % self.os_name)
        self.create_cloud_image(self.workdir, imgtargetcloud, self._kickstart)
        generated.append(imgtargetcloud)
        # Move everything built in the temp dir into the output dir.
        for f in generated:
            destpath = os.path.join(imagedir, os.path.basename(f))
            print "Created: " + destpath
            shutil.move(f, destpath)
    def create_cloud_image(self, tmpdir, target, ksfile):
        """Flatten kickstart *ksfile*, point it at a local ostree httpd,
        and build a cloud image at *target* via self.builder."""
        targetdir = os.path.dirname(target)
        if not os.path.exists(targetdir):
            os.makedirs(targetdir)
        # Serve the ostree repo over a throwaway local httpd; the chosen
        # port is written to port_file_path by `ostree trivial-httpd`.
        port_file_path = tmpdir + '/repo-port'
        subprocess.check_call(['ostree',
                               'trivial-httpd', '--autoexit', '--daemonize',
                               '--port-file', port_file_path],
                              cwd=self.ostree_repo)
        httpd_port = open(port_file_path).read().strip()
        print "trivial httpd port=%s" % (httpd_port, )
        ks_basename = os.path.basename(ksfile)
        flattened_ks = os.path.join(tmpdir, ks_basename)
        # FIXME - eventually stop hardcoding this via some mapping
        if ks_basename.find('fedora') >= 0:
            kickstart_version = 'F21'
        else:
            kickstart_version = 'RHEL7'
        run_sync(['ksflatten', '--version', kickstart_version,
                  '-c', ksfile, '-o', flattened_ks])
        # TODO: Pull kickstart from separate git repo
        ksdata = open(flattened_ks).read()
        # Replace @PLACEHOLDER@ tokens in the kickstart with this build's values.
        substitutions = { 'OSTREE_PORT': httpd_port,
                          'OSTREE_REF': self.ref,
                          'OSTREE_OSNAME': self.os_name }
        for subname, subval in substitutions.iteritems():
            ksdata = ksdata.replace('@%s@' % (subname, ), subval)
        parameters = { "install_script": ksdata,
                       "generate_icicle": False,
                     }
        print "Starting build"
        image_path = self.builder.build(template=open(self._tdl).read(),
                                        parameters=parameters)
        shutil.copyfile(image_path, target)
        print "Created: " + target
    @property
    def builder(self):
        # TODO: option to switch to koji builder
        if True:
            return ImgFacBuilder(workdir=self.workdir)
        else:
            return KojiBuilder()
## End Composer
def main():
    """Command-line entry point: parse arguments and run one image compose."""
    argparser = argparse.ArgumentParser(
        description='Use ImageFactory to create a disk image')
    argparser.add_argument('-c', '--config', type=str, required=True,
                           help='Path to config file')
    argparser.add_argument('--name', type=str, required=True,
                           help='Image name')
    argparser.add_argument('--tdl', type=str, required=True,
                           help='TDL file')
    argparser.add_argument('-k', '--kickstart', type=str, required=True,
                           help='Path to kickstart')
    argparser.add_argument('-r', '--release', type=str, default='rawhide',
                           help='Release to compose (references a config file section)')
    argparser.add_argument('-v', '--verbose', action='store_true',
                           help='verbose output')
    opts = argparser.parse_args()

    task = ImageFactoryTask(opts.config,
                            name=opts.name,
                            kickstart=opts.kickstart,
                            tdl=opts.tdl,
                            release=opts.release)
    task.show_config()
    task.create_disks()
    task.cleanup()
|
Python
| 0
|
@@ -2421,24 +2421,77 @@
gfac.log')%0A%0A
+ print %22ImgFacBuilder logging to: %22 + logfile%0A
self
|
9067e4b8557fb8f15445bc8e976c3c4da8835123
|
Change delay after reset password
|
pritunl_loader/loader.py
|
pritunl_loader/loader.py
|
from constants import *
from exceptions import *
import uuid
import subprocess
import os
import paramiko
import base64
import time
import requests
import json
import socket
class IgnorePolicy(paramiko.MissingHostKeyPolicy):
    """Host-key policy that silently accepts any unknown host key.

    Safe here because the droplet is freshly created, so there is no
    previously recorded host key to verify against.
    """
    def missing_host_key(self, *args, **kwargs):
        # Deliberately do nothing: accept the key without recording it.
        pass
class Loader:
    """Creates a DigitalOcean droplet and installs pritunl on it over SSH.

    Typical flow: install() -> create_droplet() (generating and importing a
    throwaway SSH key), run the apt commands remotely, reset the root
    password, then remove the imported key.
    """
    def __init__(self, api_key, region):
        self.api_key = api_key
        self.region = region
        self.droplet_id = None
        self.host = None
        self.private_key = None
        self.public_key = None
        self.public_key_id = None

    def generate_key(self):
        """Generate a temporary RSA keypair for the initial SSH session."""
        key_path = os.path.join(KEY_TEMP_DIR, uuid.uuid4().hex)
        pub_key_path = key_path + '.pub'
        # check_output so ssh-keygen's chatter is captured, not printed;
        # the output itself is not needed (unused binding removed).
        subprocess.check_output([
            'ssh-keygen', '-b', '1024', '-t', 'rsa', '-C', 'pritunl',
            '-N', '', '-f', key_path,
        ])
        self.private_key = paramiko.RSAKey(filename=key_path)
        os.remove(key_path)
        self.public_key = open(pub_key_path).read().strip()
        os.remove(pub_key_path)

    def import_key(self):
        """Upload the generated public key to the DigitalOcean account.

        Raises InvalidApiKey or KeyImportError on failure.
        """
        response = requests.post(
            '%s/account/keys' % API_URL,
            headers={
                'Authorization': 'Bearer %s' % self.api_key,
                'Content-Type': 'application/json',
            },
            data=json.dumps({
                'name': DROPLET_NAME,
                'public_key': self.public_key,
            }),
        )
        if response.status_code < 200 or response.status_code >= 300:
            if response.json().get('id') == 'unauthorized':
                raise InvalidApiKey('API key is invalid')
            raise KeyImportError('Failed to import ssh key')
        self.public_key_id = response.json()['ssh_key']['id']

    def reset_password(self):
        """Trigger a root password reset on the droplet (no-op without one).

        Raises InvalidApiKey or ResetPasswordError on failure.
        """
        if not self.droplet_id:
            return
        response = requests.post(
            '%s/droplets/%s/actions' % (API_URL, self.droplet_id),
            headers={
                'Authorization': 'Bearer %s' % self.api_key,
                'Content-Type': 'application/json',
            },
            data=json.dumps({
                'type': 'password_reset',
            }),
        )
        if response.status_code < 200 or response.status_code >= 300:
            if response.json().get('id') == 'unauthorized':
                raise InvalidApiKey('API key is invalid')
            raise ResetPasswordError('Failed to reset droplet password')

    def remove_key(self):
        """Delete the imported SSH key from the account (best effort).

        Failures are deliberately ignored: this runs from a finally block
        and must not mask the original error.
        """
        if not self.public_key_id:
            return
        requests.delete(
            '%s/account/keys/%s' % (API_URL, self.public_key_id),
            headers={
                'Authorization': 'Bearer %s' % self.api_key,
            },
        )

    def create_droplet(self):
        """Create the droplet and poll until it is active with a public IP.

        Sets self.droplet_id and self.host. Raises InvalidApiKey or
        CreateDropletError on failure or timeout.
        """
        if not self.public_key:
            self.generate_key()
            self.import_key()
        response = requests.post(
            '%s/droplets' % API_URL,
            headers={
                'Authorization': 'Bearer %s' % self.api_key,
                'Content-Type': 'application/json',
            },
            data=json.dumps({
                'name': DROPLET_NAME,
                'region': self.region,
                'size': DROPLET_SIZE,
                'image': DROPLET_IMAGE,
                'ssh_keys': [self.public_key_id],
            }),
        )
        if response.status_code < 200 or response.status_code >= 300:
            if response.json().get('id') == 'unauthorized':
                raise InvalidApiKey('API key is invalid')
            raise CreateDropletError('Failed to create droplet')
        self.droplet_id = response.json()['droplet']['id']

        start_time = int(time.time())
        while True:
            response = requests.get(
                '%s/droplets/%s' % (API_URL, self.droplet_id),
                headers={
                    'Authorization': 'Bearer %s' % self.api_key,
                },
            )
            if response.status_code < 200 or response.status_code >= 300:
                if response.json().get('id') == 'unauthorized':
                    raise InvalidApiKey('API key is invalid')
                raise CreateDropletError(
                    'Failed to create droplet, error getting droplet status')
            response = response.json()
            if response['droplet']['status'] == 'active':
                # Take the first public IPv4 address reported.
                networks = response['droplet']['networks']['v4']
                for network in networks:
                    if network['type'] == 'public':
                        self.host = network['ip_address']
                        break
                if not self.host:
                    raise CreateDropletError('Failed to create droplet, ' +
                        'unable to get droplet IP address')
                break
            if int(time.time()) - start_time > DROPLET_TIMEOUT:
                raise CreateDropletError(
                    'Failed to create droplet, timed out')
            time.sleep(1)
        # Give sshd on the freshly-booted droplet a moment before connecting.
        time.sleep(5)

    def _ssh_exec(self, client, timeout, command):
        """Run *command* on *client*, raise on nonzero exit, return time left.

        Appends 'echo $?' so the remote exit status can be read from stdout;
        at least 15 seconds are always left for the next command.
        """
        command += '; echo $?'
        start_time = int(time.time())
        stdin, stdout, stderr = client.exec_command(
            command, timeout=timeout)
        exit_code = None
        for line in stdout:
            # The last line of output is the echoed exit status.
            exit_code = line.strip()
        if int(exit_code):
            raise DropletExecError('Command %r returned error exit code %s' % (
                command, exit_code))
        timeout -= (int(time.time()) - start_time)
        return max(15, timeout)

    def install(self, timeout=LOADER_TIMEOUT):
        """Provision the droplet (if needed) and install pritunl via apt.

        Always removes the imported account SSH key, even on failure.
        """
        try:
            if not self.droplet_id:
                self.create_droplet()
            client = paramiko.SSHClient()
            client.set_missing_host_key_policy(IgnorePolicy())
            # SSH may not be accepting connections yet; retry for a while.
            for i in xrange(40):
                try:
                    client.connect(self.host, username='root',
                        pkey=self.private_key, timeout=CONNECT_TIMEOUT)
                except socket.timeout:
                    time.sleep(1)
                    if i >= 11:
                        raise DropletTimeout('SSH connection timed out')
                except:
                    time.sleep(3)
                    if i >= 11:
                        raise
                else:
                    # Fix: previously a successful connect did not exit the
                    # loop, so connect() was re-run on every iteration.
                    break
            for command in (
                        'apt-get install -qq -y python-software-properties',
                        'add-apt-repository -y ppa:pritunl',
                        'apt-get update -qq',
                        'apt-get install -qq -y pritunl',
                        'rm -f /root/.ssh/authorized_keys',
                    ):
                try:
                    timeout = self._ssh_exec(client, timeout, command)
                except socket.timeout:
                    raise DropletTimeout('SSH connection timed out')
            client.close()
            self.reset_password()
            # Give the password-reset action time to complete before
            # returning; tearing down immediately can leave the droplet
            # without a usable root password.
            time.sleep(30)
        finally:
            self.remove_key()
|
Python
| 0.000001
|
@@ -6854,16 +6854,43 @@
sword()%0A
+ time.sleep(30)%0A
|
8bc482db2e9cf98d3e3571f49a85ee7a287efaf7
|
Use DjangoJSONEncoder when serving jsonp requests
|
server/shared/request.py
|
server/shared/request.py
|
from django.http import JsonResponse, HttpResponse
from django.shortcuts import render_to_response
from django.template.loader import render_to_string
import logging
import json
import re
logger = logging.getLogger("logger")
class ErrorResponse(Exception):
    """Exception carrying an HTTP status and a JSON-serializable payload.

    Raised by views wrapped with ``make_response``; the wrapper turns it
    into an error page or a JSON error body.

    :param message: human-readable error message (default payload text).
    :param data: optional payload dict; defaults to ``{"error": message}``.
    :param status: HTTP status code to return (default 401).
    :param err: optional underlying exception, kept for debugging.
    """
    def __init__(self, message, data=None, status=401, err=None):
        # Fix: the original called super(Exception, self).__init__(self,
        # message), which skips this class in the MRO and stores the
        # exception instance itself in args.
        super(ErrorResponse, self).__init__(message)
        self.data = data or { "error": message }
        self.status = status
        self.exception = err
def make_response(template=None, error_template="error.html"):
    """
    View decorator

    Tailor the response to the requested data type, as specified
    in the Accept header. Expects the wrapped view to return a
    dict. If the request wants JSON, renders the dict as JSON data.
    """
    def constructor_fn(view):
        def wrapped_view(req, *args, **kwargs):
            use_template = template
            status = 200

            try:
                data = view(req, *args, **kwargs)
            except ErrorResponse as err:
                # Render the error template (or JSON) with the payload and
                # status carried by the exception.
                data = err.data
                use_template = error_template
                status = err.status

            # JSONP: wrap the JSON body in the caller-supplied callback.
            jsonp_callback = req.GET.get("callback")
            if jsonp_callback:
                body = "{callback}({json})".format(callback=jsonp_callback,
                                                   json=json.dumps(data))
                response = HttpResponse(body, status=status)
                response["Content-Type"] = "application/javascript"
                return response

            # Fix: use .get() with a default and take the first split
            # element -- a missing Accept header raised KeyError, and a
            # header without parameters (no ";") raised ValueError from
            # tuple unpacking.
            accepts = req.META.get("HTTP_ACCEPT", "")
            typestring = accepts.split(";", 1)[0]
            if not use_template \
               or re.search(r"application/json", typestring):
                response = JsonResponse(data, status=status)
                # TODO: We may (or may not!) want to be more restrictive
                # in the future:
                response["Access-Control-Allow-Origin"] = "*"
                return response

            return render_to_response(use_template, data, status=status)
        return wrapped_view
    return constructor_fn
|
Python
| 0.000001
|
@@ -1,20 +1,79 @@
+from django.core.serializers.json import DjangoJSONEncoder%0A
from django.http imp
@@ -222,27 +222,15 @@
ging
-%0Aimport json%0Aimport
+, json,
re%0A
@@ -1368,16 +1368,82 @@
llback:%0A
+ content = json.dumps(data, cls=DjangoJSONEncoder)%0A
@@ -1570,24 +1570,15 @@
son=
-json.dumps(data)
+content
)%0A%0A
|
c27d9dfe166c7dc98beb12c9406e70362e9bddcf
|
Support Python 3
|
flask_indieauth.py
|
flask_indieauth.py
|
# -*- coding: utf-8 -*-
"""
Flask-IndieAuth
==============
This extension adds the ability to authorize requests to your Flask
endpoints via [IndieAuth](https://indieweb.org/IndieAuth), using
current_app.config['TOKEN_ENDPOINT'] as the token server.
This is useful for developers of Micropub (https://www.w3.org/TR/micropub/)
server implementations.
Configuration
-------------
`current_app.config` should contain the following configuration details:
* `TOKEN_ENDPOINT` (e.g. "https://tokens.indieauth.org/token")
* `ME` (e.g. "http://example.com")
Example Usage
-------------
from flask_indieauth import requires_indieauth
@app.route('/micropub', methods=['GET','POST'])
@requires_indieauth
def handle_micropub():
...
When a Flask route is wrapped in @requires_indieauth, this extension
will look for an IndieAuth bearer token in these locations in order:
* HTTP header `Authorization: Bearer xxx...`
* HTTP form data in the parameter `access_token`
* HTTP POST body, if in JSON format, in the `access_token` attribute
If an access token is found, it is checked for a `me` value equal to the
domain in current_app.config["ME"] and a `scope` value of `post`.
If all checks pass, processing is passed to the Flask route handler.
"""
from functools import wraps
from flask import request, Response, current_app, g
from urllib2 import Request, urlopen
from urlparse import urlparse, parse_qs
import json
def requires_indieauth(f):
    """Decorator: reject the request unless it carries a valid IndieAuth token."""
    @wraps(f)
    def decorated(*args, **kwargs):
        token = get_access_token()
        denial = check_auth(token)
        # check_auth returns a Response only when the token is rejected.
        if isinstance(denial, Response):
            return denial
        return f(*args, **kwargs)
    return decorated
def check_auth(access_token):
    """Validate *access_token* against the configured IndieAuth token endpoint.

    Returns a 400 Response (via deny()) when the token is missing or
    rejected; on success stores me/client_id/scope on ``g.user`` and
    returns None.
    """
    if not access_token:
        current_app.logger.error('No access token.')
        return deny('No access token found.')
    token_request = Request(
        current_app.config['TOKEN_ENDPOINT'],
        headers={"Authorization" : ("Bearer %s" % access_token)}
    )
    # Fix: urlopen().read() returns bytes on Python 3; parse_qs on bytes
    # yields bytes keys that never match 'me'/'client_id'.
    contents = urlopen(token_request).read().decode('utf-8')
    token_data = parse_qs(contents)
    # Fix: use .get() with a [None] default so a malformed endpoint reply
    # takes the "Invalid token" branch instead of raising KeyError (which
    # made the `is None` checks below unreachable).
    me = token_data.get('me', [None])[0]
    client_id = token_data.get('client_id', [None])[0]
    if me is None or client_id is None:
        current_app.logger.error("Invalid token [%s]" % contents)
        return deny('Invalid token')
    me, me_error = check_me(me)
    if me is None:
        current_app.logger.error("Invalid `me` value [%s]" % me_error)
        return deny(me_error)
    scope = token_data.get('scope', [''])[0]
    # NOTE(review): this is list membership, so a combined scope value such
    # as "create post" would be rejected; confirm whether word matching
    # inside the scope string is intended instead.
    if "post" not in token_data.get('scope', []):
        current_app.logger.error("Scope %s does not contain 'post'." % scope)
        return deny("Scope %s does not contain 'post'." % scope)
    g.user = {
        'me': me,
        'client_id': client_id,
        'scope': scope
    }
def check_me(me):
    """Compare the token's ``me`` domain against current_app.config["ME"].

    Returns ``(me, None)`` on a match, otherwise ``(None, reason)``.
    """
    token_domain = urlparse(me).netloc
    our_domain = urlparse(current_app.config["ME"]).netloc
    if our_domain != token_domain:
        return (None, "token me (%s) doesn't match ours (%s)" % (token_domain, our_domain))
    return (me, None)
def deny(reason):
    """Build the 400 response returned for a missing or rejected token."""
    # Plain-text body carrying the rejection reason.
    return Response(reason, 400)
def get_access_token():
    """Extract the bearer token: Authorization header, then form field,
    then JSON body, in that order."""
    token = request.headers.get('Authorization')
    if token:
        token = token.replace('Bearer ', '')
    if not token:
        token = request.form.get('access_token')
    if not token:
        token = get_access_token_from_json_request(request)
    return token
def get_access_token_from_json_request(request):
    """Return the ``access_token`` field of a JSON request body, or None.

    Returns None when the body is not valid JSON, decodes to a non-dict,
    or lacks an ``access_token`` key.
    """
    try:
        payload = json.loads(request.get_data())
        return payload['access_token']
    except (ValueError, KeyError, TypeError):
        # ValueError: body is not JSON; KeyError: no access_token field
        # (previously escaped uncaught); TypeError: body is a list/number.
        return None
|
Python
| 0.000001
|
@@ -626,25 +626,24 @@
-----------%0A
-%0A
%0A
@@ -1451,16 +1451,170 @@
_app, g%0A
+import json%0Atry:%0A # For Python 3.0 and later%0A from urllib.request import Request, urlopen%0Aexcept ImportError:%0A # Fallback to Python2 urllib2%0A
from url
@@ -1642,24 +1642,50 @@
urlopen%0A
+try:%0A # Python 3.0%0A
from url
parse im
@@ -1672,24 +1672,28 @@
from url
+lib.
parse import
@@ -1712,27 +1712,114 @@
arse_qs%0A
-import json
+except ImportError:%0A # Fallback to Python2 urlparse%0A from urlparse import urlparse, parse_qs
%0A%0Adef re
@@ -2631,16 +2631,32 @@
).read()
+.decode('utf-8')
%0A tok
|
e1d119d743076b29cf19c584c337579903ab3875
|
fix templates path
|
flaskr/__init__.py
|
flaskr/__init__.py
|
#!/usr/bin/python3
# -*- coding: latin-1 -*-
import os
import sys
# import psycopg2
import json
from bson import json_util
from pymongo import MongoClient
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, flash
def create_app():
    """Application factory: build and return the Flask app instance."""
    return Flask(__name__)
app = create_app()
# REPLACE WITH YOUR DATABASE NAME
MONGODATABASE = "my_db"
MONGOSERVER = "localhost"
MONGOPORT = 27017
client = MongoClient(MONGOSERVER, MONGOPORT)
mongodb = client[MONGODATABASE]
''' # Uncomment for postgres connection
# REPLACE WITH YOUR DATABASE NAME, USER AND PASS
POSTGRESDATABASE = "mydatabase"
POSTGRESUSER = "myuser"
POSTGRESPASS = "mypass"
postgresdb = psycopg2.connect(
database=POSTGRESDATABASE,
user=POSTGRESUSER,
password=POSTGRESPASS)
'''
#Cambiar por Path Absoluto en el servidor
QUERIES_FILENAME = '/var/www/flaskr/queries'
@app.route("/")
def home():
    """Render the landing page listing all predefined queries."""
    with open(QUERIES_FILENAME, 'r', encoding='utf-8') as queries_file:
        entries = json.load(queries_file)
    pairs = [(entry["name"],
              entry["database"],
              entry["description"],
              entry["query"]) for entry in entries]
    return render_template('file.html', results=pairs)
@app.route("/mongo")
def mongo():
    # Evaluate a caller-supplied MongoDB expression and render the result.
    query = request.args.get("query")
    # SECURITY: eval() on a request parameter executes arbitrary Python --
    # anyone who can reach this endpoint can run code on the server.
    # Replace with an explicit, whitelisted query API before deploying.
    results = eval('mongodb.'+query)
    results = json_util.dumps(results, sort_keys=True, indent=4)
    if "find" in query:
        return render_template('mongo.html', results=results)
    else:
        return "ok"
@app.route("/postgres")
def postgres():
    # Execute a caller-supplied SQL string against Postgres, render the rows.
    # SECURITY: running request-supplied SQL verbatim permits arbitrary
    # reads/writes; acceptable only as a trusted, local teaching tool.
    # NOTE(review): `postgresdb` is only defined in the commented-out block
    # above, so this endpoint raises NameError until that is enabled.
    query = request.args.get("query")
    cursor = postgresdb.cursor()
    cursor.execute(query)
    results = [[a for a in result] for result in cursor]
    print(results)  # debug output; consider logging instead
    return render_template('postgres.html', results=results)
@app.route("/example")
def example():
    """Render the static example page."""
    return render_template('example.html')
if __name__ == "__main__":
    # Start the Flask development server when the module is run directly.
    app.run()
|
Python
| 0.000001
|
@@ -1211,16 +1211,26 @@
mplate('
+templates/
file.htm
@@ -1248,16 +1248,16 @@
=pairs)%0A
-
%0A%0A@app.r
@@ -1480,16 +1480,26 @@
mplate('
+templates/
mongo.ht
@@ -1793,16 +1793,26 @@
mplate('
+templates/
postgres
@@ -1872,24 +1872,24 @@
example():%0A
-
return r
@@ -1904,16 +1904,26 @@
mplate('
+templates/
example.
|
22571c096051fefc28b467ca29d93a4f0ea6cb9c
|
fix column pruning
|
mongoose_fdw/__init__.py
|
mongoose_fdw/__init__.py
|
###
### Author: David Wallin
### Time-stamp: <2015-03-02 08:56:11 dwa>
from multicorn import ForeignDataWrapper
from multicorn.utils import log_to_postgres as log2pg
from pymongo import MongoClient
class Mongoose_fdw (ForeignDataWrapper):
    """Multicorn foreign data wrapper exposing a MongoDB collection."""

    def __init__(self, options, columns):
        """Connect and authenticate using the foreign-table OPTIONS.

        Recognized options: host, port, user, password, db, collection,
        auth_db (defaults to db).
        """
        super(Mongoose_fdw, self).__init__(options, columns)
        self.host_name = options.get('host', 'localhost')
        self.port_nr = int(options.get('port', '27017'))
        self.user = options.get('user')
        self.password = options.get('password')
        self.db_name = options.get('db', 'test')
        self.collection_name = options.get('collection', 'test')
        self.c = MongoClient(host=self.host_name,
                             port=self.port_nr)
        self.auth_db = options.get('auth_db', self.db_name)
        # NOTE(review): authenticating through the hard-coded `userprofile`
        # database attribute looks accidental -- confirm it should not use
        # the configured database instead.
        self.c.userprofile.authenticate(self.user,
                                        self.password,
                                        source=self.auth_db)
        self.db = getattr(self.c, self.db_name)
        self.coll = getattr(self.db, self.collection_name)

    def execute(self, quals, columns):
        """Yield matching documents, fetching only the requested columns."""
        ## TODO: build spec based on quals:
        if quals:
            log2pg('quals: {}'.format(quals))
            log2pg('Quals are not implemented yet')

        ## Only request fields of interest. Iterate `columns` directly
        ## instead of calling .keys(): multicorn may hand over a plain
        ## sequence of column names, and .keys() broke column pruning.
        fields = {k: True for k in columns}
        if '_id' not in fields:
            # _id is returned by default; suppress it unless requested.
            fields['_id'] = False
        Q = {}
        cur = self.coll.find(spec=Q, fields=fields, snapshot=True)
        for doc in cur:
            yield doc
## Local Variables: ***
## mode:python ***
## coding: utf-8 ***
## End: ***
|
Python
| 0.000001
|
@@ -1387,15 +1387,8 @@
umns
-.keys()
%7D%0A
|
441cfadb97879d9ac76407145ba77185bbb292f8
|
fix regex n test
|
mots_vides/stop_words.py
|
mots_vides/stop_words.py
|
"""
StopWord Python container, managing collection of stop words.
"""
import re
class StopWord(object):
    """
    Object managing a collection of stop words for a given language.
    """
    def __init__(self, language, collection=None):
        """
        Initializes with a given language and an optional collection
        (any iterable of words; copied into a fresh set).
        """
        self.language = language
        # None default instead of a mutable [] default; behavior unchanged.
        self.collection = set(collection) if collection is not None else set()
        self.regex = None

    def __add__(self, entry):
        """
        Adds an entry or collection of entries to an instance.
        """
        if isinstance(entry, str):
            self.collection.add(entry)
        else:
            self.collection = self.collection.union(entry)
        return self

    def __sub__(self, entry):
        """
        Subtracts an entry or collection of entries from an instance.
        """
        if isinstance(entry, str):
            self.collection.remove(entry)
        else:
            self.collection = self.collection.difference(entry)
        return self

    def __len__(self):
        """
        Returns the collection length.
        """
        return len(self.collection)

    def __contains__(self, entry):
        """
        Checks if an entry is in collection.
        """
        return entry in self.collection

    def __iter__(self):
        """
        Iterates over the collection.
        """
        return iter(self.collection)

    def _compile_regex(self, word):
        # Fix: wrap the word in look-arounds so only whole words match;
        # without them, removing the stop word "a" also stripped the "a"
        # out of "cat". re.escape() keeps regex metacharacters literal.
        token = r'(?<!\w){0}(?!\w)'.format(re.escape(word))
        self.regex = re.compile(
            r'((^| ){0}(| ))|({0} )|{0}'.format(token),
            flags=re.IGNORECASE)
        return self.regex

    def rebase(self, text):
        """Returns *text* with every stop word removed (whole words only)."""
        for word in self.collection:
            current_regex = self._compile_regex(word)
            text = current_regex.sub('', text).strip()
        return text
|
Python
| 0.99982
|
@@ -1507,27 +1507,72 @@
%5E%7C )
-%7B0%7D(%7C ))%7C(%7B0%7D )%7C%7B0%7D
+((?%3C!%5Cw)%7B0%7D(?!%5Cw))(%7C ))%7C(((?%3C!%5Cw)%7B0%7D(?!%5Cw)) )%7C((?%3C!%5Cw)%7B0%7D(?!%5Cw))
'.fo
|
8447c6e908e1e15f7a0e7e5eb8b114b81075390e
|
modify test case: delete report
|
NippoKun/report/tests/test_report.py
|
NippoKun/report/tests/test_report.py
|
from django.contrib.auth.models import User
from django.test import TestCase, Client, RequestFactory
from ..models import Report
# Create your tests here.
class CreateReportTest(TestCase):
    # Exercises POST /report/report_entries/ as a registered, logged-in user.
    def setUp(self):
        self.client = Client()
        self.client.post('/report/user_register/',
                         {'username': 'john', 'password1': 'johnpass', 'password2': 'johnpass'})
        self.client.post('/report/login/', {'username': 'john', 'password': 'johnpass'})
    """
    status_code = 302: created new report.
    status_code = 200: not create new report.
    """
    def test_create_report(self):
        response = self.client.post('/report/report_entries/',
                                    {'report_title': 'test title', 'report_content': 'test'})
        self.assertEqual(response.status_code, 302)
    # NOTE(review): per the note above, 200 means "not created", yet the
    # blank-field tests below still expect 302 -- confirm whether empty
    # titles/contents are really meant to be accepted.
    def test_create_report_no_report_title(self):
        response = self.client.post('/report/report_entries/',
                                    {'report_title': '', 'report_content': 'test'})
        self.assertEqual(response.status_code, 302)
    def test_create_report_no_report_content(self):
        response = self.client.post('/report/report_entries/',
                                    {'report_title': 'test title', 'report_content': ''})
        self.assertEqual(response.status_code, 302)
    def test_create_report_no_report_info(self):
        response = self.client.post('/report/report_entries/',
                                    {'report_title': '', 'report_content': ''})
        self.assertEqual(response.status_code, 302)
class DeleteReportTest(TestCase):
    """Deleting a report must remove exactly one row from the Report table."""
    def setUp(self):
        self.client = Client()
        self.client.post('/report/user_register/',
                         {'username': 'john', 'password1': 'johnpass', 'password2': 'johnpass'})
        self.client.post('/report/login/', {'username': 'john', 'password': 'johnpass'})
    def test_delete_report(self):
        report = self.client.post('/report/report_entries/',
                                  {'report_title': 'test title', 'report_content': 'test'})
        # NOTE(review): `report` is the HttpResponse from the POST, not a
        # URL; confirm client.delete() really hits the delete route.
        # Fix: assert on the row count instead of the response status --
        # the previous `assertEqual(response.status_code, 404)` passed
        # even when nothing was actually deleted.
        before_count = Report.objects.count()
        self.client.delete(report)
        after_count = Report.objects.count()
        self.assertEqual(before_count, after_count + 1)
class UpdateReportContentTest(TestCase):
    # Verifies that POSTing to /report/<pk>/edition/ persists a new content.
    def setUp(self):
        self.client = Client()
        self.client.post('/report/user_register/',
                         {'username': 'john',
                          'password1': 'johnpass',
                          'password2': 'johnpass'})
        self.client.login(username='john', password='johnpass')
        request_factory = RequestFactory()
        self.request = request_factory.get('/report/mypage/')
    def test_update_report_content(self):
        # First (only) user created in setUp -> pk=1.
        self.request.user = User.objects.get(pk=1)
        self.client.post('/report/report_entries/',
                         {'report_author': self.request.user,
                          'report_title': 'test title',
                          'report_content': 'test'
                          })
        self.request.report = Report.objects.get(pk=1)
        # Keep author/title, change only the content.
        report = {
            'report_author': self.request.report.report_author,
            'report_title': self.request.report.report_title,
            'report_content': 'update content'
        }
        self.client.post('/report/1/edition/', report)
        # Re-fetch to observe the persisted change.
        self.request.report = Report.objects.get(pk=1)
        self.assertEqual(self.request.report.report_content, 'update content')
class UpdateReportTitleTest(TestCase):
    # Verifies that POSTing to /report/<pk>/edition/ persists a new title.
    def setUp(self):
        self.client = Client()
        self.client.post('/report/user_register/',
                         {'username': 'john',
                          'password1': 'johnpass',
                          'password2': 'johnpass'})
        self.client.login(username='john', password='johnpass')
        request_factory = RequestFactory()
        self.request = request_factory.get('/report/mypage/')
    def test_update_report_title(self):
        # First (only) user created in setUp -> pk=1.
        self.request.user = User.objects.get(pk=1)
        self.client.post('/report/report_entries/',
                         {'report_author': self.request.user,
                          'report_title': 'test title',
                          'report_content': 'test'
                          })
        self.request.report = Report.objects.get(pk=1)
        # Keep author/content, change only the title.
        report = {
            'report_author': self.request.report.report_author,
            'report_title': 'update title',
            'report_content': self.request.report.report_content
        }
        self.client.post('/report/1/edition/', report)
        # Re-fetch to observe the persisted change.
        self.request.report = Report.objects.get(pk=1)
        self.assertEqual(self.request.report.report_title, 'update title')
|
Python
| 0.000729
|
@@ -2224,96 +2224,180 @@
-response = self.client.delete(report)%0A self.assertEqual(response.status_code, 404
+before_count = Report.objects.count()%0A self.client.delete(report)%0A after_count = Report.objects.count()%0A self.assertEqual(before_count, after_count + 1
)%0A%0A%0A
|
9c7d335780e219893f0976cda6a5388b51fa0a64
|
Update to v19.2.6
|
mycroft/version/__init__.py
|
mycroft/version/__init__.py
|
# Copyright 2017 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
from genericpath import exists, isfile
from os.path import join, expanduser
from mycroft.configuration import Configuration
from mycroft.util.log import LOG
# The following lines are replaced during the release process.
# START_VERSION_BLOCK
CORE_VERSION_MAJOR = 19
CORE_VERSION_MINOR = 2
CORE_VERSION_BUILD = 5
# END_VERSION_BLOCK
CORE_VERSION_TUPLE = (CORE_VERSION_MAJOR,
CORE_VERSION_MINOR,
CORE_VERSION_BUILD)
CORE_VERSION_STR = '.'.join(map(str, CORE_VERSION_TUPLE))
class VersionManager:
    @staticmethod
    def get():
        """Return the installed core/enclosure versions from version.json.

        Falls back to ``None`` values when the file is absent or unreadable.
        """
        data_dir = expanduser(Configuration.get()['data_dir'])
        version_file = join(data_dir, 'version.json')
        # isfile() already implies existence.
        if isfile(version_file):
            try:
                with open(version_file) as f:
                    return json.load(f)
            except Exception:
                LOG.error("Failed to load version from '%s'" % version_file)
        return {"coreVersion": None, "enclosureVersion": None}
def check_version(version_string):
    """
    Check if current version is equal or higher than the
    version string provided to the function

    Args:
        version_string (string): version string ('Major.Minor.Build')
    """
    required = tuple(int(part) for part in version_string.split('.'))
    # Tuple comparison is lexicographic: major, then minor, then build.
    return CORE_VERSION_TUPLE >= required
|
Python
| 0
|
@@ -903,9 +903,9 @@
D =
-5
+6
%0A# E
|
3a653ec11fe895f625485085fa89053febded331
|
Make implementation agree with documentation (both latex and docstring). Even if ignore_errors was true, an exception would occur if path didn't exist.
|
Lib/shutil.py
|
Lib/shutil.py
|
"""Utility functions for copying files and directory trees.
XXX The functions here don't copy the resource fork or other metadata on Mac.
"""
import os
import sys
import stat
import exceptions
__all__ = ["copyfileobj","copyfile","copymode","copystat","copy","copy2",
"copytree","move","rmtree","Error"]
# Aggregated failure type: copytree() raises it with a list of
# (srcname, dstname, why) tuples collected during the copy.
class Error(exceptions.EnvironmentError):
    pass
def copyfileobj(fsrc, fdst, length=16*1024):
    """copy data from file-like object fsrc to file-like object fdst"""
    # Stream in fixed-size chunks so large files never need to fit in memory.
    read, write = fsrc.read, fdst.write
    chunk = read(length)
    while chunk:
        write(chunk)
        chunk = read(length)
def copyfile(src, dst):
    """Copy data from src to dst"""
    # Same-pathname guard (all platforms): comparing normalized absolute
    # paths catches the obvious "cp f f" case.
    if os.path.normcase(os.path.abspath(src)) == \
       os.path.normcase(os.path.abspath(dst)):
        return
    fsrc = None
    fdst = None
    try:
        fsrc = open(src, 'rb')
        fdst = open(dst, 'wb')
        copyfileobj(fsrc, fdst)
    finally:
        # Close in reverse order; each handle may still be None if the
        # corresponding open() failed.
        if fdst:
            fdst.close()
        if fsrc:
            fsrc.close()
def copymode(src, dst):
    """Copy mode bits from src to dst"""
    # Platforms without chmod (guarded by hasattr) are silently skipped.
    if hasattr(os, 'chmod'):
        # S_IMODE masks off file-type bits; chmod takes only permissions.
        os.chmod(dst, stat.S_IMODE(os.stat(src).st_mode))
def copystat(src, dst):
    """Copy all stat info (mode bits, atime and mtime) from src to dst"""
    st = os.stat(src)
    # Times first, then permissions -- same order as before; both calls are
    # hasattr-guarded for platforms lacking them.
    if hasattr(os, 'utime'):
        os.utime(dst, (st.st_atime, st.st_mtime))
    if hasattr(os, 'chmod'):
        os.chmod(dst, stat.S_IMODE(st.st_mode))
def copy(src, dst):
    """Copy data and mode bits ("cp src dst").

    The destination may be a directory.
    """
    target = dst
    if os.path.isdir(target):
        # Copying into a directory: keep the source's basename.
        target = os.path.join(target, os.path.basename(src))
    copyfile(src, target)
    copymode(src, target)
def copy2(src, dst):
    """Copy data and all stat info ("cp -p src dst").

    The destination may be a directory.
    """
    target = dst
    if os.path.isdir(target):
        # Copying into a directory: keep the source's basename.
        target = os.path.join(target, os.path.basename(src))
    copyfile(src, target)
    copystat(src, target)
def copytree(src, dst, symlinks=0):
    """Recursively copy a directory tree using copy2().

    The destination directory must not already exist.
    Errors are reported to standard output.

    If the optional symlinks flag is true, symbolic links in the
    source tree result in symbolic links in the destination tree; if
    it is false, the contents of the files pointed to by symbolic
    links are copied.

    XXX Consider this example code rather than the ultimate tool.
    """
    names = os.listdir(src)
    os.mkdir(dst)
    errors = []
    for name in names:
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if symlinks and os.path.islink(srcname):
                # Recreate the link itself rather than copying its target.
                linkto = os.readlink(srcname)
                os.symlink(linkto, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks)
            else:
                copy2(srcname, dstname)
            # XXX What about devices, sockets etc.?
        except (IOError, os.error), why:
            # Collect failures and keep copying; raise them all at once below.
            errors.append((srcname, dstname, why))
    if errors:
        raise Error, errors
def rmtree(path, ignore_errors=0, onerror=None):
"""Recursively delete a directory tree.
If ignore_errors is set, errors are ignored; otherwise, if
onerror is set, it is called to handle the error; otherwise, an
exception is raised.
"""
cmdtuples = []
_build_cmdtuple(path, cmdtuples)
for func, arg in cmdtuples:
try:
func(arg)
except OSError:
exc = sys.exc_info()
if ignore_errors:
pass
elif onerror is not None:
onerror(func, arg, exc)
else:
raise exc[0], (exc[1][0], exc[1][1] + ' removing '+arg)
# Helper for rmtree()
def _build_cmdtuple(path, cmdtuples):
for f in os.listdir(path):
real_f = os.path.join(path,f)
if os.path.isdir(real_f) and not os.path.islink(real_f):
_build_cmdtuple(real_f, cmdtuples)
else:
cmdtuples.append((os.remove, real_f))
cmdtuples.append((os.rmdir, path))
def move(src, dst):
    """Recursively move a file or directory to another location.

    If the destination is on our current filesystem, then simply use
    rename. Otherwise, copy src to the dst and then remove src.
    A lot more could be done here...  A look at a mv.c shows a lot of
    the issues this implementation glosses over.
    """
    try:
        os.rename(src, dst)
    except OSError:
        # rename failed (typically a cross-device move): fall back to
        # copy-then-delete, preserving metadata via copy2/copytree.
        if os.path.isdir(src):
            copytree(src, dst, symlinks=1)
            rmtree(src)
        else:
            copy2(src,dst)
            os.unlink(src)
|
Python
| 0
|
@@ -3233,17 +3233,21 @@
_errors=
-0
+False
, onerro
@@ -3452,25 +3452,24 @@
is raised.%0A
-%0A
%22%22%22%0A
@@ -3479,24 +3479,52 @@
tuples = %5B%5D%0A
+ arg = path%0A try:%0A
_build_c
@@ -3548,24 +3548,28 @@
tuples)%0A
+
+
for func, ar
@@ -3584,29 +3584,16 @@
tuples:%0A
- try:%0A
@@ -3606,20 +3606,16 @@
nc(arg)%0A
-
exce
@@ -3634,20 +3634,16 @@
-
exc = sy
@@ -3663,20 +3663,16 @@
-
if ignor
@@ -3697,21 +3697,13 @@
-
-
pass%0A
-
@@ -3744,20 +3744,16 @@
-
-
onerror(
@@ -3772,38 +3772,30 @@
xc)%0A
-
else:%0A
-
|
0d77cb02dfec448c1de8def96c9b73856b602759
|
Update models.py
|
user_sessions/models.py
|
user_sessions/models.py
|
import django
from django.conf import settings
from django.contrib.sessions.models import SessionManager
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Session(models.Model):
"""
Session objects containing user session information.
Django provides full support for anonymous sessions. The session
framework lets you store and retrieve arbitrary data on a
per-site-visitor basis. It stores data on the server side and
abstracts the sending and receiving of cookies. Cookies contain a
session ID -- not the data itself.
Additionally this session object providers the following properties:
``user``, ``user_agent`` and ``ip``.
"""
session_key = models.CharField(_('session key'), max_length=40,
primary_key=True)
session_data = models.TextField(_('session data'))
expire_date = models.DateTimeField(_('expiry date'), db_index=True)
objects = SessionManager()
class Meta:
verbose_name = _('session')
verbose_name_plural = _('sessions')
def get_decoded(self):
return SessionStore(None, None).decode(self.session_data)
user = models.ForeignKey(getattr(settings, 'AUTH_USER_MODEL', 'auth.User'),
null=True)
user_agent = models.CharField(max_length=200)
last_activity = models.DateTimeField(auto_now=True)
if django.VERSION[:2] >= (1, 6):
ip = models.GenericIPAddressField()
else:
ip = models.IPAddressField()
# At bottom to avoid circular import
from .backends.db import SessionStore
|
Python
| 0
|
@@ -1485,16 +1485,33 @@
ssField(
+verbose_name='IP'
)%0A el
@@ -1549,16 +1549,33 @@
ssField(
+verbose_name='IP'
)%0A%0A%0A# At
|
b5814202bdcc5a15503d6c52c59aa2eb8736b7ec
|
Add whitelist to redpill plugin
|
proxy/plugins/redpill.py
|
proxy/plugins/redpill.py
|
# redpill.py PSO2Proxy plugin
# For use with redpill.py flask webapp and website for packet logging and management
import sqlite
dbLocation = '/var/pso2-www/redpill/redpill.db'
#TODO
|
Python
| 0
|
@@ -121,16 +121,25 @@
t sqlite
+, plugins
%0A%0AdbLoca
@@ -184,10 +184,930 @@
db'%0A
-%0A#TODO
+enabled = False%0A%0Aif enabled:%0A%09@plugins.onStartHook%0A%09def redpillInit():%0A%09%09print(%22%5BRedpill%5D Redpill initilizing with database %25s.%22 %25 dbLocation)%0A%0A%09@plugins.packetHook(0x11, 0x0)%0A%09def loginPacketHook(context, packet):%0A%09%09username = packet%5B0x8:0x48%5D.decode('utf-8')%0A%09%09username = username.rstrip('%5C0')%0A%09%09if not user_exists(username):%0A%09%09%09context.loseConnection()%0A%09%09%09print(%22%5BRedpill%5D %25s is not in the whitelist database. Hanging up.%22 %25 username)%0A%0A%09@plugins.onConnectionHook%0A%09def registerClient(client):%0A%09%09pass%0A%0A%09@plugins.onConnectionLossHook%0A%09def archivePackets(client):%0A%09%09pass%0A%0A%09def getConn():%0A%09%09conn = sqlite3.connect(dbLocation)%0A%09%09conn.row_factory = sqlite3.Row%0A%09%09return conn%0A%0A%09def user_exists(username):%0A%09%09con = getConn()%0A%09%09with con:%0A%09%09%09cur = con.cursor()%0A%09%09%09cur.execute(%22select * from users where username = ? COLLATE NOCASE%22, (username, ))%0A%09%09%09check = cur.fetchone()%0A%09%09%09if check == None:%0A%09%09%09%09return False%0A%09%09%09else:%0A%09%09%09%09return True
|
2bd8c77e1b1282412787d88f347e99f361a4d65f
|
disable flow class by default
|
config/settings/local.py
|
config/settings/local.py
|
# -*- coding: utf-8 -*-
"""
Local settings
- Run in Debug mode
- Use console backend for emails
- Add Django Debug Toolbar
- Add django-extensions as app
"""
import socket
import os
from .common import * # noqa
# DEBUG
# ------------------------------------------------------------------------------
DEBUG = env.bool('DJANGO_DEBUG', default=True)
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
SECRET_KEY = env('DJANGO_SECRET_KEY', default='!)_b4xaov6!0b^_=96*wh@p-9si4p0ho-@4&g7eija9gaxhmo!')
# Mail settings
# ------------------------------------------------------------------------------
EMAIL_PORT = 1025
EMAIL_HOST = 'localhost'
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND',
default='django.core.mail.backends.console.EmailBackend')
# CACHING
# ------------------------------------------------------------------------------
REDIS_MAX_CONNECTIONS = env.int('REDIS_MAX_CONNECTIONS', default=1)
CACHES = {
'default': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': REDIS_URL,
'OPTIONS': {
'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool',
'CONNECTION_POOL_KWARGS': {"max_connections": REDIS_MAX_CONNECTIONS, 'timeout': 20},
'CLIENT_CLASS': 'django_redis.client.DefaultClient',
'IGNORE_EXCEPTIONS': True, # mimics memcache behavior.
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
}
}
}
# django-debug-toolbar
# ------------------------------------------------------------------------------
MIDDLEWARE += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INSTALLED_APPS += ('debug_toolbar', )
INTERNAL_IPS = ['127.0.0.1', '10.0.2.2', ]
# tricks to have debug toolbar when developing with docker
if os.environ.get('USE_DOCKER') == 'yes':
ip = socket.gethostbyname(socket.gethostname())
INTERNAL_IPS += [ip[:-1] + "1"]
DEBUG_TOOLBAR_CONFIG = {
'DISABLE_PANELS': [
'debug_toolbar.panels.redirects.RedirectsPanel',
],
'SHOW_TEMPLATE_CONTEXT': True,
}
# django-extensions
# ------------------------------------------------------------------------------
INSTALLED_APPS += ('django_extensions', )
# TESTING
# ------------------------------------------------------------------------------
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# Add the HireFire middleware for monitoring queue to scale dynos
# See: https://hirefire.readthedocs.io/
HIREFIRE_PROCS = ['config.procs.WorkerProc']
HIREFIRE_TOKEN = env('HIREFIRE_TOKEN', default="localtest")
# Site URL
SITE_URL = env('SITE_URL', default="http://localhost:8000")
# Github credentials
GITHUB_USERNAME = env('GITHUB_USERNAME')
GITHUB_PASSWORD = env('GITHUB_PASSWORD')
GITHUB_WEBHOOK_BASE_URL = env('GITHUB_WEBHOOK_BASE_URL')
GITHUB_WEBHOOK_SECRET = env('GITHUB_WEBHOOK_SECRET')
# Salesforce OAuth Connected App credentials
CONNECTED_APP_CLIENT_ID = env('CONNECTED_APP_CLIENT_ID')
CONNECTED_APP_CLIENT_SECRET = env('CONNECTED_APP_CLIENT_SECRET')
CONNECTED_APP_CALLBACK_URL = env('CONNECTED_APP_CALLBACK_URL')
# SFDX Credentials
SFDX_CLIENT_ID = env('SFDX_CLIENT_ID')
SFDX_HUB_KEY = env('SFDX_HUB_KEY')
SFDX_HUB_USERNAME = env('SFDX_HUB_USERNAME')
GITHUB_STATUS_UPDATES_ENABLED = env.bool('GITHUB_STATUS_UPDATES_ENABLED', False)
METACI_FLOW_SUBCLASS_ENABLED = env.bool('METACI_FLOW_SUBCLASS_ENABLED', True)
|
Python
| 0
|
@@ -3625,13 +3625,14 @@
ABLED',
-Tru
+Fals
e)
|
28f74edc5b2902ccb9026388db789807a5c2e1f1
|
Append layout and seat if in csv if exist in ticket.
|
congressus/invs/utils.py
|
congressus/invs/utils.py
|
from django.conf import settings
from django.http import HttpResponse
from .models import Invitation
from tickets.utils import concat_pdf
from tickets.utils import generate_pdf
def gen_csv_from_generator(ig, numbered=True, string=True):
csv = []
name = ig.type.name
for i, inv in enumerate(ig.invitations.all()):
line = '%s, %s' % (inv.order, name)
if numbered:
line = ('%d,' % (i + 1)) + line
csv.append(line)
if string:
return '\n'.join(csv)
return csv
def gen_csv_from_generators(igs):
csv = []
for ig in igs:
csv += gen_csv_from_generator(ig, numbered=False, string=False)
out = []
for i, line in enumerate(csv):
out.append(('%d ' % (i + 1)) + line)
return '\n'.join(out)
def gen_pdf(igs):
files = []
for inv in Invitation.objects.filter(generator__in=igs):
print(inv)
files.append(generate_pdf(inv, asbuf=True, inv=True))
return concat_pdf(files)
def get_ticket_format(invs, pf):
""" With a list of invitations or invitations,generate ticket output """
if pf == 'csv':
response = HttpResponse(content_type='application/csv')
response['Content-Disposition'] = 'filename="invs.csv"'
response.write(gen_csv_from_generators(invs))
elif pf == 'thermal':
pdf = gen_pdf(invs)
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'filename="tickets.pdf"'
response.write(pdf)
elif pf == 'A4':
pdf = gen_pdf(invs)
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename="tickets.pdf"'
response.write(pdf)
else:
raise "Ticket format not found"
return response
|
Python
| 0
|
@@ -430,16 +430,239 @@
+ line%0A
+ if inv.seat_layout and inv.seat:%0A row, col = inv.seat.split('-')%0A col = int(col) + inv.seat_layout.column_start_number - 1%0A line += ', %25s, %25s, %25s' %25 (inv.seat_layout.gate, row, col)%0A
|
d7284d82367a3f9b7a3db4de88d3c06e92542b23
|
fix bug in domain blacklist
|
muckrock/task/signals.py
|
muckrock/task/signals.py
|
"""Signals for the task application"""
from django.db.models.signals import post_save
from email.utils import parseaddr
import logging
from muckrock.task.models import OrphanTask, BlacklistDomain
logger = logging.getLogger(__name__)
def domain_blacklist(sender, instance, **kwargs):
"""Blacklist certain domains - automatically reject tasks from them"""
# pylint: disable=unused-argument
_, email = parseaddr(instance.communication.priv_from_who)
domain = email.split('@')[1]
logger.info('Checking domain %s against blacklist', domain)
if BlacklistDomain.objects.filter(domain=domain).exists():
instance.resolve()
post_save.connect(domain_blacklist, sender=OrphanTask,
dispatch_uid='muckrock.task.signals.domain_blacklist')
|
Python
| 0
|
@@ -457,16 +457,58 @@
om_who)%0A
+%0A if '@' not in email:%0A return%0A%0A
doma
|
009f1ec1580653dfc600c505622b95d153be231d
|
fix the id column
|
util/create_database.py
|
util/create_database.py
|
#!/usr/bin/env python
import os
import sys
import sqlite3
base_dir = os.path.dirname(os.path.realpath(os.path.join(__file__, '..')))
db_path = os.path.join(base_dir, 'db/lightspeed.db')
if len(sys.argv) == 2:
db_path = os.path.realpath(sys.argv[1])
try:
conn = sqlite3.connect(db_path)
c = conn.cursor();
c.execute('''
CREATE TABLE IF NOT EXISTS measurements (
id UNSIGNED INT AUTO_INCREMENT,
pingMs UNSIGNED DECIMAL(10, 3),
downloadMbit DECIMAL(5, 2),
uploadMbit DECIMAL(5, 2),
timestamp DATETIME,
durationSecs UNSIGNED INT,
isError INT DEFAULT 0,
PRIMARY KEY (id)
);
''')
conn.commit()
print('Database created in', db_path)
except sqlite3.Error as e:
print('Error:', e.args[0])
finally:
if conn:
conn.close()
|
Python
| 0.999679
|
@@ -393,35 +393,27 @@
id
-UNSIGNED INT AUTO_INCREMENT
+INTEGER PRIMARY KEY
,%0A
@@ -581,16 +581,20 @@
GNED INT
+EGER
,%0A
@@ -606,16 +606,20 @@
rror INT
+EGER
DEFAULT
@@ -624,34 +624,8 @@
LT 0
-,%0A PRIMARY KEY (id)
%0A
|
3f94d2420e047bddd2223fdb0d865f8b42ab8f10
|
add ip to json response
|
freegeoip/views.py
|
freegeoip/views.py
|
# coding: utf-8
import os
import functools
import socket
import struct
import cyclone.escape
import cyclone.locale
import cyclone.web
from twisted.internet import defer, threads
from twisted.names.client import getHostByName
from twisted.python import log
from freegeoip.utils import BaseHandler
from freegeoip.utils import DatabaseMixin
def _ip2uint32(address):
return struct.unpack("!I", socket.inet_aton(address))[0]
def _gethostbyname(hostname):
#return getHostByName(hostname)
return threads.deferToThread(socket.gethostbyname, hostname)
def checkQuota(method):
@defer.inlineCallbacks
@functools.wraps(method)
def wrapper(self, *args, **kwargs):
key = "ip:%s" % self.request.remote_ip
try:
n = yield self.redis.get(key)
except Exception, e:
log.msg("Redis failed to get('%s'): %s" % (key, str(e)))
raise cyclone.web.HTTPError(503)
if n is None:
try:
yield self.redis.set(key, 1)
except Exception, e:
log.msg("Redis failed to set('%s', 1): %s" % (key, str(e)))
raise cyclone.web.HTTPError(503)
try:
yield self.redis.expire(key, self.settings.expire)
except Exception, e:
log.msg("Redis failed to expire('%s', %d): %s" % \
(key, self.settings.expire, str(e)))
raise cyclone.web.HTTPError(503)
elif n <= self.settings.max_requests:
try:
yield self.redis.incr(key)
except Exception, e:
log.msg("Redis failed to incr('%s'): %s" % (key, str(e)))
raise cyclone.web.HTTPError(503)
else:
# Over quota, take this.
raise cyclone.web.HTTPError(403) # Forbidden
yield defer.maybeDeferred(method, self, *args, **kwargs)
defer.returnValue(None)
return wrapper
class IndexHandler(BaseHandler):
def get(self):
self.render("index.html")
class SearchIpHandler(BaseHandler, DatabaseMixin):
@checkQuota
@defer.inlineCallbacks
def get(self, fmt, address):
address = address or self.request.remote_ip
if len(address) > 256:
raise cyclone.web.HTTPError(400)
try:
ip = _ip2uint32(address)
except:
try:
address = yield _gethostbyname(address)
ip = _ip2uint32(address)
except:
raise cyclone.web.HTTPError(400)
rs = self.sqlite.runQuery("""
SELECT data FROM ip_group_city
WHERE ip_start < ?
ORDER BY ip_start DESC LIMIT 1""", (ip,))
if rs:
json_data = rs[0][0]
else:
raise cyclone.web.HTTPError(404)
if fmt in ("csv", "xml"):
rs = cyclone.escape.json_decode(json_data)
rs["ip"] = address
self.set_header("Content-Type", "text/%s" % fmt)
self.render("geoip.%s" % fmt, data=rs)
else:
callback = self.get_argument("callback", None)
if callback:
self.set_header("Content-Type", "text/javascript")
self.finish("%s(%s);" % (callback, json_data))
else:
self.finish(json_data)
class SearchTzHandler(BaseHandler, DatabaseMixin):
@checkQuota
def get(self, fmt, country_code, region_code):
try:
rs = self.sqlite.runQuery("""
SELECT tzd.gmtoff, tzd.isdst, tz.name
FROM timezones_data tzd
JOIN timezones tz ON tz.id = tzd.timezone
WHERE tzd.timezone = (
SELECT timezone
FROM fips_regions
WHERE country_code = ?
AND region_code = ? )
AND tzd.start < strftime('%s')
ORDER BY tzd.start DESC LIMIT 1
""", (country_code, region_code or "00"))
if rs:
rs = {"gmtoff":rs[0][0], "isdst":rs[0][1], "timezone":rs[0][2]}
except Exception, e:
log.err()
raise cyclone.web.HTTPError(503)
if not rs:
raise cyclone.web.HTTPError(404)
if fmt in ("csv", "xml"):
self.set_header("Content-Type", "text/%s" % fmt)
self.render("timezone.%s" % fmt, data=rs)
else:
callback = self.get_argument("callback", None)
json_data = cyclone.escape.json_encode(rs)
if callback:
self.finish("%s(%s);" % (callback, json_data))
else:
self.finish(json_data)
|
Python
| 0.000001
|
@@ -2820,46 +2820,8 @@
4)%0A%0A
- if fmt in (%22csv%22, %22xml%22):%0A
@@ -2875,20 +2875,16 @@
-
-
rs%5B%22ip%22%5D
@@ -2893,16 +2893,51 @@
address
+%0A%0A if fmt in (%22csv%22, %22xml%22):
%0A
@@ -3033,32 +3033,32 @@
%25 fmt, data=rs)%0A
-
else:%0A
@@ -3047,32 +3047,87 @@
)%0A else:%0A
+ json_data = cyclone.escape.json_encode(rs)%0A
call
|
49a6541d22e3f6bc0c7d81689127c2efa5e29ec8
|
Use dvipng to format sympy.Matrix to PNG display
|
IPython/extensions/sympyprinting.py
|
IPython/extensions/sympyprinting.py
|
"""
A print function that pretty prints sympy Basic objects.
:moduleauthor: Brian Granger
Usage
=====
Once the extension is loaded, Sympy Basic objects are automatically
pretty-printed.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.lib.latextools import latex_to_png
from IPython.testing import decorators as dec
# use @dec.skipif_not_sympy to skip tests requiring sympy
try:
from sympy import pretty, latex
except ImportError:
pass
#-----------------------------------------------------------------------------
# Definitions of special display functions for use with IPython
#-----------------------------------------------------------------------------
def print_basic_unicode(o, p, cycle):
"""A function to pretty print sympy Basic objects."""
if cycle:
return p.text('Basic(...)')
out = pretty(o, use_unicode=True)
if '\n' in out:
p.text(u'\n')
p.text(out)
def print_png(o):
"""A function to display sympy expression using LaTex -> PNG."""
s = latex(o, mode='inline')
# mathtext does not understand certain latex flags, so we try to replace
# them with suitable subs.
s = s.replace('\\operatorname','')
s = s.replace('\\overline', '\\bar')
png = latex_to_png(s)
return png
def can_print_latex(o):
"""
Return True if type o can be printed with LaTeX.
If o is a container type, this is True if and only if every element of o
can be printed with LaTeX.
"""
import sympy
if isinstance(o, (list, tuple, set, frozenset)):
return all(can_print_latex(i) for i in o)
elif isinstance(o, dict):
return all((isinstance(i, basestring) or can_print_latex(i)) and can_print_latex(o[i]) for i in o)
elif isinstance(o,(sympy.Basic, sympy.matrices.Matrix, int, long, float)):
return True
return False
def print_latex(o):
"""A function to generate the latex representation of sympy
expressions."""
if can_print_latex(o):
s = latex(o, mode='plain')
s = s.replace('\\dag','\\dagger')
s = s.strip('$')
return '$$%s$$' % s
# Fallback to the string printer
return None
_loaded = False
def load_ipython_extension(ip):
"""Load the extension in IPython."""
import sympy
global _loaded
if not _loaded:
plaintext_formatter = ip.display_formatter.formatters['text/plain']
for cls in (object, str):
plaintext_formatter.for_type(cls, print_basic_unicode)
printable_containers = [list, tuple]
# set and frozen set were broken with SymPy's latex() function, but
# was fixed in the 0.7.1-git development version. See
# http://code.google.com/p/sympy/issues/detail?id=3062.
if sympy.__version__ > '0.7.1':
printable_containers += [set, frozenset]
else:
plaintext_formatter.for_type(cls, print_basic_unicode)
plaintext_formatter.for_type_by_name(
'sympy.core.basic', 'Basic', print_basic_unicode
)
plaintext_formatter.for_type_by_name(
'sympy.matrices.matrices', 'Matrix', print_basic_unicode
)
png_formatter = ip.display_formatter.formatters['image/png']
png_formatter.for_type_by_name(
'sympy.core.basic', 'Basic', print_png
)
for cls in [dict, int, long, float] + printable_containers:
png_formatter.for_type(cls, print_png)
latex_formatter = ip.display_formatter.formatters['text/latex']
latex_formatter.for_type_by_name(
'sympy.core.basic', 'Basic', print_latex
)
latex_formatter.for_type_by_name(
'sympy.matrices.matrices', 'Matrix', print_latex
)
for cls in printable_containers:
# Use LaTeX only if every element is printable by latex
latex_formatter.for_type(cls, print_latex)
_loaded = True
|
Python
| 0
|
@@ -1748,16 +1748,236 @@
rn png%0A%0A
+%0Adef print_display_png(o):%0A %22%22%22A function to display sympy expression using LaTex -%3E PNG.%22%22%22%0A s = latex(o, mode='plain')%0A s = s.strip('$')%0A png = latex_to_png('$$%25s$$' %25 s, backend='dvipng')%0A return png%0A%0A%0A
def can_
@@ -4013,32 +4013,149 @@
t_png%0A )%0A
+ png_formatter.for_type_by_name(%0A 'sympy.matrices.matrices', 'Matrix', print_display_png%0A )%0A
for cls
|
30c2463ea91a6ae5c43e3c31d8efae093e9708c3
|
fix attempt
|
viaduct/models/group.py
|
viaduct/models/group.py
|
#!/usr/bin/python
from viaduct import db
from viaduct.models.permission import GroupPermission
user_group = db.Table('user_group',
db.Column('user_id', db.Integer, db.ForeignKey('user.id')),
db.Column('group_id', db.Integer, db.ForeignKey('group.id'))
)
class Group(db.Model):
__tablename__ = 'group'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(256), unique=True)
users = db.relationship('User', secondary=user_group,
backref=db.backref('groups', lazy='dynamic'), lazy='dynamic')
def __init__(self, name):
self.name = name
def has_user(self, user):
return self.users.filter(user_group.c.user_id==user.id).count() > 0
def add_user(self, user):
if not self.has_user(user):
self.users.append(user)
return self
def delete_user(self, user):
if self.has_user(user):
self.users.remove(user)
def get_users(self):
# FIXME: backwards compatibility.
return self.users
#def get_permission(self, name):
# permission = self.permissions.join(Permission).filter(Permission.name==name).order_by(GroupPermission.allowed.desc()).first()
# if not permission:
# return 0
# if permission.allowed:
# return 1
# else:
# return -1
#def has_permission(self, name):
# permission = self.permissions.join(Permission).filter(Permission.name==name).order_by(GroupPermission.allowed.desc()).first()
# if permission:
# return permission.allowed
# return False
#
#def add_permission(self, name, allowed=True):
# self.delete_permission(name)
# permission = Permission.query.filter(Permission.name==name).first()
# db.session.add(GroupPermission(self, permission, allowed))
# db.session.commit()
#def delete_permission(self, name):
# for permission in self.permissions.join(Permission).filter(Permission.name==name).all():
# db.session.delete(permission)
# db.session.commit()
|
Python
| 0.000013
|
@@ -590,16 +590,57 @@
user):%0A
+%09%09if not user:%0A%09%09%09return False;%0A%09%09else:%0A%09
%09%09return
|
f164d08d5364ba6db333b1be6ce8e2f148f976ec
|
Basic 'hello' command
|
__init__.py
|
__init__.py
|
from __future__ import unicode_literals
from slackclient import SlackClient
import time
import secrets
sc = SlackClient(secrets.SLACK_API_KEY)
channels = sc.api_call('channels.list', exclude_archived=1)
print(channels)
if sc.rtm_connect():
while True:
events = sc.rtm_read()
for event in events:
print(time.strftime("%Y-%m-%d %H:%M:%S ", time.gmtime()) + str(event))
|
Python
| 0.99994
|
@@ -203,44 +203,1188 @@
)%0Apr
-int(channels)%0A%0Aif sc.rtm_connect():%0A
+efix = '!'%0A%0A%0Adef parse_command(event: dict):%0A # Validate event%0A if 'type' not in event:%0A return%0A if event%5B'type'%5D != 'message':%0A return%0A if 'text' not in event:%0A return%0A%0A # Get the message%0A message = event%5B'text'%5D.strip()%0A # Check if the message is for us%0A if not message.startswith(prefix):%0A return%0A%0A # Perform the appropriate action%0A command = message%5Blen(prefix):%5D.strip()%0A action = command.split()%5B0%5D%0A if action == 'help':%0A sc.api_call('chat.postMessage',%0A channel=event%5B'channel'%5D,%0A text='Right now, I support the following commands:%5Cn%60!help%60%5Cn%60!hello%60%5Cn'%0A + 'This help is also literally just a string right now.%5Cn'%0A + 'A more robust architecture would be nice.')%0A if action == 'hello':%0A greeting = 'Hey! :partyparrot:'%0A if 'user' in event:%0A user = event%5B'user'%5D%0A greeting = 'Hey %3C@%7B%7D%3E! :partyparrot:'.format(user)%0A sc.api_call('chat.postMessage',%0A channel=event%5B'channel'%5D,%0A text=greeting)%0A%0A%0Adef main():%0A if sc.rtm_connect():%0A
@@ -1403,16 +1403,20 @@
+
+
events =
@@ -1438,16 +1438,20 @@
+
for even
@@ -1463,16 +1463,57 @@
events:%0A
+ parse_command(event)%0A
@@ -1588,8 +1588,48 @@
event))%0A
+%0A%0Aif __name__ == '__main__':%0A main()%0A
|
1e0c72560537d35f46f532b7ac3e48f52bb5ae31
|
Add logger to __init__.py
|
__init__.py
|
__init__.py
|
from maya import cmds
import json
import imp
import os
def getClassList():
"""
Args:
param (logger): logger
Return:
list: list of classes
"""
moduleDirName = "rush"
mayaScriptDir = cmds.internalVar(userScriptDir=True)
moduleRoot = os.path.join(mayaScriptDir, moduleDirName)
moduleList = []
for root, dirs, files in os.walk(moduleRoot):
for f in files:
fullPath = os.path.join(root, f)
if (fullPath.endswith(".py") and not
fullPath.endswith("__init__.py")):
moduleList.append(fullPath)
moduleObjectList = []
for path in moduleList:
# Create module names for import, for exapmle ...
#
# "rush/template"
# "animation/animate"
# "common/create"
# "common/display"
name = os.path.splitext(path)[0].split("/")
name = "/".join(name[-2:])
try:
mod = imp.load_source(name, path)
moduleObjectList.append(mod)
except ImportError:
pass
commandClassList = [i.Commands for i in moduleObjectList]
cmdsDict = {}
for c in commandClassList:
cmdsDict.update(c.commandDict)
saveCommands(cmdsDict)
return commandClassList
def saveCommands(cmdsDict):
""" Save all commands as a json file in the maya user directory
Args:
cmdsDict (dict): All commands
Return:
None
"""
outPath = os.path.normpath(os.path.join(mayaScriptDir, "rushCmds.json"))
with open(outPath, 'w') as outFile:
json.dump(
cmdsDict,
outFile,
indent=4,
separators=(',', ':'),
sort_keys=True)
class RushCommands():
pass
# Re-difine RushCommands class to inherit all comamnd classes for the list
cl = tuple(getClassList())
RushCommands = type('RushCommands', cl, dict(RushCommands.__dict__))
|
Python
| 0.001012
|
@@ -16,16 +16,32 @@
t cmds%0D%0A
+import logging%0D%0A
import j
@@ -66,24 +66,226 @@
mport os%0D%0A%0D%0A
+# level = logging.DEBUG%0D%0Alevel = logging.ERROR%0D%0A%0D%0Alogger = logging.getLogger(__name__)%0D%0Ahandler = logging.StreamHandler()%0D%0Alogger.addHandler(handler)%0D%0Alogger.setLevel(level)%0D%0Ahandler.setLevel(level)%0D%0A%0D%0A
%0D%0Adef getCla
@@ -556,16 +556,67 @@
irName)%0D
+%0A logger.debug(%22Module path: %25s %22 %25 moduleRoot)%0D
%0A%0D%0A m
@@ -855,16 +855,74 @@
t__.py%22)
+ and not%0D%0A fullPath.endswith(%22rush.py%22)
):%0D%0A
@@ -1419,20 +1419,8 @@
cept
- ImportError
:%0D%0A
@@ -1426,28 +1426,73 @@
-pass
+logger.debug(%22Failed to load module : %25s%22 %25 path)
%0D%0A%0D%0A comm
@@ -1545,16 +1545,85 @@
ctList%5D%0D
+%0A logger.debug(%22All command classes: %25s%22 %25 str(commandClassList))%0D
%0A%0D%0A c
@@ -1706,24 +1706,102 @@
andDict)%0D%0A%0D%0A
+ outPath = os.path.normpath(os.path.join(mayaScriptDir, %22rushCmds.json%22))%0D%0A
saveComm
@@ -1797,32 +1797,41 @@
saveCommands(
+outPath,
cmdsDict)%0D%0A%0D%0A
@@ -1877,16 +1877,22 @@
ommands(
+path,
cmdsDict
@@ -1964,20 +1964,16 @@
ectory%0D%0A
-
%0D%0A Ar
@@ -1969,32 +1969,65 @@
y%0D%0A%0D%0A Args:%0D%0A
+ path (str): output path%0D%0A
cmdsDict
@@ -2049,20 +2049,16 @@
mmands%0D%0A
-
%0D%0A Re
@@ -2078,20 +2078,16 @@
None%0D%0A
-
%0D%0A %22%22
@@ -2093,88 +2093,80 @@
%22%22%0D%0A
+%0D%0A
-outPath = os.path.normpath(os.path.join(mayaScriptDir, %22rushCmds.json%22))%0D%0A%0D%0A
+logger.debug(%22Saving command file to %25s%22 %25 path)%0D%0A%0D%0A try:%0D%0A
@@ -2175,20 +2175,17 @@
th open(
-outP
+p
ath, 'w'
@@ -2199,16 +2199,20 @@
tFile:%0D%0A
+
@@ -2227,32 +2227,36 @@
p(%0D%0A
+
cmdsDict,%0D%0A
@@ -2262,16 +2262,20 @@
+
+
outFile,
@@ -2276,16 +2276,20 @@
tFile,%0D%0A
+
@@ -2315,16 +2315,20 @@
+
+
separato
@@ -2343,16 +2343,20 @@
':'),%0D%0A
+
@@ -2376,16 +2376,90 @@
=True)%0D%0A
+ except IOError:%0D%0A logger.debug(%22Failed to save command file%22)%0D%0A
%0D%0A%0D%0Aclas
@@ -2473,16 +2473,22 @@
ommands(
+object
):%0D%0A
|
980ea4be2fd6d05aa9ec64bfaa50d89161185ccd
|
rework httplib2.Http to be able to not verify certs if configuration tells the app not to verify them
|
pubs_ui/metrics/views.py
|
pubs_ui/metrics/views.py
|
from flask import Blueprint, render_template
from flask_login import login_required
from httplib2 import Http
from oauth2client.service_account import ServiceAccountCredentials
from .. import app
metrics = Blueprint('metrics', __name__,
template_folder='templates',
static_folder='static',
static_url_path='/metrics/static')
def get_access_token():
# verification_cert could be a boolean or a string representing a path to a certificate bundle
verification_cert = app.config.get('VERIFY_CERT')
keyfile_path = app.config.get('GA_KEY_FILE_PATH')
ga_auth_scope = app.config.get('GA_OAUTH2_SCOPE')
# if verification_cert is a str that means it's a cert bundle, use that in an Http object
http = Http(ca_certs=verification_cert) if isinstance(verification_cert, str) else None
credentials = ServiceAccountCredentials.from_json_keyfile_name(keyfile_path, ga_auth_scope)
access_token = credentials.get_access_token(http=http).access_token
return access_token
@metrics.context_processor
def add_ga_access_token():
return {
'ga_access_token': get_access_token(),
}
@metrics.route('/publications/acquisitions/')
@login_required
def publications_aquisitions():
return render_template('metrics/publications_aquisitions.html')
@metrics.route('/publications/')
@login_required
def publications():
return render_template('metrics/publications.html')
|
Python
| 0
|
@@ -762,16 +762,63 @@
object%0A
+ if isinstance(verification_cert, str):%0A
http
@@ -852,17 +852,23 @@
on_cert)
-
+%0A el
if isins
@@ -896,17 +896,235 @@
rt,
-str) else
+bool):%0A # if VERIFY_CERT is False, that means that disable_ssl_certificate_validation should be True and vice versa%0A http = Http(disable_ssl_certificate_validation=(not verification_cert))%0A else:%0A http =
Non
|
290f864f1bb44300cec9bb9e28679c3d7ba70c7e
|
Test 1 done
|
cookbook/seismic_conv.py
|
cookbook/seismic_conv.py
|
"""
Synthetic convolutional seismogram for a simple two layer velocity model
"""
import numpy as np
from fatiando.seismic import conv
from fatiando.vis import mpl
#model parameters
n_samples, n_traces = [600, 20]
rock_grid = 1500.*np.ones((n_samples, n_traces))
rock_grid[300:,:] = 2500.
#synthetic calculation
[vel_l, rho_l] = conv.depth_2_time(n_samples, n_traces, rock_grid, dt=2.e-3)
synt = conv.seismic_convolutional_model(n_traces, vel_l, 30., conv.rickerwave)
# plot input model
mpl.figure()
mpl.subplot(3,1,1)
mpl.ylabel('Depth (m)')
mpl.title("Depth Vp model", fontsize=13, family='sans-serif', weight='bold')
mpl.imshow(rock_grid, extent=[0,n_traces, n_samples, 0],cmap=mpl.pyplot.cm.bwr,
aspect='auto', origin='upper')
# plot resulted seismogram using wiggle
mpl.subplot(3, 1, 2)
mpl.seismic_wiggle(synt, dt = 2.e-3)
mpl.seismic_image(synt, dt = 2.e-3, cmap=mpl.pyplot.cm.jet, aspect='auto')
mpl.ylabel('time (seconds)')
mpl.title("Convolutional seismogram", fontsize=13, family='sans-serif',
weight='bold')
# plot resulted seismogram using wiggle over Vp model
mpl.subplot(3, 1, 3)
mpl.seismic_image(vel_l, dt= 2.e-3, cmap=mpl.pyplot.cm.jet, aspect='auto')
mpl.seismic_wiggle(synt, dt = 2.e-3)
mpl.ylabel('time (seconds)')
mpl.title("Convolutional seismogram over Vp model", fontsize=13, family='sans-serif',
weight='bold')
mpl.show()
|
Python
| 0.000001
|
@@ -270,16 +270,17 @@
id%5B300:,
+
:%5D = 250
@@ -507,18 +507,20 @@
bplot(3,
+
1,
+
1)%0Ampl.y
@@ -647,16 +647,17 @@
tent=%5B0,
+
n_traces
@@ -672,16 +672,29 @@
les, 0%5D,
+ %0A
cmap=mpl
@@ -708,27 +708,16 @@
.cm.bwr,
-%0A
aspect=
@@ -820,35 +820,33 @@
_wiggle(synt, dt
- =
+=
2.e-3)%0Ampl.seism
@@ -858,27 +858,25 @@
age(synt, dt
- =
+=
2.e-3, cmap=
@@ -1139,17 +1139,16 @@
l_l, dt=
-
2.e-3, c
@@ -1212,19 +1212,17 @@
synt, dt
- =
+=
2.e-3)%0Am
@@ -1306,32 +1306,43 @@
%22, fontsize=13,
+%0A
family='sans-ser
@@ -1341,34 +1341,24 @@
sans-serif',
-%0A
weight='bol
|
b4623bcdcd0a35091030057edc52870045a17223
|
fix for Anaconda compatibility
|
__init__.py
|
__init__.py
|
'''
Import all subdirectories and modules.
'''
import os as _os
__all__ = []
for _path in _os.listdir(_os.path.dirname(__file__)):
_full_path = _os.path.join(_os.path.dirname(__file__), _path)
if _os.path.isdir(_full_path) and _path not in ['.git', 'examples']:
__import__(_path, locals(), globals())
__all__.append(_path)
elif _path[-3:] == '.py' and _path not in ['__init__.py', 'gui.py']:
__import__(_path[:-3], locals(), globals())
__all__.append(_path[:-3])
|
Python
| 0
|
@@ -262,16 +262,27 @@
xamples'
+, 'widgets'
%5D:%0A
|
2ce8efa3bf227c9a769121a4d313963f0cfbde51
|
print sys args
|
add_data.py
|
add_data.py
|
import psycopg2
import sys
from connect import connect_to_db
# add argparse for options via command line
# add new temperature and date
def add_temp(date, temp):
print date, temp
# conn = connect_to_db()
|
Python
| 0.999189
|
@@ -136,80 +136,62 @@
ate%0A
-def add_temp(date, temp):%0A print date, temp%0A # conn = connect_to_db()
+conn = connect_to_db()%0Acur = conn.cursor()%0Aprint sys.argv
%0A
|
85da4c8cb3d613882eb46fb398e361286d4b4286
|
fix add_page
|
add_page.py
|
add_page.py
|
from widgy.models import *
page = ContentPage.objects.create(
title='widgy page'
)
page.root_widget = TwoColumnLayout.add_root().node
page.save()
for i in range(3):
page.root_widget.data.left_bucket.add_child(TextContent,
content='yay %s' % i
)
for i in range(2):
page.root_widget.data.right_bucket.add_child(TextContent,
content='yay right bucket %s' % i
)
|
Python
| 0.000001
|
@@ -207,32 +207,37 @@
ata.left_bucket.
+data.
add_child(TextCo
@@ -349,16 +349,21 @@
_bucket.
+data.
add_chil
|
8034a3f237fad994444cbc7edfffb658ef00f908
|
Test commit
|
__init__.py
|
__init__.py
|
Python
| 0
|
@@ -1,2 +1,8 @@
+# test
%0A%0A
|
|
f8e0ca3aac5530e0d1d93a1db79cbc17bde3ee89
|
Support saving to a file
|
__main__.py
|
__main__.py
|
import sys
from PyQt5.QtCore import QPoint, QRect, Qt
from PyQt5.QtGui import (
QColor,
QImage,
QPainter,
)
from PyQt5.QtWidgets import (
QApplication,
QDesktopWidget,
QWidget,
)
APPLICATION_TITLE = 'Wiggle'
APPLICATION_VERSION = '0.1'
class Image(object):
WIDTH = 256
HEIGHT = 256
def __init__(self):
self.layers = []
self.layers.append(
QImage(
self.WIDTH,
self.HEIGHT,
QImage.Format_ARGB32
)
)
self.current_layer = 0
red = QColor(255, 0, 0)
transparent = QColor(0, 0, 0, 0)
self.brush = QImage(3, 3, QImage.Format_ARGB32)
self.brush.setPixelColor(0, 0, transparent)
self.brush.setPixelColor(0, 1, red)
self.brush.setPixelColor(0, 2, transparent)
self.brush.setPixelColor(1, 0, red)
self.brush.setPixelColor(1, 1, red)
self.brush.setPixelColor(1, 2, red)
self.brush.setPixelColor(2, 0, transparent)
self.brush.setPixelColor(2, 1, red)
self.brush.setPixelColor(2, 2, transparent)
def width(self):
return self.WIDTH
def height(self):
return self.HEIGHT
def draw_with_brush(self, pos):
painter = QPainter()
painter.begin(self.layers[self.current_layer])
painter.drawImage(
pos,
self.brush,
QRect(0, 0, self.brush.width(), self.brush.height())
)
painter.end()
def composited(self):
target = QImage(self.width(), self.height(), QImage.Format_ARGB32)
painter = QPainter()
painter.begin(target)
for layer in self.layers:
painter.drawImage(
QPoint(0, 0),
layer,
QRect(0, 0, layer.width(), layer.height())
)
painter.end()
return target
class Canvas(QWidget):
ZOOM = 4
def __init__(self):
super().__init__()
self.image = Image()
# Resize and center on the screen
self.resize(
self.image.width() * self.ZOOM,
self.image.height() * self.ZOOM
)
geom = self.frameGeometry()
geom.moveCenter(QDesktopWidget().availableGeometry().center())
self.move(geom.topLeft())
self.setWindowTitle(
'{} v{}'.format(APPLICATION_TITLE, APPLICATION_VERSION))
self.show()
def mousePressEvent(self, event):
self.image.draw_with_brush(event.pos() / self.ZOOM)
self.update()
def mouseMoveEvent(self, event):
is_left_button_pressed = \
event.buttons() & Qt.LeftButton == Qt.LeftButton
if not is_left_button_pressed:
return
self.image.draw_with_brush(event.pos() / self.ZOOM)
self.update()
def paintEvent(self, event):
composited = \
self.image.composited().scaled(
self.image.width() * self.ZOOM,
self.image.height() * self.ZOOM,
Qt.IgnoreAspectRatio,
Qt.FastTransformation
)
painter = QPainter()
painter.begin(self)
painter.drawImage(
QPoint(0, 0),
composited,
QRect(0, 0, composited.width(), composited.height())
)
painter.end()
if __name__ == '__main__':
app = QApplication(sys.argv)
canvas = Canvas()
sys.exit(app.exec_())
|
Python
| 0
|
@@ -1900,16 +1900,101 @@
target%0A%0A
+ def save(self, filename):%0A self.composited().save(filename, format=None)%0A%0A
%0Aclass C
@@ -3444,16 +3444,289 @@
.end()%0A%0A
+ def keyPressEvent(self, event):%0A if event.key() == Qt.Key_S and %5C%0A bool(event.modifiers() and Qt.ControlModifier):%0A filename = 'image.png' # TODO%0A self.image.save(filename)%0A print('saved to %7B%7D'.format(filename))%0A%0A
%0Aif __na
|
9cd440760ea789cf712491080e61205d03a027c8
|
Support verbose and bleeding config from file
|
__main__.py
|
__main__.py
|
import json, os.path
import discord
from discord.ext import commands
from Fun import Fun
def main():
# variables
config_file = 'config.json'
# load config
with open(config_file) as f:
config = json.load(f)
# split config
description, token = config['description'], config['token']
# define bot
bot = Bot(description=description)
bot.add_cog(Fun(bot))
# launch bot
bot.run(token)
class Bot(commands.Bot):
def __init__(self, *args, **kwargs):
# Rewrite the command_prefix flag to force mention
super().__init__(*args, command_prefix=commands.when_mentioned, **kwargs)
self.admins = []
self.verbose = False
self.bleeding = False
def log(self, txt):
if self.verbose:
print(txt)
async def on_ready(self):
self.log('Logged as {}#{}'.format(self.user.name, self.user.id))
self.log('My boty is ready')
async def on_member_join(self, member):
if self.bleeding:
self.log('Initiating verification procedure for user "{}".'.format(member.name))
await self.verify(member)
async def verify(self, member):
msg = 'Please send your EPITECH mail adress\n'
msg += 'i.e.: ```yournam_e@epitech.eu```\n'
msg += 'It has to be an EPITECH adress, any other adress will not be accepted'
await self.send_message(member, msg)
def is_epitech(self, txt):
if txt[-11:] != '@epitech.eu':
return False
# TODO : mail username (check there are no @)
return True
if __name__ == '__main__':
main()
|
Python
| 0
|
@@ -310,16 +310,79 @@
'token'%5D
+%0A verbose, token = config%5B'verbose'%5D, config%5B'bleeding'%5D
%0A%0A #
@@ -429,16 +429,52 @@
cription
+, verbose=verbose, bleeding=bleeding
)%0A bo
@@ -728,16 +728,46 @@
ntioned,
+ verbose=False, bleeding=False
**kwarg
@@ -820,19 +820,21 @@
bose =
-Fal
+verbo
se%0A
@@ -852,21 +852,24 @@
eding =
-False
+bleeding
%0A%0A de
|
b434636d7b9109eea1064b08f672f43b39083bbf
|
Use pathlib for everything
|
__main__.py
|
__main__.py
|
#!/usr/bin/env python3
# Run script for CrabBot
# A mess of config args and terminal polling code
#
# See -h or read the argparse setup for argument details
import argparse
import datetime
import logging
import os
import readline # Only for better terminal input support, eg. history. Does not work on Windows, but does work in WSL.
import signal # So we can send SIGINT to ourselves
import sys
from tempfile import gettempdir # for PID file (for easier service management)
from threading import Thread
import crabbot.common
import crabbot.cogs.messages
import crabbot.cogs.quotes
#NOTE: Incomplete and buggy, rewrite transition not finished. Recommend commenting out.
import crabbot.cogs.voice # comment out to disable voice commands entirely
pid = str(os.getpid())
pidfile = gettempdir() + '/CrabBot.pid' # eg. so systemd's PIDFile can find a /tmp/CrabBot.pid
with open(pidfile, 'w') as temppidfile:
temppidfile.write(pid)
# Do argparse first so that -h can print and exit before anything else happens
parser = argparse.ArgumentParser(fromfile_prefix_chars='@', description='A silly Discord bot')
token_args = parser.add_mutually_exclusive_group(required=True)
token_args.add_argument('-t', '--token',
help="The bot user's login token. Use this or -f.")
token_args.add_argument('-f', '--file', type=argparse.FileType('r'),
help="A file with the bot user's login token as the first line. Use this or -t")
parser.add_argument('-p', '--prefix', default="!crab",
help="Command prefix the bot responds to")
parser.add_argument('--assets-path', default="assets/",
help="Path for general assets (ex. sir-places.txt)")
parser.add_argument('--memes-path', default="assets/memes",
help="Path for memes audio clips (and its filelist.txt)")
parser.add_argument('--quotes-path', default="../", # NOTE we write to this location, be careful where you put it
help="Path containing the quotes database. Will create quotes.sqlite3 if it does not exist.")
parser.add_argument('--use-libav', action='store_true',
help="Make Voice use Libav instead of FFmpeg")
parser.add_argument('--disable-voice', action='store_true',
help="Disable Voice commands (can be enabled later)")
parser.add_argument('-l', '--logfile', default="./CrabBot.log",
help="Path, with filename, to write the log to")
args = parser.parse_args()
logging.basicConfig(filename=args.logfile, level=logging.INFO) # Grr, ytdl doesn't log
logging.info("________\n" +
"Starting CrabBot at " + str(datetime.datetime.now()) + "\n"
"--------") # Make it clear in the log when a new run starts
# TODO? Might want a delimiter that is easier to write, eg. for a log parsing script
if args.file is not None:
login = args.file.readline().rstrip()
args.file.close()
else:
login = args.token
bot = crabbot.common.CrabBot(prefix=args.prefix)
def poll_terminal():
# TODO function dict instead of if/elif.
while True: # Run thread as daemon, so Python will exit despite this loop
term_input = input()
if term_input == "help":
# TODO write help for the terminal commands
print("Uh, no. I'm gonna be annoying instead.")
# NOTE could use function.__doc__ and docstrings for function help
elif term_input == "quit":
os.kill(int(pid), signal.SIGINT) # discord.Client.run() quits on KeyboardInterrupt, so...
# This might not work on Windows? It has a special signal.CTRL_C_EVENT.
elif term_input.startswith("update_profile"):
profile_args = term_input.split(' ')
bot._update_profile(username=profile_args[1], avatar=profile_args[2])
elif term_input.startswith("disable_voice"):
logging.info("Disabling voice commands")
bot.remove_cog("Voice")
elif term_input.startswith("enable_voice"):
if "crabbot.cogs.voice" in sys.modules:
logging.info("Enabling voice commands")
bot.add_cog(crabbot.cogs.voice.Voice(bot, args.memes_path, args.use_libav))
else:
logging.info("Voice disabled in source. Add/uncomment import for crabbot.voice and relaunch.")
elif term_input.startswith("update_lists"):
bot.update_all_lists()
# Start polling thread as a daemon so the program exits without waiting if ex. the bot crashes
input_thread = Thread(target=poll_terminal, daemon=True)
input_thread.start()
bot.add_cog(crabbot.cogs.messages.Messages(args.assets_path + "/messages"))
bot.add_cog(crabbot.cogs.quotes.Quotes(args.quotes_path))
# Comment out import of voice to completely disable voice commands
if "crabbot.cogs.voice" in sys.modules and args.disable_voice is False:
bot.add_cog(crabbot.cogs.voice.Voice(bot.loop , args.memes_path, args.use_libav))
# Blocking, must be last. See discord.py Client for more info.
bot.run(login)
# If it reaches here, CrabBot's probably logged out of Discord now
# (CrabBot doesn't log out if it's straight terminated)
logging.info("CrabBot has recieved a SIGINT and has now exited as intended\n" +
"————— CrabBot exited at " + str(datetime.datetime.now()))
print("CrabBot says goodbye")
# Cleanup pidfile
try:
os.remove(pidfile)
except:
pass # Don't try too hard to clean up
|
Python
| 0
|
@@ -208,16 +208,41 @@
port os%0A
+from pathlib import Path%0A
import r
@@ -802,16 +802,21 @@
dfile =
+Path(
gettempd
@@ -836,16 +836,17 @@
Bot.pid'
+)
# eg.
@@ -1637,32 +1637,43 @@
'--assets-path',
+ type=Path,
default=%22assets
@@ -1669,17 +1669,22 @@
ult=
-%22
+Path('
assets
-/%22
+')
,%0A
@@ -1781,32 +1781,43 @@
('--memes-path',
+ type=Path,
default=%22assets
@@ -1809,17 +1809,22 @@
default=
-%22
+Path('
assets/m
@@ -1827,17 +1827,18 @@
ts/memes
-%22
+')
,%0A
@@ -1950,21 +1950,37 @@
h',
-default=%22../%22
+type=Path, default=Path('..')
, #
@@ -2449,16 +2449,27 @@
ogfile',
+ type=Path,
default
@@ -2473,11 +2473,14 @@
ult=
-%22./
+Path('
Crab
@@ -2486,17 +2486,18 @@
bBot.log
-%22
+')
, %0A
@@ -4798,12 +4798,11 @@
ath
-+
+/
%22
-/
mess
|
6bb6f73b6dd5a497a670ec3dc4d85483253737d2
|
update dev version after 0.9.6 tag [skip ci]
|
py/desimodel/_version.py
|
py/desimodel/_version.py
|
__version__ = '0.9.6'
|
Python
| 0
|
@@ -13,10 +13,17 @@
= '0.9.6
+.dev431
'%0A
|
a88156ecd020ab9736bcc90856c7f6042d56fab9
|
raise exception if user aborts
|
py/mel/cmd/addcluster.py
|
py/mel/cmd/addcluster.py
|
"""A tool for adding a new cluster / constellation from photographs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import cv2
import numpy
def setup_parser(parser):
parser.add_argument(
'context',
type=str,
default=None,
help="Path to the context image to add.")
parser.add_argument(
'detail',
type=str,
default=None,
help="Path to the detail image to add.")
parser.add_argument(
'destination',
type=str,
default=None,
help="New path to create and store the constellation to.")
parser.add_argument(
'moles',
type=str,
default=None,
nargs='+',
help="Names of the moles to store.")
def process_args(args):
context_image = cv2.imread(args.context)
detail_image = cv2.imread(args.detail)
# print out the dimensions of the images
print('{}: {}'.format(args.context, context_image.shape))
print('{}: {}'.format(args.detail, detail_image.shape))
display_image = numpy.copy(context_image)
# display the context image in a reasonably sized window
cv2.namedWindow('display', cv2.WINDOW_NORMAL)
window_width = 800
window_height = 600
cv2.resizeWindow('display', window_width, window_height)
cv2.imshow('display', display_image)
circle_radius = 50
context_mole_positions = []
detail_mole_positions = []
current_mole_positions = context_mole_positions
cv2.setMouseCallback(
'display',
_make_mole_capture_callback(
'display',
display_image,
circle_radius,
context_mole_positions))
# main loop
print('Please mark all specified moles, double-click to mark.')
print('Press any key to exit.')
is_finished = False
while not is_finished:
key = cv2.waitKey(50)
if key != -1:
is_finished = True
if len(current_mole_positions) == len(args.moles):
if not detail_mole_positions:
current_mole_positions = detail_mole_positions
display_image = numpy.copy(detail_image)
cv2.setMouseCallback(
'display',
_make_mole_capture_callback(
'display',
display_image,
circle_radius,
detail_mole_positions))
cv2.imshow('display', display_image)
else:
print("context positions:")
print(context_mole_positions)
print("detail positions:")
print(detail_mole_positions)
is_finished = True
cv2.destroyAllWindows()
raise NotImplementedError()
def _make_mole_capture_callback(window_name, image, radius, mole_positions):
def draw_circle(event, x, y, flags, param):
if event == cv2.EVENT_LBUTTONDBLCLK:
cv2.circle(image, (x, y), radius, (255, 0, 0), -1)
mole_positions.append((x, y))
cv2.imshow(window_name, image)
return draw_circle
|
Python
| 0
|
@@ -1963,34 +1963,48 @@
-is_finished = True
+raise Exception('User aborted.')
%0A%0A
|
45e326128beafee61b6913098808fe9e51829615
|
remove print
|
pyLibrary/thread/till.py
|
pyLibrary/thread/till.py
|
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
# THIS THREADING MODULE IS PERMEATED BY THE please_stop SIGNAL.
# THIS SIGNAL IS IMPORTANT FOR PROPER SIGNALLING WHICH ALLOWS
# FOR FAST AND PREDICTABLE SHUTDOWN AND CLEANUP OF THREADS
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import thread
from time import sleep as _sleep
from time import time as _time
from pyLibrary.thread.signal import Signal
from pyLibrary.times.dates import Date
from pyLibrary.times.durations import Duration
DEBUG = True
INTERVAL = 0.1
next_ping = _time()
class Till(Signal):
"""
TIMEOUT AS A SIGNAL
"""
all_timers = []
locker = thread.allocate_lock()
def __init__(self, till=None, timeout=None, seconds=None):
global next_ping
Signal.__init__(self, "a timeout")
if till != None:
timeout = Date(till).unix
elif timeout != None:
timeout = (_time() + Duration(timeout).seconds).unix
elif seconds != None:
timeout = _time() + seconds
with Till.locker:
next_ping = min(next_ping, timeout)
Till.all_timers.append((timeout, self))
@classmethod
def daemon(cls, please_stop):
global next_ping
try:
while not please_stop:
now = _time()
with Till.locker:
if next_ping > now:
_sleep(min(next_ping - now, INTERVAL))
continue
next_ping = now + INTERVAL
work = None
if Till.all_timers:
Till.all_timers.sort(key=lambda r: r[0])
for i, (t, s) in enumerate(Till.all_timers):
if now < t:
work, Till.all_timers[:i] = Till.all_timers[:i], []
next_ping = min(next_ping, Till.all_timers[0][0])
break
else:
work, Till.all_timers = Till.all_timers, []
if work:
for t, s in work:
print unicode(t)
s.go()
except Exception, e:
from pyLibrary.debugs.logs import Log
Log.warning("timer shutdown", cause=e)
|
Python
| 0.000793
|
@@ -2420,49 +2420,8 @@
rk:%0A
- print unicode(t)%0A
|
615613a3213e7b4023135b2fc85ac725d5f12656
|
Add jvm_path argument to connect method
|
pyathenajdbc/__init__.py
|
pyathenajdbc/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime
__version__ = '1.0.2'
__athena_driver_version__ = '1.0.0'
# Globals https://www.python.org/dev/peps/pep-0249/#globals
apilevel = '2.0'
threadsafety = 3
paramstyle = 'pyformat'
ATHENA_JAR = 'AthenaJDBC41-{0}.jar'.format(__athena_driver_version__)
ATHENA_DRIVER_DOWNLOAD_URL = 'https://s3.amazonaws.com/athena-downloads/drivers/{0}'.format(
ATHENA_JAR)
ATHENA_DRIVER_CLASS_NAME = 'com.amazonaws.athena.jdbc.AthenaDriver'
ATHENA_CONNECTION_STRING = 'jdbc:awsathena://athena.{region}.amazonaws.com:443/'
class DBAPITypeObject:
"""Type Objects and Constructors
https://www.python.org/dev/peps/pep-0249/#type-objects-and-constructors
"""
def __init__(self, *values):
self.values = values
def __cmp__(self, other):
if other in self.values:
return 0
if other < self.values:
return 1
else:
return -1
STRING = DBAPITypeObject('CHAR', 'NCHAR',
'VARCHAR', 'NVARCHAR',
'LONGVARCHAR', 'LONGNVARCHAR')
BINARY = DBAPITypeObject('BINARY', 'VARBINARY', 'LONGVARBINARY')
NUMBER = DBAPITypeObject('BOOLEAN', 'TINYINT', 'SMALLINT', 'BIGINT', 'INTEGER',
'REAL', 'DOUBLE', 'FLOAT', 'DECIMAL', 'NUMERIC')
DATETIME = DBAPITypeObject('TIMESTAMP')
ROWID = DBAPITypeObject('')
Date = datetime.date
Time = datetime.time
Timestamp = datetime.datetime
def connect(s3_staging_dir=None, access_key=None, secret_key=None,
region_name=None, profile_name=None, credential_file=None,
jvm_options=None, converter=None, formatter=None,
**kwargs):
from pyathenajdbc.connection import Connection
return Connection(s3_staging_dir, access_key, secret_key,
region_name, profile_name, credential_file,
jvm_options, converter, formatter,
**kwargs)
|
Python
| 0.000001
|
@@ -1712,24 +1712,39 @@
matter=None,
+ jvm_path=None,
%0A
@@ -1990,16 +1990,26 @@
rmatter,
+ jvm_path,
%0A
|
d4464407c923ecf75cadcb11cfcac1ad143b0f38
|
correct bug
|
pybioportal/Bioportal.py
|
pybioportal/Bioportal.py
|
import requests
import urllib
from requests import HTTPError
class Bioportal(object):
'''A Python binding of the BioPortal REST API
(http://data.bioontology.org/documentation)'''
BASE_URL = 'http://data.bioontology.org'
def __init__(self, api_key):
self.apikey = api_key
def classes(self, search_query, **kwargs):
# http://data.bioontology.org/documentation#nav_search
endpoint = '/search'
full_url = Bioportal.BASE_URL + endpoint
payload = kwargs
payload['q'] = search_query
return self._bioportal_api_request(full_url, payload)
def annotator(self, text, **kwargs):
# http://data.bioontology.org/documentation#nav_annotator
endpoint = '/annotator'
full_url = Bioportal.BASE_URL + endpoint
payload = kwargs
payload['text'] = text
complete_annotations = self._bioportal_api_request(full_url, payload)
return complete_annotations
def recommender(self, text_or_keywords, **kwargs):
# http://data.bioontology.org/documentation#nav_recommender
endpoint = '/recommender'
full_url = Bioportal.BASE_URL + endpoint
payload = kwargs
payload['input'] = text_or_keywords
return self._bioportal_api_request(full_url, payload)
def ontology_class(self, ontology, cls_id):
'''
Just supports the /ontologies/:ontology/classes/:cls endpoint, which
returns information about one class
ontology: name of the ontology
cls_id: @id of the class. Ex: http://www.radlex.org/RID/#RID43314
'''
# http://data.bioontology.org/documentation#Class
escaped_cls_id = urllib.quote(cls_id, safe='')
endpoint = '/ontologies/{}/classes/{}'.format(ontology, escaped_cls_id)
full_url = Bioportal.BASE_URL + endpoint
return self._bioportal_api_request(full_url)
def _bioportal_api_request(self, url, payload={}):
payload['apikey'] = self.apikey
processed_payload = self._process_payload(payload)
r = requests.get(url, params=processed_payload)
if r.status_code is 414:
raise HTTPError('Text is too long.')
json_response = r.json()
try:
# This will raise an HTTPError if the HTTP request returned an
# unsuccessful status code.
r.raise_for_status()
except HTTPError:
if 'errors' in json_response.keys():
error_messages = json_response['errors']
error_message = '\n'.join(error_messages)
elif 'error' in json_response.keys():
error_message = json_response['error']
raise HTTPError(error_message)
return json_response
def _process_payload(self, payload):
'''Turn boolean True to str 'true' and False to str 'false'. Otherwise,
server will ignore argument with boolean value.'''
def process_value(value):
if type(value) is bool:
return str(value).lower()
else:
return value
return {key: process_value(value) for key, value in payload.iteritems()}
|
Python
| 0.000004
|
@@ -2160,18 +2160,18 @@
us_code
-is
+==
414:%0A
|
462312c3acf2d6daf7d8cd27f251b8cb92647f5e
|
Fix a typo in the variable name
|
pybossa/auth/category.py
|
pybossa/auth/category.py
|
from flaskext.login import current_user
def create(app=None):
if current_user.is_authenticated():
if current_user.admin is True:
return True
else:
return False
else:
return False
def read(app=None):
return True
def update(app):
return create(app)
def delete(app):
return create(app)
|
Python
| 0.999999
|
@@ -46,19 +46,24 @@
create(
-app
+category
=None):%0A
@@ -246,19 +246,24 @@
ef read(
-app
+category
=None):%0A
@@ -291,19 +291,24 @@
update(
-app
+category
):%0A r
@@ -320,19 +320,24 @@
create(
-app
+category
)%0A%0A%0Adef
@@ -343,19 +343,24 @@
delete(
-app
+category
):%0A r
@@ -376,9 +376,14 @@
ate(
-app
+category
)%0A
|
9e8764128e83b104b6a7000451b7863209541d47
|
remove parent accounts from finance_accounts
|
pycroft/model/finance.py
|
pycroft/model/finance.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
"""
pycroft.model.finance
~~~~~~~~~~~~~~
This module contains the classes FinanceAccount, ...
:copyright: (c) 2011 by AG DSN.
"""
import datetime
from base import ModelBase
from sqlalchemy import ForeignKey
from sqlalchemy import Table, Column
from sqlalchemy.orm import relationship, backref
from sqlalchemy.types import Enum, Integer, Text, DateTime, String
from sqlalchemy import event
class FinanceAccount(ModelBase):
name = Column(String(127), nullable=False)
#LIABILITY=Passivkonto, EXPENSE=Aufwandskonto, ASSET=Aktivkonto, INCOME=Ertragskonto
type = Column(Enum("LIABILITY", "EXPENSE", "ASSET", "INCOME", "EQUITY",
name="financeaccounttypes"), nullable=False)
transactions = relationship("Transaction", secondary="split")
# many to one from FinanceAccount to User
user = relationship("User", backref=backref("finance_accounts"))
user_id = Column(Integer, ForeignKey("user.id"), nullable=True)
parent_account_id = Column(Integer, ForeignKey("financeaccount.id"),
nullable=True)
parent_account = relationship("FinanceAccount")
class Journal(ModelBase):
account = Column(String(255), nullable=False)
bank = Column(String(255), nullable=False)
hbci_url = Column(String(255), nullable=False)
last_update = Column(DateTime, nullable=False)
class JournalEntry(ModelBase):
amount = Column(Integer, nullable=False)
message = Column(Text, nullable=True)
journal_id = Column(Integer, ForeignKey("journal.id"), nullable=False)
journal = relationship("Journal", backref=backref("entries"))
other_account = Column(String(255), nullable=False)
other_bank = Column(String(255), nullable=False)
other_person = Column(String(255), nullable=False)
original_message = Column(Text, nullable=False)
timestamp = Column(DateTime, nullable=False)
class Transaction(ModelBase):
message = Column(Text(), nullable=False)
transaction_date = Column(DateTime, nullable=False, default=datetime.datetime.now)
journal_entry_id = Column(Integer(), ForeignKey("journalentry.id"),
nullable=True)
journal_entry = relationship("JournalEntry",
backref=backref("transaction"))
semester_id = Column(Integer, ForeignKey("semester.id"))
semester = relationship("Semester", backref=backref("transactions"))
@property
def is_balanced(self):
return sum([split.amount for split in self.splits]) == 0
def check_transaction_balance_on_save(mapper, connection, target):
assert target.is_balanced, 'Transaction "%s" is not balanced!' % target.message
event.listen(Transaction, "before_insert", check_transaction_balance_on_save)
event.listen(Transaction, "before_update", check_transaction_balance_on_save)
#soll ist positiv, haben ist negativ
class Split(ModelBase):
amount = Column(Integer, nullable=False)
account_id = Column(Integer, ForeignKey("financeaccount.id"),
nullable=False)
account = relationship("FinanceAccount")
transaction_id = Column(Integer, ForeignKey("transaction.id",
ondelete='CASCADE'),
nullable=False)
transaction = relationship("Transaction", backref=backref("splits", cascade="all, delete-orphan"))
class Semester(ModelBase):
name = Column(String, nullable=False)
semester_fee = Column(Integer, nullable=False)
registration_fee = Column(Integer, nullable=False)
begin_date = Column(DateTime, nullable=False)
end_date = Column(DateTime, nullable=False)
|
Python
| 0.000004
|
@@ -1204,157 +1204,8 @@
e)%0A%0A
- parent_account_id = Column(Integer, ForeignKey(%22financeaccount.id%22),%0A nullable=True)%0A parent_account = relationship(%22FinanceAccount%22)%0A%0A
%0Acla
|
ce384e6eb3f762f611bfd70874766248169a7d15
|
indent fix
|
nginpro/utils.py
|
nginpro/utils.py
|
"""
Utilities
"""
from string import Template
"""
Generate configuration blocks
"""
def make_block(name, content, pattern=""):
return Template("""
${name} ${pattern} {
${content}
}
""").safe_substitute(name=name, content=content, pattern=pattern)
"""
Takes a python dictionary and converts it to nginx compatible configuration block
"""
def to_nginx_template(config):
template = ""
for key, value in config.iteritems():
if isinstance(value, dict):
for key2, value2 in value.iteritems():
template += "{} {} {};\n".format(key, key2, value2)
else:
template += "{} {};\n".format(key, value)
return template
"""
nginx configuration indentation
"""
def make_indent(contents):
indents = ' '
lines = map(str.strip, contents.splitlines())
current_indent = 0
for index, line in enumerate(lines):
if line.endswith('}'):
current_indent -= 1
lines[index] = current_indent * indents + line
if line.endswith('{'):
current_indent += 1
return '\n'.join(lines)
"""
Get nginx config args
"""
def get_nginx_config_args():
# TODO: make this more pythonic
import subprocess
import re
options = {}
try:
process = subprocess.Popen(['nginx', '-V'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
err, out = process.communicate()
matches = re.findall('--([^\s]+)', out)
for option in matches:
if '=' in option:
v = option.split('=')
options[v[0]] = v[1]
else:
options[option] = True
except OSError:
print 'Nginx is not installed or not in $PATH'
return options
|
Python
| 0.000002
|
@@ -143,16 +143,25 @@
emplate(
+%0A
%22%22%22%0A
@@ -218,16 +218,20 @@
%7D%0A
+
%22%22%22)
|
96ad539fdf0302dd0e2996f746ce1fd055c8e590
|
fix log size to 5mb
|
vmmaster/core/logger.py
|
vmmaster/core/logger.py
|
import logging
import logging.handlers
import graypy
import os
import sys
from .config import config
class StreamToLogger(object):
"""
Fake file-like stream object that redirects writes to a logger instance.
"""
def __init__(self, logger, log_level=logging.INFO):
self.logger = logger
self.log_level = log_level
self.linebuf = ''
def write(self, buf):
for line in buf.rstrip().splitlines():
self.logger.log(self.log_level, line.rstrip())
def setup_logging(logdir=None, scrnlog=True, txtlog=True, loglevel=logging.DEBUG):
logdir = os.path.abspath(logdir)
if not os.path.exists(logdir):
os.mkdir(logdir)
log = logging.getLogger('')
log.setLevel(loglevel)
log_formatter = logging.Formatter("%(asctime)s - %(levelname)-7s :: %(name)-6s :: %(message)s")
if hasattr(config, 'GRAYLOG'):
graylog_handler = graypy.GELFHandler(config.GRAYLOG)
graylog_handler.setFormatter(log_formatter)
log.addHandler(graylog_handler)
if txtlog:
txt_handler = logging.handlers.RotatingFileHandler(
os.path.join(logdir, "vmmaster.log"), maxBytes=5120, backupCount=5
)
txt_handler.setFormatter(log_formatter)
log.addHandler(txt_handler)
log.info("Logger initialised.")
if scrnlog:
console_handler = logging.StreamHandler()
console_handler.setFormatter(log_formatter)
log.addHandler(console_handler)
stdout_logger = logging.getLogger('STDOUT')
slout = StreamToLogger(stdout_logger, logging.INFO)
sys.stdout = slout
stderr_logger = logging.getLogger('STDERR')
slerr = StreamToLogger(stderr_logger, logging.ERROR)
sys.stderr = slerr
return log
log = logging.getLogger('LOG')
|
Python
| 0.000949
|
@@ -1173,10 +1173,13 @@
es=5
-1
2
+4288
0, b
|
23ab8664d1ed16ea0339f9b94938e1c95b574132
|
Remove silly try/except blocks in button.py
|
pygametemplate/button.py
|
pygametemplate/button.py
|
import time
from pygametemplate import log
class Button(object):
"""Class representing keyboard keys."""
def __init__(self, game, number):
self.game = game
try:
self.number = number
self.event = None # The last event that caused the button press
self.pressed = 0 # If the button was just pressed
self.held = 0 # If the button is held
self.released = 0 # If the button was just released
self.press_time = 0.0
except Exception:
log("Failed to initialise button variable")
def press(self):
self.pressed = 1
self.held = 1
self.press_time = time.time()
def release(self):
self.held = 0
self.released = 1
def reset(self):
try:
self.pressed = 0
self.released = 0
except Exception:
log("Failed to reset button")
def time_held(self):
try:
if self.held:
return time.time() - self.press_time
else:
return 0.0
except Exception:
log("Failed to get button held time")
|
Python
| 0.000001
|
@@ -168,33 +168,17 @@
= game%0A
- try:%0A
+%0A
@@ -198,20 +198,16 @@
number%0A
-
@@ -273,36 +273,32 @@
press%0A%0A
-
self.pressed = 0
@@ -338,28 +338,24 @@
sed%0A
-
self.held =
@@ -386,36 +386,32 @@
is held%0A
-
-
self.released =
@@ -448,36 +448,32 @@
eleased%0A
-
self.press_time
@@ -481,90 +481,8 @@
0.0
-%0A except Exception:%0A log(%22Failed to initialise button variable%22)
%0A%0A
@@ -679,33 +679,16 @@
(self):%0A
- try:%0A
@@ -708,28 +708,24 @@
= 0%0A
-
-
self.release
@@ -733,76 +733,8 @@
= 0
-%0A except Exception:%0A log(%22Failed to reset button%22)
%0A%0A
@@ -760,25 +760,8 @@
f):%0A
- try:%0A
@@ -778,20 +778,16 @@
f.held:%0A
-
@@ -835,20 +835,16 @@
-
else:%0A
@@ -849,28 +849,24 @@
-
-
return 0.0%0A
@@ -868,80 +868,4 @@
0.0%0A
- except Exception:%0A log(%22Failed to get button held time%22)%0A
|
1ee2e880872c4744f4159df7fc64bb64b3f35632
|
Add docstring to Button.time_held() method
|
pygametemplate/button.py
|
pygametemplate/button.py
|
import time
class Button(object):
"""Class representing keyboard keys."""
def __init__(self, game, number):
self.game = game
self.number = number
self.event = None # The last event that caused the button press
self.pressed = 0 # If the button was just pressed
self.held = 0 # If the button is held
self.released = 0 # If the button was just released
self.press_time = 0.0
def press(self):
self.pressed = 1
self.held = 1
self.press_time = time.time()
def release(self):
self.held = 0
self.released = 1
def reset(self):
self.pressed = 0
self.released = 0
def time_held(self):
if self.held:
return time.time() - self.press_time
else:
return 0.0
|
Python
| 0.000001
|
@@ -718,25 +718,116 @@
e_held(self)
-:
+ -%3E float:%0A %22%22%22Return the amount of time this button has been held for in seconds.%22%22%22
%0A if
|
bc95e7472ed833125771109a3af4a81bec483e81
|
Add TODO
|
lib/output_textview.py
|
lib/output_textview.py
|
# coding: UTF-8
"""
Copyright (c) 2009 Marian Tietz
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ``AS IS'' AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
"""
import gtk
from lib.htmlbuffer import HTMLBuffer
from helper import URLHandler
import config
##
import gobject
from threading import Timer # for smooth scrolling
##
class OutputTextView(gtk.TextView):
    """Read-only gtk.TextView backed by an HTMLBuffer.

    Renders chat output, optionally scrolls to the buffer end with a
    smooth animation, and can draw a strike-through divider line
    ("read line") marking the last read position.
    """

    def __init__(self):
        gtk.TextView.__init__(self,
            HTMLBuffer(handler = URLHandler.URLHandler))

        # display-only widget: no editing, focus or visible cursor
        self.set_property("editable", False)
        self.set_property("can-focus", False)
        self.set_property("wrap-mode", gtk.WRAP_WORD_CHAR)
        self.set_property("cursor-visible", False)

        # (tag, start_mark, end_mark) of the current read-line divider,
        # or the empty tuple while no divider is shown (see set_read_line)
        self.read_line = ()

        # gobject timeout source id while a smooth scroll is running, else None
        self.smooth_id = None
        #self.smooth_scroll_timer is set in smooth_scroll_to_end

    """ < """
    # Smooth scrolling inspired by Gajim Code

    # smooth scroll constants
    SMOOTH_SCROLLING = False  # class-wide switch, toggled via set_smooth_scrolling
    MAX_SCROLL_TIME = 0.4 # seconds
    SCROLL_DELAY = 33 # milliseconds

    @classmethod
    def set_smooth_scrolling(cls, switch):
        # class-level toggle: affects every OutputTextView instance
        cls.SMOOTH_SCROLLING = switch

    def smooth_scroll(self):
        """ idle add handler for smooth scrolling.
            Returns True if it's going to be recalled.
            Scrolls 1/3rd of the distance to the bottom.

            TODO: add direction parameter for use with
            TODO:: manual scrolling.
        """
        parent = self.get_parent()

        if not parent:
            # not packed into a scrollable container: stop the timeout
            return False

        vadj = parent.get_vadjustment()

        max_val = vadj.upper - vadj.page_size + 1
        cur_val = vadj.get_value()

        # scroll by 1/3rd of remaining distance
        onethird = cur_val + ((max_val - cur_val) / 3.0)

        vadj.set_value(onethird)

        if max_val - onethird < 0.01:
            # close enough to the bottom: finish the animation and
            # cancel the fast-forward watchdog timer
            self.smooth_id = None
            self.smooth_scroll_timer.cancel()
            return False

        return True

    def _smooth_scroll_timeout(self):
        # The Timer fires in its own thread; hop back onto the gtk
        # main loop before touching widget state.
        gobject.idle_add(self._do_smooth_scroll_timeout)

    def _do_smooth_scroll_timeout(self):
        """ Timout handler.
            Time's up, if we were done, ok, if not
            and it's remaing space given,
            remove the timer and jump fast forward.
        """
        if not self.smooth_id:
            # we finished scrolling
            return False

        # still animating after MAX_SCROLL_TIME: abort the animation
        # and jump straight to the bottom
        gobject.source_remove(self.smooth_id)
        self.smooth_id = None

        parent = self.get_parent()
        if parent:
            vadj = parent.get_vadjustment()
            vadj.set_value(vadj.upper - vadj.page_size + 1)

        return False

    def _smooth_scroll_to_end(self):
        """ Call n times smooth_scroll() until
            the end is reached.
        """
        if None != self.smooth_id:
            # already scrolling
            return False

        # re-run smooth_scroll every SCROLL_DELAY ms until it returns False
        self.smooth_id = gobject.timeout_add(self.SCROLL_DELAY,
            self.smooth_scroll)

        # watchdog: cap the whole animation at MAX_SCROLL_TIME seconds
        self.smooth_scroll_timer = Timer(self.MAX_SCROLL_TIME,
            self._smooth_scroll_timeout)
        self.smooth_scroll_timer.start()
        return False

    def _scroll_to_end(self):
        """ Scroll normally to the end of the buffer """
        parent = self.get_parent()
        buffer = self.get_buffer()

        end_mark = buffer.create_mark("end", buffer.get_end_iter(), False)
        self.scroll_to_mark(end_mark, 0, True, 0, 1)

        # reset horizontal scrollbar (do avoid side effects)
        if parent:
            adjustment = parent.get_hadjustment()
            adjustment.set_value(0)

        # avoid recalling through idle_add
        return False

    """ > """

    def stop_scrolling(self):
        """ interrupts smooth scrolling procedure """
        if self.smooth_id:
            gobject.source_remove(self.smooth_id)
            self.smooth_id = None
            self.smooth_scroll_timer.cancel()

    def scroll_to_bottom(self):
        """ scroll to the end of the textbuffer """
        if self.SMOOTH_SCROLLING:
            self._smooth_scroll_to_end()
        else:
            self._scroll_to_end()

    def get_last_line(self):
        """ returns the last readable line
            (without read_line)
        """
        buffer = self.get_buffer()
        count = buffer.get_line_count()

        if self.read_line:
            # skip the divider and the newline inserted before it
            count -= 2
            lineEnd = buffer.get_iter_at_mark(self.read_line[1])
        else:
            lineEnd = buffer.get_end_iter()

        if count <= 0:
            return ""

        lineStart = buffer.get_iter_at_line(count)

        return buffer.get_text(lineStart, lineEnd)

    def set_read_line(self):
        # Move the strike-through divider to the end of the buffer,
        # deleting the previous divider first (if any).
        buffer = self.get_buffer()

        if self.read_line:
            markA, markB = self.read_line[1:]
            iterA = buffer.get_iter_at_mark(markA)
            iterB = buffer.get_iter_at_mark(markB)

            if None in (iterA, iterB):
                raise ValueError, "set_read_line: %s,%s in None." % (iterA, iterB)
                return  # NOTE(review): unreachable after the raise above

            buffer.delete(iterA, iterB)
            buffer.remove_tag(self.read_line[0], iterA, iterB)

        # centered strike-through line of configurable width
        tag = buffer.create_tag(None,
            justification = gtk.JUSTIFY_CENTER,
            strikethrough = True)

        end_iter = buffer.get_end_iter()
        start_mark = buffer.create_mark(None, end_iter, True)

        buffer.insert_with_tags(end_iter,
            "\n"+" "*int(config.get("tekka","divider_length")), tag)

        end_mark = buffer.create_mark(None, buffer.get_end_iter(), True)

        # remember tag and bounding marks so the divider can be moved later
        self.read_line = (tag, start_mark, end_mark)
|
Python
| 0.000002
|
@@ -2076,16 +2076,238 @@
econds%0A%0A
+%09%22%22%22%0A%09TODO: optimize the whole code for manual smooth%0A%09TODO:: scrolling even if the slider is set directly%0A%09TODO:: to a position. This needs a replacement for%0A%09TODO:: the current end-mark (the bottom of the buffer)%0A%09%22%22%22%0A%0A
%09@classm
|
c0607fdcfdfbf42c719f75bef82680f38b1fb043
|
change all references to SeqPath.end to .stop
|
pygr/apps/maf2VSgraph.py
|
pygr/apps/maf2VSgraph.py
|
from seqref import *
from pygr.seqdb import *
import string
def refIntervals(s):
    """Yield the ungapped runs of an alignment string.

    For each maximal run of non-'-' characters in s, yields a tuple
    (begin, end, ungappedBegin, ungappedEnd, text) where begin/end are
    half-open coordinates in the gapped string, ungappedBegin/ungappedEnd
    are the corresponding coordinates with gaps removed, and text is the
    run itself.

    BUG FIX: the original guard `if end==0: return` (meant for empty
    input) also silently dropped the interval of any length-1 string;
    an explicit empty-string check handles that case correctly.
    """
    if not s:
        return
    begin = 0
    gaps = 0
    for end in range(len(s)):
        if s[end] == '-':
            # close the run that ended just before this gap, if any
            if begin < end:
                yield (begin, end, begin - gaps, end - gaps, s[begin:end])
            begin = end + 1
            gaps += 1
    # flush the trailing run (if the string does not end with a gap)
    end = len(s)
    if begin < end:
        yield (begin, end, begin - gaps, end - gaps, s[begin:end])
def reverse_complement(s):
    """Return the reverse complement of a nucleotide string.

    Handles both cases of A/C/G/T/U/N; any other character is passed
    through unchanged.
    """
    pairs = {'a':'t', 'c':'g', 'g':'c', 't':'a', 'u':'a', 'n':'n',
             'A':'T', 'C':'G', 'G':'C', 'T':'A', 'U':'A', 'N':'N'}
    out = []
    for ch in reversed(s):
        out.append(pairs.get(ch, ch))
    return ''.join(out)
class MafParser:
    """
    Parses .maf files as defined by the Haussler dataset. The results of parsing are
    available as pathmapping between the sequences in the alignment. The sequences
    themselves are assumed unknown and use AnonSequence class.
    """
    # NOTE(review): class attribute — the options dict is shared by all instances
    options = {}

    def __init__(self, vbase=''):
        # vbase: name prefix for generated virtual sequences (see _vseq)
        self.mAlign = PathMapping()
        self.sequences = {}  # sequence name -> AnonSequence
        self.vbase = vbase
        self._vid = 0        # running counter used to build virtual sequence ids

    def setpar(self, arry):
        """internal function """
        # store 'key=value' header parameters into the options dict
        for p in arry:
            (key, value) = p.split('=')
            self.options[key] = value

    def readalign(self, opt, fh):
        """internal function parses alignment record from .maf file """
        ## print "entering readalign:", opt
        edgeInfo = {}
        for p in opt:
            (key, value) = p.split('=')
            edgeInfo[key] = value

        s = fh.readline().split()
        ## print s;

        # 's' lines carry 7 fields: s, src, start, size, strand, srcSize, text
        if(len(s) == 7 and s[0] == 's'):
            # virtual sequence spanning the alignment columns of this block
            vseq = self._vseq(len(s[6]))
            self.mAlign += vseq
        while len(s) == 7 and s[0] == 's':
            # ADD THE SEQUENCE NAME TO THE DICT AND ADD CORRESPONDING NODE TO THE MAPPING
            if(not self.sequences.has_key(s[1])):
                self.sequences[s[1]] = AnonSequence(int(s[5]), s[1])
                self.mAlign += self.sequences[s[1]]

            # PROCESS THE KNOWN INTERVALS
            if(s[4] == '-'):
                # minus strand: negative coordinates; splice in the
                # reverse complement of the ungapped alignment text
                ns = self.sequences[s[1]][-int(s[2]):-int(s[2])-int(s[3])]
                self.sequences[s[1]].seqsplice(reverse_complement(s[6].replace('-','')), ns.start, ns.end)
            else:
                ns = self.sequences[s[1]][ int(s[2]): int(s[2])+int(s[3])]
                self.sequences[s[1]].seqsplice(s[6].replace('-',''), ns.start, ns.end)

            # map every ungapped interval bidirectionally between the
            # virtual (column) sequence and the real sequence
            for inter in refIntervals(s[6]):
                self.mAlign[vseq[inter[0]:inter[1]]][ns[inter[2]:inter[3]]] = (inter[4])
                self.mAlign[ns[inter[2]:inter[3]]][vseq[inter[0]:inter[1]]] = (inter[4])

            s = fh.readline().split()

    def parse(self, filehandle):
        """parses the .maf filehandle """
        l = filehandle.readline();
        # the first line must be the '##maf' header; its key=value
        # parameters are stored via setpar
        if l.split()[0] != '##maf':
            return
        else:
            self.setpar(l.split()[1:])

        l = filehandle.readline()
        while l:
            la = l.split();
            ## print la
            if(len(la) == 0 or la[0] == '#'):
                ## print "skipping"
                1  # no-op: blank lines and comments are skipped
            elif(la[0] == 'a'):
                ## print "reading alignment"
                self.readalign(la[1:], filehandle)
            else:
                ## print "end of records"
                return

            l = filehandle.readline()

    def _vseq(self, slen):
        # Build a unique AnonSequence of length slen whose id is
        # self.vbase plus a base-len(letters) encoding of self._vid.
        alen = len(string.letters)
        uid = self.vbase
        cum = self._vid
        while cum/alen > 0:
            uid += string.letters[cum%alen]
            cum /= alen
        uid += string.letters[cum%alen]
        self._vid += 1
        return AnonSequence(slen, uid)

    def _dump(self, alignTab, sequenceTab=None):
        # Write alignment edges (and optionally the known sequence
        # intervals) as tab-separated rows, then reset parser state.
        for row in self.mAlign.repr_dict():
            alignTab.write('\t'.join(map(lambda x:str(x), row.values()))+'\n');
        if(sequenceTab):
            for s in self.sequences.values():
                for inter in s.known_int():
                    sequenceTab.write('\t'.join(map(lambda x:str(x), inter.values()))+'\n')

        del self.mAlign
        del self.sequences
        self.mAlign = PathMapping()
        self.sequences = {}

    def parseIntoDB(self, filehandle, cursor, alignTab, sequenceTab=None, update=None):
        """parses the .maf filehandle into database using cursors"""
        # remember the current offset and measure the file size so
        # progress can be reported as a percentage below
        c = filehandle.tell()
        filehandle.seek(0,2)
        filesize = filehandle.tell()
        filehandle.seek(c)
        l = filehandle.readline();
        rc = 0
        count = 0
        if l.split()[0] != '##maf':
            return
        else:
            self.setpar(l.split()[1:])

        l = filehandle.readline()
        while l:
            la = l.split();
            ## print la
            if(len(la) == 0 or la[0] == '#'):
                ## print "skipping"
                1  # no-op: blank lines and comments are skipped
            elif(la[0] == 'a'):
                ## print "reading alignment"
                count += 1
                self.readalign(la[1:], filehandle)
                self._dump(alignTab, sequenceTab)
                # every 1000 records, report percent of file consumed
                if(update and not count%1000):
                    cursor.execute(update %(int(filehandle.tell()*100./filesize)))
            else:
                ## print "end of records"
                return

            l = filehandle.readline()
|
Python
| 0
|
@@ -2181,19 +2181,20 @@
tart,ns.
-end
+stop
)%0A
@@ -2358,19 +2358,20 @@
tart,ns.
-end
+stop
)%0A%0A
|
9777855e779dfc77901a694dd3a9f6dfcc89a23e
|
Fix command line usage
|
pyinstrument/__main__.py
|
pyinstrument/__main__.py
|
from optparse import OptionParser
import sys
import os
import codecs
from pyinstrument import Profiler
from pyinstrument.profiler import SignalUnavailableError
# Python 3 compatibility. Mostly borrowed from SymPy
PY3 = sys.version_info[0] > 2

if PY3:
    # Python 3: exec is an ordinary builtin function
    import builtins
    exec_ = getattr(builtins, "exec")
else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace.

        Mirrors Python 3's exec() signature on Python 2: when the
        namespaces are omitted, the caller's globals/locals are used.
        """
        if _globs_ is None:
            # default to the calling frame's namespaces
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame  # break the reference cycle with the frame object
        elif _locs_ is None:
            _locs_ = _globs_
        # Python 2 exec *statement*, hidden inside a string so this
        # module still parses under Python 3
        exec("exec _code_ in _globs_, _locs_")
def main():
    """Command-line entry point for pyinstrument.

    Parses pyinstrument's own options, runs the target script under
    the profiler, and writes a text or HTML report to stdout or to
    --outfile. Returns the OptionParser instance.

    Exits with status 2 (after printing help) when called with no
    arguments at all.
    """
    # BUG FIX: this string was previously formatted with
    # '... % invocation', but `invocation` is undefined, so every
    # invocation raised NameError before argument parsing began.
    usage = "usage: pyinstrument [options] scriptfile [arg] ..."
    parser = OptionParser(usage=usage)
    # everything after the script name belongs to the profiled script
    parser.allow_interspersed_args = False

    parser.add_option('', '--setprofile',
        dest='setprofile', action='store_true',
        help='run in setprofile mode, instead of signal mode', default=False)

    parser.add_option('', '--html',
        dest="output_html", action='store_true',
        help="output HTML instead of text", default=False)
    parser.add_option('-o', '--outfile',
        dest="outfile", action='store',
        help="save report to <outfile>", default=None)

    parser.add_option('', '--unicode',
        dest='unicode', action='store_true',
        help='force unicode text output')
    parser.add_option('', '--no-unicode',
        dest='unicode', action='store_false',
        help='force ascii text output')

    parser.add_option('', '--color',
        dest='color', action='store_true',
        help='force ansi color text output')
    parser.add_option('', '--no-color',
        dest='color', action='store_false',
        help='force no color text output')

    if not sys.argv[1:]:
        parser.print_help()
        sys.exit(2)

    (options, args) = parser.parse_args()
    # the profiled script sees only its own argv
    sys.argv[:] = args

    if len(args) > 0:
        progname = args[0]
        # make imports relative to the profiled script work
        sys.path.insert(0, os.path.dirname(progname))

        with open(progname, 'rb') as fp:
            code = compile(fp.read(), progname, 'exec')
        globs = {
            '__file__': progname,
            '__name__': '__main__',
            '__package__': None,
        }

        # signal mode is preferred; fall back to setprofile where the
        # platform cannot deliver the signal (e.g. non-main thread)
        try:
            profiler = Profiler(use_signal=not options.setprofile)
        except SignalUnavailableError:
            profiler = Profiler(use_signal=False)

        profiler.start()

        try:
            exec_(code, globs, None)
        except IOError as e:
            import errno

            if e.errno == errno.EINTR:
                # explain the EINTR caused by our own profiling signals
                print(
                    'Failed to run program due to interrupted system system call.\n'
                    'This happens because pyinstrument is sending OS signals to the running\n'
                    'process to interrupt it. If your program has long-running syscalls this\n'
                    'can cause a problem.\n'
                    '\n'
                    'You can avoid this error by running in \'setprofile\' mode. Do this by\n'
                    'passing \'--setprofile\' when calling pyinstrument at the command-line.\n'
                    '\n'
                    'For more information, see\n'
                    'https://github.com/joerick/pyinstrument/issues/16\n'
                )

            raise
        except (SystemExit, KeyboardInterrupt):
            pass

        profiler.stop()

        if options.outfile:
            f = codecs.open(options.outfile, 'w', 'utf-8')
            close_f = True
        else:
            f = sys.stdout
            close_f = False

        unicode_override = options.unicode != None
        color_override = options.color != None

        unicode = options.unicode if unicode_override else file_supports_unicode(f)
        color = options.color if color_override else file_supports_color(f)

        if options.output_html:
            f.write(profiler.output_html())
        else:
            f.write(profiler.output_text(unicode=unicode, color=color))

        # BUG FIX: only close files we opened ourselves — the original
        # unconditionally closed f, which closed sys.stdout when no
        # --outfile was given
        if close_f:
            f.close()
    else:
        parser.print_usage()
    return parser
def file_supports_color(file_obj):
    """
    Returns True if the running system's terminal supports color, and False
    otherwise.

    Borrowed from Django
    https://github.com/django/django/blob/master/django/core/management/color.py
    """
    platform_ok = sys.platform != 'Pocket PC' and (
        sys.platform != 'win32' or 'ANSICON' in os.environ)
    # color only makes sense on an interactive terminal
    is_tty = hasattr(file_obj, 'isatty') and file_obj.isatty()

    if platform_ok and is_tty:
        return True
    return False
def file_supports_unicode(file_obj):
    """Return True if file_obj's declared encoding is a UTF variant."""
    encoding = getattr(file_obj, 'encoding', None)
    if not encoding:
        # no encoding attribute, or an empty/None one: assume no unicode
        return False

    return 'utf' in codecs.lookup(encoding).name
if __name__ == '__main__':
    # command-line entry point (`python -m pyinstrument ...`)
    main()
|
Python
| 0.540588
|
@@ -786,21 +786,8 @@
...%22
- %25 invocation
)%0A
|
d0bc51c735ebb48b7774d5ddb3e953e505df6b7a
|
add fallback for FileNotFoundError for py<3.4
|
pymdt/tests/test_init.py
|
pymdt/tests/test_init.py
|
import unittest
from os import path
import numpy as np
from pymdt import loadm
from pymdt import _parse_variable_name
from pymdt import _parse_array_assignment
from pymdt import _parse_is_sub_array_assignment
from pymdt import _parse_sub_array_assignment_index
# Directory containing this test module; fixture files live in ./test_files
_TEST_PATH = path.dirname(path.abspath(__file__))
FILE_SIMPLE_M = path.join(_TEST_PATH, 'test_files', 'simple.m')
class TestInit(unittest.TestCase):
    """Tests for the public loadm() entry point of pymdt."""

    def test_read_simple_m(self):
        # loadm returns a dict of variable name -> numpy array
        data = loadm(FILE_SIMPLE_M)

        exact_cases = [
            ('X', [236227.1094, 241782.6650, 247338.2207]),
            ('Y', [456467.5000, 462023.0557, 467578.6113]),
            ('WavelengthCalibr', [535.9459, 535.9609]),
            ('RamanShiftCalibr', [138.3936, 138.9163]),
        ]
        for key, values in exact_cases:
            np.testing.assert_array_equal(data[key], np.array(values))

        map_expected = np.array(
            [[[0.0, 1.0], [2.0, 3.0], [4.0, 5.0]],
             [[6.0, 7.0], [8.0, 9.0], [10.0, 11.0]],
             [[12.0, 13.0], [14.0, 15.0], [16.0, 17.0]]])
        np.testing.assert_array_almost_equal(data['Map'], map_expected)

    def test_file_not_found_err(self):
        bogus_name = 'adsfkljsad.sdafkasdlflas.dfjsakddflkgh.sadglkasdlkjsdfeiwq'
        self.assertRaises(FileNotFoundError, loadm, bogus_name)

    def test_version_exists(self):
        from pymdt import __version__
class TestInitHidden(unittest.TestCase):
    """
    Unittests for hidden functions in __init__.py
    """

    def test_parse_variable_name(self):
        self.assertEqual(_parse_variable_name('X = [1.0 2.0];'), 'X')
        # indexed assignments still report the bare variable name
        self.assertEqual(_parse_variable_name('Map(:,:,1) = [1 2]'), 'Map')

    def test_parse_array_assignment_1d(self):
        parsed = _parse_array_assignment('Map = [1.0 2.0 3.0 4.0];')
        np.testing.assert_array_almost_equal(
            parsed, np.array([1.0, 2.0, 3.0, 4.0]))

    def test_parse_array_assignment_2d(self):
        parsed = _parse_array_assignment('Map = [1.0 2.0 ;3.0 4.0 ];')
        np.testing.assert_array_almost_equal(
            parsed, np.array([[1.0, 2.0], [3.0, 4.0]]))

    def test_parse_array_assignment_zeros(self):
        parsed = _parse_array_assignment('Map = zeros(3,3,2);')
        np.testing.assert_array_equal(parsed, np.zeros((3, 3, 2)))

    def test_parse_is_sub_array_assignment(self):
        self.assertFalse(_parse_is_sub_array_assignment('Map = zeros(3,3,2);'))
        self.assertTrue(_parse_is_sub_array_assignment('Map(:,:,1) = [1 2];'))

    def test_parse_sub_array_assignment_index(self):
        # MATLAB indices are 1-based; the parser converts to 0-based
        self.assertEqual(
            _parse_sub_array_assignment_index('Map(:,:,99) = [1 2];'), 98)
|
Python
| 0
|
@@ -372,16 +372,183 @@
le.m')%0A%0A
+# FileNotFoundError is not defined until python3.4, so revert back to IOError%0A# if needed%0Atry:%0A FileNotFoundError%0Aexcept NameError:%0A FileNotFoundError = IOError%0A
%0Aclass T
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.