commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
0be0d20fc667f0734b85d98f1d359130f7ed5b98
|
Add failing specs for current/future class names.
|
plotly/tests/test_core/test_graph_objs/test_graph_objs.py
|
plotly/tests/test_core/test_graph_objs/test_graph_objs.py
|
Python
| 0
|
@@ -0,0 +1,1878 @@
+from unittest import TestCase%0A%0Aimport plotly.graph_objs as go%0Aimport plotly.graph_reference as gr%0A%0AOLD_CLASS_NAMES = %5B'AngularAxis', 'Annotation', 'Annotations', 'Area',%0A 'Bar', 'Box', 'ColorBar', 'Contour', 'Contours',%0A 'Data', 'ErrorX', 'ErrorY', 'ErrorZ', 'Figure',%0A 'Font', 'Heatmap', 'Histogram', 'Histogram2d',%0A 'Histogram2dContour', 'Layout', 'Legend', 'Line',%0A 'Margin', 'Marker', 'RadialAxis', 'Scatter',%0A 'Scatter3d', 'Scene', 'Stream', 'Surface', 'Trace',%0A 'XAxis', 'XBins', 'YAxis', 'YBins', 'ZAxis'%5D%0A%0A%0Aclass TestBackwardsCompat(TestCase):%0A%0A def test_old_class_names(self):%0A%0A # these were all defined at one point, we want to maintain backwards%0A # compat, so we basically just create a checkpoint with this test.%0A%0A for class_name in OLD_CLASS_NAMES:%0A self.assertIn(class_name, go.__dict__.keys())%0A%0A%0Aclass TestGraphObjs(TestCase):%0A%0A def test_traces_should_be_defined(self):%0A%0A # we *always* want to create classes for traces%0A%0A class_names = %5Bgr.string_to_class_name(object_name)%0A for object_name in gr.TRACE_NAMES%5D%0A for class_name in class_names:%0A self.assertIn(class_name, go.__dict__.keys())%0A%0A def test_no_new_classes(self):%0A%0A # for maintenance reasons, we don't want to generate new class defs%0A%0A expected_class_names = %7Bgr.string_to_class_name(object_name)%0A for object_name in gr.TRACE_NAMES%7D%0A expected_class_names.update(OLD_CLASS_NAMES)%0A%0A # assume that CapitalCased keys are the classes we defined%0A current_class_names = %7Bkey for key in go.__dict__.keys()%0A if key%5B0%5D.isupper()%7D%0A self.assertEqual(current_class_names, expected_class_names)%0A
|
|
6b53d081b78d3ea2073bdc13112b146660595b5f
|
Add tests for resource_renderer
|
tests/test_resource_renderer.py
|
tests/test_resource_renderer.py
|
Python
| 0
|
@@ -0,0 +1,863 @@
+from nose.tools import assert_true, assert_equals%0Afrom pyramid.testing import DummyRequest%0Afrom pyramid.response import Response%0A%0Aclass SUTResource(object):%0A%0A def __init__(self):%0A self.request = DummyRequest()%0A%0A def index(self):%0A return %7B%7D%0A%0Aclass TestResourceRenderer(object):%0A%0A def _getTargetClass(self):%0A from pyramid_restpike import resource_renderer%0A return resource_renderer%0A%0A def _makeOne(self, *arg, **kw):%0A decorator = self._getTargetClass()(*arg, **kw)%0A class SUTResource(object):%0A @decorator%0A def index(self):%0A return %7B%7D%0A sut = SUTResource()%0A sut.request = DummyRequest()%0A return sut%0A%0A def test_default_renderer(self):%0A sut = self._makeOne('json')%0A response = sut.index()%0A assert_true(isinstance(response, Response))%0A
|
|
22b2446546ce59b99980e98e81b3571d81085304
|
Test that daily westminster pages load
|
tests/test_westminster_daily.py
|
tests/test_westminster_daily.py
|
Python
| 0
|
@@ -0,0 +1,892 @@
+import datetime as dt%0A%0Afrom flask_application import app%0A%0A%0Adef test_daily_westminster_pages_exist():%0A start_date = dt.date(2015, 01, 01)%0A%0A with app.test_client() as c:%0A for days in range(365):%0A date = start_date + dt.timedelta(days=days)%0A month, day = date.month, date.day%0A response = c.get('/%7Bmonth:02d%7D/%7Bday:02d%7D/'.format(month=month, day=day))%0A assert response.status_code == 200%0A%0A%0Adef test_daily_westminster_bad_days():%0A with app.test_client() as c:%0A response = c.get('/01/32/')%0A assert response.status_code == 404%0A response = c.get('/02/30/')%0A assert response.status_code == 404%0A response = c.get('/04/31/')%0A assert response.status_code == 404%0A%0A%0Adef test_daily_leap_day():%0A with app.test_client() as c:%0A response = c.get('/02/29/')%0A assert response.status_code == 200%0A
|
|
eb828764ddbe3988f71b98082e1560e594c3f65d
|
Add a bot message to display TeamCity test results
|
ci/teamcity/comment_on_pr.py
|
ci/teamcity/comment_on_pr.py
|
Python
| 0
|
@@ -0,0 +1,1261 @@
+%22%22%22%0APost the comment like the following to the PR:%0A%60%60%60%0A:robot: TeamCity test results bot :robot:%0A%0A%3CLogs from pytest%3E%0A%60%60%60%0A%22%22%22%0A%0Afrom github import Github%0Aimport os%0Aimport sys%0A%0A# Check if this is a pull request or not based on the environment variable%0Atry:%0A pr_id = int(os.environ%5B%22GITHUB_PR_NUMBER%22%5D.split(%22/%22)%5B-1%5D)%0Aexcept Exception:%0A sys.exit(0)%0Aheader = %22%22%22%0A%3Ch2 align=%22center%22%3E:robot: TeamCity test results bot :robot:%3C/h2%3E%0A%0A%22%22%22%0A%0Apytest_outputs = %5B%22ray_tests.log%22, %22dask_tests.log%22, %22python_tests.log%22%5D%0A%0Afull_comment = header%0A%0Afor out in pytest_outputs:%0A%0A full_comment += %22%3Cdetails%3E%3Csummary%3E%7B%7D Tests%3C/summary%3E%5Cn%22.format(%0A out.split(%22_%22)%5B0%5D.title()%0A )%0A full_comment += %22%5Cn%5Cn%60%60%60%5Cn%22%0A full_comment += open(out, %22r%22).read()%0A full_comment += %22%5Cn%60%60%60%5Cn%5Cn%3C/details%3E%5Cn%22%0A%0Atoken = os.environ%5B%22GITHUB_TOKEN%22%5D%0Ag = Github(token)%0Arepo = g.get_repo(%22modin-project/modin%22)%0A%0Apr = repo.get_pull(pr_id)%0Aif any(i.user.login == %22modin-bot%22 for i in pr.get_issue_comments()):%0A pr_comment_list = %5B%0A i for i in list(pr.get_issue_comments()) if i.user.login == %22modin-bot%22%0A %5D%0A assert len(pr_comment_list) == 1, %22Too many comments from modin-bot already%22%0A pr_comment_list%5B0%5D.edit(full_comment)%0Aelse:%0A pr.create_issue_comment(full_comment)%0A
|
|
b0006bf92ae221558d47a0b3c9010cfaacde2bfe
|
add checkmein.py with __init__ function
|
autocheckin/checkmein.py
|
autocheckin/checkmein.py
|
Python
| 0.000029
|
@@ -0,0 +1,266 @@
+from selenium import webdriver%0Afrom selinium.webdriver.common.keys import Keys%0A%0Aclass CheckMeIn(object): %0A %0A%0A def __init__(self, firstName, lastName, confNum):%0A self.firstName = firstName%0A self.lastName = lastName%0A self.confNum = confNum%0A%0A
|
|
528401b2c5cab29e301814da1754f0c0c41bdcd1
|
Update shortest-distance-to-a-character.py
|
Python/shortest-distance-to-a-character.py
|
Python/shortest-distance-to-a-character.py
|
# Time: O(n)
# Space: O(n)
# Given a string S and a character C,
# return an array of integers representing the shortest distance
# from the character C in the string.
#
# Example 1:
#
# Input: S = "loveleetcode", C = 'e'
# Output: [3, 2, 1, 0, 1, 0, 0, 1, 2, 2, 1, 0]
#
# Note:
# - S string length is in [1, 10000].
# - C is a single character, and guaranteed to be in string S.
# - All letters in S and C are lowercase.
import itertools
try:
xrange # Python 2
except NameError:
xrange = range # Python 3
class Solution(object):
def shortestToChar(self, S, C):
"""
:type S: str
:type C: str
:rtype: List[int]
"""
result = [len(S)] * len(S)
prev = -len(S)
for i in itertools.chain(xrange(len(S)),
reversed(xrange(len(S)))):
if S[i] == C:
prev = i
result[i] = min(result[i], abs(i-prev))
return result
|
Python
| 0.000054
|
@@ -18,17 +18,17 @@
pace: O(
-n
+1
)%0A%0A# Giv
|
231bc7bb7bd7e373d4b4c9a3e33d6539d0637828
|
Add xfailing test for #3345
|
spacy/tests/regression/test_issue3345.py
|
spacy/tests/regression/test_issue3345.py
|
Python
| 0
|
@@ -0,0 +1,985 @@
+%22%22%22Test interaction between preset entities and sentence boundaries in NER.%22%22%22%0Aimport spacy%0Afrom spacy.tokens import Doc%0Afrom spacy.pipeline import EntityRuler, EntityRecognizer%0A%0A%0A@pytest.mark.xfail%0Adef test_issue3345():%0A %22%22%22Test case where preset entity crosses sentence boundary.%22%22%22%0A nlp = spacy.blank(%22en%22)%0A doc = Doc(nlp.vocab, words=%5B%22I%22, %22live%22, %22in%22, %22New%22, %22York%22%5D)%0A doc%5B4%5D.is_sent_start = True%0A%0A ruler = EntityRuler(nlp, patterns=%5B%7B%22label%22: %22GPE%22, %22pattern%22: %22New York%22%7D%5D)%0A ner = EntityRecognizer(doc.vocab)%0A # Add the OUT action. I wouldn't have thought this would be necessary...%0A ner.moves.add_action(5, %22%22)%0A ner.add_label(%22GPE%22)%0A%0A doc = ruler(doc)%0A # Get into the state just before %22New%22%0A state = ner.moves.init_batch(%5Bdoc%5D)%5B0%5D%0A ner.moves.apply_transition(state, %22O%22)%0A ner.moves.apply_transition(state, %22O%22)%0A ner.moves.apply_transition(state, %22O%22)%0A # Check that B-GPE is valid.%0A assert ner.moves.is_valid(state, %22B-GPE%22)%0A
|
|
d358a799ce726706543ac1d440f5b60112125a52
|
Add psf building directory
|
photutils/psf/building/__init__.py
|
photutils/psf/building/__init__.py
|
Python
| 0
|
@@ -0,0 +1,151 @@
+# Licensed under a 3-clause BSD style license - see LICENSE.rst%0A%22%22%22%0AThis subpackage contains modules and packages to build point spread%0Afunctions.%0A%22%22%22%0A
|
|
e19c8d52719d1bc00023406842e9a445580d98d3
|
add wrappers.py just a dispatch wrapper for linear models
|
scikits/statsmodels/wrappers.py
|
scikits/statsmodels/wrappers.py
|
Python
| 0
|
@@ -0,0 +1,1263 @@
+# -*- coding: utf-8 -*-%0A%22%22%22Convenience Wrappers%0A%0ACreated on Sat Oct 30 14:56:35 2010%0A%0AAuthor: josef-pktd%0ALicense: BSD%0A%22%22%22%0A%0Aimport numpy as np%0Aimport scikits.statsmodels as sm%0Afrom scikits.statsmodels import GLS, WLS, OLS%0A%0Adef remove_nanrows(y, x):%0A '''remove common rows in %5By,x%5D that contain at least one nan%0A%0A TODO: this should be made more flexible,%0A arbitrary number of arrays and 1d or 2d arrays%0A%0A '''%0A mask = ~np.isnan(y)%0A mask *= ~(np.isnan(x).any(-1)) #* or &%0A y = y%5Bmask%5D%0A x = x%5Bmask%5D%0A return y, x%0A%0A%0Adef linmod(y, x, weights=None, sigma=None, add_const=True, filter_missing=True,%0A **kwds):%0A '''get linear model with extra options for entry%0A%0A dispatches to regular model class and does not wrap the output%0A%0A If several options are exclusive, for example sigma and weights, then the%0A chosen class depends on the implementation sequence.%0A '''%0A%0A if filter_missing:%0A y, x = remove_nanrows(y, x)%0A #do the same for masked arrays%0A%0A if add_const:%0A x = sm.add_constant(x, prepend=True)%0A%0A if not sigma is None:%0A return GLS(y, x, sigma=sigma, **kwds)%0A elif not weights is None:%0A return WLS(y, x, weights=weights, **kwds)%0A else:%0A return OLS(y, x, **kwds)%0A%0A
|
|
c63ad26327f294393434dcfe4d5454656a0c1b4b
|
Add initial generate movie
|
scripts/generate-movie-plots.py
|
scripts/generate-movie-plots.py
|
Python
| 0.000001
|
@@ -0,0 +1,411 @@
+#!/usr/bin/env python3%0A%0Aimport os%0Aimport argparse%0A%0A%0Adef main():%0A parser = argparse.ArgumentParser()%0A parser.add_argument('sim_dir', help='Simulation directory')%0A parser.add_argument('output_dir', help='Output directory')%0A parser.add_argument('-s', '--skip', help='Skip pattern for outputs', default=1, type=int)%0A args = parser.parse_args()%0A%0A print(args)%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
dbdb247ad03ca6b9168f193eadaf28638d718072
|
Change docstring for NamedEntity filth
|
scrubadub/filth/named_entity.py
|
scrubadub/filth/named_entity.py
|
from .base import Filth
class NamedEntityFilth(Filth):
"""
Named entity filth. Upon initialisation provide a label for named entity (e.g. name, org)
"""
type = 'named_entity'
def __init__(self, *args, label: str, **kwargs):
super(NamedEntityFilth, self).__init__(*args, **kwargs)
self.label = label.lower()
self.replacement_string = "{}_{}".format(self.type, self.label)
|
Python
| 0
|
@@ -66,97 +66,175 @@
-Named entity filth. Upon initialisation provide a label for named entity (e.g. name, org)
+Default filth type, for named entities (e.g. the ones in https://nightly.spacy.io/models/en#en_core_web_lg-labels),%0A except the ones represented in any other filth.
%0A
|
e84414c822a073848152787868c41bd97f713551
|
Fix syntax (and unused variable) in #1675
|
beetsplug/embedart.py
|
beetsplug/embedart.py
|
# This file is part of beets.
# Copyright 2015, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Allows beets to embed album art into file metadata."""
from __future__ import (division, absolute_import, print_function,
unicode_literals)
import os.path
from beets.plugins import BeetsPlugin
from beets import ui
from beets.ui import decargs
from beets.util import syspath, normpath, displayable_path, bytestring_path
from beets.util.artresizer import ArtResizer
from beets import config
from beets import art
class EmbedCoverArtPlugin(BeetsPlugin):
"""Allows albumart to be embedded into the actual files.
"""
def __init__(self):
super(EmbedCoverArtPlugin, self).__init__()
self.config.add({
'maxwidth': 0,
'auto': True,
'compare_threshold': 0,
'ifempty': False,
'remove_art_file': False
})
if self.config['maxwidth'].get(int) and not ArtResizer.shared.local:
self.config['maxwidth'] = 0
self._log.warning(u"ImageMagick or PIL not found; "
u"'maxwidth' option ignored")
if self.config['compare_threshold'].get(int) and not \
ArtResizer.shared.can_compare:
self.config['compare_threshold'] = 0
self._log.warning(u"ImageMagick 6.8.7 or higher not installed; "
u"'compare_threshold' option ignored")
self.register_listener('art_set', self.process_album)
def commands(self):
# Embed command.
embed_cmd = ui.Subcommand(
'embedart', help='embed image files into file metadata'
)
embed_cmd.parser.add_option(
'-f', '--file', metavar='PATH', help='the image file to embed'
)
maxwidth = self.config['maxwidth'].get(int)
compare_threshold = self.config['compare_threshold'].get(int)
ifempty = self.config['ifempty'].get(bool)
remove_art_file = self.config['remove_art_file'].get(bool)
def embed_func(lib, opts, args):
if opts.file:
imagepath = normpath(opts.file)
if not os.path.isfile(syspath(imagepath)):
raise ui.UserError(u'image file {0} not found'.format(
displayable_path(imagepath)
))
for item in lib.items(decargs(args)):
art.embed_item(self._log, item, imagepath, maxwidth, None,
compare_threshold, ifempty)
else:
for album in lib.albums(decargs(args)):
art.embed_album(self._log, album, maxwidth, False,
compare_threshold, ifempty)
self.remove_artfile(album)
embed_cmd.func = embed_func
# Extract command.
extract_cmd = ui.Subcommand('extractart',
help='extract an image from file metadata')
extract_cmd.parser.add_option('-o', dest='outpath',
help='image output file')
extract_cmd.parser.add_option('-n', dest='filename',
help='image filename to create for all '
'matched albums')
extract_cmd.parser.add_option('-a', dest='associate',
action='store_true',
help='associate the extracted images '
'with the album')
def extract_func(lib, opts, args):
if opts.outpath:
art.extract_first(self._log, normpath(opts.outpath),
lib.items(decargs(args)))
else:
filename = bytestring_path(opts.filename or
config['art_filename'].get())
if os.path.dirname(filename) != '':
self._log.error(u"Only specify a name rather than a path "
u"for -n")
return
for album in lib.albums(decargs(args)):
artpath = normpath(os.path.join(album.path, filename))
artpath = art.extract_first(self._log, artpath,
album.items())
if artpath and opts.associate:
album.set_art(artpath)
album.store()
extract_cmd.func = extract_func
# Clear command.
clear_cmd = ui.Subcommand('clearart',
help='remove images from file metadata')
def clear_func(lib, opts, args):
art.clear(self._log, lib, decargs(args))
clear_cmd.func = clear_func
return [embed_cmd, extract_cmd, clear_cmd]
def process_album(self, album):
"""Automatically embed art after art has been set
"""
if self.config['auto'] and config['import']['write']:
max_width = self.config['maxwidth'].get(int)
art.embed_album(self._log, album, max_width, True,
self.config['compare_threshold'].get(int),
self.config['ifempty'].get(bool))
self.remove_artfile(album)
def remove_artfile(self, album)
if self.config['remove_art_file'] and album.artpath:
if os.path.isfile(album.artpath):
self._log.debug(u'Removing album art file '
u'for {0}', album)
os.remove(album.artpath)
album.artpath = None
album.store()
|
Python
| 0.000001
|
@@ -2531,75 +2531,8 @@
ool)
-%0A remove_art_file = self.config%5B'remove_art_file'%5D.get(bool)
%0A%0A
@@ -5867,24 +5867,25 @@
file(album)%0A
+%0A
def remo
@@ -5899,32 +5899,33 @@
ile(self, album)
+:
%0A if self
|
11f4add6873c7c089b5674415276a71b4c03cb42
|
add example mbsubmit plugin
|
beetsplug/mbsubmit.py
|
beetsplug/mbsubmit.py
|
Python
| 0
|
@@ -0,0 +1,2984 @@
+# -*- coding: utf-8 -*-%0A# This file is part of beets.%0A# Copyright 2015, Adrian Sampson and Diego Moreda.%0A#%0A# Permission is hereby granted, free of charge, to any person obtaining%0A# a copy of this software and associated documentation files (the%0A# %22Software%22), to deal in the Software without restriction, including%0A# without limitation the rights to use, copy, modify, merge, publish,%0A# distribute, sublicense, and/or sell copies of the Software, and to%0A# permit persons to whom the Software is furnished to do so, subject to%0A# the following conditions:%0A#%0A# The above copyright notice and this permission notice shall be%0A# included in all copies or substantial portions of the Software.%0A%0A%22%22%22Aid in submitting information to MusicBrainz.%0A%0AThis plugin allows the user to print track information in a format that is%0Aparseable by the MusicBrainz track parser. Programmatic submitting is not%0Aimplemented by MusicBrainz yet.%0A%22%22%22%0A%0Afrom __future__ import (division, absolute_import, print_function,%0A unicode_literals)%0A%0A%0Afrom beets.autotag import Recommendation%0Afrom beets.importer import action%0Afrom beets.plugins import BeetsPlugin%0Afrom beets.ui.commands import ExtraChoice%0Afrom beetsplug.info import print_data%0A%0A%0Aclass MBSubmitPlugin(BeetsPlugin):%0A def __init__(self):%0A super(MBSubmitPlugin, self).__init__()%0A%0A self.register_listener('before_choose_candidate',%0A self.before_choose_candidate_event)%0A%0A def before_choose_candidate_event(self, session, task):%0A # This intends to illustrate a simple plugin that adds choices%0A # depending on conditions.%0A # Plugins should return a list of ExtraChoices (basically, the%0A # %22cosmetic%22 values and a callback function). 
This list is received and%0A # flattened on plugins.send('before_choose_candidate').%0A if not task.candidates or task.rec == Recommendation.none:%0A return %5BExtraChoice(self, 'PRINT', 'Print tracks',%0A self.print_tracks),%0A ExtraChoice(self, 'PRINT_SKIP', 'print tracks and sKip',%0A self.print_tracks_and_skip)%5D%0A%0A # Callbacks for choices.%0A def print_tracks(self, session, task):%0A for i in task.items:%0A print_data(None, i, '$track. $artist - $title ($length)')%0A%0A def print_tracks_and_skip(self, session, task):%0A # Example of a function that automatically sets the next action,%0A # avoiding the user to be prompted again. It has some drawbacks (for%0A # example, actions such as action.MANUAL are not handled properly, as%0A # they do not exit the main TerminalImportSession.choose_match loop).%0A #%0A # The idea is that if a callback function returns an action.X value,%0A # task.action is set to that value after the callback is processed.%0A for i in task.items:%0A print_data(None, i, '$track. $artist - $title ($length)')%0A return action.SKIP%0A
|
|
27bec5bc3dab9798d4ddbfcd84563b3c5056e8c8
|
delete again
|
pychart/pychart_datarender/migrations/0001_initial.py
|
pychart/pychart_datarender/migrations/0001_initial.py
|
Python
| 0
|
@@ -0,0 +1,2121 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.10.5 on 2017-02-15 00:50%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0Aimport django.db.models.deletion%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A initial = True%0A%0A dependencies = %5B%0A ('pychart_profile', '0001_initial'),%0A %5D%0A%0A operations = %5B%0A migrations.CreateModel(%0A name='Data',%0A fields=%5B%0A ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),%0A ('title', models.CharField(max_length=255)),%0A ('description', models.CharField(blank=True, max_length=255, null=True)),%0A ('data', models.FileField(blank=True, null=True, upload_to='data')),%0A ('date_uploaded', models.DateField(auto_now=True)),%0A ('date_modified', models.DateField(auto_now=True)),%0A ('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='data_sets', to='pychart_profile.PyChartProfile')),%0A %5D,%0A ),%0A migrations.CreateModel(%0A name='Render',%0A fields=%5B%0A ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),%0A ('title', models.CharField(max_length=255)),%0A ('description', models.CharField(blank=True, max_length=255, null=True)),%0A ('render_type', models.CharField(blank=True, choices=%5B('Scatter', 'Scatter'), ('Bar', 'Bar'), ('Histogram', 'Histogram')%5D, max_length=255, null=True)),%0A ('render', models.TextField(blank=True, null=True)),%0A ('date_uploaded', models.DateField(auto_now=True)),%0A ('date_modified', models.DateField(auto_now=True)),%0A ('data_sets', models.ManyToManyField(related_name='renders', to='pychart_datarender.Data')),%0A ('owner', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='renders', to='pychart_profile.PyChartProfile')),%0A %5D,%0A ),%0A %5D%0A
|
|
27896063f7632afa327c4933248435c874b91b7a
|
Create __init__.py
|
tests/__init__.py
|
tests/__init__.py
|
Python
| 0.000429
|
@@ -0,0 +1 @@
+%0A
|
|
b66b696483608de022c25b3e14a1b23351ba86da
|
4-9 cubed2
|
04/cubed2.py
|
04/cubed2.py
|
Python
| 0.999414
|
@@ -0,0 +1,146 @@
+squares = %5B%5D%0D%0Afor value in range(1,11):%0D%0A square = value **3%0D%0A squares.append(square)%0D%0A%0D%0A #print(squares)%0D%0Aprint(squares)%0D%0A#%E7%9B%B4%E6%8E%A5%E9%80%BB%E8%BE%91,99%E4%B9%98%E6%B3%95%E8%A1%A8%E7%9A%84%E5%8E%9F%E7%90%86
|
|
24c85bf4550c4560e2a192fd8513f3788ea2148e
|
add tools calcuate microseconds between two times
|
skills/time-tool/microsecond.py
|
skills/time-tool/microsecond.py
|
Python
| 0.000188
|
@@ -0,0 +1,1667 @@
+#!/usr/bin/env python%0A# -*- coding: UTF-8 -*-%0A#%0A# Copyright (c) 2016 ASMlover. All rights reserved.%0A#%0A# Redistribution and use in source and binary forms, with or without%0A# modification, are permitted provided that the following conditions%0A# are met:%0A#%0A# * Redistributions of source code must retain the above copyright%0A# notice, this list ofconditions and the following disclaimer.%0A#%0A# * Redistributions in binary form must reproduce the above copyright%0A# notice, this list of conditions and the following disclaimer in%0A# the documentation and/or other materialsprovided with the%0A# distribution.%0A#%0A# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS%0A# %22AS IS%22 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT%0A# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS%0A# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE%0A# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,%0A# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,%0A# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;%0A# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER%0A# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT%0A# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN%0A# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE%0A# POSSIBILITY OF SUCH DAMAGE.%0A%0Afrom __future__ import print_function%0A%0Afrom datetime import datetime%0Aimport time%0A%0Adef main():%0A t1 = datetime.now()%0A time.sleep(0.1)%0A t2 = datetime.now()%0A print ('step time is : %25d' %25 int((t2 - t1).total_seconds() * 1000))%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
b52a1aee01a95e499da47e8a039ee7106475cef3
|
Move Django log to resources/logs
|
source/memex/common_settings.py
|
source/memex/common_settings.py
|
"""
Common Django settings for memex explorer project (both dev and deploy)
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import logging
import os
import sys
from django.conf import global_settings
from local_settings import *
from supervisor_services import check_process_state
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '0#t((zq66&3*87djaltu-pn34%0p!*v_332f2p!$2i)w5y17f8'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'crispy_forms',
'base',
'task_manager',
'rest_framework',
)
EXPLORER_APPS = (
'crawl_space',
)
INSTALLED_APPS += tuple("apps.%s" % app for app in EXPLORER_APPS)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = global_settings.TEMPLATE_CONTEXT_PROCESSORS + (
"base.views.project_context_processor",
)
ROOT_URLCONF = 'memex.urls'
WSGI_APPLICATION = 'memex.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'base/static')
STATIC_URL = '/static/'
TEMPLATE_DIRS = [os.path.join(BASE_DIR, 'templates')]
MEDIA_URL = '/resources/'
CRISPY_TEMPLATE_PACK = 'bootstrap3'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'handlers': {
'file': {
'level': 'DEBUG',
'class': 'logging.FileHandler',
'filename': os.path.join(BASE_DIR, 'memex/logs/debug.log'),
},
},
'loggers': {
'django.request': {
'handlers': ['file'],
'level': 'DEBUG',
'propagate': True,
},
},
}
# Celery Config
BROKER_URL = 'redis://localhost'
CELERY_RESULT_BACKEND = 'redis://localhost'
CELERY_TASK_SERIALIZER = 'pickle'
CELERY_RESULT_SERIALIZER = 'pickle'
CELERY_ACCEPT_CONTENT=['pickle']
CELERY_TRACK_STARTED = True
#These must be specified
MEDIA_ROOT = None
PROJECT_PATH = None
EXTERNAL_APP_LOCATIONS = {}
# Update this set if you add an external application
EXTERNAL_APPS = {'celery',
'ddt',
'elasticsearch',
'kibana',
'rabbitmq',
'bokeh-server',
'redis',
'tad',
'tika'}
sys.stderr.write("[%d]: Querying supervisor for state of external applications\n" % (os.getpid()))
READY_EXTERNAL_APPS = {app for app in EXTERNAL_APPS if check_process_state(app)}
if READY_EXTERNAL_APPS:
sys.stderr.write("[%d]: The following applications are ready %s\n" % (os.getpid(), str(READY_EXTERNAL_APPS)))
else:
sys.stderr.write("[%d]: Supervisord not running or no applications are running\n" % (os.getpid()))
ENABLE_STREAM_VIZ = 'bokeh-server' in READY_EXTERNAL_APPS and 'rabbitmq' in READY_EXTERNAL_APPS
sys.stderr.write("Setting enable stream viz to: {}".format(ENABLE_STREAM_VIZ))
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.AllowAny'
],
'DEFAULT_FILTER_BACKENDS': ('rest_framework.filters.DjangoFilterBackend',)
}
|
Python
| 0.000001
|
@@ -2929,24 +2929,33 @@
R, '
-memex/logs/debug
+resources/logs/django-app
.log
|
cb3158cbb116153d516a01ba1f63e26301338bbb
|
Create sigmoid_upper_bounds.py (#629)
|
scripts/sigmoid_upper_bounds.py
|
scripts/sigmoid_upper_bounds.py
|
Python
| 0.000005
|
@@ -0,0 +1,785 @@
+# Upper bounds for sigmoid function%0A%0Aimport numpy as np%0Aimport math%0Aimport matplotlib.pyplot as plt%0Aimport pyprobml_utils as pml%0A%0Asigmoid = lambda x: np.exp(x) / (1 + np.exp(x))%0Afstar = lambda eta: -eta * math.log(eta) - (1 - eta) * math.log(1 - eta)%0Asigmoid_upper = lambda eta, x: np.exp(eta * x - fstar(eta))%0A%0Aeta1, eta2 = 0.2, 0.7%0Astart, stop, step = -6, 6, 1 / 10%0Axs = np.arange(start, stop + step, step)%0A%0A%0Aplt.plot(xs, sigmoid(xs), 'r', linewidth=3)%0Aplt.plot(xs, sigmoid_upper(eta1, xs), 'b', linewidth=3)%0Aplt.plot(xs, sigmoid_upper(eta2, xs), 'b', linewidth=3)%0Aplt.text(1 / 2 + 1 / 2, sigmoid_upper(eta1, 1 / 2), 'eta=0.2')%0Aplt.text(0 + 1 / 2, sigmoid_upper(eta2, 0), 'eta=0.7')%0Aplt.xlim(%5Bstart, stop%5D)%0Aplt.ylim(%5B0, 1%5D)%0Apml.savefig('sigmoid_upper_bound.pdf', dpi=300)%0Aplt.show()%0A
|
|
9bfb182f92b8ac82ddb1b35c886b4a3f79708696
|
Add script for train/test split
|
scripts/split_train_and_test.py
|
scripts/split_train_and_test.py
|
Python
| 0
|
@@ -0,0 +1,1075 @@
+import os%0Aimport shutil%0Aimport argparse%0Aimport random%0Arandom.seed(47297)%0A%0A%0Aparser = argparse.ArgumentParser(description='Split data into train and test sets.')%0Aparser.add_argument('subjects_root_path', type=str, help='Directory containing subject sub-directories.')%0Aargs, _ = parser.parse_known_args()%0A%0A%0Adef move_to_partition(patients, partition):%0A if not os.path.exists(os.path.join(args.subjects_root_path, partition)):%0A os.mkdir(os.path.join(args.subjects_root_path, partition))%0A for patient in patients:%0A src = os.path.join(args.subjects_root_path, patient)%0A dest = os.path.join(args.subjects_root_path, partition, patient)%0A shutil.move(src, dest) %0A%0A%0Afolders = os.listdir(args.subjects_root_path)%0Afolders = filter(str.isdigit, folders)%0Arandom.shuffle(folders)%0Atrain_cnt = int(0.85 * len(folders))%0A%0Atrain_patients = sorted(folders%5B:train_cnt%5D)%0Atest_patients = sorted(folders%5Btrain_cnt:%5D)%0Aassert len(set(train_patients) & set(test_patients)) == 0%0A%0Amove_to_partition(train_patients, %22train%22)%0Amove_to_partition(test_patients, %22test%22)%0A
|
|
284da54cc9fc322c32e44706716b548bcd652dc4
|
Test axe app.
|
tests/test_axe.py
|
tests/test_axe.py
|
Python
| 0
|
@@ -0,0 +1,547 @@
+# -*- coding: utf-8 -*-%0A%0Aimport pytest%0Afrom axe import Axe, errors%0A%0A@pytest.fixture%0Adef axe():%0A return Axe()%0A%0Adef test_build_from_urls(axe):%0A func = lambda: ''%0A axe.build(%7B'/': func%7D)%0A assert '/' in axe.urls%0A assert axe.urls%5B'/'%5D == func%0A%0Adef test_register_ext_success(axe):%0A @axe.register_ext%0A def test(request):%0A pass%0A assert axe.exts%5B'test'%5D == test%0A%0Adef test_register_ext_duplicated(axe):%0A with pytest.raises(errors.DuplicatedExtension):%0A @axe.register_ext%0A def query(request):%0A pass%0A
|
|
134445f62244c4b85395be1c381d60cbdb1fd20e
|
Fix some rfxtrx devices with multiple sensors (#12264)
|
homeassistant/components/sensor/rfxtrx.py
|
homeassistant/components/sensor/rfxtrx.py
|
"""
Support for RFXtrx sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.rfxtrx/
"""
import logging
import voluptuous as vol
import homeassistant.components.rfxtrx as rfxtrx
from homeassistant.components.rfxtrx import (
ATTR_DATA_TYPE, ATTR_FIRE_EVENT, ATTR_NAME, CONF_AUTOMATIC_ADD,
CONF_DATA_TYPE, CONF_DEVICES, CONF_FIRE_EVENT, DATA_TYPES)
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import ATTR_ENTITY_ID, CONF_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
DEPENDENCIES = ['rfxtrx']
_LOGGER = logging.getLogger(__name__)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_DEVICES, default={}): {
cv.string: vol.Schema({
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_FIRE_EVENT, default=False): cv.boolean,
vol.Optional(CONF_DATA_TYPE, default=[]):
vol.All(cv.ensure_list, [vol.In(DATA_TYPES.keys())]),
})
},
vol.Optional(CONF_AUTOMATIC_ADD, default=False): cv.boolean,
}, extra=vol.ALLOW_EXTRA)
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the RFXtrx platform.

    Creates one RfxtrxSensor entity per configured data_type (or per
    auto-detected data_type when none is configured) and registers a
    callback that routes incoming gateway events to those entities.
    """
    from RFXtrx import SensorEvent
    sensors = []
    for packet_id, entity_info in config[CONF_DEVICES].items():
        event = rfxtrx.get_rfx_object(packet_id)
        device_id = "sensor_{}".format(slugify(event.device.id_string.lower()))
        if device_id in rfxtrx.RFX_DEVICES:
            # Already known (e.g. duplicate packet id in config).
            continue
        _LOGGER.info("Add %s rfxtrx.sensor", entity_info[ATTR_NAME])

        sub_sensors = {}
        data_types = entity_info[ATTR_DATA_TYPE]
        if not data_types:
            # No explicit data types configured: fall back to the first
            # known data type present in the reference event, if any.
            data_types = ['']
            for data_type in DATA_TYPES:
                if data_type in event.values:
                    data_types = [data_type]
                    break
        for _data_type in data_types:
            new_sensor = RfxtrxSensor(None, entity_info[ATTR_NAME],
                                      _data_type,
                                      entity_info[ATTR_FIRE_EVENT])
            sensors.append(new_sensor)
            sub_sensors[_data_type] = new_sensor
        rfxtrx.RFX_DEVICES[device_id] = sub_sensors
    add_devices(sensors)

    def sensor_update(event):
        """Handle sensor updates from the RFXtrx gateway."""
        if not isinstance(event, SensorEvent):
            return

        device_id = "sensor_" + slugify(event.device.id_string.lower())
        if device_id in rfxtrx.RFX_DEVICES:
            sensors = rfxtrx.RFX_DEVICES[device_id]
            for data_type in sensors:
                # Bug fix: some multi-sensor devices send individual
                # messages for each of their sensors.  Update a sensor
                # only when the event actually carries its data_type,
                # otherwise a message for one sensor would clobber the
                # state of its siblings.
                if data_type not in event.values:
                    continue
                sensor = sensors[data_type]
                sensor.event = event
                # Fire event
                if sensor.should_fire_event:
                    sensor.hass.bus.fire(
                        "signal_received", {
                            ATTR_ENTITY_ID: sensor.entity_id,
                        }
                    )
            return

        # Add entity if not exist and the automatic_add is True
        if not config[CONF_AUTOMATIC_ADD]:
            return

        pkt_id = "".join("{0:02x}".format(x) for x in event.data)
        _LOGGER.info("Automatic add rfxtrx.sensor: %s", pkt_id)

        # Pick the first known data type carried by the event.
        data_type = ''
        for _data_type in DATA_TYPES:
            if _data_type in event.values:
                data_type = _data_type
                break
        new_sensor = RfxtrxSensor(event, pkt_id, data_type)
        sub_sensors = {new_sensor.data_type: new_sensor}
        rfxtrx.RFX_DEVICES[device_id] = sub_sensors
        add_devices([new_sensor])

    if sensor_update not in rfxtrx.RECEIVED_EVT_SUBSCRIBERS:
        rfxtrx.RECEIVED_EVT_SUBSCRIBERS.append(sensor_update)
class RfxtrxSensor(Entity):
    """A single data channel reported by an RFXtrx sensor device."""

    def __init__(self, event, name, data_type, should_fire_event=False):
        """Initialize the sensor."""
        self._name = name
        self.data_type = data_type
        self.event = event
        self.should_fire_event = should_fire_event
        self._unit_of_measurement = DATA_TYPES.get(data_type, '')

    def __str__(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state of the sensor."""
        return self.event.values.get(self.data_type) if self.event else None

    @property
    def name(self):
        """Get the name of the sensor."""
        return "{} {}".format(self._name, self.data_type)

    @property
    def device_state_attributes(self):
        """Return the device state attributes."""
        return self.event.values if self.event else None

    @property
    def unit_of_measurement(self):
        """Return the unit this state is expressed in."""
        return self._unit_of_measurement
|
Python
| 0
|
@@ -2695,23 +2695,306 @@
for
-key in sensors:
+data_type in sensors:%0A # Some multi-sensor devices send individual messages for each%0A # of their sensors. Update only if event contains the%0A # right data_type for the sensor.%0A if data_type not in event.values:%0A continue
%0A
@@ -3023,19 +3023,25 @@
sensors%5B
-key
+data_type
%5D%0A
@@ -3125,22 +3125,16 @@
f sensor
-s%5Bkey%5D
.should_
@@ -3232,16 +3232,16 @@
ved%22, %7B%0A
+
@@ -3286,14 +3286,8 @@
nsor
-s%5Bkey%5D
.ent
|
ed82f23da38f9663f55b32f05046b235bf5da9bd
|
fix overlooked issue with statically defined names (#10053)
|
homeassistant/components/switch/tplink.py
|
homeassistant/components/switch/tplink.py
|
"""
Support for TPLink HS100/HS110/HS200 smart switch.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.tplink/
"""
import logging
import time
import voluptuous as vol
from homeassistant.components.switch import (SwitchDevice, PLATFORM_SCHEMA)
from homeassistant.const import (CONF_HOST, CONF_NAME, ATTR_VOLTAGE)
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['pyHS100==0.3.0']
_LOGGER = logging.getLogger(__name__)
ATTR_CURRENT_CONSUMPTION = 'current_consumption'
ATTR_TOTAL_CONSUMPTION = 'total_consumption'
ATTR_DAILY_CONSUMPTION = 'daily_consumption'
ATTR_CURRENT = 'current'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME): cv.string,
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the TPLink switch platform."""
    from pyHS100 import SmartPlug

    plug = SmartPlug(config.get(CONF_HOST))
    add_devices([SmartPlugSwitch(plug, config.get(CONF_NAME))], True)
class SmartPlugSwitch(SwitchDevice):
    """Representation of a TPLink Smart Plug switch."""

    def __init__(self, smartplug, name):
        """Initialize the switch.

        smartplug: pyHS100.SmartPlug handle for the device.
        name: user-configured entity name, or None to fall back to the
        alias reported by the plug (resolved lazily in update()).
        """
        self.smartplug = smartplug
        # Bug fix: the configured name used to be discarded here
        # (self._name = None), so the YAML "name:" option had no effect.
        self._name = name
        self._state = None
        # Set up emeter cache
        self._emeter_params = {}

    @property
    def name(self):
        """Return the name of the Smart Plug, if any."""
        return self._name

    @property
    def is_on(self):
        """Return true if switch is on."""
        return self._state

    def turn_on(self, **kwargs):
        """Turn the switch on."""
        self.smartplug.turn_on()

    def turn_off(self, **kwargs):
        """Turn the switch off."""
        # **kwargs added for symmetry with turn_on(); Home Assistant
        # passes service data to both methods the same way.
        self.smartplug.turn_off()

    @property
    def device_state_attributes(self):
        """Return the state attributes of the device."""
        return self._emeter_params

    def update(self):
        """Update the TP-Link switch's state."""
        from pyHS100 import SmartDeviceException
        try:
            self._state = self.smartplug.state == \
                self.smartplug.SWITCH_STATE_ON

            if self._name is None:
                # No name configured: use the alias stored on the plug.
                self._name = self.smartplug.alias

            if self.smartplug.has_emeter:
                emeter_readings = self.smartplug.get_emeter_realtime()

                self._emeter_params[ATTR_CURRENT_CONSUMPTION] \
                    = "%.1f W" % emeter_readings["power"]
                self._emeter_params[ATTR_TOTAL_CONSUMPTION] \
                    = "%.2f kW" % emeter_readings["total"]
                self._emeter_params[ATTR_VOLTAGE] \
                    = "%.2f V" % emeter_readings["voltage"]
                self._emeter_params[ATTR_CURRENT] \
                    = "%.1f A" % emeter_readings["current"]

                emeter_statics = self.smartplug.get_emeter_daily()
                try:
                    # "%e" is the blank-padded day of month; int() strips
                    # the padding to index today's consumption entry.
                    self._emeter_params[ATTR_DAILY_CONSUMPTION] \
                        = "%.2f kW" % emeter_statics[int(time.strftime("%e"))]
                except KeyError:
                    # device returned no daily history
                    pass

        except (SmartDeviceException, OSError) as ex:
            _LOGGER.warning('Could not read state for %s: %s', self.name, ex)
|
Python
| 0
|
@@ -1339,27 +1339,27 @@
elf._name =
-Non
+nam
e%0A se
|
f9d7612dfbad8d5f394bd7c0a9ed6db5f6234eb5
|
add imf_tools
|
imf_tools.py
|
imf_tools.py
|
Python
| 0.000001
|
@@ -0,0 +1,1228 @@
+'''%0ADefine several stellar initial mass functions,%0A with some tools for working with them%0A'''%0A%0Aclass IMF(object):%0A '''%0A stellar initial mass function%0A '''%0A%0A __version__ = '0.1'%0A%0A def __init__(self, imftype='salpeter', ml=0.1, mh=150., mf=1., dm=.005):%0A '''%0A set up an IMF with some probability distribution, lower mass limit,%0A and upper mass limit, that formed some mass%0A%0A all masses & luminosities are implicitly in solar units, and times%0A are in Gyr%0A%0A I've provided several choices of IMF%0A '''%0A%0A self.imftype = imftype%0A self.ml = ml # low mass limit%0A self.mh = mh # high mass limit%0A self.dm = dm # standard mass differential for computations%0A%0A @staticmethod%0A def salpeter_pdf_u(m):%0A '''straight up power law'''%0A return 1./2.28707 * m**-2.35%0A%0A @staticmethod%0A def miller_scalo_pdf_u(m):%0A bdy = 1.%0A inds = %5B0., -2.3%5D%0A branch = np.argmax(np.stack(%5B%5D))%0A return m**-2.35%0A%0A @staticmethod%0A def kroupa_u(m):%0A bdys = %5B.08, .5%5D%0A inds = %5B-0.3, -1.3, -2.3%5D%0A%0A @staticmethod%0A def chabrier_u(m):%0A%0A def mass_at_age(t):%0A raise NotImplementedError%0A%0A%0A%0A%0A
|
|
9efa97198f81f5afce03e30c3bce5f5fc23a8d28
|
add test for Row
|
tests/test_row.py
|
tests/test_row.py
|
Python
| 0
|
@@ -0,0 +1,512 @@
+%0Aimport unittest%0Afrom eatable import Table, Row%0A%0Aclass RowTestCase(unittest.TestCase):%0A def setUp(self):%0A self.header = ('A', 'B', 'C')%0A self.table = Table(self.header)%0A%0A def test_init(self):%0A Row(self.table, 0, ('a1', 'b2', 'c2'))%0A%0A def test_getitem(self):%0A row = Row(self.table, 0, ('a1', 'b1', 'c1'))%0A self.assertEqual(row%5B'A'%5D, 'a1')%0A self.assertEqual(row%5B'B'%5D, 'b1')%0A self.assertEqual(row%5B'C'%5D, 'c1')%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
fd5ccdb154cc00a1ff58d13043435f7b1927ca68
|
fix apparent paste-buffer corruption
|
direct/src/distributed/CRCache.py
|
direct/src/distributed/CRCache.py
|
"""CRCache module: contains the CRCache class"""
import DirectNotifyGlobal
import DistributedObject
class CRCache:
    """A FIFO cache of disabled DistributedObjects.

    Objects are disabled when cached; the oldest object is deleted when
    the cache overflows, and all objects are deleted on flush().
    """
    notify = DirectNotifyGlobal.directNotify.newCategory("CRCache")

    def __init__(self, maxCacheItems=10):
        # Maximum number of objects held before the oldest is evicted.
        self.maxCacheItems = maxCacheItems
        # doId -> DistributedObject, mirrored by the eviction FIFO.
        self.dict = {}
        self.fifo = []

    def flush(self):
        """
        Delete each item in the cache then clear all references to them
        """
        CRCache.notify.debug("Flushing the cache")
        for distObj in self.dict.values():
            distObj.delete()
        # Null out all references to the objects so they will get gcd
        self.dict = {}
        self.fifo = []

    def cache(self, distObj):
        """Disable distObj and store it, evicting the oldest if full."""
        # Only distributed objects are allowed in the cache
        assert isinstance(distObj, DistributedObject.DistributedObject)
        doId = distObj.getDoId()
        if doId in self.dict:
            CRCache.notify.warning("Double cache attempted for distObj "
                                   + str(doId))
        else:
            # Call disable on the distObj
            distObj.disableAndAnnounce()
            # Put the distObj in the fifo and the dict
            self.fifo.append(distObj)
            self.dict[doId] = distObj
            if len(self.fifo) > self.maxCacheItems:
                # if the cache is full, pop the oldest item
                oldestDistObj = self.fifo.pop(0)
                # and remove it from the dictionary
                del self.dict[oldestDistObj.getDoId()]
                # and delete it
                oldestDistObj.delete()
        # Make sure that the fifo and the dictionary are sane
        assert len(self.dict) == len(self.fifo)

    def retrieve(self, doId):
        """Remove and return the cached object for doId, or None."""
        if doId in self.dict:
            distObj = self.dict[doId]
            del self.dict[doId]
            self.fifo.remove(distObj)
            return distObj
        else:
            # If you can't find it, return None
            return None

    def contains(self, doId):
        """Return whether doId is currently cached."""
        return doId in self.dict

    def delete(self, doId):
        """Remove the object for doId from the cache and delete it."""
        assert doId in self.dict
        distObj = self.dict[doId]
        # Remove it from the dict and fifo
        del self.dict[doId]
        self.fifo.remove(distObj)
        # Bug fix: this previously called oldestDistObj.delete(), an
        # undefined name in this scope, which raised a NameError.
        distObj.delete()
|
Python
| 0.000026
|
@@ -2590,39 +2590,33 @@
lete it%0A
-oldestD
+d
istObj.delete()%0A
|
4407d1e42d3be45cbbd7b746d961f02cfc0d6d85
|
use new api for display region
|
direct/src/showbase/ShadowDemo.py
|
direct/src/showbase/ShadowDemo.py
|
"""Create a cheesy shadow effect by rendering the view of an
object (e.g. the local avatar) from a special camera as seen from
above (as if from the sun), using a solid gray foreground and a
solid white background, and then multitexturing that view onto the
world.
This is meant primarily as a demonstration of multipass and
multitexture rendering techniques. It's not a particularly great
way to do shadows.
"""
from pandac.PandaModules import *
from direct.task import Task
sc = None
class ShadowCaster:
    """Renders objectPath from an overhead camera at lightPath into an
    offscreen texture, which setGround() then projects onto the world."""

    # Resolution of the offscreen shadow texture.
    texXSize = 256
    texYSize = 256

    def __init__(self, lightPath, objectPath):
        """lightPath: NodePath placed at the 'sun'; objectPath: the
        geometry that casts the shadow."""
        self.lightPath = lightPath
        self.objectPath = objectPath
        self.groundPath = None

        # Create an offscreen buffer to render the view of the avatar
        # into a texture.
        self.buffer = base.win.makeTextureBuffer(
            'shadowBuffer', self.texXSize, self.texYSize)

        # The background of this buffer--and the border of the
        # texture--is pure white.
        clearColor = VBase4(1, 1, 1, 1)
        self.buffer.setClearColor(clearColor)
        self.tex = self.buffer.getTexture()
        self.tex.setBorderColor(clearColor)
        self.tex.setWrapu(Texture.WMBorderColor)
        self.tex.setWrapv(Texture.WMBorderColor)

        # Set up a display region on this buffer, and create a camera.
        layer = self.buffer.getChannel(0).makeLayer()
        dr = layer.makeDisplayRegion()
        self.camera = Camera('shadowCamera')
        self.cameraPath = self.lightPath.attachNewNode(self.camera)
        self.camera.setScene(self.objectPath)
        dr.setCamera(self.cameraPath)

        # Use a temporary NodePath to define the initial state for the
        # camera.  The initial state will render everything in a
        # flat-shaded gray, as if it were a shadow.
        initial = NodePath('initial')
        # NOTE(review): the trailing 1 looks like a state-override
        # priority argument -- confirm against the Panda3D setColor API.
        initial.setColor(0.5, 0.5, 0.5, 1, 1)
        initial.setTextureOff(2)
        self.camera.setInitialState(initial.getState())

        # Use an orthographic lens for this camera instead of the
        # usual perspective lens.  An orthographic lens is better to
        # simulate sunlight, which is (almost) orthographic.  We set
        # the film size large enough to render a typical avatar (but
        # not so large that we lose detail in the texture).
        self.lens = OrthographicLens()
        self.lens.setFilmSize(4, 6)
        self.camera.setLens(self.lens)

        # Finally, we'll need a unique TextureStage to apply this
        # shadow texture to the world.
        self.stage = TextureStage('shadow')

        # Make sure the shadowing object doesn't get its own shadow
        # applied to it.
        self.objectPath.setTextureOff(self.stage)

    def setGround(self, groundPath):
        """ Specifies the part of the world that is to be considered
        the ground: this is the part onto which the rendered texture
        will be applied. """
        # Detach the projection from any previous ground first.
        if self.groundPath:
            self.groundPath.clearProjectTexture(self.stage)
        self.groundPath = groundPath
        self.groundPath.projectTexture(self.stage, self.tex, self.cameraPath)

    def clear(self):
        """ Undoes the effect of the ShadowCaster. """
        # Release everything acquired in __init__ so the caster can be
        # discarded or rebuilt cleanly.
        if self.groundPath:
            self.groundPath.clearProjectTexture(self.stage)
            self.groundPath = None

        if self.lightPath:
            self.lightPath.detachNode()
            self.lightPath = None

        if self.cameraPath:
            self.cameraPath.detachNode()
            self.cameraPath = None
            self.camera = None
            self.lens = None

        if self.buffer:
            # Offscreen buffers must be returned to the graphics engine.
            base.graphicsEngine.removeWindow(self.buffer)
            self.tex = None
            self.buffer = None
def avatarShadow():
    """Attach a projected shadow to the local avatar and return the
    ShadowCaster managing it, replacing any previous caster (module
    global ``sc``)."""
    # Turn off the existing drop shadow.
    base.localAvatar.dropShadow.hide()

    # Set up a new node to hold the "light": this is an arbitrary point
    # somewhere above the avatar, looking down, as if from the sun.
    objectPath = base.localAvatar.getGeomNode()
    shadowCamera = objectPath.attachNewNode('shadowCamera')
    lightPath = shadowCamera.attachNewNode('lightPath')

    # We can change this position at will to change the angle of the
    # sun.
    lightPath.setPos(5, 0, 7)

    # We need a task to keep the shadowCamera rotated in the same
    # direction relative to render (otherwise, the shadow seems to
    # rotate when you rotate your avatar, which is strange).  We can't
    # just use a compass effect, since that doesn't work on cameras.
    def shadowCameraRotate(task, shadowCamera = shadowCamera):
        # Keep the camera rig world-aligned and re-aim the light each
        # frame at a point slightly above the camera node.
        shadowCamera.setHpr(render, 0, 0, 0)
        lightPath.lookAt(shadowCamera, 0, 0, 3)
        return Task.cont

    # Replace any previously scheduled rotation task.
    taskMgr.remove('shadowCamera')
    taskMgr.add(shadowCameraRotate, 'shadowCamera')

    global sc
    if sc != None:
        sc.clear()
    sc = ShadowCaster(lightPath, objectPath)

    # Naively, just apply the shadow to everything in the world.  It
    # would probably be better to use a little restraint.
    sc.setGround(render)
    return sc
|
Python
| 0
|
@@ -1357,20 +1357,17 @@
-laye
+d
r = self
@@ -1378,53 +1378,8 @@
fer.
-getChannel(0).makeLayer()%0A dr = layer.
make
|
e0bfc2bdff3d44c8839e4c04948e8da824f7b260
|
Write requests-like get()
|
spyglass/util.py
|
spyglass/util.py
|
Python
| 0.004638
|
@@ -0,0 +1,169 @@
+from urllib2 import urlopen%0Afrom collections import namedtuple%0A%0AResponse = namedtuple('Response', %5B'text'%5D)%0A%0Adef get(url):%0A return Response(text=urlopen(url).read())%0A
|
|
96c08b94d40850b5dd703b052943de2827ebf9f9
|
create command.py and abstract command template
|
foxybot/command.py
|
foxybot/command.py
|
Python
| 0
|
@@ -0,0 +1,1268 @@
+%22%22%22Provide a template for making commands and a decorator to register them.%22%22%22%0A%0Afrom abc import abstractmethod, abstractclassmethod, ABCMeta%0Afrom enum import Enum%0A%0Afrom registrar import CommandRegistrar%0A%0Adef bot_command(cls):%0A command = cls()%0A%0A if not issubclass(command.__class__, AbstractCommand):%0A print(f'%5BERROR%5D %7Bcommand.__module__%7D is not a subclass of AbstractCommand and wont be loaded.')%0A return%0A%0A command_registrar = CommandRegistrar.instance()%0A%0A for alias in command.aliases:%0A if alias.lower() not in command_registrar.command_table.keys():%0A command_registrar.command_table%5Balias%5D = command%0A else:%0A print(f'Error: duplicate alias %7Balias.lower()%7D in %7Bcommand.__module__%7D.py...')%0A print(f'Duplicate is in %7Bcommand_registrar.command_table%5Balias.lower()%5D.__module__%7D')%0A%0A%0Aclass AbstractCommand(metaclass=ABCMeta):%0A %22%22%22Ensure all commands have a consistent interface%22%22%22%0A%0A @staticmethod%0A @abstractclassmethod%0A def execute(shards, shard, msg):%0A %22%22%22Executes this instances command%22%22%22%0A raise NotImplementedError%0A%0A @property%0A @abstractmethod%0A def aliases(self):%0A %22%22%22The aliases that can be used to call this command%22%22%22%0A raise NotImplementedError%0A
|
|
657620bcb755185244363062b41a3e6b942d1e77
|
Fix config error on ubuntu machine
|
module/pych/configuration.py
|
module/pych/configuration.py
|
"""
Loads and stores pyChapel configuration.
"""
# pylint: disable=maybe-no-member
# The configuration object does have the "__file__" member via the module.
# pylint: disable=too-few-public-methods
# The configuration wraps around the configuration state, access is provided
# through __getitem__ it is perfectly valid that it has few public methods.
import inspect
import glob
import json
import os
from pych.utils import info, warn, error, prepend_path
class Configuration(object):
    """
    Load pyChapel configuration from file and internalize configuration state.
    """

    # Directory entries skipped by the pretty-printers below.
    PPRINT_IGNORE = ['empty', 'write_test']

    def __init__(self, config_fn=None):
        """
        :config_fn str: Path to configuration file.  When omitted, the
        default path <prefix>/share/pych/config/pych.json is derived
        from this module's install location.
        """
        if not config_fn:               # Derive default configuration path
            config_path = []
            path = inspect.getmodule(self).__file__.split(os.sep)
            for directory in path:
                # Stop at the install prefix; packages live under "lib"
                # or, on Debian/Ubuntu layouts, "local/lib".
                if directory in ("lib", "local"):
                    break
                config_path.append(directory)
            config_path += [
                "share",
                "pych",
                "config",
                "pych.json"
            ]
            config_fn = os.sep.join(config_path)

        # Use a context manager so the file handle is closed promptly
        # (the previous json.load(open(...)) leaked the handle).
        with open(config_fn) as config_file:
            config_raw = json.load(config_file)
        config = config_raw

        # TODO: Configuration here should do whatever path-magic is needed...
        #
        # The paths in configuration file should just be made absolute... this
        # is too much complexity for simple thing.

        #
        # Object store paths
        #
        root_path = config_raw["object_store"]["root_path"]
        if not root_path:               # Root-path defaults to cwd
            root_path = os.getcwd()

        # Search paths: each source language maps to a list of
        # directories, each made absolute relative to root_path.
        search_paths = dict(
            (source, [])
            for source in config_raw["object_store"]["search_paths"])
        for source in config_raw["object_store"]["search_paths"]:
            for search_path in config_raw["object_store"]["search_paths"][source]:
                search_paths[source].append(prepend_path(
                    root_path, search_path
                ))

        # Output paths: a single directory per source language.
        output_paths = dict(
            (source, [])
            for source in config_raw["object_store"]["output_paths"])
        for source in config_raw["object_store"]["output_paths"]:
            output_path = config_raw["object_store"]["output_paths"][source]
            output_paths[source] = prepend_path(
                root_path,
                output_path
            )

        config["object_store"]["search_paths"] = search_paths
        config["object_store"]["output_paths"] = output_paths

        #
        # TODO: Specializer paths
        #

        self._config = config

    def __getitem__(self, index):
        """Expose the internalized configuration state dict-style."""
        return self._config[index]

    def pprint_specializers(self, stypes=('bfiles', 'sfiles', 'templates')):
        """Pretty-print specializer-configuration as described in pych.json.

        The default is a tuple rather than a list to avoid the
        shared-mutable-default-argument pitfall.
        """
        for stype in stypes:
            info("** Listing %s" % stype)
            bdirs = self._config["specializers"][stype]
            for slang in bdirs:
                for bdir in bdirs[slang]:
                    info("* %s %s(s) in '%s':" % (slang.title(), stype, bdir))
                    for fname in glob.glob("%s%s*" % (bdir, os.sep)):
                        if os.path.basename(fname) in self.PPRINT_IGNORE:
                            continue
                        info(os.path.basename(fname))
            info("** End of %s listing." % stype)

    def pprint_objects(self, slangs=('c', 'chapel', 'tp')):
        """Pretty-print objects storage."""
        for slang in slangs:
            paths = self._config["object_store"]["search_paths"][slang]
            for path in paths:
                info("* %s objects in '%s':" % (slang.title(), path))
                for object_path in glob.glob("%s%s*" % (path, os.sep)):
                    if os.path.basename(object_path) in self.PPRINT_IGNORE:
                        continue
                    info(os.path.basename(object_path))
|
Python
| 0.000001
|
@@ -968,16 +968,40 @@
== %22lib%22
+ or directory == %22local%22
:%0A
|
e85d1f0e9b198184103973f198bf1ceddbca6a65
|
declare the federica rspec schemas
|
sfa/rspecs/versions/federica.py
|
sfa/rspecs/versions/federica.py
|
Python
| 0.998416
|
@@ -0,0 +1,594 @@
+from sfa.rspecs.versions.pgv2 import PGv2Ad, PGv2Request, PGv2Manifest%0A%0Aclass FedericaAd (PGv2Ad):%0A enabled = True%0A schema = 'http://sorch.netmode.ntua.gr/ws/RSpec/ad.xsd'%0A namespace = 'http://sorch.netmode.ntua.gr/ws/RSpec'%0A%0Aclass FedericaRequest (PGv2Request):%0A enabled = True%0A schema = 'http://sorch.netmode.ntua.gr/ws/RSpec/request.xsd'%0A namespace = 'http://sorch.netmode.ntua.gr/ws/RSpec'%0A%0Aclass FedericaManifest (PGv2Manifest):%0A enabled = True%0A schema = 'http://sorch.netmode.ntua.gr/ws/RSpec/manifest.xsd'%0A namespace = 'http://sorch.netmode.ntua.gr/ws/RSpec'%0A%0A
|
|
cbac6a809eb401dfc64ef4fe7d7935d06f7b703e
|
comment un-necesssary codes
|
hps/settings.py
|
hps/settings.py
|
"""
Django settings for hps project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '70)$&#pmn+gg+z5h&=5xyghu&imupw&o$zrjw#(9p+w2f*msh='
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'HBlog',
)
TEMPLATE_DIRS = (
os.path.join(PROJECT_ROOT, 'templates'),
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'hps.urls'
WSGI_APPLICATION = 'hps.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
MEDIA_ROOT = os.path.join(PROJECT_ROOT, '..', 'media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, '..', 'static')
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT, 'static'),
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
AUTHENTICATION_BACKENDS = (
'mongoengine.django.auth.MongoEngineBackend',
)
SESSION_ENGINE = 'mongoengine.django.sessions'
MONGO_DATABASE_NAME = 'HBlog'
from mongoengine import connect
connect(MONGO_DATABASE_NAME)
|
Python
| 0.000007
|
@@ -1852,17 +1852,20 @@
tabases%0A
-%0A
+#%0A#
DATABASE
@@ -1870,20 +1870,22 @@
SES = %7B%0A
+#
+
'default
@@ -1885,24 +1885,26 @@
default': %7B%0A
+#
'ENG
@@ -1939,16 +1939,18 @@
lite3',%0A
+#
@@ -1995,22 +1995,26 @@
ite3'),%0A
+#
%7D%0A
+#
%7D%0A%0A# Int
|
c055009077546b22090897f79f4facce8bdb97d5
|
change module names in hvc/__init__.py
|
hvc/__init__.py
|
hvc/__init__.py
|
"""
__init__.py imports key functions from modules to package level
"""
from .utils.features import load_feature_file
from .featureextract import extract
from .labelpredict import predict
from .modelselect import select
from .parseconfig import parse_config
from . import metrics
from . import plot
|
Python
| 0.000017
|
@@ -118,23 +118,16 @@
e%0Afrom .
-feature
extract
@@ -151,13 +151,8 @@
om .
-label
pred
@@ -180,13 +180,8 @@
om .
-model
sele
|
67737629c2d507c0e9fc96bb53695d2fbdcc1e8f
|
point_of_sale : Removed print statement.
|
addons/point_of_sale/report/pos_invoice.py
|
addons/point_of_sale/report/pos_invoice.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from report import report_sxw
from osv import osv
from tools.translate import _
class pos_invoice(report_sxw.rml_parse):
    """Report parser rendering invoices for point-of-sale orders."""

    def __init__(self, cr, uid, name, context):
        super(pos_invoice, self).__init__(cr, uid, name, context)
        self.localcontext.update({
            'time': time,
        })
        # Bug fix: removed a stray bare `print` statement left over from
        # debugging (it wrote a blank line to stdout on every render).

    def set_context(self, objects, data, ids, report_type=None):
        """Swap the report context from POS orders to their invoices.

        Increments each order's print counter and collects the distinct
        invoices attached to the selected orders.
        """
        super(pos_invoice, self).set_context(objects, data, ids, report_type)
        iids = []
        nids = []
        for order in objects:
            order.write({'nb_print': order.nb_print + 1})
            if order.invoice_id and order.invoice_id not in iids:
                # NOTE(review): this branch is only entered when
                # invoice_id is truthy, so the check below can never
                # fire; orders without an invoice are silently skipped.
                # Left as-is to preserve existing behaviour.
                if not order.invoice_id:
                    raise osv.except_osv(_('Error !'), _('Please create an invoice for this sale.'))
                iids.append(order.invoice_id)
                nids.append(order.invoice_id.id)
        self.cr.commit()
        data['ids'] = nids
        self.datas = data
        self.ids = nids
        self.objects = iids
        self.localcontext['data'] = data
        self.localcontext['objects'] = iids
report_sxw.report_sxw('report.pos.invoice', 'pos.order', 'addons/account/report/invoice.rml', parser= pos_invoice)
|
Python
| 0.998475
|
@@ -149,9 +149,8 @@
tion
-%09
%0A#
@@ -1308,22 +1308,8 @@
%7D)
-%0A print
%0A%0A
|
fa375d06128e493f86524e82fa93c892f4d925b7
|
Add script to find forms missing in ES
|
corehq/apps/data_pipeline_audit/management/commands/find_sql_forms_not_in_es.py
|
corehq/apps/data_pipeline_audit/management/commands/find_sql_forms_not_in_es.py
|
Python
| 0.000001
|
@@ -0,0 +1,2757 @@
+from __future__ import absolute_import%0Afrom __future__ import division%0Afrom __future__ import unicode_literals%0A%0Afrom __future__ import print_function%0Afrom datetime import datetime%0Afrom django.core.management.base import BaseCommand%0Aimport sys%0Afrom django.db.models import Q, F%0Afrom django.db.models.functions import Greatest%0Afrom corehq.form_processor.models import XFormInstanceSQL%0Afrom corehq.apps.es import FormES%0Aimport argparse%0Afrom dimagi.utils.chunked import chunked%0A%0ADATE_FORMAT = %22%25Y-%25m-%25d%22%0A%0A%0Adef valid_date(s):%0A try:%0A return datetime.strptime(s, DATE_FORMAT)%0A except ValueError:%0A msg = %22Not a valid date: '%7B0%7D'.%22.format(s)%0A raise argparse.ArgumentTypeError(msg)%0A%0A%0Aclass Command(BaseCommand):%0A help = %22Print IDs of sql forms that are in the primary DB but not in ES.%22%0A%0A def add_arguments(self, parser):%0A parser.add_argument(%0A '-s',%0A '--startdate',%0A dest='start',%0A type=valid_date,%0A help=%22The start date. Only applicable to forms on SQL domains. - format YYYY-MM-DD%22,%0A )%0A parser.add_argument(%0A '-e',%0A '--enddate',%0A dest='end',%0A type=valid_date,%0A help=%22The end date. Only applicable to forms on SQL domains. - format YYYY-MM-DD%22,%0A )%0A%0A def handle(self, **options):%0A startdate = options.get('start')%0A enddate = options.get('end')%0A print(%22Fetching all form ids...%22, file=sys.stderr)%0A all_ids = list(iter_form_ids_by_last_modified(startdate, enddate))%0A print(%22Woo! Done fetching. 
Here we go%22, file=sys.stderr)%0A for doc_ids in chunked(all_ids, 100):%0A es_ids = (FormES()%0A .remove_default_filter('is_xform_instance')%0A .doc_id(doc_ids).values_list('_id', flat=True))%0A missing_ids = set(doc_ids) - set(es_ids)%0A for form_id in missing_ids:%0A print(form_id)%0A%0A%0Adef iter_form_ids_by_last_modified(start_datetime, end_datetime):%0A from corehq.sql_db.util import run_query_across_partitioned_databases%0A%0A annotate = %7B%0A 'last_modified': Greatest('received_on', 'edited_on', 'deleted_on'),%0A %7D%0A%0A return run_query_across_partitioned_databases(%0A XFormInstanceSQL,%0A (Q(last_modified__gt=start_datetime, last_modified__lt=end_datetime) &%0A Q(state=F('state').bitand(XFormInstanceSQL.DELETED) +%0A F('state').bitand(XFormInstanceSQL.DEPRECATED) +%0A F('state').bitand(XFormInstanceSQL.DUPLICATE) +%0A F('state').bitand(XFormInstanceSQL.ERROR) +%0A F('state').bitand(XFormInstanceSQL.SUBMISSION_ERROR_LOG) +%0A F('state'))),%0A annotate=annotate,%0A values=%5B'form_id'%5D,%0A )%0A
|
|
51372b15e9abe4c0ae35294ec51694751fe2ae32
|
Add a py2exe configuration setup.
|
src/bin/setup.py
|
src/bin/setup.py
|
Python
| 0
|
@@ -0,0 +1,260 @@
+from distutils.core import setup%0Aimport py2exe, sys%0Afrom glob import glob%0Asys.path.append(%22C:%5C%5CTemp%5C%5CMicrosoft.VC90.CRT%22)%0Adata_files = %5B(%22Microsoft.VC90.CRT%22, glob(r'C:%5CTemp%5CMicrosoft.VC90.CRT%5C*.*'))%5D%0Asetup(%0A data_files=data_files,%0A console=%5B'ride.py'%5D)%0A
|
|
d8521011d5be28812c222b58901a07e8f30e87ac
|
Add testing code for memory leak.
|
neuralstyle/testing-train.py
|
neuralstyle/testing-train.py
|
Python
| 0
|
@@ -0,0 +1,2177 @@
+from __future__ import print_function%0A%0Aimport argparse%0A%0Aimport numpy as np%0Aimport torch%0Afrom torch.autograd import Variable%0Afrom torch.optim import Adam%0Afrom torch.utils.data import DataLoader%0Afrom torchvision import transforms%0Afrom torchvision import datasets%0A%0Afrom transformernet import TransformerNet%0Afrom vgg16 import Vgg16%0Aimport utils%0Aimport os%0A%0A%0Adef main():%0A parser = argparse.ArgumentParser(description=%22parser for fast-neural-style%22)%0A parser.add_argument(%22--batch-size%22, %22-b%22, type=int, default=4)%0A parser.add_argument(%22--epochs%22, %22-e%22, type=int, default=2)%0A parser.add_argument(%22--cuda%22, type=int, default=0)%0A parser.add_argument(%22--dataset%22, type=str, default=%22MSCOCO%22)%0A parser.add_argument(%22--image-size%22, type=int, default=256)%0A args = parser.parse_args()%0A%0A%0A if args.cuda and not torch.cuda.is_available():%0A print(%22WARNING: torch.cuda not available, using CPU.%22)%0A args.cuda = 0%0A%0A if args.cuda:%0A kwargs = %7B'num_workers': 1, 'pin_memory': False%7D%0A else:%0A kwargs = %7B%7D%0A%0A print(%22=====================%22)%0A print(%22TEST MODE%22)%0A print(%22using 1 worker%22)%0A print(%22=====================%22)%0A%0A print(%22=====================%22)%0A print(%22TORCH VERSION:%22, torch.__version__)%0A print(%22BATCH SIZE:%22, args.batch_size)%0A print(%22EPOCHS:%22, args.epochs)%0A print(%22CUDA:%22, args.cuda)%0A print(%22DATASET:%22, args.dataset)%0A print(%22IMAGE SIZE:%22, args.image_size)%0A print(%22=====================%5Cn%22)%0A%0A transform = transforms.Compose(%5Btransforms.Scale(args.image_size),%0A transforms.CenterCrop(args.image_size),%0A transforms.ToTensor(),%0A transforms.Lambda(lambda x: x.mul(255))%5D)%0A train_dataset = datasets.ImageFolder(args.dataset, transform)%0A train_loader = DataLoader(train_dataset, batch_size=args.batch_size, **kwargs)%0A%0A for e in range(args.epochs):%0A batch_id = 0%0A for x in train_loader:%0A if batch_id %3C 
10 or batch_id %25 500 == 0:%0A print(%22Processing batch:%22, batch_id)%0A batch_id += 1%0A%0A print(%22%5CnDone :)%22)%0A%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
5a2308cc98a99e9c74c14611fdb45adf7601d390
|
prepare bruteforce for http basic authentication; do not forget to create the b64 encoder in zap payload processor;
|
payload_generator/bruteforce.py
|
payload_generator/bruteforce.py
|
Python
| 0
|
@@ -0,0 +1,1648 @@
+# Auxiliary variables/constants for payload generation.%0A%0AINITIAL_VALUE = 0;%0Acount = INITIAL_VALUE;%0Auser = str('admin');%0Apassfile_path = 'C:%5C%5CUsers%5C%5Cuser%5C%5CDocuments%5C%5Cwordlists%5C%5Ctest.txt';%0ANUMBER_OF_PAYLOADS = sum(1 for line in open(passfile_path));%0Apasswd = list();%0Afor line in open(passfile_path): # initializing passwords into list%0A passwd.append(line.rstrip());%0Aprint('NUMBER_OF_PAYLOADS = ' + str(NUMBER_OF_PAYLOADS));%0Aprint('len(passwd) = ' + str(len(passwd)));%0Aprint('count = '+str(count));%0A%0A%0A# The number of generated payloads, zero to indicate unknown number.%0A# The number is used as a hint for progress calculations.%0Adef getNumberOfPayloads():%0A return NUMBER_OF_PAYLOADS;%0A%0A%0A# Returns true if there are still payloads to generate, false otherwise.%0A# Called before each call to next().%0Adef hasNext():%0A return (count %3C NUMBER_OF_PAYLOADS);%0A%0A%0A# Returns the next generated payload.%0A# This method is called while hasNext() returns true.%0Adef next():%0A global count;%0A print('next_count = ' + str(count));%0A payload = count;%0A print('payload = ' + str(payload));%0A count+=1;%0A print('incremented next_count = ' + str(count));%0A print(user+':'+passwd%5Bpayload%5D);%0A return user+':'+passwd%5Bpayload%5D;%0A%0A%0A# Resets the internal state of the payload generator, as if no calls to%0A# hasNext() or next() have been previously made.%0A# Normally called once the method hasNext() returns false and while payloads%0A# are still needed.%0Adef reset():%0A count = INITIAL_VALUE;%0A%0A%0A# Releases any resources used for generation of payloads (for example, a file).%0A# Called once the payload generator is no longer needed.%0Adef close():%0A pass;%0A%0A%0A
|
|
0ced2a66affd65a3dda90dc49bac8bd43e1c6fa7
|
Remove index on LogRecord.message.
|
peavy/migrations/0004_drop_message_index.py
|
peavy/migrations/0004_drop_message_index.py
|
Python
| 0
|
@@ -0,0 +1,2042 @@
+# encoding: utf-8%0Afrom south.db import db%0Afrom south.v2 import SchemaMigration%0A%0A%0Aclass Migration(SchemaMigration):%0A%0A def forwards(self, orm):%0A # Removing index on 'LogRecord', fields %5B'message'%5D%0A db.delete_index('peavy_logrecord', %5B'message'%5D)%0A%0A def backwards(self, orm):%0A # Adding index on 'LogRecord', fields %5B'message'%5D%0A db.create_index('peavy_logrecord', %5B'message'%5D)%0A%0A models = %7B%0A 'peavy.logrecord': %7B%0A 'Meta': %7B'ordering': %22('-timestamp',)%22, 'object_name': 'LogRecord'%7D,%0A 'application': ('django.db.models.fields.CharField', %5B%5D, %7B'default': %22'sandbox'%22, 'max_length': '256', 'db_index': 'True'%7D),%0A 'client_ip': ('django.db.models.fields.CharField', %5B%5D, %7B'db_index': 'True', 'max_length': '128', 'blank': 'True'%7D),%0A 'debug_page': ('django.db.models.fields.TextField', %5B%5D, %7B'blank': 'True'%7D),%0A 'id': ('django.db.models.fields.AutoField', %5B%5D, %7B'primary_key': 'True'%7D),%0A 'level': ('django.db.models.fields.CharField', %5B%5D, %7B'max_length': '32', 'db_index': 'True'%7D),%0A 'logger': ('django.db.models.fields.CharField', %5B%5D, %7B'max_length': '1024', 'db_index': 'True'%7D),%0A 'message': ('django.db.models.fields.TextField', %5B%5D, %7B%7D),%0A 'origin_server': ('django.db.models.fields.CharField', %5B%5D, %7B'default': %22'kaze.jkcl.local'%22, 'max_length': '256', 'db_index': 'True'%7D),%0A 'stack_trace': ('django.db.models.fields.TextField', %5B%5D, %7B'blank': 'True'%7D),%0A 'timestamp': ('django.db.models.fields.DateTimeField', %5B%5D, %7B'default': 'datetime.datetime.now', 'db_index': 'True'%7D),%0A 'user_pk': ('django.db.models.fields.IntegerField', %5B%5D, %7B'db_index': 'True', 'null': 'True', 'blank': 'True'%7D),%0A 'username': ('django.db.models.fields.CharField', %5B%5D, %7B'db_index': 'True', 'max_length': '256', 'blank': 'True'%7D),%0A 'uuid': ('django.db.models.fields.CharField', %5B%5D, %7B'db_index': 'True', 'max_length': '256', 'blank': 
'True'%7D)%0A %7D%0A %7D%0A%0A complete_apps = %5B'peavy'%5D%0A
|
|
f3e91020f0426fedfe229e94bf1ddc69dd64a136
|
Add new example plot for `match_template`.
|
doc/examples/plot_template_alt.py
|
doc/examples/plot_template_alt.py
|
Python
| 0
|
@@ -0,0 +1,1662 @@
+%22%22%22%0A=================%0ATemplate Matching%0A=================%0A%0AIn this example, we use template matching to identify the occurrence of an%0Aimage patch (in this case, a sub-image centered on a single coin). Here, we%0Areturn a single match (the exact same coin), so the maximum value in the%0A%60%60match_template%60%60 result corresponds to the coin location. The other coins%0Alook similar, and thus have local maxima; if you expect multiple matches, you%0Ashould use a proper peak-finding function.%0A%0AThe %60%60match_template%60%60 function uses fast, normalized cross-correlation %5B1%5D_%0Ato find instances of the template in the image. Note that the peaks in the%0Aoutput of %60%60match_template%60%60 correspond to the origin (i.e. top-left corner) of%0Athe template.%0A%0A.. %5B1%5D J. P. Lewis, %22Fast Normalized Cross-Correlation%22, Industrial Light and%0A Magic.%0A%22%22%22%0A%0Aimport numpy as np%0Aimport matplotlib.pyplot as plt%0Afrom skimage import data%0Afrom skimage.feature import match_template%0A%0Aimage = data.coins()%0Acoin = image%5B170:220, 75:130%5D%0A%0Aresult = match_template(image, coin)%0Aij = np.unravel_index(np.argmax(result), result.shape)%0Ax, y = ij%5B::-1%5D%0A%0Afig, (ax1, ax2, ax3) = plt.subplots(ncols=3, figsize=(8, 3))%0A%0Aax1.imshow(coin)%0Aax1.set_axis_off()%0Aax1.set_title('template')%0A%0Aax2.imshow(image)%0Aax2.set_axis_off()%0Aax2.set_title('image')%0A# highlight matched region%0Ahcoin, wcoin = coin.shape%0Arect = plt.Rectangle((x, y), wcoin, hcoin, edgecolor='r', facecolor='none')%0Aax2.add_patch(rect)%0A%0Aax3.imshow(result)%0Aax3.set_axis_off()%0Aax3.set_title('%60match_template%60%5Cnresult')%0A# highlight matched region%0Aax3.autoscale(False)%0Aax3.plot(x, y, 'o', markeredgecolor='r', markerfacecolor='none', markersize=10)%0A%0Aplt.show()%0A%0A
|
|
fc1c0a563f8bd4fd33e63285ab6af79825b8b927
|
Add a modified terminalcolors.py
|
bin/terminalcolors.py
|
bin/terminalcolors.py
|
Python
| 0.000001
|
@@ -0,0 +1,792 @@
+#!/usr/bin/env python%0A# Copyright (C) 2006 by Johannes Zellner, %3Cjohannes@zellner.org%3E%0A# modified by mac@calmar.ws to fit my output needs%0A# modified by crncosta@carloscosta.org to fit my output needs%0A# modified by joeyates, 2014%0A%0Afrom os import system%0A%0Adef foreground(n):%0A system('tput setaf %25u' %25 n)%0A%0Adef background(n):%0A system('tput setab %25u' %25 n)%0A%0Adef out(n):%0A background(n)%0A system('echo -n %22%25 4d%22' %25 n)%0A background(0)%0A%0Adef table(start, end, width):%0A def is_end_of_row(n):%0A return (n - start + 1) %25 width == 0%0A%0A for n in range(start, end + 1):%0A out(n)%0A if is_end_of_row(n):%0A print%0A%0Aforeground(16)%0A%0A# normal colors%0Atable(0, 15, 8)%0Aprint%0A# other colors%0Atable(16, 231, 6)%0Aprint%0A# greyscale%0Atable(232, 255, 6)%0A%0Aforeground(7)%0Abackground(0)%0A
|
|
1de610b2460b3b3bff24b79398d214001097e562
|
Implement Gmail OAuth 2.0.
|
notifyhere/dash/api/gmail.py
|
notifyhere/dash/api/gmail.py
|
Python
| 0.000055
|
@@ -0,0 +1,1552 @@
+from httplib import HTTPSConnection%0Aimport json%0A%0Aimport base%0Aimport tools%0Aimport secrets%0A%0Aclass GmailApi(base.ApiBase):%0A%0A def __init__(self):%0A base.ApiBase.__init__(self, %22gmail%22)%0A self.token = %22%22%0A%0A def icon_url(self):%0A return %22https://mail.google.com/favicon.ico%22%0A %0A def oauth_link(self):%0A url = %22https://accounts.google.com/o/oauth2/auth%22%0A args = %7B%0A %22response_type%22:%22code%22,%0A %22client_id%22:secrets.GMAIL_CLIENT_ID,%0A %22redirect_uri%22:secrets.BASE_REDIRECT_URL + %22gmail%22,%0A %22scope%22:%22https://mail.google.com/%22,%0A %7D%0A return url + %22?%22 + tools.encode_params(args)%0A %0A def oauth_callback(self, params):%0A%0A if 'code' not in params:%0A return None%0A%0A conn = HTTPSConnection(%22accounts.google.com%22)%0A body = tools.encode_params(%7B%0A %22grant_type%22:%22authorization_code%22,%0A %22code%22:params%5B'code'%5D,%0A %22client_id%22:secrets.GMAIL_CLIENT_ID,%0A %22client_secret%22:secrets.GMAIL_CLIENT_SECRET,%0A %22redirect_uri%22:secrets.BASE_REDIRECT_URL + %22gmail%22,%0A %7D)%0A headers = %7B%0A %22Content-Type%22:%22application/x-www-form-urlencoded%22,%0A %7D%0A conn.request(%22POST%22, %22/o/oauth2/token%22, body, headers)%0A%0A resp = conn.getresponse()%0A try:%0A self.token = json.loads(resp.read())%5B'access_token'%5D%0A self.is_auth = True%0A except (KeyError, ValueError):%0A return None%0A%0A def logout():%0A self.is_auth = False%0A self.token = %22%22%0A%0A
|
|
5f81d53c16816289cf52a5b4118e482b7650defe
|
Add MaintenanceMiddleware
|
app/soc/middleware/maintenance.py
|
app/soc/middleware/maintenance.py
|
Python
| 0
|
@@ -0,0 +1,2257 @@
+#!/usr/bin/python2.5%0A#%0A# Copyright 2009 the Melange authors.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%22%22%22Middleware to handle exceptions.%0A%22%22%22%0A%0A__authors__ = %5B%0A '%22Sverre Rabbelier%22 %3Csverre@rabbelier.nl%3E',%0A %5D%0A%0A%0Afrom google.appengine.runtime.apiproxy_errors import CapabilityDisabledError%0A%0Afrom django import http%0Afrom django.utils.translation import ugettext%0A%0Afrom soc.views.helper import responses%0A%0A%0ADEF_DOWN_FOR_MAINTENANCE_MSG = ugettext(%22Down for maintenance%22)%0ADEF_IN_UNEXPECTED_MAINTENANCE_MSG = ugettext(%0A %22Down for unexpected maintenance.%22)%0A%0A%0Aclass MaintenanceMiddleware(object):%0A %22%22%22Middleware to handle maintenance mode.%0A %22%22%22%0A%0A def maintenance(self, request):%0A %22%22%22Returns a 'down for maintenance' view.%0A %22%22%22%0A%0A context = responses.getUniversalContext(request)%0A context%5B'page_name'%5D = ugettext('Maintenance')%0A%0A notice = context.pop('site_notice')%0A%0A if not notice:%0A context%5B'body_content'%5D = DEF_IN_UNEXPECTED_MAINTENANCE_MSG%0A else:%0A context%5B'body_content'%5D = notice%0A%0A context%5B'header_title'%5D = DEF_DOWN_FOR_MAINTENANCE_MSG%0A context%5B'sidebar_menu_items'%5D = %5B%0A %7B'heading': DEF_DOWN_FOR_MAINTENANCE_MSG,%0A 'group': ''%7D,%0A %5D%0A%0A template = 'soc/base.html'%0A%0A return responses.respond(request, template, context=context)%0A%0A def process_request(self, request):%0A context = 
responses.getUniversalContext(request)%0A%0A if not context%5B'is_admin'%5D and context%5B'in_maintenance'%5D:%0A return self.maintenance(request)%0A%0A def process_exception(self, request, exception):%0A if isinstance(exception, CapabilityDisabledError):%0A # assume the site is in maintenance if we get CDE%0A return maintenance(request)%0A%0A # let the exception handling middleware handle it%0A return None%0A
|
|
4f1bb01bba0c2241a190bbf7fb21683be630abfa
|
Create Glyph3D.py
|
src/Python/Filtering/Glyph3D.py
|
src/Python/Filtering/Glyph3D.py
|
Python
| 0.000001
|
@@ -0,0 +1,1182 @@
+#!/usr/bin/env python%0A%0Aimport vtk%0A%0Adef main():%0A colors = vtk.vtkNamedColors()%0A %0A points = vtk.vtkPoints()%0A points.InsertNextPoint(0,0,0)%0A points.InsertNextPoint(1,1,1)%0A points.InsertNextPoint(2,2,2)%0A %0A polydata = vtk.vtkPolyData()%0A polydata.SetPoints(points)%0A %0A # Create anything you want here, we will use a cube for the demo.%0A cubeSource = vtk.vtkCubeSource()%0A %0A glyph3D = vtk.vtkGlyph3D()%0A glyph3D.SetSourceConnection(cubeSource.GetOutputPort())%0A glyph3D.SetInputData(polydata)%0A glyph3D.Update()%0A %0A # Visualize%0A mapper = vtk.vtkPolyDataMapper()%0A mapper.SetInputConnection(glyph3D.GetOutputPort())%0A %0A actor = vtk.vtkActor()%0A actor.SetMapper(mapper)%0A %0A renderer = vtk.vtkRenderer()%0A renderWindow = vtk.vtkRenderWindow()%0A renderWindow.AddRenderer(renderer)%0A renderWindowInteractor = vtk.vtkRenderWindowInteractor()%0A renderWindowInteractor.SetRenderWindow(renderWindow)%0A %0A renderer.AddActor(actor)%0A renderer.SetBackground(colors.GetColor3d(%22SlateGray%22)) # Background Slate Gray%0A %0A renderWindow.Render()%0A renderWindowInteractor.Start()%0A%09%0Aif __name__ == '__main__':%0A main()%0A
|
|
ff4f7273925df677b67b31e2b532768a392e18f8
|
Implement Section.__hash__() to avoid Python 2’s DeprecationWarning
|
elftools/elf/sections.py
|
elftools/elf/sections.py
|
#-------------------------------------------------------------------------------
# elftools: elf/sections.py
#
# ELF sections
#
# Eli Bendersky (eliben@gmail.com)
# This code is in the public domain
#-------------------------------------------------------------------------------
from ..construct import CString
from ..common.utils import struct_parse, elf_assert, parse_cstring_from_stream
class Section(object):
    """ Base class for ELF sections. Also used for all sections types that have
        no special functionality.

        Allows dictionary-like access to the section header. For example:
         > sec = Section(...)
         > sec['sh_type']  # section type
    """
    def __init__(self, header, name, stream):
        self.header = header
        self.name = name
        self.stream = stream

    def data(self):
        """ The section data from the file.
        """
        # sh_offset / sh_size locate the section's bytes inside the stream.
        self.stream.seek(self['sh_offset'])
        return self.stream.read(self['sh_size'])

    def is_null(self):
        """ Is this a null section?
        """
        return False

    def __getitem__(self, name):
        """ Implement dict-like access to header entries
        """
        return self.header[name]

    def __eq__(self, other):
        # Two sections are equal exactly when their parsed headers match.
        return self.header == other.header

    def __hash__(self):
        # Defining __eq__ without __hash__ makes instances unhashable on
        # Python 3 (and warns under Python 2's -3 mode); hash on the same
        # attribute used for equality so equal sections hash alike.
        return hash(self.header)
class NullSection(Section):
    """ ELF NULL section
    """
    def __init__(self, header, name, stream):
        super(NullSection, self).__init__(header, name, stream)

    def is_null(self):
        # Override the base class: this section type reports itself as null.
        return True
class StringTableSection(Section):
    """ ELF string table section.
    """
    def __init__(self, header, name, stream):
        super(StringTableSection, self).__init__(header, name, stream)

    def get_string(self, offset):
        """ Get the string stored at the given offset in this string table.
        """
        # Entries are C strings; read the one beginning at sh_offset + offset.
        return parse_cstring_from_stream(self.stream, self['sh_offset'] + offset)
class SymbolTableSection(Section):
    """ ELF symbol table section. Has an associated StringTableSection that's
        passed in the constructor.
    """
    def __init__(self, header, name, stream, elffile, stringtable):
        super(SymbolTableSection, self).__init__(header, name, stream)
        self.elffile = elffile
        self.elfstructs = self.elffile.structs
        self.stringtable = stringtable
        # A zero or non-dividing entry size would make the index arithmetic
        # below meaningless, so fail early with a clear message.
        elf_assert(self['sh_entsize'] > 0,
                'Expected entry size of section %s to be > 0' % name)
        elf_assert(self['sh_size'] % self['sh_entsize'] == 0,
                'Expected section size to be a multiple of entry size in section %s' % name)

    def num_symbols(self):
        """ Number of symbols in the table
        """
        return self['sh_size'] // self['sh_entsize']

    def get_symbol(self, n):
        """ Get the symbol at index #n from the table (Symbol object)
        """
        # Entries are fixed-size records laid out back to back in the stream.
        record_pos = self['sh_offset'] + n * self['sh_entsize']
        parsed = struct_parse(
            self.elfstructs.Elf_Sym,
            self.stream,
            stream_pos=record_pos)
        # The entry holds only an index into the string table; resolve it
        # to the symbol's actual name.
        return Symbol(parsed, self.stringtable.get_string(parsed['st_name']))

    def iter_symbols(self):
        """ Yield all the symbols in the table
        """
        for idx in range(self.num_symbols()):
            yield self.get_symbol(idx)
class Symbol(object):
    """ Symbol object - representing a single symbol entry from a symbol table
        section.

        Similarly to Section objects, allows dictionary-like access to the
        symbol entry.
    """
    def __init__(self, entry, name):
        self.entry = entry
        self.name = name

    def __getitem__(self, name):
        """ Implement dict-like access to entries
        """
        # Delegate subscripting straight to the underlying parsed entry.
        return self.entry[name]
class SUNWSyminfoTableSection(Section):
    """ ELF .SUNW Syminfo table section.
        Has an associated SymbolTableSection that's passed in the constructor.
    """
    def __init__(self, header, name, stream, elffile, symboltable):
        super(SUNWSyminfoTableSection, self).__init__(header, name, stream)
        self.elffile = elffile
        self.elfstructs = self.elffile.structs
        self.symboltable = symboltable

    def num_symbols(self):
        """ Number of symbols in the table
        """
        # Slot 0 is reserved for the syminfo table's version record, so it
        # does not count as a symbol.
        return self['sh_size'] // self['sh_entsize'] - 1

    def get_symbol(self, n):
        """ Get the symbol at index #n from the table (Symbol object).
            It begins at 1 and not 0 since the first entry is used to
            store the current version of the syminfo table.
        """
        # Locate and parse the fixed-size record for slot n.
        record_pos = self['sh_offset'] + n * self['sh_entsize']
        parsed = struct_parse(
            self.elfstructs.Elf_Sunw_Syminfo,
            self.stream,
            stream_pos=record_pos)
        # The name lives in the companion symbol table at the same index.
        return Symbol(parsed, self.symboltable.get_symbol(n).name)

    def iter_symbols(self):
        """ Yield all the symbols in the table
        """
        for idx in range(1, self.num_symbols() + 1):
            yield self.get_symbol(idx)
|
Python
| 0.000001
|
@@ -1279,16 +1279,73 @@
.header%0A
+ def __hash__(self):%0A return hash(self.header)%0A
%0A%0Aclass
|
89262fbd2375724ff9120fe01799a036b1c34f6f
|
add new package at v1.1.6 (#20598)
|
var/spack/repos/builtin/packages/py-mercantile/package.py
|
var/spack/repos/builtin/packages/py-mercantile/package.py
|
Python
| 0
|
@@ -0,0 +1,674 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass PyMercantile(PythonPackage):%0A %22%22%22Web mercator XYZ tile utilities.%22%22%22%0A%0A homepage = %22https://github.com/mapbox/mercantile%22%0A url = %22https://pypi.io/packages/source/m/mercantile/mercantile-1.1.6.tar.gz%22%0A%0A maintainers = %5B'adamjstewart'%5D%0A%0A version('1.1.6', sha256='0dff4cbc2c92ceca0e0dfbb3dc74392a96d33cfa29afb1bdfcc80283d3ef4207')%0A%0A depends_on('py-setuptools', type='build')%0A depends_on('py-click@3.0:', type=('build', 'run'))%0A
|
|
41bc3c33cc1442105f019e06c40d189c27f65758
|
add save_json helper
|
vsmlib/misc/data.py
|
vsmlib/misc/data.py
|
Python
| 0.000001
|
@@ -0,0 +1,227 @@
+import json%0A%0A%0Adef save_json(data, path):%0A # if not os.path.isdir(path):%0A # os.makedirs(path)%0A s = json.dumps(data, ensure_ascii=False, indent=4, sort_keys=True)%0A f = open(path, 'w')%0A f.write(s)%0A f.close()%0A
|
|
8483a311f75a3d3682e66fba2f805ea20ebf6870
|
add memory usage beacon
|
salt/beacons/memusage.py
|
salt/beacons/memusage.py
|
Python
| 0.000001
|
@@ -0,0 +1,1590 @@
+# -*- coding: utf-8 -*-%0A'''%0ABeacon to monitor memory usage.%0A%0A.. versionadded::%0A%0A:depends: python-psutil%0A'''%0A%0A# Import Python libs%0Afrom __future__ import absolute_import%0Aimport logging%0Aimport re%0A%0A# Import Salt libs%0Aimport salt.utils%0A%0A# Import Third Party Libs%0Atry:%0A import psutil%0A HAS_PSUTIL = True%0Aexcept ImportError:%0A HAS_PSUTIL = False%0A%0Alog = logging.getLogger(__name__)%0A%0A__virtualname__ = 'memusage'%0A%0A%0Adef __virtual__():%0A if salt.utils.is_windows():%0A return False%0A elif HAS_PSUTIL is False:%0A return False%0A else:%0A return __virtualname__%0A%0A%0Adef validate(config):%0A '''%0A Validate the beacon configuration%0A '''%0A # Configuration for diskusage beacon should be a list of dicts%0A if not isinstance(config, dict):%0A log.info('Configuration for diskusage beacon must be a dictionary.')%0A return False%0A return True%0A%0A%0A%0Adef beacon(config):%0A '''%0A Monitor the memory usage of the minion%0A%0A Specify thresholds for percent used and only emit a beacon if it is exceeded.%0A%0A .. code-block:: yaml%0A%0A beacons:%0A memusage:%0A - percent: 63%25%0A '''%0A ret = %5B%5D%0A for memusage in config:%0A mount = memusage.keys()%5B0%5D%0A _current_usage = psutil.virtual_memory()%0A%0A current_usage = _current_usage.percent%0A monitor_usage = memusage%5Bmount%5D%0A if '%25' in monitor_usage:%0A monitor_usage = re.sub('%25', '', monitor_usage)%0A monitor_usage = float(monitor_usage)%0A if current_usage %3E= monitor_usage:%0A ret.append(%7B'memusage': current_usage%7D)%0A return ret%0A
|
|
9e6f8768d60d38e69074c5275637deaa62e6fc9e
|
check how often URL matching would match the right documents in the test corpus
|
baseline/url_matching.py
|
baseline/url_matching.py
|
Python
| 0.000001
|
@@ -0,0 +1,3041 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0Aimport sys%0Aimport os%0Afrom strip_language_from_uri import LanguageStripper%0Aimport chardet%0Afrom collections import defaultdict%0Aimport re%0Aimport urlparse%0A%0A%0Adef has_prefix(prefixes, s):%0A %22Returns true if s starts with one of the prefixes%22%0A for p in prefixes:%0A if s.startswith(p):%0A return True%0A return False%0A%0A%0Adef original_url(html):%0A m = re.search(r%22%3C!-- Mirrored from (%5B%5E%3E%5D+) by HTTrack Website Copier%22,%0A html)%0A if m is None:%0A return %22unknown_url%22%0A return m.groups()%5B0%5D%0A%0A%0Adef clean_whitespace(s):%0A # remove empty lines%0A s = %5Bl.strip() for l in s.split(%22%5Cn%22) if l.strip()%5D%0A return %22%5Cn%22.join(re.sub(%22%5Cs+%22, %22 %22, l) for l in s)%0A%0A%0Adef read_file(filename):%0A # sys.stderr.write(%22reading: %25s%5Cn%22 %25 filename)%0A f = open(filename, 'r')%0A html = f.read()%0A try:%0A html = html.decode(%22utf-8%22)%0A except:%0A encoding = chardet.detect(html)%0A try:%0A html = html.decode(encoding%5B%22encoding%22%5D)%0A except:%0A sys.stderr.write(%0A %22Fallback: ignoring errors for file%25s%5Cn%22 %25 filename)%0A return html.decode(%22utf-8%22, errors='ignore')%0A return html%0A%0A%0Aif __name__ == %22__main__%22:%0A import argparse%0A parser = argparse.ArgumentParser()%0A parser.add_argument('outfile', type=argparse.FileType('w'),%0A help='output file')%0A parser.add_argument('-prefix', help='prefix added to make filenames',%0A default=%22/fs/syn0/pkoehn/crawl/data/site-crawls%22)%0A parser.add_argument('-slang', help='source language', default='en')%0A parser.add_argument('-tlang', help='target language', default='fr')%0A args = parser.parse_args(sys.argv%5B1:%5D)%0A%0A correct = 0%0A stripper = LanguageStripper()%0A for line in sys.stdin:%0A was_stripped = 0%0A domain, a, b = line.strip().split(%22%5Ct%22)%0A%0A urls = defaultdict(list)%0A for s in (a, b):%0A filename = os.path.join(args.prefix, domain, s)%0A html = 
read_file(filename)%0A%0A url = original_url(html)%0A url = %22http://%22 + url%0A # print url%0A%0A parsed_url = urlparse.urlparse(url)%0A stripped_path = stripper.strip(parsed_url.path).replace(%22//%22, %22/%22)%0A stripped_query = stripper.strip(%0A parsed_url.query).replace(%22//%22, %22/%22)%0A stripped_url = urlparse.ParseResult(parsed_url.scheme,%0A parsed_url.netloc,%0A stripped_path,%0A parsed_url.params,%0A stripped_query,%0A parsed_url.fragment).geturl()%0A%0A urls%5Bstripped_url%5D.append(url)%0A if stripped_url != url:%0A was_stripped += 1%0A if len(urls) == 1:%0A correct += 1%0A%0A print len(urls), was_stripped, correct, urls.items()%0A%0A print %22correct: %22, correct%0A
|
|
d1ffd984bae034076244ac4449632a1aa04d5ffe
|
Refactor to Linter v2 API
|
bears/php/PHPLintBear.py
|
bears/php/PHPLintBear.py
|
import re
from coalib.bearlib.abstractions.Lint import Lint
from coalib.bears.LocalBear import LocalBear
from coalib.results.RESULT_SEVERITY import RESULT_SEVERITY
class PHPLintBear(LocalBear, Lint):
    """
    Checks PHP files for syntax errors by running ``php -l`` on each file.
    """
    executable = 'php'
    # -l: lint only; -n: skip php.ini; report errors on stdout instead of
    # the error log. The Lint mixin substitutes {filename} with the path of
    # the file under check — the previous literal '(unknown)' meant no file
    # was ever passed to php, so nothing was linted.
    arguments = '-l -n -d display_errors=On -d log_errors=Off {filename}'
    output_regex = re.compile(
        r'(?P<severity>\S+) error: '
        r'(?P<message>.*) in (?P<file_name>.*) on line (?P<line>\d+)')
    # Both parse and fatal errors prevent execution, so both map to MAJOR.
    severity_map = {
        "Parse": RESULT_SEVERITY.MAJOR,
        "Fatal": RESULT_SEVERITY.MAJOR}
    LANGUAGES = "PHP"

    def run(self, filename, file):
        '''
        Checks the code with `php -l`. This runs it on each file separately.
        '''
        return self.lint(filename)
|
Python
| 0
|
@@ -1,15 +1,4 @@
-import re%0A%0A
from
@@ -30,24 +30,26 @@
ons.Lint
+er
import
Lint%0Afro
@@ -44,56 +44,13 @@
ort
-Lint%0Afrom coalib.bears.LocalBear import LocalBea
+linte
r%0Afr
@@ -112,48 +112,16 @@
Y%0A%0A%0A
-class PHPLintBear(LocalBear, Lint):%0A
+@linter(
exec
@@ -130,205 +130,149 @@
able
- =
+=
'php'
+,
%0A
-arguments = '-l -n -d display_errors=On -d log_errors=Off %7Bfilename%7D'%0A output_regex = re.compile(%0A r'(?P%3Cseverity%3E%5CS+) error: '%0A r'(?P%3Cmessage%3E.*) in (?P%3Cfile_name%3E
+ output_format='regex',%0A output_regex=r'(?P%3Cseverity%3EParse%7CFatal) error: (?P%3Cmessage%3E.*) in '%0A r'
.*
-)
on
@@ -290,18 +290,22 @@
ne%3E%5Cd+)'
-)%0A
+,%0A
seve
@@ -316,28 +316,17 @@
_map
- = %7B%0A %22
+=%7B'
Parse
-%22
+'
: RE
@@ -358,15 +358,29 @@
-%22
+ '
Fatal
-%22
+'
: RE
@@ -403,83 +403,37 @@
JOR%7D
-%0A LANGUAGES = %22PHP%22%0A%0A def run(self, filename, file):%0A '''%0A
+)%0Aclass PHPLintBear:%0A %22%22%22%0A
@@ -457,16 +457,17 @@
ith
+%60
%60php -l%60
. Th
@@ -462,16 +462,17 @@
%60php -l%60
+%60
. This r
@@ -511,41 +511,203 @@
- '''%0A return self.lint(
+%22%22%22%0A LANGUAGES = %22PHP%22%0A%0A @staticmethod%0A def create_arguments(filename, file, config_file):%0A return ('-l', '-n', '-d', 'display_errors=On', '-d', 'log_errors=Off',%0A
file
|
a0b9ce2f711aaf7c4bc94cdae3ddb0262164d607
|
remove comment
|
Integrations/PhishLabsIOC/PhishLabsIOC_test.py
|
Integrations/PhishLabsIOC/PhishLabsIOC_test.py
|
from CommonServerPython import *
def test_populate_context_files():
    """Verify that populate_context() splits a PhishLabs file indicator into
    both the generic Demisto ``File`` context entry and the
    PhishLabs-specific ``PhishLabs.File`` entry.

    NOTE(review): depends on the PhishLabsIOC integration module and on
    CommonServerPython providing ``outputPaths`` and ``json`` via the
    star-import at the top of the file — confirm in the test environment.
    """
    from PhishLabsIOC import populate_context, get_file_properties, create_phishlabs_object
    # Raw indicator as returned by the PhishLabs API: a single Attachment
    # with md5 / filetype / name attributes.
    files_json = """
    {
        "attributes": [
            {
                "createdAt": "2019-05-14T13:03:45Z",
                "id": "xyz",
                "name": "md5",
                "value": "c8092abd8d581750c0530fa1fc8d8318"
            },
            {
                "createdAt": "2019-05-14T13:03:45Z",
                "id": "abc",
                "name": "filetype",
                "value": "application/zip"
            },
            {
                "createdAt": "2019-05-14T13:03:45Z",
                "id": "qwe",
                "name": "name",
                "value": "Baycc.zip"
            }
        ],
        "createdAt": "2019-05-14T13:03:45Z",
        "falsePositive": false,
        "id": "def",
        "type": "Attachment",
        "updatedAt": "0001-01-01T00:00:00Z",
        "value": "c8092abd8d581750c0530fa1fc8d8318"
    } """
    file = json.loads(files_json)
    # Build the PhishLabs entry the same way the integration does, then
    # enrich it with the extracted file properties.
    file_md5, file_name, file_type = get_file_properties(file)
    phishlabs_entry = create_phishlabs_object(file)
    phishlabs_entry['Name'] = file_name
    phishlabs_entry['Type'] = file_type
    phishlabs_entry['MD5'] = file_md5
    # Expected PhishLabs-specific context for this indicator.
    phishlabs_result = [{
        'ID': 'def',
        'CreatedAt': '2019-05-14T13:03:45Z',
        'Name': 'Baycc.zip',
        'Type': 'application/zip',
        'MD5': 'c8092abd8d581750c0530fa1fc8d8318',
        'Attribute': [
            {
                'CreatedAt': '2019-05-14T13:03:45Z',
                'Type': None,
                'Name': 'md5',
                'Value': 'c8092abd8d581750c0530fa1fc8d8318'
            },
            {
                'CreatedAt': '2019-05-14T13:03:45Z',
                'Type': None,
                'Name': 'filetype',
                'Value': 'application/zip'
            },
            {
                'CreatedAt': '2019-05-14T13:03:45Z',
                'Type': None,
                'Name': 'name',
                'Value': 'Baycc.zip'
            }
        ]
    }]
    # Expected generic (standard Demisto) file context entry.
    global_entry = {
        'Name': file_name,
        'Type': file_type,
        'MD5': file_md5
    }
    global_result = [{
        'Name': 'Baycc.zip',
        'Type': 'application/zip',
        'MD5': 'c8092abd8d581750c0530fa1fc8d8318'
    }]
    # Only the files argument is populated; the context must contain exactly
    # the two file-related keys, with the expected contents under each.
    context = populate_context([], [], [(global_entry, phishlabs_entry)], [])
    assert len(context.keys()) == 2
    assert context[outputPaths['file']] == global_result
    assert context['PhishLabs.File(val.ID && val.ID === obj.ID)'] == phishlabs_result
def test_populate_context_emails():
from PhishLabsIOC import populate_context, get_email_properties, create_phishlabs_object
emails_json = """
{
"attributes":[
{
"createdAt":"2019-05-13T16:54:18Z",
"id":"abc",
"name":"email-body",
"value":"-----Original Message-----From: A Sent: Monday, May 13, 2019 12:22 PMTo:"
},
{
"createdAt":"2019-05-13T16:54:18Z",
"id":"def",
"name":"from",
"value":"someuser@contoso.com"
},
{
"createdAt":"2019-05-13T16:54:18Z",
"id":"cf3182ca-92ec-43b6-8aaa-429802a99fe5",
"name":"to",
"value":"example@gmail.com"
}
],
"createdAt":"2019-05-13T16:54:18Z",
"falsePositive":false,
"id":"ghi",
"type":"E-mail",
"updatedAt":"0001-01-01T00:00:00Z",
"value":"FW: Task"
} """
email = json.loads(emails_json)
email_body, email_to, email_from = get_email_properties(email)
phishlabs_entry = create_phishlabs_object(email)
phishlabs_entry['To'] = email_to,
phishlabs_entry['From'] = email_from,
phishlabs_entry['Body'] = email_body
phishlabs_entry['Subject'] = email.get('value')
phishlabs_result = [{
'ID': 'ghi',
'CreatedAt': '2019-05-13T16:54:18Z',
'To': ('example@gmail.com',),
'From': ('someuser@contoso.com',),
'Body': '-----Original Message-----From: A Sent: Monday, May 13, 2019 12:22 PMTo:',
'Subject': 'FW: Task',
'Attribute':
[{
'CreatedAt': '2019-05-13T16:54:18Z',
'Type': None,
'Name': 'email-body',
'Value': '-----Original Message-----From: A Sent: Monday, May 13, 2019 12:22 PMTo:'
},
{
'CreatedAt': '2019-05-13T16:54:18Z',
'Type': None,
'Name': 'from',
'Value': 'someuser@contoso.com'
},
{
'CreatedAt': '2019-05-13T16:54:18Z',
'Type': None,
'Name': 'to',
'Value': 'example@gmail.com'
}]
}]
global_entry = {
'To': email_to,
'From': email_from,
'Body': email_body,
'Subject': email.get('value')
}
global_result = [{
'To': 'example@gmail.com',
'From': 'someuser@contoso.com',
'Body': '-----Original Message-----From: A Sent: Monday, May 13, 2019 12:22 PMTo:',
'Subject': 'FW: Task'
}]
context = populate_context([], [], [], [], [(global_entry, phishlabs_entry)])
# context['PhishLabs.Email(val.ID && val.ID === obj.ID)'][0].pop('Attribute')
assert len(context.keys()) == 2
assert context['Email'] == global_result
assert context['PhishLabs.Email(val.ID && val.ID === obj.ID)'] == phishlabs_result
|
Python
| 0
|
@@ -5580,91 +5580,8 @@
%5D)%0A%0A
- # context%5B'PhishLabs.Email(val.ID && val.ID === obj.ID)'%5D%5B0%5D.pop('Attribute')%0A%0A
|
34560978ee8f33ab8ddc60a1a3525979119a952e
|
Add run script
|
profile_compressible_solver/run_profiler.py
|
profile_compressible_solver/run_profiler.py
|
Python
| 0.000001
|
@@ -0,0 +1,2789 @@
+from firedrake.petsc import PETSc%0Afrom argparse import ArgumentParser%0Afrom driver import run_profliler%0Aimport sys%0A%0A%0APETSc.Log.begin()%0A%0Aparser = ArgumentParser(description=(%22%22%22%0AProfile of 3D compressible solver for the Euler equations (dry atmosphere).%0A%22%22%22), add_help=False)%0A%0Aparser.add_argument(%22--hybridization%22,%0A action=%22store_true%22,%0A help=%22Use a hybridized compressible solver.%22)%0A%0Aparser.add_argument(%22--model_degree%22,%0A default=1,%0A type=int,%0A action=%22store%22,%0A help=%22Model degree%22)%0A%0Aparser.add_argument(%22--model_family%22,%0A default=%22RTCF%22,%0A choices=%5B%22RTCF%22%5D,%0A help=%22Family of finite element spaces%22)%0A%0Aparser.add_argument(%22--mesh_degree%22,%0A default=3,%0A type=int,%0A action=%22store%22,%0A help=%22Coordinate space degree%22)%0A%0Aparser.add_argument(%22--cfl%22,%0A default=1.,%0A type=float,%0A action=%22store%22,%0A help=%22CFL number to run at (determines dt).%22)%0A%0Aparser.add_argument(%22--dt%22,%0A default=0.0,%0A type=float,%0A action=%22store%22,%0A help=%22Manually set dt%22)%0A%0Aparser.add_argument(%22--refinements%22,%0A default=4,%0A type=int,%0A action=%22store%22,%0A help=%22Resolution scaling parameter.%22)%0A%0Aparser.add_argument(%22--richardson_scale%22,%0A default=1.0,%0A type=float,%0A action=%22store%22,%0A help=%22Set the Richardson parameter for the trace system.%22)%0A%0Aparser.add_argument(%22--flexsolver%22,%0A action=%22store_true%22,%0A help=%22Switch to flex-GMRES and AMG.%22)%0A%0Aparser.add_argument(%22--gmres_ilu_only%22,%0A action=%22store_true%22,%0A help=%22Switch to only gmres+bilu on traces%22)%0A%0Aparser.add_argument(%22--layers%22,%0A default=16,%0A type=int,%0A action=%22store%22,%0A help=%22Number of vertical layers.%22)%0A%0Aparser.add_argument(%22--debug%22,%0A action=%22store_true%22,%0A help=%22Turn on KSP monitors%22)%0A%0Aparser.add_argument(%22--rtol%22,%0A default=1.0e-6,%0A type=float,%0A help=%22Rtolerance 
for the linear solve.%22)%0A%0Aparser.add_argument(%22--help%22,%0A action=%22store_true%22,%0A help=%22Show help.%22)%0A%0Aargs, _ = parser.parse_known_args()%0A%0Aif args.help:%0A help = parser.format_help()%0A PETSc.Sys.Print(%22%25s%5Cn%22 %25 help)%0A sys.exit(1)%0A%0A%0Arun_profliler(args, suppress_data_output=True)%0A
|
|
55f0e8bbddee976f020628c552eb22d8ed894c1a
|
question 0001 solved
|
vvzwvv/0001/0001.py
|
vvzwvv/0001/0001.py
|
Python
| 0.999999
|
@@ -0,0 +1,228 @@
+import uuid%0A%0Adef gen(num, len):%0A%09L = %5B%5D%0A%09for i in range(num):%0A%09%09ran = str(uuid.uuid4()).replace('-', '')%5B:len%5D%0A%09%09if not ran in L:%0A%09%09%09L.append(ran)%0A%09return L%0A%0Aif __name__ == '__main__':%0A%09for item in gen(200, 16):%0A%09%09print(item)%0A%09%09
|
|
ee2a4c1edb6d2f1273bb08080e8fc00b0a0e9074
|
add pack1/mymodule1.py
|
python/18-package/parent/pack1/mymodule1.py
|
python/18-package/parent/pack1/mymodule1.py
|
Python
| 0.000023
|
@@ -0,0 +1,209 @@
+#!/usr/bin/env python%0A#-*- coding=utf-8 -*-%0A%0Adef function1():%0A print %22function1 running%22%0A%0A%0Aif __name__ == %22__main__%22:%0A print %22mymodule1 running as main program%22%0Aelse:%0A print %22mymodule1 initializing%22%0A%0A
|
|
a29e340efa60ecb05d85e9c6d87ec709ba26822f
|
Add new extractor(closes #14361)
|
youtube_dl/extractor/bibeltv.py
|
youtube_dl/extractor/bibeltv.py
|
Python
| 0
|
@@ -0,0 +1,1102 @@
+# coding: utf-8%0Afrom __future__ import unicode_literals%0A%0Afrom .common import InfoExtractor%0A%0A%0Aclass BibelTVIE(InfoExtractor):%0A _VALID_URL = r'https?://(?:www%5C.)?bibeltv%5C.de/mediathek/videos/(?:crn/)?(?P%3Cid%3E%5Cd+)'%0A _TESTS = %5B%7B%0A 'url': 'https://www.bibeltv.de/mediathek/videos/329703-sprachkurs-in-malaiisch',%0A 'md5': '252f908192d611de038b8504b08bf97f',%0A 'info_dict': %7B%0A 'id': 'ref:329703',%0A 'ext': 'mp4',%0A 'title': 'Sprachkurs in Malaiisch',%0A 'description': 'md5:3e9f197d29ee164714e67351cf737dfe',%0A 'timestamp': 1608316701,%0A 'uploader_id': '5840105145001',%0A 'upload_date': '20201218',%0A %7D%0A %7D, %7B%0A 'url': 'https://www.bibeltv.de/mediathek/videos/crn/326374',%0A 'only_matching': True,%0A %7D%5D%0A BRIGHTCOVE_URL_TEMPLATE = 'http://players.brightcove.net/5840105145001/default_default/index.html?videoId=ref:%25s'%0A%0A def _real_extract(self, url):%0A crn_id = self._match_id(url)%0A return self.url_result(%0A self.BRIGHTCOVE_URL_TEMPLATE %25 crn_id, 'BrightcoveNew')%0A
|
|
76cce82d65868619b096d74a5adb3a616cfe771d
|
Create new package. (#5810)
|
var/spack/repos/builtin/packages/r-affyilm/package.py
|
var/spack/repos/builtin/packages/r-affyilm/package.py
|
Python
| 0
|
@@ -0,0 +1,2062 @@
+##############################################################################%0A# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/llnl/spack%0A# Please also see the NOTICE and LICENSE files for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0A%0Aclass RAffyilm(RPackage):%0A %22%22%22affyILM is a preprocessing tool which estimates gene%0A expression levels for Affymetrix Gene Chips. 
Input from%0A physical chemistry is employed to first background subtract%0A intensities before calculating concentrations on behalf%0A of the Langmuir model.%22%22%22%0A%0A homepage = %22https://www.bioconductor.org/packages/affyILM/%22%0A url = %22https://git.bioconductor.org/packages/affyILM%22%0A%0A version('1.28.0', git='https://git.bioconductor.org/packages/affyILM', commit='307bee3ebc599e0ea4a1d6fa8d5511ccf8bef7de')%0A%0A depends_on('r@3.4.0:3.4.9', when='@1.28.0')%0A depends_on('r-gcrma', type=('build', 'run'))%0A depends_on('r-affxparser', type=('build', 'run'))%0A depends_on('r-affy', type=('build', 'run'))%0A depends_on('r-biobase', type=('build', 'run'))%0A
|
|
da42b3854d85b1df42c67e4e5f3d9131aacecd2c
|
Turn on template debugging in test settings
|
{{cookiecutter.project_slug}}/config/settings/test.py
|
{{cookiecutter.project_slug}}/config/settings/test.py
|
# -*- coding: utf-8 -*-
'''
Test settings
- Used to run tests fast on the continuous integration server and locally
'''
from .common import * # noqa
# DEBUG
# ------------------------------------------------------------------------------
# Turn debug off so tests run faster
DEBUG = False
TEMPLATES[0]['OPTIONS']['debug'] = False
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
SECRET_KEY = env('DJANGO_SECRET_KEY', default='CHANGEME!!!')
# Mail settings
# ------------------------------------------------------------------------------
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
# In-memory email backend stores messages in django.core.mail.outbox
# for unit testing purposes
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
# CACHING
# ------------------------------------------------------------------------------
# Speed advantages of in-memory caching without having to run Memcached
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
# TESTING
# ------------------------------------------------------------------------------
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# PASSWORD HASHING
# ------------------------------------------------------------------------------
# Use fast password hasher so tests run faster
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
# TEMPLATE LOADERS
# ------------------------------------------------------------------------------
# Keep templates in memory so tests run faster
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]),
]
|
Python
| 0.997035
|
@@ -287,16 +287,84 @@
= False%0A
+# But template debugging must be enabled for django_coverage_plugin%0A
TEMPLATE
@@ -390,20 +390,19 @@
bug'%5D =
-Fals
+Tru
e%0A%0A# SEC
|
d7017acef8ed540bb2f3c00d268cd417d75f09e3
|
add import script for Fareham (closes #858)
|
polling_stations/apps/data_collection/management/commands/import_fareham.py
|
polling_stations/apps/data_collection/management/commands/import_fareham.py
|
Python
| 0
|
@@ -0,0 +1,392 @@
+from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter%0A%0Aclass Command(BaseXpressDemocracyClubCsvImporter):%0A council_id = 'E07000087'%0A addresses_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017 (1).tsv'%0A stations_name = 'parl.2017-06-08/Version 1/Democracy_Club__08June2017 (1).tsv'%0A elections = %5B'parl.2017-06-08'%5D%0A csv_delimiter = '%5Ct'%0A
|
|
57d66ddf50b31c2561526075be9c8796df209e2d
|
Fix Linux Swarming bots
|
infra/bots/flavor/default_flavor.py
|
infra/bots/flavor/default_flavor.py
|
#!/usr/bin/env python
#
# Copyright 2016 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Default flavor utils class, used for desktop bots."""
import os
import shutil
import sys
class DeviceDirs(object):
def __init__(self,
dm_dir,
perf_data_dir,
resource_dir,
images_dir,
skp_dir,
tmp_dir):
self._dm_dir = dm_dir
self._perf_data_dir = perf_data_dir
self._resource_dir = resource_dir
self._images_dir = images_dir
self._skp_dir = skp_dir
self._tmp_dir = tmp_dir
@property
def dm_dir(self):
"""Where DM writes."""
return self._dm_dir
@property
def perf_data_dir(self):
return self._perf_data_dir
@property
def resource_dir(self):
return self._resource_dir
@property
def images_dir(self):
return self._images_dir
@property
def skp_dir(self):
return self._skp_dir
@property
def tmp_dir(self):
return self._tmp_dir
class DefaultFlavorUtils(object):
"""Utilities to be used by build steps.
The methods in this class define how certain high-level functions should
work. Each build step flavor should correspond to a subclass of
DefaultFlavorUtils which may override any of these functions as appropriate
for that flavor.
For example, the AndroidFlavorUtils will override the functions for
copying files between the host and Android device, as well as the
'step' function, so that commands may be run through ADB.
"""
def __init__(self, bot_info, *args, **kwargs):
self._bot_info = bot_info
self.chrome_path = os.path.join(self._bot_info.build_dir, 'src')
def run(self, cmd, **kwargs):
"""Runs a step as appropriate for this flavor."""
path_to_app = os.path.join(self._bot_info.out_dir,
self._bot_info.configuration, cmd[0])
if ('linux' in sys.platform and
'x86_64' in self._bot_info.bot_name and
not 'TSAN' in self._bot_info.bot_name):
new_cmd = ['catchsegv', path_to_app]
else:
if sys.platform == 'win32':
path_to_app += '.exe'
new_cmd = [path_to_app]
new_cmd.extend(cmd[1:])
return self._bot_info.run(new_cmd, **kwargs)
def bootstrap_win_toolchain(self):
"""Run bootstrapping script for the Windows toolchain."""
bootstrap_script = os.path.join(self._bot_info.infrabots_dir,
'bootstrap_win_toolchain_json.py')
win_toolchain_json = os.path.join(
self._bot_info.build_dir, 'src', 'build', 'win_toolchain.json')
self._bot_info.run([
'python', bootstrap_script,
'--win_toolchain_json', win_toolchain_json,
'--depot_tools_parent_dir', self._bot_info.build_dir])
def compile(self, target):
"""Build the given target."""
# The CHROME_PATH environment variable is needed for bots that use
# toolchains downloaded by Chrome.
env = {}
if sys.platform == 'win32':
make_cmd = ['python', 'make.py']
env['CHROME_PATH'] = self.chrome_path
self._bot_info._run_once(self.bootstrap_win_toolchain)
else:
make_cmd = ['make']
cmd = make_cmd + [target]
self._bot_info.run(cmd, env=env)
def device_path_join(self, *args):
"""Like os.path.join(), but for paths on a connected device."""
return os.path.join(*args)
def device_path_exists(self, path):
"""Like os.path.exists(), but for paths on a connected device."""
return os.path.exists(path, infra_step=True) # pragma: no cover
def copy_directory_contents_to_device(self, host_dir, device_dir):
"""Like shutil.copytree(), but for copying to a connected device."""
# For "normal" bots who don't have an attached device, we expect
# host_dir and device_dir to be the same.
if str(host_dir) != str(device_dir):
raise ValueError('For bots who do not have attached devices, copying '
'from host to device is undefined and only allowed if '
'host_path and device_path are the same (%s vs %s).' % (
str(host_dir), str(device_dir))) # pragma: no cover
def copy_directory_contents_to_host(self, device_dir, host_dir):
"""Like shutil.copytree(), but for copying from a connected device."""
# For "normal" bots who don't have an attached device, we expect
# host_dir and device_dir to be the same.
if str(host_dir) != str(device_dir):
raise ValueError('For bots who do not have attached devices, copying '
'from device to host is undefined and only allowed if '
'host_path and device_path are the same (%s vs %s).' % (
str(host_dir), str(device_dir))) # pragma: no cover
def copy_file_to_device(self, host_path, device_path):
"""Like shutil.copyfile, but for copying to a connected device."""
# For "normal" bots who don't have an attached device, we expect
# host_dir and device_dir to be the same.
if str(host_path) != str(device_path): # pragma: no cover
raise ValueError('For bots who do not have attached devices, copying '
'from host to device is undefined and only allowed if '
'host_path and device_path are the same (%s vs %s).' % (
str(host_path), str(device_path)))
def create_clean_device_dir(self, path):
"""Like shutil.rmtree() + os.makedirs(), but on a connected device."""
self.create_clean_host_dir(path)
def create_clean_host_dir(self, path):
"""Convenience function for creating a clean directory."""
if os.path.exists(path):
shutil.rmtree(path)
os.makedirs(path)
def install(self):
"""Run device-specific installation steps."""
pass
def cleanup_steps(self):
"""Run any device-specific cleanup steps."""
pass
def get_device_dirs(self):
""" Set the directories which will be used by the build steps.
These refer to paths on the same device where the test executables will
run, for example, for Android bots these are paths on the Android device
itself. For desktop bots, these are just local paths.
"""
join = lambda p: os.path.join(self._bot_info.build_dir, p)
return DeviceDirs(
dm_dir=os.path.join(self._bot_info.swarm_out_dir, 'dm'),
perf_data_dir=self._bot_info.perf_data_dir,
resource_dir=self._bot_info.resource_dir,
images_dir=join('images'),
skp_dir=self._bot_info.local_skp_dir,
tmp_dir=join('tmp'))
def __repr__(self):
return '<%s object>' % self.__class__.__name__ # pragma: no cover
|
Python
| 0.998793
|
@@ -2002,28 +2002,24 @@
f._bot_info.
-bot_
name and%0A
@@ -2052,20 +2052,16 @@
ot_info.
-bot_
name):%0A
|
675b87d5bc072d5b6fbd1f9a54ec61d98b1139ac
|
Add lab2 file.
|
lab2.py
|
lab2.py
|
Python
| 0
|
@@ -0,0 +1,596 @@
+# -*- coding: utf-8 -*-%0A%0Afrom math import pow%0A%0Adef mymap1(fun, l):%0A res = %5B%5D%0A for i in l:%0A res.append(fun(i))%0A return res%0A%0A#print(mymap1(str, %5B3, 1, 7, 4, 6, 9%5D))%0A%0A%0Adef mymap2(fun, *l):%0A res = %5B%5D%0A for i in zip(*l):%0A print(i)%0A res.append(fun(*i))%0A return res%0A%0A#print(mymap2(lambda a, b, c: a+b+c, %5B3, 1, 7, 4, 6%5D, %5B6, 3, 8, 5, 9%5D, %5B2, 5, 2, 4, 5%5D))%0A%0A%0Adef mymap3(fun, *l):%0A for i in zip(*l):%0A print(i)%0A yield fun(*i)%0A%0Aprint(mymap3(pow, %5B3, 1, 7, 4, 6, 9%5D, %5B1, 5, 7, 3, 2, 1%5D))%0Aprint(list(mymap3(pow, %5B3, 1, 7, 4, 6, 9%5D, %5B1, 5, 7, 3, 2, 1%5D)))%0A
|
|
252925fa998412ac868eb63790fbd515c429ac67
|
add main entry point (untested, but should be complete now)
|
main.py
|
main.py
|
Python
| 0
|
@@ -0,0 +1,2138 @@
+%22%22%22%0ACore namespace. Handles: %0A1. Call out to hashio to check hashes, save log, and return results%0A2. Load tweetlog and tweet creds%0A3. Generate and log tweets for changed files%0A4. Generate and log tweets for new files%0A4. Save tweetlog %0A%22%22%22%0A%0Aimport hash, hashio, twitter, json%0Afrom copy import deepcopy%0A%0Adef load_tweetlog():%0A%09try:%0A%09%09with open(%22tweetlog.json%22, 'r') as tl:%0A%09%09%09tweetlog = json.load(tl)%0A%09except FileNotFoundError:%0A%09%09tweetlog = %5B%5D%0A%09return tweetlog%0A%0Adef load_tweetcreds():%0A%09%22%22%22%0A%09I'm perfectly happy with this just throwing if there are no %0A%09twitter creds. Maybe for next version there can be %0A%09some kind of functionality to run with a no-twitter mode that %0A%09just dumps hashes to printable form to disk. %0A%09%22%22%22%0A%09with open(%22twittercreds.json%22) as tc:%0A%09%09creds = json.load(tc)%0A%09return creds%0A%0A## NEED TO CREATE TARGET ID 'uuid' IN ADDITIONS %0A## just use (str(uuid.uuid4())) which is 36 chars.%0A## put creation into hashio%0A## and remove length limitation from name since it shouldn't %0A## get tweeted. %0A## uuid = 36 + hash = 64 = 100, leaving 40 char for words etc.%0A%0Adef tweet_new_targets(newlist, tweetfn, tweetlog):%0A%09log = deepcopy(tweetlog)%0A%09for n in newlist:%0A%09%09tweet = %22Watching: %22 + n%5B%22uuid%22%5D + %22 hash: %22 + n%5B%22hash%22%5D + %22.%22%0A%09%09response = tweetfn(tweet)%0A%09%09response%5B%22uuid%22%5D = n%5B%22uuid%22%5D%0A%09%09response%5B%22hash%22%5D = n%5B%22hash%22%5D # just to facilitate searching%0A%09%09log.append(response)%0A%09return log%0A%0Adef tweet_changed_targets(changed, tweetfn, tweetlog):%0A%09log = deepcopy(tweetlog)%0A%09for c in changed:%0A%09%09tweet = %22CHANGED! 
%22 + n%5B%22uuid%22%5D + %22 new hash: %22 + n%5B%22hash%22%5D + %22.%22%0A%09%09response = tweetfn(tweet)%0A%09%09response%5B%22uuid%22%5D = n%5B%22uuid%22%5D%0A%09%09response%5B%22hash%22%5D = n%5B%22hash%22%5D # just to facilitate searching%0A%09%09log.append(response)%0A%09return log%0A%0Aif __name__ == %22__main__%22:%0A%09checked = hashio.check_from_file(%22targets.json%22)%0A%09tweetlog = load_tweetlog()%0A%09creds = load_tweetcreds()%0A%09post_tweet = twitter.twitter_poster(creds)%0A%09tweetlog = tweet_new_targets(checked%5B%22additions%22%5D, post_tweet, tweetlog)%0A%09tweetlog = tweet_changed_targets(checked%5B%22changes%22%5D, post_tweet, tweetlog)%0A%09with open('tweetlog.json', %22w%22) as tl:%0A%09%09json.dump(tweetlog, tl, sort_keys = True, indent = 4)
|
|
21028c13585fbcd5315efd74ab55f5d03d69c500
|
add probe nsrl
|
nsrl.py
|
nsrl.py
|
Python
| 0.000001
|
@@ -0,0 +1,1093 @@
+import hashlib%0Afrom pymongo import MongoClient%0Afrom lib.irma.common.exceptions import IrmaDatabaseError%0A%0Aclass NsrlInfo(object):%0A _uri = %22mongodb://localhost:27017/%22%0A _dbname = %22nsrl%22%0A _collection = %22hashset%22%0A%0A def __init__(self):%0A self._dbh = None%0A%0A def _connect(self):%0A try:%0A if not self._dbh:%0A print %22DEBUG: mongo connection%22%0A client = MongoClient(self._uri)%0A dbh = client%5Bself._dbname%5D%0A self._dbh = dbh%5Bself._collection%5D%0A except Exception as e:%0A raise IrmaDatabaseError(%22%7B0%7D%22.format(e))%0A%0A%0A def get_info(self, sha1):%0A try:%0A self._connect()%0A res = self._collection.find_one(%7B'SHA-1':sha1%7D, %7B'_id': False%7D)%0A if not res:%0A return 'Not found'%0A return res%0A except Exception as e:%0A raise IrmaDatabaseError(%22%7B0%7D%22.format(e))%0A%0Ansrlinfo = NsrlInfo()%0A%0Adef scan(sfile):%0A res = %7B%7D%0A sha1 = hashlib.sha1(sfile.data).hexdigest()%0A res%5B'result'%5D = nsrlinfo.get_info(sha1.upper())%0A return res%0A
|
|
f151d1cc5ddb3b60c6410153e147ccd5c0378904
|
Add Sequence object
|
oeis.py
|
oeis.py
|
Python
| 0.00001
|
@@ -0,0 +1,757 @@
+#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A%22%22%22%0Apy-oeis%0A%0AA Python library to access the OEIS.%0A%0ASumant Bhaskaruni%0Av0.1%0A%22%22%22%0A%0Aimport requests%0A%0A%0Aclass Sequence(object):%0A %22%22%22An object to represent a single OEIS sequence.%0A%0A Initializer arguments:%0A number (int): The OEIS sequence ID%0A %22%22%22%0A%0A def __init__(self, seq_id):%0A %22%22%22See class docstring for details.%22%22%22%0A%0A self.seq_id = seq_id%0A self.val_url = 'https://oeis.org/A%7B0:d%7D/b%7B0:d%7D.txt'.format(seq_id)%0A self.info = requests.get(%0A 'https://oeis.org/search?fmt=json&q=id:A%7B:d%7D'.format(%0A seq_id)).json()%5B'results'%5D%5B0%5D%0A%0A self.name = self.info%5B'name'%5D%0A self.author = self.info%5B'author'%5D%0A self.created = self.info%5B'created'%5D%0A
|
|
eb56d833efad16e9a84724d18121528177f37adb
|
add 41
|
p041.py
|
p041.py
|
Python
| 0.999998
|
@@ -0,0 +1,296 @@
+import utils%0A%0Aprimes = utils.primes(7654321)%0A%0Adef p(n):%0A sn = str(n)%0A lsn = len(sn)%0A%0A if lsn %3E 10:%0A return False%0A %0A return set(%5B int(d) for d in sn %5D) == set(range(1, len(sn) + 1))%0A%0A%0Aprimes.reverse()%0Afor prime in primes:%0A if p(prime):%0A print prime%0A break%0A%0A%0A
|
|
9efa33b28b86feaa204ebb84955022b7716a98ba
|
resolve conflicts
|
seqr/migrations/0057_merge_20190513_2009.py
|
seqr/migrations/0057_merge_20190513_2009.py
|
Python
| 0.00004
|
@@ -0,0 +1,333 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.20 on 2019-05-13 20:09%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('seqr', '0056_auto_20190513_1621'),%0A ('seqr', '0056_auto_20190424_2059'),%0A %5D%0A%0A operations = %5B%0A %5D%0A
|
|
ba8d38f278169b5d71e85e4d74a43fcd4a3892ae
|
Test decorator
|
99_misc/decorator.py
|
99_misc/decorator.py
|
Python
| 0.000002
|
@@ -0,0 +1,774 @@
+#/usr/bin/env python%0Adef my_func1(callback):%0A def func_wrapper(x):%0A print(%22my_func1: %7B0%7D %22.format(callback(x)))%0A return func_wrapper%0A%0A@my_func1%0Adef my_func2(x):%0A return x%0A%0A# Actuall call sequence is similar to:%0A# deco = my_func1(my_func2)%0A# deco(%22test%22) =%3E func_wrapper(%22test%22)%0Amy_func2(%22test%22)%0A%0A#-------------------------------------------%0A# Test decorator with parameter%0A%0Adef dec_param(param):%0A def my_func3(callback):%0A def func_wrapper(x):%0A print(%22my_func3: %7B0%7D %7B1%7D %22.format(param, callback(x)))%0A return func_wrapper%0A return my_func3%0A%0A@dec_param(%22tag%22)%0Adef my_func4(x):%0A return x%0A%0A# Actuall call sequence is similar to:%0A# deco = dec_pram(%22tag%22, my_func3(my_func4))%0A# deco(%22test%22) =%3E func_wrapper(%22test%22)%0Amy_func4(%22test%22)%0A
|
|
82d5856b09c42b09f857976075d40b6c6568a7c8
|
Create gate_chk_aws.py
|
gate_chk/gate_chk_aws.py
|
gate_chk/gate_chk_aws.py
|
Python
| 0.000003
|
@@ -0,0 +1,1679 @@
+#!/usr/bin/env python%0A%0A# -*- coding: utf-8 -*-%0A%0Aimport nfc%0Aimport spidev%0Aimport smbus%0Aimport re%0Aimport mysql.connector%0Aimport time%0A%0Adef getid(tag):%0A global id%0A a = '%25s' %25 tag%0A id = re.findall(%22ID=(%5B0-9A-F%5D*)%22,a)%5B0%5D%0A%0Acon = mysql.connector.connect(user=%E2%80%98xxxxxxxxxx', password=%E2%80%98xxxxxxxxxx', host=%E2%80%98xxxxxxxxxx-xxxxx-xxx.xxx.amazonaws.com', database='fablabkitakagaya', charset='utf8', ssl_ca='/home/pi/xxxxxxxxx.pem')%0Acursor = con.cursor()%0Aclf = nfc.ContactlessFrontend('usb')%0A%0Awhile (True):%0A print %22%E4%BC%9A%E5%93%A1%E3%82%AB%E3%83%BC%E3%83%89%E3%82%92%E3%81%8B%E3%81%96%E3%81%97%E3%81%A6%E4%B8%8B%E3%81%95%E3%81%84%E3%80%82%22%0A clf.connect(rdwr=%7B'on-connect': getid%7D)%0A sql = %22select now()%22%0A cursor.execute(sql)%0A now = cursor.fetchone()%5B0%5D%0A cardid = id%0A sql = %22select userid,validity from card where cardid = '%25s'%22 %25 cardid%0A cursor.execute(sql)%0A ans = cursor.fetchone()%0A try:%0A ans != None%0A userid = ans%5B0%5D%0A validity = ans%5B1%5D%0A if (validity ==0):%0A print %22Not a valid card !!%22%0A else:%0A print userid%0A sql = %22select start_at, end_at from riyou where userid = '%25s' and end_at is NULL%22 %25 userid%0A cursor.execute(sql)%0A ans = cursor.fetchone()%0A if (ans == None):%0A print %22Hello !%22%0A sql = %22insert into riyou(userid,start_at) values ('%25s','%25s')%22 %25 (userid, now)%0A cursor.execute(sql)%0A con.commit()%0A else:%0A print %22Bye !%22%0A sql = %22update riyou set end_at = '%25s' where userid = '%25s' and end_at is NULL%22 %25 (now, userid)%0A cursor.execute(sql)%0A con.commit()%0A except:%0A print %22Invalid card !!%22 %0A time.sleep(2)%0Acursor.close()%0Acon.close()%0A
|
|
870d30a0cb7788055cfc9c22854cdbe6293036fa
|
create class to list preset and metapreset
|
settingMod/PresetList.py
|
settingMod/PresetList.py
|
Python
| 0
|
@@ -0,0 +1,1280 @@
+#!/usr/bin/python3.4%0A# -*-coding:Utf-8 -*%0A'''module to manage preset list'''%0Aimport xml.etree.ElementTree as xmlMod%0Aimport os%0A%0Aclass PresetList:%0A%09'''class to manage preset list'''%0A%09%0A%09%0A%09def __init__(self, xml= None):%0A%09%09'''initialize preset list with default value or values extracted from an xml object'''%0A%09%09if xml is None:%0A%09%09%09self.defaultInit()%0A%09%09else:%0A%09%09%09self.fromXml(xml)%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09def defaultInit(self):%0A%09%09'''initialize preset list with default value'''%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09def fromXml(self, xml):%0A%09%09'''initialize preset list with values extracted from an xml object'''%0A%09%09%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09def toXml(self):%0A%09%09'''export preset list into xml syntaxed string'''%0A%09%09txt = '%3CpresetList%3E%5Cn'%0A%09%09%0A%09%09txt += '%3C/presetList%3E%5Cn'%0A%09%09return txt%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09def see(self, log, versions):%0A%09%09'''menu to explore and edit preset list settings'''%0A%09%09change = False%0A%09%09log.menuIn('preset list')%0A%09%09%0A%09%09while True:%0A%09%09%09os.system('clear')%0A%09%09%09log.print()%0A%09%09%09%0A%09%09%09self.print()%0A%09%09%09%0A%09%09%09print('''%5Cn%5Cn Menu :%0A0- Save and quit%0A%0A''')%0A%09%09%09%0A%09%09%09choice = input('Action?').strip().lower()%0A%09%09%09%0A%09%09%09if choice in %5B'0', 'q', 'quit', 'cancel'%5D:%0A%09%09%09%09log.menuOut()%0A%09%09%09%09return change%0A%09%09%09else:%0A%09%09%09%09log.error('Unvalid menu choice', False)%0A%09%09%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09def print(self):%0A%09%09'''a method to print preset list'''%0A%09%09%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09%0A%09%0A
|
|
430ca4b6a6f134346efaae430fac2bfaff195fe1
|
Add files via upload
|
1stANNrecoded2Python.py
|
1stANNrecoded2Python.py
|
Python
| 0
|
@@ -0,0 +1,582 @@
+#imports here: numpy, os, whatever I need%0A%0An = 1000%0Ae = (1.0 + 1.0/n)%5En%0A%0A#Instantiate a new layer with the number of neurons desired, give the neurons (Q: do neurons have separate values than their weights?) random values.%0Adef layerFactory(numberOfNeurons):%0A%0A%0A#create weights between layers (essentially, populate the first layer's weight matrix)%0Adef connectLayers(layer1, layer2):%0A%0A%0A#I don't think I'll have a network factory this time right away - there are too many variables. I will have a network object, but it'll just have%0A#a list of layers.%0A#I need the network class here.
|
|
5b098392cee7f6526947d45bfc620573c631e4cf
|
Create add-P67-wikidata-url
|
my-ACG/add-P67-wikidata-url/edit.py
|
my-ACG/add-P67-wikidata-url/edit.py
|
Python
| 0
|
@@ -0,0 +1,1142 @@
+# -*- coding: utf-8 -*-%0Aimport argparse%0Aimport csv%0Aimport os%0A%0Aos.environ%5B'PYWIKIBOT_DIR'%5D = os.path.dirname(os.path.realpath(__file__))%0Aimport pywikibot%0A%0A%0Asite = pywikibot.Site()%0Asite.login()%0Adatasite = site.data_repository()%0A%0A%0Adef addWikidataUrl(title, targettitle):%0A print(title)%0A%0A if title%5B0%5D == 'Q':%0A myitem = pywikibot.ItemPage(datasite, title)%0A url = 'https://www.wikidata.org/wiki/%7B%7D'.format(targettitle)%0A elif title%5B0%5D == 'P':%0A url = 'https://www.wikidata.org/wiki/Property:%7B%7D'.format(targettitle)%0A myitem = pywikibot.PropertyPage(datasite, title)%0A else:%0A print('%5Ct Not Wikibase page')%0A return%0A%0A new_claim = pywikibot.page.Claim(datasite, 'P67')%0A new_claim.setTarget(url)%0A print('%5Ct', new_claim)%0A myitem.addClaim(new_claim, summary='%E8%A8%AD%E5%AE%9A%E7%B6%AD%E5%9F%BA%E6%95%B8%E6%93%9A%E7%B6%B2%E5%9D%80')%0A%0A%0Adef main(filename):%0A with open(filename) as csvfile:%0A reader = csv.reader(csvfile)%0A for row in reader:%0A addWikidataUrl(row%5B0%5D, row%5B1%5D)%0A%0A%0Aif __name__ == %22__main__%22:%0A parser = argparse.ArgumentParser()%0A parser.add_argument('filename')%0A args = parser.parse_args()%0A main(args.filename)%0A
|
|
52fd7e5e6ae5ec6ab7de8a858fd2b132fe0d4081
|
Create CGOLprintToScreen.py
|
CGOLprintToScreen.py
|
CGOLprintToScreen.py
|
Python
| 0
|
@@ -0,0 +1,2606 @@
+import sys%0A%0Atiles_size = 64%0A%0A%0Aclass cell:%0A def __init__(self, location, alive=False):%0A self.alive = alive%0A self.location = location%0A%0A%0Aclass Rules:%0A def rule(self): # if alive%0A for i in range(tiles_size):%0A for j in range(tiles_size):%0A c = self.neighbourscounter(tile%5Bi%5D%5Bj%5D)%0A%0A if tile%5Bi%5D%5Bj%5D.alive:%0A if c != 2 or c != 3:%0A tile%5Bi%5D%5Bj%5D.alive = False%0A else:%0A tile%5Bi%5D%5Bj%5D.alive = True%0A%0A else:%0A if c == 3:%0A tile%5Bi%5D%5Bj%5D.alive = True%0A%0A def neighbourscounter(self, cell_): # Return the number of Neighbours alive#%0A c = 0%0A cell_loc = cell_.location%0A try:%0A if tile%5Babs(cell_loc%5B0%5D - 1)%5D%5Babs(cell_loc%5B1%5D - 1)%5D.alive:%0A c += 1%0A except Exception:%0A pass%0A%0A try:%0A if tile%5Babs(cell_loc%5B0%5D - 1)%5D%5Babs(cell_loc%5B1%5D)%5D.alive:%0A c += 1%0A except Exception:%0A pass%0A try:%0A if tile%5Babs(cell_loc%5B0%5D)%5D%5Babs(cell_loc%5B1%5D - 1)%5D.alive:%0A c += 1%0A except Exception:%0A pass%0A try:%0A if tile%5Babs(cell_loc%5B0%5D + 1)%5D%5Babs(cell_loc%5B1%5D - 1)%5D.alive:%0A c += 1%0A except Exception:%0A pass%0A try:%0A if tile%5Babs(cell_loc%5B0%5D + 1)%5D%5Babs(cell_loc%5B1%5D)%5D.alive:%0A c += 1%0A except Exception:%0A pass%0A try:%0A if tile%5Babs(cell_loc%5B0%5D - 1)%5D%5Babs(cell_loc%5B1%5D + 1)%5D.alive:%0A c += 1%0A except Exception:%0A pass%0A try:%0A if tile%5Babs(cell_loc%5B0%5D)%5D%5Babs(cell_loc%5B1%5D + 1)%5D.alive:%0A c += 1%0A except Exception:%0A pass%0A try:%0A if tile%5Babs(cell_loc%5B0%5D + 1)%5D%5Babs(cell_loc%5B1%5D + 1)%5D.alive:%0A c += 1%0A except Exception:%0A pass%0A%0A return c%0A%0A%0Atile = %5B%5D%0Afor i in range(tiles_size):%0A tile.append(%5B%5D)%0A for g in range(tiles_size):%0A tile%5Bi%5D.insert(g, cell((i, g)))%0A%0ARules = Rules()%0A%0A# Hardcode pattern.%0Atile %5B1%5D%5B1%5D.alive = True%0Atile %5B1%5D%5B2%5D.alive = True%0Atile %5B2%5D%5B1%5D.alive = True%0Atile %5B3%5D%5B1%5D.alive = True%0Atile %5B3%5D%5B2%5D.alive = 
True%0Atile %5B2%5D%5B2%5D.alive = True%0A%0A# Print too screen implementing rules.%0Afor a in range(0,3):%0A for i in range(tiles_size):%0A for g in range(tiles_size):%0A if tile%5Bi%5D%5Bg%5D.alive == True:%0A sys.stdout.write(%22X%22)%0A else:%0A sys.stdout.write(%22O%22)%0A print()%0A print()%0A print()%0A Rules.rule()%0A
|
|
9b5f070705de9896c8c6f8347dc0f733ae748793
|
Add harvesting blog data example
|
harvesting_blog_data.py
|
harvesting_blog_data.py
|
Python
| 0
|
@@ -0,0 +1,709 @@
+import os%0Aimport sys%0Aimport json%0Aimport feedparser%0Afrom bs4 import BeautifulSoup%0A%0AFEED_URL = 'http://g1.globo.com/dynamo/rss2.xml'%0A%0Adef cleanHtml(html):%0A return BeautifulSoup(html, 'lxml').get_text()%0A%0Afp = feedparser.parse(FEED_URL)%0A%0Aprint %22Fetched %25s entries from '%25s'%22 %25 (len(fp.entries%5B0%5D.title), fp.feed.title)%0A%0Ablog_posts = %5B%5D%0Afor e in fp.entries:%0A blog_posts.append(%7B'title': e.title,%0A 'published': e.published,%0A 'summary': cleanHtml(e.summary),%0A 'link': e.link%7D)%0A%0Aout_file = os.path.join('./', 'feed.json')%0A%0Af = open(out_file, 'w')%0Af.write(json.dumps(blog_posts, indent=1))%0Af.close()%0A%0Aprint 'Wrote output file to %25s' %25 (f.name, )%0A
|
|
f40788bdc60566fc15a7abb46bfca61bb9131823
|
Test update
|
test.py
|
test.py
|
Python
| 0
|
@@ -0,0 +1,100 @@
+#!/usr/bin/env python%0A%0A%0Adef main():%0A print %22Hello world%22%0A%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
b07ca938d68dff3386007885a6da4f5b2e593941
|
Add prototype
|
test.py
|
test.py
|
Python
| 0.000001
|
@@ -0,0 +1,1351 @@
+#!/usr/bin/python%0A%0Afrom construct import *%0Aimport sys%0A%0Adef align4(n):%0A%09return n + ((n+4) %25 4)%0A%0Achunk_atom = Struct(%22chunk_atom%22,%0A%09UBInt32(%22len%22),%0A%09Array(lambda ctx: ctx.len, PascalString(%22atom%22))%0A%09)%0A%0Achunk_expt = Struct(%22chunk_expt%22,%0A%09UBInt32(%22len%22),%0A%09Array(lambda ctx: ctx.len, Struct(%22entry%22,%0A%09%09%09UBInt32(%22function%22),%0A%09%09%09UBInt32(%22arity%22),%0A%09%09%09UBInt32(%22label%22),%0A%09%09)%0A%09)%0A%09)%0A%0Achunk_impt = Struct(%22chunk_impt%22,%0A%09UBInt32(%22len%22),%0A%09Array(lambda ctx: ctx.len, Struct(%22entry%22,%0A%09%09%09UBInt32(%22module%22),%0A%09%09%09UBInt32(%22function%22),%0A%09%09%09UBInt32(%22arity%22),%0A%09%09)%0A%09)%0A%09)%0A%0Achunk_loct = Struct(%22chunk_loct%22,%0A%09UBInt32(%22len%22),%0A%09Array(lambda ctx: ctx.len, Struct(%22entry%22,%0A%09%09%09UBInt32(%22function%22),%0A%09%09%09UBInt32(%22arity%22),%0A%09%09%09UBInt32(%22label%22),%0A%09%09)%0A%09)%0A%09)%0A%0Achunk = Struct(%22chunk%22,%0A%09String(%22chunk_name%22,4),%0A%09UBInt32(%22size%22),%0A%09Switch(%22payload%22, lambda ctx: ctx.chunk_name,%0A%09%09%7B%0A%09%09%22Atom%22 : chunk_atom,%0A%09%09%22ExpT%22 : chunk_expt,%0A%09%09%22ImpT%22 : chunk_impt,%0A#%09%09%22Code%22 : chunk_code,%0A#%09%09%22StrT%22 : chunk_strt,%0A#%09%09%22Attr%22 : chunk_attr,%0A#%09%09%22CInf%22 : chunk_cinf,%0A%09%09%22LocT%22 : chunk_loct,%0A#%09%09%22Trac%22 : chunk_trac,%0A%09%09%7D,%0A%09%09default = String(%22skip%22, lambda ctx: align4(ctx.size))%0A%09),%0A%09)%0A%0Abeam_construct = Struct(%22beam%22,%0A%09OneOf(String('for1',4),%5B'FOR1'%5D),%0A%09UBInt32(%22size%22),%0A%09OneOf(String('beam',4),%5B'BEAM'%5D),%0A%09GreedyRange(chunk),%0A%09)%0A%0Afilename = sys.argv%5B1%5D%0Abeam = file(filename,%22r%22).read()%0Aprint beam_construct.parse(beam)%0A
|
|
d5aecde4806a130550786f21f8fdd13c27996e16
|
add test.py and copyright comments
|
test.py
|
test.py
|
Python
| 0
|
@@ -0,0 +1,342 @@
+# encoding: utf-8%0Afrom toPersian import *%0A%0Aprint enToPersianNumb('%D8%B4%D9%85%D8%A7%D8%B1%D9%87 %DA%A9%D9%84%D8%A7%D8%B3 312')%0Aprint enToPersianNumb(3123123.9012)%0Aprint enToPersianNumb(123)%0Aprint enToPersianchar('sghl %5Di ofv')%0Aprint arToPersianNumb('%D9%A3%D9%A4%D9%A5%D9%A6')%0Aprint arToPersianChar(' %D9%83 %D8%AC%D9%85%D9%87%D9%88%D8%B1%D9%8A %D8%A7%D8%B3%D9%84%D8%A7%D9%85%D9%8A %D8%A7%D9%8A%D8%B1%D8%A7%D9%86')%0A%0A'''%0A%D8%B4%D9%85%D8%A7%D8%B1%D9%87 %DA%A9%D9%84%D8%A7%D8%B3 %DB%B3%DB%B1%DB%B2%0A%DB%B3%DB%B1%DB%B2%DB%B3%DB%B1%DB%B2%DB%B3.%DB%B9%DB%B0%DB%B1%DB%B2%0A%DB%B1%DB%B2%DB%B3%0A%D8%B3%D9%84%D8%A7%D9%85 %DA%86%D9%87 %D8%AE%D8%A8%D8%B1%0A%DB%B3%DB%B4%DB%B5%DB%B6%0A %DA%A9 %D8%AC%D9%85%D9%87%D9%88%D8%B1%DB%8C %D8%A7%D8%B3%D9%84%D8%A7%D9%85%DB%8C %D8%A7%DB%8C%D8%B1%D8%A7%D9%86%0A'''
|
|
3a160d3aed9d5eb7cebe2427f9009b4e0e2f07c4
|
return doi resolver url instead of doi resolver name
|
searx/plugins/oa_doi_rewrite.py
|
searx/plugins/oa_doi_rewrite.py
|
from flask_babel import gettext
import re
from searx.url_utils import urlparse, parse_qsl
from searx import settings
regex = re.compile(r'10\.\d{4,9}/[^\s]+')
name = gettext('Open Access DOI rewrite')
description = gettext('Avoid paywalls by redirecting to open-access versions of publications when available')
default_on = False
preference_section = 'privacy'
doi_resolvers = settings['doi_resolvers']
def extract_doi(url):
match = regex.search(url.path)
if match:
return match.group(0)
for _, v in parse_qsl(url.query):
match = regex.search(v)
if match:
return match.group(0)
return None
def get_doi_resolver(args, preference_doi_resolver):
doi_resolvers = settings['doi_resolvers']
doi_resolver = args.get('doi_resolver', preference_doi_resolver)[0]
if doi_resolver not in doi_resolvers:
doi_resolvers = settings['default_doi_resolver']
return doi_resolver
def on_result(request, search, result):
doi = extract_doi(result['parsed_url'])
if doi and len(doi) < 50:
for suffix in ('/', '.pdf', '/full', '/meta', '/abstract'):
if doi.endswith(suffix):
doi = doi[:-len(suffix)]
result['url'] = get_doi_resolver(request.args, request.preferences.get_value('doi_resolver')) + doi
result['parsed_url'] = urlparse(result['url'])
return True
|
Python
| 0.003718
|
@@ -913,16 +913,68 @@
olver'%5D%0A
+ doi_resolver_url = doi_resolvers%5Bdoi_resolver%5D%0A
retu
@@ -988,16 +988,20 @@
resolver
+_url
%0A%0A%0Adef o
|
995e35c2a66fd51f9216ed5acc829bac0ac3ddeb
|
add i3-debug-console script to examples
|
examples/i3-debug-console.py
|
examples/i3-debug-console.py
|
Python
| 0.000001
|
@@ -0,0 +1,1438 @@
+#!/usr/bin/env python3%0A%0Aimport i3ipc%0Afrom curses import wrapper%0Afrom threading import Timer%0A%0Adef con_type_to_text(con):%0A if con.type != 'con':%0A return con.type%0A if len(con.nodes):%0A return 'container'%0A else:%0A return 'view'%0A%0Adef layout_txt(con):%0A if con.layout == 'splith':%0A return 'HORIZ'%0A elif con.layout == 'splitv':%0A return 'VERT'%0A else:%0A return ''%0A%0Adef container_to_text(con, indent):%0A t = con_type_to_text(con)%0A txt = (' ' * indent) + '('%0A txt += t + ' ' + layout_txt(con)%0A%0A if con.focused:%0A txt += ' focus'%0A%0A has_children = len(con.nodes)%0A%0A for c in con.nodes:%0A txt += '%5Cn'%0A txt += container_to_text(c, indent + 4)%0A%0A if has_children:%0A txt += '%5Cn' + (' ' * indent)%0A%0A txt += ')'%0A%0A return txt%0A%0Alast_txt = ''%0A%0Adef main(stdscr):%0A ipc = i3ipc.Connection()%0A%0A def on_event(ipc, e):%0A txt = ''%0A for ws in ipc.get_tree().workspaces():%0A txt += container_to_text(ws, 0) + '%5Cn'%0A%0A global last_txt%0A if txt == last_txt:%0A return%0A%0A stdscr.clear()%0A for l in txt:%0A try:%0A stdscr.addstr(l)%0A except Exception:%0A break%0A stdscr.refresh()%0A last_txt = txt%0A%0A on_window(ipc, None)%0A%0A ipc.on('window', on_event)%0A ipc.on('binding', on_event)%0A ipc.on('workspace', on_event)%0A%0A ipc.main()%0A%0Awrapper(main)%0A
|
|
1524a8fd55c682bd8b77b52b9d2d5e5c030c9d2d
|
Add first tests
|
test/sciluigi_test.py
|
test/sciluigi_test.py
|
Python
| 0.000001
|
@@ -0,0 +1,489 @@
+import sciluigi%0Afrom nose.tools import with_setup%0A%0A# Make these variables global%0A#shell_task = None%0A%0Adef setup():%0A global shell_task%0A shell_task = sciluigi.shell(%22cat %3Ci:input%3E %3E %3Co:output:out.txt%3E%22)%0A return shell_task%0A%0Adef teardown():%0A global shell_task%0A shell_task = None%0A%0A@with_setup(setup, teardown)%0Adef test_inports_nonempty():%0A assert len(shell_task.inports) == 1%0A%0A@with_setup(setup, teardown)%0Adef test_outports_nonempty():%0A assert len(shell_task.outports) == 1%0A
|
|
6f3c212a3e376bed93d0a65722486ba36f432ae7
|
Install pip_requirements_dev to DEV environment as well as SOLO.
|
buedafab/environments.py
|
buedafab/environments.py
|
"""Application environments, which determine the servers, database and other
conditions for deployment.
"""
from fabric.api import require, env
import os
from buedafab import aws
def _not_localhost():
"""All non-localhost environments need to install the "production" pip
requirements, which typically includes the Python database bindings.
"""
if (hasattr(env, 'pip_requirements')
and hasattr(env, 'pip_requirements_production')):
env.pip_requirements += env.pip_requirements_production
def development():
"""[Env] Development server environment
- Sets the hostname of the development server (using the default ssh port)
- Sets the app environment to "DEV"
- Permits developers to deploy without creating a tag in git
"""
_not_localhost()
if len(env.hosts) == 0:
env.hosts = ['dev.bueda.com:%(ssh_port)d' % env]
env.allow_no_tag = True
env.deployment_type = "DEV"
def staging():
"""[Env] Staging server environment
- Sets the hostname of the staging server (using the default ssh port)
- Sets the app environment to "STAGING"
- Permits developers to deploy without creating a tag in git
- Appends "-staging" to the target directory to allow development and
staging servers to be the same machine
"""
_not_localhost()
if len(env.hosts) == 0:
env.hosts = ['dev.bueda.com:%(ssh_port)d' % env]
env.allow_no_tag = True
env.deployment_type = "STAGING"
env.path += '-staging'
def production():
"""[Env] Production servers. Stricter requirements.
- Collects production servers from the Elastic Load Balancer specified by
the load_balancer env attribute
- Sets the app environment to "PRODUCTION"
- Requires that developers deploy from the 'master' branch in git
- Requires that developers tag the commit in git before deploying
"""
_not_localhost()
env.allow_no_tag = False
env.deployment_type = "PRODUCTION"
if hasattr(env, 'load_balancer'):
if len(env.hosts) == 0:
env.hosts = aws.collect_load_balanced_instances()
env.default_revision = '%(master_remote)s/master' % env
def localhost(deployment_type=None):
"""[Env] Bootstrap the localhost - can be either dev, production or staging.
We don't really use this anymore except for 'fab setup', and even there it
may not be neccessary. It was originally intended for deploying
automatically with Chef, but we moved away from that approach.
"""
require('root_dir')
if len(env.hosts) == 0:
env.hosts = ['localhost']
env.allow_no_tag = True
env.deployment_type = deployment_type
env.virtualenv = os.environ.get('VIRTUAL_ENV', 'env')
if deployment_type is None:
deployment_type = "SOLO"
env.deployment_type = deployment_type
if env.deployment_type == "STAGING":
env.path += '-staging'
if (hasattr(env, 'pip_requirements')
and hasattr(env, 'pip_requirements_dev')):
env.pip_requirements += env.pip_requirements_dev
def django_development():
"""[Env] Django development server environment
In addition to everything from the development() task, also:
- loads any database fixtures named "dev"
- loads a crontab from the scripts directory (deprecated at Bueda)
"""
development()
env.extra_fixtures += ["dev"]
env.crontab = os.path.join('scripts', 'crontab', 'development')
def django_staging():
"""[Env] Django staging server environment
In addition to everything from the staging() task, also:
- loads a production crontab from the scripts directory (deprecated at
Bueda)
"""
staging()
env.crontab = os.path.join('scripts', 'crontab', 'production')
def django_production():
"""[Env] Django production server environment
In addition to everything from the production() task, also:
- loads a production crontab from the scripts directory (deprecated at
Bueda)
"""
production()
env.crontab = os.path.join('scripts', 'crontab', 'production')
|
Python
| 0
|
@@ -941,16 +941,169 @@
= %22DEV%22
+%0A if (hasattr(env, 'pip_requirements')%0A and hasattr(env, 'pip_requirements_dev')):%0A env.pip_requirements += env.pip_requirements_dev
%0A%0Adef st
|
ed63c9c828cc609d82eb5afb21f6e24b358bc3cf
|
Add DoubleLinkedQueue
|
DoubleLinkedQueue.py
|
DoubleLinkedQueue.py
|
Python
| 0.000001
|
@@ -0,0 +1,1772 @@
+class _DoubleLinkedList:%0A class _Node:%0A __slots__ = '_element', '_prev', '_next'%0A%0A def __init__(self, element, prev, next):%0A self._element = element%0A self._prev = prev%0A self._next = next%0A%0A def __init__(self):%0A self.header = self._Node(None, None, None)%0A self.tailer = self._Node(None, None, None)%0A self.header._next = self.tailer%0A self.tailer._prev = self.header%0A self._size = 0%0A%0A def __len__(self):%0A return self._size%0A%0A def is_empty(self):%0A return self._size == 0%0A%0A def insert_between(self, e, prev, next):%0A node = self._Node(e, prev, next)%0A prev._next = node%0A next._prev = node%0A self._size += 1%0A return node%0A%0A def delete_node(self, node):%0A node._prev._next = node._next%0A node._next._prev = node._prev%0A self._size -= 1%0A e = node._element%0A node._prev = node._next = node._element = None%0A return e%0A%0Aclass DoubleLinedQueue(_DoubleLinkedList):%0A def first(self):%0A if self.is_empty():%0A return None%0A else:%0A return self.header._next%0A %0A def last(self):%0A if self.is_empty():%0A return None%0A else:%0A return self.tailer._prev%0A%0A def insert_first(self, e):%0A self.insert_between(e, self.header, self.header._next)%0A%0A def insert_last(self, e):%0A self.insert_between(e, self.tailer._prev, self.tailer)%0A%0A def delete_first(self):%0A if self.is_empty():%0A return None%0A else:%0A return self.delete_node(self.header._next)%0A%0A def delete_last(self):%0A if self.is_empty():%0A return None%0A else:%0A return self.delete_node(self.tailer._prev)%0A
|
|
42f66ea6e1921040d6e3055c41372b02511e6a5a
|
Add directory for CYK tests
|
tests/CYK/__init__.py
|
tests/CYK/__init__.py
|
Python
| 0
|
@@ -0,0 +1,113 @@
+#!/usr/bin/env python%0A%22%22%22%0A:Author Patrik Valkovic%0A:Created 31.08.2017 14:50%0A:Licence GNUv3%0APart of pyparsers%0A%0A%22%22%22
|
|
4dc6462a0a8231ba4ffca09d5c9546d8b6d0dd6f
|
Fix bug in config.
|
DIE/Lib/DieConfig.py
|
DIE/Lib/DieConfig.py
|
import logging
import os
import ConfigParser
import idaapi
import yaml
from attrdict import AttrMap
class DIEConfig(object):
DEFAULT = os.path.join(os.path.dirname(__file__), "config.yml")
def __init__(self):
with open(self.DEFAULT, "rb") as f:
default = yaml.safe_load(f)
self._config = AttrMap(default)
@property
def install_path(self):
return os.path.normpath(os.path.join(os.path.dirname(__file__), ".."))
@property
def icons_path(self):
return self.install_path + "\\Icons"
@property
def parser_path(self):
return self.install_path + "\\Plugins\\DataParsers"
def load(self, path):
with open(path, "rb") as f:
custom = yaml.safe_load(f)
self._config.update(custom)
def save(self, path):
with open(path, "wb") as f:
yaml.safe_dump(dict(self._config), f, default_flow_style=False)
def __getattr__(self, name):
return getattr(self._config, name)
def __setattr__(self, name, value):
if name.startswith("_"):
return super(DIEConfig, self).__setattr__(name, value)
return setattr(self._config, name, value)
#############################################################################
# Singleton
#############################################################################
_config_parser = None
def get_config():
"""
Return a singleton instance of the global configuration object
"""
return _config_parser
def initialize():
global _config_parser
_config_parser = DIEConfig()
|
Python
| 0
|
@@ -761,28 +761,141 @@
-self._config
+custom = AttrMap(custom)%0A%0A for attr in self._config:%0A if attr in custom:%0A self._config%5Battr%5D
.update(
@@ -896,25 +896,32 @@
pdate(custom
-)
+%5Battr%5D)%0A
%0A%0A def sa
|
5ac4f0be3f9f1179a50670989915bae0d3ae157e
|
Add globals.ffmpeg module to retrieve ffmpeg executable
|
source/globals/ffmpeg.py
|
source/globals/ffmpeg.py
|
Python
| 0.000001
|
@@ -0,0 +1,527 @@
+# -*- coding: utf-8 -*-%0A%0A## %5Cpackage globals.ffmpeg%0A# %0A# Retrieves the FFmpeg executable%0A%0A# MIT licensing%0A# See: LICENSE.txt%0A%0A%0Aimport subprocess%0Afrom subprocess import PIPE%0Afrom subprocess import STDOUT%0A%0A%0Adef GetExecutable(cmd):%0A sp = subprocess.Popen(%5Bu'which', cmd,%5D, stdout=PIPE, stderr=STDOUT)%0A output, returncode = sp.communicate()%0A %0A print(u'Return code: %7B%7D%5CnSTDOUT: %7B%7D'.format(returncode, output))%0A %0A if returncode:%0A return None%0A %0A return output%0A%0A%0ACMD_ffmpeg = GetExecutable(u'ffmpeg')%0A%0A
|
|
4ae114dd1da8118cc9d2ee87e30f5e0a1f3324f2
|
Add some tests for monitor class
|
tests/test_monitor.py
|
tests/test_monitor.py
|
Python
| 0
|
@@ -0,0 +1,2285 @@
+import unittest%0Aimport Monitors.monitor%0A%0A%0Aclass TestMonitor(unittest.TestCase):%0A%0A safe_config = %7B'partition': '/', 'limit': '10G'%7D%0A%0A one_KB = 1024%0A one_MB = one_KB * 1024%0A one_GB = one_MB * 1024%0A one_TB = one_GB * 1024%0A%0A def test_MonitorInit(self):%0A m = Monitors.monitor.Monitor(config_options=%7B%0A 'depend': 'a, b',%0A 'urgent': 0,%0A 'tolerance': 2,%0A 'remote_alert': 1,%0A 'recover_command': 'true'%0A %7D)%0A self.assertEqual(m.name, 'unnamed', 'Monitor did not set name')%0A self.assertEqual(m.urgent, 0, 'Monitor did not set urgent')%0A self.assertEqual(m.tolerance, 2, 'Monitor did not set tolerance')%0A self.assertTrue(m.remote_alerting, 'Monitor did not set remote_alerting')%0A self.assertEqual(m.recover_command, 'true', 'Monitor did not set recover_command')%0A%0A def test_MonitorSuccess(self):%0A m = Monitors.monitor.Monitor()%0A m.record_success('yay')%0A self.assertEqual(m.get_error_count(), 0, 'Error count is not 0')%0A self.assertEqual(m.get_success_count(), 1, 'Success count is not 1')%0A self.assertEqual(m.tests_run, 1, 'Tests run is not 1')%0A self.assertFalse(m.was_skipped, 'was_skipped is not false')%0A self.assertEqual(m.last_result, 'yay', 'Last result is not correct')%0A%0A def test_MonitorFail(self):%0A m = Monitors.monitor.Monitor()%0A m.record_fail('boo')%0A self.assertEqual(m.get_error_count(), 1, 'Error count is not 1')%0A self.assertEqual(m.get_success_count(), 0, 'Success count is not 0')%0A self.assertEqual(m.tests_run, 1, 'Tests run is not 1')%0A self.assertFalse(m.was_skipped, 'was_skipped is not false')%0A self.assertEqual(m.last_result, 'boo', 'Last result is not correct')%0A%0A def test_MonitorWindows(self):%0A m = Monitors.monitor.Monitor()%0A self.assertFalse(m.is_windows())%0A%0A def test_MonitorSkip(self):%0A m = Monitors.monitor.Monitor()%0A m.record_skip('a')%0A self.assertEqual(m.get_success_count(), 1, 'Success count is not 1')%0A self.assertTrue(m.was_skipped, 'was_skipped is not true')%0A 
self.assertEqual(m.skip_dep, 'a', 'skip_dep is not correct')%0A self.assertTrue(m.skipped(), 'skipped() is not true')%0A%0A
|
|
b0f0ee685ca525de90fdcd5a57a203c8b42b936a
|
test for the bootstrap
|
tests/install_test.py
|
tests/install_test.py
|
Python
| 0.000001
|
@@ -0,0 +1,905 @@
+import urllib2%0Aimport sys%0Aimport os%0A%0Aprint '**** Starting Test'%0Aprint '%5Cn%5Cn'%0A%0Ais_jython = sys.platform.startswith('java')%0Aif is_jython:%0A import subprocess%0A%0Aprint 'Downloading bootstrap'%0Afile = urllib2.urlopen('http://nightly.ziade.org/bootstrap.py')%0Af = open('bootstrap.py', 'w')%0Af.write(file.read())%0Af.close()%0A%0A# running it%0Aargs = %5Bsys.executable%5D + %5B'bootstrap.py'%5D%0Aif is_jython:%0A subprocess.Popen(%5Bsys.executable%5D + args).wait()%0Aelse:%0A os.spawnv(os.P_WAIT, sys.executable, args)%0A%0A# now checking if Distribute is installed%0A%0Aargs = %5Bsys.executable%5D + %5B'-c', 'import setuptools; import sys; sys.exit(hasattr(setuptools, %22_distribute%22))'%5D%0Aif is_jython:%0A res = subprocess.call(%5Bsys.executable%5D + args)%0Aelse:%0A res = os.spawnv(os.P_WAIT, sys.executable, args)%0A%0Aprint '%5Cn%5Cn'%0Aif res:%0A print '**** Test is OK'%0Aelse:%0A print '**** Test failed, please send me the output at tarek@ziade.org'%0A%0A
|
|
f9b2bba394ad6ce31ffae5cf6ccf445dc280ba95
|
Solve C Mais ou Menos? in python
|
solutions/beecrowd/2486/2486.py
|
solutions/beecrowd/2486/2486.py
|
Python
| 1
|
@@ -0,0 +1,743 @@
+import sys%0A%0AMIN_VITAMIN_C = 110%0AMAX_VITAMIN_C = 130%0A%0Avitamin_c_catalogue = %7B%0A 'suco de laranja': 120,%0A 'morango fresco': 85,%0A 'mamao': 85,%0A 'goiaba vermelha': 70,%0A 'manga': 56,%0A 'laranja': 50,%0A 'brocolis': 34,%0A%7D%0A%0Afor test in sys.stdin:%0A t = int(test)%0A%0A if not t:%0A break%0A%0A total_c_vitamin = 0%0A%0A for _ in range(t):%0A line = input()%0A n, food = line.split(' ', 1)%0A n = int(n)%0A%0A total_c_vitamin += n * vitamin_c_catalogue%5Bfood%5D%0A%0A if total_c_vitamin %3C MIN_VITAMIN_C:%0A print(f'Mais %7BMIN_VITAMIN_C - total_c_vitamin%7D mg')%0A elif total_c_vitamin %3E MAX_VITAMIN_C:%0A print(f'Menos %7Btotal_c_vitamin - MAX_VITAMIN_C%7D mg')%0A else:%0A print(f'%7Btotal_c_vitamin%7D mg')%0A
|
|
076fa3fcc50c9c9b236fc3e35e4e32f77f9fadbb
|
Fix power_spectrum tests
|
tests/test__signal.py
|
tests/test__signal.py
|
import numpy as np
from acoustics import Signal
import pytest
import itertools as it
#def test_operator():
#n = 10000
#fs = 5000
class TestSignal():
@pytest.fixture(params=[(1, 88200, 22050), (3, 88200, 22050), (3, 88200, 44100)])
def signal(self, request):
return Signal(np.random.randn(request.param[0], request.param[1]), request.param[2])
def test_spectrum(self, signal):
freq, power = signal.spectrum()
def test_octaves(self, signal):
freq, octaves = signal.octaves()
def test_levels(self, signal):
times, levels = signal.levels()
def test_leq(self, signal):
#s = Signal(np.random.randn(10000), 22050)
leq = signal.leq()
assert(type(leq) is np.ndarray)
## Plot methods with arguments to test.
#plot_methods = {'plot' : None,
#'plot_levels' : {
#'time' : [None, 0.125, 1.0],
#'method' : ['average', 'weighting'],
#},
#'plot_octaves' : None,
#'plot_third_octaves' : None,
#'plot_fractional_octaves' : {
#'fraction' : [3, 6]
#},
#'plot_spectrum' : {
#'N' : [None, 8000]
#},
#}
#@pytest.yield_fixture
#def plot_function_with_argument(self):
## This won't work with pytest. Apparently they do teardown after the yield
## statement and therefore don't support multiple yield statements.
## Using a closure doesn't help either.
#for func, arguments in self.plot_methods.items():
#if arguments is not None:
#for prod in it.product(*arguments.values()):
#yield (func, dict(zip(arguments.keys(), prod)))
#else:
#yield (func, None)
#def test_plot_functions(self, signal, plot_function_with_argument):
#func, arguments = plot_function_with_argument
#if arguments is None:
#getattr(signal, func)()
#else:
#getattr(signal, func)(**arguments)
def test_plot(self, signal):
signal.plot()
def test_plot_levels(self, signal):
signal.plot_levels()
signal.plot_levels(method='average', time=1.0)
signal.plot_levels(method='weighting', time=1.0)
def test_plot_octaves(self, signal):
signal.plot_octaves()
def test_plot_third_octaves(self, signal):
signal.plot_third_octaves()
def test_plot_fractional_octaves(self, signal):
signal.plot_fractional_octaves(3)
signal.plot_fractional_octaves(6)
signal.plot_fractional_octaves(9)
def plot_spectrum(self, signal):
signal.plot_spectrum()
def test_spectrogram(self, signal):
if signal.channels > 1:
with pytest.raises(ValueError):
signal.spectrogram()
else:
signal.spectrogram()
|
Python
| 0.000009
|
@@ -397,24 +397,30 @@
def test_
+power_
spectrum(sel
@@ -465,24 +465,30 @@
er = signal.
+power_
spectrum()%0A
@@ -3085,24 +3085,30 @@
def plot_
+power_
spectrum(sel
@@ -3135,24 +3135,30 @@
signal.plot_
+power_
spectrum()%0A
|
96c8d93cf1b6a01e867ca8250fee4dea5e870c79
|
Add files via upload
|
4ChanWebScraper.py
|
4ChanWebScraper.py
|
Python
| 0
|
@@ -0,0 +1,1176 @@
+import requests%0Aimport os%0Aimport sys%0Aimport re%0Afrom BeautifulSoup import BeautifulSoup%0Afrom PIL import Image%0Afrom StringIO import StringIO%0A%0A# try: %0A# opts, args = getopt.getopt(argv, %22u:%22, %5B%22url=%22%5D) %0A# except getopt.GetoptError: %0A# print('usage: python 4ChanWebScraper.py -u=%3Curl%3E') %0A# sys.exit(2) %0A%0Aurl = sys.argv%5B1%5D%0Aprint('Attempting to capture ' + url)%0Aresponse = requests.get(url)%0Ahtml = response.content%0A%0Asoup = BeautifulSoup(html)%0A%0AfolderName = soup.title.string.replace('/','')%0A%0Aprint('Capturing ' + folderName)%0A%0Aif not os.path.exists('./' + folderName):%0A os.makedirs('./'+folderName)%0A print('created folder '+ folderName)%0Aelse:%0A print('folder already exists')%0A%0Afor link in soup.findAll('a', 'fileThumb'):%0A imageName = link.get('href')%0A print('Getting ' + imageName)%0A fileName = re.search('%5Cd+%5C.%5Cw+$',imageName)%0A savePath = './'+ folderName +'/' + fileName.group(0)%0A print('saving:' + savePath)%0A img = requests.get('http:' + imageName)%0A i = Image.open(StringIO(img.content))%0A i.save(savePath)%0A%0A%0A# for thumb in table.findAll('fileThumb'):%0A# print row.text
|
|
67c90811afb47fa57af6b61b894e6efd78fa699c
|
Find a key within a dictionary
|
python/reddit/find_my_key.py
|
python/reddit/find_my_key.py
|
Python
| 0.999718
|
@@ -0,0 +1,807 @@
+def find_key(info, key):%0A value = -1%0A if isinstance(info, dict):%0A if key in info:%0A print('Found %7B%7D in %7B%7D'.format(key, info))%0A return info.get(key)%0A else:%0A for element in info:%0A print('Testing element %7B%7D'.format(element))%0A value = find_key(info.get(element), key)%0A if value != -1:%0A return value%0A return value%0A%0Adef main():%0A tests = %5B%0A %7B%0A 'a': 1,%0A 'e': 5,%0A %7D,%0A %7B%0A 'a': 1,%0A 'b': 2,%0A 'c': %7B'd': 4, 'e': 5%7D,%0A 'f': 6,%0A %7D,%0A %7B%0A 'a': 1,%0A 'b': 2,%0A %7D,%0A %5D%0A for test in tests:%0A print('Test: %7B%7D, Value for e: %7B%7D'.format(test, find_key(test, 'e')))%0A%0Amain()%0A
|
|
27fca35a08278a44bb7ba693f222c6c182061872
|
Add the enemy file and start it up.
|
Enemy.py
|
Enemy.py
|
Python
| 0
|
@@ -0,0 +1,253 @@
+import pygame%0A%0Aclass Enemy(pygame.sprite.Sprite):%0A%0A def __init__(self, x, y):%0A super().__init__()%0A self.image = pygame.image.load(%22images/enemy.png%22).convert_alpha()%0A self.rect = self.image.get_rect(center=(x, y))%0A%0A def %0A%0A
|
|
982cd61d7532365d9de56b308c7a4d8308302c15
|
Add a test to demonstrate issue with django 1.11
|
tests/testapp/tests/test_model_create_with_generic.py
|
tests/testapp/tests/test_model_create_with_generic.py
|
Python
| 0
|
@@ -0,0 +1,1177 @@
+try:%0A from django.contrib.contenttypes.fields import GenericForeignKey%0Aexcept ImportError:%0A # Django 1.6%0A from django.contrib.contenttypes.generic import GenericForeignKey%0Afrom django.contrib.contenttypes.models import ContentType%0Afrom django.db import models%0Afrom django.test import TestCase%0Afrom django_fsm import FSMField, transition%0A%0A%0Aclass Ticket(models.Model):%0A%0A class Meta:%0A app_label = 'testapp'%0A%0A%0Aclass Task(models.Model):%0A class STATE:%0A NEW = 'new'%0A DONE = 'done'%0A%0A content_type = models.ForeignKey(ContentType)%0A object_id = models.PositiveIntegerField()%0A causality = GenericForeignKey('content_type', 'object_id')%0A state = FSMField(default=STATE.NEW)%0A%0A @transition(field=state, source=STATE.NEW, target=STATE.DONE)%0A def do(self):%0A pass%0A%0A class Meta:%0A app_label = 'testapp'%0A%0A%0Aclass Test(TestCase):%0A def setUp(self):%0A self.ticket = Ticket.objects.create()%0A%0A def test_model_objects_create(self):%0A %22%22%22Check a model with state field can be created%0A if one of the other fields is a property or a virtual field.%0A %22%22%22%0A Task.objects.create(causality=self.ticket)%0A
|
|
efcda7dad6efb189713b8cebb20b4d8b64a85c71
|
Add tools/msgpack2json.py
|
tools/msgpack2json.py
|
tools/msgpack2json.py
|
Python
| 0.000005
|
@@ -0,0 +1,84 @@
+import sys, json, umsgpack%0Ajson.dump(umsgpack.unpack(sys.stdin.buffer), sys.stdout)%0A
|
|
0dc5154daa12ea196bb5fdeb1342f6f7b3e6e62b
|
Add markov model baseline
|
MarkovModel/model.py
|
MarkovModel/model.py
|
Python
| 0
|
@@ -0,0 +1,1241 @@
+'''%0A Markov Model for transportation%0A Ankur Goswami%0A'''%0A%0Adef load_inputs(datafiles):%0A inputs = %7B%7D%0A for file in datafiles:%0A with open(file, 'r') as rf:%0A for line in rf:%0A split = line.split('%5Ct', 1)%0A segnum = split%5B0%5D%0A if segnum is in inputs:%0A inputs%5Bsegnum%5D += segnum%5B1%5D%0A else:%0A inputs%5Bsegnum%5D = segnum%5B1%5D%0A final_inputs = %5B%5D%0A for key in inputs:%0A final_inputs.append(inputs%5Bkey%5D)%0A return final_inputs%0A %0A%0Adef run(inputs):%0A counts = %7B%7D%0A for transition in inputs:%0A if transition is in counts:%0A counts%5Btransition%5D += 1%0A else:%0A counts%5Btransition%5D = 1%0A sum = 0%0A for key, val in counts.values():%0A # There must be a minimum of 2 trips to be considered%0A if counts%5Bkey%5D == 1:%0A del counts%5Bkey%5D%0A else:%0A sum += counts%5Bkey%5D%0A for key, val in counts.values():%0A counts%5Bkey%5D = val / sum%0A return counts%0A%0Adef output(counts, output_file):%0A sorted_tuples = sorted(counts.items(), key=lambda x: x%5B1%5D)%0A with open(output_file, 'w+') as wf:%0A for tup in sorted_tuples:%0A wf.write(%22%25s%5Ct%25f%5Cn%22)%0A
|
|
50bfa56b660d5d39c1dd7b3d426fcd589a9719bb
|
add univdump.py for extracting password dumps [wip]
|
univdump.py
|
univdump.py
|
Python
| 0
|
@@ -0,0 +1,1476 @@
+#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A%0Aimport re%0Aimport sys%0Aimport collections%0A%0A'''%0AThis script tries its best to fvck these various esoteric hard-to-process%0Auser database dump or leak files.%0A'''%0A%0Are_field = re.compile(r'(%3C%5Cw+%3E)')%0A%0ARecFormat = collections.namedtuple('RecFormat', ('regex', 'fields'))%0A%0AFORMATS = (%0A'%3Cpassword%3E',%0A'%3Cemail%3E,%3Cpassword%3E',%0A'%3Cemail%3E%5Ct%3Cpassword%3E',%0A'%3Cemail%3E----%3Cpassword%3E',%0A'%3Cemail%3E %3Cpassword%3E',%0A'%3Cemail%3E %3Cpassword%3E',%0A'%3Cusername%3E%5Ct%3Cpassword%3E%5Ct%3Cemail%3E',%0A'%3Cusername%3E%5Cs+%3Cmd5%3E%5Cs+%3Cemail%3E',%0A'%3Cusername%3E%5Ct%3Cmd5%3E%5Ct%3Cemail%3E',%0A'%3Cusername%3E%5Ct%3Cmd5%3E%5Ct%3Cemail%3E%5Ct%3Cpassword%3E',%0A'%3Cusername%3E%5Ct%3Cmd5%3E%5Ct%5Ct%5Ct%3Cemail%3E%5Ct%3Cpassword%3E',%0A'%3Cusername%3E%5Ct%5C%7C%5Ct%3Cmd5%3E%5Ct%5C%7C%5Ct%3Cemail%3E%5Ct%5C%7C%5Ct%3Cpassword%3E',%0A'%3Cemail%3E%5Ct%3Cmd5%3E%5Ct%3Cusername%3E%5Ct%3Cemail%3E%5Ct%3Cpassword%3E',%0A'%3Cusername%3E,%3Cpassword%3E,%3Cemail%3E',%0A'%3Cemail%3E%5Ct%3Cmd5%3E%5Ct%3Cname%3E%5Ct%3Cusername%3E%5Ct%3Cmd5%3E%5Ct%3Cphone%3E%5Ct%3Cdigits%3E',%0A'%3Cdigits%3E%5Ct%3Cusername%3E%5Ct%3Cmd5%3E%5Ct%3Cother%3E%5Ct%3Cother%3E%5Ct%3Cdigits%3E%5Ct%3Cemail%3E%5Ct%3Cignore%3E',%0A%22%5C(%3Cdigits%3E,%5Cs+'%3Cemail%3E',%5Cs+'%3Cextuname%3E',%5Cs+'%3Cmd5%3E',%5Cs+%3Cdigits%3E%5C),%22,%0A)%0A%0Aclass FormatDetector:%0A TEMPLATES = %7B%0A 'password': '%5B -~%5D+',%0A 'email': '%5BA-Za-z0-9._-%5D+@%5BA-Za-z0-9.-%5D+',%0A 'username': '%5B%5Cw.%5D+',%0A 'extuname': '%5CS+',%0A 'name': '%5B%5Cw .%5D+',%0A 'md5': '%5BA-Fa-f0-9%5D%7B32%7D',%0A 'phone': '%5B0-9 +-%5D%7B5,%7D',%0A 'digits': '%5B0-9%5D+',%0A 'other': '.+?',%0A 'ignore': '.+',%0A %7D%0A def __init__(self, formats):%0A self.formats = %5B%5D%0A%0A def makeindex():%0A pass%0A%0Aif __name__ == '__main__':%0A pass%0A
|
|
b2e27f42b3f8de10e11faf128183ca5fa3c0ea3f
|
Add 0025
|
Jimmy66/0025/0025.py
|
Jimmy66/0025/0025.py
|
Python
| 0.999934
|
@@ -0,0 +1,1046 @@
+#!/usr/bin/env python3%0A%0Aimport speech_recognition as sr%0Aimport webbrowser%0A%0A# obtain path to %22test.wav%22 in the same folder as this script%0Afrom os import path%0AWAV_FILE = path.join(path.dirname(path.realpath(__file__)), %22test.wav%22)%0A%0A# use %22test.wav%22 as the audio source%0Ar = sr.Recognizer()%0Awith sr.WavFile(WAV_FILE) as source:%0A audio = r.record(source) # read the entire WAV file%0A%0A# recognize speech using Google Speech Recognition%0Atry:%0A # for testing purposes, we're just using the default API key%0A # to use another API key, use %60r.recognize_google(audio, key=%22GOOGLE_SPEECH_RECOGNITION_API_KEY%22)%60%0A # instead of %60r.recognize_google(audio)%60%0A print(%22Google Speech Recognition thinks you said %22 + r.recognize_google(audio))%0A result = r.recognize_google(audio)%0A webbrowser.open_new_tab(result)%0A print(result)%0Aexcept sr.UnknownValueError:%0A print(%22Google Speech Recognition could not understand audio%22)%0Aexcept sr.RequestError as e:%0A print(%22Could not request results from Google Speech Recognition service; %7B0%7D%22.format(e))%0A%0A
|
|
07f522bed6a285507aadd66df89b14022e1e2a04
|
add new package : openresty (#14169)
|
var/spack/repos/builtin/packages/openresty/package.py
|
var/spack/repos/builtin/packages/openresty/package.py
|
Python
| 0
|
@@ -0,0 +1,1054 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass Openresty(AutotoolsPackage):%0A %22%22%22%0A OpenResty is a full-fledged web application server by bundling the%0A standard nginx core, lots of 3rd-party nginx modules, as well as%0A most of their external dependencies.%0A %22%22%22%0A%0A homepage = %22https://github.com/openresty%22%0A url = %22https://github.com/openresty/openresty/releases/download/v1.15.8.2/openresty-1.15.8.2.tar.gz%22%0A%0A version('1.15.8.2', sha256='bf92af41d3ad22880047a8b283fc213d59c7c1b83f8dae82e50d14b64d73ac38')%0A version('1.15.8.1', sha256='89a1238ca177692d6903c0adbea5bdf2a0b82c383662a73c03ebf5ef9f570842')%0A version('1.13.6.2', sha256='946e1958273032db43833982e2cec0766154a9b5cb8e67868944113208ff2942')%0A%0A depends_on('pcre', type='build')%0A%0A def configure_args(self):%0A args = %5B'--without-http_rewrite_module'%5D%0A return args%0A
|
|
20fc164862f72527ef7d06bcbfe9dc4329ef9fa7
|
add problem, hackerrank 005 plus minus
|
hackerrank/005_plus_minus.py
|
hackerrank/005_plus_minus.py
|
Python
| 0.001174
|
@@ -0,0 +1,1448 @@
+#!/bin/python3%0D%0A%0D%0A%22%22%22%0D%0Ahttps://www.hackerrank.com/challenges/plus-minus?h_r=next-challenge&h_v=zen%0D%0A%0D%0AGiven an array of integers, calculate which fraction of its elements are positive, which fraction of its elements are negative, and which fraction of its elements are zeroes, respectively. Print the decimal value of each fraction on a new line.%0D%0A%0D%0ANote: This challenge introduces precision problems. The test cases are scaled to six decimal places, though answers with absolute error of up to 10%5E(-4) are acceptable.%0D%0A%0D%0AInput Format%0D%0A%0D%0AThe first line contains an integer, N, denoting the size of the array. %0D%0AThe second line contains N space-separated integers describing an array of numbers (a0, a1, a2,...,a(n-1)).%0D%0A%0D%0AOutput Format%0D%0A%0D%0AYou must print the following lines:%0D%0A%0D%0AA decimal representing of the fraction of positive numbers in the array.%0D%0AA decimal representing of the fraction of negative numbers in the array.%0D%0AA decimal representing of the fraction of zeroes in the array.%0D%0A%0D%0ASample Input%0D%0A%0D%0A6%0D%0A-4 3 -9 0 4 1 %0D%0A%0D%0ASample Output%0D%0A%0D%0A0.500000%0D%0A0.333333%0D%0A0.166667%0D%0A%0D%0AExplanation%0D%0A%0D%0AThere are 3 positive numbers, 2 negative numbers, and 1 zero in the array. %0D%0AThe respective fractions of positive numbers, negative numbers and zeroes are 3 / 6 = 0.500000, 2 / 6 = 0.333333 and , 1 / 6 = 0.166667, respectively.%0D%0A%22%22%22%0D%0A%0D%0A%0D%0Aimport sys%0D%0A%0D%0A%0D%0An = int(input().strip())%0D%0Aarr = %5Bint(arr_temp) for arr_temp in input().strip().split(' ')%5D%0D%0A
|
|
6e0f585a8f8433d4f6800cb1f093f97f8a1d4ff7
|
Update imports for new functions
|
imageutils/__init__.py
|
imageutils/__init__.py
|
Python
| 0
|
@@ -0,0 +1,840 @@
+# Licensed under a 3-clause BSD style license - see LICENSE.rst%0A%22%22%22%0AImage processing utilities for Astropy.%0A%22%22%22%0A%0A# Affiliated packages may add whatever they like to this file, but%0A# should keep this content at the top.%0A# ----------------------------------------------------------------------------%0Afrom ._astropy_init import *%0A# ----------------------------------------------------------------------------%0A%0A# For egg_info test builds to pass, put package imports here.%0Aif not _ASTROPY_SETUP_:%0A from .scale_img import *%0A from .array_utils import *%0A from .sampling import *%0A%0A__all__ = %5B'find_imgcuts', 'img_stats', 'rescale_img', 'scale_linear',%0A 'scale_sqrt', 'scale_power', 'scale_log', 'scale_asinh',%0A 'downsample', 'upsample', 'extract_array_2d', 'add_array_2d',%0A 'subpixel_indices', 'fix_prf_nan'%5D%0A
|
|
0573ed88c4de497b2da7088795b0d747bb2bd2ce
|
Add ICT device
|
pymodels/middlelayer/devices/ict.py
|
pymodels/middlelayer/devices/ict.py
|
Python
| 0.000001
|
@@ -0,0 +1,1686 @@
+#!/usr/bin/env python-sirius%0A%0Afrom epics import PV%0A%0A%0Aclass ICT:%0A%0A def __init__(self, name):%0A if name in %5B'ICT-1', 'ICT-2'%5D:%0A self._charge = PV('LI-01:DI-' + name + ':Charge-Mon')%0A self._charge_avg = PV('LI-01:DI-' + name + 'ICT-1:ChargeAvg-Mon')%0A self._charge_max = PV('LI-01:DI-' + name + 'ICT-1:ChargeMax-Mon')%0A self._charge_min = PV('LI-01:DI-' + name + 'ICT-1:ChargeMin-Mon')%0A self._charge_std = PV('LI-01:DI-' + name + 'ICT-1:ChargeStd-Mon')%0A self._pulse_cnt = PV('LI-01:DI-' + name + ':PulseCount-Mon')%0A else:%0A raise Exception('Set device name: ICT-1 or ICT-2')%0A%0A @property%0A def connected(self):%0A conn = self._charge.connected%0A conn &= self._charge_avg.connected%0A conn &= self._charge_max.connected%0A conn &= self._charge_min.connected%0A conn &= self._charge_std.connected%0A conn &= self._pulse_cnt.connected%0A return conn%0A%0A @property%0A def charge(self):%0A return self._charge.get()%0A%0A @property%0A def charge_avg(self):%0A return self._charge_avg.get()%0A%0A @property%0A def charge_max(self):%0A return self._charge_max.get()%0A%0A @property%0A def charge_min(self):%0A return self._charge_min.get()%0A%0A @property%0A def charge_std(self):%0A return self._charge_std.get()%0A%0A @property%0A def pulse_count(self):%0A return self._pulse_cnt.get()%0A%0A%0Aclass TranspEff:%0A%0A def __init__(self):%0A self._eff = PV('LI-Glob:AP-TranspEff:Eff-Mon')%0A%0A @property%0A def connected(self):%0A return self._eff.connected%0A%0A @property%0A def efficiency(self):%0A return self._eff.get()%0A
|
|
a080713a1dd0dd0c1b9c487f9c5413f3e4419db9
|
Create MQTT2StepperMotor.py
|
MQTT2StepperMotor.py
|
MQTT2StepperMotor.py
|
Python
| 0.000027
|
@@ -0,0 +1,132 @@
+# Author: Anton Gustafsson%0A# Released under MIT license %0A%0A#!/usr/bin/python%0A%0A%0A%0Afrom StepperMotorDriver import MotorControl%0A%0A%0Aclass %0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.