commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
|---|---|---|---|---|---|---|---|
d199510ab03975832b262cbc2160c3d6f3371e8d
|
Add solution in Python
|
codeforces/dominated_subarray.py
|
codeforces/dominated_subarray.py
|
Python
| 0.004351
|
@@ -0,0 +1,1460 @@
+def read_first_line():%0A return int(input())%0A%0A%0Adef read_cases(number_of_cases):%0A cases = %5B%5D%0A for i in range(number_of_cases):%0A line = input()%0A if i %25 2 == 1:%0A case = %5Bint(string) for string in line.strip().split(' ')%5D%0A cases.append(case)%0A return cases%0A%0A%0Adef updateHistory(index, number, history):%0A if not number in history:%0A history%5Bnumber%5D = %7B %22latestIndex%22: index, %22minDifference%22: None %7D%0A return%0A%0A value = history%5Bnumber%5D%0A latestIndex = value%5B%22latestIndex%22%5D%0A minDifference = value%5B%22minDifference%22%5D%0A minimum = index - latestIndex%0A%0A if not minDifference or minimum %3C minDifference:%0A history%5Bnumber%5D = %7B %22latestIndex%22: index, %22minDifference%22: minimum %7D%0A return%0A %0A # Update index, preserve minDifference%0A history%5Bnumber%5D = %7B %22latestIndex%22: index, %22minDifference%22: minDifference %7D%0A%0A%0Adef solve(case):%0A history = %7B%7D%0A for index, number in enumerate(case):%0A updateHistory(index, number, history)%0A %0A mins = %5B%5D%0A for value in history.values():%0A minDifference = value%5B%22minDifference%22%5D%0A if minDifference:%0A mins.append(minDifference)%0A %0A if len(mins) == 0:%0A return -1%0A%0A return min(mins) + 1%0A%0A%0Aif __name__ == %22__main__%22:%0A test_cases = read_first_line()%0A lines_per_case = 2%0A cases = read_cases(test_cases * lines_per_case)%0A for case in cases:%0A solution = solve(case)%0A print(solution)%0A
|
|
6a8ff154b8468d61b18d390db9e710fc0b224ac7
|
Add Left-Handed toons crawler
|
comics/comics/lefthandedtoons.py
|
comics/comics/lefthandedtoons.py
|
Python
| 0
|
@@ -0,0 +1,730 @@
+%0Afrom comics.aggregator.crawler import CrawlerBase, CrawlerResult%0Afrom comics.meta.base import MetaBase%0A%0Aclass Meta(MetaBase):%0A name = 'Left-Handed Toons'%0A language = 'en'%0A url = 'http://www.lefthandedtoons.com/'%0A start_date = '2007-01-14'%0A rights = 'Justin & Drew'%0A%0Aclass Crawler(CrawlerBase):%0A history_capable_days = 12%0A schedule = 'Mo,Tu,We,Th,Fr'%0A time_zone = -5%0A%0A def crawl(self, pub_date):%0A feed = self.parse_feed(%0A 'http://feeds.feedburner.com/lefthandedtoons/awesome')%0A%0A for entry in feed.for_date(pub_date):%0A url = entry.summary.src('img%5Bsrc*=%22/toons/%22%5D')%0A title = entry.title%0A%0A if url:%0A return CrawlerResult(url, title)%0A
|
|
59e8fe848da5cfa3874c82776205082764efbe63
|
Enable Jenkins Python3 monster for i19
|
tests/test_python3_regression.py
|
tests/test_python3_regression.py
|
Python
| 0
|
@@ -0,0 +1,392 @@
+from __future__ import absolute_import, division, print_function%0A%0Adef test_no_new_python3_incompatible_code_is_introduced_into_this_module():%0A import i19%0A import pytest%0A import dials.test.python3_regression as py3test%0A result = py3test.find_new_python3_incompatible_code(i19)%0A if result is None:%0A pytest.skip('No python3 interpreter available')%0A elif result:%0A pytest.fail(result)%0A
|
|
0c3f3c444d863ec4acff704efee71a29ab8cdf34
|
Add ip_reverse module
|
plugins/modules/ip_reverse.py
|
plugins/modules/ip_reverse.py
|
Python
| 0
|
@@ -0,0 +1,2118 @@
+#!/usr/bin/python%0Afrom __future__ import (absolute_import, division, print_function)%0A%0Afrom ansible.module_utils.basic import AnsibleModule%0A%0A__metaclass__ = type%0A%0ADOCUMENTATION = '''%0A---%0Amodule: ip_reverse%0Ashort_description: Modify reverse on IP%0Adescription:%0A - Modify reverse on IP%0Aauthor: Synthesio SRE Team%0Arequirements:%0A - ovh %3E= 0.5.0%0Aoptions:%0A ip:%0A required: true%0A description: The ip%0A reverse:%0A required: true%0A description: The reverse to assign%0A%0A'''%0A%0AEXAMPLES = '''%0Asynthesio.ovh.ip_reverse:%0A ip: 192.0.2.1%0A reverse: host.domain.example.%0Adelegate_to: localhost%0A'''%0A%0ARETURN = ''' # '''%0A%0Afrom ansible_collections.synthesio.ovh.plugins.module_utils.ovh import ovh_api_connect, ovh_argument_spec%0A%0Atry:%0A from ovh.exceptions import APIError, ResourceNotFoundError%0A HAS_OVH = True%0Aexcept ImportError:%0A HAS_OVH = False%0A%0A%0Adef run_module():%0A module_args = ovh_argument_spec()%0A module_args.update(dict(%0A ip=dict(required=True),%0A reverse=dict(required=True)%0A ))%0A%0A module = AnsibleModule(%0A argument_spec=module_args,%0A supports_check_mode=True%0A )%0A client = ovh_api_connect(module)%0A%0A ip = module.params%5B'ip'%5D%0A reverse = module.params%5B'reverse'%5D%0A%0A if module.check_mode:%0A module.exit_json(msg=%22Reverse %7B%7D to %7B%7D succesfully set ! 
- (dry run mode)%22.format(ip, reverse), changed=True)%0A%0A result = %7B%7D%0A try:%0A result = client.get('/ip/%25s/reverse/%25s' %25 (ip, ip))%0A except ResourceNotFoundError:%0A result%5B'reverse'%5D = ''%0A%0A if result%5B'reverse'%5D == reverse:%0A module.exit_json(msg=%22Reverse %7B%7D to %7B%7D already set !%22.format(ip, reverse), changed=False)%0A%0A try:%0A client.post(%0A '/ip/%25s/reverse' %25 ip,%0A ipReverse=ip,%0A reverse=reverse%0A )%0A module.exit_json(%0A msg=%22Reverse %7B%7D to %7B%7D succesfully set !%22.format(ip, reverse),%0A changed=True)%0A except APIError as api_error:%0A return module.fail_json(msg=%22Failed to call OVH API: %7B0%7D%22.format(api_error))%0A%0A%0Adef main():%0A run_module()%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
c5ac422ff1e4628ad8ea53e4f1442e6a70bf959f
|
add first command test
|
tests/test_commands.py
|
tests/test_commands.py
|
Python
| 0.000016
|
@@ -0,0 +1,598 @@
+import unittest%0A%0A%0Aclass CreateRangeVotingCommand():%0A def __init__(self, question, choices):%0A self.question = question%0A self.choices = choices%0A%0A%0Aclass CreateRangeVotingCommandTestCase(unittest.TestCase):%0A def test_has_choices_and_question(self):%0A question = 'Question ?'%0A choices = %5B'a', 'b'%5D%0A create_rangevoting_command = CreateRangeVotingCommand(question, choices)%0A self.assertEqual(question, create_rangevoting_command.question)%0A self.assertEqual(choices, create_rangevoting_command.choices)%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
391e145b6e82aaa87e2ab23cfea53cb7ae98bc2a
|
Add a work-in-progress parser for the ClientHello message.
|
tlsenum/parse_hello.py
|
tlsenum/parse_hello.py
|
Python
| 0.000006
|
@@ -0,0 +1,828 @@
+import construct%0A%0Afrom tlsenum import hello_constructs%0A%0A%0Aclass ClientHello(object):%0A%0A @property%0A def protocol_version(self):%0A return self._protocol_version%0A%0A @protocol_version.setter%0A def protocol_version(self, protocol_version):%0A assert protocol_version in %5B%223.0%22, %221.0%22, %221.1%22, %221.2%22%5D%0A%0A self._protocol_version = protocol_version%0A%0A if protocol_version == %223.0%22:%0A self._protocol_minor = 0%0A elif protocol_version == %221.0%22:%0A self._protocol_minor = 1%0A elif protocol_version == %221.1%22:%0A self._protocol_minor = 2%0A elif protocol_version == %221.2%22:%0A self._protocol_minor = 3%0A%0A%0A def build(self):%0A return hello_constructs.ProtocolVersion.build(%0A construct.Container(major=3, minor=self._protocol_minor)%0A )%0A
|
|
764bad33b598841333d4d1674bf5667957ada551
|
Add a no-op measurement
|
tools/perf/measurements/no_op.py
|
tools/perf/measurements/no_op.py
|
Python
| 0.000014
|
@@ -0,0 +1,550 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.%0A# Use of this source code is governed by a BSD-style license that can be%0A# found in the LICENSE file.%0A%0Afrom telemetry.page import page_measurement%0A%0Aclass NoOp(page_measurement.PageMeasurement):%0A def __init__(self):%0A super(NoOp, self).__init__('no_op')%0A%0A def CanRunForPage(self, page):%0A return hasattr(page, 'no_op')%0A%0A def WillRunAction(self, page, tab, action):%0A pass%0A%0A def DidRunAction(self, page, tab, action):%0A pass%0A%0A def MeasurePage(self, page, tab, results):%0A pass%0A
|
|
a704a1964659a45b007e696ed1547b563dcffa4f
|
create 2.py
|
2.py
|
2.py
|
Python
| 0
|
@@ -0,0 +1,10 @@
+# content%0A
|
|
a1db6c4379c787124d7ee825adbcc76d2069a3c6
|
Add check to travis to make sure new boards are built, fix #1886
|
tools/travis_new_boards_check.py
|
tools/travis_new_boards_check.py
|
Python
| 0
|
@@ -0,0 +1,1192 @@
+#! /usr/bin/env python3%0A%0Aimport os%0Aimport re%0Aimport json%0A%0Aimport build_board_info%0A%0A# Get boards in json format%0Aboards_info_json = build_board_info.get_board_mapping()%0A# print(boards_info_json)%0A%0A# TODO (Carlos) Find all the boards on the json format%0A%0A# We need to know the path of the .travis.yml file%0Abase_path = os.path.dirname(__file__)%0Atravis_path = os.path.abspath(os.path.join(base_path, '..', '.travis.yml'))%0A%0A# Loading board list based on TRAVIS_BOARDS env variable on .travis.yml%0Atravis_boards = %5B%5D%0Awith open(travis_path, 'r') as travis:%0A%0A # Get all lines that contain the substring 'TRAVIS_BOARDS'%0A for line in travis:%0A line = travis.readline()%0A%0A if 'TRAVIS_BOARDS' in line:%0A print('TRAVIS_BOARDS found')%0A print(line)%0A # TODO (Carlos) Store the line content%0A%0A # We've reached the end of the env: section%0A elif 'addons' in line:%0A break%0A else:%0A pass%0A%0A # TODO (Carlos) Getting all the boards on TRAVIS_BOARDS using regex matching%0A # Tranks sommersoft for the pattern%0A pattern = '(.+)'%0A%0A# TODO (Carlos) Comparing boards listed in TRAVIS_BOARDS and boards got from get_board_mapping%0A
|
|
9a1c9e2cbe7f9b9decbe93d567458b6a6976e420
|
complete 14 longest collatz sequence
|
14-longest-collatz-sequence.py
|
14-longest-collatz-sequence.py
|
Python
| 0.000092
|
@@ -0,0 +1,588 @@
+from functools import lru_cache%0A%0Adef sequence(n):%0A 'bad idea'%0A while n is not 1:%0A yield n%0A n = 3*n+1 if n%252 else n/2%0A yield n%0A%0Adef next_num(n):%0A if n %25 2:%0A return 3 * n + 1%0A else:%0A return n / 2%0A%0A@lru_cache(None)%0Adef collatz_length(n):%0A if n == 1:%0A return 1%0A else:%0A return 1 + collatz_length(next_num(n))%0A%0Aif __name__ == '__main__':%0A i = 0%0A largest = 0%0A for n in range(1, 1_000_001):%0A length = collatz_length(n)%0A if length %3E largest:%0A largest = length%0A i = n%0A print(i, largest)%0A
|
|
9392f7215c77749f94908e8f4c0899a712177bfe
|
Hello, Flask
|
app.py
|
app.py
|
Python
| 0.999123
|
@@ -0,0 +1,146 @@
+from flask import Flask%0A%0Aapp = Flask(__name__)%0A%0A@app.route('/')%0Adef index():%0A return %22Hello, world!%22%0A%0Aif __name__ == '__main__':%0A app.run()%0A
|
|
3f5a752a7978c2432ce3106492d771c00a5f1279
|
Create geo.py
|
geo.py
|
geo.py
|
Python
| 0.000006
|
@@ -0,0 +1,964 @@
+import requests%0A%0A%0Adef example():%0A # grab some lat/long coords from wherever. For this example,%0A # I just opened a javascript console in the browser and ran:%0A #%0A # navigator.geolocation.getCurrentPosition(function(p) %7B%0A # console.log(p);%0A # %7D)%0A #%0A latitude = 35.1330343%0A longitude = -90.0625056%0A%0A # Did the geocoding request comes from a device with a%0A # location sensor? Must be either true or false.%0A sensor = 'true'%0A%0A # Hit Google's reverse geocoder directly%0A # NOTE: I *think* their terms state that you're supposed to%0A # use google maps if you use their api for anything.%0A base = %22http://maps.googleapis.com/maps/api/geocode/json?%22%0A params = %22latlng=%7Blat%7D,%7Blon%7D&sensor=%7Bsen%7D%22.format(%0A lat=latitude,%0A lon=longitude,%0A sen=sensor%0A )%0A url = %22%7Bbase%7D%7Bparams%7D%22.format(base=base, params=params)%0A response = requests.get(url)%0A return response.json%5B'results'%5D%5B0%5D%5B'formatted_address'%5D%0A
|
|
1110311ef90a45497af4cdfb8558d1b05fc799d0
|
add a script to run the server
|
run.py
|
run.py
|
Python
| 0.000001
|
@@ -0,0 +1,1174 @@
+#!/usr/bin/env python%0A# coding: utf-8%0A%0Aimport bottle%0Afrom logging import info%0A%0Afrom devmine import Devmine%0Afrom devmine.config import (%0A environment,%0A settings%0A)%0A%0A%0Adef main():%0A info('Devmine server started')%0A db_url = settings.db_url%0A server = settings.server%0A if not db_url:%0A db_url = environment.db_url%0A if not server:%0A server = environment.server%0A%0A info('%5CnServer settings:%5Cn'%0A 'server = %25s%5Cn'%0A 'host = %25s%5Cn'%0A 'port = %25s%5Cn'%0A 'db_url = %25s%5Cn'%0A 'db_echo = %25s%5Cn'%0A 'reloader = %25s%5Cn'%0A 'debug = %25s%5Cn',%0A server,%0A settings.host,%0A settings.port,%0A db_url,%0A environment.db_echo,%0A environment.reloader,%0A environment.debug)%0A%0A a = Devmine(%0A server=server,%0A host=settings.host,%0A port=settings.port,%0A db_url=db_url,%0A db_echo=environment.db_echo,%0A reloader=environment.reloader,%0A debug=environment.debug%0A )%0A%0A bottle.run(%0A a.app,%0A server=a.server_type,%0A reloader=a.reloader,%0A host=a.host,%0A port=a.port%0A )%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
e007695e38b2207c9229856c95f37a12e740cb91
|
Add view demographics tests
|
radar/tests/permissions/test_can_user_view_demographics.py
|
radar/tests/permissions/test_can_user_view_demographics.py
|
Python
| 0
|
@@ -0,0 +1,2350 @@
+from radar.permissions import can_user_view_demographics%0Afrom radar.roles import COHORT_RESEARCHER, COHORT_SENIOR_RESEARCHER, ORGANISATION_CLINICIAN%0Afrom helpers.permissions import make_cohorts, make_user, make_patient, make_organisations%0A%0A%0Adef test_admin():%0A patient = make_patient()%0A user = make_user()%0A%0A assert not can_user_view_demographics(user, patient)%0A%0A user.is_admin = True%0A%0A assert can_user_view_demographics(user, patient)%0A%0A%0Adef test_intersecting_cohorts_with_view_demographics_permission():%0A cohorts = make_cohorts(3)%0A cohort_a, cohort_b, cohort_c = cohorts%0A patient = make_patient(cohorts=cohorts)%0A user = make_user(cohorts=%5Bcohort_a, %5Bcohort_b, COHORT_SENIOR_RESEARCHER%5D, cohort_c%5D)%0A%0A assert can_user_view_demographics(user, patient)%0A%0A%0Adef test_intersecting_cohorts_without_view_demographics_permission():%0A cohort_a, cohort_b = make_cohorts(2)%0A%0A patient = make_patient(cohorts=%5Bcohort_a%5D)%0A user = make_user(cohorts=%5B%5Bcohort_b, COHORT_RESEARCHER%5D%5D)%0A%0A assert not can_user_view_demographics(user, patient)%0A%0A%0Adef test_disjoint_cohorts_with_view_demographics_permission():%0A cohort_a, cohort_b = make_cohorts(2)%0A%0A patient = make_patient(cohorts=%5Bcohort_a%5D)%0A user = make_user(cohorts=%5B%5Bcohort_b, COHORT_SENIOR_RESEARCHER%5D%5D)%0A%0A assert not can_user_view_demographics(user, patient)%0A%0A%0Adef test_intersecting_organisations_with_view_demographics_permission():%0A organisations = make_organisations(3)%0A organisation_a, organisation_b, organisation_c = organisations%0A patient = make_patient(organisations=organisations)%0A user = make_user(organisations=%5Borganisation_a, %5Borganisation_b, ORGANISATION_CLINICIAN%5D, organisation_c%5D)%0A%0A assert can_user_view_demographics(user, patient)%0A%0A%0Adef test_intersecting_organisations_without_view_demographics_permission():%0A organisations = make_organisations(3)%0A patient = make_patient(organisations=organisations)%0A 
user = make_user(organisations=organisations)%0A%0A assert not can_user_view_demographics(user, patient)%0A%0A%0Adef test_disjoint_organisations_with_view_demographics_permission():%0A organisation_a, organisation_b = make_organisations(2)%0A%0A patient = make_patient(organisations=%5Borganisation_a%5D)%0A user = make_user(cohorts=%5B%5Borganisation_b, ORGANISATION_CLINICIAN%5D%5D)%0A%0A assert not can_user_view_demographics(user, patient)%0A
|
|
a46f2b8e42852b3c51d31c9402328c82d5d1f78c
|
Create new package. (#8144)
|
var/spack/repos/builtin/packages/swap-assembler/package.py
|
var/spack/repos/builtin/packages/swap-assembler/package.py
|
Python
| 0
|
@@ -0,0 +1,1835 @@
+##############################################################################%0A# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.%0A# Produced at the Lawrence Livermore National Laboratory.%0A#%0A# This file is part of Spack.%0A# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.%0A# LLNL-CODE-647188%0A#%0A# For details, see https://github.com/spack/spack%0A# Please also see the NOTICE and LICENSE files for our notice and the LGPL.%0A#%0A# This program is free software; you can redistribute it and/or modify%0A# it under the terms of the GNU Lesser General Public License (as%0A# published by the Free Software Foundation) version 2.1, February 1999.%0A#%0A# This program is distributed in the hope that it will be useful, but%0A# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and%0A# conditions of the GNU Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this program; if not, write to the Free Software%0A# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA%0A##############################################################################%0Afrom spack import *%0A%0A%0Aclass SwapAssembler(MakefilePackage):%0A %22%22%22A scalable and fully parallelized genome assembler.%22%22%22%0A%0A homepage = %22https://sourceforge.net/projects/swapassembler/%22%0A url = %22https://sourceforge.net/projects/swapassembler/files/SWAP_Assembler-0.4.tar.bz2/download%22%0A%0A version('0.4', '944f2aeae4f451be81160bb625304fc3')%0A%0A depends_on('mpich')%0A%0A def edit(self, spec, prefix):%0A makefile = FileFilter('Makefile')%0A makefile.filter('$(CC) -O2', '$(CC) -pthread -O2', string=True)%0A%0A def install(self, spec, prefix):%0A install_tree('.', prefix.bin)%0A
|
|
2904992eb431ac4a92442ccb1fcff5715ae8c7fa
|
add migrations for new policy parameters
|
webapp/apps/taxbrain/migrations/0035_auto_20161110_1624.py
|
webapp/apps/taxbrain/migrations/0035_auto_20161110_1624.py
|
Python
| 0
|
@@ -0,0 +1,990 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0Aimport webapp.apps.taxbrain.models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('taxbrain', '0034_auto_20161004_1953'),%0A %5D%0A%0A operations = %5B%0A migrations.AddField(%0A model_name='taxsaveinputs',%0A name='CG_nodiff',%0A field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),%0A ),%0A migrations.AddField(%0A model_name='taxsaveinputs',%0A name='CTC_new_refund_limit_rt',%0A field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),%0A ),%0A migrations.AddField(%0A model_name='taxsaveinputs',%0A name='EITC_indiv',%0A field=webapp.apps.taxbrain.models.CommaSeparatedField(default=None, max_length=200, null=True, blank=True),%0A ),%0A %5D%0A
|
|
b068e4f8c3e5e8d7a0f1c45d5f1b6ac424b44153
|
Make validate recipients to ignore empty values
|
src/ggrc/models/comment.py
|
src/ggrc/models/comment.py
|
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: andraz@reciprocitylabs.com
# Maintained By: andraz@reciprocitylabs.com
"""Module containing comment model and comment related mixins."""
from sqlalchemy.orm import validates
from ggrc import db
from ggrc.models.mixins import Base
from ggrc.models.mixins import Described
from ggrc.models.object_document import Documentable
from ggrc.models.relationship import Relatable
class Commentable(object):
"""Mixin for commentable objects.
This is a mixin for adding default options to objects on which people can
comment.
recipients is used for setting who gets notified (Verifer, Requester, ...).
send_by_default should be used for setting the "send notification" flag in
the comment modal.
"""
VALID_RECIPIENTS = frozenset([
"Assessor",
"Assignee",
"Creator",
"Requester",
"Verifier",
])
@validates("recipients")
def validate_recipients(self, key, value):
"""
Validate recipients list
Args:
value (string): Can be either empty, or
list of comma separated `VALID_RECIPIENTS`
"""
# pylint: disable=unused-argument
if value and set(value.split(',')).issubset(self.VALID_RECIPIENTS):
return value
elif not value:
return ""
else:
raise ValueError(value,
'Value should be either empty ' +
'or comma separated list of ' +
', '.join(sorted(self.VALID_RECIPIENTS))
)
recipients = db.Column(db.String, nullable=True)
send_by_default = db.Column(db.Boolean)
_publish_attrs = [
"recipients",
"send_by_default",
]
_aliases = {
"recipients": "Recipients",
"send_by_default": "Send by default",
}
class Comment(Relatable, Described, Documentable, Base, db.Model):
"""Basic comment model."""
__tablename__ = "comments"
assignee_type = db.Column(db.String)
# REST properties
_publish_attrs = [
"assignee_type",
]
_sanitize_html = [
"description",
]
|
Python
| 0.000001
|
@@ -877,16 +877,60 @@
.%0A %22%22%22%0A
+ # pylint: disable=too-few-public-methods%0A%0A
VALID_
@@ -1351,86 +1351,274 @@
alue
- and set(value.split(',')).issubset(self.VALID_RECIPIENTS):%0A return
+:%0A value = set(name for name in value.split(%22,%22) if name)%0A%0A if value and value.issubset(self.VALID_RECIPIENTS):%0A # The validator is a bit more smart and also makes some filtering of the%0A # given data - this is intended.%0A return %22,%22.join(
value
+)
%0A
|
8840340bbd8310cf03f12accbb51dd81921ccf86
|
Fix use of `format` for unicode
|
src/ggrc/models/request.py
|
src/ggrc/models/request.py
|
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: dan@reciprocitylabs.com
# Maintained By: vraj@reciprocitylabs.com
from ggrc import db
from .mixins import deferred, Base, Described
class Request(Described, Base, db.Model):
__tablename__ = 'requests'
VALID_TYPES = (u'documentation', u'interview', u'population sample')
VALID_STATES = (u'Draft', u'Requested', u'Responded', u'Amended Request',
u'Updated Response', u'Accepted')
assignee_id = db.Column(db.Integer, db.ForeignKey('people.id'),
nullable=False)
assignee = db.relationship('Person')
request_type = deferred(db.Column(db.Enum(*VALID_TYPES), nullable=False),
'Request')
status = deferred(db.Column(db.Enum(*VALID_STATES), nullable=False),
'Request')
requested_on = deferred(db.Column(db.Date, nullable=False), 'Request')
due_on = deferred(db.Column(db.Date, nullable=False), 'Request')
audit_id = db.Column(db.Integer, db.ForeignKey('audits.id'), nullable=False)
objective_id = db.Column(db.Integer, db.ForeignKey('objectives.id'),
nullable=False)
gdrive_upload_path = deferred(db.Column(db.String, nullable=True),
'Request')
test = deferred(db.Column(db.Text, nullable=True), 'Request')
notes = deferred(db.Column(db.Text, nullable=True), 'Request')
auditor_contact = deferred(db.Column(db.String, nullable=True), 'Request')
responses = db.relationship('Response', backref='request',
cascade='all, delete-orphan')
_publish_attrs = [
'assignee',
'request_type',
'gdrive_upload_path',
'requested_on',
'due_on',
'status',
'audit',
'objective',
'responses',
'test',
'notes',
'auditor_contact',
]
_sanitize_html = [
'gdrive_upload_path',
'test',
'notes',
'auditor_contact',
]
def _display_name(self):
return 'Request with id={0} for Audit "{1}"'.format(
self.id, self.audit.display_name)
@classmethod
def eager_query(cls):
from sqlalchemy import orm
query = super(Request, cls).eager_query()
return query.options(
orm.joinedload('audit'),
orm.joinedload('objective'),
orm.subqueryload('responses'))
|
Python
| 0.001803
|
@@ -1923,16 +1923,17 @@
return
+u
'Request
|
dbc761530b77c606038f62ed498c192b67321e8f
|
Test co2 load_data for Python 3.
|
statsmodels/datasets/tests/test_data.py
|
statsmodels/datasets/tests/test_data.py
|
Python
| 0
|
@@ -0,0 +1,160 @@
+from statsmodels.datasets import co2%0A%0Adef test_co2_python3():%0A # this failed in pd.to_datetime on Python 3 with pandas %3C= 0.12.0%0A dta = co2.load_pandas()%0A
|
|
efc935b030750c26e24217d5f97dde1dc8a7ea66
|
add script to clone mvn dependency to local from gradle
|
python/mirror-mvn-dependency.py
|
python/mirror-mvn-dependency.py
|
Python
| 0
|
@@ -0,0 +1,2556 @@
+#!/usr/bin/python%0A%22%22%22%0AThis script is used to make a mirror maven repository from a gradle build%0A%0A1. make sure your project can be build correctly%0A2. run this script in your project root directory%0A3. add following code to your gradle file%0A%0A%09buildscript %7B%0A%09%09repositories %7B%0A%09%09%09maven %7B url %22file://$%7BrootProject.projectDir%7D/maven_local/%22 %7D%0A%09%09%7D%0A%0A%09%09dependencies %7B%0A%09%09%09classpath 'com.android.tools.build:gradle:2.1.3'%0A%09%09%09classpath 'io.dator:packageinfo:1.0-SNAPSHOT'%0A%09%09%09classpath 'io.dator:staticcheck:1.0-SNAPSHOT'%0A%09%09%7D%0A%09%7D%0A%0A%22%22%22%0Aimport sys%0Aimport os%0Aimport subprocess%0Aimport glob%0Aimport shutil%0A%0Adef main(argv):%0A project_dir = os.path.dirname(os.path.realpath(__file__))%0A repo_dir = os.path.join(project_dir, %22maven_local%22)%0A temp_home = os.path.join(project_dir, %22.gradle_home%22)%0A if not os.path.isdir(temp_home):%0A os.makedirs(temp_home)%0A %0A if os.path.isdir(repo_dir):%0A shutil.rmtree(repo_dir)%0A %0A subprocess.call(%5B%22gradle%22, %22-g%22, temp_home, %22-Dbuild.network_access=allow%22%5D)%0A %0A cache_files = os.path.join(temp_home, %22caches/modules-*/files-*%22)%0A for cache_dir in glob.glob(cache_files):%0A for cache_group_id in os.listdir(cache_dir):%0A cache_group_dir = os.path.join(cache_dir, cache_group_id)%0A repo_group_dir = os.path.join(repo_dir, cache_group_id.replace('.', '/'))%0A for cache_artifact_id in os.listdir(cache_group_dir):%0A cache_artifact_dir = os.path.join(cache_group_dir, cache_artifact_id)%0A repo_artifact_dir = os.path.join(repo_group_dir, cache_artifact_id)%0A for cache_version_id in os.listdir(cache_artifact_dir):%0A cache_version_dir = os.path.join(cache_artifact_dir, cache_version_id)%0A repo_version_dir = os.path.join(repo_artifact_dir, cache_version_id)%0A if not os.path.isdir(repo_version_dir):%0A os.makedirs(repo_version_dir)%0A cache_items = os.path.join(cache_version_dir, %22*/*%22)%0A for 
cache_item in glob.glob(cache_items):%0A cache_item_name = os.path.basename(cache_item)%0A repo_item_path = os.path.join(repo_version_dir, cache_item_name)%0A print %22%25s:%25s:%25s (%25s)%22 %25 (cache_group_id, cache_artifact_id, cache_version_id, cache_item_name)%0A shutil.copyfile(cache_item, repo_item_path)%0A shutil.rmtree(temp_home)%0A print %22repo location: %25s%22 %25 (repo_dir)%0A return 0%0A%0Aif __name__ == %22__main__%22:%0A sys.exit(main(sys.argv))%0A
|
|
1b2c67a0d4a237ce56dc40616b1a023b515aee0f
|
add setup.py
|
sldc/setup.py
|
sldc/setup.py
|
Python
| 0.000001
|
@@ -0,0 +1,222 @@
+from distutils.core import setup%0A%0Asetup(name=%22sldc%22,%0A version=%221.0%22,%0A description=%22Segment Locate Dispatch Classify workflow%22,%0A author=%22Romain Mormont%22,%0A author_email=%22romain.mormont@gmail.com%22,%0A )%0A
|
|
bb065a747215b6665eec78c5141b0a0d82296dac
|
Add migration to replace '<removed>' with '<removed>@{uuid}.com'.format(uuid=str(uuid4())) in contact_information.email to pass validation
|
migrations/versions/1400_repair_contact_information_emails_post_data_retention_removal.py
|
migrations/versions/1400_repair_contact_information_emails_post_data_retention_removal.py
|
Python
| 0.000054
|
@@ -0,0 +1,1471 @@
+%22%22%22Replace '%3Cremoved%3E' with '%3Cremoved%3E@%7Buuid%7D.com'.format(uuid=str(uuid4())) in contact_information to pass validation.%0A%0ARevision ID: 1400%0ARevises: 1390%0ACreate Date: 2019-10-29 09:09:00.000000%0A%0A%22%22%22%0Afrom uuid import uuid4%0Afrom alembic import op%0Aimport sqlalchemy as sa%0Afrom sqlalchemy.sql import table, column%0A%0A%0A# revision identifiers, used by Alembic.%0Arevision = '1400'%0Adown_revision = '1390'%0A%0A%0Acontact_information = table(%0A 'contact_information',%0A column('id', sa.INTEGER),%0A column('email', sa.VARCHAR),%0A)%0A%0A%0Adef upgrade():%0A %22%22%22Update any contact_information rows where the email is set to '%3Cremoved%3E' to use the UUID email format we use%0A on the user object in User.remove_personal_data%0A%0A Loop over the ids so we get a unique UUID for each update.%0A %22%22%22%0A conn = op.get_bind()%0A%0A # SELECT id FROM contact_information WHERE email = '%3Cremoved%3E';%0A query = contact_information.select().where(%0A contact_information.c.email == '%3Cremoved%3E'%0A ).with_only_columns(%0A (contact_information.c.id,)%0A )%0A%0A ci_ids = (ci_id for ci_id, in conn.execute(query).fetchall())%0A%0A for ci_id in ci_ids:%0A # UPDATE contact_information SET email = '%3Cremoved%3E@uuid-etc.com' WHERE id = %3Cci_id%3E;%0A query = contact_information.update().where(%0A contact_information.c.id == ci_id%0A ).values(%0A email='%3Cremoved%3E@%7Buuid%7D.com'.format(uuid=str(uuid4()))%0A )%0A%0A conn.execute(query)%0A%0A%0Adef downgrade():%0A pass%0A
|
|
434827540d4e11254615cd52b7efb36b746f9d0d
|
Create tf_simple_LR.py
|
tf_simple_LR.py
|
tf_simple_LR.py
|
Python
| 0.000401
|
@@ -0,0 +1,1144 @@
+# -*- coding: utf-8 -*-%0A%22%22%22%0ACreated on Mon Aug 1 19:50:54 2016%0A%0A@author: max%0A%22%22%22%0A%0Aimport tensorflow as tf%0Aimport numpy as np%0A%0Aimport matplotlib.pylab as m%0A%0Ax_data = np.linspace(0.0,1.0,num = 500,dtype='float32')%0Ax_data = np.reshape(x_data,(500,))%0Ay_data = np.linspace(0.0,1.0,num = 500,dtype='float32')%0Ay_data = y_data + np.random.rand(500,)/10+1%0A%0Ax = tf.placeholder(dtype='float32')%0Ay = tf.placeholder(dtype='float32')%0A%0AW = tf.Variable(tf.random_uniform((1,1),-1,1),name='W',dtype='float32')%0Ab = tf.Variable(tf.random_uniform((1,1),-1,1),name='b',dtype='float32')%0A%0AY = W*x+b%0A%0Aloss = tf.reduce_mean(tf.square(Y-y))%0A%0Aopt = tf.train.RMSPropOptimizer(0.01).minimize(loss)%0A%0Ainit = tf.initialize_all_variables()%0A%0Ases = tf.Session()%0A%0Ases.run(init)%0A%0Afor i in range(1000):%0A ses.run(opt,feed_dict=%7Bx:x_data,y:y_data%7D)%0A if i%2550 == True:%0A print loss.eval(session = ses,feed_dict=%7Bx:x_data,y:y_data%7D)%0A B = b.eval(session = ses)%0A final_w = W.eval(session = ses)%0A print B,final_w%0A %0Afinal_y = np.multiply(x_data,final_w) + B%0Afinal_y = np.reshape(final_y,(500,))%0Am.plot(x_data,final_y)%0Am.plot(x_data,y_data,'r+')%0A %0A
|
|
3fb15a0e2fd4b1c9d6fb90ea5db92e99fda578c7
|
Create topKFrequent.py
|
topKFrequent.py
|
topKFrequent.py
|
Python
| 0
|
@@ -0,0 +1,775 @@
+#%0A# Given a non-empty array of integers, return the k most frequent elements.%0A# %0A# For example,%0A# Given %5B1,1,1,2,2,3%5D and k = 2, return %5B1,2%5D.%0A# %0A# Note: %0A# You may assume k is always valid, 1 %E2%89%A4 k %E2%89%A4 number of unique elements.%0A# Your algorithm's time complexity must be better than O(n log n), where n is the array's size.%0A#%0Aclass Solution(object):%0A def topKFrequent(self, nums, k):%0A %22%22%22%0A :type nums: List%5Bint%5D%0A :type k: int%0A :rtype: List%5Bint%5D%0A %22%22%22%0A res = %7B%7D%0A for n in nums:%0A if n not in res:%0A res%5Bn%5D = 0%0A res%5Bn%5D = res%5Bn%5D+1%0A return %5Belem%5B0%5D for pos, elem in enumerate(sorted(%5B(key, res%5Bkey%5D) for key in res%5D, key=lambda x: x%5B1%5D, reverse=True)) if pos %3C k%5D%0A
|
|
3190b1e90c4f5de71e766fc97acb6c03b5c6888b
|
Create tweet-ip.py
|
tweet-ip.py
|
tweet-ip.py
|
Python
| 0.002257
|
@@ -0,0 +1,1469 @@
+from twitter import *%0Aimport subprocess%0Afrom random import randint%0Aimport time%0A%0Aimport urllib2%0A%0Adef internet_on():%0A try:%0A response=urllib2.urlopen('http://twitter.com',timeout=1)%0A return True%0A except urllib2.URLError as err: pass%0A return False%0A%0Adef getserial():%0A # Extract serial from cpuinfo file%0A cpuserial = %220000000000000000%22%0A try:%0A f = open('/proc/cpuinfo','r')%0A for line in f:%0A if line%5B0:6%5D=='Serial':%0A cpuserial = line%5B10:26%5D%0A f.close()%0A except:%0A cpuserial = %22ERROR000000000%22%0A%0A return cpuserial%0A%0Awhile not internet_on():%0A print %22no internetz%22%0A time.sleep(5)%0A%0Aprint %22internetz!%22%0Arng = randint(1, 999)%0A%0Acomplete = None%0A%0Awhile not complete:%0A try:%0A time.sleep(2)%0A twitter = Twitter(auth=OAuth('2387200483-QN8mAtIweLdF70TfsEtBuR47zzw86p06MXCY0er', '6nqftquJnaqfqqM8IEiId98iU3i8GzMSDrYDWwQ3o9Z90', 'qtM32rShCUvdwEnO0FwRCw', 'VquAJnz7WXWi190gtnfYla4hGSZ0SsSqylJxvv2Y7o'))%0A print %22Authed with twitter!%22%0A arg='ip route list'%0A p=subprocess.Popen(arg,shell=True,stdout=subprocess.PIPE)%0A data = p.communicate()%0A split_data = data%5B0%5D.split()%0A ipaddr = split_data%5Bsplit_data.index('src')+1%5D%0A%0A my_ip = '%3C%25d%3E(%25s) piip: %25s' %25 (rng,getserial(), ipaddr)%0A print my_ip%0A%0A twitter.statuses.update(status=my_ip)%0A print %22tweeted!%22%0A complete = True%0A except TwitterError:%0A print %22TwitterError!! Trying again%22%0A continue%0A
|
|
d7a5743bf92627280c2067be7dc496cd81b8353c
|
add unit tests file
|
unit_tests.py
|
unit_tests.py
|
Python
| 0
|
@@ -0,0 +1,117 @@
+import pytest%0A%0Ar = pytest.main(%5B%22-s%22, %22tests/unit%22%5D)%0Aif r:%0A raise Exception(%22There were test failures or errors.%22)
|
|
5a2042ebd62cefdda82b6e288b4b6d5b0f527fcd
|
Add script to add uplaoders to a repo
|
repomgmt/management/commands/repo-add-uploader.py
|
repomgmt/management/commands/repo-add-uploader.py
|
Python
| 0
|
@@ -0,0 +1,1140 @@
+#%0A# Copyright 2012 Cisco Systems, Inc.%0A#%0A# Author: Soren Hansen %3Csorhanse@cisco.com%3E%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A#%0Afrom django.core.management.base import BaseCommand%0Afrom repomgmt.models import Repository%0Afrom django.contrib.auth.models import User%0A%0A%0Aclass Command(BaseCommand):%0A args = '%3Crepository%3E %3Cusername%3E'%0A help = 'Poll all upstream archives and update local db accordingly'%0A%0A def handle(self, reponame, username, **options):%0A repo = Repository.objects.get(name=reponame)%0A user = User.objects.get(username=username)%0A repo.uploaders.add(user)%0A repo.save()%0A
|
|
09c8399092c3c97be068051306fda057170cf290
|
Add LPC residual computation.
|
scikits/talkbox/linpred/common.py
|
scikits/talkbox/linpred/common.py
|
Python
| 0
|
@@ -0,0 +1,972 @@
+from scipy.signal import lfilter%0Afrom scikits.talkbox.linpred import lpc%0A%0Adef lpcres(signal, order, axis = -1):%0A %22%22%22Compute the LPC residual of a signal.%0A%0A The LPC residual is the 'error' signal from LPC analysis, and is defined%0A as:%0A%0A res%5Bn%5D = x%5Bn%5D - xe%5Bn%5D = 1 + a%5B1%5D x%5Bn-1%5D + ... + a%5Bp%5D x%5Bn-p%5D%0A%0A Where x is the input signal and xe the linear prediction of x.%0A%0A Parameters%0A ----------%0A signal : array-like%0A input signal%0A order : int%0A LPC order%0A axis : int%0A axis along which to compute the LPC residual%0A%0A Returns%0A -------%0A res : array-like%0A LPC residual%0A%0A Note%0A ----%0A The LPC residual can also be seen as the input of the LPC analysis filter.%0A As the LPC filter is a whitening filter, it is a whitened version of the%0A signal.%0A%0A In AR modelling, the residual is simply the estimated excitation of the AR%0A filter.%0A %22%22%22%0A return lfilter(lpc(signal, order)%5B0%5D, 1., signal, axis)%0A
|
|
1afe54b237724ce8f06379ef461e5d849ddeec74
|
Add Persian Badwords
|
revscoring/languages/persian.py
|
revscoring/languages/persian.py
|
import warnings
import enchant
from .language import Language, LanguageUtility
DICTIONARY = enchant.Dict("fa")
def is_misspelled_process():
def is_misspelled(word):
return not DICTIONARY.check(word)
return is_misspelled
is_misspelled = LanguageUtility("is_misspelled", is_misspelled_process,
depends_on=[])
persian = Language("revscoring.languages.persian", [is_misspelled])
|
Python
| 0.999746
|
@@ -111,134 +111,563 @@
a%22)%0A
-%0Adef is_misspelled_process():%0A def is_misspelled(word):%0A return not DICTIONARY.check(word)%0A return is_misspelled%0A
+BADWORDS = set(%5B%0A %22%DA%A9%DB%8C%D8%B1%D9%85%22, %22%D8%A7%DB%8C%D8%AA%D8%A7%D9%84%DB%8C%DA%A9%22, %22%DA%A9%D9%88%D9%86%DB%8C%22, %22%DA%A9%DB%8C%D8%B1%22, %22%D9%81%D8%B1%D9%85%D9%88%D8%AF%22, %22%D8%A2%D9%84%D9%87%22, %22%D9%81%D8%B1%D9%85%D9%88%D8%AF%D9%87%22, %22%D9%81%D8%B1%D9%85%D9%88%D8%AF%D9%86%D8%AF%22,%0A %22%D8%AC%D9%86%D8%AF%D9%87%22, %22%D8%A8%D8%B1%D9%88%D9%88%D8%AA%D9%88%22, %22%D9%84%D8%B9%D9%86%D8%AA%22, %22%DA%A9%D9%88%D9%86%22, %22%D8%A7%D9%84%D8%B3%D9%84%D8%A7%D9%85%22, %22%D8%AC%D9%85%D9%87%D9%88%D8%B1%D9%85%D8%AD%D8%AA%D8%B1%D9%85%22, %22%DA%A9%D9%88%D9%86%DB%8C%22,%0A %22%DA%A9%D8%A7%DA%A9%D8%A7%D8%B3%DB%8C%D8%A7%D9%87%22, %22%D8%A2%D8%B4%D8%BA%D8%A7%D9%84%22, %22%DA%AF%D8%A7%D8%A6%DB%8C%D8%AF%D9%85%22, %22%DA%AF%D9%88%D8%B2%DB%8C%D8%AF%D9%87%22, %22%D9%85%D8%B4%D9%86%DA%AF%22, %22%D9%86%D9%86%D8%AA%D9%88%22, %22%D8%A8%D8%AE%D9%88%D8%A7%D8%A8%22%0A%5D)%0A%0A%0Adef is_misspelled_process():%0A def is_misspelled(word):%0A return not DICTIONARY.check(word)%0A return is_misspelled%0A%0A%0Adef is_badword_process():%0A def is_badword(word):%0A return word.lower() in BADWORDS%0A return is_badword%0A%0A%0Ais_badword = LanguageUtility(%22is_badword%22, is_badword_process, depends_on=%5B%5D)
%0Ais_
@@ -835,16 +835,28 @@
sian%22, %5B
+is_badword,
is_missp
|
4fe8df5d09c554b45d5097ca0574b47703c9b581
|
Add another simpler test for %f
|
tests/strings/string_format_f_simple.py
|
tests/strings/string_format_f_simple.py
|
Python
| 0.000159
|
@@ -0,0 +1,152 @@
+a = 1.123456%0Ab = 10%0Ac = -30%0Ad = 34%0Ae = 123.456789%0Af = 892122.129899%0A%0A# form 0%0As = %22b=%25f%22 %25 a%0Aprint s%0A%0A# form 1%0As = %22b,c,d=%25f+%25f+%25f%22 %25 (a, e, f)%0Aprint s%0A
|
|
215822f6edb48f156a15548ff40d21d76e14d692
|
Add markdown as submodule
|
dash_core_components/markdown/__init__.py
|
dash_core_components/markdown/__init__.py
|
Python
| 0.000005
|
@@ -0,0 +1,338 @@
+from .Markdown import Markdown%0A%0Afrom .. import _js_dist%0Afrom .. import _css_dist%0A%0A_js_dist.append(%0A %7B%0A 'relative_package_path': 'highlight.pack.js',%0A 'namespace': 'dash_core_components'%0A %7D%0A)%0A%0A_css_dist.append(%0A %7B%0A 'relative_package_path': 'highlight.css',%0A 'namespace': 'dash_core_components'%0A %7D%0A)%0A
|
|
3657eed1c0f0cf29be85bce03983e5b2c2581b9b
|
test showing bug in cyl mesh face inner product
|
tests/mesh/test_cylMeshInnerProducts.py
|
tests/mesh/test_cylMeshInnerProducts.py
|
Python
| 0
|
@@ -0,0 +1,1986 @@
+from SimPEG import Mesh%0Aimport numpy as np%0Aimport sympy%0Afrom sympy.abc import r, t, z%0Aimport unittest%0A%0ATOL = 1e-1%0A%0Aclass CylInnerProducts_Test(unittest.TestCase):%0A%0A def test_FaceInnerProduct(self):%0A # Here we will make up some j vectors that vary in space%0A # j = %5Bj_r, j_z%5D - to test face inner products%0A j = sympy.Matrix(%5B%0A r**2 * z,%0A r * z**2%0A %5D)%0A%0A # Create an isotropic sigma vector%0A Sig = sympy.Matrix(%5B%0A %5B540/sympy.pi*(r*z)**2, 0 %5D,%0A %5B 0 , 540/sympy.pi*(r*z)**4%5D,%0A %5D)%0A%0A # Do the inner product! - we are in cyl coordinates!%0A jTSj = j.T*Sig*j%0A ans = sympy.integrate(%0A sympy.integrate(%0A sympy.integrate(r * jTSj, (r,0,1)), # we are in cyl coordinates%0A (t,0,2.*sympy.pi)),%0A (z,0,1))%5B0%5D # The %60%5B0%5D%60 is to make it an int.%0A%0A def get_vectors(mesh):%0A %22%22%22 Get Vectors sig, sr. jx from sympy%22%22%22%0A%0A f_jr = sympy.lambdify((r,z), j%5B0%5D, 'numpy')%0A f_jz = sympy.lambdify((r,z), j%5B1%5D, 'numpy')%0A f_sigr = sympy.lambdify((r,z), Sig%5B0%5D, 'numpy')%0A f_sigz = sympy.lambdify((r,z), Sig%5B1%5D, 'numpy')%0A%0A jr = f_jr(mesh.gridFx%5B:,0%5D, mesh.gridFx%5B:,2%5D)%0A jz = f_jz(mesh.gridFz%5B:,0%5D, mesh.gridFz%5B:,2%5D)%0A sigr = f_sigr(mesh.gridCC%5B:,0%5D, mesh.gridCC%5B:,2%5D)%0A%0A return sigr, np.r_%5Bjr, jz%5D%0A%0A%0A n = 100.%0A mesh = Mesh.CylMesh(%5Bn, 1, n%5D)%0A%0A sig, jv = get_vectors(mesh)%0A MfSig = mesh.getFaceInnerProduct(sig)%0A numeric_ans = jv.T.dot(MfSig.dot(jv))%0A%0A print('------ Testing Face Inner Product-----------')%0A print(' Analytic: %7Banalytic%7D, Numeric: %7Bnumeric%7D'.format(analytic=ans, numeric=numeric_ans))%0A assert(np.abs(ans-numeric_ans) %3C TOL)%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
8ad86651a9d07984c0b1afb0ec7e400288ac6d2e
|
add pyRpc2
|
python/proto/pyRpc2/__init__.py
|
python/proto/pyRpc2/__init__.py
|
Python
| 0.999108
|
@@ -0,0 +1,1396 @@
+#!/usr/bin/env python%0A# -*- encoding: utf-8 -*-%0A#%0A# Copyright (c) 2016 ASMlover. All rights reserved.%0A#%0A# Redistribution and use in source and binary forms, with or without%0A# modification, are permitted provided that the following conditions%0A# are met:%0A#%0A# * Redistributions of source code must retain the above copyright%0A# notice, this list ofconditions and the following disclaimer.%0A#%0A# * Redistributions in binary form must reproduce the above copyright%0A# notice, this list of conditions and the following disclaimer in%0A# the documentation and/or other materialsprovided with the%0A# distribution.%0A#%0A# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS%0A# %22AS IS%22 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT%0A# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS%0A# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE%0A# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,%0A# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,%0A# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;%0A# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER%0A# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT%0A# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN%0A# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE%0A# POSSIBILITY OF SUCH DAMAGE.%0A%0A
|
|
89e158a7baf24666754b723695bc1f2a728f7ef1
|
Test initialization of categorical node
|
bayespy/inference/vmp/nodes/tests/test_categorical.py
|
bayespy/inference/vmp/nodes/tests/test_categorical.py
|
######################################################################
# Copyright (C) 2014 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Unit tests for `categorical` module.
"""
import numpy as np
import scipy
from bayespy.nodes import (Categorical,
Dirichlet,
Mixture,
Gamma)
from bayespy.utils import utils
from bayespy.utils import random
from bayespy.utils.utils import TestCase
class TestCategorical(TestCase):
"""
Unit tests for Categorical node
"""
def test_init(self):
"""
Test the creation of categorical nodes.
"""
# Some simple initializations
X = Categorical([0.1, 0.3, 0.6])
X = Categorical(Dirichlet([5,4,3]))
# Check that plates are correct
X = Categorical([0.1, 0.3, 0.6], plates=(3,4))
self.assertEqual(X.plates,
(3,4))
X = Categorical(0.25*np.ones((2,3,4)))
self.assertEqual(X.plates,
(2,3))
X = Categorical(Dirichlet([2,1,9], plates=(3,4)))
self.assertEqual(X.plates,
(3,4))
# Probabilities not a vector
self.assertRaises(ValueError,
Categorical,
0.5)
# Invalid probability
self.assertRaises(ValueError,
Categorical,
[-0.5, 1.5],
n=10)
self.assertRaises(ValueError,
Categorical,
[0.5, 1.5],
n=10)
# Inconsistent plates
self.assertRaises(ValueError,
Categorical,
0.25*np.ones((2,4)),
plates=(3,),
n=10)
# Explicit plates too small
self.assertRaises(ValueError,
Categorical,
0.25*np.ones((2,4)),
plates=(1,),
n=10)
pass
def test_moments(self):
"""
Test the moments of categorical nodes.
"""
# Simple test
X = Categorical([0.7,0.2,0.1])
u = X._message_to_child()
self.assertEqual(len(u), 1)
self.assertAllClose(u[0],
[0.7,0.2,0.1])
# Test plates in p
p = np.random.dirichlet([1,1], size=3)
X = Categorical(p)
u = X._message_to_child()
self.assertAllClose(u[0],
p)
# Test with Dirichlet prior
P = Dirichlet([7, 3])
logp = P._message_to_child()[0]
p0 = np.exp(logp[0]) / (np.exp(logp[0]) + np.exp(logp[1]))
p1 = np.exp(logp[1]) / (np.exp(logp[0]) + np.exp(logp[1]))
X = Categorical(P)
u = X._message_to_child()
p = np.array([p0, p1])
self.assertAllClose(u[0],
p)
# Test with broadcasted plates
P = Dirichlet([7, 3], plates=(10,))
X = Categorical(P)
u = X._message_to_child()
self.assertAllClose(u[0] * np.ones(X.get_shape(0)),
p*np.ones((10,1)))
pass
def test_observed(self):
"""
Test observed categorical nodes
"""
# Single observation
X = Categorical([0.7,0.2,0.1])
X.observe(2)
u = X._message_to_child()
self.assertAllClose(u[0],
[0,0,1])
# One plate axis
X = Categorical([0.7,0.2,0.1], plates=(2,))
X.observe([2,1])
u = X._message_to_child()
self.assertAllClose(u[0],
[[0,0,1],
[0,1,0]])
# Several plate axes
X = Categorical([0.7,0.1,0.1,0.1], plates=(2,3,))
X.observe([[2,1,1],
[0,2,3]])
u = X._message_to_child()
self.assertAllClose(u[0],
[ [[0,0,1,0],
[0,1,0,0],
[0,1,0,0]],
[[1,0,0,0],
[0,0,1,0],
[0,0,0,1]] ])
# Check invalid observations
X = Categorical([0.7,0.2,0.1])
self.assertRaises(ValueError,
X.observe,
-1)
self.assertRaises(ValueError,
X.observe,
3)
self.assertRaises(ValueError,
X.observe,
1.5)
pass
def test_constant(self):
"""
Test constant categorical nodes
"""
# Basic test
Y = Mixture(2, Gamma, [1, 2, 3], [1, 1, 1])
u = Y._message_to_child()
self.assertAllClose(u[0],
3/1)
# Test with one plate axis
alpha = [[1, 2, 3],
[4, 5, 6]]
Y = Mixture([2, 1], Gamma, alpha, 1)
u = Y._message_to_child()
self.assertAllClose(u[0],
[3, 5])
# Test with two plate axes
alpha = [ [[1, 2, 3],
[4, 5, 6]],
[[7, 8, 9],
[10, 11, 12]] ]
Y = Mixture([[2, 1], [0, 2]], Gamma, alpha, 1)
u = Y._message_to_child()
self.assertAllClose(u[0],
[[3, 5],
[7, 12]])
pass
|
Python
| 0
|
@@ -6503,24 +6503,468 @@
%5B7, 12%5D%5D)%0A%0A pass%0A
+%0A%0A def test_initialization(self):%0A %22%22%22%0A Test initialization of categorical nodes%0A %22%22%22%0A%0A # Test initialization from random%0A Z = Categorical(%5B%5B0.0, 1.0, 0.0%5D,%0A %5B0.0, 0.0, 1.0%5D%5D)%0A Z.initialize_from_random()%0A u = Z._message_to_child()%0A self.assertAllClose(u%5B0%5D,%0A %5B%5B0, 1, 0%5D,%0A %5B0, 0, 1%5D%5D)%0A %0A pass%0A
|
47c2a98d28c8e592035761b4ecfcd1026038fd14
|
Add an option to not automatically record interaction for gesture actions.
|
tools/telemetry/telemetry/page/actions/gesture_action.py
|
tools/telemetry/telemetry/page/actions/gesture_action.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page.actions import page_action
from telemetry.page.actions import wait
from telemetry import decorators
from telemetry.page.actions import action_runner
from telemetry.web_perf import timeline_interaction_record as tir_module
class GestureAction(page_action.PageAction):
def __init__(self, attributes=None):
super(GestureAction, self).__init__(attributes)
if hasattr(self, 'wait_after'):
self.wait_action = wait.WaitAction(self.wait_after)
else:
self.wait_action = None
assert self.wait_until is None or self.wait_action is None, (
'gesture cannot have wait_after and wait_until at the same time.')
def RunAction(self, page, tab):
runner = action_runner.ActionRunner(None, tab)
if self.wait_action:
interaction_name = 'Action_%s' % self.__class__.__name__
else:
interaction_name = 'Gesture_%s' % self.__class__.__name__
runner.BeginInteraction(interaction_name, [tir_module.IS_SMOOTH])
self.RunGesture(page, tab)
if self.wait_action:
self.wait_action.RunAction(page, tab)
runner.EndInteraction(interaction_name, [tir_module.IS_SMOOTH])
def RunGesture(self, page, tab):
raise NotImplementedError()
@staticmethod
def GetGestureSourceTypeFromOptions(tab):
gesture_source_type = tab.browser.synthetic_gesture_source_type
return 'chrome.gpuBenchmarking.' + gesture_source_type.upper() + '_INPUT'
@staticmethod
@decorators.Cache
def IsGestureSourceTypeSupported(tab, gesture_source_type):
# TODO(dominikg): remove once support for
# 'chrome.gpuBenchmarking.gestureSourceTypeSupported' has
# been rolled into reference build.
if tab.EvaluateJavaScript("""
typeof chrome.gpuBenchmarking.gestureSourceTypeSupported ===
'undefined'"""):
return True
return tab.EvaluateJavaScript("""
chrome.gpuBenchmarking.gestureSourceTypeSupported(
chrome.gpuBenchmarking.%s_INPUT)"""
% (gesture_source_type.upper()))
|
Python
| 0.000018
|
@@ -535,16 +535,130 @@
ibutes)%0A
+ if not hasattr(self, 'automatically_record_interaction'):%0A self.automatically_record_interaction = True%0A%0A
if h
@@ -1167,24 +1167,73 @@
__.__name__%0A
+%0A if self.automatically_record_interaction:%0A
runner.B
@@ -1290,16 +1290,17 @@
MOOTH%5D)%0A
+%0A
self
@@ -1387,24 +1387,73 @@
(page, tab)%0A
+%0A if self.automatically_record_interaction:%0A
runner.E
|
20a191ad9325909434a6ca806ef69c515cbce6a8
|
add new package (#24749)
|
var/spack/repos/builtin/packages/py-neurokit2/package.py
|
var/spack/repos/builtin/packages/py-neurokit2/package.py
|
Python
| 0
|
@@ -0,0 +1,1183 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass PyNeurokit2(PythonPackage):%0A %22%22%22The Python Toolbox for Neurophysiological Signal Processing.%0A%0A This package is the continuation of NeuroKit 1. It's a user-friendly%0A package providing easy access to advanced biosignal processing routines.%0A Researchers and clinicians without extensive knowledge of programming or%0A biomedical signal processing can analyze physiological data with only two%0A lines of code.%0A %22%22%22%0A%0A homepage = %22https://github.com/neuropsychology/NeuroKit%22%0A pypi = %22neurokit2/neurokit2-0.1.2.tar.gz%22%0A%0A version('0.1.2', sha256='5ef40037c2d7078ecb713ab0b77b850267babf133856b59595de9613f29787bc')%0A%0A depends_on('py-setuptools@040.6.0:', type='build')%0A depends_on('py-numpy', type=('build', 'run'))%0A depends_on('py-pandas', type=('build', 'run'))%0A depends_on('py-scipy', type=('build', 'run'))%0A depends_on('py-scikit-learn', type=('build', 'run'))%0A depends_on('py-matplotlib', type=('build', 'run'))%0A
|
|
2322b349aac06395382d26a95b5d965ab0f0b326
|
Test save, load functionality in Statespace
|
statsmodels/tsa/statespace/tests/test_save.py
|
statsmodels/tsa/statespace/tests/test_save.py
|
Python
| 0
|
@@ -0,0 +1,2295 @@
+%22%22%22%0ATests of save / load / remove_data state space functionality.%0A%22%22%22%0A%0Afrom __future__ import division, absolute_import, print_function%0A%0Aimport numpy as np%0Aimport pandas as pd%0Aimport os%0A%0Afrom statsmodels import datasets%0Afrom statsmodels.tsa.statespace import (sarimax, structural, varmax,%0A dynamic_factor)%0Afrom numpy.testing import assert_allclose%0Amacrodata = datasets.macrodata.load_pandas().data%0A%0A%0Adef test_sarimax():%0A mod = sarimax.SARIMAX(macrodata%5B'realgdp'%5D.values, order=(4, 1, 0))%0A res = mod.smooth(mod.start_params)%0A res.summary()%0A res.save('test_save_sarimax.p')%0A res2 = sarimax.SARIMAXResults.load('test_save_sarimax.p')%0A assert_allclose(res.params, res2.params)%0A assert_allclose(res.bse, res2.bse)%0A assert_allclose(res.llf, res2.llf)%0A os.unlink('test_save_sarimax.p')%0A%0A%0Adef test_structural():%0A mod = structural.UnobservedComponents(%0A macrodata%5B'realgdp'%5D.values, 'llevel')%0A res = mod.smooth(mod.start_params)%0A res.summary()%0A res.save('test_save_structural.p')%0A res2 = structural.UnobservedComponentsResults.load(%0A 'test_save_structural.p')%0A assert_allclose(res.params, res2.params)%0A assert_allclose(res.bse, res2.bse)%0A assert_allclose(res.llf, res2.llf)%0A os.unlink('test_save_structural.p')%0A%0A%0Adef test_dynamic_factor():%0A mod = dynamic_factor.DynamicFactor(%0A macrodata%5B%5B'realgdp', 'realcons'%5D%5D.diff().iloc%5B1:%5D.values, k_factors=1,%0A factor_order=1)%0A res = mod.smooth(mod.start_params)%0A res.summary()%0A res.save('test_save_dynamic_factor.p')%0A res2 = dynamic_factor.DynamicFactorResults.load(%0A 'test_save_dynamic_factor.p')%0A assert_allclose(res.params, res2.params)%0A assert_allclose(res.bse, res2.bse)%0A assert_allclose(res.llf, res2.llf)%0A os.unlink('test_save_dynamic_factor.p')%0A%0A%0Adef test_varmax():%0A mod = varmax.VARMAX(%0A macrodata%5B%5B'realgdp', 'realcons'%5D%5D.diff().iloc%5B1:%5D.values,%0A order=(1, 0))%0A res = 
mod.smooth(mod.start_params)%0A res.summary()%0A res.save('test_save_varmax.p')%0A res2 = varmax.VARMAXResults.load(%0A 'test_save_varmax.p')%0A assert_allclose(res.params, res2.params)%0A assert_allclose(res.bse, res2.bse)%0A assert_allclose(res.llf, res2.llf)%0A os.unlink('test_save_varmax.p')%0A
|
|
58624ba3b267fdc0e1ae6d8509c0a1315f22c22f
|
Initialize P4_autoDownloadTorrent
|
books/AutomateTheBoringStuffWithPython/Chapter16/PracticeProjects/P4_autoDownloadTorrent.py
|
books/AutomateTheBoringStuffWithPython/Chapter16/PracticeProjects/P4_autoDownloadTorrent.py
|
Python
| 0.000004
|
@@ -0,0 +1,741 @@
+# Write a program that checks an email account every 15 minutes for any instructions%0A# you email it and executes those instructions automatically.%0A#%0A# For example, BitTorrent is a peer-to-peer downloading system. Using free BitTorrent%0A# software such as qBittorrent, you can download large media files on your home computer.%0A# If you email the program a (completely legal, not at all piratical) BitTorrent link,%0A# the program will eventually check its email, find this message, extract the link, and%0A# then launch qBittorrent to start downloading the file. This way, you can have your%0A# home computer begin downloads while you%E2%80%99re away, and the (completely legal, not at%0A# all piratical) download can be finished by the time you return home.%0A
|
|
db2135d269058ed381239e725797322b95072d3f
|
Predict some data similar to problem on assignment
|
outlier_detection/svm_classification_with_synthetic_data.py
|
outlier_detection/svm_classification_with_synthetic_data.py
|
Python
| 0.999856
|
@@ -0,0 +1,1017 @@
+import numpy as np%0Afrom matplotlib import pyplot as plt%0Aimport matplotlib.font_manager%0Afrom sklearn import svm%0A%0A%0Adef main():%0A tests = 20%0A%0A # Generate train data%0A X = (np.random.randn(120, 2) * %0A np.array(%5B0.08, 0.02%5D) + %0A np.array(%5B0.3, 0.6%5D))%0A%0A X_train = X%5B:-tests%5D%0A X_test = X%5B-tests:%5D%0A X_outliers = np.copy(X_test)%0A X_outliers = (X_outliers + %0A np.random.uniform(low=-0.1, high=0.1, size=(tests, 2)))%0A%0A # fit the model%0A clf = svm.OneClassSVM(nu=0.1, kernel='rbf', gamma=0.1)%0A clf.fit(X_train)%0A%0A y_pred_train = clf.predict(X_train)%0A y_pred_test = clf.predict(X_test)%0A y_pred_outliers = clf.predict(X_outliers)%0A %0A print(y_pred_test)%0A print(y_pred_outliers)%0A%0A s = 40%0A plt.scatter(X_train%5B:, 0%5D, X_train%5B:, 1%5D, c='white', s=s)%0A plt.scatter(X_test%5B:, 0%5D, X_test%5B:, 1%5D, c='blueviolet', s=s)%0A plt.scatter(X_outliers%5B:, 0%5D, X_outliers%5B:, 1%5D, c='gold', s=s)%0A%0A plt.axis('equal')%0A plt.show()%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
a2f20be78ad54a6fe118b197cc416dcfdfb6dddf
|
add tf test file
|
TF-Demo/AlexNetDemo/test_tf.py
|
TF-Demo/AlexNetDemo/test_tf.py
|
Python
| 0
|
@@ -0,0 +1,726 @@
+#!/usr/bin/python%0A# -*- coding: utf-8 -*-%0A# Author: violinsolo%0A# Created on 28/12/2017%0A%0Aimport tensorflow as tf%0Aimport numpy as np%0A%0Ax = %5B%5D%0Afor i in range(0, 20):%0A x += %5Bi%5D%0A%0Aprint x%0A%0A# trans to float32%0Ax1 = np.asarray(x, dtype=np.float32)%0Aprint 'new x:'%0Aprint x1%0A%0Awith tf.Session() as sess:%0A m = np.reshape(x, %5B-1, 5%5D)%0A print 'int m: %5B%25s%5D' %25 (str(m.shape))%0A print m%0A print sess.run(tf.reduce_mean(m))%0A print sess.run(tf.reduce_mean(m, axis=0))%0A print sess.run(tf.reduce_mean(m, axis=1))%0A%0A m = np.reshape(x1, %5B-1, 5%5D)%0A print 'float m: %5B%25s%5D' %25 (str(m.shape))%0A print m%0A print sess.run(tf.reduce_mean(m))%0A print sess.run(tf.reduce_mean(m, axis=0))%0A print sess.run(tf.reduce_mean(m, axis=1))%0A%0A
|
|
404f95c637590e6fa5b51a3246c5bdc56a602ba4
|
Add an e2e test
|
test_caniusepython3.py
|
test_caniusepython3.py
|
# Copyright 2014 Google Inc. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import caniusepython3 as ciu
import tempfile
import unittest
EXAMPLE_REQUIREMENTS = """
FooProject >= 1.2
Fizzy [foo, bar]
PickyThing<1.6,>1.9,!=1.9.6,<2.0a0,==2.4c1
Hello
"""
EXAMPLE_METADATA = """Metadata-Version: 1.2
Name: CLVault
Version: 0.5
Summary: Command-Line utility to store and retrieve passwords
Home-page: http://bitbucket.org/tarek/clvault
Author: Tarek Ziade
Author-email: tarek@ziade.org
License: PSF
Keywords: keyring,password,crypt
Requires-Dist: foo; sys.platform == 'okook'
Requires-Dist: bar
Platform: UNKNOWN
"""
class CLITests(unittest.TestCase):
expected_requirements = {'FooProject', 'Fizzy', 'PickyThing', 'Hello'}
expected_metadata = {'foo', 'bar'}
def test_requirements(self):
got = ciu.projects_from_requirements(EXAMPLE_REQUIREMENTS)
self.assertEqual(set(got), self.expected_requirements)
def test_metadata(self):
got = ciu.projects_from_metadata(EXAMPLE_METADATA)
self.assertEqual(set(got), self.expected_metadata)
def test_cli_for_requirements(self):
with tempfile.NamedTemporaryFile('w') as file:
file.write(EXAMPLE_REQUIREMENTS)
file.flush()
args = ['--requirements', file.name]
got = ciu.projects_from_cli(args)
self.assertEqual(set(got), self.expected_requirements)
def test_cli_for_metadata(self):
with tempfile.NamedTemporaryFile('w') as file:
file.write(EXAMPLE_METADATA)
file.flush()
args = ['--metadata', file.name]
got = ciu.projects_from_cli(args)
self.assertEqual(set(got), self.expected_metadata)
def test_cli_for_projects(self):
args = ['--projects', 'foo,bar']
got = ciu.projects_from_cli(args)
self.assertEqual(set(got), {'foo', 'bar'})
def test_message_plural(self):
blockers = [['A'], ['B']]
messages = ciu.message(blockers)
self.assertEqual(2, len(messages))
want = 'You need 2 projects to transition to Python 3.'
self.assertEqual(messages[0], want)
want = ('Of those 2 projects, 2 have no direct dependencies blocking '
'their transition:')
self.assertEqual(messages[1], want)
def test_message_plural(self):
blockers = [['A']]
messages = ciu.message(blockers)
self.assertEqual(2, len(messages))
want = 'You need 1 project to transition to Python 3.'
self.assertEqual(messages[0], want)
want = ('Of that 1 project, 1 has no direct dependencies blocking '
'its transition:')
self.assertEqual(messages[1], want)
def test_message_no_blockers(self):
messages = ciu.message([])
self.assertEqual(
['You have 0 projects blocking you from using Python 3!'],
messages)
class NameTests(unittest.TestCase):
def test_simple(self):
want = 'simple-name_with.everything-separator_known'
got = ciu.just_name(want)
self.assertEqual(got, want)
def test_requirements(self):
want = 'project.name'
got = ciu.just_name(want + '>=2.0.1')
self.assertEqual(got, want)
def test_bad_requirements(self):
# From the OpenStack requirements file:
# https://raw2.github.com/openstack/requirements/master/global-requirements.txt
want = 'warlock'
got = ciu.just_name(want + '>1.01<2')
self.assertEqual(got, want)
def test_metadata(self):
want = 'foo'
got = ciu.just_name("foo; sys.platform == 'okook'")
self.assertEqual(got, want)
class GraphResolutionTests(unittest.TestCase):
def test_all_projects_okay(self):
# A, B, and C are fine on their own.
self.assertEqual(set(), ciu.reasons_to_paths({}))
def test_leaf_okay(self):
# A -> B where B is okay.
reasons = {'A': None}
self.assertEqual({('A',)}, ciu.reasons_to_paths(reasons))
def test_leaf_bad(self):
# A -> B -> C where all projects are bad.
reasons = {'A': None, 'B': 'A', 'C': 'B'}
self.assertEqual({('C', 'B', 'A')}, ciu.reasons_to_paths(reasons))
class NetworkTests(unittest.TestCase):
def test_all_py3_projects(self):
projects = ciu.all_py3_projects()
self.assertGreater(len(projects), 3000)
self.assertTrue(all(project == project.lower() for project in projects))
self.assertTrue(ciu.OVERRIDES.issubset(projects))
if __name__ == '__main__':
unittest.main()
|
Python
| 0.000741
|
@@ -617,16 +617,26 @@
as ciu%0A%0A
+import io%0A
import t
@@ -659,16 +659,37 @@
nittest%0A
+import unittest.mock%0A
%0A%0AEXAMPL
@@ -5090,16 +5090,162 @@
ects))%0A%0A
+ @unittest.mock.patch('sys.stdout', io.StringIO())%0A def test_e2e(self):%0A ciu.main(%5B'--projects', 'numpy,scipy,matplotlib,ipython'%5D)%0A%0A
%0Aif __na
|
0e2a2b8c173d382da9e256a91b10e267228bbd2c
|
Update import_timeseries.py
|
ddsc_core/management/commands/import_timeseries.py
|
ddsc_core/management/commands/import_timeseries.py
|
#from adaptor.model import CsvDbModel
from django.core.management.base import BaseCommand
from ddsc_core.models.models import Timeseries, Source, Location, IdMapping
from django.contrib.auth.models import User
from ddsc_core.models.aquo import Unit, Parameter, Compartment, MeasuringDevice, MeasuringMethod
from ddsc_core.models.aquo import ReferenceFrame, ProcessingMethod
from lizard_security import DataOwner
import string
#from ddsc_core.models import Location
import csv
class Command(BaseCommand):
args = '<CSV/sql file>'
help = 'Imports a CSV file of timeseries into the database.'
def handle(self, *args, **options):
dataowner = args[1]
do, created = DataOwner.objects.get_or_create(name=dataowner)
i = Timeseries.objects.count() + 1
with open(args[0], 'rb') as f:
reader = csv.reader(f)
for row in reader:
if len(row) >= 0 :
uuid = row[0]
name = row[1]
description = row[2]
type = string.lower(row[3])
def f(x):
return {
'integer': 0,
'float': 1,
'text': 4,
'image': 5,
'geo_remote_sensing': 7,
'movie': 8,
'file': 10,
}.get(x, 1)
value_type = f(type)
try:
lc = Location.objects.get(uuid=row[7])
except:
lc = Location.objects.get(name='unknown')
location_id = lc.id
try:
pr = Parameter.objects.get(code=row[8].split('-')[0])
except:
pr = Parameter.objects.get(code='unknown')
parameter_id = pr.id
try:
ut = Unit.objects.get(code=row[9])
except:
try:
ut = Unit.objects.get(description=row[9])
except:
ut = Unit.objects.get(code='unknown')
unit_id = ut.id
try:
rf = ReferenceFrame.objects.get(code=row[10].split('-')[0])
except:
rf = ReferenceFrame.objects.get(code='unknown')
reference_f_id = rf.id
try:
cp = Compartment.objects.get(code=row[11].split('-')[0])
except:
cp = Compartment.objects.get(code='unknown')
compartment_id = cp.id
try:
md = MeasuringDevice.objects.get(code=row[12].split('-')[0])
except:
md = MeasuringDevice.objects.get(code='unknown')
measuring_device_id = md.id
try:
mm = MeasuringMethod.objects.get(code=row[13].split('-')[0])
except:
mm = MeasuringMethod.objects.get(code='unknown')
measuring_method_id = mm.id
try:
pm = ProcessingMethod.objects.get(code=row[14].split('-')[0])
except:
pm = ProcessingMethod.objects.get(code='unknown')
processing_method_id = pm.id
try:
src = Source.objects.get(uuid=row[6])
except:
src = Source.objects.get(uuid='unknown')
source_id = src.id
Timeseries.objects.create(name=name, description=description, value_type=value_type,
location_id=location_id, parameter_id=parameter_id,
unit_id=unit_id, reference_frame_id = reference_f_id,
compartment_id=compartment_id, measuring_device_id=measuring_device_id,
measuring_method_id=measuring_method_id, id=i, uuid=uuid,
processing_method_id=processing_method_id, source_id=source_id)
usr = row[4]
remote_id = row[5]
ts = Timeseries.objects.get(uuid=uuid)
user = User.object.get(username=usr)
IdMapping.objects.create(user_id=user.pk, remote_id=remote_id, timeseries_id=ts.pk)
ts.owner_id = do.pk
ts.save()
i += 1
print "completeted~!"
|
Python
| 0.000001
|
@@ -393,16 +393,23 @@
security
+.models
import
|
f81a8d33c4865f51750ae4168e0646979e6eb262
|
Translate original pseudocode algorithms
|
hearsay.py
|
hearsay.py
|
Python
| 0.999999
|
@@ -0,0 +1,2100 @@
+__all__ = %5B'Detect', 'DistToReference', 'Dist', 'ProbClass'%5D%0A%0A%0Aimport math%0A%0A%0Adef Detect(s_inf, N_obs, R_pos, R_neg, gamma=1, theta=1, D_req=1):%0A %22%22%22Algorithm 1%0A%0A Perform online binary classification on the infinite stream s_inf using%0A sets of positive and negative reference signals R_pos and R_neg.%0A %22%22%22%0A%0A ConsecutiveDetections = 0%0A s = %5B%5D%0A i = -1%0A%0A def UpdateObservation(s_inf, N_obs):%0A try:%0A s_i = s_inf.next()%0A s.append(s_i)%0A except StopIteration as e:%0A return%0A if len(s) %3E N_obs:%0A del s%5B:-N_obs%5D%0A return s%0A%0A while True:%0A i += 1%0A s = UpdateObservation(s_inf, N_obs)%0A if s is None:%0A return%0A elif len(s) %3C N_obs:%0A continue%0A PosDists, NegDists = %5B%5D, %5B%5D%0A for r in R_pos:%0A PosDists.append(DistToReference(s, r))%0A for r in R_neg:%0A NegDists.append(DistToReference(s, r))%0A R = ProbClass(PosDists, gamma) / ProbClass(NegDists, gamma)%0A if R %3E= theta:%0A ConsecutiveDetections += 1%0A if ConsecutiveDetections %3E= D_req:%0A return i%0A else:%0A ConsecutiveDetections = 0%0A%0A%0Adef DistToReference(s, r):%0A %22%22%22Algorithm 2%0A%0A Compute the minimum distance between s and all pieces of r of the same%0A length as s.%0A %22%22%22%0A%0A N_obs = len(s)%0A N_ref = len(r)%0A MinDist = float('inf')%0A for i in range(N_ref - N_obs + 1):%0A MinDist = min(MinDist, Dist(r%5Bi:(i + N_obs)%5D, s))%0A return MinDist%0A%0A%0Adef Dist(s, t):%0A %22%22%22Algorithm 3%0A%0A Compute the distance between two signals s and t of the same length.%0A %22%22%22%0A%0A D = 0%0A for i in range(len(s)):%0A D += (s%5Bi%5D - t%5Bi%5D) ** 2%0A return D%0A%0A%0Adef ProbClass(Dists, gamma=1):%0A %22%22%22Algorithm 4%0A%0A Using the distances Dists of an observation to the reference signals of a%0A certain class, compute a number proportional to the probability that the%0A observation belongs to that class.%0A %22%22%22%0A%0A P = 0%0A for i in range(len(Dists)):%0A P += math.exp(-gamma * 
Dists%5Bi%5D)%0A return P%0A
|
|
9ad5cf7a663b83b725f0ae19d5190e0d6634fbb4
|
exhaustive n by m test
|
blaze/api/tests/test_into_exhaustive.py
|
blaze/api/tests/test_into_exhaustive.py
|
Python
| 0.999616
|
@@ -0,0 +1,967 @@
+from __future__ import absolute_import, division, print_function%0A%0Afrom dynd import nd%0Aimport numpy as np%0Afrom pandas import DataFrame%0A%0Afrom blaze.api.into import into, discover%0Afrom datashape import dshape%0Aimport blaze%0Afrom blaze import Table%0Aimport bcolz%0A%0A%0AL = %5B(1, 'Alice', 100),%0A (2, 'Bob', 200),%0A (3, 'Charlie', 300)%5D%0A%0Adf = DataFrame(L, columns=%5B'id', 'name', 'amount'%5D)%0A%0Ax = np.array(L, dtype=%5B('id', 'i8'), ('name', 'S7'), ('amount', 'i8')%5D)%0A%0Aarr = nd.array(L, dtype='%7Bid: int64, name: string, amount: int64%7D')%0A%0Abc = bcolz.ctable(%5Bnp.array(%5B1, 2, 3%5D, dtype=np.int64),%0A np.array(%5B'Alice', 'Bob', 'Charlie'%5D, dtype='S7'),%0A np.array(%5B100, 200, 300%5D, dtype=np.int64)%5D,%0A names=%5B'id', 'name', 'amount'%5D)%0A%0Adef test_base():%0A A = %5BTable(L, columns=%5B'id', 'name', 'amount'%5D), df, x, arr, bc%5D%0A B = %5BL, df, x, arr, bc%5D%0A for a in A:%0A for b in B:%0A assert str(into(type(b), a)) == str(b)%0A
|
|
5856ceb23cf639ee1cc3ea45d81a1917c0ef031d
|
Make a pnacl-finalize tool, that runs the final steps for pnacl ABI stability.
|
pnacl/driver/pnacl-finalize.py
|
pnacl/driver/pnacl-finalize.py
|
Python
| 0.000665
|
@@ -0,0 +1,1826 @@
+#!/usr/bin/python%0A# Copyright (c) 2013 The Native Client Authors. All rights reserved.%0A# Use of this source code is governed by a BSD-style license that can be%0A# found in the LICENSE file.%0A#%0A# IMPORTANT NOTE: If you make local mods to this file, you must run:%0A# %25 pnacl/build.sh driver%0A# in order for them to take effect in the scons build. This command%0A# updates the copy in the toolchain/ tree.%0A#%0A%0Aimport driver_tools%0Aimport pathtools%0Afrom driver_env import env%0Afrom driver_log import Log%0A%0AEXTRA_ENV = %7B%0A 'INPUTS' : '',%0A 'OUTPUT' : '',%0A 'OPT_FLAGS' : '-disable-opt -strip',%0A 'RUN_OPT' : '$%7BLLVM_OPT%7D $%7BOPT_FLAGS%7D $%7Binput%7D -o $%7Boutput%7D',%0A%7D%0A%0APrepPatterns = %5B%0A ( ('-o','(.*)'), %22env.set('OUTPUT', pathtools.normalize($0))%22),%0A ( '(-.*)', driver_tools.UnrecognizedOption),%0A ( '(.*)', %22env.append('INPUTS', pathtools.normalize($0))%22),%0A%5D%0A%0Adef main(argv):%0A env.update(EXTRA_ENV)%0A driver_tools.ParseArgs(argv, PrepPatterns)%0A inputs = env.get('INPUTS')%0A output = env.getone('OUTPUT')%0A%0A if len(inputs) != 1:%0A Log.Fatal('Can only have one input')%0A%0A # Allow in-place file changes if output isn't specified..%0A if output != '':%0A f_output = output%0A else:%0A f_output = inputs%5B0%5D%0A%0A # Transform the file, and re-wrap the result.%0A driver_tools.RunWithEnv('$%7BRUN_OPT%7D', input=inputs%5B0%5D, output=f_output)%0A driver_tools.WrapBitcode(f_output)%0A return 0%0A%0A%0Adef get_help(unused_argv):%0A script = env.getone('SCRIPT_NAME')%0A return %22%22%22Usage: %25s %3Coptions%3E in-file%0A This tool prepares a PNaCl bitcode application for ABI stability.%0A%0A The options are:%0A -h --help Display this output%0A -o %3Cfile%3E Place the output into %3Cfile%3E. Otherwise, the%0A input file is modified in-place.%0A%22%22%22 %25 script%0A
|
|
7f65c70b786024e8213c56448f8d715bda8c0197
|
add jsonrpc
|
skitai/saddle/jsonrpc_executor.py
|
skitai/saddle/jsonrpc_executor.py
|
Python
| 0.000002
|
@@ -0,0 +1,2278 @@
+from . import wsgi_executor%0D%0Atry:%0D%0A import jsonrpclib%0D%0Aexcept ImportError:%0D%0A pass%0D%0Afrom aquests.protocols.http import respcodes%0D%0A%0D%0Aclass Executor (wsgi_executor.Executor): %0D%0A def __call__ (self):%0D%0A request = self.env %5B%22skitai.was%22%5D.request%0D%0A %0D%0A data = self.env %5B%22wsgi.input%22%5D.read ()%0D%0A args = jsonrpclib.loads (data)%0D%0A%0D%0A is_multicall = False %0D%0A jsonrpc = %222.0%22%0D%0A path = %22%22%0D%0A %0D%0A if type (args) == type (%5B%5D):%0D%0A is_multicall = True%0D%0A thunks = %5B%5D%0D%0A for each in args:%0D%0A thunks.append ((each %5B%22method%22%5D, each.get (%22params%22, %5B%5D), each %5B'id'%5D, each %5B'jsonrpc'%5D))%0D%0A %0D%0A else:%0D%0A thunks = %5B(args %5B%22method%22%5D, args.get (%22params%22, %5B%5D), args %5B%22id%22%5D, args %5B%22jsonrpc%22%5D)%5D%0D%0A %0D%0A self.build_was ()%0D%0A %0D%0A results = %5B%5D %0D%0A for _method, _args, _rpcid, _jsonrpc in thunks:%0D%0A path_info = self.env %5B%22PATH_INFO%22%5D = %22/%22 + _method.replace (%22.%22, %22/%22) %0D%0A current_app, thing, param, respcode = self.find_method (request, path_info, is_multicall is False) %0D%0A if respcode: %0D%0A results.append (jsonrpclib.dumps (jsonrpclib.Fault (1, respcodes.get (respcode, %22Undefined Error%22)), rpcid = _rpcid, version = _jsonrpc))%0D%0A %0D%0A self.was.subapp = current_app%0D%0A try:%0D%0A result = self.chained_exec (thing, _args, %7B%7D)%0D%0A except:%0D%0A results.append (jsonrpclib.dumps (jsonrpclib.Fault (1, self.was.app.debug and wsgi_executor.traceback () or %22Error Occured%22))) %0D%0A else:%0D%0A result = jsonrpclib.dumps (%0D%0A result, methodresponse = True,%0D%0A rpcid = _rpcid, version = _jsonrpc%0D%0A )%0D%0A results.append (result)%0D%0A del self.was.subapp%0D%0A %0D%0A self.commit ()%0D%0A self.was.response %5B%22Content-Type%22%5D = %22application/json-rpc%22%0D%0A %0D%0A del self.was.env %0D%0A if len (results) == 1:%0D%0A results = results %5B0%5D%0D%0A else:%0D%0A 
results = %22%5B%22 + %22,%22.join (results) + %22%5D%22 %0D%0A return results%0D%0A
|
|
26ff3cbfcd9aee35da3645573c01717518467e8d
|
Create main.py
|
unit-3-mixed-reading-and-assignment-lessons/lesson-4-assignment-multiple-code-blocks/main.py
|
unit-3-mixed-reading-and-assignment-lessons/lesson-4-assignment-multiple-code-blocks/main.py
|
Python
| 0.000001
|
@@ -0,0 +1,381 @@
+class Operation(object):%0A def __init__(self, *args):%0A # Do something here%0A pass%0A%0A def operate(self):%0A raise NotImplementedError()%0A%0A%0Aclass AddOperation(Operation):%0A # The only method present in this class%0A def operate(self):%0A pass%0A%0A%0Aclass SubtractOperation(Operation):%0A def operate(self):%0A pass%0A%0A%0Aclass Calculator(object):%0A pass%0A
|
|
c95772e8b3119f464dba4b8fd864812a525a4379
|
add tests
|
tests/test_core.py
|
tests/test_core.py
|
Python
| 0
|
@@ -0,0 +1,593 @@
+# -*- coding: utf-8 -*-%0Afrom saltmill import Mill%0Afrom pepper import PepperException%0Aimport pytest%0A%0A%0Adef test_login():%0A mill = Mill()%0A mill.login()%0A%0Adef test_auto_login():%0A mill = Mill()%0A MSG = 'This is a test.'%0A ret = mill.local('*', 'test.echo',MSG)%0A assert len(ret%5B'return'%5D%5B0%5D) %3E 0%0A for salt_id, msg in ret%5B'return'%5D%5B0%5D.iteritems():%0A assert msg == MSG%0A%0Adef test_renew_auth_token():%0A mill = Mill()%0A mill.login()%0A%0A mill.auth%5B'token'%5D = 'invalid'%0A MSG = 'This is a test.'%0A ret = mill.local('*', 'test.echo',MSG)%0A assert len(ret%5B'return'%5D%5B0%5D) %3E 0%0A
|
|
83cfb4d135b5eb3eaa4efb3f74ce13d44afb4c5a
|
Add a test for __main__
|
tests/test_main.py
|
tests/test_main.py
|
Python
| 0.00053
|
@@ -0,0 +1,168 @@
+import pytest%0A%0Afrom cutadapt.__main__ import main%0A%0A%0Adef test_help():%0A with pytest.raises(SystemExit) as e:%0A main(%5B%22--help%22%5D)%0A assert e.value.args%5B0%5D == 0%0A%0A
|
|
0a5ab42263c508cb46e59dfeab00b6b3c2591120
|
Fix message
|
tests/twisted/connect/test-nonblocking-tls.py
|
tests/twisted/connect/test-nonblocking-tls.py
|
"""
Test connecting to a server with 2 accounts. Check one account does not block
the second account.
"""
import os
import sys
import dbus
import servicetest
import twisted
from twisted.words.xish import domish, xpath
from twisted.words.protocols.jabber import xmlstream
from gabbletest import make_connection, make_stream, JabberAuthenticator, \
XmppAuthenticator, \
XmppXmlStream, JabberXmlStream
NS_XMPP_TLS = 'urn:ietf:params:xml:ns:xmpp-tls'
NS_XMPP_SASL = 'urn:ietf:params:xml:ns:xmpp-sasl'
# Everything below this point is intentionally never executed: the
# unconditional SystemExit skips the test while the Loudmouth bug is
# open (exit status 77 is automake's "skipped" convention).
print "FIXME: test-connect-twice.py disabled due to a bug in Loudmouth:"
print "       http://developer.imendio.com/issues/browse/LM-44"
print "       https://bugs.freedesktop.org/show_bug.cgi?id=14341"
# exiting 77 causes automake to consider the test to have been skipped
raise SystemExit(77)
class BlockForeverTlsAuthenticator(xmlstream.Authenticator):
    """A TLS stream authenticator that is deliberately broken.

    It sends <proceed/> to the client but then does nothing, so the TLS
    handshake can never complete.  Useful for testing regression of bug
    #14341.
    """
    def __init__(self, username, password):
        # Credentials are stored but never used: auth() stalls before
        # any SASL exchange takes place.
        xmlstream.Authenticator.__init__(self)
        self.username = username
        self.password = password
        self.authenticated = False

    def streamStarted(self, root=None):
        # Advertise SASL DIGEST-MD5 plus a *required* STARTTLS feature,
        # forcing the client down the (stalled) TLS path.
        if root:
            self.xmlstream.sid = root.getAttribute('id')

        self.xmlstream.sendHeader()

        features = domish.Element((xmlstream.NS_STREAMS, 'features'))
        mechanisms = features.addElement((NS_XMPP_SASL, 'mechanisms'))
        mechanism = mechanisms.addElement('mechanism', content='DIGEST-MD5')
        starttls = features.addElement((NS_XMPP_TLS, 'starttls'))
        starttls.addElement('required')
        self.xmlstream.send(features)
        self.xmlstream.addOnetimeObserver("/starttls", self.auth)

    def auth(self, auth):
        # Tell the client to proceed with TLS, then bail out on purpose
        # so the handshake hangs forever.
        proceed = domish.Element((NS_XMPP_TLS, 'proceed'))
        self.xmlstream.send(proceed)
        return; # auth blocks
        # The two statements below are intentionally unreachable.
        self.xmlstream.reset()
        self.authenticated = True
def test(q, bus, conn1, conn2, stream1, stream2):
    """Drive both connections.

    conn1 stalls in the TLS handshake (its fake server never completes
    it), while conn2 must still connect, authenticate and disconnect
    normally.  Returns True on success.
    """
    # Connection 1
    conn1.Connect()
    q.expect('dbus-signal', signal='StatusChanged', args=[1, 1])

    # Connection 1 blocks because the fake jabber server behind conn1 does not
    # proceed to the tls handshake. The second connection is independent and
    # should work.

    # Connection 2
    conn2.Connect()
    q.expect('dbus-signal', signal='StatusChanged', args=[1, 1])
    q.expect('stream-authenticated')
    q.expect('dbus-signal', signal='PresenceUpdate',
        args=[{1L: (0L, {u'available': {}})}])
    q.expect('dbus-signal', signal='StatusChanged', args=[0, 1])

    # Disconnection 2
    conn2.Disconnect()
    q.expect('dbus-signal', signal='StatusChanged', args=[2, 1])

    return True
# Stand-alone driver: build two fake XMPP servers (one with the broken
# TLS authenticator, one that authenticates normally), wire up the two
# connections, then run test() above.
if __name__ == '__main__':
    queue = servicetest.IteratingEventQueue(None)
    # Verbose event logging via environment variable or the -v flag.
    queue.verbose = (
        os.environ.get('CHECK_TWISTED_VERBOSE', '') != ''
        or '-v' in sys.argv)

    bus = dbus.SessionBus()

    # First account: served by the deliberately broken TLS authenticator
    # on port 4242, so its connection stalls.
    params = {
        'account': 'test1@localhost/Resource',
        'password': 'pass',
        'resource': 'Resource',
        'server': 'localhost',
        'port': dbus.UInt32(4242),
        }
    conn1 = make_connection(bus, queue.append, params)
    authenticator = BlockForeverTlsAuthenticator('test1', 'pass')
    stream1 = make_stream(queue.append, authenticator, protocol=XmppXmlStream,
                          port=4242)

    # Second account: normal XMPP authentication on a different port.
    params = {
        'account': 'test2@localhost/Resource',
        'password': 'pass',
        'resource': 'Resource',
        'server': 'localhost',
        'port': dbus.UInt32(4343),
        }
    conn2 = make_connection(bus, queue.append, params)
    authenticator = XmppAuthenticator('test2', 'pass')
    stream2 = make_stream(queue.append, authenticator, protocol=XmppXmlStream,
                          port=4343)

    try:
        test(queue, bus, conn1, conn2, stream1, stream2)
    finally:
        # Best-effort teardown; the second Disconnect on each connection
        # destroys the object, so further D-Bus errors are ignored.
        try:
            conn1.Disconnect()
            conn2.Disconnect()
            # second call destroys object
            conn1.Disconnect()
            conn2.Disconnect()
        except dbus.DBusException, e:
            pass
|
Python
| 0.000001
|
@@ -566,21 +566,23 @@
est-
-connect-twice
+nonblocking-tls
.py
|
0275556bcb29f4468c4a7e5b0771686c031e3c94
|
Add context test.
|
demos/context.py
|
demos/context.py
|
Python
| 0.000003
|
@@ -0,0 +1,335 @@
+#!/usr/bin/env python%0A%0Aimport fluidsynth%0A%0Asettings = fluidsynth.FluidSettings()%0A%0Asettings%5B%22synth.chorus.active%22%5D = %22off%22%0Asettings%5B%22synth.reverb.active%22%5D = %22off%22%0Asettings%5B%22synth.sample-rate%22%5D = 22050%0A%0Asynth = fluidsynth.FluidSynth(settings)%0A%0Adriver = fluidsynth.FluidAudioDriver(settings, synth)%0A%0Aplayer = fluidsynth.FluidPlayer(synth)%0A
|
|
eb15e17e99212f2d779ef33a1a9dfa7293ad96ad
|
Add `ProtectedFieldsMixin` for use with `ChangeProtected`s
|
shoop/core/utils/form_mixins.py
|
shoop/core/utils/form_mixins.py
|
Python
| 0
|
@@ -0,0 +1,2227 @@
+# -*- coding: utf-8 -*-%0A# This file is part of Shoop.%0A#%0A# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.%0A#%0A# This source code is licensed under the AGPLv3 license found in the%0A# LICENSE file in the root directory of this source tree.%0A%0Afrom django.utils.translation import ugettext_lazy as _%0A%0A%0Aclass ProtectedFieldsMixin(object):%0A change_protect_field_text = _(%22This field cannot be changed since it is protected.%22)%0A%0A def _get_protected_fields(self):%0A %22%22%22%0A Get a tuple of protected fields if set.%0A The fields are set in model level when model has %60ChangeProtected%60%0A %22%22%22%0A if self.instance and self.instance.pk:%0A are_changes_protected = getattr(self.instance, %22_are_changes_protected%22, None)%0A if are_changes_protected: # Supports the %60_are_changes_protected%60 protocol?%0A if not are_changes_protected(): # Not protected though?%0A return () # Nothing protected, then.%0A return getattr(self.instance, %22protected_fields%22, ())%0A return ()%0A%0A def disable_protected_fields(self):%0A for field in self._get_protected_fields():%0A self.fields%5Bfield%5D.widget.attrs%5B%22disabled%22%5D = True%0A self.fields%5Bfield%5D.help_text = self.change_protect_field_text%0A self.fields%5Bfield%5D.required = False%0A%0A def clean_protected_fields(self, cleaned_data):%0A %22%22%22%0A Ignore protected fields (they are set to %60disabled%60,%0A so they will not be in the form data).%0A%0A As a side effect, this removes the fields from %60changed_data%60 too.%0A%0A :param cleaned_data: Cleaned data%0A :type cleaned_data: dict%0A :return: Cleaned data without protected field data%0A :rtype: dict%0A %22%22%22%0A for field in self._get_protected_fields():%0A if field in self.changed_data:%0A self.changed_data.remove(field)%0A cleaned_data%5Bfield%5D = getattr(self.instance, field)%0A return cleaned_data%0A%0A def clean(self):%0A return self.clean_protected_fields(super(ProtectedFieldsMixin, self).clean())%0A%0A def 
__init__(self, **kwargs):%0A super(ProtectedFieldsMixin, self).__init__(**kwargs)%0A self.disable_protected_fields()%0A
|
|
9a82eb7fe4f587b00cca155b84a36c6d590e0e16
|
Add tests to patterns
|
tests/test_patterns.py
|
tests/test_patterns.py
|
Python
| 0.000001
|
@@ -0,0 +1,2488 @@
+from bottery import patterns%0A%0A%0Adef test_message_handler_check_positive_match():%0A message = type('Message', (), %7B'text': 'ping'%7D)%0A handler = patterns.MessageHandler(pattern='ping')%0A assert handler.check(message)%0A%0A%0Adef test_message_handler_check_negative_match():%0A message = type('Message', (), %7B'text': 'Ping'%7D)%0A handler = patterns.MessageHandler(pattern='ping')%0A assert not handler.check(message)%0A%0A%0Adef test_message_handler_check_positive_match_with_sensitive():%0A message = type('Message', (), %7B'text': 'Ping'%7D)%0A handler = patterns.MessageHandler(pattern='ping', sensitive=False)%0A assert handler.check(message)%0A%0A%0Adef test_message_handler_check_negative_match_with_sensitive():%0A message = type('Message', (), %7B'text': 'pong'%7D)%0A handler = patterns.MessageHandler(pattern='ping', sensitive=False)%0A assert not handler.check(message)%0A%0A%0Adef test_startswith_handler_check_positive_match():%0A message = type('Message', (), %7B'text': 'hello my friend'%7D)%0A handler = patterns.StartswithHandler(pattern='hello')%0A assert handler.check(message)%0A%0A%0Adef test_startswith_handler_check_negative_match():%0A message = type('Message', (), %7B'text': 'Ping'%7D)%0A handler = patterns.StartswithHandler(pattern='hello my friend')%0A assert not handler.check(message)%0A%0A%0Adef test_startswith_handler_check_positive_match_with_sensitive():%0A message = type('Message', (), %7B'text': 'Hello my friend'%7D)%0A handler = patterns.StartswithHandler(pattern='hello', sensitive=False)%0A assert handler.check(message)%0A%0A%0Adef test_startswith_handler_check_negative_match_with_sensitive():%0A message = type('Message', (), %7B'text': 'pong'%7D)%0A handler = patterns.StartswithHandler(pattern='hello', sensitive=False)%0A assert not handler.check(message)%0A%0A%0Adef test_default_handler():%0A message = type('Message', (), %7B'text': 'pong'%7D)%0A assert patterns.DefaultHandler().check(message)%0A%0A%0Adef 
test_patterns_handlers():%0A handler = patterns.PatternsHandler()%0A assert not handler.registered%0A%0A%0Adef test_patterns_handler_message():%0A handler = patterns.PatternsHandler()%0A decorator = handler.message('ping')%0A%0A def view(): 'pong'%0A assert callable(decorator)%0A assert decorator(view) == view%0A assert handler.registered%0A%0A%0Adef test_patterns_handler_startswith():%0A handler = patterns.PatternsHandler()%0A decorator = handler.startswith('ping')%0A%0A def view(): 'pong'%0A assert callable(decorator)%0A assert decorator(view) == view%0A assert handler.registered%0A
|
|
2087394a69b3d4ca47e441b2561a0645c9a99e68
|
Add test_recharge
|
tests/test_recharge.py
|
tests/test_recharge.py
|
Python
| 0
|
@@ -0,0 +1,325 @@
+import pastas as ps%0Aimport pandas as pd%0A%0Adef test_linear():%0A index = pd.date_range(%222000-01-01%22, %222000-01-10%22)%0A prec = pd.Series(%5B1, 2%5D * 5, index=index)%0A evap = prec / 2%0A rm = ps.RechargeModel(prec=prec, evap=evap, rfunc=ps.Exponential,%0A recharge=%22Linear%22, name=%22recharge%22)%0A return rm
|
|
170373e6f0a1a416a50e16a3fbfb6a2da2b2e700
|
Add Site traversal object
|
usingnamespace/api/traversal/v1/site.py
|
usingnamespace/api/traversal/v1/site.py
|
Python
| 0
|
@@ -0,0 +1,1890 @@
+import logging%0Alog = logging.getLogger(__name__)%0A%0Afrom pyramid.compat import string_types%0A%0Afrom .... import models as m%0A%0Aclass Site(object):%0A %22%22%22Site%0A%0A Traversal object for a site ID%0A %22%22%22%0A%0A __name__ = None%0A __parent__ = None%0A%0A def __init__(self, site_id):%0A %22%22%22Create the default root object%0A%0A :request: The Pyramid request object%0A %22%22%22%0A log.debug(%22Creating new Site: %7B%7D%22.format(site_id))%0A%0A if isinstance(site_id, int):%0A self.__name__ = '%7B%7D'.format(site_id)%0A self.id = site_id%0A%0A if isinstance(site_id, string_types):%0A self.__name__ = site_id%0A%0A try:%0A self.id = int(site_id)%0A except ValueError:%0A raise ValueError('Site ID is not an valid integer value')%0A%0A def __getitem__(self, key):%0A %22%22%22Check to see if we can traverse this ...%22%22%22%0A%0A next_ctx = None%0A%0A if next_ctx is None:%0A raise KeyError%0A else:%0A next_ctx.__parent__ = self%0A return next_ctx%0A%0A def finalise(self, last=True):%0A %22%22%22Attempts to find out if the site ID is valid%0A%0A :last: If this is the last context in the tree.%0A :returns: None%0A%0A %22%22%22%0A if self.__parent__ is not None:%0A # Finalise the parent first%0A self.__parent__.finalise(last=False)%0A%0A # Get the entries variable from the parent%0A self.site = self.__parent__.sites%0A self.site = self.site.filter(m.Site.id == self.id)%0A else:%0A # We need a parent ...%0A raise ValueError%0A%0A if last:%0A # Attempt to get a single entry, if we get nothing back we return%0A # ValueError%0A first = self.site.first()%0A%0A if first is None:%0A raise ValueError%0A else:%0A self.entries = first.entries%0A%0A
|
|
97fe3384b0e614e17010623af5bccf515ce21845
|
Migrate jupyter_{notebook => server}_config.py
|
.jupyter/jupyter_server_config.py
|
.jupyter/jupyter_server_config.py
|
Python
| 0.000001
|
@@ -0,0 +1,702 @@
+# https://jupyter-server.readthedocs.io/en/stable/operators/migrate-from-nbserver.html%0A%0A#c.ServerApp.browser = 'chromium-browser'%0A#c.ServerApp.terminado_settings = %7B %22shell_command%22: %5B%22/usr/bin/env%22, %22bash%22%5D %7D%0Ac.ServerApp.open_browser = False%0Ac.ServerApp.port_retries = 0%0Ac.KernelSpecManager.ensure_native_kernel = False%0A%0A# Needs: pip install environment_kernels%0Ac.ServerApp.kernel_spec_manager_class = 'environment_kernels.EnvironmentKernelSpecManager'%0Ac.EnvironmentKernelSpecManager.find_conda_envs = False%0Ac.EnvironmentKernelSpecManager.virtualenv_env_dirs = %5B'/home/verdi/.pyenv/versions'%5D%0A%0Ac.FileCheckpoints.checkpoint_dir = '/tmp/.ipynb_checkpoints'%0Ac.FileContentsManager.delete_to_trash = False%0A
|
|
d4541113581433b63f19f23a9bde249acf8324a8
|
Add a visualization tool
|
tools/visualize.py
|
tools/visualize.py
|
Python
| 0.000011
|
@@ -0,0 +1,1033 @@
+#!/usr/bin/python%0A%0Aimport matplotlib.pyplot as plt%0Aimport sys%0A%0Aif len(sys.argv) %3C 2:%0A print %22Usage: vizualize.py file1%5B:label1%5D file2%5B:label2%5D ...%22%0A%0Acolors = %5B'g', 'b', 'r', '#F800F0', '#00E8CC', '#E8E800'%5D%0Amarkers = %7B 'I' : '*', 'P' : 's', 'B' : 'o' %7D%0A%0Aif len(sys.argv) - 1 %3E len(colors):%0A print %22Too many files specified%22%0A%0Adef parse_arg(s):%0A if ':' in s:%0A return tuple(s.split(s, 1))%0A else:%0A return (s, s)%0A%0Asources = map(parse_arg, sys.argv%5B1:%5D)%0A%0Afor (filename, label), color in zip(sources, colors):%0A f = open(filename, 'r')%0A x, y = %5B%5D, %5B%5D%0A per_type = %7B 'I' : %5B%5D, 'P' : %5B%5D, 'B' : %5B%5D %7D%0A for line in f:%0A num, ssim, frametype, _ = line.strip().split(' ', 3)%0A num = int(num)%0A ssim = float(ssim)%0A x.append(num)%0A y.append(ssim)%0A per_type%5Bframetype%5D.append(num)%0A plt.plot(x, y, '-', color = color)%0A for frametype, marker in markers.items():%0A mx = per_type%5Bframetype%5D%0A plt.plot(mx, %5By%5Bx%5D for x in mx%5D, marker, color = color)%0A%0Aplt.show()%0A
|
|
5fc7fa839616213d07ad85e164f6639ff1225065
|
Add override for createsuperuser
|
src/sentry/management/commands/createsuperuser.py
|
src/sentry/management/commands/createsuperuser.py
|
Python
| 0.000001
|
@@ -0,0 +1,418 @@
+from __future__ import absolute_import, print_function%0A%0Afrom django.core.management import call_command%0Afrom django.contrib.auth.management.commands.createsuperuser import Command%0A%0A%0Aclass Command(Command):%0A help = 'Performs any pending database migrations and upgrades'%0A%0A def handle(self, **options):%0A call_command(%0A 'createuser',%0A is_superuser=True,%0A **options%0A )%0A
|
|
4f2df39d909632e0d7a25c739daf8f2c1fa52cbb
|
Use prints and str.format
|
tvrenamr/config.py
|
tvrenamr/config.py
|
import logging
import sys
from yaml import safe_load
from .errors import ShowNotInConfigException
class Config(object):
    """Wrapper around the tvrenamr YAML configuration file.

    Loads the file once at construction time and exposes per-show
    options with fall-back to the mandatory ``defaults`` section.
    """

    def __init__(self, config):
        """Load *config* (a YAML file path) and cache its defaults."""
        self.log = logging.getLogger('Config')
        self.config = self._load_config(config)
        self.log.debug('Config loaded')
        self.defaults = self._get_defaults()
        self.log.debug('Defaults retrieved')

    def exists(self, show):
        """Return True if *show* has its own section in the config."""
        # Membership test replaces the original if/else returning
        # True/False -- identical behaviour.
        return show in self.config

    def get(self, show, option):
        """Return *option* for *show*, falling back to the defaults
        section, then to False when neither defines it."""
        try:
            return self.config[show][option]
        except KeyError:
            try:
                return self.defaults[option]
            except KeyError:
                return False

    def get_canonical(self, show):
        """Return the canonical name for *show*.

        Tries an exact key first, then the lower-cased key; returns
        *show* unchanged when no canonical name is configured.
        """
        try:
            return self.config[show]['canonical']
        except KeyError:
            try:
                return self.config[show.lower()]['canonical']
            except KeyError:
                self.log.debug('No canonical defined, returning: %s' % show)
                return show

    def get_output(self, show):
        """Return the output (or, failing that, canonical) name for
        *show*, looked up by lower-cased key.

        Raises ShowNotInConfigException when neither is configured.
        """
        try:
            return self.config[show.lower()]['output']
        except KeyError:
            try:
                return self.config[show.lower()]['canonical']
            except KeyError:
                raise ShowNotInConfigException(show)

    def _load_config(self, config):
        """Parse the YAML file at *config*.

        On a parse error, print a human-readable diagnosis and exit(1).
        The file is opened with a context manager; the original used the
        deprecated ``file()`` builtin and leaked the handle.
        """
        try:
            with open(config) as f:
                return safe_load(f)
        except Exception as e:
            self.log.critical(e)
            print('')
            print('-' * 79)
            print(' Malformed configuration file, common reasons:')
            print('-' * 79)
            print('')
            print(' o Indentation error')
            print(' o Missing : from end of the line')
            print(' o Non ASCII characters (use UTF8)')
            print(" o If text contains any of :[]{}% characters it must be "
                  "single-quoted ('')\n")
            lines = 0
            # yaml.MarkedYAMLError carries these attributes; getattr
            # keeps this diagnostic path from raising AttributeError on
            # any other exception type (the original crashed there).
            if getattr(e, 'problem', None) is not None:
                print(' Reason: %s\n' % e.problem)
                if e.problem == 'mapping values are not allowed here':
                    print(' ----> MOST LIKELY REASON: Missing : from end of '
                          'the line!')
                    print('')
            context_mark = getattr(e, 'context_mark', None)
            if context_mark is not None:
                print(' Check configuration near line %s, column %s' %
                      (context_mark.line, context_mark.column))
                lines += 1
            problem_mark = getattr(e, 'problem_mark', None)
            if problem_mark is not None:
                print(' Check configuration near line %s, column %s' %
                      (problem_mark.line, problem_mark.column))
                lines += 1
            if lines:
                print('')
            if lines == 1:
                print(' Fault is almost always in this or previous line\n')
            if lines == 2:
                print(' Fault is almost always in one of these lines or '
                      'previous ones\n')
            sys.exit(1)

    def _get_defaults(self):
        """Return the mandatory 'defaults' section of the config."""
        if 'defaults' in self.config:
            return self.config['defaults']
        else:
            message = """
            The defaults section of your config is missing.
            For an example see: https://gist.github.com/586062
            """
            raise NameError(message)
|
Python
| 0.000001
|
@@ -1529,35 +1529,36 @@
print
-
+(
''
+)
%0A pri
@@ -1555,33 +1555,32 @@
print
- '-' *
+('-'*
79
+)
%0A
@@ -1585,17 +1585,17 @@
print
-
+(
' Malfor
@@ -1634,16 +1634,17 @@
easons:'
+)
%0A
@@ -1657,17 +1657,17 @@
rint
-
+(
'-' * 79
%0A
@@ -1662,16 +1662,17 @@
'-' * 79
+)
%0A
@@ -1673,35 +1673,36 @@
print
-
+(
''
+)
%0A pri
@@ -1699,25 +1699,25 @@
print
-
+(
' o Indentat
@@ -1726,16 +1726,17 @@
n error'
+)
%0A
@@ -1741,25 +1741,25 @@
print
-
+(
' o Missing
@@ -1781,16 +1781,17 @@
he line'
+)
%0A
@@ -1800,17 +1800,17 @@
print
-
+(
' o Non
@@ -1837,16 +1837,17 @@
e UTF8)'
+)
%0A
@@ -1856,17 +1856,17 @@
print
-
+(
%22 o If t
@@ -1957,16 +1957,17 @@
('')%5Cn%22
+)
%0A
@@ -2040,17 +2040,17 @@
print
-
+(
' Reason
@@ -2056,19 +2056,24 @@
n: %25s%5Cn'
- %25
+.format(
e.proble
@@ -2073,16 +2073,18 @@
.problem
+))
%0A
@@ -2172,17 +2172,17 @@
print
-
+(
' ----%3E
@@ -2263,16 +2263,17 @@
e line!'
+)
%0A
@@ -2282,35 +2282,36 @@
print
-
+(
''
+)
%0A if
@@ -2358,344 +2358,364 @@
-print ' Check configuration near line %25s, column %25s' %25 %5C%0A (e.context_mark.line, e.context_mark.column)%0A lines += 1%0A if e.problem_mark is not None:%0A print ' Check configuration near line %25s, column %25s' %25 %5C%0A (e.problem_mark.line, e.problem_mark.column
+args = (e.context_mark.line, e.context_mark.column)%0A print(' Check configuration near line %7B%7D, column %7B%7D'.format(*args))%0A lines += 1%0A if e.problem_mark is not None:%0A args = (e.problem_mark.line, e.problem_mark.column)%0A print(' Check configuration near line %7B%7D, column %7B%7D'.format(*args)
)%0A
@@ -2786,11 +2786,12 @@
rint
-
+(
''
+)
%0A
@@ -2827,33 +2827,33 @@
print
-
+(
' Fault is almos
@@ -2888,16 +2888,17 @@
line%5Cn'
+)
%0A
@@ -2938,17 +2938,17 @@
print
-
+(
' Fault
@@ -3035,69 +3035,9 @@
s%5Cn'
-%0A # if self.options.debug:%0A # raise
+)
%0A
@@ -3385,9 +3385,8 @@
essage)%0A
-%0A
|
7f860b23975150642bd6f8d244bce96d401603b0
|
Improve the help text for the rdp options
|
nova/conf/rdp.py
|
nova/conf/rdp.py
|
# Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
enabled = cfg.BoolOpt('enabled',
default=False,
help='Enable RDP related features')
html5_proxy_base_url = cfg.StrOpt('html5_proxy_base_url',
default='http://127.0.0.1:6083/',
help='Location of RDP html5 console proxy, in the form '
'"http://127.0.0.1:6083/"')
ALL_OPTS = [enabled,
html5_proxy_base_url]
def register_opts(conf):
conf.register_opts(ALL_OPTS, group="rdp")
def list_opts():
return {"rdp": ALL_OPTS}
|
Python
| 0.957736
|
@@ -760,15 +760,19 @@
elp=
-'
+%22%22%22%0A
Enable
+s
RDP
@@ -788,18 +788,425 @@
features
-')
+%0A%0AHyper-V, unlike the majority of the hypervisors employed on%0ANova compute nodes, uses RDP instead of VNC and SPICE as a%0Adesktop sharing protocol to provide instance console access.%0AThis option enables RDP for graphical console access%0Afor virtual machines created by Hyper-V.%0A%0A* Possible values:%0A%0A True or False(default).%0A%0A* Services that use this:%0A%0A %60%60nova-compute%60%60%0A%0A* Related options:%0A%0A None%0A%22%22%22)%0A
%0A%0Ahtml5_
@@ -1312,17 +1312,20 @@
help=
-'
+%22%22%22%0A
Location
@@ -1355,63 +1355,377 @@
roxy
-, in the form '%0A '%22http://127.0.0.1:6083/%22'
+%0A%0AIn order to use the web based console access, FreeRDP HTML5%0Aproxy should be configured and installed.%0A%0A* Possible values:%0A%0A Must be a valid URL of the form:%60%60http://host:port/%22%0A where host and port should be configured in the node%0A running FreeRDP HTML5 proxy.%0A%0A* Services that use this:%0A%0A %60%60nova-compute%60%60%0A%0A* Related options:%0A%0A %5Brdp%5Denabled = True%0A%22%22%22
)%0A%0AA
|
2a635a797a9828e047aff6c57b375138f0cd7ed0
|
206.reverse-ll
|
206.reverse-ll/206.reverse-ll.py
|
206.reverse-ll/206.reverse-ll.py
|
Python
| 0.999977
|
@@ -0,0 +1,744 @@
+# Definition for singly-linked list.%0A# class ListNode:%0A# def __init__(self, x):%0A# self.val = x%0A# self.next = None%0A%0Adef show(head):%0A out = %22%22%0A while head:%0A out += %22 %22 + str(head.val)%0A head = head.next%0A print(out)%0A%0Aclass Solution:%0A def reverseList(self, head):%0A %22%22%22%0A :type head: ListNode%0A :rtype: ListNode%0A %22%22%22%0A if head is None or head.next is None:%0A return head%0A left = head%0A middle = head.next%0A left.next = None%0A while middle.next is not None:%0A temp = middle.next%0A middle.next = left%0A left = middle%0A middle = temp%0A middle.next = left%0A return middle %0A %0A%0A
|
|
e7d86c77471d3b0890287e0ca32ecfb94b80abda
|
add util method for Leave One Out crossvalidation
|
scikits/learn/utils/crossval.py
|
scikits/learn/utils/crossval.py
|
Python
| 0.000005
|
@@ -0,0 +1,1859 @@
+# Author: Alexandre Gramfort %3Calexandre.gramfort@inria.fr%3E%0A# License: BSD Style.%0A%0A# $Id: cd.py 473 2010-03-03 16:27:38Z twigster $%0A%0Aimport numpy as np%0Aimport exceptions%0A%0Aclass LOO:%0A %22%22%22%0A Leave-One-Out cross validation:%0A Provides train/test indexes to split data in train test sets%0A%0A Examples:%0A import scikits.learn.utils.crossval%0A import numpy as np%0A n_samples, n_features = 5, 10%0A X = np.random.randn(n_samples, n_features)%0A print X%0A loo = crossval.LOO(n_samples)%0A print loo%5B1%5D%0A for train_index, test_index in loo:%0A print %22TRAIN:%22, train_index, %22TEST:%22, test_index%0A %22%22%22%0A%0A def __init__(self,n):%0A %22%22%22%0A n : is the size of the dataset to split%0A %22%22%22%0A self.n_folds = n%0A self.iter = 0%0A%0A def __getitem__(self,item):%0A test_index = np.zeros(self.n_folds,dtype=np.bool)%0A test_index%5Bitem%5D = True%0A train_index = np.logical_not(test_index)%0A return train_index, test_index%0A%0A def next(self):%0A if self.iter %3C self.n_folds:%0A self.iter += 1%0A return self.__getitem__(self.iter-1)%0A raise StopIteration%0A%0A def __iter__(self):%0A return self%0A%0Adef crossval_split(train_indexes, test_indexes, *args):%0A %22%22%22%0A For each arg return a train and test subsets defined by indexes provided%0A in train_indexes and test_indexes%0A %22%22%22%0A ret = %5B%5D%0A for arg in args:%0A arg_train = arg%5BtrainIndexes,:%5D%0A arg_test = arg%5BtestIndexes,:%5D%0A ret.append(arg_train)%0A ret.append(arg_test)%0A return ret%0A%0Aif __name__ == %22__main__%22:%0A print %22Leave One Out crossvalidation%22%0A n_samples, n_features = 5, 10%0A X = np.random.randn(n_samples, n_features)%0A print X%0A loo = LOO(n_samples)%0A print loo%5B1%5D%0A for train_index, test_index in loo:%0A print %22TRAIN:%22, train_index, %22TEST:%22, test_index%0A
|
|
e42fcd8a7dfd213c3de8ccc925410ab3dfe68a3c
|
Test Lemniscate of Bernoulli trajectory
|
src/test/trajectory/test_lemniscate_trajectory.py
|
src/test/trajectory/test_lemniscate_trajectory.py
|
Python
| 0.000123
|
@@ -0,0 +1,1955 @@
+#!/usr/bin/env python%0Aimport unittest%0A%0Afrom geometry_msgs.msg import Point%0A%0Afrom trajectory.lemniscate_trajectory import LemniscateTrajectory%0A%0A%0Aclass LemniscateTrajectoryTest(unittest.TestCase):%0A%0A def setUp(self):%0A self.trajectory = LemniscateTrajectory(5, 4)%0A self.expected_position = Point()%0A%0A def test_given_lemniscate_trajectory_when_getting_position_after_0s_then_position_is_returned(self):%0A self.expected_position.x = 2.8284%0A self.expected_position.y = 0.0%0A self.assertPositionAlmostEqual(self.expected_position, self.trajectory.get_position_at(0), 0.01)%0A%0A def test_given_lemniscate_trajectory_when_getting_position_after_1s_then_position_is_returned(self):%0A self.expected_position.x = 0%0A self.expected_position.y = 0%0A self.assertPositionAlmostEqual(self.expected_position, self.trajectory.get_position_at(1), 0.01)%0A%0A%0A def test_given_lemniscate_trajectory_when_getting_position_after_2s_then_position_is_returned(self):%0A self.expected_position.x = -2.8284%0A self.expected_position.y = 0%0A self.assertPositionAlmostEqual(self.expected_position, self.trajectory.get_position_at(2), 0.01)%0A%0A def test_given_lemniscate_trajectory_when_getting_position_after_3s_then_position_is_returned(self):%0A self.expected_position.x = 0%0A self.expected_position.y = 0%0A self.assertPositionAlmostEqual(self.expected_position, self.trajectory.get_position_at(3), 0.01)%0A%0A def test_given_lemniscate_trajectory_when_getting_position_after_4s_then_position_is_returned(self):%0A self.expected_position.x = 2.8284%0A self.expected_position.y = 0%0A self.assertPositionAlmostEqual(self.expected_position, self.trajectory.get_position_at(4), 0.01)%0A%0A def assertPositionAlmostEqual(self, expected, actual, delta):%0A self.assertAlmostEqual(expected.x, actual.x, delta=delta)%0A self.assertAlmostEqual(expected.y, actual.y, delta=delta)%0A%0A
|
|
7b4107cfb465faf70110b72da9b655758d62d9b3
|
add extraction tool as per request from Renee
|
scripts/mec/extract_rshowers.py
|
scripts/mec/extract_rshowers.py
|
Python
| 0
|
@@ -0,0 +1,1624 @@
+import pytz%0Aimport datetime%0Aimport psycopg2%0Apgconn = psycopg2.connect(host='127.0.0.1', port=5555, user='mesonet', database='mec')%0Acursor = pgconn.cursor()%0A%0Adates = %22%22%2206-02-2008 00z - 06-07-2008 06z%0A06-09-2008 00z - 06-14-2008 06z%0A06-23-2008 00z - 06-25-2008 06z%0A07-04-2008 00z - 07-06-2008 06z%0A08-15-2008 00z - 08-23-2008 06z%0A02-19-2009 00z - 02-25-2009 06z%0A03-02-2009 00z - 03-07-2009 06z%0A03-28-2009 00z - 04-03-2009 06z%22%22%22%0A%0Adef c(val):%0A if val is None:%0A return 'M'%0A return val%0A%0Afor line in dates.split(%22%5Cn%22):%0A tokens = line.split(%22 - %22)%0A sts = datetime.datetime.strptime(tokens%5B0%5D%5B:12%5D, '%25m-%25d-%25Y %25H')%0A sts = sts.replace(tzinfo=pytz.timezone(%22UTC%22))%0A ets = datetime.datetime.strptime(tokens%5B1%5D%5B:12%5D, '%25m-%25d-%25Y %25H')%0A ets = ets.replace(tzinfo=pytz.timezone(%22UTC%22))%0A output = open('extract/%25s-%25s.txt' %25 (sts.strftime(%22%25Y%25m%25d%25H%25M%22),%0A ets.strftime(%22%25Y%25m%25d%25H%25M%22)), 'w')%0A output.write(%22utcvalid,avg_power,avg_windspeed,stddev_windspeed,count%5Cn%22)%0A cursor.execute(%22%22%22%0A select valid, avg(power), avg(windspeed), stddev(windspeed),%0A count(*) from sampled_data %0A WHERE valid %3E= %25s and valid %3C %25s %0A and extract(minute from valid)::int %25%25 10 = 0 and power is not null%0A and windspeed is not null GROUP by valid ORDER by valid ASC%0A %22%22%22, (sts, ets))%0A print sts, ets, cursor.rowcount%0A for row in cursor:%0A ts = row%5B0%5D.astimezone(pytz.timezone(%22UTC%22))%0A output.write(%22%25s,%25s,%25s,%25s,%25s%5Cn%22 %25 ( %0A row%5B0%5D.strftime(%22%25Y-%25m-%25d %25H:%25M:%25S%22), %0A c(row%5B1%5D), c(row%5B2%5D), c(row%5B3%5D), row%5B4%5D ))%0A%0A output.close()%0A
|
|
74bfc85ef4533e93a4edf4c16e5a7a6bb175f36b
|
Simplify the view as the validation logic has already moved to the model
|
onetime/views.py
|
onetime/views.py
|
from datetime import datetime
from django.http import HttpResponseRedirect, HttpResponseGone
from django.contrib.auth import login
from django.conf import settings
from onetime import utils
from onetime.models import Key
def cleanup(request):
utils.cleanup()
def login(request, key, redirect_invalid_to=None, redirect_expired_to=None):
data = Key.objects.get(key=key)
if data is None:
if redirect_invalid_to is not None:
return HttpResponseRedirect(redirect_invalid_to)
else:
return HttpResponseGone()
expired = False
if data.usage_left is not None and data.usage_left <= 0:
expired = True
if data.expires is not None and data.expires < datetime.now():
expired = True
if expired:
if redirect_expired_to is not None:
return HttpResponseRedirect(redirect_expired_to)
else:
return HttpResponseGone()
if data.usage_left is not None:
data.usage_left -= 1
data.save()
login(request, data.user)
next = request.GET.get('next', None)
if data.next is not None:
next = data.next
if next is None:
next = settings.LOGIN_REDIRECT_URL
return HttpResponseRedirect(next)
|
Python
| 0.000001
|
@@ -47,16 +47,30 @@
p import
+ HttpResponse,
HttpRes
@@ -124,21 +124,16 @@
trib
-.auth
import
logi
@@ -128,21 +128,20 @@
import
-login
+auth
%0Afrom dj
@@ -271,112 +271,145 @@
p()%0A
-%0Adef login(request, key, redirect_invalid_to=None, redirect_expired_to=None):%0A data = Key.objects.get
+ return HttpResponse('ok', content_type='text/plain')%0A%0Adef login(request, key, redirect_invalid_to=None):%0A user = auth.authenticate
(key
@@ -421,20 +421,20 @@
%0A if
-data
+user
is None
@@ -601,489 +601,99 @@
-expired = False%0A if data.usage_left is not None and data.usage_left %3C= 0:%0A expired = True%0A if data.expires is not None and data.expires %3C datetime.now():%0A expired = True%0A%0A if expired:%0A if redirect_expired_to is not None:%0A return HttpResponseRedirect(redirect_expired_to)%0A else:%0A return HttpResponseGone()%0A%0A
+auth.login(request, user)%0A%0A data = Key.objects.get(key=key)%0A
-if
data.u
-sage_left is not None:%0A data.usage_left -= 1%0A data.sav
+pdate_usag
e()%0A
-%0A
-login(request, data.user)%0A
%0A
|
159b971ae95501f9093dedb881ed030eed74241e
|
Create __init__.py
|
docs/__init__.py
|
docs/__init__.py
|
Python
| 0.000429
|
@@ -0,0 +1,366 @@
+# -*- coding: utf-8 -*-%0A%22%22%22%0A sphinxcontrib%0A ~~~~~~~~~~~~~%0A%0A This package is a namespace package that contains all extensions%0A distributed in the %60%60sphinx-contrib%60%60 distribution.%0A%0A :copyright: Copyright 2007-2009 by the Sphinx team, see AUTHORS.%0A :license: BSD, see LICENSE for details.%0A%22%22%22%0A%0A__import__('pkg_resources').declare_namespace(__name__)%0A%0A
|
|
88d4139fdfdcb11be7cbe42fe1223cfde5752950
|
debug path
|
pyethereum/config.py
|
pyethereum/config.py
|
import os
import uuid
import StringIO
import ConfigParser
from pyethereum.utils import default_data_dir
from pyethereum.packeter import Packeter
from pyethereum.utils import sha3
def default_config_path():
return os.path.join(default_data_dir, 'config.txt')
def default_client_version():
return Packeter.CLIENT_VERSION # FIXME
def default_node_id():
x = (sha3(str(uuid.uuid1())) * 2).encode('hex')
assert len(x) == 128
return x
config_template = \
"""
# NETWORK OPTIONS ###########
[network]
# Connect to remote host/port
# poc-7.ethdev.com:30300
remote_host = 207.12.89.180
remote_port = 30300
# Listen on the given host/port for incoming connections
listen_host = 0.0.0.0
listen_port = 30303
# Number of peer to connections to establish
num_peers = 10
# unique id of this node
node_id = {0}
# API OPTIONS ###########
[api]
# Serve the restful json api on the given host/port
listen_host = 0.0.0.0
listen_port = 30203
# path to server the api at
api_path = /api/v02a
# MISC OIPTIONS #########
[misc]
# Load database from path
data_dir = {1}
# percent cpu devoted to mining 0=off
mining = 30
# see help for available log groups
logging = :INFO
# log as json output
log_json = 0
# WALLET OPTIONS ##################
[wallet]
# Set the coinbase (mining payout) address
coinbase = 6c386a4b26f73c802f34673f7248bb118f97424a
""".format(default_node_id(), default_data_dir)
def get_default_config():
f = StringIO.StringIO()
f.write(config_template)
f.seek(0)
config = ConfigParser.ConfigParser()
config.readfp(f)
config.set('network', 'client_version', default_client_version())
return config
def read_config(cfg_path=default_config_path()):
print cfg_path
# create default if not existent
if not os.path.exists(cfg_path):
open(cfg_path, 'w').write(config_template)
# extend on the default config
config = get_default_config()
config.read(cfg_path)
return config
def validate_config(config):
assert len(config.get('network', 'node_id')) == 128 # 512bit hex encoded
assert len(config.get('wallet', 'coinbase')) == 40 # 160bit hex encoded
|
Python
| 0.000001
|
@@ -1715,27 +1715,8 @@
)):%0A
- print cfg_path%0A
|
7f0658ee700174bae100a12b8c8c22377e829d6f
|
Create BlepiInit.py
|
BlepiInit.py
|
BlepiInit.py
|
Python
| 0
|
@@ -0,0 +1,436 @@
+import sqlite3%0A%0Aconnection = sqlite3.connect('/home/pi/blepimesh/data/client.db')%0A%0Acursor = connection.cursor()%0A%0Aprint %22Adding Data To DB%22%0A%0A%0Acursor.execute(%22INSERT INTO log(tagDate) values(date('now'))%22)%0Acursor.execute(%22INSERT INTO log values('5',date('now'),time('now'),'34','43','TagAddr','')%22)%0A%0Aconnection.commit()%0A%0Aprint %22Entire Database Contents%22%0A%0Afor row in cursor.execute(%22SELECT * FROM log%22):%0A print row%0A%0Aconnection.close()%0A
|
|
b5fda5ff78f97c7bdd23f3ca4ed2b2d2ab33d101
|
Create _init_.py
|
luowang/tools/tree-tagger-windows-3.2/TreeTagger/bin/_init_.py
|
luowang/tools/tree-tagger-windows-3.2/TreeTagger/bin/_init_.py
|
Python
| 0.000145
|
@@ -0,0 +1 @@
+%0A
|
|
f3fbb6ca517314ab7ac1330e766da1de89970e13
|
Add debug plugin
|
plugins/debug.py
|
plugins/debug.py
|
Python
| 0.000001
|
@@ -0,0 +1,431 @@
+import time%0A%0Aclass Plugin:%0A def __call__(self, bot):%0A bot.on_respond(r%22ping$%22, lambda bot, msg, reply: reply(%22PONG%22))%0A bot.on_respond(r%22echo (.*)$%22, lambda bot, msg, reply: reply(msg%5B%22match%22%5D.group(1)))%0A bot.on_respond(r%22time$%22, lambda bot, msg, reply: reply(time.time()))%0A bot.on_help(%22debug%22, self.on_help)%0A%0A def on_help(self, bot, msg, reply):%0A reply(%22Syntax: ping %7C echo %3Cmsg%3E %7C time%22)%0A
|
|
1a7acfd59f48522f0dda984b2f33d20d843ee8ba
|
set up role.py
|
pycanvas/role.py
|
pycanvas/role.py
|
Python
| 0.000002
|
@@ -0,0 +1,140 @@
+from canvas_object import CanvasObject%0Afrom util import combine_kwargs%0A%0Aclass Role(CanvasObject):%0A%0A def __str__(self):%0A return %22%22%0A
|
|
df7b60767ac00beb81e48b245ac79f5c8fe7db64
|
Revert "Temporarily disable custom 404 handler to debug an issue"
|
pydotorg/urls.py
|
pydotorg/urls.py
|
from django.conf.urls import include, url, handler404
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from django.views.generic.base import TemplateView, RedirectView
from django.conf import settings
from cms.views import custom_404
from users.views import HoneypotSignupView, CustomPasswordChangeView
from . import views
from .urls_api import v1_api, router
# TODO: Disable this temporarily to debug #28488 in Django tracker.
# handler404 = custom_404
urlpatterns = [
# homepage
url(r'^$', views.IndexView.as_view(), name='home'),
url(r'^humans.txt$', TemplateView.as_view(template_name='humans.txt', content_type='text/plain')),
url(r'^robots.txt$', TemplateView.as_view(template_name='robots.txt', content_type='text/plain')),
url(r'^shell/$', TemplateView.as_view(template_name="python/shell.html"), name='shell'),
# python section landing pages
url(r'^about/$', TemplateView.as_view(template_name="python/about.html"), name='about'),
# Redirect old download links to new downloads pages
url(r'^download/$', RedirectView.as_view(url='https://www.python.org/downloads/', permanent=True)),
url(r'^download/source/$', RedirectView.as_view(url='https://www.python.org/downloads/source/', permanent=True)),
url(r'^download/mac/$', RedirectView.as_view(url='https://www.python.org/downloads/mac-osx/', permanent=True)),
url(r'^download/windows/$', RedirectView.as_view(url='https://www.python.org/downloads/windows/', permanent=True)),
# duplicated downloads to getit to bypass China's firewall. See
# https://github.com/python/pythondotorg/issues/427 for more info.
url(r'^getit/', include('downloads.urls', namespace='getit')),
url(r'^downloads/', include('downloads.urls', namespace='download')),
url(r'^doc/$', views.DocumentationIndexView.as_view(), name='documentation'),
url(r'^blog/$', RedirectView.as_view(url='/blogs/', permanent=True)),
url(r'^blogs/', include('blogs.urls')),
url(r'^inner/$', TemplateView.as_view(template_name="python/inner.html"), name='inner'),
# other section landing pages
url(r'^psf-landing/$', TemplateView.as_view(template_name="psf/index.html"), name='psf-landing'),
url(r'^docs-landing/$', TemplateView.as_view(template_name="docs/index.html"), name='docs-landing'),
url(r'^pypl-landing/$', TemplateView.as_view(template_name="pypl/index.html"), name='pypl-landing'),
url(r'^shop-landing/$', TemplateView.as_view(template_name="shop/index.html"), name='shop-landing'),
# Override /accounts/signup/ to add Honeypot.
url(r'^accounts/signup/', HoneypotSignupView.as_view()),
# Override /accounts/password/change/ to add Honeypot
# and change success URL.
url(r'^accounts/password/change/$', CustomPasswordChangeView.as_view(),
name='account_change_password'),
url(r'^accounts/', include('allauth.urls')),
url(r'^box/', include('boxes.urls')),
url(r'^community/', include('community.urls', namespace='community')),
url(r'^community/microbit/$', TemplateView.as_view(template_name="community/microbit.html"), name='microbit'),
url(r'^events/', include('events.urls', namespace='events')),
url(r'^jobs/', include('jobs.urls', namespace='jobs')),
url(r'^sponsors/', include('sponsors.urls')),
url(r'^success-stories/', include('successstories.urls')),
url(r'^users/', include('users.urls', namespace='users')),
url(r'^psf/records/board/minutes/', include('minutes.urls')),
url(r'^membership/', include('membership.urls')),
url(r'^search/', include('haystack.urls')),
# admin
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', admin.site.urls),
# api
url(r'^api/', include(v1_api.urls)),
url(r'^api/v2/', include(router.urls)),
]
urlpatterns += staticfiles_urlpatterns()
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Python
| 0
|
@@ -455,78 +455,8 @@
er%0A%0A
-# TODO: Disable this temporarily to debug #28488 in Django tracker.%0A#
hand
|
4b3e9cca98adca34f12967392f1e98d6fc57440b
|
Remove redundant assert.
|
speech/unit_tests/test__gax.py
|
speech/unit_tests/test__gax.py
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
class TestSpeechGAXMakeRequests(unittest.TestCase):
SAMPLE_RATE = 16000
HINTS = ['hi']
AUDIO_CONTENT = b'/9j/4QNURXhpZgAASUkq'
def _callFUT(self, sample, language_code, max_alternatives,
profanity_filter, speech_context, single_utterance,
interim_results):
from google.cloud.speech._gax import _make_streaming_request
return _make_streaming_request(sample=sample,
language_code=language_code,
max_alternatives=max_alternatives,
profanity_filter=profanity_filter,
speech_context=speech_context,
single_utterance=single_utterance,
interim_results=interim_results)
def test_ctor(self):
from google.cloud import speech
from google.cloud.speech.sample import Sample
from google.cloud.grpc.speech.v1beta1.cloud_speech_pb2 import (
SpeechContext)
from google.cloud.grpc.speech.v1beta1.cloud_speech_pb2 import (
RecognitionConfig)
from google.cloud.grpc.speech.v1beta1.cloud_speech_pb2 import (
StreamingRecognitionConfig)
from google.cloud.grpc.speech.v1beta1.cloud_speech_pb2 import (
StreamingRecognizeRequest)
sample = Sample(content=self.AUDIO_CONTENT,
encoding=speech.Encoding.FLAC,
sample_rate=self.SAMPLE_RATE)
language_code = 'US-en'
max_alternatives = 2
profanity_filter = True
speech_context = SpeechContext(phrases=self.HINTS)
single_utterance = True
interim_results = False
streaming_request = self._callFUT(sample, language_code,
max_alternatives, profanity_filter,
speech_context, single_utterance,
interim_results)
self.assertIsInstance(streaming_request, StreamingRecognizeRequest)
# This isn't set by _make_streaming_request().
# The first request can only have `streaming_config` set.
# The following requests can only have `audio_content` set.
self.assertEqual(streaming_request.audio_content, b'')
self.assertIsInstance(streaming_request.streaming_config,
StreamingRecognitionConfig)
streaming_config = streaming_request.streaming_config
self.assertTrue(streaming_config.single_utterance)
self.assertFalse(streaming_config.interim_results)
config = streaming_config.config
self.assertIsInstance(config, RecognitionConfig)
self.assertEqual(config.encoding, 2) # speech.Encoding.FLAC maps to 2.
self.assertEqual(config.sample_rate, self.SAMPLE_RATE)
self.assertEqual(config.language_code, language_code)
self.assertEqual(config.max_alternatives, max_alternatives)
self.assertTrue(config.profanity_filter)
self.assertEqual(config.speech_context.phrases, self.HINTS)
class TestSpeechGAXMakeRequestsStream(unittest.TestCase):
SAMPLE_RATE = 16000
HINTS = ['hi']
AUDIO_CONTENT = b'/9j/4QNURXhpZgAASUkq'
def _callFUT(self, sample, language_code, max_alternatives,
profanity_filter, speech_context, single_utterance,
interim_results):
from google.cloud.speech._gax import _stream_requests
return _stream_requests(sample=sample,
language_code=language_code,
max_alternatives=max_alternatives,
profanity_filter=profanity_filter,
speech_context=speech_context,
single_utterance=single_utterance,
interim_results=interim_results)
def test_stream_requests(self):
from io import BytesIO
from google.cloud import speech
from google.cloud.speech.sample import Sample
from google.cloud.grpc.speech.v1beta1.cloud_speech_pb2 import (
SpeechContext)
from google.cloud.grpc.speech.v1beta1.cloud_speech_pb2 import (
StreamingRecognitionConfig)
from google.cloud.grpc.speech.v1beta1.cloud_speech_pb2 import (
StreamingRecognizeRequest)
sample = Sample(content=BytesIO(self.AUDIO_CONTENT),
encoding=speech.Encoding.FLAC,
sample_rate=self.SAMPLE_RATE)
language_code = 'US-en'
max_alternatives = 2
profanity_filter = True
speech_context = SpeechContext(phrases=self.HINTS)
single_utterance = True
interim_results = False
streaming_requests = self._callFUT(sample, language_code,
max_alternatives, profanity_filter,
speech_context, single_utterance,
interim_results)
all_requests = []
for streaming_request in streaming_requests:
self.assertIsInstance(streaming_request, StreamingRecognizeRequest)
all_requests.append(streaming_request)
self.assertEqual(len(all_requests), 2)
config_request = all_requests[0]
streaming_request = all_requests[1]
self.assertIsInstance(config_request, StreamingRecognizeRequest)
# This isn't set by _make_streaming_request().
# The first request can only have `streaming_config` set.
# The following requests can only have `audio_content` set.
self.assertEqual(config_request.audio_content, b'')
self.assertEqual(streaming_request.audio_content, self.AUDIO_CONTENT)
self.assertIsInstance(config_request.streaming_config,
StreamingRecognitionConfig)
|
Python
| 0.00012
|
@@ -6090,81 +6090,8 @@
%5B1%5D%0A
- self.assertIsInstance(config_request, StreamingRecognizeRequest)%0A
|
0d85832a82c0973c89f3f321e1f2e2486a197882
|
Add script to perform partial upload
|
bin/partial_upload.py
|
bin/partial_upload.py
|
Python
| 0
|
@@ -0,0 +1,1971 @@
+#!/bin/env python%0A# -*- coding: utf8 -*-%0A%22%22%22 Triggers a partial upload process with the specified raw.xz URL. %22%22%22%0A%0Aimport argparse%0A%0Afrom fedimg.config import AWS_ACCESS_ID%0Afrom fedimg.config import AWS_SECRET_KEY%0Afrom fedimg.config import AWS_BASE_REGION, AWS_REGIONS%0Afrom fedimg.services.ec2.ec2copy import main as ec2copy%0Afrom fedimg.services.ec2.ec2initiate import main as ec2main%0A%0A%0Adef get_args():%0A parser = argparse.ArgumentParser(%0A description=%22Trigger a partial upload based on the arguments%22)%0A parser.add_argument(%0A %22-u%22, %22--url%22, type=str, help=%22.raw.xz URL%22, required=True)%0A parser.add_argument(%0A %22-c%22, %22--compose-id%22, type=str, help=%22compose id of the .raw.xz file%22,%0A required=True)%0A parser.add_argument(%0A %22-p%22, %22--push-notifications%22,%0A help=%22Bool to check if we need to push fedmsg notifications%22,%0A action=%22store_true%22, required=False)%0A parser.add_argument(%0A %22-v%22, %22--volume%22, help=%22volume type for the image%22, required=False)%0A%0A args = parser.parse_args()%0A%0A return (%0A args.url,%0A args.compose_id,%0A args.push_notifications,%0A args.volume%0A )%0A%0A%0Adef main():%0A url, compose_id, push_notifications, volume = get_args()%0A%0A if volume is not None:%0A volume = %5Bvolume%5D%0A%0A images_metadata = ec2main(%0A image_urls=url,%0A access_id=AWS_ACCESS_ID,%0A secret_key=AWS_SECRET_KEY,%0A regions=None,%0A volume_types=volume,%0A push_notifications=push_notifications,%0A compose_id=compose_id%0A )%0A%0A for image_metadata in images_metadata:%0A image_id = image_metadata%5B'image_id'%5D%0A aws_regions = list(set(AWS_REGIONS) - set(%5BAWS_BASE_REGION%5D))%0A ec2copy(%0A aws_regions,%0A AWS_ACCESS_ID,%0A AWS_SECRET_KEY,%0A image_ids=%5Bimage_id%5D,%0A push_notifications=push_notifications,%0A compose_id=compose_id%0A )%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
1a49426497819c13ccf858d51e5fa333d95f1f7d
|
Add basic unit test for parseCommand
|
src/autobot/src/udpRemote_test.py
|
src/autobot/src/udpRemote_test.py
|
Python
| 0.000001
|
@@ -0,0 +1,802 @@
+#!/usr/bin/env python%0A%0Aimport unittest%0Afrom udpRemote import parseCommand%0A%0A%0Aclass MockDriveParam:%0A velocity = 0.0%0A angle = 0.0%0A%0A%0Aclass UdpRemoteTest(unittest.TestCase):%0A def testValidParse(self):%0A p = MockDriveParam()%0A p = parseCommand(%22V44.4%22, p)%0A self.assertEqual(p.velocity, 44.4)%0A self.assertEqual(p.angle, 0.0)%0A%0A p = parseCommand(%22A81.3%22, p)%0A self.assertEqual(p.velocity, 44.4)%0A self.assertEqual(p.angle, 81.3)%0A%0A def testInvalidParse(self):%0A p = MockDriveParam()%0A p = parseCommand(%22X44.4%22, p)%0A self.assertEqual(p.velocity, 0.0)%0A self.assertEqual(p.angle, 0.0)%0A%0A p = MockDriveParam()%0A p = parseCommand(%22V0F.4%22, p)%0A self.assertEqual(p.velocity, 0.0)%0A self.assertEqual(p.angle, 0.0)%0A
|
|
5b899181f14c65778f23312ddd31078fac46cd9c
|
Fix template filter.
|
django_assets/filter.py
|
django_assets/filter.py
|
"""Django specific filters.
For those to be registered automatically, make sure the main
django_assets namespace imports this file.
"""
from django.template import Template, Context
from webassets import six
from webassets.filter import Filter, register_filter
class TemplateFilter(Filter):
"""
Will compile all source files as Django templates.
"""
name = 'template'
max_debug_level = None
def __init__(self, context=None):
super(TemplateFilter, self).__init__()
self.context = context
def input(self, _in, out, source_path, output_path, **kw):
t = Template(_in.read(), origin='django-assets', name=source_path)
rendered = t.render(Context(self.context if self.context else {}))
if not six.PY3:
rendered = rendered.encode('utf-8')
out.write(rendered)
register_filter(TemplateFilter)
|
Python
| 0
|
@@ -742,81 +742,8 @@
%7B%7D))
-%0A%0A if not six.PY3:%0A rendered = rendered.encode('utf-8')
%0A
|
8a0fbe899739d0b19525ab2873d7dd9073675466
|
Fix Python 3.4 test failure
|
ironic/drivers/modules/msftocs/msftocsclient.py
|
ironic/drivers/modules/msftocs/msftocsclient.py
|
# Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
MSFT OCS ChassisManager v2.0 REST API client
https://github.com/MSOpenTech/ChassisManager
"""
import posixpath
from xml.etree import ElementTree
from oslo_log import log
import requests
from requests import auth
from requests import exceptions as requests_exceptions
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common.i18n import _LE
LOG = log.getLogger(__name__)
WCSNS = 'http://schemas.datacontract.org/2004/07/Microsoft.GFS.WCS.Contracts'
COMPLETION_CODE_SUCCESS = "Success"
BOOT_TYPE_UNKNOWN = 0
BOOT_TYPE_NO_OVERRIDE = 1
BOOT_TYPE_FORCE_PXE = 2
BOOT_TYPE_FORCE_DEFAULT_HDD = 3
BOOT_TYPE_FORCE_INTO_BIOS_SETUP = 4
BOOT_TYPE_FORCE_FLOPPY_OR_REMOVABLE = 5
BOOT_TYPE_MAP = {
'Unknown': BOOT_TYPE_UNKNOWN,
'NoOverride': BOOT_TYPE_NO_OVERRIDE,
'ForcePxe': BOOT_TYPE_FORCE_PXE,
'ForceDefaultHdd': BOOT_TYPE_FORCE_DEFAULT_HDD,
'ForceIntoBiosSetup': BOOT_TYPE_FORCE_INTO_BIOS_SETUP,
'ForceFloppyOrRemovable': BOOT_TYPE_FORCE_FLOPPY_OR_REMOVABLE,
}
POWER_STATUS_ON = "ON"
POWER_STATUS_OFF = "OFF"
class MSFTOCSClientApi(object):
def __init__(self, base_url, username, password):
self._base_url = base_url
self._username = username
self._password = password
def _exec_cmd(self, rel_url):
"""Executes a command by calling the chassis manager API."""
url = posixpath.join(self._base_url, rel_url)
try:
response = requests.get(
url, auth=auth.HTTPBasicAuth(self._username, self._password))
response.raise_for_status()
except requests_exceptions.RequestException as ex:
LOG.exception(_LE("HTTP call failed: %s"), ex)
raise exception.MSFTOCSClientApiException(
_("HTTP call failed: %s") % ex.message)
xml_response = response.text
LOG.debug("Call to %(url)s got response: %(xml_response)s",
{"url": url, "xml_response": xml_response})
return xml_response
def _check_completion_code(self, xml_response):
try:
et = ElementTree.fromstring(xml_response)
except ElementTree.ParseError as ex:
LOG.exception(_LE("XML parsing failed: %s"), ex)
raise exception.MSFTOCSClientApiException(
_("Invalid XML: %s") % xml_response)
item = et.find("./n:completionCode", namespaces={'n': WCSNS})
if item is None or item.text != COMPLETION_CODE_SUCCESS:
raise exception.MSFTOCSClientApiException(
_("Operation failed: %s") % xml_response)
return et
def get_blade_state(self, blade_id):
"""Returns whether a blade's chipset is receiving power (soft-power).
:param blade_id: the blade id
:returns: one of:
POWER_STATUS_ON,
POWER_STATUS_OFF
:raises: MSFTOCSClientApiException
"""
et = self._check_completion_code(
self._exec_cmd("GetBladeState?bladeId=%d" % blade_id))
return et.find('./n:bladeState', namespaces={'n': WCSNS}).text
def set_blade_on(self, blade_id):
"""Supplies power to a blade chipset (soft-power state).
:param blade_id: the blade id
:raises: MSFTOCSClientApiException
"""
self._check_completion_code(
self._exec_cmd("SetBladeOn?bladeId=%d" % blade_id))
def set_blade_off(self, blade_id):
"""Shuts down a given blade (soft-power state).
:param blade_id: the blade id
:raises: MSFTOCSClientApiException
"""
self._check_completion_code(
self._exec_cmd("SetBladeOff?bladeId=%d" % blade_id))
def set_blade_power_cycle(self, blade_id, off_time=0):
"""Performs a soft reboot of a given blade.
:param blade_id: the blade id
:param off_time: seconds to wait between shutdown and boot
:raises: MSFTOCSClientApiException
"""
self._check_completion_code(
self._exec_cmd("SetBladeActivePowerCycle?bladeId=%(blade_id)d&"
"offTime=%(off_time)d" %
{"blade_id": blade_id, "off_time": off_time}))
def get_next_boot(self, blade_id):
"""Returns the next boot device configured for a given blade.
:param blade_id: the blade id
:returns: one of:
BOOT_TYPE_UNKNOWN,
BOOT_TYPE_NO_OVERRIDE,
BOOT_TYPE_FORCE_PXE, BOOT_TYPE_FORCE_DEFAULT_HDD,
BOOT_TYPE_FORCE_INTO_BIOS_SETUP,
BOOT_TYPE_FORCE_FLOPPY_OR_REMOVABLE
:raises: MSFTOCSClientApiException
"""
et = self._check_completion_code(
self._exec_cmd("GetNextBoot?bladeId=%d" % blade_id))
return BOOT_TYPE_MAP[
et.find('./n:nextBoot', namespaces={'n': WCSNS}).text]
def set_next_boot(self, blade_id, boot_type, persistent=True, uefi=True):
"""Sets the next boot device for a given blade.
:param blade_id: the blade id
:param boot_type: possible values:
BOOT_TYPE_UNKNOWN,
BOOT_TYPE_NO_OVERRIDE,
BOOT_TYPE_FORCE_PXE,
BOOT_TYPE_FORCE_DEFAULT_HDD,
BOOT_TYPE_FORCE_INTO_BIOS_SETUP,
BOOT_TYPE_FORCE_FLOPPY_OR_REMOVABLE
:param persistent: whether this setting affects the next boot only or
every subsequent boot
:param uefi: True if UEFI, False otherwise
:raises: MSFTOCSClientApiException
"""
self._check_completion_code(
self._exec_cmd(
"SetNextBoot?bladeId=%(blade_id)d&bootType=%(boot_type)d&"
"uefi=%(uefi)s&persistent=%(persistent)s" %
{"blade_id": blade_id,
"boot_type": boot_type,
"uefi": str(uefi).lower(),
"persistent": str(persistent).lower()}))
|
Python
| 0.999998
|
@@ -2437,15 +2437,15 @@
ex.
-message
+args%5B0%5D
)%0A%0A
|
d843a2198b87a41d73ab19e09ac8d0c78a6e0ef9
|
Create IC74139.py
|
BinPy/examples/ic/Series_7400/IC74139.py
|
BinPy/examples/ic/Series_7400/IC74139.py
|
Python
| 0.000001
|
@@ -0,0 +1,884 @@
+from __future__ import print_function%0Afrom BinPy import *%0Aprint ('Usage of IC 74139:%5Cn')%0A%0Aic = IC_74139()%0Aprint (%22%22%22This is a dial 1:4 demultiplexer(2:4 decoder) with output being inverted input%22%22%22%22)%0Aprint ('%5CnThe Pin configuration is:%5Cn')%0Ap = %7B1:0,2:0,3:0,14:0,13:1,15:0%7D%0Aprint (p)%0Aprint ('%5CnPin initialization -using -- ic.setIC(p) --%5Cn')%0Aic.setIC(p)%0Aprint ('%5CnPowering up the IC - using -- ic.setIC(%7B16:1, 8:0%7D) -- %5Cn')%0Aic.setIC(%7B16: 1, 8: 0%7D)%0Aprint ('%5CnDraw the IC with the current configuration%5Cn')%0Aic.drawIC()%0Aprint (%0A '%5CnRun the IC with the current configuration using -- print ic.run() -- %5Cn')%0Aprint (%0A 'Note that the ic.run() returns a dict of pin configuration similar to :')%0Aprint (ic.run())%0Aprint (%0A '%5CnSeting the outputs to the current IC configuration using -- ic.setIC(ic.run()) --%5Cn')%0Aic.setIC(ic.run())%0Aprint ('%5CnDraw the final configuration%5Cn')%0Aic.drawIC()%0A
|
|
dcd19e7982024f4f196f24b71fc2d73bef6723eb
|
add new package (#25505)
|
var/spack/repos/builtin/packages/cupla/package.py
|
var/spack/repos/builtin/packages/cupla/package.py
|
Python
| 0
|
@@ -0,0 +1,1564 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0A%0Afrom spack import *%0A%0A%0Aclass Cupla(Package):%0A %22%22%22C++ User interface for the Platform independent Library Alpaka%22%22%22%0A%0A homepage = %22https://github.com/alpaka-group/cupla%22%0A git = %22https://github.com/alpaka-group/cupla.git%22%0A url = %22https://github.com/alpaka-group/cupla/archive/refs/tags/0.3.0.tar.gz%22%0A%0A maintainers = %5B'vvolkl'%5D%0A%0A version('develop', branch='dev')%0A version('master', branch='master')%0A version('0.3.0', sha256='035512517167967697e73544c788453de5e3f0bc4e8d4864b41b2e287365cbaf')%0A%0A depends_on('alpaka@0.6.0:')%0A%0A def install(self, spec, prefix):%0A install_tree('include', self.prefix.include)%0A install_tree('src', self.prefix.src)%0A install_tree('doc', self.prefix.share.cupla.doc)%0A install_tree('example', self.prefix.example)%0A install_tree('cmake', self.prefix.cmake)%0A install('Findcupla.cmake', self.prefix)%0A install('cuplaConfig.cmake', self.prefix)%0A%0A def setup_run_environment(self, env):%0A env.set(%22CUPLA_ROOT%22, self.prefix)%0A env.prepend_path(%22CMAKE_PREFIX_PATH%22, self.prefix)%0A env.set(%22CUPLA%22, self.prefix.share.cupla)%0A%0A def setup_dependent_build_environment(self, env, dependent_spec):%0A env.set(%22CUPLA_ROOT%22, self.prefix)%0A env.prepend_path(%22CMAKE_PREFIX_PATH%22, self.prefix)%0A env.set(%22CUPLA%22, self.prefix.share.cupla)%0A
|
|
adcf089505f089ff93a48ee2672ce9d8b9b4fadf
|
corrects typo
|
axelrod/tournament.py
|
axelrod/tournament.py
|
"""Recreate Axelrod's tournament."""
import inspect
class Game(object):
"""A class to hold the game matrix and to score a game accordingly."""
def __init__(self, r=2, s=0, t=5, p=4):
self.scores = {
('C', 'C'): (r, r),
('D', 'D'): (p, p),
('C', 'D'): (t, s),
('D', 'C'): (s, t),
}
def score(self, pair):
"""Return the appropriate score for decision pair.
Returns the appropriate score (as a tuple) from the scores dictionary
for a given pair of plays (passed in as a tuple).
e.g. score(('C', 'C')) returns (2, 2)
"""
return self.scores[pair]
class RoundRobin(object):
"""A class to define play a round robin game of players"""
def __init__(self, players, game, turns, deterministic_cache):
"""Initialise the players, game and deterministic cache"""
self.players = players
self.nplayers = len(players)
self.game = game
self.turns = turns
self.deterministic_cache = deterministic_cache
def calculate_scores(self, p1, p2):
"""Calculates the score for two players based their history"""
s1, s2 = 0, 0
for pair in zip(p1.history, p2.history):
score = self.game.score(pair)
s1 += score[0]
s2 += score[1]
return s1, s2
def play(self):
"""Plays a round robin where each match lasts turns.
We can cache scores for paris of deterministic strategies, since the outcome
will always be the same. There are many possible keys to cache by, but perhaps
the most versatile is a tuple with the classes of both players.
Returns the total payoff matrix and the deterministic cache.
"""
payoffs = [[0 for j in range(self.nplayers)] for i in range(self.nplayers)]
for ip1 in range(self.nplayers):
for ip2 in range(ip1 + 1, self.nplayers):
p1 = self.players[ip1]
p2 = self.players[ip2]
cl1 = p1.__class__
cl2 = p2.__class__
key = (cl1, cl2)
if (p1.stochastic or p2.stochastic or key not in self.deterministic_cache):
turn = 0
p1.reset()
p2.reset()
while turn < self.turns:
turn += 1
p1.play(p2)
scores = self.calculate_scores(p1, p2)
if not (p1.stochastic or p2.stochastic):
self.deterministic_cache[key] = scores
else:
scores = self.deterministic_cache[key]
payoffs[ip1][ip2] = scores[0]
payoffs[ip2][ip1] = scores[1]
return payoffs, self.deterministic_cache
class Tournament(object):
def __init__(self, players, game=None, turns=200, repetitions=10):
"""Initiate a tournmanent of players"""
self.players = players
self.nplayers = len(players)
self.plist = list(range(self.nplayers))
if game is None:
self.game = Game()
else:
self.game = game
self.turns = turns
self.replist = list(range(repetitions))
self.results = self.initialise_results()
self.deterministic_cache = {}
def initialise_results(self):
"""
Build the initial results containing just zeros. This is an embedded
that could be made more efficient using a NumPy array.
"""
results = [[[0 for irep in self.replist] for j in self.plist]
for i in self.plist]
return results
def play(self):
"""Play the tournament with repetitions of round robin"""§§
round_robin = RoundRobin(self.players, self.game, self.turns, self.deterministic_cache)
for irep in self.replist:
payoffs, self.deterministic_cache = round_robin.play()
for i in self.plist:
for j in self.plist:
self.results[i][j][irep] = payoffs[i][j]
return self.results
class Player(object):
"""An class for a player in the tournament.
This is an abstract base class, not intended to be used directly.
"""
name = "Player"
def __init__(self):
"""Initiates an empty history and 0 score for a player."""
self.history = []
self.stochastic = "random" in inspect.getsource(self.__class__)
def play(self, opponent):
"""This pits two players against each other.
"""
s1, s2 = self.strategy(opponent), opponent.strategy(self)
self.history.append(s1)
opponent.history.append(s2)
def reset(self):
"""Resets history.
When creating strategies that create new attributes then this method should be
re-written (in the inherited class) and should not only reset history but also
rest all other attributes.
"""
self.history = []
def strategy(self, opponent):
"""This is a placeholder strategy."""
return None
def __repr__(self):
"""The string method for the strategy."""
return self.name
|
Python
| 0.999712
|
@@ -3791,10 +3791,8 @@
n%22%22%22
-%C2%A7%C2%A7
%0A
|
7e5b4e178a5d36ca89034287168560a73bd9e63d
|
Create drivers.py
|
chips/sensor/lis3dh/drivers.py
|
chips/sensor/lis3dh/drivers.py
|
Python
| 0.000001
|
@@ -0,0 +1,96 @@
+# This code has to be added to the corresponding __init__.py%0A%0ADRIVERS%5B%22lis3dh%22%5D = %5B%22LIS3DH%22%5D%0A%0A
|
|
f342dbf8d9455db91286823ec5d6ef64e2ace68c
|
Create MCP3202.py
|
Other_Applications/Ultrasonic/MCP3202.py
|
Other_Applications/Ultrasonic/MCP3202.py
|
Python
| 0.000005
|
@@ -0,0 +1,1395 @@
+#!/usr/bin/python%0Aimport RPi.GPIO as GPIO%0Aimport time%0Aimport datetime%0Aimport os%0Afrom time import strftime%0A%0ACS = 4%0ACS2 = 7%0ACLK = 11%0AMOSI = 10%0AMISO = 9%0ALDAC = 8%0A%0AGPIO.setwarnings(False)%0AGPIO.setmode(GPIO.BCM)%0AGPIO.setup(CS, GPIO.OUT)%0AGPIO.setup(CLK, GPIO.OUT)%0AGPIO.setup(MOSI, GPIO.OUT)%0AGPIO.setup(CS2, GPIO.OUT)%0AGPIO.setup(LDAC, GPIO.OUT)%0AGPIO.setup(MISO, GPIO.IN,pull_up_down = GPIO.PUD_UP)%0AGPIO.output(CS, True)%0AGPIO.output(CLK, False)%0AGPIO.output(MOSI, True)%0A%0Adef myspi(rdata):%0A%09dataX = 0%0A%09mask = 0x80%0A%09for i in range(8):%0A%09%09if(rdata & mask):%0A%09%09%09GPIO.output(MOSI, True)%0A%09%09else:%0A%09%09%09GPIO.output(MOSI, False)%0A%09%09GPIO.output(CLK, True)%0A%09%09if(GPIO.input(MISO) == 1):%0A%09%09%09dataX = dataX + mask%0A%09%09GPIO.output(CLK, False)%0A%09%09mask = mask %3E%3E 1%0A%09return dataX;%0A%0Adef readADC(ch):%0A%09cmd = 0%0A%09if ch == 0: cmd = 0x80 %09 %09%0A%09elif ch == 1: cmd = 0xc0%09%0A%09#elif ch == 2: cmd = 0x00%0A%09#elif ch == 4: cmd = 0x04%0A%09GPIO.output(CS, False)%0A%09a = myspi(0x01)%0A%09#print %22a: %22,a%0A%09b = myspi(cmd)%0A%09#print %22b: %22,b%0A%09c = myspi(0x00)%0A%09#print %22c: %22,c %0A%09v = ((b & 0x0f) %3C%3C 8) + c%0A%09#print %22v: %22,v%0A%09GPIO.output(CS, True)%0A%09v = round(float(v), 2)%0A%09#v=round(float(v)/4095*3.3,2)%0A%09return v;%0A%0Adef setDAC(data, channel):%0A%09cmd = 0%0A%09if channel == 1: cmd = 0xF0%0A%09else: cmd = 0x70%0A%0A%09GPIO.output(LDAC,False)%0A%09GPIO.output(CS2,False)%0A%09data = int(float(data * 4095 / 255))%0A%09a = myspi((data %3E%3E 8) + cmd)%0A%09b = myspi(data & 0xFF)%0A%09GPIO.output(CS2, True)%0A%09return;%0A
|
|
64b842d0af6c4e07971a733d8ed6e70109e26979
|
Add sample logging
|
samples/sample_logging.py
|
samples/sample_logging.py
|
Python
| 0
|
@@ -0,0 +1,1370 @@
+#!/usr/bin/env python%0A#%0A# Author: Ying Xiong.%0A# Created: Dec 04, 2015.%0A%0Aimport logging%0Aimport sys%0A%0Aclass DebugOrInfoFilter(logging.Filter):%0A %22%22%22Keep the record only if the level is debug or info.%22%22%22%0A def filter(self, record):%0A return record.levelno in (logging.DEBUG, logging.INFO)%0A%0Adef config_logger(logger, formatter):%0A %22%22%22Configure the logger such that debug and info messages are directed to stdout,%0A while more critical warnings and errors to stderr.%0A %22%22%22%0A stdoutHandler = logging.StreamHandler(sys.stdout)%0A stdoutHandler.setLevel(logging.DEBUG)%0A stdoutHandler.setFormatter(formatter)%0A stdoutHandler.addFilter(DebugOrInfoFilter())%0A logger.addHandler(stdoutHandler)%0A%0A stderrHandler = logging.StreamHandler(sys.stderr)%0A stderrHandler.setLevel(logging.WARNING)%0A stderrHandler.setFormatter(formatter)%0A logger.addHandler(stderrHandler)%0A%0Aif __name__ == %22__main__%22:%0A root_logger = logging.getLogger()%0A root_logger.setLevel(logging.DEBUG)%0A log_format = '%25(asctime)s.%25(msecs)03d%5Ct%25(process)d%5Ct%25(filename)s:%25(lineno)s%5Ct%25(funcName)s%5Ct%25(message)s'%0A formatter = logging.Formatter(log_format, datefmt=%22%25Y-%25m-%25d %25H:%25M:%25S%22)%0A config_logger(logging.getLogger(), formatter)%0A%0A # To stdout.%0A logging.debug(%22debug%22)%0A logging.info(%22info%22)%0A%0A # To stderr.%0A logging.warning(%22warning%22)%0A logging.error(%22error%22)%0A
|
|
a6fcf0fdc9a97773453f8ca17ddb071d1a2dfd79
|
hello world
|
contact/app.py
|
contact/app.py
|
Python
| 0.999981
|
@@ -0,0 +1,179 @@
+from flask import Flask%0A%0Aapp = Flask(__name__)%0A%0A%0A@app.route('/')%0Adef hello_world():%0A return %22Hello world!%22%0A%0A%0Aif __name__ == '__main__':%0A app.run(host='0.0.0.0', debug=True)%0A
|
|
4f0b6a6eefd6848a702fe4b808f137ef0b2ee2f8
|
rename as "config.py" after adding keys
|
exampleconfig.py
|
exampleconfig.py
|
Python
| 0.000047
|
@@ -0,0 +1,138 @@
+URL_F = 'http://datamine.mta.info/mta_esi.php?key='KEY'&feed_id=21'%0A%0AURL_AC = 'http://datamine.mta.info/mta_esi.php?key='KEY'&feed_id=26'%0A
|
|
85daad5401267b613e546896bb2abd1658f730b1
|
Create 1_triple_step.py
|
ch09/1_triple_step.py
|
ch09/1_triple_step.py
|
Python
| 0.000006
|
@@ -0,0 +1,769 @@
+# 0 - (1) %5B0%5D%0A# 1 - (1) %5B1%5D%0A# 2 - (2) %5B1, 1%5D, %5B2%5D %0A# 3 - (4) %5B1, 1, 1%5D, %5B1, 2%5D, %5B2, 1%5D, %5B3%5D%0A# 4 - %0A%0A#subtract 1%0A#subtract 2%0A#subtract 3%0A%0Aways = %7B0: 0, 1:1, 2: 2, 3: 4%7D%0A%0Adef calculate_ways(steps):%0A if steps %3C 4:%0A return ways%5Bsteps%5D%0A for i in range(4, steps + 1):%0A ways%5Bi%5D = ways%5Bi-1%5D + ways%5Bi-2%5D + ways%5Bi-3%5D%0A return ways%5Bsteps%5D%0A%0Aways = %7B0: 0, 1:1, 2: 2, 3: 4%7D%0A%0Adef ways_recurse(steps):%0A if steps %3C 4:%0A return ways%5Bsteps%5D%0A return ways_recurse(steps-1) + ways_recurse(steps-2) + ways_recurse(steps-3)%0A%0Adef ways_recurse(steps):%0A if steps not in ways:%0A ways%5Bsteps%5D = ways_recurse(steps-1) + ways_recurse(steps-2) + ways_recurse(steps-3)%0A return ways%5Bsteps%5D%0A%0Afor i in range(29):%0A print str(i) + ' ' + str(ways_recurse(i))%0A
|
|
3e51c57a8611a8ebfb4f2eb045510c50587bd781
|
Test password tokens not in response
|
api/radar_api/tests/test_users.py
|
api/radar_api/tests/test_users.py
|
Python
| 0.000001
|
@@ -0,0 +1,449 @@
+import json%0A%0Afrom radar_api.tests.fixtures import get_user%0A%0A%0Adef test_serialization(app):%0A admin = get_user('admin')%0A%0A client = app.test_client()%0A client.login(admin)%0A%0A response = client.get('/users')%0A%0A assert response.status_code == 200%0A%0A data = json.loads(response.data)%0A%0A for user in data%5B'data'%5D:%0A assert 'username' in user%0A assert 'password_hash' not in user%0A assert 'reset_password_token' not in user%0A
|
|
3660767a92750eae3c3ede69ef6778a23d3074a7
|
Add the Action enum
|
wdim/client/actions.py
|
wdim/client/actions.py
|
Python
| 0.000001
|
@@ -0,0 +1,84 @@
+import enum%0A%0A%0Aclass Action(enum.Enum):%0A create = 0%0A delete = 1%0A update = 2%0A
|
|
71bab0603cbf52d6b443cfff85ef19a04f882a36
|
Add the SQL statements because I forgot
|
inventory_control/database/sql.py
|
inventory_control/database/sql.py
|
Python
| 0.000837
|
@@ -0,0 +1,1413 @@
+%22%22%22%0ASo this is where all the SQL commands live%0A%22%22%22%0A%0ACREATE_SQL = %22%22%22%0ACREATE TABLE component_type (%0A id INT PRIMARY KEY AUTO_INCREMENT,%0A type VARCHAR(255) UNIQUE%0A);%0A%0A%0ACREATE TABLE components (%0A id INT PRIMARY KEY AUTO_INCREMENT,%0A sku TEXT,%0A type INT,%0A status INT,%0A FOREIGN KEY (type) REFERENCES component_type(id)%0A);%0A%0ACREATE TABLE projects (%0A id INT PRIMARY KEY AUTO_INCREMENT,%0A motherboard INT,%0A power_supply INT,%0A cpu INT,%0A hard_drive INT,%0A proj_case INT,%0A memory INT,%0A FOREIGN KEY (motherboard) REFERENCES components(id) ON DELETE CASCADE,%0A FOREIGN KEY (cpu) REFERENCES components(id) ON DELETE CASCADE,%0A FOREIGN KEY (power_supply) REFERENCES components(id) ON DELETE CASCADE,%0A FOREIGN KEY (hard_drive) REFERENCES components(id) ON DELETE CASCADE,%0A FOREIGN KEY (proj_case) REFERENCES components(id) ON DELETE CASCADE,%0A FOREIGN KEY (memory) REFERENCES components(id) ON DELETE CASCADE%0A);%0A%22%22%22%0A%0AADD_COMPONENT_TYPE = %22%22%22INSERT IGNORE INTO component_type (type) VALUES ('%7Btext%7D')%0A%22%22%22%0A%0AGET_COMPONENT_TYPE=%22%22%22SELECT * FROM component_type WHERE type='%7Btext%7D'%22%22%22%0A%0ADELETE_COMPONENT_TYPE = %22%22%22DELETE FROM component_type WHERE type='%7Btext%7D'%0A%22%22%22%0A%0ASELECT_ALL_COMPONENTS = %22%22%22%0ASELECT * FROM components INNER JOIN component_type%0A ON components.type = component_type.id;%0A %22%22%22%0A%0A%0ADROP_SQL = %22%22%22%0ADROP TABLE projects;%0ADROP TABLE components;%0ADROP TABLE component_type;%0A%22%22%22%0A
|
|
52076834e04fd735d4bba88472163c31347bc201
|
Create scarp_diffusion_no_component.py
|
scripts/diffusion/scarp_diffusion_no_component.py
|
scripts/diffusion/scarp_diffusion_no_component.py
|
Python
| 0.000004
|
@@ -0,0 +1,1390 @@
+#Import statements so that you will have access to the necessary methods%0Aimport numpy%0Afrom landlab import RasterModelGrid%0Afrom landlab.plot.imshow import imshow_node_grid, imshow_core_node_grid%0Afrom pylab import show, figure%0A%0A#Create a raster grid with 25 rows, 40 columns, and cell spacing of 10 m%0Amg = RasterModelGrid(25, 40, 10.0)%0A%0A#Create a field of node data (an array) on the grid called elevation. %0A#Initially populate this array with zero values.%0Az = mg.add_zeros('node', 'elevation')%0A%0A#Check the size of the array%0Alen(z)%0A%0A#Create a diagonal fault across the grid%0Afault_y = 50.0 + 0.25*mg.node_x%0Aupthrown_nodes = numpy.where(mg.node_y%3Efault_y)%0Az%5Bupthrown_nodes%5D += 10.0 + 0.01*mg.node_x%5Bupthrown_nodes%5D%0A%0A#Illustrate the grid%0Aimshow_node_grid(mg, z, cmap='jet', grid_units=%5B'm','m'%5D)%0Ashow()%0A%0A#Define paramaters%0Akd = 0.01 # 0.01 m2 per year%0Adt = 0.2*mg.dx*mg.dx/kd # CFL condition%0A%0A#Set boundary conditions%0Amg.set_closed_boundaries_at_grid_edges(False, True, False, True)%0A%0A#Get id values of the cord nodes on which you will operate%0Ainterior_nodes = mg.get_core_nodes()%0A%0A#Evolve landscape%0Afor i in range(25):%0A %09g = mg.calculate_gradients_at_active_links(z)%0A %09qs = -kd*g%0A %09dqsdx = mg.calculate_flux_divergence_at_nodes(qs)%0A %09dzdt = -dqsdx%0A %09z%5Binterior_nodes%5D += dzdt%5Binterior_nodes%5D*dt%0A%0A#Plot new landscape%0Afigure()%0Aimshow_node_grid(mg, z, cmap='jet', grid_units=%5B'm','m'%5D)%0Ashow()%0A
|
|
9d8278e98e505ffb68c2dcf870e61c0239721e5b
|
Add the gpio proxy for the Intel Edison
|
elpiwear/Edison/gpio.py
|
elpiwear/Edison/gpio.py
|
Python
| 0.000001
|
@@ -0,0 +1,1573 @@
+# The MIT License (MIT)%0A#%0A# Copyright (c) 2015 Frederic Jacob%0A#%0A# Permission is hereby granted, free of charge, to any person obtaining a copy%0A# of this software and associated documentation files (the %22Software%22), to deal%0A# in the Software without restriction, including without limitation the rights%0A# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell%0A# copies of the Software, and to permit persons to whom the Software is%0A# furnished to do so, subject to the following conditions:%0A#%0A# The above copyright notice and this permission notice shall be included in all%0A# copies or substantial portions of the Software.%0A#%0A# THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0A# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0A# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0A# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0A# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,%0A# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE%0A# SOFTWARE.%0A#%0A#%0A# Simple proxy class for access of the GPIO bus on the Raspberry Pi.%0A#%0A%0Aimport mraa%0A%0AIN = mraa.DIR_IN%0AOUT = mraa.DIR_OUT%0A%0Aclass gpio:%0A def __init__(self, pin, direction):%0A self.gpio = mraa.Gpio(pin)%0A self.gpio.dir(direction)%0A%0A def input(self):%0A return self.gpio.read()%0A%0A def output(self, value):%0A self.gpio.write(value)%0A%0A def on(self):%0A self.gpio.write(1)%0A%0A def off(self):%0A self.gpio.write(0)%0A
|
|
a49d28c552600ee2a0fe24ee83ed5cc7bbe36417
|
Add wrist tracker class
|
wristtracker.py
|
wristtracker.py
|
Python
| 0
|
@@ -0,0 +1,1812 @@
+import math%0Afrom markerutils import *%0A%0A%0Aclass TrackedMarker(object):%0A def __init__(self, marker, size, distance, position):%0A self.marker = marker%0A self.size = size%0A self.distance = distance%0A self.position = position%0A%0A%0Aclass WristTracker(object):%0A def __init__(self, marker_finder, marker_size, focal_length, origin_y=0):%0A self.marker_finder = marker_finder%0A self.S = marker_size%0A self.F = focal_length%0A self.origin_y = origin_y%0A self.cur_track = None%0A%0A def set_origin(self, origin_y):%0A self.origin_y = origin_y%0A%0A def get_origin(self):%0A return self.origin_y%0A%0A def get_marker(self, img, output=None):%0A h, w, _ = img.shape%0A center = (w / 2, h / 2)%0A%0A # Find tracker (closest to last location or center line)%0A track_matches = self.marker_finder.find_markers(img, output)%0A if self.cur_track:%0A # Ellipse: ((x, y), (MA, ma), angle)%0A track_matches.sort(key=lambda x: math.sqrt(%0A math.pow(x%5B1%5D%5B0%5D%5B0%5D - self.cur_track%5B1%5D%5B0%5D%5B0%5D, 2) + math.pow(x%5B1%5D%5B0%5D%5B1%5D - self.cur_track%5B1%5D%5B0%5D%5B1%5D, 2))) # Get closest to center%0A else:%0A track_matches.sort(key=lambda x: x%5B1%5D%5B0%5D%5B0%5D - center%5B0%5D) # Get closest to center%0A%0A # Get best ellipse%0A if track_matches:%0A self.cur_track = track_matches%5B0%5D%0A%0A # Operate on prev_track (retain last known position if no new marker found)%0A if self.cur_track:%0A # Calculate distance%0A px = get_pixel_size(self.cur_track)%0A D = self.F * self.S / px%0A%0A # Calculate position%0A y = self.cur_track%5B1%5D%5B0%5D%5B1%5D%0A pos = (y - self.origin_y) * (self.S / px)%0A%0A return TrackedMarker(self.cur_track, px, D, pos)%0A%0A return None%0A
|
|
cbc1609758762c7db4d3477248e87ecf29fdd288
|
add dep
|
hilbert/common/__accessdata__.py
|
hilbert/common/__accessdata__.py
|
Python
| 0.000001
|
@@ -0,0 +1,763 @@
+from sys import platform%0Afrom platform import architecture%0A%0A%0Adef install_data_files():%0A %22%22%22 %22%22%22%0A if sys.platform.startswith('netbsd'):%0A %22%22%22 %22%22%22%0A pass%0A elif sys.platform.startswith('freebsd'):%0A %22%22%22 %22%22%22%0A pass%0A elif sys.platform.startswith('linux'):%0A if PY3:%0A data_files = %5B('share/applications', %5B'script/hilbmetric.desktop'%5D),%0A ('share/pixmaps', %5B'data/hilbmetric.png'%5D)%5D%0A else:%0A data_files = %5B('share/applications', %5B'script/hilmetric.desktop'%5D ), %0A ('share/pixmaps', %5B'data/hilbmetric.png'%5D)%5D%0A elif os.name =='nt':%0A data_files = %5B('script', %5B'data/hilbmetric.ico'%5D)%5D%0A else:%0A data_files = %5B%5D%0A return data_files
|
|
dfe3f7fd7775ce13a670e1d27beddba5c1254a4a
|
Define the HPACK reference structure.
|
hyper/http20/hpack_structures.py
|
hyper/http20/hpack_structures.py
|
Python
| 0
|
@@ -0,0 +1,1634 @@
+# -*- coding: utf-8 -*-%0A%22%22%22%0Ahyper/http20/hpack_structures%0A~~~~~~~~~~~~~~~~~~~~~~~~~~~~~%0A%0AContains data structures used in hyper's HPACK implementation.%0A%22%22%22%0Aclass Reference(object):%0A %22%22%22%0A The reference object is essentially an object that 'points to' another%0A object, not unlike a pointer in C or similar languages. This object is%0A distinct from the normal Python name because we can tell the difference%0A between a reference and the 'actual' object.%0A%0A It behaves in the following ways:%0A%0A - Two references to the same object evaluate equal.%0A - Two references to different objects evaluate not equal, even if those%0A objects themselves evaluate equal.%0A - Two references to the same object hash to the same value.%0A - Two references to different objects hash to different values.%0A%0A The reference is distinct from and does not use weak references. A%0A reference may never point at an object that has been garbage collected.%0A This means that, to ensure objects get GC'd, any reference to them must%0A also go out of scope.%0A%0A This object is _conceptually_ immutable, but the implementation doesn't%0A attempt to enforce that to avoid the overhead. Be warned that changing%0A the object being referenced after creation could lead to all sorts of weird%0A nonsense.%0A%0A :param obj: The object being referenced.%0A %22%22%22%0A def __init__(self, obj):%0A self.obj = obj%0A%0A def __eq__(self, other):%0A return (isinstance(other, Reference) and self.obj is other.obj)%0A%0A def __ne__(self, other):%0A return not self.__eq__(other)%0A%0A def __hash__(self):%0A return id(self.obj)%0A
|
|
37b1250e213b78262075664e4291707ff369e981
|
Create clase-3.py
|
Ene-Jun-2019/Ejemplos/clase-3.py
|
Ene-Jun-2019/Ejemplos/clase-3.py
|
Python
| 0.000002
|
@@ -0,0 +1,2189 @@
+diccionario = %7B%0A 'a': %5B'accion', 'arte', 'arquitectura', 'agrego', 'actual'%5D,%0A 'b': %5B'bueno', 'bien', 'bonito'%5D,%0A 'c': %5B'casa', 'clase', 'coctel'%5D%0A%7D%0A%0Adiccionario%5B'd'%5D = %5B'dado', 'diccionario', 'duda'%5D%0A%0A# print(diccionario)%0A# print(diccionario%5B'a'%5D)%0A%0Afor llave, valor in diccionario.items():%0A pass #print(%22sho soy la llavesita -%3E %7B%7D, y el valor es: %7B%7D%22.format(llave, valor))%0A%0Afor llave in diccionario.keys():%0A pass #print(%22sho soy la puro llavesita: %7B%7D%22.format(llave))%0A%0Afor valor in diccionario.values():%0A pass #print(%22sho soy el puro valor: %7B%7D%22.format(valor))%0A%0A# print(sorted(diccionario.items()))%0A%0Atupla = (1, 2, 3)%0Alista = %5B4, 2, 3%5D%0Alista%5B0%5D = 0%0A# print(lista%5B0%5D)%0A# print(len(lista))%0A# print(len(diccionario))%0A# print(len(diccionario%5B'a'%5D))%0A%0Aclass Automovil(object):%0A bolsas_de_aire = 0%0A%0A %22%22%22docstring for Automovil%22%22%22%0A def __init__(self, **argumentos):%0A self.llantas = argumentos.get('llantas')%0A self.motor = argumentos.get('motor')%0A self.transmision = argumentos.get('transmision')%0A self.bolsas_de_aire = argumentos.get('bolsas_de_aire', 0)%0A self.marca = argumentos.get('marca')%0A%0A def set_llantas(self, llantas):%0A self.llantas = llantas%0A return self%0A%0A def set_motor(self, motor):%0A self.motor = motor%0A return self%0A%0A def set_transmision(self, transmision):%0A self.transmision = transmision%0A return self%0A%0A def get_llantas(self):%0A return self.llantas%0A%0A def get_motor(self):%0A return self.motor%0A%0A def get_transmision(self):%0A return self.transmision%0A%0A def __str__(self):%0A return %22%22%22%0A Llantas: %7B%7D%5CnMotor: %7B%7D%5CnTransmision: %7B%7D%5Cn# de Bolsas de Aire: %7B%7D%5CnMarca: %7B%7D%0A %22%22%22.format(self.llantas, self.motor, self.transmision, self.bolsas_de_aire, self.marca).strip().lower()%0A%0A#auto = Automovil(motor='v8', transmision='est%C3%A1ndar')%0A#auto2 = 
Automovil(motor='v4')%0Aauto3 = Automovil(motor='v6', llantas='euzkadi', transmision='cvt', marca='honda')%0A#auto3.set_motor('v6').set_llantas('euzkadi').set_transmision('cvt')%0A#print(auto.get_motor())%0A#print(auto.get_transmision())%0A#print(auto.get_llantas())%0A#print(auto)%0A#print(auto2)%0Aprint(auto3)%0A
|
|
48f2be780f6aa569bb1d8b8c0623e54cac49f613
|
add instance action model
|
core/models/instance_action.py
|
core/models/instance_action.py
|
Python
| 0.000001
|
@@ -0,0 +1,227 @@
+from django.db import models%0A%0A%0Aclass InstanceAction(models.Model):%0A name = models.CharField(max_length=50)%0A description = models.TextField()%0A%0A class Meta:%0A db_table = 'instance_action'%0A app_label = 'core'
|
|
78aaccb71fc64e52497abf0d0c768f3767a3d932
|
Update expenses status on database
|
fellowms/migrations/0020_auto_20160602_1607.py
|
fellowms/migrations/0020_auto_20160602_1607.py
|
Python
| 0
|
@@ -0,0 +1,635 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.9.6 on 2016-06-02 16:07%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('fellowms', '0019_auto_20160601_1512'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='expense',%0A name='status',%0A field=models.CharField(choices=%5B('W', 'Not submitted yet'), ('S', 'Submitted (but not processed yet)'), ('P', 'Processing'), ('A', 'Approved (waiting reply from finances)'), ('F', 'Finished')%5D, default='P', max_length=1),%0A ),%0A %5D%0A
|
|
1c6b74129d6e6a815d73e2a935fc86755ffb4f8a
|
Improve sourcecode (issue #11 and #17).
|
imagedownloader/requester/api.py
|
imagedownloader/requester/api.py
|
from requester.models import AutomaticDownload
from tastypie import fields
from tastypie.authentication import SessionAuthentication
from tastypie.resources import ModelResource
from libs.tastypie_polymorphic import ModelResource
class AutomaticDownloadResource(ModelResource):
class Meta(object):
queryset = AutomaticDownload.objects.all()
resource_name = 'automatic_download'
filtering = {
'created': ['exact', 'lt', 'lte', 'gte', 'gt'],
'modified': ['exact', 'lt', 'lte', 'gte', 'gt'],
}
authentication = SessionAuthentication()
|
Python
| 0
|
@@ -44,36 +44,8 @@
oad%0A
-from tastypie import fields%0A
from
@@ -147,60 +147,8 @@
rce%0A
-from libs.tastypie_polymorphic import ModelResource%0A
%0A%0Acl
|
65830295d30507e632a1a71c15083c0e58977c9c
|
add badchans.py, for honeypot purposes...
|
2.0/plugins/badchans.py
|
2.0/plugins/badchans.py
|
Python
| 0
|
@@ -0,0 +1,1381 @@
+%22%22%22%0Abadchans.py - Kills unopered users when they join specified channels.%0A%22%22%22%0A%0Afrom pylinkirc import utils, conf, world%0Afrom pylinkirc.log import log%0A%0AREASON = %22You have si%22 + %22nned...%22 # XXX: config option%0Adef handle_join(irc, source, command, args):%0A %22%22%22%0A killonjoin JOIN listener.%0A %22%22%22%0A # Ignore our own clients and other Ulines%0A if irc.is_privileged_service(source) or irc.is_internal_client(source):%0A return%0A%0A badchans = irc.serverdata.get('badchans')%0A if not badchans:%0A return%0A%0A channel = args%5B'channel'%5D%0A for badchan in badchans:%0A if irc.match_text(badchan, channel):%0A asm_uid = None%0A # Try to kill from the antispam service if available%0A if 'antispam' in world.services:%0A asm_uid = world.services%5B'antispam'%5D.uids.get(irc.name)%0A%0A for user in args%5B'users'%5D:%0A if irc.is_oper(user):%0A irc.msg(user, %22Warning: %25s kills unopered users!%22 %25 channel,%0A notice=True,%0A source=asm_uid or irc.pseudoclient.uid)%0A else:%0A log.info('(%25s) badchans: killing user %25s for joining channel %25s',%0A irc.name, irc.get_hostmask(user), channel)%0A irc.kill(asm_uid or irc.sid, user, REASON)%0A%0Autils.add_hook(handle_join, 'JOIN')%0A
|
|
e3ad95017bced8dac5474d6de5958decf4f58279
|
add migration file
|
corehq/apps/auditcare/migrations/0005_auditcaremigrationmeta.py
|
corehq/apps/auditcare/migrations/0005_auditcaremigrationmeta.py
|
Python
| 0.000001
|
@@ -0,0 +1,881 @@
+# Generated by Django 2.2.24 on 2021-06-20 14:02%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('auditcare', '0004_add_couch_id'),%0A %5D%0A%0A operations = %5B%0A migrations.CreateModel(%0A name='AuditcareMigrationMeta',%0A fields=%5B%0A ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),%0A ('key', models.CharField(db_index=True, max_length=50)),%0A ('state', models.CharField(choices=%5B('s', 'Started'), ('f', 'Finished'), ('e', 'Errored')%5D, max_length=1)),%0A ('record_count', models.PositiveIntegerField(null=True)),%0A ('created_at', models.DateTimeField(auto_now_add=True)),%0A ('modified_at', models.DateTimeField(auto_now=True)),%0A %5D,%0A ),%0A %5D%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.