| commit (string, 40 chars) | subject (string, 1–3.25k chars) | old_file (string, 4–311 chars) | new_file (string, 4–311 chars) | old_contents (string, 0–26.3k chars) | lang (string, 3 classes) | proba (float64, 0–1) | diff (string, 0–7.82k chars) |
|---|---|---|---|---|---|---|---|
c1c5f58d12ff9a8e532de971c28e3676915a7117
|
Add py-cairocffi package (#12161)
|
var/spack/repos/builtin/packages/py-cairocffi/package.py
|
var/spack/repos/builtin/packages/py-cairocffi/package.py
|
Python
| 0
|
@@ -0,0 +1,1157 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyCairocffi(PythonPackage):
    """cairocffi is a CFFI-based drop-in replacement for Pycairo, a set of
    Python bindings and object-oriented API for cairo. Cairo is a 2D vector
    graphics library with support for multiple backends including image
    buffers, PNG, PostScript, PDF, and SVG file output."""

    homepage = "https://github.com/Kozea/cairocffi"
    url = "https://pypi.io/packages/source/c/cairocffi/cairocffi-1.0.2.tar.gz"
    import_modules = ['cairocffi']

    version('1.0.2', sha256='01ac51ae12c4324ca5809ce270f9dd1b67f5166fe63bd3e497e9ea3ca91946ff')

    depends_on('python@3.5:', type=('build', 'run'))
    depends_on('py-setuptools@39.2.0:', type='build')
    depends_on('py-cffi@1.1.0:', type=('build', 'run'))
    depends_on('py-pytest-runner', type='test')
    depends_on('py-pytest-cov', type='test')
    depends_on('py-pytest-flake8', type='test')
    depends_on('py-pytest-isort', type='test')
|
|
eda01c6b45629d6c039785ba502dcde47cabd020
|
Add python implementation
|
probability.py
|
probability.py
|
Python
| 0.000099
|
@@ -0,0 +1,1035 @@
+#!/usr/bin/env python
import random
import sets

BOARD_SIZE = 9
MINES = 10
SAMPLES = 10**5

def generate_mines(board_size, num_of_mines):
    mines = sets.Set()
    while len(mines) != num_of_mines:
        mines.add((random.randint(0, board_size - 1),
                   random.randint(0, board_size - 1)))
    return mines

def has_adjacent(pos, mines):
    x, y = pos
    if (x + 1, y) in mines:
        return True
    if (x - 1, y) in mines:
        return True
    if (x, y + 1) in mines:
        return True
    if (x, y - 1) in mines:
        return True
    if (x + 1, y + 1) in mines:
        return True
    if (x + 1, y - 1) in mines:
        return True
    if (x - 1, y + 1) in mines:
        return True
    if (x - 1, y - 1) in mines:
        return True
    return False

def random_var_function(mines):
    for mine in mines:
        if not (has_adjacent(mine, mines)):
            return 0
    return 1

print sum([random_var_function(generate_mines(BOARD_SIZE, MINES)) for i in range(0, SAMPLES)]) / float(SAMPLES)
|
|
3352236ff27dbfd749b71dd152f6809b2019bee4
|
add tests
|
test_czech_holidays.py
|
test_czech_holidays.py
|
Python
| 0
|
@@ -0,0 +1,1521 @@
+import re
from datetime import date

import requests
import pytest

from czech_holidays import holidays, Holidays, Holiday


WIKIPEDIA_RE = re.compile(r'Rok\s\d{4}</th>\s<td>(?P<date>\w+)')

WIKIPEDIA_DATE_RE = re.compile(r'(?P<day>\d+)\.\s+(?P<month>\w+)\s+(?P<year>\d{4})')


def fetch_easter_dates():
    response = requests.get('https://cs.wikipedia.org/wiki/Velikono%C4%8Dn%C3%AD_pond%C4%9Bl%C3%AD')
    response.raise_for_status()
    return [parse_wikipedia_date(match.group('date'))
            for match in WIKIPEDIA_RE.finditer(response.text)]


def parse_wikipedia_date(date_text):
    match = WIKIPEDIA_DATE_RE.search(date_text)
    groups = match.groupdict()
    return date(int(groups['year']),
                3 if 'bře' in groups['month'] else 4,
                int(groups['day']))


@pytest.mark.parametrize('date_text, expected', [
    ('5. dubna 2021', date(2021, 4, 5)),
    ('18. dubna 2022', date(2022, 4, 18)),
    ('10. dubna 2023', date(2023, 4, 10)),
    ('31. března 2024', date(2024, 3, 31)),
])
def test_parse_wikipedia_date(date_text, expected):
    assert parse_wikipedia_date(date_text) == expected


@pytest.mark.parametrize('easter_date', fetch_easter_dates())
def test_easter(easter_date):
    holiday = Holidays(easter_date.year).easter
    assert (holiday.year, holiday.month, holiday.day) == (easter_date.year, easter_date.month, easter_date.day)


def test_christmas():
    holiday = Holidays(2022).christmas
    assert (holiday.year, holiday.month, holiday.day) == (2022, 12, 24)
|
|
0415071808b1bfa659a790e50692dc65d479b627
|
add config.sample.py
|
tests/config.sample.py
|
tests/config.sample.py
|
Python
| 0.000003
|
@@ -0,0 +1,230 @@
+"""
This is the sample config.py

create your own config.py which match your testsetup
It will be imported by device tests
"""

from testWrt import testsetup

TestSetup = testsetup.TestSetup()
TestSetup.set_openwrt("192.168.2.1")
|
|
ca823664d40a9ee3eb0d70cdc20a6aa3e13318d3
|
Fix for WidgetsTree
|
designer/nodetree.py
|
designer/nodetree.py
|
from kivy.uix.treeview import TreeViewLabel
from kivy.uix.scrollview import ScrollView
from kivy.properties import ObjectProperty, BooleanProperty
from kivy.app import App
from kivy.clock import Clock
from kivy.uix.tabbedpanel import TabbedPanel
from designer.common import widgets
class WidgetTreeElement(TreeViewLabel):
'''WidgetTreeElement represents each node in WidgetsTree
'''
node = ObjectProperty(None)
class WidgetsTree(ScrollView):
'''WidgetsTree class is used to display the Root Widget's Tree in a
Tree hierarchy.
'''
playground = ObjectProperty(None)
'''This property is an instance of :class:`~designer.playground.Playground`
:data:`playground` is a :class:`~kivy.properties.ObjectProperty`
'''
tree = ObjectProperty(None)
'''This property is an instance of :class:`~kivy.uix.treeview.TreeView`.
This TreeView is responsible for showing Root Widget's Tree.
:data:`tree` is a :class:`~kivy.properties.ObjectProperty`
'''
project_loader = ObjectProperty()
'''Reference to :class:`~designer.project_loader.ProjectLoader` instance.
:data:`project_loader` is a :class:`~kivy.properties.ObjectProperty`
'''
dragging = BooleanProperty(False)
'''Specifies whether a node is dragged or not.
:data:`dragging` is a :class:`~kivy.properties.BooleanProperty`
'''
selected_widget = ObjectProperty(allownone=True)
'''Current selected widget.
:data:`dragging` is a :class:`~kivy.properties.ObjectProperty`
'''
def recursive_insert(self, node, treenode):
'''This function will add a node to TreeView, by recursively travelling
through the Root Widget's Tree.
'''
if node is None:
return
b = WidgetTreeElement(node=node)
self.tree.add_node(b, treenode)
class_rules = self.project_loader.class_rules
root_widget = self.project_loader.root_rule.widget
is_child_custom = False
for rule in class_rules:
if rule.name == type(node).__name__:
is_child_custom = True
break
is_child_complex = False
for widget in widgets:
if widget[0] == type(node).__name__ and widget[1] == 'complex':
is_child_complex = True
break
if root_widget == node or (not is_child_custom and not is_child_complex):
if isinstance(node, TabbedPanel):
self.insert_for_tabbed_panel(node, b)
else:
for child in node.children:
self.recursive_insert(child, b)
def insert_for_tabbed_panel(self, node, treenode):
for tab in node.tab_list:
b = WidgetTreeElement(node=tab)
self.tree.add_node(b, treenode)
self.recursive_insert(tab.content, b)
def refresh(self, *l):
'''This function will refresh the tree. It will first remove all nodes
and then insert them using recursive_insert
'''
for node in self.tree.root.nodes:
self.tree.remove_node(node)
self.recursive_insert(self.playground.root, self.tree.root)
def on_touch_up(self, touch):
self.dragging = False
if self.collide_point(*touch.pos):
Clock.unschedule(self._start_dragging)
super(WidgetsTree, self).on_touch_up(touch)
return False
def on_touch_down(self, touch):
if self.collide_point(*touch.pos) and not self.dragging:
self.dragging = True
self.touch = touch
Clock.schedule_once(self._start_dragging, 2)
node = self.tree.get_node_at_pos((self.touch.x, self.touch.y))
if node:
self.selected_widget = node.node
self.playground.selected_widget = self.selected_widget
else:
self.selected_widget = None
self.playground.selected_widget = None
super(WidgetsTree, self).on_touch_down(touch)
return False
def _start_dragging(self, *args):
if self.dragging and self.selected_widget:
self.playground.selected_widget = self.selected_widget
self.playground.dragging = False
self.playground.touch = self.touch
self.playground.start_widget_dragging()
|
Python
| 0
|
@@ -3288,55 +3288,8 @@
 lse
-        if self.collide_point(*touch.pos):
@@ -3331,35 +3331,38 @@
agging)
-
-
+return
 super(WidgetsTr
@@ -3391,38 +3391,16 @@
 touch)

-        return False

 def
@@ -3955,19 +3955,22 @@
-
+return
 super(W
@@ -4013,30 +4013,8 @@
h)

-        return False

|
eb091fc81dc374d0eb0800a596d6e0db95a55687
|
Create CombinationSumII_001.py
|
leetcode/040-Combination-Sum-II/CombinationSumII_001.py
|
leetcode/040-Combination-Sum-II/CombinationSumII_001.py
|
Python
| 0.000021
|
@@ -0,0 +1,1084 @@
+#Node simplification, improvement & optimization
#How, it's good because it can be done on the original code of "Combination Sum"

class Solution:
    # @param {integer[]} candidates
    # @param {integer} target
    # @return {integer[][]}
    def combinationSum2(self, candidates, target):
        candidates.sort()
        return self.combsum(candidates, target)

    def combsum(self, nums, target):
        if target == 0:
            return [[]]
        if not nums or nums[0] > target or target < 1:
            return []

        res = []
        i = 0
        while i < len(nums):
            num = nums[i]
            cnt = 1
            while i + cnt < len(nums) and nums[i + cnt] == num:
                cnt += 1
            j = i + cnt
            pre = [num]
            t = target
            while t >= num and cnt > 0:
                t -= num
                cnt -= 1
                subs = self.combsum(nums[j:], t)
                for sub in subs:
                    res.append(pre + sub)
                pre += [num]
            i = j
        return res
|
|
d3a9a4a300acc204111e19945381a995f0f7cdda
|
add import script for Dumfries and Galloway
|
polling_stations/apps/data_collection/management/commands/import_dumfries_and_galloway.py
|
polling_stations/apps/data_collection/management/commands/import_dumfries_and_galloway.py
|
Python
| 0
|
@@ -0,0 +1,258 @@
+from data_collection.management.commands import BaseScotlandSpatialHubImporter

class Command(BaseScotlandSpatialHubImporter):
    council_id = 'S12000006'
    council_name = 'Dumfries and Galloway'
    elections = ['local.dumfries-and-galloway.2017-05-04']
|
|
ba5d87ff551df47df2ed4de15058df28ad49fe41
|
add error classes.
|
pumblr/error.py
|
pumblr/error.py
|
Python
| 0
|
@@ -0,0 +1,371 @@
+#!/usr/bin/python
# -*- coding: utf-8 -*-

class PumblrError(object):
    """Pumblr exception"""

    def __init__(self, msg):
        self._msg = msg

    def __str__(self):
        return self._msg


class PumblrAuthError(PumblrError):
    """403 Forbidden exception"""
    pass


class PumblrReqestError(PumblrError):
    """400 Bad Request exception"""
    pass
|
|
e6fb23f0bfb095b63bb9427f7838719f5248177c
|
Update Mindreader to use copy and no longer manipulate state
|
axelrod/strategies/mindreader.py
|
axelrod/strategies/mindreader.py
|
import copy
import inspect
from axelrod import Player, RoundRobin, Game, update_histories
def simulate_match(player_1, player_2, strategy, rounds=10):
"""Simulates a number of matches."""
for match in range(rounds):
play_1, play_2 = strategy, player_2.strategy(player_1)
# Update histories and counts
update_histories(player_1, player_2, play_1, play_2)
def roll_back_history(player, rounds):
"""Undo the last `rounds` rounds as sufficiently as possible."""
for i in range(rounds):
play = player.history.pop(-1)
if play == 'C':
player.cooperations -= 1
elif play == 'D':
player.defections -= 1
def look_ahead(player_1, player_2, rounds=10):
"""Looks ahead for `rounds` and selects the next strategy appropriately."""
results = []
game = Game()
# Simulate plays for `rounds` rounds
strategies = ['C', 'D']
for strategy in strategies:
#opponent_ = copy.deepcopy(player_2) # need deepcopy here
opponent_ = player_2
round_robin = RoundRobin(players=[player_1, opponent_], game=game,
turns=rounds)
simulate_match(player_1, opponent_, strategy, rounds)
results.append(round_robin._calculate_scores(player_1, opponent_)[0])
# Restore histories and counts
roll_back_history(player_1, rounds)
roll_back_history(player_2, rounds)
return strategies[results.index(max(results))]
class MindReader(Player):
"""A player that looks ahead at what the opponent will do and decides what to do."""
name = 'Mind Reader'
classifier = {
'memory_depth': -10,
'stochastic': False,
'inspects_source': True, # Finds out what opponent will do
'manipulates_source': False,
'manipulates_state': True
}
def strategy(self, opponent):
"""Pretends to play the opponent a number of times before each match.
The primary purpose is to look far enough ahead to see if a defect will
be punished by the opponent.
If the MindReader attempts to play itself (or another similar
strategy), then it will cause a recursion loop, so this is also handled
in this method, by defecting if the method is called by strategy
"""
curframe = inspect.currentframe()
calframe = inspect.getouterframes(curframe, 2)
calname = calframe[1][3]
if calname in ('strategy', 'simulate_match'):
return 'D'
best_strategy = look_ahead(self, opponent)
return best_strategy
class ProtectedMindReader(MindReader):
"""A player that looks ahead at what the opponent will do and decides what to do.
It is also protected from mind control strategies"""
name = 'Protected Mind Reader'
classifier = {
'memory_depth': -10,
'stochastic': False,
'inspects_source': True, # Finds out what opponent will do
'manipulates_source': True, # Stops opponent's strategy
'manipulates_state': False
}
def __setattr__(self, name, val):
"""Stops any other strategy altering the methods of this class """
if name == 'strategy':
pass
else:
self.__dict__[name] = val
|
Python
| 0
|
@@ -952,17 +952,16 @@
-#
 opponent
@@ -1013,37 +1013,8 @@
ere
-        opponent_ = player_2
@@ -1358,52 +1358,8 @@
nds)
-
    roll_back_history(player_2, rounds)

@@ -1761,19 +1761,20 @@
 state':
-Tru
+Fals
e
 }
|
86fe554e8cc67ad346d2ecc532cea6e94461a0c6
|
Add support for file session
|
app/tools/session.py
|
app/tools/session.py
|
Python
| 0
|
@@ -0,0 +1,2058 @@
+#-*- coding:utf-8 -*-

import uuid
import time
import os
import json
class Session(dict):
	def __init__(self,session_id=None,expire=None,*args,**kw):
		if session_id==None:
			self._session_id=self._generate_session_id()
		else:
			self._session_id=session_id
		self._expire=expire
		super(Session,self).__init__(*args,**kw)
	def _generate_session_id(self):
		return str(uuid.uuid1().hex)
	def set(self,sname,svalue):
		pass
	def get(self,sname):
		pass
	def __getattr__(self,k):
		pass
	def __setattr__(self,k,v):
		pass
	def save(self):
		pass
class FileSession(Session):
	_sesion_dir='/tmp/session'
	def __init__(self,session_id=None,expire=None,*args,**kw):
		if not os.path.exists(self._session_dir):
			os.mkdir(self._session_dir)
		if session_id==None:
			self._session_id=str(uuid.uuid1().hex)
			self._session_file=os.path.join(self._session_dir,self._session_id)
		else:
			self._session_id=session_id
			self._session_file=os.path.join(self._session_dir,session_id)
		if os.path.exists(self._session_file):
			with open(self._session_file,'r',errors='ignore',encoding='utf-8') as f:
				self[self._session_id]=json.load(self._fp)
		else:
			self[self._session_id]={}
		super(FileSession,self).__init__(self._session_id,expire,*args,**kw)
	def set(self,sname,svalue):
		self[self._session_id][sname]=svalue
	def get(self,sname):
		return self[self._session_id].get(sname,None)
	def save(self):
		with open(self._session_file,'w',errors='ignore',encoding='utf-8') as f:
			json.dump(self[self._session_if],f)
	def __getattr__(self,k):
		if k in self[self._session_id]:
			return self[self._session_id].get(k)
		return None
	def __setattr__(self,k,v):
		self[self._session_id][k]=v

class MongoSession(Session):
	def __init__(self,session_id=None,expire=None,*args,**kw):
		super(MongoSession,self).__init__(session_id,expire,*args,**kw)
class RedisSession(Session):
	def __init__(self,session_id=None,expire=None,*args,**kw):
		super(RedisSession,self).__init__(session_id,expire,*args,**kw)

class SessionManager(object):
	def __init__(self,
|
|
8bfd0031c4a93b644cd8f9892a0cc1a8671a9024
|
add build/BuildSpawn.py
|
Source/Python/build/BuildSpawn.py
|
Source/Python/build/BuildSpawn.py
|
Python
| 0.000001
|
@@ -0,0 +1,594 @@
+import os
from threading import *
from subprocess import *



class BuildSpawn(Thread):
    def __init__(self, Sem=None, Filename=None, Args=None, Num=0):
        Thread.__init__(self)
        self.sem=Sem
        self.filename=Filename
        self.args=Args
        self.num=Num


    def run(self):
        self.sem.acquire()
        p = Popen(["nmake", "/nologo", "-f", self.filename, self.args], env=os.environ, cwd=os.path.dirname(self.filename))
        p.communicate()
        if p.returncode != 0:
            return p.returncode
        self.sem.release()
|
|
00a00621f005e3db3fd25c4c09fb1540ba165fed
|
Test the VenvBuilder
|
tests/unit/builders/test_venv.py
|
tests/unit/builders/test_venv.py
|
Python
| 0
|
@@ -0,0 +1,1648 @@
+import subprocess

import pretend
import pytest

import virtualenv.builders.venv

from virtualenv.builders.venv import VenvBuilder, _SCRIPT


def test_venv_builder_check_available_success(monkeypatch):
    check_output = pretend.call_recorder(lambda *a, **kw: None)
    monkeypatch.setattr(
        virtualenv.builders.venv,
        "check_output",
        check_output,
    )

    assert VenvBuilder.check_available("wat")
    assert check_output.calls == [
        pretend.call(["wat", "-c", "import venv"], stderr=subprocess.STDOUT),
    ]


def test_venv_builder_check_available_fails(monkeypatch):
    @pretend.call_recorder
    def check_output(*args, **kwargs):
        raise subprocess.CalledProcessError(1, "an error!")

    monkeypatch.setattr(
        virtualenv.builders.venv,
        "check_output",
        check_output,
    )

    assert not VenvBuilder.check_available("wat")
    assert check_output.calls == [
        pretend.call(["wat", "-c", "import venv"], stderr=subprocess.STDOUT),
    ]


@pytest.mark.parametrize("system_site_packages", [True, False])
def test_venv_builder_create_venv(tmpdir, monkeypatch, system_site_packages):
    check_call = pretend.call_recorder(lambda *a, **kw: None)
    monkeypatch.setattr(subprocess, "check_call", check_call)
    builder = VenvBuilder(
        "wat",
        None,
        system_site_packages=system_site_packages,
    )
    builder.create_virtual_environment(str(tmpdir))

    script = _SCRIPT.format(
        system_site_packages=system_site_packages,
        destination=str(tmpdir),
    )

    assert check_call.calls == [
        pretend.call(["wat", "-c", script])
    ]
|
|
efc8d3182f79111b3a1b7df445dafd46fef9862a
|
Add YAML wrapper allowing conf strings to be used in YAML source files
|
controlbeast/utils/yaml.py
|
controlbeast/utils/yaml.py
|
Python
| 0
|
@@ -0,0 +1,1495 @@
+# -*- coding: utf-8 -*-
"""
    controlbeast.utils.yaml
    ~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: Copyright 2014 by the ControlBeast team, see AUTHORS.
    :license: ISC, see LICENSE for details.
"""
import os
import yaml
from controlbeast.conf import CbConf
from controlbeast.utils.dynamic import CbDynamicIterable
from controlbeast.utils.file import CbFile


class CbYaml(CbDynamicIterable, CbFile):
    """
    Wrapper class providing access to YAML data sources.

    This wrapper allows using Python format strings within YAML source
    files, referring to any name defined in :py:mod:`~controlbeast.conf.default`.
    """

    #: File name of the YAML file used as data source
    _filename = None

    def __init__(self, filename=''):
        """
        CbYaml constructor
        """
        if self._check_file_exists(filename) and self._check_access(filename, os.R_OK):
            self._filename = filename
        if self._filename:
            conf = CbConf.get_instance()
            with open(self._filename, 'r') as fp:
                content = fp.read()
            content = content.format(**conf)
            yaml_dict = yaml.safe_load(content)
        else:
            yaml_dict = None
        super(CbYaml, self).__init__(dict=yaml_dict)

    @property
    def filename(self):
        """
        File name of the YAML file to read from.
        Expected to be a string representing a valid and accessible YAML file.
        """
        return self._filename
|
|
79f92d050fbf9ebe4f088aeabb5e832abeefe0d5
|
Initialize unit tests for Coursera API module
|
tests/test_coursera.py
|
tests/test_coursera.py
|
Python
| 0.000001
|
@@ -0,0 +1,1040 @@
+import unittest

from mooc_aggregator_restful_api import coursera


class CourseraTestCase(unittest.TestCase):
    '''
    Unit Tests for module udacity

    '''

    def setUp(self):
        self.coursera_test_object = coursera.CourseraAPI()

    def test_coursera_api_courses_response(self):
        self.assertEqual(self.coursera_test_object.response_courses.status_code, 200)

    def test_coursera_api_universities_response(self):
        self.assertEqual(self.coursera_test_object.response_universities.status_code, 200)

    def test_coursera_api_categories_response(self):
        self.assertEqual(self.coursera_test_object.response_categories.status_code, 200)

    def test_coursera_api_instructors_response(self):
        self.assertEqual(self.coursera_test_object.response_instructors.status_code, 200)

    def test_coursera_api_sessions_response(self):
        self.assertEqual(self.coursera_test_object.response_sessions.status_code, 200)

    def tearDown(self):
        pass

if __name__ == '__main__':
    unittest.main()
|
|
7d65a128ee71bc5c85170b247730ea385ab58d0c
|
add first handler test
|
tests/test_handlers.py
|
tests/test_handlers.py
|
Python
| 0
|
@@ -0,0 +1,988 @@
+import unittest
from unittest.mock import Mock


class RangeVotingHandler():
    def __init__(self, member_repository):
        self.repository = member_repository

    def handle(self, command):
        self.repository.save()


class RangeVotingRepository():
    def save(self, rangevoting):
        pass


class RangeVotingHandlerTestCase(unittest.TestCase):
    def test_creation(self):
        member_mock_repository = RangeVotingRepository()
        rangevoting_handler = RangeVotingHandler(member_mock_repository)
        self.assertEqual(member_mock_repository, rangevoting_handler.repository)

    def test_handle_calls_save_method_from_repository(self):
        member_mock_repository = RangeVotingRepository()
        member_mock_repository.save = Mock()

        rangevoting_handler = RangeVotingHandler(member_mock_repository)
        rangevoting_handler.handle({})

        self.assertTrue(member_mock_repository.save.called)


if __name__ == '__main__':
    unittest.main()
|
|
f647545b98693e309fa535c1c917be965df12cd7
|
allow null
|
apps/intel/models.py
|
apps/intel/models.py
|
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.db import connection
from domain.models import Membership
from intel.schema_models import *
# Adding roles and clinics to the user profile
class Role(models.Model):
name = models.CharField(max_length=255)
def __unicode__(self):
return self.name
class Meta:
verbose_name = _("Role")
class Clinic(models.Model):
name = models.CharField(max_length=255)
def __unicode__(self):
return self.name
class Meta:
verbose_name = _("Clinic")
class UserProfile(models.Model):
user = models.ForeignKey(User, unique=True)
clinic = models.ForeignKey(Clinic)
role = models.ForeignKey(Role)
# schema specific methods - these use the inspectdb general schema_models.py which in turn dumps the models generated per the domain's xforms
REGISTRATION_TABLE = IntelGrameenMotherRegistration._meta.db_table
FOLLOWUP_TABLE = IntelGrameenSafeMotherhoodFollowup._meta.db_table
def registrations():
return IntelGrameenMotherRegistration.objects.filter(sampledata_meta_userid__gt=0)
def hi_risk():
return IntelGrameenMotherRegistration.objects.filter(sampledata_meta_userid__gt=0, sampledata_hi_risk="yes")
def follow_up():
return IntelGrameenSafeMotherhoodFollowup.objects.all()
# this is adapted from the various SqlReport stuff, which wasn't accurate (references xforms_ tables instead of actual data)
# and was a pain to use
#
# TODO: see if Django GROUP BY equivalent is powerful enough to turn this to ORM code instead of SQL
def registrations_by_clinic():
sql = '''
select clinic_id, count(sampledata_case_id)
from %s, intel_userprofile
where sampledata_meta_userid > 0
and sampledata_meta_userid = intel_userprofile.user_id
group by clinic_id
''' % REGISTRATION_TABLE
return _result_to_dict(_rawquery(sql))
def hi_risk_by_clinic():
sql = '''
select clinic_id, count(sampledata_case_id)
from %s, intel_userprofile
where sampledata_hi_risk = 'yes' and sampledata_meta_userid > 0
and sampledata_meta_userid = intel_userprofile.user_id
group by clinic_id
''' % REGISTRATION_TABLE
return _result_to_dict(_rawquery(sql))
def followup_by_clinic():
sql = '''
select clinic_id, count(safe_pregnancy_case_id)
from %s, intel_userprofile
where safe_pregnancy_meta_userid > 0
and safe_pregnancy_meta_userid = intel_userprofile.user_id
group by clinic_id
''' % FOLLOWUP_TABLE
return _result_to_dict(_rawquery(sql))
def _rawquery( sql):
cursor = connection.cursor()
cursor.execute(sql)
return cursor.fetchall()
# turns a bunch of 2 value lists to dictionary. Used for group results
def _result_to_dict(results):
res = {}
for row in results:
res[row[0]] = row[1]
return res
# # MemberRole.objects.filter(member = 4)
# # [<MemberRole: brian: chw>, <MemberRole: brian: doctor>]
#
# class MemberRole(models.Model):
# member = models.ForeignKey(Membership)
# role = models.ForeignKey(Role)
#
# def __unicode__():
# return "%s: %s" % (member.member_object, role)
#
# class Meta:
# verbose_name = _("Member Role")
#
#
# # helper method: get array of roles for a Member
# # >>> MemberRole.per(4)
# # [u'chw', u'doctor']
# # useful for easily checking a user's role, eg:
# # >>> if 'chw' in MemberRole.per(4): ...
#
# @staticmethod
# def per(member):
# roles = []
# for r in MemberRole.objects.filter(member = member):
# roles.append(r.role.name)
#
# return roles
|
Python
| 0.000813
|
@@ -756,16 +756,27 @@
y(Clinic
+, null=True
)
    ro
@@ -805,16 +805,27 @@
Key(Role
+, null=True
)


# sc
|
8415decb4fea7cb8ad3a2800ecd9c9a9190fa331
|
Add rename script in python
|
rename.py
|
rename.py
|
Python
| 0.000001
|
@@ -0,0 +1,322 @@
+import os

SEPARATOR = '_'
EXT = ".png"
digits = range(1, 10)

for d in digits:
	i = 1
	for file in os.listdir(str(d)):
		if file.endswith(EXT):
			filepath = str(d) + '/'
			old_filename = filepath + file
			new_filename = filepath + str(d) + SEPARATOR + str(i) + EXT
			os.rename(old_filename, new_filename)
	 	i += 1
|
|
96cfe4d55ae6dd34cc30a72f19118aa66c65f7ca
|
add __main__ file to for python 2.7 entrypoint
|
selenium_odoo_qunit/__main__.py
|
selenium_odoo_qunit/__main__.py
|
Python
| 0.000007
|
@@ -0,0 +1,105 @@
+if __name__ == '__main__':
    from selenium_odoo_qunit import selenium_odoo_qunit as soq
    soq.main()
|
|
769f350802b78ffa9c74bc5b9a1e912b64ab718d
|
Add new package: py-asgiref (#16233)
|
var/spack/repos/builtin/packages/py-asgiref/package.py
|
var/spack/repos/builtin/packages/py-asgiref/package.py
|
Python
| 0
|
@@ -0,0 +1,1486 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class PyAsgiref(PythonPackage):
    """ASGI specification and utilities."""

    homepage = "https://asgi.readthedocs.io/en/latest/"
    url = "https://github.com/django/asgiref/archive/3.2.7.tar.gz"

    version('3.2.7', sha256='8a0b556b9e936418475f6670d59e14592c41d15d00b5ea4ad26f2b46f9f4fb9a')
    version('3.2.6', sha256='29788163bdad8d494475a0137eba39b111fd86fbe825534a9376f9f2ab44251a')
    version('3.2.5', sha256='eeb01ba02e86859746ee2a7bc8a75c484a006dc9575723563f24642a12b2bba8')
    version('3.2.4', sha256='89e47532340338b7eafd717ab28658e8b48f4565d8384628c88d2d41565c8da0')
    version('3.2.3', sha256='d38e16141c7189e23bfe03342d9cd3dbfd6baab99217892bfa7bc5646315b6bd')
    version('3.2.2', sha256='47edf327aa70f317c9bc810d469ce681f1b35a7f499f68cf2b5da3ba6a651e69')
    version('3.2.1', sha256='06a21df1f4456d29079f3c475c09ac31167bcc5f024c637dedf4e00d2dd9020b')
    version('3.2.0', sha256='5db8c7a6c1ff54ea04a52f994d8af959427f1cab8e427aa802492a89fb0b635a')
    version('3.1.4', sha256='bf01c52111ef7af2adc1e6d90282d2a32c5ebe09e84ae448389ceff7cef53fa9')
    version('3.1.3', sha256='5b8bb7b3719b8c12a6c2363784a4d8c0eb5e980d8b4fdb6f38eccb52071dfab5')

    depends_on('python@3.5:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
|
|
e6e641e3beb2aad3e6d4eb1a37a7ee029006631b
|
add download_data.py for downloading data from nrao, it is not working yet
|
aws/download_data.py
|
aws/download_data.py
|
Python
| 0
|
@@ -0,0 +1,1317 @@
+import sys
import os
import urllib
import urllib2
import webbrowser

from mechanize import ParseResponse, urlopen, urljoin, Browser

url = 'https://archive.nrao.edu/archive/advquery.jsp'


def download_with_mech(email, destination, file):
	'''
	download data from nrao archive. Now it only works for filling the form. It cannot
	submit by clicking "Get my data" buttom
	'''
	br = Browser()
	br.set_handle_robots(False)   # ignore robots
	br.open(url)
	br.select_form(nr=0)
	br["PROJECT_CODE"] = "14A-425"
	submit_response = br.submit(name = "SUBMIT", label = "Submit Query")
	content = submit_response.read()
	#print br.read()

	'''redirect to the download page'''
	br.select_form(name = "Form1")
	br["EMAILADDR"] = "jtan0325@berkeley.edu"  #replace by email
	br["COPYFILEROOT"] = "/lustre/aoc/projects/fasttransients/moving"  #replace by destination
	br["CONVERT2FORMAT"] = ["SDM"]
	achive_files = br.form.find_control(name = "FTPCHECKED")

	for v in range(0, len(achive_files.items)):
		# file name should be replaced by FILE
	    if "14A-425_sb29260830_1_000.56825.290659375" in str(achive_files.items[v]):
	        achive_files.items[v].selected = True
	        break
	print str(br.read())
	#submit_response = br.submit(name = "DOWNLOADFTPCHK")
	#submit_content = submit_response.read()
	#print submit_response
|
|
f3ed5d434b83d7531aecd1431645267dedfecb45
|
Create mqtt_sender.py
|
raspi/mqtt_sender.py
|
raspi/mqtt_sender.py
|
Python
| 0.000002
|
@@ -0,0 +1,160 @@
+import paho.mqtt.publish as publish

MQTT_SERVER = "192.168.1.10"
MQTT_PATH = "test_channel"

publish.single(MQTT_PATH, "Hello World!", hostname=MQTT_SERVER)
|
|
696402e5e292f52f96fc9dcd07457ac54387b21a
|
Remove fallback related code from backend interface.
|
redis_cache/cache.py
|
redis_cache/cache.py
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.cache.backends.base import BaseCache
from django.core.exceptions import ImproperlyConfigured
from django.core.cache import get_cache
from .util import load_class
from .exceptions import ConnectionInterrupted
import functools
DJANGO_REDIS_IGNORE_EXCEPTIONS = getattr(settings,
'DJANGO_REDIS_IGNORE_EXCEPTIONS', False)
def omit_exception(method):
"""
Simple decorator that intercepts connection
errors and ignores these if settings specify this.
Note: this doesn't handle the `default` argument in .get().
"""
if not DJANGO_REDIS_IGNORE_EXCEPTIONS:
return method
@functools.wraps(method)
def _decorator(self, *args, **kwargs):
try:
return method(self, *args, **kwargs)
except ConnectionInterrupted:
return None
return _decorator
class RedisCache(BaseCache):
def __init__(self, server, params):
super(RedisCache, self).__init__(params)
self._server = server
self._params = params
options = params.get('OPTIONS', {})
self._client_cls = options.get('CLIENT_CLASS', 'redis_cache.client.DefaultClient')
self._client_cls = load_class(self._client_cls)
self._client = None
self._fallback_name = options.get('FALLBACK', None)
self._fallback = None
self._fallback_counter = 0
self._on_fallback = False
@property
def client(self):
"""
Lazy client connection property.
"""
if self._client is None:
self._client = self._client_cls(self._server, self._params, self)
return self._client
@property
def raw_client(self):
"""
Return a raw redis client (connection). Not all
pluggable clients supports this feature. If not supports
this raises NotImplementedError
"""
return self.client.get_client(write=True)
@property
def fallback_client(self):
"""
Used in fallback mode on the primary client does not
connect to the server.
"""
if self._fallback is None:
try:
self._fallback = get_cache(self._fallback_name)
except TypeError:
raise ImproperlyConfigured("%s cache backend is not configured" % (self._fallback_name))
return self._fallback
@omit_exception
def set(self, *args, **kwargs):
return self.client.set(*args, **kwargs)
@omit_exception
def incr_version(self, *args, **kwargs):
return self.client.incr_version(*args, **kwargs)
@omit_exception
def add(self, *args, **kwargs):
return self.client.add(*args, **kwargs)
@omit_exception
def get(self, key, default=None, version=None, client=None):
try:
return self.client.get(key, default=default, version=version,
client=client)
except ConnectionInterrupted:
if DJANGO_REDIS_IGNORE_EXCEPTIONS:
return default
raise
@omit_exception
def delete(self, *args, **kwargs):
return self.client.delete(*args, **kwargs)
@omit_exception
def delete_pattern(self, *args, **kwargs):
return self.client.delete_pattern(*args, **kwargs)
@omit_exception
def delete_many(self, *args, **kwargs):
return self.client.delete_many(*args, **kwargs)
@omit_exception
def clear(self):
return self.client.clear()
@omit_exception
def get_many(self, *args, **kwargs):
return self.client.get_many(*args, **kwargs)
@omit_exception
def set_many(self, *args, **kwargs):
return self.client.set_many(*args, **kwargs)
@omit_exception
def incr(self, *args, **kwargs):
return self.client.incr(*args, **kwargs)
@omit_exception
def decr(self, *args, **kwargs):
return self.client.decr(*args, **kwargs)
@omit_exception
def has_key(self, *args, **kwargs):
return self.client.has_key(*args, **kwargs)
@omit_exception
def keys(self, *args, **kwargs):
return self.client.keys(*args, **kwargs)
@omit_exception
def close(self, **kwargs):
self.client.close(**kwargs)
|
Python
| 0
|
@@ -1304,167 +1304,8 @@
ne

-        self._fallback_name = options.get('FALLBACK', None)
        self._fallback = None
        self._fallback_counter = 0
        self._on_fallback = False

@@ -1822,452 +1822,8 @@
e)

-    @property
    def fallback_client(self):
        """
        Used in fallback mode on the primary client does not
        connect to the server.
        """

        if self._fallback is None:
            try:
                self._fallback = get_cache(self._fallback_name)
            except TypeError:
                raise ImproperlyConfigured("%s cache backend is not configured" % (self._fallback_name))
        return self._fallback

|
a51a226dc0a134e01915e514e2146a664671d998
|
Update dates for CFP
|
pyconcz_2017/proposals/pyconcz2016_config.py
|
pyconcz_2017/proposals/pyconcz2016_config.py
|
from datetime import datetime
from django.utils.timezone import get_current_timezone
from pyconcz_2017.proposals.models import Talk, Workshop, FinancialAid
tz = get_current_timezone()
class TalksConfig:
model = Talk
key = 'talks'
title = 'Talks'
cfp_title = 'Submit your talk'
template_about = 'proposals/talks_about.html'
date_start = datetime(year=2016, month=8, day=1, hour=12, minute=0, tzinfo=tz)
date_end = datetime(year=2016, month=9, day=15, hour=23, minute=59, second=59, tzinfo=tz)
class WorkshopsConfig:
model = Workshop
key = 'workshops'
title = 'Workshops'
cfp_title = 'Submit your workshop'
template_about = 'proposals/workshops_about.html'
date_start = datetime(year=2016, month=8, day=1, hour=12, minute=0, tzinfo=tz)
date_end = datetime(year=2016, month=10, day=5, hour=12, minute=0, tzinfo=tz)
class FinancialAidConfig:
model = FinancialAid
key = 'financial-aid'
title = 'Financial Aid'
cfp_title = 'Financial Aid Programme'
template_about = 'proposals/financial_aid_about.html'
date_start = datetime(year=2016, month=9, day=8, hour=12, minute=0, tzinfo=tz)
date_end = datetime(year=2016, month=10, day=8, hour=12, minute=0, tzinfo=tz)
|
Python
| 0.000001
|
@@ -371,33 +371,33 @@
ime(year=201
-6
+7
, month=
8, day=1, ho
@@ -380,32 +380,32 @@
2017, month=
-8
+3
, day=
-1
+6
, hour=12, m
@@ -452,33 +452,33 @@
ime(year=201
-6
+7
, month=
9, day=15, h
@@ -469,17 +469,17 @@
nth=
-9
+4
, day=1
-5
+7
, ho
@@ -737,25 +737,25 @@
year=201
-6
+7
, month=
8, day=1
@@ -750,16 +750,16 @@
nth=
-8
+3
, day=
-1
+6
, ho
@@ -814,33 +814,33 @@
ime(year=201
-6
+7
, month=
10, day=5, h
@@ -831,26 +831,26 @@
nth=
-10
+4
, day=
-5
+17
, hour=
-1
2
+3
, mi
@@ -846,33 +846,45 @@
hour=23, minute=
-0
+59, second=59
, tzinfo=tz)


c
|
ca842aee42fcb149e72a39334035cba81e969c65
|
add clock
|
files/bin/bin/clock.py
|
files/bin/bin/clock.py
|
Python
| 0.000741
|
@@ -0,0 +1,1013 @@
+#!/usr/bin/env python3
# -*- coding: utf-8 -*-

##############################################################################
#
# clock
# -----
#
# This script prints an icon representation of the time of day.
#
# Dependencies: python3, nerd-fonts
#
# :authors: J.P.H. Bruins Slot
# :date: 07-01-2019
# :version: 0.1.0
#
##############################################################################

import datetime


def clock():
    now = datetime.datetime.now().hour % 12

    if now == 0:
        return ""
    elif now == 1:
        return ""
    elif now == 2:
        return ""
    elif now == 3:
        return ""
    elif now == 4:
        return ""
    elif now == 5:
        return ""
    elif now == 6:
        return ""
    elif now == 7:
        return ""
    elif now == 8:
        return ""
    elif now == 9:
        return ""
    elif now == 10:
        return ""
    elif now == 11:
        return ""
    else:
        return ""


if __name__ == "__main__":
    print(clock())
|
|
0c09a85ff19a48dd69f44720823e8bb2cb75eef8
|
add the visualization of the 1st conv layer kernels
|
chap9/visualize_conv1_kernels.py
|
chap9/visualize_conv1_kernels.py
|
Python
| 0
|
@@ -0,0 +1,1087 @@
+import sys
import numpy as np
import matplotlib.pyplot as plt
import cv2
sys.path.append('/path/to/caffe/python')
import caffe

ZOOM_IN_SIZE = 50
PAD_SIZE = 4

WEIGHTS_FILE = 'freq_regression_iter_10000.caffemodel'
DEPLOY_FILE = 'deploy.prototxt'

net = caffe.Net(DEPLOY_FILE, WEIGHTS_FILE, caffe.TEST)
kernels = net.params['conv1'][0].data

kernels -= kernels.min()
kernels /= kernels.max()

zoomed_in_kernels = []
for kernel in kernels:
    zoomed_in_kernels.append(cv2.resize(kernel[0], (ZOOM_IN_SIZE, ZOOM_IN_SIZE), interpolation=cv2.INTER_NEAREST))

# plot 12*8 squares kernels
half_pad = PAD_SIZE / 2
padded_size = ZOOM_IN_SIZE+PAD_SIZE
padding = ((0, 0), (half_pad, half_pad), (half_pad, half_pad))

padded_kernels = np.pad(zoomed_in_kernels, padding, 'constant', constant_values=1)
padded_kernels = padded_kernels.reshape(8, 12, padded_size, padded_size).transpose(0, 2, 1, 3)
kernels_img = padded_kernels.reshape((8*padded_size, 12*padded_size))[half_pad:-half_pad, half_pad: -half_pad]

plt.imshow(kernels_img, cmap='gray', interpolation='nearest')
plt.axis('off')

plt.show()
|
|
65546f4cd97331455a3309509d076825f07a078c
|
Add set_up file to run all tests
|
set_up.py
|
set_up.py
|
Python
| 0
|
@@ -0,0 +1,184 @@
+import os
import unittest2 as unittest

def suite():
    return unittest.TestLoader().discover('tests','test_*.py')

if __name__ == '__main__':
    unittest.main(defaultTest = 'suite')
|
|
abea1f4598928fddf750358efcedbfaade019bf4
|
Add migration to fix Attachment cache.
|
zerver/migrations/0386_fix_attachment_caches.py
|
zerver/migrations/0386_fix_attachment_caches.py
|
Python
| 0
|
@@ -0,0 +1,2291 @@
+# Generated by Django 3.2.12 on 2022-03-23 04:32

from django.db import migrations, models
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.db.models import Exists, Model, OuterRef


def fix_attachment_caches(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    Attachment = apps.get_model("zerver", "Attachment")
    ArchivedAttachment = apps.get_model("zerver", "ArchivedAttachment")
    Message = apps.get_model("zerver", "Message")
    ArchivedMessage = apps.get_model("zerver", "ArchivedMessage")

    BATCH_SIZE = 10000

    def update_batch(attachment_model: Model, message_model: Model, lower_bound: int) -> None:
        attachment_model.objects.filter(
            id__gt=lower_bound, id__lte=lower_bound + BATCH_SIZE
        ).update(
            is_web_public=Exists(
                message_model.objects.filter(
                    attachment=OuterRef("id"),
                    recipient__stream__invite_only=False,
                    recipient__stream__is_web_public=True,
                ),
            ),
            is_realm_public=Exists(
                message_model.objects.filter(
                    attachment=OuterRef("id"),
                    recipient__stream__invite_only=False,
                )
            ),
        )

    max_id = Attachment.objects.aggregate(models.Max("id"))["id__max"]
    if max_id is not None:
        lower_bound = 0

        while lower_bound < max_id:
            print(f"Processed {lower_bound}/{max_id} attachments.")
            update_batch(Attachment, Message, lower_bound)
            lower_bound += BATCH_SIZE

    max_id = ArchivedAttachment.objects.aggregate(models.Max("id"))["id__max"]
    if max_id is not None:
        lower_bound = 0

        while lower_bound < max_id:
            print(f"Processed {lower_bound}/{max_id} archived attachments.")
            update_batch(ArchivedAttachment, ArchivedMessage, lower_bound)
            lower_bound += BATCH_SIZE


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("zerver", "0385_attachment_flags_cache"),
    ]

    operations = [
        migrations.RunPython(fix_attachment_caches, reverse_code=migrations.RunPython.noop),
    ]
|
|
e7232c4050b4cae1302d2c638ed20f3ac69bf22c
|
comment out ui tests temporarily
|
tests/ui/test_login.py
|
tests/ui/test_login.py
|
import unittest
from tests.common.common import BASE_URL, USERNAME, PASSWORD
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
LOGIN_URL = BASE_URL + u'/accounts/login/'
HOME_URL = BASE_URL + u'/home/my/'
LOGOUT_URL = BASE_URL + u'/accounts/logout/'
def get_logged_instance():
browser = webdriver.PhantomJS()
browser.get(LOGIN_URL)
username_input = browser.find_element_by_name('username')
password_input = browser.find_element_by_name('password')
username_input.send_keys(USERNAME)
password_input.send_keys(PASSWORD)
password_input.send_keys(Keys.RETURN)
if browser.current_url != HOME_URL:
browser.quit()
return None
return browser
class LoginTestCase(unittest.TestCase):
def setUp(self):
self.browser = get_logged_instance()
self.assertIsNotNone(self.browser)
self.addCleanup(self.browser.quit)
def test_login(self):
self.assertRegexpMatches(self.browser.current_url, HOME_URL)
def test_logout(self):
myinfo_bar = self.browser.find_element_by_css_selector('#my-info')
logout_input = self.browser.find_element_by_css_selector('a#logout')
myinfo_bar.click()
logout_input.click()
self.assertRegexpMatches(self.browser.current_url, LOGOUT_URL)
|
Python
| 0
|
@@ -1,12 +1,14 @@
+#
 import unitt
@@ -11,16 +11,18 @@
nittest
+#
 from tes
@@ -74,16 +74,18 @@
ASSWORD
+#
 from sel
@@ -107,16 +107,18 @@
bdriver
+#
 from sel
@@ -158,16 +158,18 @@
t Keys

+#
 LOGIN_UR
@@ -203,16 +203,18 @@
login/'
+#
 HOME_URL
@@ -240,16 +240,18 @@
me/my/'
+#
 LOGOUT_U
@@ -288,16 +288,18 @@
gout/'

+#
 def get_
@@ -317,16 +317,18 @@
ance():
+#
 brow
@@ -355,16 +355,18 @@
tomJS()
+#
 brow
@@ -384,16 +384,18 @@
IN_URL)
+#
 user
@@ -448,16 +448,18 @@
rname')
+#
 pass
@@ -512,16 +512,18 @@
sword')
+#
 user
@@ -553,16 +553,18 @@
ERNAME)
+#
 pass
@@ -594,16 +594,18 @@
SSWORD)
+#
 pass
@@ -638,16 +638,18 @@
RETURN)
+#
 if b
@@ -680,16 +680,18 @@
ME_URL:
+#
@@ -705,16 +705,18 @@
.quit()
+#
@@ -727,16 +727,18 @@
rn None
+#
 retu
@@ -749,16 +749,18 @@
rowser

+#
 class Lo
@@ -792,16 +792,18 @@
Case):

+#
 def
@@ -807,32 +807,34 @@
ef setUp(self):
+#
 self.bro
@@ -862,16 +862,18 @@
tance()
+#
@@ -907,16 +907,18 @@
rowser)
+#
@@ -953,16 +953,18 @@
.quit)

+#
 def
@@ -973,32 +973,34 @@
st_login(self):
+#
 self.ass
@@ -1053,16 +1053,18 @@
E_URL)

+#
 def
@@ -1082,16 +1082,18 @@
(self):
+#
@@ -1159,16 +1159,18 @@
-info')
+#
@@ -1238,16 +1238,18 @@
ogout')
+#
@@ -1267,24 +1267,25 @@
click()
+#
 logout_i
@@ -1268,32 +1268,33 @@
lick()
#
+
 logout_input.cli
@@ -1298,16 +1298,18 @@
click()
+#
|
7b063825ca3fbd8a638d56a477d3b2380b7901be
|
Add tests for list_max_two
|
domaci-zadaci/06/test_list_max_two.py
|
domaci-zadaci/06/test_list_max_two.py
|
Python
| 0.00001
|
@@ -0,0 +1,1066 @@
+from solutions import list_max_two

import unittest
import random

class TestListMax(unittest.TestCase):
    def test_two_equal_elements(self):
        in_list = [random.randint(0, 1000)] * 2

        expected = (in_list[0], in_list[1])
        actual = list_max_two(in_list)

        self.assertEqual(expected, actual)

    def test_two_different_elements(self):
        num = random.randint(0, 1000)

        expected = (num, num - 1)

        in_list = [num, num - 1]
        actual = list_max_two(in_list)
        self.assertEqual(expected, actual)

        in_list = [num - 1, num]
        actual = list_max_two(in_list)
        self.assertEqual(expected, actual)

    def test_random(self):
        in_list = [1e6 * random.random() for _ in range(100000)]

        sorted_list = in_list[:]
        sorted_list.sort()

        expected = (sorted_list[-1], sorted_list[-2])
        actual = list_max_two(in_list)
        print(len(in_list))
        print(in_list[:100])

        self.assertEqual(expected, actual)

if __name__ == "__main__":
    unittest.main()
|
|
bcb3948eea1903dc0127c8aee1b1decf11040496
|
Add tolower simproc.
|
angr/procedures/libc/tolower.py
|
angr/procedures/libc/tolower.py
|
Python
| 0
|
@@ -0,0 +1,694 @@
+import angr
from angr.sim_type import SimTypeInt

import logging
l = logging.getLogger("angr.procedures.libc.tolower")


class tolower(angr.SimProcedure):
    def run(self, c):
        self.argument_types = {0: SimTypeInt(self.state.arch, True)}
        self.return_type = SimTypeInt(self.state.arch, True)

        if not self.state.solver.symbolic(c):
            try:
                ret_expr = chr(self.state.solver.eval(c)).lower()
            except ValueError:  # not in range(256)
                ret_expr = c
            return ret_expr
        else:
            return self.state.solver.If(
                self.state.solver.And(c >= 65, c <= 90),  # A - Z
                c + 32, c)
|
|
405bef33c1c68029b31ec6cb8f88b1edc28e2a6e
|
Create extract_wavelength tests module
|
tests/plantcv/hyperspectral/test_extract_wavelength.py
|
tests/plantcv/hyperspectral/test_extract_wavelength.py
|
Python
| 0
|
@@ -0,0 +1,280 @@
+import numpy as np
from plantcv.plantcv.hyperspectral import extract_wavelength


def test_extract_wavelength(hyperspectral_test_data):
    new = extract_wavelength(spectral_data=hyperspectral_test_data.load_hsi(), wavelength=500)
    assert np.shape(new.array_data) == (1, 1600)
|
|
d1abba72b79262c0b1462d7f7e42c798dc30003e
|
Create twinkle-status
|
twinkle-status/edit.py
|
twinkle-status/edit.py
|
Python
| 0.000245
|
@@ -0,0 +1,797 @@
+# -*- coding: utf-8 -*-
import os
import re

os.environ['PYWIKIBOT_DIR'] = os.path.dirname(os.path.realpath(__file__))
import pywikibot


os.environ['TZ'] = 'UTC'

site = pywikibot.Site()
site.login()

with open('list.txt', 'r') as f:
    for user in f:
        user = user.strip()
        page = pywikibot.Page(site, 'User:{}/common.js'.format(user))
        if not page.exists():
            print('{}\t{}'.format(user, 'gadget'))
            continue
        text = page.text
        if 'Xiplus/Twinkle.js' in text:
            print('{}\t{}'.format(user, 'Xiplus'))
            continue
        m = re.search(r'User:(.+?)/Twinkle.js', text)
        if m:
            print('{}\t{}'.format(user, m.group(1)))
            continue
        print('{}\t{}'.format(user, 'gadget'))
        continue
|
|
af062396637c86e5f12fcbf2a8250d6189ac207b
|
Create flask_fysql_example.py
|
flask_fysql_example.py
|
flask_fysql_example.py
|
Python
| 0.000032
|
@@ -0,0 +1,958 @@
+# -*- coding: utf-8 -*-
from fysql.databases import MySQLDatabase
from flask import current_app as app

class FySQL(object):
    config = {}
    name = ""
    engine = MySQLDatabase

    def __init__(self, app=None):
        self.app = None
        if app is not None:
            self.init_app(app)


    def init_app(self, app):
        self.config = app.config.get('DATABASE', {})
        self.name = self.config['db']

        self.conn_kwargs = {}
        self.engine = MySQLDatabase
        for key, value in self.config.items():
            if key not in ['engine', 'db']:
                self.conn_kwargs[key] = value

        if hasattr(app, 'teardown_appcontext'):
            app.teardown_appcontext(self.teardown)
        else:
            app.teardown_request(self.teardown)

        self.connect()

    def connect(self):
        self.db = self.engine(self.name, **self.conn_kwargs)

    def teardown(self, exception):
        self.db.close()
|
|
8b7c32f2058ce3c24ef3c19eb7d2d3f8d3154037
|
Disable large profile startup benchmarks.
|
tools/perf/benchmarks/startup.py
|
tools/perf/benchmarks/startup.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import benchmark
from measurements import startup
import page_sets
class _StartupCold(benchmark.Benchmark):
"""Measures cold startup time with a clean profile."""
options = {'pageset_repeat': 5}
@classmethod
def Name(cls):
return 'startup'
def CreatePageTest(self, options):
return startup.Startup(cold=True)
class _StartupWarm(benchmark.Benchmark):
"""Measures warm startup time with a clean profile."""
options = {'pageset_repeat': 20}
@classmethod
def Name(cls):
return 'startup'
@classmethod
def ValueCanBeAddedPredicate(cls, _, is_first_result):
return not is_first_result
def CreatePageTest(self, options):
return startup.Startup(cold=False)
@benchmark.Enabled('has tabs')
@benchmark.Disabled('snowleopard') # crbug.com/336913
class StartupColdBlankPage(_StartupCold):
"""Measures cold startup time with a clean profile."""
tag = 'cold'
page_set = page_sets.BlankPageSet
@classmethod
def Name(cls):
return 'startup.cold.blank_page'
@benchmark.Enabled('has tabs')
class StartupWarmBlankPage(_StartupWarm):
"""Measures warm startup time with a clean profile."""
tag = 'warm'
page_set = page_sets.BlankPageSet
@classmethod
def Name(cls):
return 'startup.warm.blank_page'
@benchmark.Enabled('has tabs')
@benchmark.Enabled('win', 'linux', 'mac')
class StartupLargeProfileColdBlankPage(_StartupCold):
"""Measures cold startup time with a large profile."""
tag = 'cold'
page_set = page_sets.BlankPageSet
options = {'pageset_repeat': 1}
def __init__(self, max_failures=None):
super(StartupLargeProfileColdBlankPage, self).__init__(max_failures)
self.generated_profile_archive = "large_profile.zip"
@classmethod
def Name(cls):
return 'startup.large_profile.cold.blank_page'
@benchmark.Enabled('has tabs')
@benchmark.Enabled('win', 'linux', 'mac')
class StartupLargeProfileWarmBlankPage(_StartupWarm):
"""Measures warm startup time with a large profile."""
tag = 'warm'
page_set = page_sets.BlankPageSet
options = {'pageset_repeat': 1}
def __init__(self, max_failures=None):
super(StartupLargeProfileWarmBlankPage, self).__init__(max_failures)
self.generated_profile_archive = "large_profile.zip"
@classmethod
def Name(cls):
return 'startup.large_profile.warm.blank_page'
|
Python
| 0.00002
|
@@ -1499,32 +1499,147 @@
 'linux', 'mac')
+# TODO(erikchen): Investigate source of 30s browser hang on startup.
# http://crbug.com/473827
@benchmark.Disabled
 class StartupLar
@@ -2148,16 +2148,131 @@
 'mac')
+# TODO(erikchen): Investigate source of 30s browser hang on startup.
# http://crbug.com/473827
@benchmark.Disabled
 class St
|
2ec3614693e8343e04400e51326be5305b5abbf3
|
Set a max_length for all snippets. Nobody needs more than 256k of space.
|
dpaste/forms.py
|
dpaste/forms.py
|
from django import forms
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from dpaste.models import Snippet
from dpaste.highlight import LEXER_LIST, LEXER_DEFAULT
import datetime
#===============================================================================
# Snippet Form and Handling
#===============================================================================
EXPIRE_CHOICES = (
(3600, _(u'In one hour')),
(3600 * 24 * 7, _(u'In one week')),
(3600 * 24 * 30, _(u'In one month')),
)
EXPIRE_DEFAULT = 3600 * 24 * 30
class SnippetForm(forms.ModelForm):
content = forms.CharField(
label=_('Content'),
widget=forms.Textarea(attrs={'placeholder': _('Awesome code goes here...')}),
)
lexer = forms.ChoiceField(
label=_(u'Lexer'),
initial=LEXER_DEFAULT,
widget=forms.TextInput,
)
expire_options = forms.ChoiceField(
label=_(u'Expires'),
choices=EXPIRE_CHOICES,
initial=EXPIRE_DEFAULT,
)
# Honeypot field
title = forms.CharField(
label=_(u'Title'),
required=False,
widget=forms.TextInput(attrs={'autocomplete': 'off'}),
)
def __init__(self, request, *args, **kwargs):
super(SnippetForm, self).__init__(*args, **kwargs)
self.request = request
self.fields['lexer'].choices = LEXER_LIST
self.fields['lexer'].widget.attrs = {
'autocomplete': 'off',
'data-provide': 'typeahead',
'data-source': '["%s"]' % '","'.join(dict(LEXER_LIST).keys())
}
# Set the recently used lexer if we have any
session_lexer = self.request.session.get('lexer')
if session_lexer and session_lexer in dict(LEXER_LIST).keys():
self.fields['lexer'].initial = session_lexer
def clean_lexer(self):
lexer = self.cleaned_data.get('lexer')
if not lexer:
return LEXER_DEFAULT
lexer = dict(LEXER_LIST).get(lexer, LEXER_DEFAULT)
return lexer
def clean(self):
if self.cleaned_data.get('title'):
raise forms.ValidationError('This snippet was identified as Spam.')
return self.cleaned_data
def save(self, parent=None, *args, **kwargs):
# Set parent snippet
if parent:
self.instance.parent = parent
# Add expire datestamp
self.instance.expires = datetime.datetime.now() + \
datetime.timedelta(seconds=int(self.cleaned_data['expire_options']))
# Save snippet in the db
super(SnippetForm, self).save(*args, **kwargs)
# Add the snippet to the user session list
if self.request.session.get('snippet_list', False):
if len(self.request.session['snippet_list']) >= getattr(settings, 'MAX_SNIPPETS_PER_USER', 10):
self.request.session['snippet_list'].pop(0)
self.request.session['snippet_list'] += [self.instance.pk]
else:
self.request.session['snippet_list'] = [self.instance.pk]
# Save the lexer in the session so we can use it later again
self.request.session['lexer'] = self.cleaned_data['lexer']
return self.request, self.instance
class Meta:
model = Snippet
fields = (
'content',
'lexer',
)
|
Python
| 0
|
@@ -572,16 +572,51 @@
4 * 30

+MAX_CONTENT_LENGTH = 250*1024*1024

class S
@@ -785,24 +785,63 @@
ere...')}),
+        max_length=MAX_CONTENT_LENGTH,
 )

    l
|
0c2d22ed98917c5115a01b3d44172fd9bdd0429f
|
Fix to api to allow images to launch
|
django-openstack/django_openstack/api.py
|
django-openstack/django_openstack/api.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
from django.conf import settings
import logging
import glance.client
import httplib
import json
import openstack.compute
import openstackx.admin
import openstackx.extras
import openstackx.auth
from urlparse import urlparse
def url_for(request, service_name, admin=False):
catalog = request.session['serviceCatalog']
if admin:
rv = catalog[service_name][0]['adminURL']
else:
rv = catalog[service_name][0]['internalURL']
return rv
def compute_api(request):
compute = openstack.compute.Compute(
auth_token=request.session['token'],
management_url=url_for(request, 'nova'))
# this below hack is necessary to make the jacobian compute client work
compute.client.auth_token = request.session['token']
compute.client.management_url = url_for(request, 'nova')
return compute
def account_api(request):
return openstackx.extras.Account(
auth_token=request.session['token'],
management_url=url_for(request, 'keystone', True))
def glance_api(request):
o = urlparse(url_for(request, 'glance'))
return glance.client.Client(o.hostname, o.port)
def admin_api(request):
return openstackx.admin.Admin(auth_token=request.session['token'],
management_url=url_for(request, 'nova', True))
def extras_api(request):
return openstackx.extras.Extras(auth_token=request.session['token'],
management_url=url_for(request, 'nova'))
def auth_api():
return openstackx.auth.Auth(management_url=\
settings.OPENSTACK_KEYSTONE_URL)
def console_create(request, instance_id, kind=None):
return extras_api(request).consoles.create(instance_id, kind)
def flavor_create(request, name, memory, vcpu, disk, flavor_id):
return admin_api(request).flavors.create(
name, int(memory), int(vcpu), int(disk), flavor_id)
def flavor_delete(request, flavor_id, purge=False):
return admin_api(request).flavors.delete(flavor_id, purge)
def flavor_get(request, flavor_id):
return compute_api(request).flavors.get(flavor_id)
def flavor_list(request):
return extras_api(request).flavors.list()
def flavor_list_admin(request):
return extras_api(request).flavors.list()
def image_all_metadata(request):
images = glance_api(request).get_images_detailed()
image_dict = {}
for image in images:
image_dict[image['id']] = image
return image_dict
def image_create(request, image_meta, image_file):
return glance_api(request).add_image(image_meta, image_file)
def image_delete(request, image_id):
return glance_api(request).delete_image(image_id)
def image_get(request, image_id):
return glance_api(request).get_image(image_id)[0]
def image_list_detailed(request):
return glance_api(request).get_images_detailed()
def image_update(request, image_id, image_meta=None):
image_meta = image_meta and image_meta or {}
return glance_api(request).update_image(image_id, image_meta=image_meta)
def keypair_create(request, name):
return extras_api(request).keypairs.create(name)
def keypair_delete(request, keypair_id):
return extras_api(request).keypairs.delete(keypair_id)
def keypair_list(request):
return extras_api(request).keypairs.list()
def server_create(request, name, image, flavor, user_data, key_name):
return extras_api(request).servers.create(
name, image, flavor, user_data, key_name)
def server_delete(request, instance):
return compute_api(request).servers.delete(instance)
def server_get(request, instance_id):
return compute_api(request).servers.get(instance_id)
def server_list(request):
return extras_api(request).servers.list()
def server_reboot(request,
instance_id,
hardness=openstack.compute.servers.REBOOT_HARD):
server = server_get(request, instance_id)
return server.reboot(hardness)
def service_get(request, name):
return admin_api(request).services.get(name)
def service_list(request):
return admin_api(request).services.list()
def service_update(request, name, enabled):
return admin_api(request).services.update(name, enabled)
def token_get_tenant(request, tenant_id):
tenants = auth_api().tenants.for_token(request.session['token'])
for t in tenants:
if str(t.id) == str(tenant_id):
return t
def token_list_tenants(request, token):
return auth_api().tenants.for_token(token)
def tenant_create(request, tenant_id, description, enabled):
return account_api(request).tenants.create(tenant_id, description, enabled)
def tenant_get(request, tenant_id):
return account_api(request).tenants.get(tenant_id)
def tenant_list(request):
return account_api(request).tenants.list()
def tenant_update(request, tenant_id, description, enabled):
return account_api(request).tenants.update(tenant_id, description, enabled)
def token_create(request, tenant, username, password):
return auth_api().tokens.create(tenant, username, password)
def token_info(request, token):
hdrs = {"Content-type": "application/json",
"X_AUTH_TOKEN": settings.OPENSTACK_ADMIN_TOKEN,
"Accept": "text/json"}
o = urlparse(token.serviceCatalog['keystone'][0]['adminURL'])
conn = httplib.HTTPConnection(o.hostname, o.port)
conn.request("GET", "/v2.0/tokens/%s" % token.id, headers=hdrs)
response = conn.getresponse()
data = json.loads(response.read())
admin = False
for role in data['auth']['user']['roleRefs']:
if role['roleId'] == 'Admin':
admin = True
return {'tenant': data['auth']['user']['tenantId'],
'user': data['auth']['user']['username'],
'admin': admin}
def usage_get(request, tenant_id, start, end):
return extras_api(request).usage.get(tenant_id, start, end)
def usage_list(request, start, end):
return extras_api(request).usage.list(start, end)
def user_create(request, user_id, email, password, tenant_id):
return account_api(request).users.create(
user_id, email, password, tenant_id)
def user_delete(request, user_id):
return account_api(request).users.delete(user_id)
def user_get(request, user_id):
return account_api(request).users.get(user_id)
def user_list(request):
return account_api(request).users.list()
def user_update_email(request, user_id, email):
return account_api(request).users.update_email(user_id, email)
def user_update_password(request, user_id, password):
return account_api(request).users.update_password(user_id, password)
def user_update_tenant(request, user_id, tenant_id):
return account_api(request).users.update_tenant(user_id, tenant_id)
|
Python
| 0
|
@@ -3521,34 +3521,53 @@
lavor, user_data
-,
+=user_data, key_name=
key_name)%0A%0A%0Adef
|
e13e714fa179544bef895274baf4f8ddb52ddd4a
|
add script to separate the source column into 6 columns with boolean values
|
python_scripts/seperating_source_column.py
|
python_scripts/seperating_source_column.py
|
Python
| 0
|
@@ -0,0 +1,1331 @@
+%22%22%22%0ASeparating the Variant_Source column in merged_v4.tsv into six columns:%0AVariant_in_ENIGMA%0AVariant_in_ClinVar%0AVariant_in_1000_Genomes%0AVariant_in_ExAC%0AVariant_in_LOVD%0AVariant_in_BIC%0A%22%22%22%0A%0ACOLUMNS = %5B%22Variant_in_ENIGMA%22,%0A           %22Variant_in_ClinVar%22,%0A           %22Variant_in_1000_Genomes%22,%0A           %22Variant_in_ExAC%22,%0A           %22Variant_in_LOVD%22,%0A           %22Variant_in_BIC%22%5D%0A%0ASOURCES = %5B%22ENIGMA%22, %22ClinVar%22, %221000_Genomes%22, %22ExAC%22, %22LOVD%22, %22BIC%22%5D%0A%0A%0Adef main():%0A    f_in = open('/Users/Molly/Desktop/BRCA Research/data/merged_v4.tsv', %22r%22)%0A    f_out = open('/Users/Molly/Desktop/BRCA Research/data/merged_v5.tsv', %22w%22)%0A    line_num = 1%0A    for line in f_in:%0A        items = line.strip().split(%22%5Ct%22)%0A        if line_num == 1:%0A            items.pop(0)%0A            items = COLUMNS + items%0A        else:%0A            source = items%5B0%5D%0A            items.pop(0)%0A            items = separate_source(source) + items%0A        line_num += 1%0A        new_line = %22%5Ct%22.join(items) + %22%5Cn%22%0A        f_out.write(new_line)%0A    f_in.close()%0A    f_out.close()%0A%0Adef separate_source(source):%0A    boolean_list = %5B%5D%0A    existing_sources = source.split(%22%7C%22)%0A    for each_source in SOURCES:%0A        if each_source in existing_sources:%0A            boolean_list.append(%22True%22)%0A        else:%0A            boolean_list.append(%22False%22)%0A    return boolean_list%0A%0A%0Aif __name__ == %22__main__%22:%0A    main()
|
|
e0ab65f3877da992ac3705475ea0bdc520677cbe
|
Test CommandObjectMultiword functionality
|
packages/Python/lldbsuite/test/functionalities/multiword-commands/TestMultiWordCommands.py
|
packages/Python/lldbsuite/test/functionalities/multiword-commands/TestMultiWordCommands.py
|
Python
| 0.000177
|
@@ -0,0 +1,1152 @@
+%22%22%22%0ATest multiword commands ('platform' in this case).%0A%22%22%22%0A%0Aimport lldb%0Afrom lldbsuite.test.decorators import *%0Afrom lldbsuite.test.lldbtest import *%0A%0Aclass MultiwordCommandsTestCase(TestBase):%0A%0A mydir = TestBase.compute_mydir(__file__)%0A%0A @no_debug_info_test%0A def test_ambiguous_subcommand(self):%0A self.expect(%22platform s%22, error=True,%0A substrs=%5B%22ambiguous command 'platform s'. Possible completions:%22,%0A %22%5Ctselect%5Cn%22,%0A %22%5Ctshell%5Cn%22,%0A %22%5Ctsettings%5Cn%22%5D)%0A%0A @no_debug_info_test%0A def test_empty_subcommand(self):%0A # FIXME: This has no error message.%0A self.expect(%22platform %5C%22%5C%22%22, error=True)%0A%0A @no_debug_info_test%0A def test_help(self):%0A # %3Cmultiword%3E help brings up help.%0A self.expect(%22platform help%22,%0A substrs=%5B%22Commands to manage and create platforms.%22,%0A %22Syntax: platform %5B%22,%0A %22The following subcommands are supported:%22,%0A %22connect%22,%0A %22Select the current platform%22%5D)%0A
|
|
a23e45a65221ec076059bd32cdb1d5bb787e123b
|
add less filter
|
dukpy/webassets/lessfilter.py
|
dukpy/webassets/lessfilter.py
|
Python
| 0.000001
|
@@ -0,0 +1,703 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import absolute_import, print_function%0A%0Aimport os%0A%0Afrom webassets.filter import Filter%0A%0Aimport dukpy%0A%0A%0A__all__ = ('CompileLess', )%0A%0A%0Aclass CompileLess(Filter):%0A name = 'lessc'%0A max_debug_level = None%0A%0A def setup(self):%0A self.less_includes = self.get_config('LIBSASS_INCLUDES', require=False) or None%0A%0A def input(self, _in, out, **kw):%0A options = %7B'paths': %5B%5D%7D%0A if self.less_includes:%0A options%5B'paths'%5D.extend(self.less_includes)%0A if 'source_path' in kw:%0A options%5B'paths'%5D.append(os.path.dirname(kw%5B'source_path'%5D))%0A%0A src = dukpy.less_compile(_in.read(), options=options)%0A out.write(src)%0A
|
|
0f251e6c8620e19fc5e16e88b1ffbd5d51f7a7be
|
Add initial HDF storage class
|
gcmstools/datastore.py
|
gcmstools/datastore.py
|
Python
| 0
|
@@ -0,0 +1,1953 @@
+import numpy as np%0Aimport pandas as pd%0Aimport tables as tb%0A%0Aclass HDFStore(object):%0A    def __init__(self, hdfname):%0A        self.pdh5 = pd.HDFStore(hdfname, mode='a', complevel=9,%0A                complib='blosc')%0A        self.h5 = self.pdh5._handle%0A        self._filters = tb.Filters(complevel=9, complib='blosc')%0A        self._files_df_columns = ('name', 'filenames')%0A        %0A        if not hasattr(self.h5.root, 'files'):%0A            df = pd.DataFrame(columns=self._files_df_columns).set_index('name')%0A            self.pdh5%5B'files'%5D = df%0A        self.files = self.pdh5%5B'files'%5D%0A%0A        if not hasattr(self.h5.root, 'data'):%0A            self.h5.create_group('/', 'data', filters=self._filters)%0A%0A    def append_files(self, datafiles):%0A        if not isinstance(datafiles, (tuple, list)):%0A            datafiles = %5Bdatafiles,%5D%0A        %0A        names = %5B%5D%0A        for data in datafiles:%0A            filename = data.filename%0A            name = self._name_fix(filename)%0A            names.append((name, filename))%0A            self._append(name, data)%0A        temp_df = pd.DataFrame(names, columns=self._files_df_columns)%5C%0A                .set_index('name')%0A%0A        self.pdh5%5B'files'%5D = pd.concat( (self.pdh5%5B'files'%5D, temp_df) )%0A%0A    def _append(self, name, gcmsobj):%0A        group = self.h5.create_group('/data', name)%0A%0A        for key, val in gcmsobj.__dict__.items():%0A            if isinstance(val, np.ndarray):%0A                self.h5.create_carray(group, key, obj=val,)%0A            else:%0A                setattr(group._v_attrs, key, val)%0A%0A    def _name_fix(self, badname):%0A        sp = badname.split('.')%0A        nosuffix = '_'.join(sp%5B:-1%5D)%0A        nospace = nosuffix.replace(' ', '_')%0A        if nospace%5B0%5D.isdigit():%0A            nonum = 'num' + nospace%0A            return nonum%0A        else:%0A            return nospace%0A%0A    def close(self, ):%0A        self.pdh5.close()%0A%0A    def recompress(self,):%0A        # Copy file to recompress%0A        pass%0A%0A%0A
|
|
585a5fa27321134623dcf431ebf80ba1dcd708de
|
add example script to test equality of coefficients in two regressions (basic example for onewaygls)
|
scikits/statsmodels/examples/try_2regress.py
|
scikits/statsmodels/examples/try_2regress.py
|
Python
| 0
|
@@ -0,0 +1,1868 @@
+# -*- coding: utf-8 -*-%0A%22%22%22F test for null hypothesis that coefficients in two regressions are the same%0A%0Asee discussion in http://mail.scipy.org/pipermail/scipy-user/2010-March/024851.html%0A%0ACreated on Thu Mar 25 22:56:45 2010%0AAuthor: josef-pktd%0A%22%22%22%0A%0Aimport numpy as np%0Afrom numpy.testing import assert_almost_equal%0Aimport scikits.statsmodels as sm%0A%0Anp.random.seed(87654589)%0A%0Anobs = 10 #100%0Ax1 = np.random.randn(nobs)%0Ay1 = 10 + 15*x1 + 2*np.random.randn(nobs)%0A%0Ax1 = sm.add_constant(x1) #, prepend=True)%0Aassert_almost_equal(x1, np.vander(x1%5B:,0%5D,2), 16)%0Ares1 = sm.OLS(y1, x1).fit()%0Aprint res1.params%0Aprint np.polyfit(x1%5B:,0%5D, y1, 1)%0Aassert_almost_equal(res1.params, np.polyfit(x1%5B:,0%5D, y1, 1), 14)%0Aprint res1.summary(xname=%5B'x1','const1'%5D)%0A%0A#regression 2%0Ax2 = np.random.randn(nobs)%0Ay2 = 19 + 17*x2 + 2*np.random.randn(nobs)%0A#y2 = 10 + 15*x2 + 2*np.random.randn(nobs) # if H0 is true%0A%0Ax2 = sm.add_constant(x2) #, prepend=True)%0Aassert_almost_equal(x2, np.vander(x2%5B:,0%5D,2), 16)%0A%0Ares2 = sm.OLS(y2, x2).fit()%0Aprint res2.params%0Aprint np.polyfit(x2%5B:,0%5D, y2, 1)%0Aassert_almost_equal(res2.params, np.polyfit(x2%5B:,0%5D, y2, 1), 14)%0Aprint res2.summary(xname=%5B'x2','const2'%5D)%0A%0A%0A# joint regression%0A%0Ax = np.concatenate((x1,x2),0)%0Ay = np.concatenate((y1,y2))%0Adummy = np.arange(2*nobs)%3Enobs-1%0Ax = np.column_stack((x,x*dummy%5B:,None%5D))%0A%0Ares = sm.OLS(y, x).fit()%0Aprint res.summary(xname=%5B'x','const','x2','const2'%5D)%0A%0Aprint '%5CnF test for equal coefficients in 2 regression equations'%0A#effect of dummy times second regression is zero%0A#is equivalent to 3rd and 4th coefficient are both zero%0Aprint res.f_test(%5B%5B0,0,1,0%5D,%5B0,0,0,1%5D%5D)%0A%0Aprint '%5Cnchecking coefficients individual versus joint'%0Aprint res1.params, res2.params%0Aprint res.params%5B:2%5D, res.params%5B:2%5D+res.params%5B2:%5D%0Aassert_almost_equal(res1.params, res.params%5B:2%5D, 13)%0Aassert_almost_equal(res2.params, res.params%5B:2%5D+res.params%5B2:%5D, 13)%0A
|
|
06945ae360bdab9726ea78757d8e63b10ea252fe
|
Create cbalusek_02.py
|
Week01/Problem02/cbalusek_02.py
|
Week01/Problem02/cbalusek_02.py
|
Python
| 0.000182
|
@@ -0,0 +1,297 @@
+%22%22%22%0ACreated on Fri Jul 21 10:17:21 2017%0AThis short program will sum all of the even numbers in the Fibonacci%0Asequence less than 4 million.%0A@author: cbalusek3%0A%22%22%22%0A%0Ai1 = 1%0Ai2 = 2%0Acum = 0%0Awhile i2 %3C 4000000:%0A    # test the current term before advancing so the first even term (2) is counted%0A    if i2%252 == 0:%0A        cum += i2%0A    itemp = i2%0A    i2 += i1%0A    i1 = itemp%0Aprint(cum)%0A
|
|
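A minimal, self-contained cross-check of the even-Fibonacci sum in the record above (the evenness test must run on each term before advancing so that the first even term, 2, is counted; the total for terms below four million is 4613732):

def even_fib_sum(limit):
    # walk the Fibonacci sequence and total the even terms below the limit
    a, b = 1, 2
    total = 0
    while b < limit:
        if b % 2 == 0:
            total += b
        a, b = b, a + b
    return total

assert even_fib_sum(4000000) == 4613732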
5e42ed2908f6186db4da92f283ca9b9b30ac9e31
|
Add executable file
|
bin/paxit.py
|
bin/paxit.py
|
Python
| 0.000009
|
@@ -0,0 +1,249 @@
+#!/usr/bin/env python%0Afrom pax import pax%0A%0Aif __name__ == '__main__':%0A pax.Processor(input='DumbExample.DumbExampleInput',%0A transform='DumbExample.DumbExampleTransform',%0A output='DumbExample.DumbExampleOutput')%0A%0A
|
|
74c2563f55c71ff8d3d2cbb7ff2b69a4be8767ec
|
Create client.py
|
client.py
|
client.py
|
Python
| 0.000001
|
@@ -0,0 +1,152 @@
+#!/usr/bin/env python%0A%0A# TODO take user choice for c2 method%0A%0A# TODO start with pipe delimited HTTP POST data to test%0A%0A# TODO loop and output to STDOUT%0A
|
|
6693c02315ba16c8a849c7a1892397bf819a2efd
|
Add meshDisplay commands
|
app/module/polygon/meshDisplay.py
|
app/module/polygon/meshDisplay.py
|
Python
| 0.000001
|
@@ -0,0 +1,1213 @@
+import maya.cmds as cmds%0D%0Aimport maya.mel as mel%0D%0A%0D%0A%0D%0A# class name must be 'Commands'%0D%0Aclass Commands(object):%0D%0A%0D%0A commandDict = %7B%7D%0D%0A%0D%0A def _applyColor(self):%0D%0A mel.eval(%22polyColorPerVertex -r 0.5 -g 0.5 -b 0.5 -a 1 -cdo;%22)%0D%0A commandDict%5B'applyColor'%5D = %22polyApplyColor.png%22%0D%0A%0D%0A def _paintVertexColorTool(self):%0D%0A cmds.PaintVertexColorTool()%0D%0A commandDict%5B'paintVertexColorTool'%5D = %22paintVertexColour.png%22%0D%0A%0D%0A def _createEmptyColorSet(self):%0D%0A mel.eval(%22colorSetEditCmdNew new none 1 RGB 0%22)%0D%0A commandDict%5B'createEmptyColorSet'%5D = %22polyColorSetCreateEmpty.png%22%0D%0A%0D%0A def _deleteCurrentColorSet(self):%0D%0A mel.eval(%22colorSetEditCmd delete none%22)%0D%0A commandDict%5B'deleteCurrentColorSet'%5D = %22polyColorSetDelete.png%22%0D%0A%0D%0A def _renameCurrentColorSet(self):%0D%0A mel.eval(%22colorSetEditCmd rename none%22)%0D%0A commandDict%5B'renameCurrentColorSet'%5D = %22polyColorSetRename.png%22%0D%0A%0D%0A def _modifyCurrentColorSet(self):%0D%0A mel.eval(%22colorSetEditCmd modify none%22)%0D%0A commandDict%5B'modifyCurrentColorSet'%5D = %22polyColorSetModify.png%22%0D%0A%0D%0A def _colorSetEditor(self):%0D%0A mel.eval(%22colorSetEditor%22)%0D%0A commandDict%5B'colorSetEditor'%5D = %22polyColorSetEditor.png%22%0D%0A
|
|
65df19a82df1432f72674eb35765937aa6c889eb
|
Add migration removing database columns following removal of FileUpload fields (#16035)
|
src/olympia/files/migrations/0005_auto_20201120_0926.py
|
src/olympia/files/migrations/0005_auto_20201120_0926.py
|
Python
| 0
|
@@ -0,0 +1,581 @@
+# Generated by Django 2.2.17 on 2020-11-20 09:26%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('files', '0004_auto_20200923_1808'),%0A %5D%0A%0A operations = %5B%0A migrations.RemoveIndex(%0A model_name='fileupload',%0A name='file_uploads_afe99c5e',%0A ),%0A migrations.RemoveField(%0A model_name='fileupload',%0A name='compat_with_app',%0A ),%0A migrations.RemoveField(%0A model_name='fileupload',%0A name='compat_with_appver',%0A ),%0A %5D%0A
|
|
48d7fc87c51cb15829e9cf161b38dc004affee9e
|
ADD example_sequential
|
example/example_sequential.py
|
example/example_sequential.py
|
Python
| 0.000004
|
@@ -0,0 +1,1347 @@
+import sklearn.cross_validation%0Aimport sklearn.datasets%0Aimport sklearn.metrics%0A%0Aimport autosklearn.classification%0A%0A%0Adef main():%0A    digits = sklearn.datasets.load_digits()%0A    X = digits.data%0A    y = digits.target%0A    X_train, X_test, y_train, y_test = %5C%0A        sklearn.cross_validation.train_test_split(X, y, random_state=1)%0A%0A    automl = autosklearn.classification.AutoSklearnClassifier(%0A        time_left_for_this_task=1200, per_run_time_limit=30,%0A        tmp_folder='/tmp/autosklearn_sequential_example_tmp',%0A        output_folder='/tmp/autosklearn_sequential_example_out',%0A        # Do not construct ensembles in parallel to avoid using more than one%0A        # core at a time. The ensemble will be constructed after auto-sklearn%0A        # has finished fitting all machine learning models.%0A        ensemble_size=0, delete_tmp_folder_after_terminate=False)%0A    automl.fit(X_train, y_train, dataset_name='digits')%0A    # This call to fit_ensemble uses all models trained in the previous call%0A    # to fit to build an ensemble which can be used with automl.predict()%0A    automl.fit_ensemble(y_train, ensemble_size=50)%0A%0A    print(automl.show_models())%0A    predictions = automl.predict(X_test)%0A    print(automl.sprint_statistics())%0A    print(%22Accuracy score%22, sklearn.metrics.accuracy_score(y_test, predictions))%0A%0A%0Aif __name__ == '__main__':%0A    main()%0A
|
|
3246873aad0482b9087663f452c2fe32c37daa36
|
Add new migration
|
custom/icds_reports/migrations/0101_auto_20190227_1801.py
|
custom/icds_reports/migrations/0101_auto_20190227_1801.py
|
Python
| 0
|
@@ -0,0 +1,1468 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.20 on 2019-02-27 18:01%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('icds_reports', '0100_add_supervisor_id'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='aggregatebirthpreparednesforms',%0A name='supervisor_id',%0A field=models.TextField(max_length=40, null=True),%0A ),%0A migrations.AlterField(%0A model_name='aggregateccsrecorddeliveryforms',%0A name='supervisor_id',%0A field=models.TextField(max_length=40, null=True),%0A ),%0A migrations.AlterField(%0A model_name='aggregateccsrecordpostnatalcareforms',%0A name='supervisor_id',%0A field=models.TextField(max_length=40, null=True),%0A ),%0A migrations.AlterField(%0A model_name='aggregateccsrecordthrforms',%0A name='supervisor_id',%0A field=models.TextField(max_length=40, null=True),%0A ),%0A migrations.AlterField(%0A model_name='aggregatecomplementaryfeedingforms',%0A name='supervisor_id',%0A field=models.TextField(max_length=40, null=True),%0A ),%0A migrations.AlterField(%0A model_name='awwincentivereport',%0A name='supervisor_id',%0A field=models.TextField(max_length=40, null=True),%0A ),%0A %5D%0A
|
|
c1d04467fdc58e4f97e7768f77706ac4c8099c42
|
clear the error queue
|
src/cryptography/hazmat/bindings/openssl/binding.py
|
src/cryptography/hazmat/bindings/openssl/binding.py
|
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import os
import threading
from cryptography.hazmat.bindings._openssl import ffi, lib
_osrandom_engine_id = ffi.new("const char[]", b"osrandom")
_osrandom_engine_name = ffi.new("const char[]", b"osrandom_engine")
@ffi.callback("int (*)(unsigned char *, int)", error=0)
def osrandom_rand_bytes(buf, size):
signed = ffi.cast("char*", buf)
result = os.urandom(size)
signed[0:size] = result
return 1
@ffi.callback("int (*)(unsigned char *, int)", error=0)
def osrandom_pseudo_rand_bytes(buf, size):
result = osrandom_rand_bytes(buf, size)
if result == 0:
return -1
else:
return result
@ffi.callback("int (*)(void)")
def osrandom_rand_status():
return 1
method = ffi.new(
"RAND_METHOD*", dict(bytes=osrandom_rand_bytes,
pseudorand=osrandom_pseudo_rand_bytes,
status=osrandom_rand_status)
)
def _register_osrandom_engine():
looked_up_engine = lib.ENGINE_by_id(_osrandom_engine_id)
if looked_up_engine != ffi.NULL:
return 2
engine = lib.ENGINE_new()
try:
result = lib.ENGINE_set_id(engine, _osrandom_engine_id)
assert result == 1
result = lib.ENGINE_set_name(engine, _osrandom_engine_name)
assert result == 1
result = lib.ENGINE_set_RAND(engine, method)
assert result == 1
result = lib.ENGINE_add(engine)
assert result == 1
finally:
result = lib.ENGINE_free(engine)
assert result == 1
looked_up_engine = lib.ENGINE_by_id(_osrandom_engine_id)
assert looked_up_engine != ffi.NULL
return 1
class Binding(object):
"""
OpenSSL API wrapper.
"""
lib = lib
ffi = ffi
_lib_loaded = False
_locks = None
_lock_cb_handle = None
_rand_method = None
_init_lock = threading.Lock()
_lock_init_lock = threading.Lock()
# aliases for the convenience of tests.
_osrandom_engine_id = _osrandom_engine_id
_osrandom_engine_name = _osrandom_engine_name
_register_osrandom_engine = staticmethod(_register_osrandom_engine)
def __init__(self):
self._ensure_ffi_initialized()
@classmethod
def _ensure_ffi_initialized(cls):
if cls._lib_loaded:
return
with cls._init_lock:
if not cls._lib_loaded:
cls._lib_loaded = True
_register_osrandom_engine()
@classmethod
def init_static_locks(cls):
with cls._lock_init_lock:
cls._ensure_ffi_initialized()
if not cls._lock_cb_handle:
cls._lock_cb_handle = cls.ffi.callback(
"void(int, int, const char *, int)",
cls._lock_cb
)
# Use Python's implementation if available, importing _ssl triggers
# the setup for this.
__import__("_ssl")
if cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL:
return
# If nothing else has setup a locking callback already, we set up
# our own
num_locks = cls.lib.CRYPTO_num_locks()
cls._locks = [threading.Lock() for n in range(num_locks)]
cls.lib.CRYPTO_set_locking_callback(cls._lock_cb_handle)
@classmethod
def _lock_cb(cls, mode, n, file, line):
lock = cls._locks[n]
if mode & cls.lib.CRYPTO_LOCK:
lock.acquire()
elif mode & cls.lib.CRYPTO_UNLOCK:
lock.release()
else:
raise RuntimeError(
"Unknown lock mode {0}: lock={1}, file={2}, line={3}.".format(
mode, n, file, line
)
)
|
Python
| 0
|
@@ -1167,24 +1167,61 @@
m_engine():%0A
+ assert lib.ERR_peek_error() == 0%0A
looked_u
@@ -1318,16 +1318,84 @@
-return 2
+assert lib.ERR_peek_error() == 0%0A return 2%0A%0A lib.ERR_clear_error()
%0A%0A
|
d7cd9951581df833ac3e6eecd099035f2d30df0e
|
Add initial Starling mapping
|
csv2ofx/mappings/starling.py
|
csv2ofx/mappings/starling.py
|
Python
| 0
|
@@ -0,0 +1,477 @@
+from __future__ import (%0A absolute_import, division, print_function, unicode_literals)%0A%0Afrom operator import itemgetter%0A%0A%0Adef fixdate(ds):%0A dmy = ds.split('/')%0A # BUG (!?): don't understand but stolen from ubs-ch-fr.py%0A return '.'.join((dmy%5B1%5D, dmy%5B0%5D, dmy%5B2%5D))%0A%0Amapping = %7B%0A 'has_header': True,%0A 'date': lambda tr: fixdate(tr%5B'Date'%5D),%0A 'amount': itemgetter('Amount (GBP)'),%0A 'desc': itemgetter('Reference'),%0A 'payee': itemgetter('Counter Party')%0A%7D%0A
|
|
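A short illustrative check of the fixdate helper in the Starling mapping above: it reorders a DD/MM/YYYY string into dot-separated MM.DD.YYYY, presumably so the downstream date parser sees the month first (the sample date is invented for this example):

def fixdate(ds):
    # swap day and month, join with dots: DD/MM/YYYY -> MM.DD.YYYY
    dmy = ds.split('/')
    return '.'.join((dmy[1], dmy[0], dmy[2]))

assert fixdate('25/12/2018') == '12.25.2018'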
67a5e2797e362bb5a38b8af5b05cf72d1e28a262
|
add import script for Wokingham (closes #795)
|
polling_stations/apps/data_collection/management/commands/import_wokingham.py
|
polling_stations/apps/data_collection/management/commands/import_wokingham.py
|
Python
| 0
|
@@ -0,0 +1,344 @@
+from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter%0A%0Aclass Command(BaseXpressDemocracyClubCsvImporter):%0A council_id = 'E06000041'%0A addresses_name = 'parl.2017-06-08/Version 1/merged.tsv'%0A stations_name = 'parl.2017-06-08/Version 1/merged.tsv'%0A elections = %5B'parl.2017-06-08'%5D%0A csv_delimiter = '%5Ct'%0A
|
|
c86b61a23ad9b3a152bb6644cb5dde5a4b42fbc9
|
Add Management Command to Dump Project and RemoteRepository Relationship in JSON format
|
readthedocs/projects/management/commands/dump_project_remote_repo_relation.py
|
readthedocs/projects/management/commands/dump_project_remote_repo_relation.py
|
Python
| 0
|
@@ -0,0 +1,1589 @@
+import json%0A%0Afrom django.core.management.base import BaseCommand%0A%0Afrom readthedocs.projects.models import Project%0A%0A%0Aclass Command(BaseCommand):%0A help = %22Dump Project and RemoteRepository Relationship in JSON format%22%0A%0A def handle(self, *args, **options):%0A data = %5B%5D%0A%0A queryset = Project.objects.filter(%0A remote_repository__isnull=False,%0A ).values_list('id', 'remote_repository__json').distinct()%0A%0A for project_id, remote_repository__json in queryset:%0A try:%0A json_data = json.loads(remote_repository__json)%0A # GitHub and GitLab uses %60id%60 and Bitbucket uses %60uuid%60%0A # for the repository id%0A remote_id = json_data.get('id') or json_data.get('uuid')%0A%0A if remote_id:%0A data.append(%7B%0A 'remote_id': remote_id,%0A 'project_id': project_id%0A %7D)%0A else:%0A self.stdout.write(%0A self.style.ERROR(%0A f'Project %7Bproject_id%7D does not have a remote_repository remote_id'%0A )%0A )%0A except json.decoder.JSONDecodeError:%0A self.stdout.write(%0A self.style.ERROR(%0A f'Project %7Bproject_id%7D does not have a valid remote_repository__json'%0A )%0A )%0A%0A # Dump the data to a json file%0A with open('project-remote-repo-dump.json', 'w') as f:%0A f.write(json.dumps(data))%0A
|
|
e358061bb433c52b8d545791582d71cd4eab48a7
|
Move example to README.md
|
lib/storage/ellipticsbackend.py
|
lib/storage/ellipticsbackend.py
|
"""
Elliptics is a fault tolerant distributed key/value storage.
See http://reverbrain.com/elliptics and
https://github.com/reverbrain/elliptics for additional info.
Docs: http://doc.reverbrain.com/
Deployment guide: http://doc.reverbrain.com/elliptics:server-tutorial
Packages: http://repo.reverbrain.com/
"""
import itertools
import cache
from . import Storage
import elliptics
NAMESPACE = "DOCKER"
class EllipticsStorage(Storage):
def __init__(self, config):
"""
Example:
storage: elliptics
nodes:
elliptics-host1: 1025
elliptics-host2: 1025
...
<hostN>: <port>
wait-timeout: 60
check_timeout: 60
io-thread-num: 2
net-thread-num: 2
nonblocking_io_thread_num: 2
groups: [1, 2, 3]
verbosity: 4
logfile: "/tmp/logfile.log"
loglevel: debug
More info:
http://doc.reverbrain.com/elliptics:api-python
"""
cfg = elliptics.Config()
# The parameter which sets the time to wait for the operation complete
cfg.config.wait_timeout = config.get("wait-timeout", 60)
# The parameter which sets the timeout for pinging node
cfg.config.check_timeout = config.get("check_timeout", 60)
# Number of IO threads in processing pool
cfg.config.io_thread_num = config.get("io-thread-num", 2)
# Number of threads in network processing pool
cfg.config.net_thread_num = config.get("net-thread-num", 2)
# Number of IO threads in processing pool dedicated to nonblocking ops
nonblock_io_threads = config.get("nonblocking_io_thread_num", 2)
cfg.config.nonblocking_io_thread_num = nonblock_io_threads
groups = config.get('groups', [])
if len(groups) == 0:
raise ValueError("Specify groups")
# loglevel of elliptics logger
elliptics_log_level = config.get('verbosity', 0)
# path to logfile
elliptics_log_file = config.get('logfile', '/dev/stderr')
log = elliptics.Logger(elliptics_log_file, elliptics_log_level)
self._elliptics_node = elliptics.Node(log, cfg)
for host, port in config.get('nodes').iteritems():
self._elliptics_node.add_remote(host, port)
self._session = elliptics.Session(self._elliptics_node)
self._session.groups = groups
self._session.set_namespace(NAMESPACE)
def s_find(self, tags):
r = self._session.find_all_indexes(list(tags))
r.wait()
result = r.get()
return [str(i.indexes[0].data) for i in itertools.chain(result)]
def s_remove(self, key):
self._session.remove(key)
self._session.set_indexes(key, [], [])
def s_read(self, path):
res = self._session.read_data(path, offset=0, size=0).get()[0]
return str(res.data)
def s_write(self, key, value, tags):
self._session.write_data(key, str(value)).wait()
r = self._session.set_indexes(key, list(tags), [key] * len(tags))
r.wait()
return r.successful()
@cache.get
def get_content(self, path):
try:
return self.s_read(path)
except Exception as err:
raise IOError(err)
@cache.put
def put_content(self, path, content):
tag, _, _ = path.rpartition('/')
if len(content) == 0:
content = "EMPTY"
self.s_write(path, content, ('docker', tag))
spl_path = path.rsplit('/')[:-1]
while spl_path:
_path = '/'.join(spl_path)
_tag = '/'.join(spl_path[:-1])
spl_path.pop()
self.s_write(_path, "DIRECTORY", ('docker', _tag))
return path
def stream_write(self, path, fp):
chunks = []
while True:
try:
buf = fp.read(self.buffer_size)
if not buf:
break
chunks += buf
except IOError:
break
self.put_content(path, ''.join(chunks))
def stream_read(self, path):
yield self.get_content(path)
def list_directory(self, path=None):
if path is None:
path = ""
items = self.s_find(('docker', path))
if not items:
raise OSError('No such directory: \'{0}\''.format(path))
for item in items:
yield item
def exists(self, path):
tag, _, _ = path.rpartition('/')
res = self.s_find(('docker', tag))
return path in res
@cache.remove
def remove(self, path):
self.s_remove(path)
def get_size(self, path):
return len(self.get_content(path))
|
Python
| 0.000002
|
@@ -472,597 +472,8 @@
g):%0A
- %22%22%22%0A Example:%0A%0A storage: elliptics%0A nodes:%0A elliptics-host1: 1025%0A elliptics-host2: 1025%0A ...%0A %3ChostN%3E: %3Cport%3E%0A wait-timeout: 60%0A check_timeout: 60%0A io-thread-num: 2%0A net-thread-num: 2%0A nonblocking_io_thread_num: 2%0A groups: %5B1, 2, 3%5D%0A verbosity: 4%0A logfile: %22/tmp/logfile.log%22%0A loglevel: debug%0A%0A More info:%0A http://doc.reverbrain.com/elliptics:api-python%0A %22%22%22%0A
|
0f92843ec20007dc4379ff88d2e711bdad5e9125
|
Add ftp module, the backend for salt-ftp
|
salt/cli/ftp.py
|
salt/cli/ftp.py
|
Python
| 0.000001
|
@@ -0,0 +1,2236 @@
+'''%0AThe ftp module is used to execute the logic used by the salt-ftp command%0Aline application. salt-ftp is NOT intended to broadcast large files; it is%0Aintended to handle text files.%0ASalt-ftp can be used to distribute configuration files%0A'''%0A# Import python modules%0Aimport os%0Aimport stat%0Aimport sys%0A# Import third party modules%0Aimport yaml%0A# Import salt modules%0Aimport salt.client%0A%0Aclass SaltFTP(object):%0A    '''%0A    Create a salt ftp object, used to distribute simple files with salt%0A    '''%0A    def __init__(self, opts):%0A        self.opts = opts%0A%0A    def _file_dict(self, fn_):%0A        '''%0A        Take a path and return the contents of the file as a string%0A        '''%0A        if not os.path.isfile(fn_):%0A            err = 'The referenced file, ' + fn_ + ' is not available.'%0A            sys.stderr.write(err + '%5Cn')%0A            sys.exit(42)%0A        return %7Bfn_: open(fn_, 'r').read()%7D%0A%0A    def _recurse_dir(self, fn_, files=None):%0A        '''%0A        Recursively pull files from a directory%0A        '''%0A        # Avoid a mutable default argument so repeated calls start clean%0A        if files is None:%0A            files = %7B%7D%0A        for base in os.listdir(fn_):%0A            path = os.path.join(fn_, base)%0A            if os.path.isdir(path):%0A                files.update(self._recurse_dir(path))%0A            else:%0A                files.update(self._file_dict(path))%0A        return files%0A%0A    def _load_files(self):%0A        '''%0A        Parse the files indicated in opts%5B'src'%5D and load them into a python%0A        object for transport%0A        '''%0A        files = %7B%7D%0A        for fn_ in self.opts%5B'src'%5D:%0A            if os.path.isfile(fn_):%0A                files.update(self._file_dict(fn_))%0A            elif os.path.isdir(fn_):%0A                files.update(self._recurse_dir(fn_))%0A        return files%0A%0A%0A    def run(self):%0A        '''%0A        Make the salt client call%0A        '''%0A        arg = %5Bself._load_files(), self.opts%5B'dest'%5D%5D%0A        local = salt.client.LocalClient(self.opts%5B'conf_file'%5D)%0A        args = %5Bself.opts%5B'tgt'%5D,%0A                'ftp.recv',%0A                arg,%0A                self.opts%5B'timeout'%5D,%0A                %5D%0A        if self.opts%5B'pcre'%5D:%0A            args.append('pcre')%0A        elif self.opts%5B'list'%5D:%0A            args.append('list')%0A        elif self.opts%5B'facter'%5D:%0A            args.append('facter')%0A        %0A        ret = local.cmd(*args)%0A%0A        print yaml.dump(ret)%0A
|
|
b6e0daa9ee06961743ad5a5f728f3385bfdb0b4c
|
Add a helper script to apply changes in zuul
|
playbooks/roles/ironic-install/files/parse_zuul_changes.py
|
playbooks/roles/ironic-install/files/parse_zuul_changes.py
|
Python
| 0
|
@@ -0,0 +1,2318 @@
+#!/usr/bin/env python%0A%0A# (c) 2015, Hewlett-Packard Development Company, L.P.%0A#%0A# This module is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU General Public License as published by%0A# the Free Software Foundation, either version 3 of the License, or%0A# (at your option) any later version.%0A#%0A# This software is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU General Public License for more details.%0A#%0A# You should have received a copy of the GNU General Public License%0A# along with this software. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A%0Aimport re%0Aimport subprocess%0Aimport sys%0A%0Aif len(sys.argv) == 1:%0A    print(%22ERROR: This script requires arguments!%5Cn%22%0A          %22%25s repository_path review_url repository_name %22%0A          %22zuul_changes%22 %25 sys.argv%5B0%5D)%0A    sys.exit(1)%0A%0Arepo_path = sys.argv%5B1%5D%0Areview_url = sys.argv%5B2%5D%0Arepo_name = sys.argv%5B3%5D%0Achange_list = str(sys.argv%5B4%5D).split('%5E')%0Aapplicable_changes = %5Bx for x in change_list if repo_name in x%5D%0A%0Atry:%0A    for change in applicable_changes:%0A        (project, branch, ref) = change.split(':')%0A        if re.search(repo_name, project):%0A            if not re.search(branch, subprocess.check_output(%0A                %5B'git', '-C', repo_path, 'status', '-s', '-b'%5D)):%0A                command = %5B'git', '-C', repo_path, 'checkout', branch%5D%0A                subprocess.call(command, stdout=True)%0A%0A            command = %5B'git', '-C', repo_path, 'fetch',%0A                       review_url + %22/%22 + repo_name, ref%5D%0A            if subprocess.call(command, stdout=True) == 0:%0A                if subprocess.call(%0A                    %5B'git', '-C', repo_path, 'cherry-pick', 'FETCH_HEAD'%5D,%0A                    stdout=True) == 0:%0A                    print(%22Applied %25s%22 %25 ref)%0A                else:%0A                    print(%22Failed to cherry pick %25s on to %25s branch %25s%22%0A                          %25 (ref, repo_name, branch))%0A                    sys.exit(1)%0A            else:%0A                print(%22Failed to download %25s on to %25s branch %25s%22%0A                      %25 (ref, repo_name, branch))%0A                sys.exit(1)%0A%0Aexcept Exception as e:%0A    print(%22Failed to process change: %25s%22 %25 e)%0A
|
|
bc40b4c25c4f45d8d1e75963bb84fad2e1008762
|
Past 12, should ideally be in day 9
|
day8/stringCompression.py
|
day8/stringCompression.py
|
Python
| 0.999977
|
@@ -0,0 +1,624 @@
+import unittest%0Afrom collections import Counter%0A%0Adef stringCompression(str1):%0A dicty = dict(Counter(str1))%0A result = %5B%5D%0A for x in sorted(dicty.keys()):%0A temp = x + str(dicty%5Bx%5D)%0A result.append(temp)%0A result = ''.join(result)%0A if(len(result)%3Elen(str1)):%0A return str1%0A return result%0A%0Aclass Test(unittest.TestCase):%0A%0A data = %5B('aabbccddd', 'a2b2c2d3'),('abc','abc')%5D%0A%0A def test_stringCompression(self):%0A for test,solution in self.data:%0A result = stringCompression(test)%0A self.assertEqual(result,solution)%0A%0Aif __name__ == %22__main__%22:%0A unittest.main()%0A
|
|
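Worth noting about the compressor in the record above: because it tallies characters with Counter and emits the keys in sorted order, it discards positional information, so it is a character-count summary rather than run-length encoding. A small self-contained illustration (the second input is invented here):

from collections import Counter

def string_compression(s):
    # count every character, emit counts in sorted key order,
    # and keep whichever representation is shorter
    counts = Counter(s)
    out = ''.join(ch + str(counts[ch]) for ch in sorted(counts))
    return s if len(out) > len(s) else out

assert string_compression('aabbccddd') == 'a2b2c2d3'
assert string_compression('abab') == 'a2b2'  # order information is lost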
855fe9e95651626c2737c65424ad1067f253e28a
|
Implement invisible user mode
|
txircd/modules/rfc/umode_i.py
|
txircd/modules/rfc/umode_i.py
|
Python
| 0.000047
|
@@ -0,0 +1,956 @@
+from twisted.plugin import IPlugin%0Afrom txircd.module_interface import IMode, IModuleData, Mode, ModuleData%0Afrom txircd.utils import ModeType%0Afrom zope.interface import implements%0A%0Aclass InvisibleMode(ModuleData, Mode):%0A implements(IPlugin, IModuleData, IMode)%0A %0A name = %22InvisibleMode%22%0A core = True%0A affectedActions = %5B %22showchanneluser%22 %5D%0A %0A def actions(self):%0A return %5B (%22modeactioncheck-user-i-showchanneluser%22, 1, self.isInvisible) %5D%0A %0A def userModes(self):%0A return %5B (%22i%22, ModeType.NoParam, self) %5D%0A %0A def isInvisible(self, user, channel, fromUser, userSeeing):%0A if %22i%22 in user.modes:%0A return True%0A return None%0A %0A def apply(self, actionName, user, param, channel, fromUser, sameUser):%0A if user != sameUser:%0A return None%0A if not channel or fromUser not in channel.users:%0A return False%0A return None%0A%0AinvisibleMode = InvisibleMode()
|
|
cfec967be4602dff636adb951b582d1db114f578
|
Add single dataset doc gen
|
tensorflow_datasets/scripts/document_dataset_version.py
|
tensorflow_datasets/scripts/document_dataset_version.py
|
Python
| 0.000001
|
@@ -0,0 +1,1653 @@
+# coding=utf-8%0A# Copyright 2020 The TensorFlow Datasets Authors.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A# Lint as: python3%0Ar%22%22%22Generate the catalog documentation page for a single dataset.%0A%0AInstructions:%0A%0A%60%60%60%0Apython tensorflow_datasets/scripts/document_dataset_version.py%0A%60%60%60%0A%0A%22%22%22%0A%0Aimport os%0A%0Afrom absl import app%0Afrom absl import flags%0A%0Aimport tensorflow.compat.v2 as tf%0Aimport tensorflow_datasets as tfds%0Afrom tensorflow_datasets.scripts.document_datasets import document_single_builder%0A%0A%0AFLAGS = flags.FLAGS%0A%0Aflags.DEFINE_string('tfds_dir', tfds.core.utils.tfds_dir(),%0A                    'Path to tensorflow_datasets directory')%0A%0ADATASETS_TO_DOCUMENT = %5B'waymo_open_dataset'%5D%0A%0Adef version_doc(ds_name):%0A  builder = tfds.builder(ds_name)%0A  catalog_dir = tfds.core.get_tfds_path('../docs/catalog/')%0A  with tf.io.gfile.GFile(os.path.join(catalog_dir, ds_name + %22.md%22), 'w') as f:%0A    doc_builder = document_single_builder(builder)%0A    f.write(doc_builder)%0A%0Adef main(_):%0A  for name in DATASETS_TO_DOCUMENT:%0A    version_doc(name)%0A%0Aif __name__ == '__main__':%0A  app.run(main)
|
|
ba5a251896e51bab70fa6b664e6974d3c42ff7b4
|
add asciidoctor gem (#9905)
|
var/spack/repos/builtin/packages/asciidoctor/package.py
|
var/spack/repos/builtin/packages/asciidoctor/package.py
|
Python
| 0
|
@@ -0,0 +1,604 @@
+# Copyright 2013-2018 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass Asciidoctor(Package):%0A %22%22%22Modern asciidoc tool based on ruby%22%22%22%0A%0A homepage = %22https://asciidoctor.org/%22%0A url = %22https://rubygems.org/downloads/asciidoctor-1.5.8.gem%22%0A%0A version('1.5.8', '5f55200cab8d1cfcf561e66d3f477159', expand=False)%0A%0A extends('ruby')%0A%0A def install(self, spec, prefix):%0A gem('install', 'asciidoctor-%7B0%7D.gem'.format(self.version))%0A
|
|
a189203bf2dc7ddd925ed8cfbeb191ee52d43711
|
Add Python script to investigate s3 speeds
|
src/aws_scripts/benchmark_s3.py
|
src/aws_scripts/benchmark_s3.py
|
Python
| 0.000001
|
@@ -0,0 +1,1776 @@
+from __future__ import print_function%0Aimport boto3%0Aimport subprocess%0Aimport os%0Aimport pandas as pd%0Aimport time%0A%0Adef DownloadFile(local_file, bucket, bucket_path):%0A print(%22Downloading file...%22, end=%22%22)%0A s3 = boto3.resource('s3')%0A s3.Bucket(bucket).download_file(bucket_path, local_file)%0A print(%22Done!%22)%0A%0Adef UploadFile(local_file, bucket, bucket_path):%0A print(%22Uploading file...%22, end=%22%22)%0A s3 = boto3.resource('s3')%0A s3.Bucket(bucket).upload_file(local_file, bucket_path)%0A print(%22Done!%22)%0A%0Adef CreateFile(file_size):%0A file_size = file_size *1e6%0A file_name = 'file.bin'%0A subprocess.check_output(%5B'dd', 'if=/dev/zero', 'of='+file_name, 'bs='+str(int(file_size)), 'count=1'%5D)%0A return os.path.abspath(file_name)%0A%0A%0Adef main():%0A file_sizes_mb = %5B1, 2, 4, 8, 16, 32, 64, 128, 256, 512, 1024%5D%0A #file_sizes_mb = %5B1, 2, 4, 8%5D%0A num_iterations=10%0A bucket = 'aolme'%0A bucket_path = 'dummy/file.bin'%0A%0A times =%7B%7D%0A for sz in file_sizes_mb:%0A local_file = CreateFile(sz)%0A upload_times =%5B%5D%0A download_times = %5B%5D%0A print(%22Testing size: %22 + str(sz))%0A for iter in range(1, num_iterations+1):%0A start = time.time()%0A UploadFile(local_file, bucket, bucket_path)%0A elapsed_time = time.time() - start%0A upload_times.append(elapsed_time)%0A%0A start = time.time()%0A DownloadFile(local_file, bucket, bucket_path)%0A elapsed_time = time.time() - start%0A download_times.append(elapsed_time)%0A%0A times%5B'Download ' + str(sz) + 'MB (s)'%5D= download_times%0A times%5B'Upload ' + str(sz) + 'MB (s)'%5D = upload_times%0A df = pd.DataFrame(times)%0A print(df.head())%0A df.to_csv(%22s3_timing_results.txt%22)%0A%0Aif __name__=='__main__':%0A main()%0A
|
|
a78e2566d26b229192e94fecb3c89801afbdce02
|
Create MQTT.py
|
service/MQTT.py
|
service/MQTT.py
|
Python
| 0
|
@@ -0,0 +1,365 @@
+from time import sleep%0A%0Atopic = %22mrl%22%0Aqos = 2%0Abroker = %22tcp://iot.eclipse.org:1883%22  # if you have your own just change the hostname/IP%0AclientID = %22MRL MQTT python%22%0A%0Amqtt1 = Runtime.createAndStart(%22mqtt%22, %22MQTT%22)%0Amqtt1.startService()%0Aprint mqtt1.getDescription()%0Amqtt1.startClient(topic, qos, broker, clientID)%0A%0Asleep(1)%0A%0Amqtt1.publish(%22Greetings from MRL python%22)%0A
|
|
2f0be29357a8b649b59cd685eb6dea646813feac
|
Add discovery example (#361)
|
examples/discovery_example.py
|
examples/discovery_example.py
|
Python
| 0
|
@@ -0,0 +1,1005 @@
+%22%22%22%0AExample that shows how to receive updates on discovered chromecasts.%0A%22%22%22%0Aimport argparse%0Aimport logging%0Aimport time%0A%0Aimport pychromecast%0A%0Aparser = argparse.ArgumentParser(description=%22Example on how to receive updates on discovered chromecasts.%22)%0Aparser.add_argument(%22--show-debug%22, help=%22Enable debug log%22, action=%22store_true%22)%0Aargs = parser.parse_args()%0A%0Aif args.show_debug:%0A logging.basicConfig(level=logging.DEBUG)%0A%0Adef list_devices():%0A print(%22Currently known cast devices:%22)%0A for name, service in listener.services.items():%0A print(%22 %7B%7D %7B%7D%22.format(name, service))%0A%0Adef add_callback(name):%0A print(%22Found cast device %7B%7D%22.format(name))%0A list_devices()%0A%0Adef remove_callback(name, service):%0A print(%22Lost cast device %7B%7D %7B%7D%22.format(name, service))%0A list_devices()%0A%0Alistener, browser = pychromecast.discovery.start_discovery(add_callback, remove_callback)%0A%0Atry:%0A while True:%0A time.sleep(1)%0Aexcept KeyboardInterrupt:%0A pass%0A%0Apychromecast.stop_discovery(browser)%0A
|
|
182d4229f6293eec791a5eba716917ace3bcfaca
|
Make looking for outliers easier
|
cal_pipe/manual_outlier_flag.py
|
cal_pipe/manual_outlier_flag.py
|
Python
| 0.000013
|
@@ -0,0 +1,1028 @@
+%0Aimport sys%0A%0A'''%0ACheck for garbage points in an MS by SPW.%0A'''%0A%0Atry:%0A    vis_name = sys.argv%5B1%5D%0A    corrstring = sys.argv%5B2%5D%0A    starting_spw = int(sys.argv%5B3%5D)%0A    # scan to inspect (e.g. the bandpass calibrator scan)%0A    bp_scan = sys.argv%5B4%5D%0Aexcept IndexError:%0A    vis_name = raw_input(%22MS Name? : %22)%0A    corrstring = raw_input(%22Corrstring? : %22)%0A    starting_spw = int(raw_input(%22SPW to start at? : %22))%0A    bp_scan = raw_input(%22Scan to plot (e.g. the bandpass scan)? : %22)%0A%0Atb.open(vis_name + '/SPECTRAL_WINDOW')%0Afreqs = tb.getcol('REF_FREQUENCY')%0Anchans = tb.getcol('NUM_CHAN')%0Atb.close()%0A%0Aspws = range(starting_spw, len(freqs))%0A%0Afor spw_num in spws:%0A%0A    print %22On %22 + str(spw_num+1) + %22 of %22 + str(len(freqs))%0A%0A    default('plotms')%0A    vis = vis_name%0A    xaxis = 'time'%0A    yaxis = 'amp'%0A    ydatacolumn = 'corrected'%0A    selectdata = True%0A    field = ''%0A    spw = str(spw_num)%0A    scan = bp_scan%0A    correlation = corrstring%0A    averagedata = False%0A    avgscan = False%0A    transform = False%0A    extendflag = False%0A    iteraxis = ''%0A    coloraxis = 'antenna2'%0A    plotrange = %5B%5D%0A    xlabel = ''%0A    ylabel = ''%0A    showmajorgrid = False%0A    showminorgrid = False%0A    plotms()%0A
|
|
8bf7b5ae2464721be8270b159b99cd728109f000
|
Create __init__.py
|
Python_Version/Ex7.K-Means_PCA_-_Clustering/__init__.py
|
Python_Version/Ex7.K-Means_PCA_-_Clustering/__init__.py
|
Python
| 0.000429
|
@@ -0,0 +1 @@
+%0A
|
|
13056d362fceb0aa20487a440c7347ad4231ea97
|
Add tiltoc.py
|
tiltoc.py
|
tiltoc.py
|
Python
| 0.001485
|
@@ -0,0 +1,2065 @@
+import os%0Afrom datetime import datetime%0A%0A# Directory%0Aroot = os.getcwd()%0A%0A# Exclude%0Aexcludes = (root, %22drafts%22)%0A%0Adef relative(root, path):%0A return '/'.join(path.replace(root, '').split(os.path.sep)%5B1:%5D)%0A%0Adef tils(root):%0A for (path, dirs, files) in os.walk(root):%0A dirs%5B:%5D = %5Bd for d in dirs if d not in excludes and not d.startswith(%22.%22)%5D%0A paths = %5Bos.path.join(path, f) for f in files if f.endswith(%22.md%22)%5D%0A if path != root:%0A yield relative(root, path), paths%0A%0Adef flat(tils):%0A for (relative, paths) in tils:%0A for path in paths:%0A yield relative, path%0A%0Adef recent(tils, limit):%0A modified = %5B%5D%0A for relative, filename in tils:%0A date = os.path.getmtime(filename)%0A modified.append((date, filename))%0A modified.sort(key=lambda data: data%5B0%5D, reverse=True)%0A return modified%5B:limit%5D%0A%0Adef link(root, path):%0A path = relative(root, path)%0A filename = path.split('/')%5B-1%5D%0A title = ' '.join(n.capitalize() for n in os.path.splitext(filename)%5B0%5D.split('-'))%0A return f%22%5B%7Btitle%7D%5D(%7Bpath%7D)%22%0A%0Adef total(root):%0A return len(list(flat(tils(root))))%0A%0Adef readme():%0A lines = %5B%5D%0A lines.append(%22# TIL%5Cn%22)%0A lines.append(%22%3E Today I Learned%5Cn%22)%0A%0A # Recents%0A lines.append(%22## Recently Modified%5Cn%22)%0A for date, filename in recent(flat(tils(root)), 15):%0A date = datetime.utcfromtimestamp(date).strftime(%22%25Y-%25m-%25d%22)%0A l = link(root, filename)%0A lines.append(f%22- *%7Bdate%7D* : %7Bl%7D%22)%0A%0A # Categories%0A lines.append(%22%5Cn## Categories%5Cn%22)%0A lines.append(%22Total %60%25s%60 TILs%5Cn%22 %25 total(root))%0A for relative, paths in tils(root):%0A count = len(paths)%0A lines.append(f%22- %5B%7Brelative%7D%5D(#%7Brelative%7D) *(%7Bcount%7D)*%22)%0A%0A # Links%0A for relative, paths in tils(root):%0A lines.append(f%22%5Cn### %7Brelative%7D%5Cn%22)%0A for path in paths:%0A l = link(root, path)%0A lines.append(f%22- %7Bl%7D%22)%0A%0A return lines%0A%0Aoutput = open(os.path.join(root, %22README.md%22), 'w')%0Afor line in readme():%0A output.write(line)%0A output.write('%5Cn')%0Aoutput.close()%0A
|
|
7fa62ea8374c839f8f188f34505de4d3114e6b56
|
Create app.py
|
channels/r_BetterEveryLoop/app.py
|
channels/r_BetterEveryLoop/app.py
|
Python
| 0.000003
|
@@ -0,0 +1,1720 @@
+#encoding:utf-8%0A%0Afrom utils import get_url, weighted_random_subreddit%0A%0A%0A# Subreddit that will be a source of content%0Asubreddit = weighted_random_subreddit(%7B%0A    'BetterEveryLoop': 1.0,%0A    # If we want to get content from several subreddits%0A    # please provide here 'subreddit': probability%0A    # 'any_other_subreddit': 0.02%0A%7D)%0A# Telegram channel with @reddit2telegram_bot as an admin%0At_channel = '@r_BetterEveryLoop'%0A%0A%0Adef send_post(submission, r2t):%0A    what, url, ext = get_url(submission)%0A%0A    # If this func returns:%0A    # False – it means that we will not send%0A    # this submission, let's move to the next.%0A    # True – everything is ok, we send the submission%0A    # None – we do not want to send anything this time,%0A    # let's just sleep.%0A%0A    # Get all data from submission that we need%0A    title = submission.title%0A    link = submission.shortlink%0A    text = '%7B%7D%5Cn%7B%7D'.format(title, link)%0A%0A    if what == 'text':%0A        # If it is a text submission, it is not really funny.%0A        # return r2t.send_text(submission.selftext)%0A        return False%0A    elif what == 'other':%0A        # Also we are not interested in any other content.%0A        return False%0A    elif what == 'album':%0A        # It is ok if it is an album.%0A        base_url = submission.url%0A        text = '%7B%7D%5Cn%7B%7D%5Cn%5Cn%7B%7D'.format(title, base_url, link)%0A        r2t.send_text(text)%0A        r2t.send_album(url)%0A        return True%0A    elif what in ('gif', 'img'):%0A        # Also it is ok if it is a gif or any kind of image.%0A%0A        # Check if content has already appeared in%0A        # our telegram channel.%0A        if r2t.dup_check_and_mark(url) is True:%0A            return False%0A        return r2t.send_gif_img(what, url, ext, text)%0A    else:%0A        return False%0A
|
|
7a8c6520beb7dfb086d1c9b5254e7083d694f35c
|
check for globals in the lib that are not prefixed with toku. addresses #74
|
src/tokuglobals.py
|
src/tokuglobals.py
|
Python
| 0.00001
|
@@ -0,0 +1,1252 @@
+#!/usr/bin/python%0A%0Aimport sys%0Aimport os%0Aimport re%0A%0Adef checkglobals(libname, exceptsymbols, verbose):%0A badglobals = 0%0A nmcmd = %22nm -g %22 + libname%0A f = os.popen(nmcmd)%0A b = f.readline()%0A while b != %22%22:%0A match = re.match(%22%5E(%5B0-9a-f%5D+)%5Cs(.?)%5Cs(.*)$%22, b)%0A if match == None:%0A match = re.match(%22%5E%5Cs+(.*)$%22, b)%0A if match == None:%0A print %22unknown%22, b%0A badglobals = 1%0A else:%0A type = match.group(2)%0A symbol = match.group(3)%0A if verbose: print type, symbol%0A match = re.match(%22%5Etoku_%22, symbol)%0A if match == None and not exceptsymbols.has_key(symbol):%0A print %22non toku symbol=%22, symbol%0A badglobals = 1%0A b = f.readline()%0A f.close()%0A return badglobals%0A%0Adef main():%0A verbose = 0%0A for arg in sys.argv%5B1:%5D:%0A if arg == %22-v%22:%0A verbose += 1%0A exceptsymbols = %7B%7D%0A for n in %5B %22_init%22, %22_fini%22, %22_end%22, %22_edata%22, %22__bss_start%22 %5D:%0A exceptsymbols%5Bn%5D = 1%0A for n in %5B %22db_env_create%22, %22db_create%22, %22db_strerror%22, %22db_version%22, %22log_compare%22 %5D:%0A exceptsymbols%5Bn%5D = 1%0A return checkglobals(%22libdb.so%22, exceptsymbols, verbose)%0A %0Asys.exit(main())%0A
|
|
e53adf003c38f826b82a090489b2deb8c438dece
|
Version bump to 0.16
|
homeassistant/const.py
|
homeassistant/const.py
|
# coding: utf-8
"""Constants used by Home Assistant components."""
__version__ = "0.16.0.dev0"
REQUIRED_PYTHON_VER = (3, 4)
# Can be used to specify a catch all when registering state or event listeners.
MATCH_ALL = '*'
# If no name is specified
DEVICE_DEFAULT_NAME = "Unnamed Device"
# #### CONFIG ####
CONF_ICON = "icon"
CONF_LATITUDE = "latitude"
CONF_LONGITUDE = "longitude"
CONF_TEMPERATURE_UNIT = "temperature_unit"
CONF_NAME = "name"
CONF_TIME_ZONE = "time_zone"
CONF_CUSTOMIZE = "customize"
CONF_PLATFORM = "platform"
CONF_HOST = "host"
CONF_HOSTS = "hosts"
CONF_USERNAME = "username"
CONF_PASSWORD = "password"
CONF_API_KEY = "api_key"
CONF_ACCESS_TOKEN = "access_token"
CONF_FILENAME = "filename"
CONF_SCAN_INTERVAL = "scan_interval"
CONF_VALUE_TEMPLATE = "value_template"
# #### EVENTS ####
EVENT_HOMEASSISTANT_START = "homeassistant_start"
EVENT_HOMEASSISTANT_STOP = "homeassistant_stop"
EVENT_STATE_CHANGED = "state_changed"
EVENT_TIME_CHANGED = "time_changed"
EVENT_CALL_SERVICE = "call_service"
EVENT_SERVICE_EXECUTED = "service_executed"
EVENT_PLATFORM_DISCOVERED = "platform_discovered"
EVENT_COMPONENT_LOADED = "component_loaded"
EVENT_SERVICE_REGISTERED = "service_registered"
# #### STATES ####
STATE_ON = 'on'
STATE_OFF = 'off'
STATE_HOME = 'home'
STATE_NOT_HOME = 'not_home'
STATE_UNKNOWN = 'unknown'
STATE_OPEN = 'open'
STATE_CLOSED = 'closed'
STATE_PLAYING = 'playing'
STATE_PAUSED = 'paused'
STATE_IDLE = 'idle'
STATE_STANDBY = 'standby'
STATE_ALARM_DISARMED = 'disarmed'
STATE_ALARM_ARMED_HOME = 'armed_home'
STATE_ALARM_ARMED_AWAY = 'armed_away'
STATE_ALARM_PENDING = 'pending'
STATE_ALARM_TRIGGERED = 'triggered'
STATE_LOCKED = 'locked'
STATE_UNLOCKED = 'unlocked'
STATE_UNAVAILABLE = 'unavailable'
# #### STATE AND EVENT ATTRIBUTES ####
# Contains current time for a TIME_CHANGED event
ATTR_NOW = "now"
# Contains domain, service for a SERVICE_CALL event
ATTR_DOMAIN = "domain"
ATTR_SERVICE = "service"
ATTR_SERVICE_DATA = "service_data"
# Data for a SERVICE_EXECUTED event
ATTR_SERVICE_CALL_ID = "service_call_id"
# Contains one string or a list of strings, each being an entity id
ATTR_ENTITY_ID = 'entity_id'
# String with a friendly name for the entity
ATTR_FRIENDLY_NAME = "friendly_name"
# A picture to represent entity
ATTR_ENTITY_PICTURE = "entity_picture"
# Icon to use in the frontend
ATTR_ICON = "icon"
# The unit of measurement if applicable
ATTR_UNIT_OF_MEASUREMENT = "unit_of_measurement"
# Temperature attribute
ATTR_TEMPERATURE = "temperature"
TEMP_CELCIUS = "°C"
TEMP_FAHRENHEIT = "°F"
# Contains the information that is discovered
ATTR_DISCOVERED = "discovered"
# Location of the device/sensor
ATTR_LOCATION = "location"
ATTR_BATTERY_LEVEL = "battery_level"
# For devices which support a code attribute
ATTR_CODE = 'code'
ATTR_CODE_FORMAT = 'code_format'
# For devices which support an armed state
ATTR_ARMED = "device_armed"
# For devices which support a locked state
ATTR_LOCKED = "locked"
# For sensors that support 'tripping', eg. motion and door sensors
ATTR_TRIPPED = "device_tripped"
# For sensors that support 'tripping' this holds the most recent
# time the device was tripped
ATTR_LAST_TRIP_TIME = "last_tripped_time"
# For all entity's, this hold whether or not it should be hidden
ATTR_HIDDEN = "hidden"
# Location of the entity
ATTR_LATITUDE = "latitude"
ATTR_LONGITUDE = "longitude"
# Accuracy of location in meters
ATTR_GPS_ACCURACY = 'gps_accuracy'
# If state is assumed
ATTR_ASSUMED_STATE = 'assumed_state'
# #### SERVICES ####
SERVICE_HOMEASSISTANT_STOP = "stop"
SERVICE_HOMEASSISTANT_RESTART = "restart"
SERVICE_TURN_ON = 'turn_on'
SERVICE_TURN_OFF = 'turn_off'
SERVICE_TOGGLE = 'toggle'
SERVICE_VOLUME_UP = "volume_up"
SERVICE_VOLUME_DOWN = "volume_down"
SERVICE_VOLUME_MUTE = "volume_mute"
SERVICE_VOLUME_SET = "volume_set"
SERVICE_MEDIA_PLAY_PAUSE = "media_play_pause"
SERVICE_MEDIA_PLAY = "media_play"
SERVICE_MEDIA_PAUSE = "media_pause"
SERVICE_MEDIA_NEXT_TRACK = "media_next_track"
SERVICE_MEDIA_PREVIOUS_TRACK = "media_previous_track"
SERVICE_MEDIA_SEEK = "media_seek"
SERVICE_ALARM_DISARM = "alarm_disarm"
SERVICE_ALARM_ARM_HOME = "alarm_arm_home"
SERVICE_ALARM_ARM_AWAY = "alarm_arm_away"
SERVICE_ALARM_TRIGGER = "alarm_trigger"
SERVICE_LOCK = "lock"
SERVICE_UNLOCK = "unlock"
SERVICE_OPEN = "open"
SERVICE_CLOSE = "close"
SERVICE_MOVE_UP = 'move_up'
SERVICE_MOVE_DOWN = 'move_down'
SERVICE_STOP = 'stop'
# #### API / REMOTE ####
SERVER_PORT = 8123
URL_ROOT = "/"
URL_API = "/api/"
URL_API_STREAM = "/api/stream"
URL_API_CONFIG = "/api/config"
URL_API_STATES = "/api/states"
URL_API_STATES_ENTITY = "/api/states/{}"
URL_API_EVENTS = "/api/events"
URL_API_EVENTS_EVENT = "/api/events/{}"
URL_API_SERVICES = "/api/services"
URL_API_SERVICES_SERVICE = "/api/services/{}/{}"
URL_API_EVENT_FORWARD = "/api/event_forwarding"
URL_API_COMPONENTS = "/api/components"
URL_API_ERROR_LOG = "/api/error_log"
URL_API_LOG_OUT = "/api/log_out"
URL_API_TEMPLATE = "/api/template"
HTTP_OK = 200
HTTP_CREATED = 201
HTTP_MOVED_PERMANENTLY = 301
HTTP_BAD_REQUEST = 400
HTTP_UNAUTHORIZED = 401
HTTP_NOT_FOUND = 404
HTTP_METHOD_NOT_ALLOWED = 405
HTTP_UNPROCESSABLE_ENTITY = 422
HTTP_INTERNAL_SERVER_ERROR = 500
HTTP_HEADER_HA_AUTH = "X-HA-access"
HTTP_HEADER_ACCEPT_ENCODING = "Accept-Encoding"
HTTP_HEADER_CONTENT_TYPE = "Content-type"
HTTP_HEADER_CONTENT_ENCODING = "Content-Encoding"
HTTP_HEADER_VARY = "Vary"
HTTP_HEADER_CONTENT_LENGTH = "Content-Length"
HTTP_HEADER_CACHE_CONTROL = "Cache-Control"
HTTP_HEADER_EXPIRES = "Expires"
CONTENT_TYPE_JSON = "application/json"
CONTENT_TYPE_MULTIPART = 'multipart/x-mixed-replace; boundary={}'
CONTENT_TYPE_TEXT_PLAIN = 'text/plain'
# The exit code to send to request a restart
RESTART_EXIT_CODE = 100
|
Python
| 0
|
@@ -86,13 +86,8 @@
16.0
-.dev0
%22%0ARE
|
f709f00e5c4e112774cebbc7ab3f546354290da8
|
Create human_readable_time.py
|
human_readable_time.py
|
human_readable_time.py
|
Python
| 0.002008
|
@@ -0,0 +1,269 @@
+#Kunal Gautam%0A#Codewars : @Kunalpod%0A#Problem name: Human Readable Time%0A#Problem level: 5 kyu%0A%0Adef make_readable(seconds):%0A s = '%7B:0%3E2%7D'.format(seconds%2560) %0A m = '%7B:0%3E2%7D'.format((seconds//60)%2560)%0A h = '%7B:0%3E2%7D'.format(seconds//3600)%0A return ':'.join(%5Bh,m,s%5D)%0A
|
|
17d1bb1e381a1bfcbf0a4dab866dedf2840d1d26
|
substrings must be of length 3
|
043_sub_string_divisibility.py
|
043_sub_string_divisibility.py
|
Python
| 0.999999
|
@@ -0,0 +1,927 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A#%0A# A Solution to %22Sub-string divisibility%22 %E2%80%93 Project Euler Problem No. 43%0A# by Florian Buetow%0A#%0A# Sourcecode: https://github.com/fbcom/project-euler%0A# Problem statement: https://projecteuler.net/problem=43%0Aimport itertools%0A%0A%0Adef is_substring_divisible(n):%0A n = str(n)%0A primes = %5B2, 3, 5, 7, 11, 13, 17%5D%0A for i, prime in enumerate(primes):%0A substring = n%5Bi+1:i+1+3%5D%0A assert(len(substring) == 3), %22Substring must be of length 3%22%0A if int(substring) %25 prime != 0:%0A return False%0A return True%0A%0A# Testcase%0Aassert is_substring_divisible(1406357289), %22Testcase failed%22%0A%0A# Solve%0Asum = 0%0Afor permutation in itertools.permutations(%221234567890%22):%0A n = int(%22%22.join(permutation))%0A if len(str(n)) == 10:%0A if is_substring_divisible(n):%0A # print %22%25d is asubstring divisible.%22 %25 n%0A sum = sum + n%0A%0Aprint %22Solution:%22, sum%0A
|
|
56e51e48de854c83870549c1cab2893c24116734
|
add tests
|
check_mk_web_api/test_WebApi.py
|
check_mk_web_api/test_WebApi.py
|
Python
| 0
|
@@ -0,0 +1,2372 @@
+import os%0A%0Aimport pytest%0A%0Afrom check_mk_web_api import WebApi, CheckMkWebApiException%0A%0Aapi = WebApi(%0A os.environ%5B'CHECK_MK_URL'%5D,%0A os.environ%5B'CHECK_MK_USER'%5D,%0A os.environ%5B'CHECK_MK_SECRET'%5D%0A)%0A%0A%0Adef setup():%0A api.delete_all_hosts()%0A%0A%0Adef test_add_host():%0A api.add_host('host00')%0A assert 'host00' in api.get_all_hosts()%0A%0A%0Adef test_add_duplicate_host():%0A with pytest.raises(CheckMkWebApiException):%0A api.add_host('host00')%0A api.add_host('host00')%0A%0A%0Adef test_edit_host():%0A api.add_host('host00', ipaddress='192.168.0.100')%0A assert api.get_host('host00')%5B'attributes'%5D%5B'ipaddress'%5D == '192.168.0.100'%0A%0A api.edit_host('host00', ipaddress='192.168.0.101')%0A assert api.get_host('host00')%5B'attributes'%5D%5B'ipaddress'%5D == '192.168.0.101'%0A%0A%0Adef test_unset_attribute():%0A api.add_host('host00', ipaddress='192.168.0.100')%0A assert api.get_host('host00')%5B'attributes'%5D%5B'ipaddress'%5D == '192.168.0.100'%0A api.edit_host('host00', unset_attributes=%5B'ipaddress'%5D)%0A assert 'ipaddress' not in api.get_host('host00')%5B'attributes'%5D%0A%0A%0Adef test_edit_nonexistent_host():%0A with pytest.raises(CheckMkWebApiException):%0A api.edit_host('host00', ipaddress='192.168.0.101')%0A%0A%0Adef test_get_host():%0A api.add_host('host00')%0A assert api.get_host('host00')%5B'hostname'%5D == 'host00'%0A%0A%0Adef test_get_nonexistent_host():%0A with pytest.raises(CheckMkWebApiException):%0A api.get_host('host00')%0A%0A%0Adef test_get_all_hosts():%0A api.add_host('host00')%0A api.add_host('host01')%0A%0A all_hosts = api.get_all_hosts()%0A assert len(all_hosts) == 2%0A assert 'host00' in all_hosts%0A assert 'host01' in all_hosts%0A%0A%0Adef test_delete_host():%0A api.add_host('host00')%0A assert len(api.get_all_hosts()) == 1%0A%0A api.delete_host('host00')%0A assert len(api.get_all_hosts()) == 0%0A%0A%0Adef test_delete_nonexistent_host():%0A with pytest.raises(CheckMkWebApiException):%0A api.delete_host('host00')%0A%0A%0Adef test_delete_all_hosts():%0A api.add_host('host00')%0A api.add_host('host01')%0A assert len(api.get_all_hosts()) == 2%0A%0A api.delete_all_hosts()%0A assert len(api.get_all_hosts()) == 0%0A%0A%0Adef test_discover_services():%0A api.add_host('localhost')%0A api.discover_services('localhost')%0A%0A%0Adef test_discover_services_for_nonexistent_host():%0A with pytest.raises(CheckMkWebApiException):%0A api.discover_services('localhost')%0A
|
|
bb638c49f0a73289392e63dff7bec6e9b49b8199
|
Add a simple wrapper around losetup
|
datapath/raw+file/losetup.py
|
datapath/raw+file/losetup.py
|
Python
| 0
|
@@ -0,0 +1,1323 @@
+#!/usr/bin/env python%0A%0Aimport xapi%0Aimport commands%0A%0Adef log(txt):%0A print %3E%3Esys.stderr, txt%0A%0A# %5Brun dbg cmd%5D executes %5Bcmd%5D, throwing a BackendError if exits with%0A# a non-zero exit code.%0Adef run(dbg, cmd):%0A code, output = commands.getstatusoutput(cmd)%0A if code %3C%3E 0:%0A log(%22%25s: %25s exitted with code %25d: %25s%22 %25 (dbg, cmd, code, output))%0A raise (xapi.InternalError(%22%25s exitted with non-zero code %25d: %25s%22 %25 (cmd, code, output)))%0A return output%0A%0A# Use Linux %22losetup%22 to create block devices from files%0Aclass Losetup:%0A # %5B_find dbg path%5D returns the loop device associated with %5Bpath%5D%0A def _find(self, dbg, path):%0A for line in run(dbg, %22losetup -a%22).split(%22%5Cn%22):%0A line = line.strip()%0A if line %3C%3E %22%22:%0A bits = line.split()%0A loop = bits%5B0%5D%5B0:-1%5D%0A this_path = bits%5B2%5D%5B1:-1%5D%0A if this_path == path:%0A return loop%0A return None%0A # %5Badd dbg path%5D creates a new loop device for %5Bpath%5D and returns it%0A def add(self, dbg, path):%0A run(dbg, %22losetup -f %25s%22 %25 path)%0A return self._find(dbg, path)%0A # %5Bremove dbg path%5D removes the loop device associated with %5Bpath%5D%0A def remove(self, dbg, path):%0A loop = self._find(dbg, path)%0A run(dbg, %22losetup -d %25s%22 %25 loop)%0A%0A
|
|
79f7a947d9fbf1903957aea0ef459bee53d2ba82
|
use subnet_service_types extension from neutron-lib
|
neutron/extensions/subnet_service_types.py
|
neutron/extensions/subnet_service_types.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import subnet as subnet_def
from neutron_lib.api import extensions
from neutron_lib.api import validators
from neutron_lib import constants
from neutron_lib import exceptions
import six
import webob.exc
from neutron._i18n import _
# List for service plugins to register their own prefixes
valid_prefixes = []
class InvalidSubnetServiceType(exceptions.InvalidInput):
message = _("Subnet service type %(service_type)s does not correspond "
"to a valid device owner.")
class InvalidInputSubnetServiceType(exceptions.InvalidInput):
message = _("Subnet service type %(service_type)s is not a string.")
def _validate_subnet_service_types(service_types, valid_values=None):
if service_types:
if not isinstance(service_types, list):
raise webob.exc.HTTPBadRequest(
_("Subnet service types must be a list."))
prefixes = valid_prefixes
# Include standard prefixes
prefixes += list(constants.DEVICE_OWNER_PREFIXES)
prefixes += constants.DEVICE_OWNER_COMPUTE_PREFIX
for service_type in service_types:
if not isinstance(service_type, six.text_type):
raise InvalidInputSubnetServiceType(service_type=service_type)
elif not service_type.startswith(tuple(prefixes)):
raise InvalidSubnetServiceType(service_type=service_type)
validators.add_validator('type:validate_subnet_service_types',
_validate_subnet_service_types)
EXTENDED_ATTRIBUTES_2_0 = {
subnet_def.COLLECTION_NAME: {
'service_types': {
'allow_post': True,
'allow_put': True,
'default': constants.ATTR_NOT_SPECIFIED,
'validate': {'type:validate_subnet_service_types': None},
'is_visible': True,
},
},
}
class Subnet_service_types(extensions.ExtensionDescriptor):
"""Extension class supporting subnet service types."""
@classmethod
def get_name(cls):
return "Subnet service types"
@classmethod
def get_alias(cls):
return "subnet-service-types"
@classmethod
def get_description(cls):
return "Provides ability to set the subnet service_types field"
@classmethod
def get_updated(cls):
return "2016-03-15T18:00:00-00:00"
def get_extended_resources(self, version):
if version == "2.0":
return EXTENDED_ATTRIBUTES_2_0
else:
return {}
|
Python
| 0.000003
|
@@ -616,19 +616,29 @@
bnet
- as subnet_
+_service_types as api
def%0A
@@ -680,1772 +680,8 @@
ons%0A
-from neutron_lib.api import validators%0Afrom neutron_lib import constants%0Afrom neutron_lib import exceptions%0Aimport six%0Aimport webob.exc%0A%0Afrom neutron._i18n import _%0A%0A%0A# List for service plugins to register their own prefixes%0Avalid_prefixes = %5B%5D%0A%0A%0Aclass InvalidSubnetServiceType(exceptions.InvalidInput):%0A message = _(%22Subnet service type %25(service_type)s does not correspond %22%0A %22to a valid device owner.%22)%0A%0A%0Aclass InvalidInputSubnetServiceType(exceptions.InvalidInput):%0A message = _(%22Subnet service type %25(service_type)s is not a string.%22)%0A%0A%0Adef _validate_subnet_service_types(service_types, valid_values=None):%0A if service_types:%0A if not isinstance(service_types, list):%0A raise webob.exc.HTTPBadRequest(%0A _(%22Subnet service types must be a list.%22))%0A%0A prefixes = valid_prefixes%0A # Include standard prefixes%0A prefixes += list(constants.DEVICE_OWNER_PREFIXES)%0A prefixes += constants.DEVICE_OWNER_COMPUTE_PREFIX%0A%0A for service_type in service_types:%0A if not isinstance(service_type, six.text_type):%0A raise InvalidInputSubnetServiceType(service_type=service_type)%0A elif not service_type.startswith(tuple(prefixes)):%0A raise InvalidSubnetServiceType(service_type=service_type)%0A%0A%0Avalidators.add_validator('type:validate_subnet_service_types',%0A _validate_subnet_service_types)%0A%0A%0AEXTENDED_ATTRIBUTES_2_0 = %7B%0A subnet_def.COLLECTION_NAME: %7B%0A 'service_types': %7B%0A 'allow_post': True,%0A 'allow_put': True,%0A 'default': constants.ATTR_NOT_SPECIFIED,%0A 'validate': %7B'type:validate_subnet_service_types': None%7D,%0A 'is_visible': True,%0A %7D,%0A %7D,%0A%7D%0A
%0A%0Acl
@@ -716,16 +716,19 @@
ensions.
+API
Extensio
@@ -804,526 +804,32 @@
%22%22%22%0A
-%0A
-@classmethod%0A def get_name(cls):%0A return %22Subnet service types%22%0A%0A @classmethod%0A def get_alias(cls):%0A return %22subnet-service-types%22%0A%0A @classmethod%0A def get_description(cls):%0A return %22Provides ability to set the subnet service_types field%22%0A%0A @classmethod%0A def get_updated(cls):%0A return %222016-03-15T18:00:00-00:00%22%0A%0A def get_extended_resources(self, version):%0A if version == %222.0%22:%0A return EXTENDED_ATTRIBUTES_2_0%0A else:%0A return %7B%7D
+api_definition = apidef
%0A
|
cd8110b0ca4d53477b8331af2317e267da057df6
|
add example 'audit' where chat is replicated in a secondary space
|
examples/audit.py
|
examples/audit.py
|
Python
| 0
|
@@ -0,0 +1,2745 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0A# Licensed to the Apache Software Foundation (ASF) under one or more%0A# contributor license agreements. See the NOTICE file distributed with%0A# this work for additional information regarding copyright ownership.%0A# The ASF licenses this file to You under the Apache License, Version 2.0%0A# (the %22License%22); you may not use this file except in compliance with%0A# the License. You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%22%22%22%0AAudit interactions in real-time%0A%0AIn this example we create a shell with one simple command: audit%0A%0A- command: audit%0A- provides clear status if this room is currently audited or not%0A%0A- command: audit on%0A- starts auditing%0A%0A- command: audit off%0A- ensure private interactions%0A%0A%0ATo run this script you have to provide a custom configuration, or set%0Aenvironment variables instead::%0A%0A- %60%60CHAT_ROOM_MODERATORS%60%60 - You have at least your e-mail address%0A- %60%60CHAT_TOKEN%60%60 - Received from Cisco Spark when you register your bot%0A- %60%60SERVER_URL%60%60 - Public link used by Cisco Spark to reach your server%0A%0AThe token is specific to your run-time, please visit Cisco Spark for%0ADevelopers to get more details:%0A%0A https://developer.ciscospark.com/%0A%0AFor example, if you run this script under Linux or macOs with support from%0Angrok for exposing services to the Internet::%0A%0A export CHAT_ROOM_MODERATORS=%22alice@acme.com%22%0A export CHAT_TOKEN=%22%3Ctoken id from Cisco Spark for Developers%3E%22%0A export SERVER_URL=%22http://1a107f21.ngrok.io%22%0A python hello.py%0A%0A%0A%22%22%22%0A%0Aimport logging%0Afrom multiprocessing import Process, Queue%0Aimport os%0A%0Afrom shellbot import ShellBot, Context, Command, Speaker%0Afrom shellbot.commands import Audit%0Afrom shellbot.spaces import SparkSpace%0Afrom shellbot.updaters import SpaceUpdater%0AContext.set_logger()%0A%0A# create an audit command%0A#%0Aaudit = Audit()%0A%0A# create a bot and load command%0A#%0Abot = ShellBot(command=audit)%0A%0A# load configuration%0A#%0Aos.environ%5B'CHAT_ROOM_TITLE'%5D = 'Audit tutorial'%0Abot.configure()%0A%0A# create a chat room%0A#%0Abot.bond(reset=True)%0A%0A# create a mirror chat room%0A#%0Amirror = SparkSpace(bot=bot)%0Amirror.connect()%0A%0Atitle = u%22%7B%7D - %7B%7D%22.format(%0A mirror.configured_title(), u%22Audited content%22)%0A%0Amirror.bond(title=title)%0A%0A# enable auditing%0A#%0Aaudit.arm(updater=SpaceUpdater(space=mirror))%0A%0A# run the bot%0A#%0Abot.run()%0A%0A# delete chat rooms when the bot is stopped%0A#%0Amirror.delete_space()%0Abot.dispose()%0A
|
|
0229c868a8f204759c76ecae92cecc4e7a6312b5
|
Add link module + Link class stub
|
flask_hal/link.py
|
flask_hal/link.py
|
Python
| 0
|
@@ -0,0 +1,679 @@
+#!/usr/bin/env python%0A# encoding: utf-8%0A%0A%22%22%22%0Aflask_hal.link%0A==============%0A%0AImplements the %60%60HAL%60%60 Link specification.%0A%22%22%22%0A%0A%0Aclass Link(object):%0A %22%22%22%0A %22%22%22%0A%0A def __init__(%0A self,%0A href,%0A name=None,%0A title=None,%0A link_type=None,%0A deprecation=None,%0A profile=None,%0A templated=None,%0A hreflang=None):%0A %22%22%22%0A %22%22%22%0A%0A self.href = href%0A self.name = name%0A self.title = title%0A self.link_type = link_type,%0A self.deprecation = deprecation%0A self.profile = profile%0A self.templated = templated%0A self.hreflang = hreflang%0A
|
|
34206ab96b5546e617b1478f59357a6a3f7ed8b6
|
Add a custom gunicorn Sync worker implementation that works around some issues occurring when eventlet monkey patching is used with the sync worker.
|
st2common/st2common/util/gunicorn_workers.py
|
st2common/st2common/util/gunicorn_workers.py
|
Python
| 0
|
@@ -0,0 +1,1568 @@
+# Licensed to the StackStorm, Inc ('StackStorm') under one or more%0A# contributor license agreements. See the NOTICE file distributed with%0A# this work for additional information regarding copyright ownership.%0A# The ASF licenses this file to You under the Apache License, Version 2.0%0A# (the %22License%22); you may not use this file except in compliance with%0A# the License. You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0Aimport sys%0A%0Afrom gunicorn.workers.sync import SyncWorker%0A%0A__all__ = %5B%0A 'EventletSyncWorker'%0A%5D%0A%0A%0Aclass EventletSyncWorker(SyncWorker):%0A %22%22%22%0A Custom sync worker for gunicorn which works with eventlet monkey patching.%0A%0A This worker class fixes %22AssertionError: do not call blocking functions from%0A the mainloop%22 and some other issues on SIGINT / SIGTERM.%0A %22%22%22%0A%0A def handle_quit(self, sig, frame):%0A try:%0A return super(EventletSyncWorker, self).handle_quit(sig=sig, frame=frame)%0A except AssertionError as e:%0A msg = str(e)%0A%0A if 'do not call blocking functions from the mainloop' in msg:%0A # Workaround for %22do not call blocking functions from the mainloop%22 issue%0A sys.exit(0)%0A%0A raise e%0A
|
|
1650824ef5886a12715bd0004e95ab3bf4dc5dfd
|
Add new package: phoenix (#18143)
|
var/spack/repos/builtin/packages/phoenix/package.py
|
var/spack/repos/builtin/packages/phoenix/package.py
|
Python
| 0
|
@@ -0,0 +1,745 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass Phoenix(Package):%0A %22%22%22Apache Phoenix is a SQL skin over HBase delivered as a client-embedded%0A JDBC driver targeting low latency queries over HBase data.%22%22%22%0A%0A homepage = %22https://github.com%22%0A git = %22https://github.com/apache/phoenix.git%22%0A%0A version('master', branch='master')%0A%0A depends_on('java@8:', type=('build', 'run'))%0A depends_on('maven', type='build')%0A%0A def install(self, spec, prefix):%0A mvn = which('mvn')%0A mvn('package', '-DskipTests')%0A install_tree('.', prefix)%0A
|
|
7b688747a02f9ac29d4f119bf142f7ef0ad805e9
|
Test for COM collections; IEnumVariant so far only.
|
comtypes/test/test_collections.py
|
comtypes/test/test_collections.py
|
Python
| 0
|
@@ -0,0 +1,2279 @@
+import unittest%0Afrom comtypes.client import CreateObject, GetModule #, Constants%0Afrom find_memleak import find_memleak%0Afrom ctypes import ArgumentError%0A%0Aclass Test(unittest.TestCase):%0A%0A def test_IEnumVARIANT(self):%0A # The XP firewall manager.%0A fwmgr = CreateObject('HNetCfg.FwMgr')%0A # apps has a _NewEnum property that implements IEnumVARIANT%0A apps = fwmgr.LocalPolicy.CurrentProfile.AuthorizedApplications%0A%0A self.failUnlessEqual(apps.Count, len(apps))%0A%0A cv = iter(apps)%0A%0A names = %5Bp.ProcessImageFileName for p in cv%5D%0A self.failUnlessEqual(len(apps), len(names))%0A%0A # The iterator is consumed now:%0A self.failUnlessEqual(%5Bp.ProcessImageFileName for p in cv%5D, %5B%5D)%0A%0A # But we can reset it:%0A cv.Reset()%0A self.failUnlessEqual(%5Bp.ProcessImageFileName for p in cv%5D, names)%0A%0A # Reset, then skip:%0A cv.Reset()%0A cv.Skip(3)%0A self.failUnlessEqual(%5Bp.ProcessImageFileName for p in cv%5D, names%5B3:%5D)%0A%0A # Reset, then skip:%0A cv.Reset()%0A cv.Skip(300)%0A self.failUnlessEqual(%5Bp.ProcessImageFileName for p in cv%5D, names%5B300:%5D)%0A%0A # Hm, do we want to allow random access to the iterator?%0A # Should the iterator support __getitem__ ???%0A self.failUnlessEqual(cv%5B0%5D.ProcessImageFileName, names%5B0%5D)%0A self.failUnlessEqual(cv%5B0%5D.ProcessImageFileName, names%5B0%5D)%0A self.failUnlessEqual(cv%5B0%5D.ProcessImageFileName, names%5B0%5D)%0A%0A if len(names) %3E 1:%0A self.failUnlessEqual(cv%5B1%5D.ProcessImageFileName, names%5B1%5D)%0A self.failUnlessEqual(cv%5B1%5D.ProcessImageFileName, names%5B1%5D)%0A self.failUnlessEqual(cv%5B1%5D.ProcessImageFileName, names%5B1%5D)%0A%0A # We can now call Next(celt) with celt != 1, the call always returns a list:%0A cv.Reset()%0A self.failUnlessEqual(names%5B:3%5D, %5Bp.ProcessImageFileName for p in cv.Next(3)%5D)%0A%0A # calling Next(0) makes no sense, but should work anyway:%0A self.failUnlessEqual(cv.Next(0), %5B%5D)%0A%0A cv.Reset()%0A self.failUnlessEqual(len(cv.Next(len(names) * 2)), len(names))%0A%0A # slicing is not (yet?) supported%0A cv.Reset()%0A self.failUnlessRaises(ArgumentError, lambda: cv%5B:%5D)%0A%0Aif __name__ == %22__main__%22:%0A unittest.main()%0A
|
|
e055cc963eb5eb05c2685ae75d1614c81ef3c59a
|
load qld data into elasticsearch
|
injest/search_index.py
|
injest/search_index.py
|
Python
| 0
|
@@ -0,0 +1,865 @@
+# -*- coding: utf-8 -*-%0A%0A%22%22%22%0AThis module contains the code for putting heritage places into a search index.%0A%0A%22%22%22%0A%0Afrom elasticsearch import Elasticsearch%0A%0AES_HOST = %22192.168.10.200%22%0A%0Aes = Elasticsearch(ES_HOST)%0A%0A%0Adef add_heritage_place(place):%0A %22%22%22Add a heritage place to the search index%0A%0A :param place: Dictionary defining a heritage place.%0A %22%22%22%0A try:%0A id = %22%25s-%25s%22 %25 (place%5B'state'%5D, place%5B'id'%5D)%0A result = es.index(index=%22eheritage%22, doc_type='heritage_place', id=id, body=place)%0A print result%0A except AttributeError as e:%0A print e%0A print place%0A return False%0A%0A return True%0A%0A%0A%0Aif __name__ == %22__main__%22:%0A from qld import parse_ahpi_xml%0A%0A hp_filename = %22/mnt/groups/maenad/activities/e-Heritage/QLD/heritage_list.xml%22%0A%0A result = parse_ahpi_xml(hp_filename, add_heritage_place)%0A%0A print result
|
|
7cc0f1cc461d6ccd4c191ec243184deab7f1028f
|
Create strings.py
|
strings/strings.py
|
strings/strings.py
|
Python
| 0.999865
|
@@ -0,0 +1 @@
+%0A
|
|
6917e3b8689ace77bda912f82163f0f91a1a881b
|
Solve conflict in DB schema for circle CI
|
digikey/migrations/0022_merge.py
|
digikey/migrations/0022_merge.py
|
Python
| 0.001334
|
@@ -0,0 +1,284 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('digikey', '0019_merge'),%0A ('digikey', '0021_auto_20151125_1353'),%0A %5D%0A%0A operations = %5B%0A %5D%0A
|
|
502e04b031036ea4506c85efb2903a85647627f8
|
remove logout_path from public_urls (CVE-2015-5963)
|
django_auth_policy/middleware.py
|
django_auth_policy/middleware.py
|
import logging
from django.core.urlresolvers import resolve, reverse
from django.views.decorators.csrf import requires_csrf_token
from django.conf import settings
from django import http
from django_auth_policy.handlers import PasswordChangePolicyHandler
from django_auth_policy.forms import StrictPasswordChangeForm
from django_auth_policy.password_change import update_password, password_changed
from django_auth_policy.settings import (PASSWORD_CHANGE_VIEW_NAME,
LOGIN_VIEW_NAME,
LOGOUT_VIEW_NAME,
PUBLIC_URLS,
LOGOUT_AFTER_PASSWORD_CHANGE)
logger = logging.getLogger(__name__)
class AuthenticationPolicyMiddleware(object):
""" This middleware enforces the following policy:
- Change of password when password has expired;
- Change of password when user has a temporary password;
- Logout disabled users;
This is enforced using middleware to prevent users from accessing any page
handled by Django without the policy being enforced.
"""
change_password_path = reverse(PASSWORD_CHANGE_VIEW_NAME)
login_path = reverse(LOGIN_VIEW_NAME)
logout_path = reverse(LOGOUT_VIEW_NAME)
password_change_policy_handler = PasswordChangePolicyHandler()
def process_request(self, request):
assert hasattr(request, 'user'), (
'AuthenticationPolicyMiddleware needs a user attribute on '
'request, add AuthenticationMiddleware before '
'AuthenticationPolicyMiddleware in MIDDLEWARE_CLASSES')
# This middleware does nothing for unauthenticated users
if not request.user.is_authenticated():
return None
# Check if users' password has been changed, and then logout user.
# To prevent logout at password change views call the
# `update_password` function in that view
if not 'password_hash' in request.session:
update_password(request.session, request.user)
# Log out disabled users
if not request.user.is_active:
logger.info('Log out inactive user, user=%s', request.user)
return self.logout(request)
# Do not do password change for certain URLs
if request.path in (self.change_password_path, self.login_path,
self.logout_path):
return None
# Check for 'enforce_password_change' in session set by login view
if request.session.get('password_change_enforce', False):
return self.password_change(request)
return None
def process_response(self, request, response):
if not hasattr(request, 'user') or not request.user.is_authenticated():
return response
# When password change is enforced, check if this is still required
# for next request
if request.session.get('password_change_enforce', False):
self.password_change_policy_handler.update_session(
request, request.user)
# Check if users' password has been changed, and then logout user.
# To prevent logout at password change views call the
# `update_password` function in that view
# Ignore non 2xx responses (e.g. redirects).
if (response.status_code >= 200 and
response.status_code < 300 and
LOGOUT_AFTER_PASSWORD_CHANGE and
password_changed(request.session, request.user)):
logger.info('Logout session because user changed its password')
return self.logout(request)
return response
def password_change(self, request):
""" Return 'password_change' view.
This resolves the view with the name 'password_change'.
Overwrite this method when needed.
"""
view_func, args, kwargs = resolve(self.change_password_path)
if 'password_change_form' in kwargs:
assert issubclass(kwargs['password_change_form'],
StrictPasswordChangeForm), (
"Use django_auth_policy StrictPasswordChangeForm for password "
"changes.")
# Provide extra context to be used in the password_change template
if 'extra_context' in kwargs:
kwargs['extra_context']['password_change_enforce'] = \
request.session.get('password_change_enforce')
kwargs['extra_context']['password_change_enforce_msg'] = \
request.session.get('password_change_enforce_msg')
# Run 'requires_csrf_token' because CSRF middleware might have been
# skipped over here
resp = requires_csrf_token(view_func)(request, *args, **kwargs)
update_password(request.session, request.user)
return resp
def logout(self, request):
view_func, args, kwargs = resolve(self.logout_path)
return view_func(request, *args, **kwargs)
class LoginRequiredMiddleware(object):
""" Middleware which enforces authentication for all requests.
"""
LOGIN_NOT_REQUIRED_MARKER = 'django_auth_policy__login_not_required'
login_path = reverse(LOGIN_VIEW_NAME)
logout_path = reverse(LOGOUT_VIEW_NAME)
public_urls = list(PUBLIC_URLS)
public_urls.append(login_path)
public_urls.append(logout_path)
def process_view(self, request, view_func, view_args, view_kwargs):
if not hasattr(request, 'user'):
raise Exception('Install Authentication middleware before '
'LoginRequiredMiddleware')
if request.user.is_authenticated():
return None
# Do not require authentication for certain URLs
if request.path in self.public_urls:
return None
# Per-view exceptions
if getattr(view_func, self.LOGIN_NOT_REQUIRED_MARKER, False):
return None
# Django should not serve STATIC files in production, but for
# DEBUG mode this should be no problem (development)
if (settings.STATIC_URL and
request.path.startswith(settings.STATIC_URL)):
if settings.DEBUG:
return None
else:
return http.HttpResponse('Unauthenticated', status=401)
# When serving MEDIA files through Django we will not display a login
# form, but instead return HTTP 401, but for DEBUG mode this should be
# no problem (development)
if (settings.MEDIA_URL and
request.path.startswith(settings.MEDIA_URL)):
if settings.DEBUG:
return None
else:
return http.HttpResponse('Unauthenticated', status=401)
# Ajax views should not display a login form, we use HTTP 401 to
# indicate an unauthorized request, like a session timeout
if request.is_ajax():
return http.HttpResponse('Unauthenticated', status=401)
view_func, args, kwargs = resolve(self.login_path)
return requires_csrf_token(view_func)(request, *args, **kwargs)
def login_not_required(view):
"""Decorator to bypass LoginRequiredMiddleware for a view."""
setattr(view, LoginRequiredMiddleware.LOGIN_NOT_REQUIRED_MARKER, True)
return view
|
Python
| 0
|
@@ -5358,44 +5358,8 @@
ath)
-%0A public_urls.append(logout_path)
%0A%0A
|
5155312d5ce092cc0e1e6e8724ae2d4f0e03cb04
|
Add pylint disable for alembic in CA migration
|
src/ggrc/migrations/versions/20160208143508_1839dabd2357_custom_attributes_should_be_unique.py
|
src/ggrc/migrations/versions/20160208143508_1839dabd2357_custom_attributes_should_be_unique.py
|
# Copyright (C) 2015 Reciprocity, Inc - All Rights Reserved
# Unauthorized use, copying, distribution, displaying, or public performance
# of this file, via any medium, is strictly prohibited. All information
# contained herein is proprietary and confidential and may not be shared
# with any third party without the express written consent of Reciprocity, Inc.
# Created By: rok@reciprocitylabs.com
# Maintained By: rok@reciprocitylabs.com
"""Custom attributes should be unique
Revision ID: 1839dabd2357
Revises: 46a791604e98
Create Date: 2015-12-07 15:33:08.728216
"""
from alembic import op
from sqlalchemy.exc import IntegrityError
from sqlalchemy.sql import text
# revision identifiers, used by Alembic.
revision = '1839dabd2357'
down_revision = '4e989ef86619'
def upgrade():
"""Custom attributes have to be unique, so we find all of those that aren't,
i.e. they have the same title and definition type and apply a consecutive
index. Also deleting an attribute should cascade delete values."""
conn = op.get_bind()
# first, find potential duplicates
find_duplicates_query = """
select ca.id, ca.title, ca.definition_type
from custom_attribute_definitions ca inner join
custom_attribute_definitions ca2 on (
ca.title = ca2.title and
ca.definition_type = ca2.definition_type and
ca.id != ca2.id)
group by ca.id
order by ca.title;
"""
result = conn.execute(find_duplicates_query)
duplicate_records = result.fetchall()
attributes = [dict(zip(result.keys(), r)) for r in duplicate_records]
# now, let's create a list of all the updates that we need to do
previous_value = ""
index = 0
unique_count_query = ("SELECT COUNT(*) FROM custom_attribute_definitions "
"WHERE title = :title AND "
"definition_type = :definition_type")
records_to_insert = {}
for a in attributes:
lowercase_title = a['title'].lower() # mysql comparison is case insensitive (!) # noqa
if previous_value != lowercase_title:
previous_value = lowercase_title
index = 0
while True:
index += 1
new_title = "{} {}".format(a['title'], index)
# we still need to check that the newly generated title does not collide
# with an existing record in the db
existing_records_alike = conn.execute(
text(unique_count_query), title=new_title,
definition_type=a['definition_type']
).first()[0]
if existing_records_alike == 0 and \
records_to_insert.get((new_title, a['definition_type'])) is None:
break
if index > 1000:
# for more than 1000 duplicates we raise error and leave it to the user
print '-----------------------------------------------------'
print 'More than 1000 duplicates for title {} and definition_type {}'\
.format(a['title'], a['definition_type'])
raise StandardError
records_to_insert[(new_title, a['definition_type'])] = (a['id'], new_title)
# now, do the updates in a transaction
update_record_query = ("UPDATE custom_attribute_definitions SET title = "
":title WHERE id = :id")
try:
transaction = conn.begin()
for _, v in records_to_insert.items():
conn.execute(text(update_record_query), title=v[1], id=v[0])
transaction.commit()
except IntegrityError as error:
print '---------------------------------------------'
print(error)
transaction.rollback()
raise StandardError
# finally, create unique constraints for future on now resolved duplicates
op.create_unique_constraint(
"uq_custom_attribute", "custom_attribute_definitions",
["title", "definition_type"])
# make custom attributes cascade delete values
sql1 = """
alter table custom_attribute_values
drop foreign key custom_attribute_values_ibfk_1"""
sql2 = """
alter table custom_attribute_values
add constraint custom_attribute_values_ibfk_1
foreign key (custom_attribute_id)
references custom_attribute_definitions (id)
on delete cascade"""
op.execute(sql1)
op.execute(sql2)
def downgrade():
op.drop_constraint("uq_custom_attribute", "custom_attribute_definitions",
type_="unique")
sql1 = """
alter table custom_attribute_values
drop foreign key custom_attribute_values_ibfk_1"""
sql2 = """
alter table custom_attribute_values
add constraint custom_attribute_values_ibfk_1
foreign key (custom_attribute_id)
references custom_attribute_definitions (id)"""
op.execute(sql1)
op.execute(sql2)
|
Python
| 0
|
@@ -666,16 +666,78 @@
t text%0A%0A
+# mandatory alembic variables%0A# pylint: disable=invalid-name%0A%0A
# revisi
|
875ca73c4cab4c3d036f824c3942589f0ced935f
|
Test password update
|
tests/services/authentication/test_update_password_hash.py
|
tests/services/authentication/test_update_password_hash.py
|
Python
| 0.000001
|
@@ -0,0 +1,1637 @@
+%22%22%22%0A:Copyright: 2006-2017 Jochen Kupperschmidt%0A:License: Modified BSD, see LICENSE for details.%0A%22%22%22%0A%0Afrom byceps.services.authentication.password.models import Credential%0Afrom byceps.services.authentication.password import service as password_service%0Afrom byceps.services.user import event_service%0A%0Afrom tests.base import AbstractAppTestCase%0A%0A%0Aclass UpdatePasswordHashTest(AbstractAppTestCase):%0A%0A def test_update_password_hash(self):%0A orga_id = self.create_user('Party_Orga').id%0A%0A user = self.create_user('Party_User')%0A user_id = user.id%0A password_service.create_password_hash(user_id, 'InitialPassw0rd')%0A%0A password_hash_before = get_password_hash(user_id)%0A assert password_hash_before is not None%0A%0A events_before = event_service.get_events_for_user(user_id)%0A assert len(events_before) == 0%0A%0A # -------------------------------- #%0A%0A password_service.update_password_hash(user_id, 'ReplacementPassw0rd', orga_id)%0A%0A # -------------------------------- #%0A%0A password_hash_after = get_password_hash(user_id)%0A assert password_hash_after is not None%0A assert password_hash_after != password_hash_before%0A%0A events_after = event_service.get_events_for_user(user_id)%0A assert len(events_after) == 1%0A%0A password_updated_event = events_after%5B0%5D%0A assert password_updated_event.event_type == 'password-updated'%0A assert password_updated_event.data == %7B%0A 'initiator_id': str(orga_id),%0A %7D%0A%0A%0Adef get_password_hash(user_id):%0A credential = Credential.query.get(user_id)%0A return credential.password_hash%0A
|
|
4b040fd365c2a8609b2943525b87c2e27924eb19
|
add novaclient sample
|
test-novaclient.py
|
test-novaclient.py
|
Python
| 0
|
@@ -0,0 +1,664 @@
+from novaclient.v1_1.client import Client%0A%0Acontext = %7B%0A 'user': 'admin',%0A 'auth_token': '22b07d939fa544769bf625753ecaec18',%0A 'tenant': '7dfd3b6a98664f7cb78808f57b7984da',%0A%7D%0A%0A# nova_compute_url%0Aurl = 'http://192.168.242.10:8774/v2/7dfd3b6a98664f7cb78808f57b7984da'%0APROXY_AUTH_URL = 'http://192.168.242.10:5000/v2.0'%0A%0Aclient = Client(username=context%5B'user'%5D,%0A api_key=context%5B'auth_token'%5D,%0A project_id=context%5B'tenant'%5D,%0A bypass_url=url,%0A auth_url=PROXY_AUTH_URL)%0Aclient.client.auth_token = context%5B'auth_token'%5D%0Aclient.client.management_url = url%0A%0Aprint dir(client)%0Aprint client.flavors.list()%0A
|
|
1d355343700bd74ebc2944053e5086a22c1c0b5c
|
add post-commit.py
|
git/post-commit.py
|
git/post-commit.py
|
Python
| 0
|
@@ -0,0 +1,1560 @@
+#!/usr/bin/python%0Aimport os%0Aimport re%0Aimport subprocess as sp%0A%0A%0Adef up_version(ver, idx):%0A ver%5Bidx%5D += 1%0A return ver%5B:idx + 1%5D + %5B0%5D * (3 - idx)%0A%0Acommands = %7B%0A '#vtwe++': lambda ver: up_version(ver, 3),%0A '#vpat++': lambda ver: up_version(ver, 2),%0A '#vmin++': lambda ver: up_version(ver, 1),%0A '#vmaj++': lambda ver: up_version(ver, 0),%0A%7D%0A%0Amessage = sp.check_output(%5B'git', 'log', '-1', 'HEAD', '--pretty=format:%25s'%5D)%0A%0Aget_new_version = None%0Afor cmd, func in commands.items():%0A if message.find(cmd) != -1:%0A get_new_version = func%0A break%0Aif get_new_version is None:%0A exit()%0A%0Aroot_dir = sp.check_output(%5B'git', 'rev-parse', '--show-toplevel'%5D).strip()%0Acmake_path = os.path.join(root_dir, 'CMakeLists.txt')%0A%0Awith open(cmake_path) as cmake_file:%0A cmake_lines = cmake_file.readlines()%0A%0Aversion_str = ''%0Aproject_line_idx = 0%0A%0Afor idx, line in enumerate(cmake_lines):%0A project_found = re.search('%5Eproject%5C(.*%5C)', line)%0A if project_found is None:%0A continue%0A project_line = project_found.group(0)%0A version_found = re.search('VERSION %5Cd+.%5Cd+.%5Cd+.%5Cd+', project_line)%0A if version_found is None:%0A continue%0A version_str = version_found.group(0).replace('VERSION ', '')%0A project_line_idx = idx%0A break%0A%0Aif version_str:%0A version = %5Bint(v) for v in version_str.split('.')%5D%0A new_version = %5Bstr(v) for v in get_new_version(version)%5D%0A cmake_lines%5Bproject_line_idx%5D.replace(version_str, '.'.join(new_version))%0A%0Awith open(cmake_path, 'w') as cmake_file:%0A cmake_file.write(''.join(cmake_lines))%0A%0A
|
|
b778e3438909d290ce88bbb0d187aa793652ff5e
|
Create euler2.py
|
euler2.py
|
euler2.py
|
Python
| 0.000001
|
@@ -0,0 +1 @@
+%0A
|
|
fe7e82257703e3c5773d3e321435cb4443eed46d
|
Add gunicorn configuration file
|
gunicorn_config.py
|
gunicorn_config.py
|
Python
| 0
|
@@ -0,0 +1,214 @@
+import multiprocessing, os.path%0A%0A%0Acommand = '/usr/local/bin/gunicorn'%0Apythonpath = os.path.dirname( os.path.abspath(__file__) )%0Abind = '127.0.0.1:9000'%0Aworkers = multiprocessing.cpu_count() * 2 + 1%0Auser = 'nobody'%0A
|
|
91370b54e4a974fe51f563edda75e2738d3fb00c
|
Add noise sensor
|
haffmpeg/sensor.py
|
haffmpeg/sensor.py
|
Python
| 0.000004
|
@@ -0,0 +1,859 @@
+%22%22%22For HA camera components.%22%22%22%0Afrom .core import HAFFmpegQue%0A%0A%0Aclass SensorNoise(HAFFmpegQue):%0A %22%22%22Implement a noise detection on a autio stream.%22%22%22%0A%0A def __init__(self, ffmpeg_bin):%0A %22%22%22Init CameraMjpeg.%22%22%22%0A HAFFmpegQue.__init__(self, ffmpeg_bin=ffmpeg_bin)%0A%0A self._peak = -30%0A self._time_period = 2%0A%0A @property%0A def peak(self, val):%0A self._peak = val%0A%0A @property%0A def time_period(self, val):%0A self._time_period = val%0A%0A def open_sensor(self, input_source, output_dest=None, extra_cmd=None):%0A %22%22%22Open FFmpeg process as mjpeg video stream.%22%22%22%0A command = %5B%0A %22-i%22,%0A input_source,%0A %22-vn%22,%0A %22-c:v%22,%0A %22mjpeg%22,%0A %22-f%22,%0A %22mpjpeg%22%0A %5D%0A%0A self.open(cmd=command, output=output_dest, extra_cmd=extra_cmd)%0A
|
|
65f05e93edc2e9a7033edb8d54bd25b04c32d084
|
test script..
|
test_elemaccess.py
|
test_elemaccess.py
|
Python
| 0
|
@@ -0,0 +1,165 @@
+from cern import cpymad%0Alhc=cpymad.model('lhc')%0Aprint lhc.get_sequences()%0Aall_elements=lhc.get_element_list('lhcb1')%0Aprint lhc.get_element('lhcb1',all_elements%5B3%5D)%0A%0A
|
|
10ac7ea7a67b7a15146ae7c9c0ba9ba74876df81
|
Add a testcase for RubyGems
|
tests/test_gems.py
|
tests/test_gems.py
|
Python
| 0.000005
|
@@ -0,0 +1,205 @@
+from tests.helper import ExternalVersionTestCase%0A%0A%0Aclass RubyGemsTest(ExternalVersionTestCase):%0A def test_gems(self):%0A self.assertEqual(self.sync_get_version(%22example%22, %7B%22gems%22: None%7D), %221.0.2%22)%0A
|
|
e9115cb3c52386dc7b74b4d06070b44697725811
|
Add manage.py
|
tests/manage.py
|
tests/manage.py
|
Python
| 0.000001
|
@@ -0,0 +1,219 @@
+import os%0Aimport sys%0A%0Aif __name__ == %22__main__%22:%0A os.environ.setdefault(%22DJANGO_SETTINGS_MODULE%22, %22config%22)%0A%0A from django.core.management import execute_from_command_line%0A%0A execute_from_command_line(sys.argv)%0A%0A
|
|
9eaf735bcdba9f3fd8a51219d0eebb61f8b2f166
|
add 20.6.7
|
var/spack/repos/builtin/packages/py-setuptools/package.py
|
var/spack/repos/builtin/packages/py-setuptools/package.py
|
from spack import *
class PySetuptools(Package):
"""Easily download, build, install, upgrade, and uninstall Python packages."""
homepage = "https://pypi.python.org/pypi/setuptools"
url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz"
version('20.5', 'fadc1e1123ddbe31006e5e43e927362b')
version('19.2', '78353b1f80375ca5e088f4b4627ffe03')
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
version('16.0', '0ace0b96233516fc5f7c857d086aa3ad')
version('11.3.1', '01f69212e019a2420c1693fb43593930')
extends('python')
def install(self, spec, prefix):
python('setup.py', 'install', '--prefix=%s' % prefix)
|
Python
| 0.000002
|
@@ -277,16 +277,74 @@
ar.gz%22%0A%0A
+ version('20.6.7', '45d6110f3ec14924e44c33411db64fe6')%0A
vers
|
bcc48ecba38450bd77b5dcce7ae60d2966cbd7d2
|
Migrate object_sections to relationships
|
src/ggrc/migrations/versions/20150521150652_23880aa43323_migrate_object_sections_to_relationships.py
|
src/ggrc/migrations/versions/20150521150652_23880aa43323_migrate_object_sections_to_relationships.py
|
Python
| 0.000103
|
@@ -0,0 +1,1490 @@
+# Copyright (C) 2015 Google Inc., authors, and contributors %3Csee AUTHORS file%3E%0A# Licensed under http://www.apache.org/licenses/LICENSE-2.0 %3Csee LICENSE file%3E%0A# Created By: anze@reciprocitylabs.com%0A# Maintained By: anze@reciprocitylabs.com%0A%0A%22%22%22Migrate object_sections to relationships%0A%0ARevision ID: 23880aa43323%0ARevises: 324d461206%0ACreate Date: 2015-05-21 15:06:52.172183%0A%0A%22%22%22%0A%0Afrom alembic import op%0A%0A# revision identifiers, used by Alembic.%0Arevision = '23880aa43323'%0Adown_revision = '324d461206'%0A%0A%0Adef upgrade():%0A sql = %22%22%22%0A REPLACE INTO relationships (%0A modified_by_id, created_at, updated_at, source_id,%0A source_type, destination_id, destination_type, context_id%0A )%0A SELECT os.modified_by_id, os.created_at, os.updated_at,%0A os.section_id as source_id, s.type as source_type,%0A os.sectionable_id as destination_id,%0A os.sectionable_type as destination_type, os.context_id%0A FROM object_sections as os JOIN sections as s ON os.section_id = s.id;%0A %22%22%22%0A op.execute(sql)%0A op.drop_constraint(%0A 'object_sections_ibfk_1', 'object_sections', type_='foreignkey')%0A op.drop_constraint(%0A 'object_sections_ibfk_2', 'object_sections', type_='foreignkey')%0A%0A%0Adef downgrade():%0A op.create_foreign_key(%0A 'object_sections_ibfk_1',%0A 'object_sections',%0A 'contexts',%0A %5B'context_id'%5D,%0A %5B'id'%5D%0A )%0A op.create_foreign_key(%0A 'object_sections_ibfk_2',%0A 'object_sections',%0A 'sections',%0A %5B'section_id'%5D,%0A %5B'id'%5D%0A )%0A
|
|
2f0bf45ec747778d38801892e97d5a902443841d
|
Define "Assessment updated" notification type
|
src/ggrc/migrations/versions/20170207134238_562ec606ff7c_add_assessment_updated_notification_type.py
|
src/ggrc/migrations/versions/20170207134238_562ec606ff7c_add_assessment_updated_notification_type.py
|
Python
| 0.000001
|
@@ -0,0 +1,1582 @@
+# Copyright (C) 2017 Google Inc.%0A# Licensed under http://www.apache.org/licenses/LICENSE-2.0 %3Csee LICENSE file%3E%0A%0A%22%22%22%0AAdd Assessment updated notification type%0A%0ACreate Date: 2017-02-07 13:42:38.921370%0A%22%22%22%0A# disable Invalid constant name pylint warning for mandatory Alembic variables.%0A# pylint: disable=invalid-name%0A%0Afrom datetime import datetime%0A%0Afrom alembic import op%0A%0A%0A# revision identifiers, used by Alembic.%0Arevision = '562ec606ff7c'%0Adown_revision = '6e9a3ed063d2'%0A%0A%0Adef upgrade():%0A %22%22%22Add new notification type: Assessment updated.%22%22%22%0A description = (%0A %22Send an Assessment updated notification to %22%0A %22Assessors, Creators and Verifiers.%22%0A )%0A%0A now = datetime.utcnow().strftime(%22%25Y-%25m-%25d %25H-%25M-%25S%22)%0A%0A sql = %22%22%22%0A INSERT INTO notification_types (%0A name,%0A description,%0A template,%0A advance_notice,%0A instant,%0A created_at,%0A updated_at%0A )%0A VALUES (%0A %22assessment_updated%22,%0A %22%7Bdescription%7D%22,%0A %22assessment_updated%22,%0A 0,%0A FALSE,%0A '%7Bnow%7D',%0A '%7Bnow%7D'%0A )%0A %22%22%22.format(description=description, now=now)%0A%0A op.execute(sql)%0A%0A%0Adef downgrade():%0A %22%22%22Remove the %22Assessment updated%22 notification type.%0A%0A Also delete all notifications of that type.%0A %22%22%22%0A sql = %22%22%22%0A DELETE n%0A FROM notifications AS n%0A LEFT JOIN notification_types AS nt ON%0A n.notification_type_id = nt.id%0A WHERE%0A nt.name = %22assessment_updated%22%0A %22%22%22%0A op.execute(sql)%0A%0A sql = %22%22%22%0A DELETE%0A FROM notification_types%0A WHERE name = %22assessment_updated%22%0A %22%22%22%0A op.execute(sql)%0A
|
|
3ec80d1a0de750af7831cca5a29294558600e88f
|
Add python_bootstrap.py
|
xunit-autolabeler-v2/ast_parser/python/python_bootstrap.py
|
xunit-autolabeler-v2/ast_parser/python/python_bootstrap.py
|
Python
| 0.999875
|
@@ -0,0 +1,977 @@
+# Copyright 2020 Google LLC.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0A%0Aimport os%0Aimport sys%0A%0Afrom python import invoker%0A%0Aif len(sys.argv) != 2:%0A raise ValueError('Please specify exactly one %5Broot%5D directory.')%0A%0Aroot_dir = sys.argv%5B1%5D%0Aoutput_file = os.path.join(root_dir, 'repo.json')%0A%0Ajson_out = invoker.get_json_for_dir(root_dir)%0Awith open(output_file, 'w') as f:%0A f.write(json_out + '%5Cn')%0A%0Aprint(f'JSON written to: %7Boutput_file%7D')%0Aprint('Do not move this file!')%0A
|
|
3ccc8357d5d5466acc97f1b065f500e9b096eeb7
|
add exception for cursor
|
addons/mrp/wizard/wizard_procurement.py
|
addons/mrp/wizard/wizard_procurement.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import wizard
import threading
import pooler
parameter_form = '''<?xml version="1.0"?>
<form string="Parameters" colspan="4">
<label string="This wizard will schedule procurements." colspan="4" align="0.0"/>
</form>'''
parameter_fields = {
}
def _procure_calculation_procure(self, db_name, uid, data, context):
db, pool = pooler.get_db_and_pool(db_name)
cr = db.cursor()
proc_obj = pool.get('mrp.procurement')
proc_obj._procure_confirm(cr, uid, use_new_cursor=cr.dbname, context=context)
return {}
def _procure_calculation(self, cr, uid, data, context):
threaded_calculation = threading.Thread(target=_procure_calculation_procure, args=(self, cr.dbname, uid, data, context))
threaded_calculation.start()
return {}
class procurement_compute(wizard.interface):
states = {
'init': {
'actions': [],
'result': {'type': 'form', 'arch':parameter_form, 'fields': parameter_fields, 'state':[('end','Cancel'),('compute','Compute Procurements') ]}
},
'compute': {
'actions': [_procure_calculation],
'result': {'type': 'state', 'state':'end'}
},
}
procurement_compute('mrp.procurement.compute')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Python
| 0
|
@@ -1370,16 +1370,29 @@
ursor()%0A
+ try:%0A
proc
@@ -1426,16 +1426,20 @@
ement')%0A
+
proc
@@ -1512,16 +1512,48 @@
ontext)%0A
+ finally:%0A cr.close()%0A
retu
|
f1145fc98f825b20452dd924d287349b4a8ae628
|
Update templates.py
|
infra/templates.py
|
infra/templates.py
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
JENKINS_TEMPLATE = """\
// Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
def libfuzzerBuild = fileLoader.fromGit('infra/libfuzzer-pipeline.groovy',
'https://github.com/google/oss-fuzz.git')
libfuzzerBuild {
git = "put git url here"
}
"""
DOCKER_TEMPLATE = """\
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
FROM ossfuzz/base-libfuzzer
MAINTAINER your@email.com
RUN apt-get install -y make autoconf automake libtool
RUN git checkout put git url here
COPY build.sh /src/
"""
BUILD_TEMPLATE = """\
#!/bin/bash -eu
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
cd /src/%s
# build the target.
# e.g.
#
# ./autogen.sh
# ./configure
# make clean all
# build your fuzzer(s)
# e.g.
# $CXX $CXXFLAGS -std=c++11 -Iinclude \\
# /path/to/name_of_fuzzer.cc -o /out/name_of_fuzzer \\
# -lfuzzer /path/to/library.a $FUZZER_LDFLAGS
"""
|
Python
| 0.000001
|
@@ -2364,32 +2364,56 @@
it c
-heckout put git url here
+lone %3Cgit_url%3E # or use other version control
%0ACOP
|
7318cadbb63cd2077ba73dd16cade2165a702c8a
|
Change client side tests to work with functions.py
|
distarray/tests/test_umath.py
|
distarray/tests/test_umath.py
|
"""
Tests for distarray ufuncs.
Many of these tests require a 4-engine cluster to be running locally.
"""
import unittest
import warnings
import numpy as np
from IPython.parallel import Client
from distarray.context import Context
from numpy.testing import assert_array_equal
def add_checkers(cls, ops, checker_name):
"""Helper function to dynamically add a list of tests.
Add tests to cls for each op in ops. Where checker_name is
the name of the test you want to call on each op. So we add:
TestCls.test_op_name(): return op_checker(op_name)
for each op.
"""
op_checker = getattr(cls, checker_name)
def check(op_name):
return lambda self: op_checker(self, op_name)
for op_name in ops:
op_test_name = 'test_' + op_name
setattr(cls, op_test_name, check(op_name))
class TestDistArrayUfuncs(unittest.TestCase):
"""Test ufuncs operating on distarrays"""
@classmethod
def setUpClass(cls):
cls.client = Client()
cls.context = Context(cls.client)
# Standard data
cls.a = np.arange(1, 99)
cls.b = np.ones_like(cls.a)*2
# distributed array data
cls.da = cls.context.fromndarray(cls.a)
cls.db = cls.context.fromndarray(cls.b)
@classmethod
def tearDownClass(cls):
cls.client.close()
def check_binary_op(self, op_name):
"""Check binary operation for success.
Check the two- and three-arg ufunc versions as well as the
method version attached to a LocalArray.
"""
op = getattr(self.context, op_name)
ufunc = getattr(np, op_name)
with warnings.catch_warnings():
# ignore inf, NaN warnings etc.
warnings.simplefilter("ignore", category=RuntimeWarning)
expected = ufunc(self.a, self.b, casting='unsafe')
result = op(self.da, self.db, casting='unsafe')
assert_array_equal(result.toarray(), expected)
def check_unary_op(self, op_name):
"""Check unary operation for success.
Check the two- and three-arg ufunc versions as well as the
method version attached to a LocalArray.
"""
op = getattr(self.context, op_name)
ufunc = getattr(np, op_name)
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=RuntimeWarning)
expected = ufunc(self.a, casting='unsafe')
result = op(self.da, casting='unsafe')
assert_array_equal(result.toarray(), expected)
class TestSpecialMethods(unittest.TestCase):
"""Test the __methods__"""
@classmethod
def setUpClass(cls):
cls.client = Client()
cls.context = Context(cls.client)
# Standard data
cls.a = np.arange(1, 33)
cls.b = np.ones_like(cls.a)*2
# distributed array data
cls.da = cls.context.fromndarray(cls.a)
cls.db = cls.context.fromndarray(cls.b)
@classmethod
def tearDownClass(cls):
cls.client.close()
def check_op(self, op_name):
distop = getattr(self.da, op_name)
numpyop = getattr(self.a, op_name)
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=RuntimeWarning)
result = distop(self.db)
expected = numpyop(self.b)
assert_array_equal(result.toarray(), expected)
unary_ops = ('absolute', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan',
'arctanh', 'conjugate', 'cos', 'cosh', 'exp', 'expm1', 'log',
'log10', 'log1p', 'negative', 'reciprocal', 'rint', 'sign', 'sin',
'sinh', 'sqrt', 'square', 'tan', 'tanh', 'invert')
binary_ops = ('add', 'arctan2', 'divide', 'floor_divide', 'fmod', 'hypot',
'multiply', 'power', 'remainder', 'subtract', 'true_divide',
'less', 'less_equal', 'equal', 'not_equal', 'greater',
'greater_equal', 'mod', 'bitwise_and', 'bitwise_or',
'bitwise_xor', 'left_shift', 'right_shift',)
binary_special_methods = ('__lt__', '__le__', '__eq__', '__ne__', '__gt__',
'__ge__', '__add__', '__sub__', '__mul__',
'__floordiv__', '__mod__', '__pow__', '__radd__',
'__rsub__', '__rmul__', '__rfloordiv__', '__rmod__',
'__rpow__', '__rrshift__', '__rlshift__',
'__rand__', '__rxor__', '__ror__', '__lshift__',
'__rshift__', '__and__', '__xor__', '__or__',)
# There is no divmod function in numpy. And there is no __div__
# attribute on ndarrays.
problematic_special_methods = ('__divmod__', '__rdivmod__', '__div__')
add_checkers(TestDistArrayUfuncs, binary_ops, 'check_binary_op')
add_checkers(TestDistArrayUfuncs, unary_ops, 'check_unary_op')
add_checkers(TestSpecialMethods, binary_special_methods, 'check_op')
if __name__ == '__main__':
unittest.main(verbosity=2)
|
Python
| 0
|
@@ -133,16 +133,17 @@
arnings%0A
+%0A
import n
@@ -153,16 +153,61 @@
y as np%0A
+from numpy.testing import assert_array_equal%0A
from IPy
@@ -234,20 +234,23 @@
Client%0A
-from
+%0Aimport
distarr
@@ -255,76 +255,38 @@
rray
-.context import Context%0Afrom numpy.testing import assert_array_equal
+%0Afrom distarray import Context
%0A%0A%0Ad
@@ -1107,10 +1107,10 @@
(1,
-99
+11
)%0A
@@ -1581,36 +1581,33 @@
p = getattr(
-self.context
+distarray
, op_name)%0A
@@ -2213,20 +2213,17 @@
ttr(
-self.context
+distarray
, op
@@ -2786,10 +2786,10 @@
(1,
-33
+11
)%0A
|
4efc039389b9aab3813a3815a58281c898f1acfd
|
Create user_exceptions.py
|
user_exceptions.py
|
user_exceptions.py
|
Python
| 0.000005
|
@@ -0,0 +1,764 @@
+#/bin/python%0A# Python program for playing around with user defined exceptions%0A%0Aclass TooSmallError(Exception):%0A message = %22Too small! Try again ;)%22%0A%0Aclass TooBigError(Exception):%0A message = %22Too big! Try again ;)%22%0A %0Aclass ExactError(Exception):%0A def __init__(self):%0A print %22HAHAHA You hit the trap%22%0A%09 %0Aclass unhandledError(Exception):pass%0A%0Adef checkNumber(num):%0A if(num %3C= 4):%0A raise TooSmallError%0A elif(num %3E= 7):%0A raise TooBigError%0A elif(num == 5):%0A raise ExactError%0A return num%0A %0Awhile 1:%0A try:%0A usrInpt = int(raw_input(%22Enter the magic number: %22))%0A print checkNumber(usrInpt)%0A except TooSmallError, e:%0A print e.message%0A except TooBigError, e:%0A print e.message%0A except ExactError, e:%0A print e.message%0A else:%0A break%0A %0A
|