function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def RunSteps(api):
target = api.target('fuchsia-arm64')
assert not target.is_win
assert not target.is_linux
assert not target.is_mac
assert api.target.host.is_host
assert target != api.target.host
assert target != 'foo'
step_result = api.step('platform things', cmd=None)
step_result.presentation.logs[... | ric2b/Vivaldi-browser | [
131,
27,
131,
3,
1490828945
] |
def end_headers(self):
    """Emit cache-suppressing headers, then finish via the base handler."""
    # Cover HTTP/1.1 (Cache-Control), HTTP/1.0 (Pragma) and proxy expiry.
    no_cache_headers = (
        ("Cache-Control", "no-cache, no-store, must-revalidate"),
        ("Pragma", "no-cache"),
        ("Expires", "0"),
    )
    for header_name, header_value in no_cache_headers:
        self.send_header(header_name, header_value)
    super().end_headers()
21,
16,
21,
3,
1435959644
] |
def __init__(self, *args: Any) -> None:
    """Initialize the test case with a PublishAPISection bound to a dummy URL."""
    super().__init__(*args)
    self.papi = PublishAPISection("http://test/")
    # Show full diffs on assertion failures instead of truncating.
    self.maxDiff = None
19,
16,
19,
7,
1496180979
] |
def test_update(self, *, rmock: requests_mock.Mocker) -> None:
rmock.put("http://test/api/publish/s3%3Aaptly-repo%3Atest_xyz__1/test",
text='{"AcquireByHash":false,"Architectures":["amd64"],"Distribution":"test","Label":"",'
'"Origin":"","Prefix":"test/xyz_1","SkipConten... | gopythongo/aptly-api-client | [
19,
16,
19,
7,
1496180979
] |
def test_update_no_sign(self, *, rmock: requests_mock.Mocker) -> None:
rmock.put("http://test/api/publish/s3%3Aaptly-repo%3Atest_xyz__1/test",
text='{"AcquireByHash":false,"Architectures":["amd64"],"Distribution":"test","Label":"",'
'"Origin":"","Prefix":"test/xyz_1","Sk... | gopythongo/aptly-api-client | [
19,
16,
19,
7,
1496180979
] |
def test_publish(self, *, rmock: requests_mock.Mocker) -> None:
rmock.post("http://test/api/publish/s3%3Amyendpoint%3Atest_a__1",
text='{"AcquireByHash":false,"Architectures":["amd64"],"Distribution":"test","Label":"test",'
'"Origin":"origin","Prefix":"test/a_1","SkipC... | gopythongo/aptly-api-client | [
19,
16,
19,
7,
1496180979
] |
def test_publish_no_sign(self, *, rmock: requests_mock.Mocker) -> None:
rmock.post("http://test/api/publish/s3%3Amyendpoint%3Atest_a__1",
text='{"AcquireByHash":false,"Architectures":["amd64"],"Distribution":"test","Label":"test",'
'"Origin":"origin","Prefix":"test/a_1... | gopythongo/aptly-api-client | [
19,
16,
19,
7,
1496180979
] |
def test_update_snapshot_default_key(self, *, rmock: requests_mock.Mocker) -> None:
rmock.put("http://test/api/publish/s3%3Aaptly-repo%3Atest_xyz__1/test",
text='{"AcquireByHash":false,"Architectures":["amd64"],"Distribution":"test","Label":"",'
'"Origin":"","Prefix":"te... | gopythongo/aptly-api-client | [
19,
16,
19,
7,
1496180979
] |
def test_no_name(self, *, rmock: requests_mock.Mocker) -> None:
with self.assertRaises(AptlyAPIException):
self.papi.publish(sources=[{'nope': 'nope'}], architectures=['amd64'],
prefix='s3:myendpoint:test/a_1', distribution='test', sign_skip=False,
... | gopythongo/aptly-api-client | [
19,
16,
19,
7,
1496180979
] |
def __init__(self, model, color = 'BLUE'):
self.node_radius = 10 # Radius of a node
self.node_color = 'GREEN' # TODO not currently used
self.node_outline = 'BLACK' # TODO not currently used
# Setting this flag prevents drawing this node and links while dragging
self.dragging ... | turon/mantis | [
3,
2,
3,
1,
1271876993
] |
def HitTest(self, point):
    """Return True if *point* falls inside this node's bounding rectangle."""
    return self.GetRect().InsideXY(point.x, point.y)
3,
2,
3,
1,
1271876993
] |
def Erase(self, dc):
if self.dragging:
return
dc.SetBrush(wx.Brush("WHITE"))
dc.SetPen(wx.Pen("WHITE"))
x, y = self.model.GetPosition()
#dc.DrawRectangle(x-self.node_radius, y-self.node_radius,
# self.node_radius * 2 + 4, self.node_radius * 2 + ... | turon/mantis | [
3,
2,
3,
1,
1271876993
] |
def Update(self):
#self.led = state
# create a DC for drawing in to the bitmap memory
bdc = wx.MemoryDC();
bdc.SelectObject(self.bmp);
# First clear the background
#bdc.SetBrush(wx.Brush("WHITE"))
#bdc.SetPen(wx.Pen("WHITE"))
#bdc.DrawRectangle(0, 0, se... | turon/mantis | [
3,
2,
3,
1,
1271876993
] |
def __str__(self):
    """Readable identifier used in debug/log output."""
    return 'node_view:%s' % self.model.id
3,
2,
3,
1,
1271876993
] |
def __init__(self, src, dst):
    """Link view connecting two node views.

    flashcount > 0 while the link is temporarily highlighted for a packet.
    """
    self.flashcount = 0
    self.src = src
    self.dst = dst
3,
2,
3,
1,
1271876993
] |
def Draw(self, dc, op = wx.COPY):
if self.src.dragging or self.dst.dragging:
return
if self.flashcount:
pen = wx.Pen("GOLD")
else:
pen = wx.Pen("BLUE")
pen.SetWidth(4)
dc.SetPen(pen)
dc.DrawLine(self.src.model.pos[0], self.src.model.pos... | turon/mantis | [
3,
2,
3,
1,
1271876993
] |
def __init__(self):
    """Thread-safe event queue: a lock guarding a plain list of items."""
    self.list = []
    self.lock = thread.allocate_lock()
3,
2,
3,
1,
1271876993
] |
def put(self, obj):
    "Atomically append obj to the queue."
    # The lock supports the context-manager protocol: acquire/release.
    with self.lock:
        self.list.append(obj)
3,
2,
3,
1,
1271876993
] |
def get(self):
    """Atomically return every queued item as a list and clear the queue."""
    with self.lock:
        drained = self.list
        self.list = []
    return drained
3,
2,
3,
1,
1271876993
] |
def __init__(self, parent, id, model):
wx.ScrolledWindow.__init__(self, parent, id, style=wx.NO_FULL_REPAINT_ON_RESIZE)
self.model = model
self.node_dict = {}
self.link_dict = {}
self.node_size = 25
self.dragNode = None
self.dragImage = None
self.queue = e... | turon/mantis | [
3,
2,
3,
1,
1271876993
] |
def DispatchEvent(self, callback, *args):
""""Queue a net event to be handled on the GUI thread. | turon/mantis | [
3,
2,
3,
1,
1271876993
] |
def FindNode(self, point):
    "Return the first node whose bounds contain point, or None."
    # Python-2 idiom (itervalues) kept to match the surrounding file.
    matches = (node for node in self.node_dict.itervalues() if node.HitTest(point))
    return next(matches, None)
3,
2,
3,
1,
1271876993
] |
def OnLeftUp(self, evt):
if not self.dragImage or not self.dragNode:
self.dragImage = None
self.dragNode = None
return
# Hide the image, end dragging, and nuke out the drag image.
self.dragImage.Hide()
self.dragImage.EndDrag()
self.dragImage =... | turon/mantis | [
3,
2,
3,
1,
1271876993
] |
def OnRightDown(self, event):
    """Right-clicks are intentionally ignored for now."""
    pass
3,
2,
3,
1,
1271876993
] |
def OnMotion(self, evt):
# Ignore mouse movement if we're not dragging.
if not self.dragNode or not evt.Dragging() or not evt.LeftIsDown():
return
# if we have a node, but haven't started dragging yet
if self.dragNode and not self.dragImage:
# only start the drag... | turon/mantis | [
3,
2,
3,
1,
1271876993
] |
def OnIdle(self, event):
    """Drain queued network events and invoke each callback on the GUI thread."""
    pending = self.queue.get()
    for callback, args in pending:
        callback(*args)
3,
2,
3,
1,
1271876993
] |
def Draw(self, dc):
dc.BeginDrawing() # for Windows compatibility
# Since we are a scrolling window we need to prepare the DC
self.PrepareDC(dc)
dc.SetBackground(wx.Brush(self.GetBackgroundColour()))
dc.Clear()
for link in self.link_dict.itervalues():
lin... | turon/mantis | [
3,
2,
3,
1,
1271876993
] |
def add_node(self, nodemodel, color='BLUE'):
    """Create a view for nodemodel, register it, and hook LED-change events."""
    view = node_view(nodemodel, color)
    self.node_dict[nodemodel] = view
    # Route model LED changes through DispatchEvent so they run on the GUI thread.
    nodemodel.Bind(net_model.LED_CHANGED, self.DispatchEvent, self.node_state_changed)
    view.Update()
3,
2,
3,
1,
1271876993
] |
def del_node(self, node):
    """Erase the node's view from the canvas and forget it."""
    # `in` replaces the legacy has_key(); same membership semantics.
    if node in self.node_dict:
        view = self.node_dict[node]
        view.Erase(wx.ClientDC(self))
        del self.node_dict[node]
3,
2,
3,
1,
1271876993
] |
def node_state_changed(self, node):
    """Re-render a node whose model state (e.g. LED) has changed."""
    if node in self.node_dict:
        view = self.node_dict[node]
        view.Update()
        view.Draw(wx.ClientDC(self))
3,
2,
3,
1,
1271876993
] |
def del_radio_link(self, link):
    """Erase a link's view, repaint its endpoints, and forget the link."""
    if link in self.link_dict:
        view = self.link_dict[link]
        dc = wx.ClientDC(self)
        view.Erase(dc)
        # Redrawing both endpoint nodes repairs pixels the erase removed.
        view.src.Draw(dc)
        view.dst.Draw(dc)
        del self.link_dict[link]
3,
2,
3,
1,
1271876993
] |
def new_network(self, model):
    """Reset all view-side state for a freshly loaded network model."""
    self.node_dict.clear()
    self.link_dict.clear()
    self.dragNode = None
    self.dragImage = None
    self.queue.get()  # drain any stale queued events; result discarded
3,
2,
3,
1,
1271876993
] |
def forward_radio_packet(self, link):
    """Flash a link briefly to visualize a forwarded packet."""
    if link in self.link_dict:
        view = self.link_dict[link]
        view.flashcount += 1
        # Schedule the color restore once the flash delay elapses.
        wx.FutureCall(500, self.flash_link_off, view, link)
3,
2,
3,
1,
1271876993
] |
def threaded_encode_job(job):
    """
    Run a job through its encoding workflow without blocking the caller.
    """
    # Refresh the node's activity timestamp so it does not decide to
    # terminate itself in the middle of the encode.
    NodeStateManager.i_did_something()
    job.nommer.onomnom()
24,
4,
24,
2,
1292949659
] |
def threaded_heartbeat():
"""
Fires off a threaded task to check in with feederd via SimpleDB_. There
is a domain that contains all of the running EC2_ instances and their
unique IDs, along with some state data. | duointeractive/media-nommer | [
24,
4,
24,
2,
1292949659
] |
def task_heartbeat():
"""
Checks in with feederd in a non-blocking manner via
:py:meth:`threaded_heartbeat`. | duointeractive/media-nommer | [
24,
4,
24,
2,
1292949659
] |
def test_blog_page_entries(self, browser, site_url):
    """Blog index loads with HTTP 200 and renders at least one entry."""
    browser.visit(site_url + '/blog/')
    content_blocks = browser.find_by_css('.page-content')
    assert browser.status_code == 200
    assert len(content_blocks) > 0
541,
148,
541,
18,
1438122609
] |
def test_entry_page_author(self, browser, site_url):
    """Author archive loads, shows its heading, and lists entries."""
    browser.visit(site_url + '/blog/author/admin/')
    content_blocks = browser.find_by_css('.page-content')
    assert browser.status_code == 200
    assert browser.is_text_present('Entries for author')
    assert len(content_blocks) > 0
541,
148,
541,
18,
1438122609
] |
def test_entry_page_tag(self, browser, site_url):
    """Tag archive loads, shows its heading, and lists entries."""
    browser.visit(site_url + '/blog/tag/test/')
    content_blocks = browser.find_by_css('.page-content')
    assert browser.status_code == 200
    assert browser.is_text_present('Entries for tag')
    assert len(content_blocks) > 0
541,
148,
541,
18,
1438122609
] |
def __init__(self, model, input_record, num_to_collect,
name='last_n_window_collector', **kwargs):
super(LastNWindowCollector, self).__init__(
model, name, input_record, **kwargs)
assert num_to_collect > 0
self.num_to_collect = num_to_collect
assert isinstanc... | ryfeus/lambda-packs | [
1086,
234,
1086,
13,
1476901359
] |
def __init__(self):
    """Register this driver with the Hardware base using the module constants."""
    Hardware.__init__(self, CLASS_NAME, CLASS_ID, VENDOR_ID, DEVICES, PRIORITY)
2,
1,
2,
1,
1427035979
] |
def get_packages():
    """Return the Catalyst driver package list, adding lib32 variants on x86_64."""
    pkgs = ["catalyst-hook", "catalyst-libgl", "catalyst-utils", "acpid", "qt4"]
    # os.uname()[-1] is the machine field (equivalent to platform.machine()).
    if os.uname()[-1] == "x86_64":
        pkgs += ["lib32-catalyst-libgl", "lib32-catalyst-utils", "lib32-opencl-catalyst"]
    return pkgs
2,
1,
2,
1,
1427035979
] |
def add_repositories(path):
""" Adds [xorg116] and [catalyst-hd234k] repos to pacman.conf """
with open(path, 'r') as pacman_conf:
lines = pacman_conf.readlines()
with open(path, "w") as pacman_conf:
for line in lines:
# xorg11x needs to be present before ... | DecisionSystemsGroup/DSGos | [
2,
1,
2,
1,
1427035979
] |
def post_install(self, dest_dir):
# Add repos to user's pacman.conf
path = os.path.join(dest_dir, "etc/pacman.conf")
self.add_repositories(path)
super().chroot(["systemctl", "enable", "atieventsd"])
super().chroot(["systemctl", "enable", "catalyst-hook"])
super().chroot(... | DecisionSystemsGroup/DSGos | [
2,
1,
2,
1,
1427035979
] |
def __init__(self):
    """No per-instance state; plugins are configured via class attributes."""
2517,
401,
2517,
134,
1490233450
] |
def plugin_name(self):
    """Return the plugin's name as declared by the PLUGIN_NAME attribute."""
    return self.PLUGIN_NAME
2517,
401,
2517,
134,
1490233450
] |
def plugin_title(self):
    """Return PLUGIN_TITLE when set, falling back to the plugin name."""
    return self.PLUGIN_TITLE if self.PLUGIN_TITLE else self.plugin_name()
2517,
401,
2517,
134,
1490233450
] |
def is_active(self):
    """
    Return the config's active flag, or False when no config record exists.
    """
    cfg = self.plugin_config()
    return cfg.active if cfg else False
2517,
401,
2517,
134,
1490233450
] |
def Replace_BaseRF_Gap_to_AxisField_Nodes(accLattice,z_step,dir_location="",accSeq_Names = [],cavs_Names = []):
"""
Function will replace BaseRF_Gap nodes by AxisFieldRF_Gap.
It is assumed that AxisFieldRF_Gap nodes do not overlap any
others nodes (except Drifts).
The replacement will be performed only for specif... | PyORBIT-Collaboration/py-orbit | [
19,
36,
19,
5,
1481146278
] |
def Make_AxisFieldRF_Gaps_and_Find_Neihbor_Nodes(rf_length_tolerance,accLattice,accSeq,dir_location,cavs):
"""
It returns (af_rf_gap_dict,rf_gap_ind_up_down_arr).
This function analyzes the nodes in the accSeq and creates a dictionary and
an array:
af_rf_gap_dict[rf_gap] = AxisFieldRF_Gap(rf_gap)
and
rf_gap_ind... | PyORBIT-Collaboration/py-orbit | [
19,
36,
19,
5,
1481146278
] |
def RenormalizeFunction(func,z_min,z_max):
"""
It re-normalizes the Function in the new limits (z_min,z_max).
We assume that region of the function definition will be cut not extended.
"""
spline = SplineCH()
spline.compile(func)
integrator = GaussLegendreIntegrator(500)
integrator.setLimits(z_min,z_max)
... | PyORBIT-Collaboration/py-orbit | [
19,
36,
19,
5,
1481146278
] |
def create(kernel):
    """Build the Slooni Jong quest-item tangible from its shared template."""
    result = Tangible()
    result.template = "object/tangible/mission/quest_item/shared_slooni_jong_q1_needed.iff"
    result.attribute_template_id = -1
    result.stfName("loot_tals_n", "slooni_jong_q1_needed")
    # NOTE(review): no `return result` is visible in this excerpt — confirm
    # against the full file whether callers expect the object back.
62,
37,
62,
37,
1297996365
] |
def get_printable_location_up(block_method):
    """JIT trace label for the upward to:do: loop driver."""
    from som.vmobjects.method_bc import BcAbstractMethod
    assert isinstance(block_method, BcAbstractMethod)
    return "to:do: %s" % block_method.merge_point_string()
24,
4,
24,
5,
1382259745
] |
def get_printable_location_down(block_method):
    """JIT trace label for the downward downTo:do: loop driver."""
    from som.vmobjects.method_bc import BcAbstractMethod
    assert isinstance(block_method, BcAbstractMethod)
    # Label text kept byte-identical; "downToto:do:" looks like a typo for
    # "downTo:do:" but it is only a debug label — confirm before changing.
    return "downToto:do: %s" % block_method.merge_point_string()
24,
4,
24,
5,
1382259745
] |
def _to_do_int(i, by_increment, top, block, block_method):
    """Invoke block for each int from i up to top inclusive, stepping by by_increment."""
    assert isinstance(i, int)
    assert isinstance(top, int)
    current = i
    while current <= top:
        # Merge point lets the tracing JIT recognize this loop.
        jitdriver_int.jit_merge_point(block_method=block_method)
        block_method.invoke_2(block, Integer(current))
        current += by_increment
24,
4,
24,
5,
1382259745
] |
def _to_do(rcvr, limit, block):
block_method = block.get_method()
i = rcvr.get_embedded_integer()
if isinstance(limit, Double):
_to_do_double(i, 1, limit.get_embedded_double(), block, block_method)
else:
_to_do_int(i, 1, limit.get_embedded_integer(), block, block_method)
return rcv... | SOM-st/PySOM | [
24,
4,
24,
5,
1382259745
] |
def _down_to_do_int(i, by_increment, bottom, block, block_method):
    """Invoke block for each int from i down to bottom inclusive, stepping by by_increment."""
    assert isinstance(i, int)
    assert isinstance(bottom, int)
    current = i
    while current >= bottom:
        # Merge point lets the tracing JIT recognize this loop.
        jitdriver_int_down.jit_merge_point(block_method=block_method)
        block_method.invoke_2(block, Integer(current))
        current -= by_increment
24,
4,
24,
5,
1382259745
] |
def _down_to_do(rcvr, limit, block):
block_method = block.get_method()
i = rcvr.get_embedded_integer()
if isinstance(limit, Double):
_down_to_do_double(i, 1, limit.get_embedded_double(), block, block_method)
else:
_down_to_do_int(i, 1, limit.get_embedded_integer(), block, block_method)
... | SOM-st/PySOM | [
24,
4,
24,
5,
1382259745
] |
def __init__(self, _tagLst, _attrName, _attrVal, _data):
self.tagList = _tagLst
self.attrName = _attrName
self.attrVal = _attrVal
self.dataToCheck = _data
self.status_baseline = False
self.status_superior = False
self.status_exemplary = False
self.... | KhronosGroup/COLLADA-CTS | [
30,
9,
30,
11,
1336571488
] |
def JudgeBaseline(self, context):
    """Baseline criterion: no step of the test run may have crashed."""
    self.__assistant.CheckCrashes(context)
30,
9,
30,
11,
1336571488
] |
def JudgeSuperior(self, context):
    """Superior adds no extra checks: it mirrors the baseline verdict."""
    self.status_superior = self.status_baseline
    return self.status_superior
30,
9,
30,
11,
1336571488
] |
def JudgeExemplary(self, context):
    """Demote exemplary to the failed superior status; otherwise leave it as-is."""
    # Equality with False (not truthiness) preserved from the original.
    if self.status_superior == False:
        self.status_exemplary = self.status_superior
    return self.status_exemplary
30,
9,
30,
11,
1336571488
] |
def __init__(
self,
queue: q.Queue = None,
burst_limit: int = 30,
time_limit_ms: int = 1000,
exc_route: Callable[[Exception], None] = None,
autostart: bool = True,
name: str = None, | tzpBingo/github-trending | [
42,
20,
42,
1,
1504755582
] |
def run(self) -> None:
"""
Do not use the method except for unthreaded testing purposes, the method normally is
automatically called by autostart argument.
"""
times: List[float] = [] # used to store each callable processing time
while True:
item = self._que... | tzpBingo/github-trending | [
42,
20,
42,
1,
1504755582
] |
def _default_exception_handler(exc: Exception) -> NoReturn:
    """
    Fallback exception route: immediately re-raise in the worker thread.
    Subclasses may override this with custom handling.
    """
    raise exc
42,
20,
42,
1,
1504755582
] |
def __init__(
self,
all_burst_limit: int = 30,
all_time_limit_ms: int = 1000,
group_burst_limit: int = 20,
group_time_limit_ms: int = 60000,
exc_route: Callable[[Exception], None] = None,
autostart: bool = True, | tzpBingo/github-trending | [
42,
20,
42,
1,
1504755582
] |
def start(self) -> None:
    """Manually start processing on both underlying delay queues."""
    for delayq in (self._all_delayq, self._group_delayq):
        delayq.start()
42,
20,
42,
1,
1504755582
] |
def __call__(self, promise: Callable, is_group_msg: bool = False) -> Callable:
"""
Processes callables in throughput-limiting queues to avoid hitting limits (specified with
:attr:`burst_limit` and :attr:`time_limit`.
Args:
promise (:obj:`callable`): Mainly the ``telegram.uti... | tzpBingo/github-trending | [
42,
20,
42,
1,
1504755582
] |
def wrapped(self: 'Bot', *args: object, **kwargs: object) -> object:
# pylint: disable=W0212
queued = kwargs.pop(
'queued', self._is_messages_queued_default # type: ignore[attr-defined]
)
isgroup = kwargs.pop('isgroup', False)
if queued:
prom = Promise(me... | tzpBingo/github-trending | [
42,
20,
42,
1,
1504755582
] |
def __init__(self, _tagLst, _attrName, _attrVal, _data):
self.tagList = _tagLst
self.attrName = _attrName
self.attrVal = _attrVal
self.dataToCheck = _data
self.status_baseline = False
self.status_superior = False
self.status_exemplary = False
self.__assist... | KhronosGroup/COLLADA-CTS | [
30,
9,
30,
11,
1336571488
] |
def JudgeBaseline(self, context):
    """Baseline check: verify that no step in the run crashed."""
    self.__assistant.CheckCrashes(context)
30,
9,
30,
11,
1336571488
] |
def JudgeSuperior(self, context): | KhronosGroup/COLLADA-CTS | [
30,
9,
30,
11,
1336571488
] |
def JudgeExemplary(self, context):
    """Exemplary adds no extra checks: it mirrors the superior verdict."""
    self.status_exemplary = self.status_superior
    return self.status_exemplary
30,
9,
30,
11,
1336571488
] |
def forwards(self, orm):
# Adding model 'Review'
db.create_table(u'review_review', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])),
(... | bitmazk/django-review | [
125,
48,
125,
8,
1376058990
] |
def oscillation_period(lambda_jeans):
    """Analytic oscillation period for a stable perturbation.

    Diverges as lambda_jeans approaches the perturbation wavelength.
    Uses module-level globals rho0 and wavelength.
    """
    denom = np.sqrt(lambda_jeans * lambda_jeans - wavelength * wavelength)
    return np.sqrt(np.pi / rho0) * wavelength / denom
41,
13,
41,
44,
1375101397
] |
def jeans_unstable_solution(x, omega, rhofit):
    """Analytic density for an unstable (growing) Jeans mode at time tsim.

    Uses module-level rho0/amp/wavelength/tsim; rhofit is unused here but
    kept so the signature matches the stable-solution fit function.
    """
    phase = 2.0 * math.pi * x / wavelength
    return rho0 * (1.0 + amp * np.sin(phase) * np.cosh(omega * tsim))
41,
13,
41,
44,
1375101397
] |
def jeans_stable_solution(x, omega, rhofit):
    """Analytic density for a stable (oscillating) Jeans mode at time tsim.

    Uses module-level amp/wavelength/tsim; rhofit is the fitted base density.
    """
    phase = 2.0 * math.pi * x / wavelength
    return rhofit * (1.0 + amp * np.sin(phase) * np.cos(omega * tsim))
41,
13,
41,
44,
1375101397
] |
def load_config(path):
# Load and parse required config file
try:
config = configparser()
config.optionxform = str
if len(config.read(path)) != 1:
raise IOError
except StandardError:
# Either couldn't read/find the file, or couldn't parse it.
print "Warnin... | ellonweb/merlin | [
19,
22,
19,
1,
1229460974
] |
def gen_random_label():
    """Build a random DNS label of 1..maxsize characters drawn from ldh."""
    length = gen.randint(1, maxsize)
    return "".join(gen.choice(ldh) for _ in range(length))
3,
2,
3,
4,
1476164728
] |
def usage():
    """Print command-line usage to stdout."""
    sys.stdout.write("Usage: " + sys.argv[0] + " [-n number] "
                     "[-p percent-random] [-t TLD]\n")
    sys.stdout.write(" [-m MAXSIZE] [-f zone-file]\n")
3,
2,
3,
4,
1476164728
] |
def pam_conv(auth, query_list, userData):
resp = []
for i in range(len(query_list)):
query, type = query_list[i]
if type == PAM.PAM_PROMPT_ECHO_ON:
val = raw_input(query)
resp.append((val, 0))
elif type == PAM.PAM_PROMPT_ECHO_OFF:
val = getpass(query)
resp.append((val, 0))
elif type == PAM.PAM_P... | unix4you2/practico | [
5,
2,
5,
1,
1656680052
] |
def setUp(self):
"""
Add a user and a course
"""
super().setUp()
# create and log in a staff user.
# create and log in a non-staff user
self.user = UserFactory()
self.factory = RequestFactory()
self.client = AjaxEnabledTestClient()
self.cli... | eduNEXT/edunext-platform | [
28,
7,
28,
10,
1414072000
] |
def test_rerun(self):
"""
Just testing the functionality the view handler adds over the tasks tested in test_clone_course
"""
add_organization({
'name': 'Test Organization',
'short_name': self.source_course_key.org,
'description': 'Testing Organization... | eduNEXT/edunext-platform | [
28,
7,
28,
10,
1414072000
] |
def test_newly_created_course_has_web_certs_enabled(self, store):
"""
Tests newly created course has web certs enabled by default.
"""
with modulestore().default_store(store):
response = self.client.ajax_post(self.course_create_rerun_url, {
'org': 'orgX',
... | eduNEXT/edunext-platform | [
28,
7,
28,
10,
1414072000
] |
def test_course_creation_for_unknown_organization_relaxed(self, store):
"""
Tests that when ORGANIZATIONS_AUTOCREATE is True,
creating a course-run with an unknown org slug will create an organization
and organization-course linkage in the system.
"""
with self.assertRais... | eduNEXT/edunext-platform | [
28,
7,
28,
10,
1414072000
] |
def test_course_creation_for_unknown_organization_strict(self, store):
"""
Tests that when ORGANIZATIONS_AUTOCREATE is False,
creating a course-run with an unknown org slug will raise a validation error.
"""
with modulestore().default_store(store):
response = self.cli... | eduNEXT/edunext-platform | [
28,
7,
28,
10,
1414072000
] |
def setUp(self):
    """
    Create the test app, sync the DB inside its context, and expose a client.
    """
    self._app = create_test_app()
    with self._app.app_context():
        model.syncdb()
    self.app = self._app.test_client()
10,
1,
10,
16,
1427854923
] |
def test_no_page(self):
    """A missing page still renders (wiki-style create prompt) with HTTP 200."""
    response = self.app.get('/missing-page')
    self.assertEqual(response.status_code, 200)
10,
1,
10,
16,
1427854923
] |
def setup(core, object):
    """No per-object setup is required for this template."""
    return
23,
70,
23,
56,
1372673790
] |
def main():
fig = plt.figure(figsize=[10, 5])
ax1 = fig.add_subplot(1, 2, 1, projection=ccrs.SouthPolarStereo())
ax2 = fig.add_subplot(1, 2, 2, projection=ccrs.SouthPolarStereo(),
sharex=ax1, sharey=ax1)
fig.subplots_adjust(bottom=0.05, top=0.95,
left=0.... | SciTools/cartopy | [
1188,
337,
1188,
330,
1343979839
] |
def itemNames(): | ProjectSWGCore/NGECore2 | [
23,
70,
23,
56,
1372673790
] |
def check(self, instance):
# read config
tags = instance.get('tags', [])
host = instance.get('host', '127.0.0.1')
port = instance.get('port', 23456)
prefix = instance.get('prefix', 'plog.')
suffix = instance.get('suffix', '')
timeout = instance.get('timeout', 3)
... | pcarrier/plog | [
70,
25,
70,
13,
1391253070
] |
def _core_plugin(self):
    """Shortcut to the loaded Neutron core plugin."""
    return manager.NeutronManager.get_plugin()
2,
4,
2,
1,
1431059236
] |
def _l3_plugin(self):
    """Shortcut to the loaded L3 router service plugin (may be None)."""
    service_plugins = manager.NeutronManager.get_service_plugins()
    return service_plugins.get(constants.L3_ROUTER_NAT)
2,
4,
2,
1,
1431059236
] |
def _make_qos_dict(self, qos, fields=None):
res = {'id': qos.id,
'tenant_id': qos.tenant_id,
'name': qos.name,
'description': qos.description,
'direction': qos.direction,
'rate': qos.rate,
'burst': qos.burst,
... | eayunstack/neutron-qos | [
2,
4,
2,
1,
1431059236
] |
def _aggregate_rate_of_qos(self, qos):
    """Sum the rates of the qos's root queues (queues without a parent)."""
    root_rates = [queue.rate for queue in qos.queues
                  if queue.parent_queue is None]
    # reduce kept (not sum) to preserve the original's error on an empty list.
    return reduce(lambda total, rate: total + rate, root_rates)
2,
4,
2,
1,
1431059236
] |
def _check_qos_target(self, context,
target_type, target_id, qos_direction):
ret = {'router_id': None, 'port_id': None}
if target_type is not None and target_id is not None:
# Need to check
try:
if target_type == 'port':
... | eayunstack/neutron-qos | [
2,
4,
2,
1,
1431059236
] |
def create_qos(self, context, qos):
""" Create a qos and its default queue. """
qos = qos['qos']
default_queue = self._extract_default_queue_from_qos_param(qos)
if qos['rate'] < default_queue['rate']:
raise ext_qos.QosRateTooSmall(id=None, rate=qos['rate'])
qos_targe... | eayunstack/neutron-qos | [
2,
4,
2,
1,
1431059236
] |
def delete_qos(self, context, id):
    """Delete the qos row inside a subtransaction (DB cascades clean up children)."""
    qos_db = self._get_qos(context, id)
    with context.session.begin(subtransactions=True):
        context.session.delete(qos_db)
2,
4,
2,
1,
1431059236
] |
def get_qoss_count(self, context, filters=None):
    """Return the number of Qos rows matching the optional filters."""
    return self._get_collection_count(context, Qos, filters=filters)
2,
4,
2,
1,
1431059236
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.