function
stringlengths
11
56k
repo_name
stringlengths
5
60
features
list
def test_exit_with_code(self, mock_log, mock_exit):
    """exit_with_code() forwards the code to sys.exit without logging errors."""
    # No message: nothing logged, code passed straight through.
    exit_with_code(1)
    mock_log.error.assert_not_called()
    mock_exit.assert_called_once_with(1)

    mock_exit.reset_mock()

    # With a message the exit code is still forwarded unchanged.
    exit_with_code(0, "msg")
    mock_exit.assert_called_once_with(0)
    mock_exit.assert_called_once()
aws-quickstart/taskcat
[ 1061, 211, 1061, 39, 1479169741 ]
def test_s3_bucket_name_from_url(self):
    """The bucket name is the first label of the S3 virtual-host URL."""
    url = "https://buk.s3.amazonaws.com/obj.yaml"
    self.assertEqual("buk", s3_bucket_name_from_url(url))
aws-quickstart/taskcat
[ 1061, 211, 1061, 39, 1479169741 ]
def onInit():
    """Seed counters that exist only for the test assertions."""
    for counter_name in ("hardwareFailureJavaCount",
                         "hardwareFailureScriptCount",
                         "sameSourceFirstFireCount"):
        sponge.setVariable(counter_name, AtomicInteger(0))
softelnet/sponge
[ 7, 3, 7, 5, 1498251529 ]
def onConfigure(self):
    """Unordered rule over filesystem/disk failures from one source within 2s."""
    self.withEvents(["filesystemFailure", "diskFailure"]).withOrdered(False)
    # Both events must share a source and arrive at most 2 seconds apart.
    same_source = lambda rule, event: rule.firstEvent.get("source") == event.get("source")
    close_in_time = lambda rule, event: Duration.between(rule.firstEvent.time, event.time).seconds <= 2
    self.withAllConditions([same_source, close_in_time])
    self.withDuration(Duration.ofSeconds(5))
softelnet/sponge
[ 7, 3, 7, 5, 1498251529 ]
def onConfigure(self):
    """Unordered rule: e1 filtered by severity, e2 by severity and disk source."""
    self.withEvents(["filesystemFailure e1", "diskFailure e2 :all"]).withOrdered(False)
    self.withCondition("e1", self.severityCondition)
    self.withConditions("e2", [self.severityCondition, self.diskFailureSourceCondition])
    self.withDuration(Duration.ofSeconds(5))
softelnet/sponge
[ 7, 3, 7, 5, 1498251529 ]
def severityCondition(self, event):
    """Accept only events whose severity is strictly greater than 5."""
    severity = int(event.get("severity"))
    return severity > 5
softelnet/sponge
[ 7, 3, 7, 5, 1498251529 ]
def onConfigure(self):
    """Listen for the "alarm" event."""
    self.withEvent("alarm")
softelnet/sponge
[ 7, 3, 7, 5, 1498251529 ]
def onAccept(self, event):
    """Delegate the accept decision to the shared deduplication helper."""
    return self.deduplication.onAccept(event)
softelnet/sponge
[ 7, 3, 7, 5, 1498251529 ]
def onConfigure(self):
    """Subscribe this processor to the "alarm" event."""
    self.withEvent("alarm")
softelnet/sponge
[ 7, 3, 7, 5, 1498251529 ]
def onLoad():
    """Register the Java-defined rule class with the engine."""
    sponge.enableJava(SameSourceJavaUnorderedRule)
softelnet/sponge
[ 7, 3, 7, 5, 1498251529 ]
def onStartup():
    """Emit the scripted failure events, preserving their original order."""
    events = [
        ("diskFailure", 10, "server1"),
        ("diskFailure", 10, "server2"),
        ("diskFailure", 8, "server1"),
        ("diskFailure", 8, "server1"),
        ("filesystemFailure", 8, "server1"),
        ("filesystemFailure", 6, "server1"),
        ("diskFailure", 6, "server1"),
    ]
    for name, severity, source in events:
        sponge.event(name).set("severity", severity).set("source", source).send()
softelnet/sponge
[ 7, 3, 7, 5, 1498251529 ]
def sample_delete_conversation_dataset():
    """Delete a conversation dataset and block until the LRO completes."""
    # Create a client
    client = dialogflow_v2.ConversationDatasetsClient()

    # Initialize request argument(s)
    request = dialogflow_v2.DeleteConversationDatasetRequest(name="name_value")

    # Make the request and wait for the long-running operation.
    operation = client.delete_conversation_dataset(request=request)
    print("Waiting for operation to complete...")
    response = operation.result()

    # Handle the response
    print(response)
googleapis/python-dialogflow
[ 387, 145, 387, 4, 1508880371 ]
def angle(firstPoint, secondPoint):
    """Return the angle (degrees) of the line from firstPoint to secondPoint.

    0 degrees means secondPoint lies exactly to the right of firstPoint.
    firstPoint, secondPoint: must be NSPoint or GSNode (anything with .x/.y).
    """
    dx = secondPoint.x - firstPoint.x
    dy = secondPoint.y - firstPoint.y
    return math.degrees(math.atan2(dy, dx))
mekkablue/Glyphs-Scripts
[ 349, 100, 349, 27, 1317798757 ]
def isThereAlreadyAGuideWithTheseProperties(thisLayer, guideposition, guideangle):
    """Return True if thisLayer already has a guide at this position/angle.

    Angles are folded towards [0, 180] before comparing; angle and position
    matches use a 0.01 tolerance.
    """
    def fold(a):
        # Same single-step normalization as the original code.
        if a < 0:
            a += 180
        if a > 180:
            a -= 180
        return a

    target = fold(guideangle)
    for guide in thisLayer.guides:
        if abs(fold(guide.angle) - target) >= 0.01:
            continue
        if abs(guide.position.x - guideposition.x) < 0.01 \
                and abs(guide.position.y - guideposition.y) < 0.01:
            return True
    return False
mekkablue/Glyphs-Scripts
[ 349, 100, 349, 27, 1317798757 ]
def get_requirements():
    """Read the hard requirements file into a list of requirement strings."""
    with codecs.open(REQUIREMENTS_FILE) as handle:
        return handle.read().splitlines()
arviz-devs/arviz
[ 1351, 324, 1351, 165, 1438170670 ]
def get_requirements_optional():
    """Read the optional requirements file into a list of requirement strings."""
    with codecs.open(REQUIREMENTS_OPTIONAL_FILE) as handle:
        return handle.read().splitlines()
arviz-devs/arviz
[ 1351, 324, 1351, 165, 1438170670 ]
def get_version():
    """Extract __version__ from VERSION_FILE.

    Returns:
        The version string captured from the first matching line.

    Raises:
        RuntimeError: if no __version__ assignment is found.
    """
    version_regex = r"^__version__ = ['\"]([^'\"]*)['\"]"
    # Context manager closes the handle; the original left it open until GC.
    with open(VERSION_FILE, "rt") as handle:
        for line in handle:
            mo = re.search(version_regex, line, re.M)
            if mo:
                return mo.group(1)
    raise RuntimeError("Unable to find version in %s." % (VERSION_FILE,))
arviz-devs/arviz
[ 1351, 324, 1351, 165, 1438170670 ]
def __repr__(self): return "<Event %s>" % self._info
nttcom/eclcli
[ 22, 15, 22, 1, 1472615846 ]
def delete(self):
    """Delete this resource through its manager and return the result."""
    return self.manager.delete(self)
nttcom/eclcli
[ 22, 15, 22, 1, 1472615846 ]
def list(self, stack_id, resource_name=None, **kwargs):
    """Get a list of events.

    :param stack_id: ID of stack the events belong to
    :param resource_name: Optional name of resources to filter events by
    :rtype: list of :class:`Event`
    """
    params = {}
    if 'filters' in kwargs:
        params.update(kwargs.pop('filters'))
    # Only truthy kwargs become query parameters.
    for key, value in six.iteritems(kwargs):
        if value:
            params[key] = value

    if resource_name is None:
        url = '/stacks/%s/events' % stack_id
    else:
        stack_id = self._resolve_stack_id(stack_id)
        url = '/stacks/%s/resources/%s/events' % (
            parse.quote(stack_id, ''),
            parse.quote(encodeutils.safe_encode(resource_name), ''))

    if params:
        url += '?%s' % parse.urlencode(params, True)
    return self._list(url, 'events')
nttcom/eclcli
[ 22, 15, 22, 1, 1472615846 ]
def get_zk():
    """Return the process-wide KazooClient, creating/starting it on first use."""
    global _zk
    if _zk is None:
        _zk = KazooClient(
            app.config['ZK_CONNECTION_STRING'],
            default_acl=(
                # grants read permissions to anyone.
                make_acl('world', 'anyone', read=True),
                # grants all permissions to the creator of the node.
                make_acl('auth', '', all=True)
            )
        )
        _zk.start()
        _zk.add_auth('digest', jones_credential)
        _zk.DataWatch('/services', func=ensure_root)
    return _zk
mwhooker/jones
[ 49, 12, 49, 10, 1339446670 ]
def request_wants(t):
    """True if the client's Accept header best-matches mimetype *t*."""
    types = ['text/plain', 'text/html', 'application/json']
    assert t in types
    return request.accept_mimetypes.best_match(types) == t
mwhooker/jones
[ 49, 12, 49, 10, 1339446670 ]
def as_json(d, indent=None):
    """Serialize *d* as JSON wrapped in Markup so templates don't escape it."""
    return Markup(json.dumps(d, indent=indent))
mwhooker/jones
[ 49, 12, 49, 10, 1339446670 ]
def inject_services():
    """Template context processor: service nodes that actually exist."""
    existing = [child
                for child in get_zk().get_children('/services')
                if Jones(child, get_zk()).exists()]
    return dict(services=existing)
mwhooker/jones
[ 49, 12, 49, 10, 1339446670 ]
def index():
    """Render the landing page."""
    return render_template('index.j2')
mwhooker/jones
[ 49, 12, 49, 10, 1339446670 ]
def service_update(env, jones):
    """Apply the posted config for *env* with optimistic version checking."""
    data = json.loads(request.form['data'])
    version = int(request.form['version'])
    jones.set_config(env, data, version)
    return env
mwhooker/jones
[ 49, 12, 49, 10, 1339446670 ]
def service_get(env, jones):
    """Render (HTML or JSON) the configuration view for *env*."""
    if not jones.exists():
        return redirect(url_for('index'))

    children = jones.get_child_envs(Env.Root)

    # A leaf env is non-empty and has no descendants among the children.
    is_leaf = lambda child: len(child) and not any(
        c.find(child + '/') >= 0 for c in children)

    try:
        version, config = jones.get_config_by_env(env)
    except NoNodeException:
        return redirect(url_for('services', service=jones.service))

    childs = imap(dict, izip(
        izip(repeat('env'), imap(Env, children)),
        izip(repeat('is_leaf'), imap(is_leaf, children))))

    vals = {
        "env": env,
        "version": version,
        "children": list(childs),
        "config": config,
        "view": jones.get_view_by_env(env),
        "service": jones.service,
        "associations": jones.get_associations(env)
    }

    if request_wants('application/json'):
        return jsonify(vals)
    return render_template('service.j2', **vals)
mwhooker/jones
[ 49, 12, 49, 10, 1339446670 ]
def services(service, env):
    """Dispatch to the SERVICE handler that matches the HTTP method."""
    handler = SERVICE[request.method.lower()]
    return handler(Env(env), Jones(service, get_zk()))
mwhooker/jones
[ 49, 12, 49, 10, 1339446670 ]
def association(service, assoc):
    """GET/PUT/DELETE a host association for *service*."""
    jones = Jones(service, get_zk())
    method = request.method
    if method == 'GET':
        # NOTE(review): non-JSON GETs fall through and return None — confirm intended.
        if request_wants('application/json'):
            return jsonify(jones.get_config(assoc))
    if method == 'PUT':
        jones.assoc_host(assoc, Env(request.form['env']))
        return service, 201
    elif method == 'DELETE':
        jones.delete_association(assoc)
        return service, 200
mwhooker/jones
[ 49, 12, 49, 10, 1339446670 ]
def export():
    """Dump the whole ZooKeeper tree rooted at '/'."""
    return zkutil.export_tree(get_zk(), '/')
mwhooker/jones
[ 49, 12, 49, 10, 1339446670 ]
def handle_joined(self):
    """Record that the 'joined' hook fired for this endpoint."""
    self.invocations.append('joined: {}'.format(self.endpoint_name))
juju-solutions/charms.reactive
[ 21, 34, 21, 33, 1438118568 ]
def handle_changed(self):
    """Record that the 'changed' hook fired for this endpoint."""
    self.invocations.append('changed: {}'.format(self.endpoint_name))
juju-solutions/charms.reactive
[ 21, 34, 21, 33, 1438118568 ]
def GET(self, path):
    """Serve a file from the built HTML docs; '' maps to index.html."""
    if path == '':
        path = 'index.html'
    # Context manager guarantees the handle is closed (original leaked it).
    with open('_build/html/' + path) as f:
        return f.read()
intel-analytics/analytics-zoo
[ 2553, 722, 2553, 534, 1493951250 ]
def uses_custom_precision(bench):
    """True if *bench* mentions any class that requires custom precision.

    :param bench: benchmark name (substring match against each class).
    """
    # any() replaces the manual flag-and-break loop; same short-circuiting.
    return any(current_class in bench for current_class in CUSTOM_PRECISION_CLASSES)
VROOM-Project/vroom-scripts
[ 26, 17, 26, 1, 1476267014 ]
def nb_jobs(solution):
    """Count steps of a job type across all routes of *solution*.

    :param solution: dict with a "routes" list, each route holding "steps".
    """
    # sum() over a generator replaces the manual counter loop.
    return sum(
        1
        for route in solution["routes"]
        for step in route["steps"]
        if step["type"] in JOB_TYPES
    )
VROOM-Project/vroom-scripts
[ 26, 17, 26, 1, 1476267014 ]
def __init__(self, xml_filename, chlname=None, **kwargs):
    """Build the channel from a ChannelML XML file."""
    (eqnset, chlinfo, default_params) = ChannelMLReader.BuildEqnset(xml_filename)
    # Convert each parameter object to a plain quantities value.
    default_params = dict((k, v.as_quantities_quantity())
                          for (k, v) in default_params.iteritems())
    super(NeuroML_Via_NeuroUnits_ChannelNEURON, self).__init__(
        eqnset=eqnset,
        default_parameters=default_params,
        recordables_map=None,
        recordables_data=None,
        xml_filename=xml_filename,
        chlname=chlname,
        **kwargs)
mikehulluk/morphforge
[ 5, 3, 5, 2, 1332333834 ]
def setUp(self):
    """Point the comparison harness at the reference workbook."""
    self.set_filename('textbox38.xlsx')
jmcnamara/XlsxWriter
[ 3172, 594, 3172, 18, 1357261626 ]
def __init__(self, *args, **kwargs):
    """Extend the shared compile flags with pyodeint's include dir and libm."""
    self.compile_kwargs = copy.deepcopy(_compile_kwargs)
    self.compile_kwargs['include_dirs'].append(pyodeint.get_include())
    self.compile_kwargs['libraries'].extend(['m'])
    super(NativeOdeintCode, self).__init__(*args, **kwargs)
bjodah/pyodesys
[ 86, 9, 86, 8, 1443176313 ]
def gradient(x):
    """Backwards-compatible alias; see curvature() for the actual test."""
    return curvature(x)
castelao/CoTeDe
[ 36, 15, 36, 9, 1369489700 ]
def curvature(x):
    """Curvature of a timeseries

    This test is commonly known as gradient for historical reasons, but
    that is a bad name choice since it is not the actual gradient, like:
    d/dx + d/dy + d/dz, but as defined by GTSPP, EuroGOOS and others,
    which is actually the curvature of the timeseries..

    Note
    ----
    - Pandas.Series operates with indexes, so it should be done
      different. In that case, call for _curvature_pandas.
    """
    if isinstance(x, ma.MaskedArray):
        # filled() returns a new ndarray; the original wrote NaNs into the
        # caller's array in place before taking .data, mutating shared data.
        x = x.filled(np.nan)
    if PANDAS_AVAILABLE and isinstance(x, pd.Series):
        return _curvature_pandas(x)
    x = np.atleast_1d(x)
    y = np.nan * x
    # Interior points: deviation from the mean of the two neighbors.
    y[1:-1] = x[1:-1] - (x[:-2] + x[2:]) / 2.0
    return y
castelao/CoTeDe
[ 36, 15, 36, 9, 1369489700 ]
def set_features(self):
    """Precompute the 'gradient' feature (actually curvature) for varname."""
    self.features = {"gradient": curvature(self.data[self.varname])}
castelao/CoTeDe
[ 36, 15, 36, 9, 1369489700 ]
def setUpClass(cls):
    """Start from a clean slate of SQL forms and cases."""
    super(SqlUpdateStrategyTest, cls).setUpClass()
    FormProcessorTestUtils.delete_all_sql_forms()
    FormProcessorTestUtils.delete_all_sql_cases()
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def tearDownClass(cls):
    """Clean up SQL forms/cases before handing off to the base class."""
    FormProcessorTestUtils.delete_all_sql_forms()
    FormProcessorTestUtils.delete_all_sql_cases()
    super(SqlUpdateStrategyTest, cls).tearDownClass()
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def test_reconcile_transactions(self, soft_assert_mock):
    """tests a transanction with an early client date and late server date"""
    with freeze_time("2018-10-10"):
        case = self._create_case()
    with freeze_time("2018-10-11"):
        new_old_xform = self._create_form()
    with freeze_time("2018-10-08"):
        new_old_trans = self._create_case_transaction(case, new_old_xform)
    with freeze_time("2018-10-11"):
        self._save(new_old_xform, case, new_old_trans)

    case = CommCareCase.objects.get_case(case.case_id)
    update_strategy = SqlCaseUpdateStrategy(case)
    # The out-of-order transaction must trigger a reconciliation...
    self.assertTrue(update_strategy.reconcile_transactions_if_necessary())
    self._check_for_reconciliation_error_soft_assert(soft_assert_mock)

    case.save(with_tracked_models=True)

    # ...and a second pass finds nothing left to fix.
    case = CommCareCase.objects.get_case(case.case_id)
    update_strategy = SqlCaseUpdateStrategy(case)
    self.assertFalse(update_strategy.reconcile_transactions_if_necessary())
    self._check_for_reconciliation_error_soft_assert(soft_assert_mock)
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def test_ignores_before_rebuild_transaction(self):
    """A rebuild transaction hides earlier out-of-order transactions."""
    with freeze_time("2018-10-10"):
        case = self._create_case()
    with freeze_time("2018-10-11"):
        new_old_xform = self._create_form()
    with freeze_time("2018-10-08"):
        new_old_trans = self._create_case_transaction(case, new_old_xform)
    with freeze_time("2018-10-11"):
        self._save(new_old_xform, case, new_old_trans)

    self.assertFalse(case.check_transaction_order())

    with freeze_time("2018-10-13"):
        new_rebuild_xform = self._create_form()
        rebuild_detail = RebuildWithReason(reason="shadow's golden coin")
        rebuild_transaction = CaseTransaction.rebuild_transaction(case, rebuild_detail)
        self._save(new_rebuild_xform, case, rebuild_transaction)

    case = CommCareCase.objects.get_case(case.case_id)
    update_strategy = SqlCaseUpdateStrategy(case)
    self.assertFalse(update_strategy.reconcile_transactions_if_necessary())
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def test_reconcile_transactions_within_fudge_factor(self, soft_assert_mock):
    """tests a transanction with an early client date and late server date"""
    with freeze_time("2018-10-10"):
        case = self._create_case()
    with freeze_time("2018-10-11 06:00"):
        new_old_xform = self._create_form()
    with freeze_time("2018-10-10 18:00"):
        new_old_trans = self._create_case_transaction(case, new_old_xform)
    with freeze_time("2018-10-11 06:00"):
        self._save(new_old_xform, case, new_old_trans)

    with freeze_time("2018-10-11"):
        new_old_xform = self._create_form()
        new_old_trans = self._create_case_transaction(case, new_old_xform)
        self._save(new_old_xform, case, new_old_trans)

    case = CommCareCase.objects.get_case(case.case_id)
    update_strategy = SqlCaseUpdateStrategy(case)
    self.assertTrue(update_strategy.reconcile_transactions_if_necessary())
    self._check_for_reconciliation_error_soft_assert(soft_assert_mock)

    case.save(with_tracked_models=True)

    case = CommCareCase.objects.get_case(case.case_id)
    update_strategy = SqlCaseUpdateStrategy(case)
    self.assertFalse(update_strategy.reconcile_transactions_if_necessary())
    self._check_for_reconciliation_error_soft_assert(soft_assert_mock)
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def _create_case_transaction(self, case, form=None, submitted_on=None, action_types=None):
    """Build a form transaction for *case*, defaulting form and timestamp."""
    form = form or self._create_form()
    submitted_on = submitted_on or datetime.utcnow()
    return CaseTransaction.form_transaction(case, form, submitted_on, action_types)
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def _save(self, form, case, transaction):
    """Persist form + case while silencing Kafka publication.

    Publishing is disabled to avoid intermittent errors caused by the
    nexus of kafka's consumer thread and freeze_time.
    """
    with patch.object(FormProcessorSQL, "publish_changes_to_kafka"):
        case.track_create(transaction)
        FormProcessorSQL.save_processed_models(ProcessedForms(form, []), [case])
dimagi/commcare-hq
[ 465, 201, 465, 202, 1247158807 ]
def get(name):
    """Return the stored value for *name*, or None if it does not exist."""
    try:
        server_variable = ServerVariable.objects.get(name=name)
    except ServerVariable.DoesNotExist:
        # Narrowed from a bare except: only a missing row means "no value";
        # other errors (DB down, multiple rows) should surface.
        return None
    return server_variable.value
solarpermit/solarpermit
[ 4, 5, 4, 41, 1376892604 ]
def set(name, value):
    """Create or update the server variable *name*, persist and return it."""
    try:
        server_variable = ServerVariable.objects.get(name=name)
    except ServerVariable.DoesNotExist:
        # Narrowed from a bare except: anything other than "missing" surfaces.
        server_variable = ServerVariable(name=name)
    server_variable.value = value
    server_variable.save()
    return server_variable
solarpermit/solarpermit
[ 4, 5, 4, 41, 1376892604 ]
def save_history(jurisdiction, source_table, source_id, target_table, target_id, notes='', notes2=''):
    """Record (or update) a migration mapping and return the history row."""
    history, created = MigrationHistory.objects.get_or_create(
        source_table=source_table,
        source_id=source_id,
        target_table=target_table,
        target_id=target_id)
    if jurisdiction is not None:  # identity check instead of `!= None`
        history.jurisdiction_id = jurisdiction.id
    history.notes = notes
    history.notes2 = notes2
    history.save()
    return history
solarpermit/solarpermit
[ 4, 5, 4, 41, 1376892604 ]
def __init__(self):
    """Initialize via the base Command; no extra state."""
    super(Command, self).__init__()
glassesfactory/Shimehari
[ 15, 2, 15, 9, 1344820625 ]
def readAndCreateFile(self, old, new):
    """Copy template *old* to *new*, refusing to overwrite, keeping its mode."""
    if os.path.exists(new):
        raise CommandError('already... %s' % new)
    with open(old, 'r') as template:
        content = template.read()
    with open(new, 'w') as newFile:
        newFile.write(content)
    sys.stdout.write(u"Creating: %s\n" % new)
    try:
        shutil.copymode(old, new)
        self.toWritable(new)
    except OSError:
        sys.stderr.write('permission error')
glassesfactory/Shimehari
[ 15, 2, 15, 9, 1344820625 ]
def createDirectory(self, rootDir, dirname):
    """Create rootDir/dirname if missing, logging the creation."""
    target = os.path.join(rootDir, dirname)
    if os.path.exists(target):
        return
    os.mkdir(target)
    sys.stdout.write("Creating: %s\n" % target)
glassesfactory/Shimehari
[ 15, 2, 15, 9, 1344820625 ]
def validate_request(validator, request):
    """Validate *request*, raise on any errors, and return the result."""
    result = validator.validate(request)
    result.raise_for_errors()
    return result
p1c2u/openapi-core
[ 196, 109, 196, 58, 1505903135 ]
def _config(path):
    """Resolve a whitelisted config key prefix and return its value as JSON."""
    segments = path.split('/')
    key = segments[0]
    plugin = next((p for p in list_plugins('tdp-config-safe-keys') if p.id == key), None)
    if plugin is None:
        _log.error('404: config key "{}" not found'.format(key))
        abort(404, 'config key "{}" not found'.format(key))
    # Replace the public key with the plugin's real config key.
    segments[0] = plugin.configKey
    return jsonify(get_config('.'.join(segments)))
datavisyn/tdp_core
[ 5, 2, 5, 67, 1503294385 ]
def setUpModule():
    """Enable the ythub plugin and boot the Girder test server."""
    base.enabledPlugins.append('ythub')
    base.startServer()
data-exp-lab/girder_ythub
[ 3, 9, 3, 3, 1460743185 ]
def __init__(self): self.task_id = 'fake_id'
data-exp-lab/girder_ythub
[ 3, 9, 3, 3, 1460743185 ]
def __init__(self): self.task_id = 'fake_id'
data-exp-lab/girder_ythub
[ 3, 9, 3, 3, 1460743185 ]
def __init__(self): self.task_id = 'fake_id'
data-exp-lab/girder_ythub
[ 3, 9, 3, 3, 1460743185 ]
def _getUser(self, userDict):
    """Create the user, or authenticate if it already exists, and return it."""
    try:
        user = self.model('user').createUser(**userDict)
    except ValidationException:
        # User already exists: log in with basic auth instead.
        resp = self.request(
            path='/user/authentication', method='GET',
            basicAuth='{login}:{password}'.format(**userDict))
        self.assertStatusOk(resp)
        user = resp.json['user']
    return user
data-exp-lab/girder_ythub
[ 3, 9, 3, 3, 1460743185 ]
def testNotebooks(self):
    """End-to-end notebook lifecycle: create, list, filter, fetch, delete."""
    # Grab the default user folders
    resp = self.request(
        path='/folder', method='GET', user=self.user, params={
            'parentType': 'user', 'parentId': self.user['_id'],
            'sort': 'name', 'sortdir': 1})
    privateFolder = resp.json[0]
    publicFolder = resp.json[1]

    example_frontend = {
        'imageName': 'xarthisius/ythub',
        'command': './perform_magic',
        'memLimit': '2048m',
        'port': 12345,
        'user': 'user',
        'targetMount': '/blah',
        'urlPath': '?token={token}',
        'description': 'foo',
        'cpuShares': None,
        'public': True,
    }
    # Actually create a new frontend (private)
    resp = self.request(
        path='/frontend', method='POST', params=example_frontend,
        user=self.admin)
    self.assertStatus(resp, 200)
    frontend = resp.json

    with mock.patch('celery.Celery') as celeryMock:
        with mock.patch('urllib.request.urlopen') as urllibMock:
            instance = celeryMock.return_value
            instance.send_task.side_effect = [
                FakeAsyncResult(), FakeAsyncResult(),
                FakeAsyncResult2(), FakeAsyncResult2(),
                FakeAsyncResult3(), FakeAsyncResult3(),
                FakeAsyncResult(), FakeAsyncResult()
            ]
            req = urllibMock.return_value
            req.fetch.return_value = {}

            params = {
                'frontendId': str(frontend['_id']),
                'folderId': str(privateFolder['_id'])
            }
            resp = self.request(
                '/notebook', method='POST', user=self.user, params=params)
            self.assertStatus(resp, 200)
            notebook = resp.json
            self.assertEqual(notebook['serviceInfo']['nodeId'], '123456')
            self.assertEqual(notebook['serviceInfo']['volumeId'], 'blah_volume')
            self.assertEqual(notebook['serviceInfo']['serviceId'], 'tmp-blah')
            self.assertEqual(notebook['url'], 'http://tmp-blah.tmpnb.null/?token=foo')
            self.assertEqual(notebook['frontendId'], str(frontend['_id']))
            self.assertEqual(notebook['folderId'], str(privateFolder['_id']))
            self.assertEqual(notebook['creatorId'], str(self.user['_id']))

    with mock.patch('celery.Celery') as celeryMock:
        with mock.patch('urllib.request.urlopen') as urllibMock:
            params = {
                'frontendId': str(frontend['_id']),
                'folderId': str(privateFolder['_id'])
            }
            # Return exisiting notebook for the same folder/frontend pair.
            resp = self.request(
                path='/notebook', method='POST', user=self.user, params=params)
            self.assertStatus(resp, 200)
            self.assertEqual(resp.json['_id'], notebook['_id'])

            # Create 2nd user's nb
            params['folderId'] = str(publicFolder['_id'])
            resp = self.request(
                path='/notebook', method='POST', user=self.user, params=params)
            self.assertStatus(resp, 200)
            other_notebook = resp.json

            # Create admin nb
            params['folderId'] = str(publicFolder['_id'])
            resp = self.request(
                path='/notebook', method='POST', user=self.admin, params=params)
            self.assertStatus(resp, 200)
            admin_notebook = resp.json

    # By default user can list only his/her notebooks
    resp = self.request(path='/notebook', method='GET', user=self.user)
    self.assertStatus(resp, 200)
    self.assertEqual([_['_id'] for _ in resp.json],
                     [other_notebook['_id'], notebook['_id']])

    # Filter by folder
    resp = self.request(
        path='/notebook', method='GET', user=self.admin,
        params={'folderId': publicFolder['_id']})
    self.assertStatus(resp, 200)
    self.assertEqual([_['_id'] for _ in resp.json],
                     [admin_notebook['_id'], other_notebook['_id']])

    # Filter by folder and user
    resp = self.request(
        path='/notebook', method='GET', user=self.admin,
        params={'folderId': publicFolder['_id'], 'userId': self.user['_id']})
    self.assertStatus(resp, 200)
    self.assertEqual(resp.json[0]['_id'], other_notebook['_id'])

    # Get notebook by Id
    resp = self.request(path='/notebook/{_id}'.format(**notebook), method='GET')
    self.assertStatus(resp, 401)
    resp = self.request(
        path='/notebook/{_id}'.format(**admin_notebook), method='GET',
        user=self.user)
    self.assertStatus(resp, 403)
    resp = self.request(
        path='/notebook/{_id}'.format(**notebook), method='GET',
        user=self.admin)
    self.assertStatus(resp, 200)
    self.assertEqual(resp.json['_id'], notebook['_id'])

    with mock.patch('celery.Celery') as celeryMock:
        resp = self.request(
            path='/notebook/{_id}'.format(**admin_notebook), method='DELETE',
            user=self.user)
        self.assertStatus(resp, 403)
        resp = self.request(
            path='/notebook/{_id}'.format(**notebook), method='DELETE',
            user=self.admin)
        self.assertStatus(resp, 200)

        # Check if notebook is gone
        resp = self.request(
            path='/notebook/{_id}'.format(**notebook), method='GET',
            user=self.admin)
        self.assertStatus(resp, 400)
data-exp-lab/girder_ythub
[ 3, 9, 3, 3, 1460743185 ]
def test_nice_section(assert_lines):
    """Spot-check Section conveniences: defaults, owner, logging, workers."""
    assert_lines([
        'env = LANG=en_US.UTF-8',
        'workers = %k',
        'die-on-term = true',
        'vacuum = true',
        'threads = 4',
    ], Section(threads=4))

    assert_lines([
        'logto',
    ], Section(), assert_in=False)

    assert_lines([
        'enable-threads = true',
        'uid = www-data',
        'gid = www-data',
        'logto = /a/b.log',
    ], Section(threads=True, log_into='/a/b.log').configure_owner())

    assert_lines([
        'workers = 13',
        'touch-reload',
        'test_nice.py',
    ], Section(workers=13, touch_reload=__file__))

    assert_lines([
        'disable-write-exception = true',
        'ignore-write-errors = true',
        'ignore-sigpipe = true',
        'log-master = true',
        'threaded-logger = true',
    ], Section(log_dedicated=True, ignore_write_errors=True))

    assert '%(headers) headers in %(hsize) bytes' in Section().get_log_format_default()
idlesign/uwsgiconf
[ 75, 1, 75, 3, 1495944627 ]
def test_configure_https_redirect(assert_lines):
    """configure_https_redirect() adds the 301 route for non-HTTPS requests."""
    section = Section()
    section.configure_https_redirect()
    assert_lines(
        'route-if-not = eq:${HTTPS};on redirect-301:https://${HTTP_HOST}${REQUEST_URI}',
        section
    )
idlesign/uwsgiconf
[ 75, 1, 75, 3, 1495944627 ]
def test_configure_logging_json(assert_lines):
    """configure_logging_json() wires JSON encoders into request logging."""
    section = Section()
    section.configure_logging_json()
    assert_lines([
        'logger-req = stdio:',
        'log-format = %(method) %(uri) -> %(status)',
        'log-req-encoder = json {"dt": "${strftime:%%Y-%%m-%%dT%%H:%%M:%%S%%z}", "src": "uwsgi.req"',
        'log-req-encoder = nl',
        '"src": "uwsgi.out"',
    ], section)
idlesign/uwsgiconf
[ 75, 1, 75, 3, 1495944627 ]
def __init__(self, table, farms): self.table = table self.farms = farms
douban/douban-sqlstore
[ 31, 16, 31, 2, 1392172265 ]
def __init__(self, config):
    """Load configuration from a module object or a path to a settings file."""
    if isinstance(config, basestring):
        # A string is treated as a filesystem path to a Python settings file.
        config = imp.load_source('sqlstore_settings', config)
    self._default_params = config.default_params
    self.farms = config.farms
    self.configs = config.configs
douban/douban-sqlstore
[ 31, 16, 31, 2, 1392172265 ]
def get_tables(self, farm):
    """Return table names for *farm*: from config, else by asking the master."""
    configured = self.farms.get(farm, {}).get('tables')
    if configured:
        return configured
    dbcnf = self.get_sqlstore_dbcnf('%s_m' % farm)
    master = SqlFarm(dbcnf, connect_timeout=1)
    cursor = master.get_cursor()
    cursor.execute('show tables')
    return [row[0] for row in cursor.fetchall()]
douban/douban-sqlstore
[ 31, 16, 31, 2, 1392172265 ]
def gen_config(self, name, instances, extras=None, roles=None):
    """Assemble a sqlstore farm configuration dict for *instances*.

    Backward compatible: extras still defaults to {} and roles to
    ['m', 's', 'b'] — both used to be mutable default arguments, which are
    shared across calls; None sentinels avoid that pitfall.
    Returns None when any farm's table list cannot be resolved.
    """
    if extras is None:
        extras = {}
    if roles is None:
        roles = ['m', 's', 'b']
    conf = {
        'farms': {},
        'migration': {},
        'options': {},
    }
    all_tables = {}
    all_instances = set()
    for index, instance in enumerate(instances):
        if instance in all_instances:
            if verbose:
                print >>sys.stderr, 'duplicate instance:', instance
        all_instances.add(instance)
        try:
            # FIXME: roles override the roles param
            name, roles = instance.rsplit('_', 1)
        except ValueError:
            name = instance
            instance = '%s_m' % instance
        farm_name = '%s_farm' % name
        conf['farms'][farm_name] = {}
        role_names = {
            'm': 'master',
            's': 'slave',
            'b': 'backup',
        }
        for role in roles:
            instance = '%s_%s' % (name, role)
            dbcnf = self.get_sqlstore_dbcnf(instance)
            #TODO: len(roles) == 1 means non-algorithm configs
            if len(roles) == 1:
                role_name = 'master'
            else:
                role_name = role_names[role]
            conf['farms'][farm_name][role_name] = dbcnf
        tables = self.get_tables(name)
        if tables is None:
            return None
        for table in tables:
            if table in all_tables:
                farms = [farm_name, all_tables[table]]
                raise DuplicatedTable(table, farms)
            all_tables[table] = farm_name
        if index == 0:
            # The first farm is also the catch-all ('*') farm.
            tables.append('*')
        conf['farms'][farm_name]['tables'] = tables
    if verbose:
        print >>sys.stderr, 'done!'
    conf.update(extras)
    return conf
douban/douban-sqlstore
[ 31, 16, 31, 2, 1392172265 ]
def salt():
    """Collect SALT TCS/BMS weather readings into a dict; False on failure."""
    wx = {}
    try:
        tcs = parseICD("http://icd.salt/xml/salt-tcs-icd.xml")
        time = tcs['tcs xml time info']
        bms = tcs['bms external conditions']
        temps = bms['Temperatures']
        wx["Temp"] = median(array(temps.values()))
        wx["Temp 2m"] = temps["2m"]
        wx["Temp 30m"] = temps["30m"]
        # get time
        wx["SAST"] = time["SAST"].split()[1]
        wx["Date"] = time["SAST"].split()[0]
        # set up other values of interest
        wx["Air Pressure"] = bms["Air pressure"] * 10.0
        wx["Dewpoint"] = bms["Dewpoint"]
        wx["RH"] = bms["Rel Humidity"]
        wx["Wind Speed (30m)"] = bms["Wind mag 30m"] * 3.6
        wx["Wind Speed"] = bms["Wind mag 10m"] * 3.6
        wx["Wind Dir (30m)"] = bms["Wind dir 30m"]
        wx["Wind Dir"] = bms["Wind dir 10m"]
        wx["T - DP"] = wx["Temp 2m"] - bms["Dewpoint"]
        wx["Raining"] = bms["Rain detected"]
        return wx
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit escape;
        # any network/parse failure still reports "no weather" as before.
        return False
saltastro/timDIMM
[ 1, 3, 1, 1, 1303137751 ]
def grav():
    """Scrape the Sutherland weather pages (kan11/kan16) into a dict."""
    wx = {}
    parser = html5lib.HTMLParser(tree=treebuilders.getTreeBuilder("dom"))
    kan11 = parser.parse(urllib2.urlopen("http://sg1.suth/tmp/kan11.htm", timeout=1).read())
    kan16 = parser.parse(urllib2.urlopen("http://sg1.suth/tmp/kan16.htm", timeout=1).read())
    kan11_tds = kan11.getElementsByTagName("td")
    kan16_tds = kan16.getElementsByTagName("td")
    wx["Date"], wx["UT"] = kan11_tds[12].firstChild.nodeValue.split()
    kan11_tds[14].normalize()
    kan11_tds[15].normalize()
    wx["Temp"] = float(kan11_tds[14].firstChild.nodeValue)
    wx["RH"] = float(kan11_tds[15].firstChild.nodeValue)
    kan16_tds[13].normalize()
    kan16_tds[14].normalize()
    wx["Wind Dir"] = int(kan16_tds[13].firstChild.nodeValue)
    # * 3.6 — presumably m/s converted to km/h; TODO confirm against the page.
    wx["Wind Speed"] = float(kan16_tds[14].firstChild.nodeValue) * 3.6
    return wx
saltastro/timDIMM
[ 1, 3, 1, 1, 1303137751 ]
def make_tileid_list(fadir):
    """Scan *fadir* recursively for fiberassign files.

    Returns parallel numpy arrays (tile ids, matching file names).
    """
    pattern = re.compile(r'.*fiberassign-(\d+)\.fits(\.gz)?')
    existing_tileids = []
    existing_fafiles = []
    for fn in glob.glob(os.path.join(fadir, '**/*.fits*'), recursive=True):
        match = pattern.match(fn)
        if match:
            existing_tileids.append(int(match.group(1)))
            existing_fafiles.append(fn)
    return np.array(existing_tileids), np.array(existing_fafiles)
desihub/desisurvey
[ 2, 7, 2, 16, 1432315930 ]
def remove_tiles_from_dir(dirname, tileid):
    """Delete the fits.gz/png/log files for each tile id under *dirname*.

    Files live in a subdirectory named by the first 3 digits of the
    zero-padded tile id.
    """
    for one_tileid in tileid:
        expidstr = '{:06d}'.format(one_tileid)
        subdir = os.path.join(dirname, expidstr[:3])
        for ext in ['fits.gz', 'png', 'log']:
            os.remove(os.path.join(
                subdir, 'fiberassign-{}.{}'.format(expidstr, ext)))
desihub/desisurvey
[ 2, 7, 2, 16, 1432315930 ]
def get_untracked_fnames(svn):
    """Return filenames `svn status` reports as untracked ('?')."""
    res = subprocess.run(['svn', 'status', svn], capture_output=True)
    fnames = []
    for line in res.stdout.decode('utf8').split('\n'):
        if not line:
            continue
        if line[0] != '?':
            print('unrecognized line: "{}", ignoring.'.format(line))
            continue
        # new file. We need to check it in or delete it.
        fnames.append(line[8:])
    return fnames
desihub/desisurvey
[ 2, 7, 2, 16, 1432315930 ]
def execute_svn_maintenance(todelete, tocommit, echo=False, svnrm=False):
    """Remove *todelete* (via svn rm or os.remove) and `svn add` *tocommit*.

    With echo=True the svn commands are only echoed and removals are printed
    instead of executed.
    """
    cmd = ['echo', 'svn'] if echo else ['svn']
    for fname in todelete:
        if svnrm:
            subprocess.run(cmd + ['rm', fname])
        elif echo:
            print('removing ', fname)
        else:
            os.remove(fname)
    for fname in tocommit:
        subprocess.run(cmd + ['add', fname])
desihub/desisurvey
[ 2, 7, 2, 16, 1432315930 ]
def generate_subid(self, token=None, return_user=False):
    """generate a new user in the database, still session based so we
    create a new identifier.
    """
    from expfactory.database.models import Participant

    p = Participant(token=token) if token else Participant()
    self.session.add(p)
    self.session.commit()
    return p if return_user is True else p.id
expfactory/expfactory
[ 33, 13, 33, 22, 1509213926 ]
def list_users(self, user=None):
    """list users, each having a model in the database. A headless experiment
    will use protected tokens, and interactive will be based on auto-
    incremented ids.
    """
    from expfactory.database.models import Participant

    # Loop variable renamed: the original loop shadowed the `user` parameter.
    return [self.print_user(participant)
            for participant in Participant.query.all()]
expfactory/expfactory
[ 33, 13, 33, 22, 1509213926 ]
def generate_user(self):
    """generate a new user in the database, still session based so we
    create a new identifier. This function is called from the users new
    entrypoint, and it assumes we want a user generated with a token.
    """
    return self.generate_subid(token=str(uuid.uuid4()), return_user=True)
expfactory/expfactory
[ 33, 13, 33, 22, 1509213926 ]
def restart_user(self, subid):
    """restart a user, which means revoking and issuing a new token."""
    self.revoke_token(subid)
    return self.refresh_token(subid)
expfactory/expfactory
[ 33, 13, 33, 22, 1509213926 ]
def validate_token(self, token):
    """retrieve a subject based on a token. Valid means we return a participant
    invalid means we return None
    """
    from expfactory.database.models import Participant

    p = Participant.query.filter(Participant.token == token).first()
    if p is None:
        return None
    # Tokens marked finished/revoked are no longer valid.
    if p.token.endswith(("finished", "revoked")):
        return None
    return p.id
expfactory/expfactory
[ 33, 13, 33, 22, 1509213926 ]
def refresh_token(self, subid):
    """refresh or generate a new token for a user"""
    from expfactory.database.models import Participant

    p = Participant.query.filter(Participant.id == subid).first()
    if p is not None:
        p.token = str(uuid.uuid4())
        self.session.commit()
    return p
expfactory/expfactory
[ 33, 13, 33, 22, 1509213926 ]
def get_title(self):
    """Title naming the current school year."""
    return _("Alternating in school year {}").format(self.request.school_year)
leprikon-cz/leprikon
[ 6, 1, 6, 29, 1458950472 ]
def get_queryset(self):
    """Non-staff users only see subjects they lead."""
    qs = super().get_queryset()
    if self.request.user.is_staff:
        return qs
    return qs.filter(leaders=self.request.leader)
leprikon-cz/leprikon
[ 6, 1, 6, 29, 1458950472 ]
def dispatch(self, request, subject):
    """Resolve the target subject and success URL, then delegate.

    Non-staff users may only access subjects they lead; others get 404.
    """
    lookup = {"id": subject}
    if not self.request.user.is_staff:
        lookup["leaders"] = self.request.leader
    self.subject = get_object_or_404(Subject, **lookup)
    self.success_url = reverse(
        "leprikon:subject_journals",
        args=(self.subject.subject_type.slug, self.subject.id),
    )
    return super().dispatch(request)
leprikon-cz/leprikon
[ 6, 1, 6, 29, 1458950472 ]
def get_message(self):
    # Flash message shown after the journal has been created.
    message = _("New journal {} has been created.")
    return message.format(self.object)
leprikon-cz/leprikon
[ 6, 1, 6, 29, 1458950472 ]
def get_queryset(self):
    """Restrict to journals whose subject the leader leads (staff see all)."""
    qs = super().get_queryset()
    if self.request.user.is_staff:
        return qs
    return qs.filter(subject__leaders=self.request.leader)
leprikon-cz/leprikon
[ 6, 1, 6, 29, 1458950472 ]
def get_question(self):
    # Confirmation prompt shown before deleting the journal.
    question = _("Do You really want to delete the journal {}?")
    return question.format(self.object)
leprikon-cz/leprikon
[ 6, 1, 6, 29, 1458950472 ]
def dispatch(self, request, *args, **kwargs):
    """Resolve the journal from the URL kwargs, then delegate.

    Staff may open any journal; leaders only journals they lead.
    """
    lookup = {"id": int(kwargs.pop("journal"))}
    if not self.request.user.is_staff:
        lookup["leaders"] = self.request.leader
    self.journal = get_object_or_404(Journal, **lookup)
    return super().dispatch(request, *args, **kwargs)
leprikon-cz/leprikon
[ 6, 1, 6, 29, 1458950472 ]
def get_object(self):
    """Return the object for staff, journal leaders, or alternates; else 404."""
    obj = super().get_object()
    allowed = (
        self.request.user.is_staff
        or self.request.leader in obj.journal.all_leaders + obj.all_alternates
    )
    if not allowed:
        raise Http404()
    return obj
leprikon-cz/leprikon
[ 6, 1, 6, 29, 1458950472 ]
def get_queryset(self):
    """Restrict to entries of journals led by the current leader (staff see all)."""
    qs = super().get_queryset()
    if self.request.user.is_staff:
        return qs
    return qs.filter(journal__leaders=self.request.leader)
leprikon-cz/leprikon
[ 6, 1, 6, 29, 1458950472 ]
def get_question(self):
    # Confirmation prompt shown before deleting a journal entry.
    question = _("Do You really want to delete journal entry?")
    return question
leprikon-cz/leprikon
[ 6, 1, 6, 29, 1458950472 ]
def get_object(self):
    """Return the entry when the requester may access it, else 404.

    Allowed: staff, the timesheet's own leader, or any leader of the
    journal the entry belongs to.
    """
    obj = super().get_object()
    if self.request.user.is_staff:
        return obj
    if obj.timesheet.leader == self.request.leader:
        return obj
    if self.request.leader in obj.journal_entry.journal.all_leaders:
        return obj
    raise Http404()
leprikon-cz/leprikon
[ 6, 1, 6, 29, 1458950472 ]
def get_queryset(self):
    """Only unsubmitted timesheet entries belonging to the current leader."""
    qs = super().get_queryset()
    return qs.filter(
        timesheet__leader=self.request.leader,
        timesheet__submitted=False,
    )
leprikon-cz/leprikon
[ 6, 1, 6, 29, 1458950472 ]
def itersplit(s, sep=None):
    """Lazily split *s* on *sep*, yielding pieces like ``str.split``.

    With ``sep=None`` any run of whitespace acts as one separator and
    empty pieces are suppressed; with an explicit separator, empty
    pieces are preserved. An empty input yields itself once.
    """
    if not s:
        yield s
        return
    pattern = re.compile(r'\s+' if sep is None else re.escape(sep))
    keep_empty = sep is not None  # str.split-style behavior switch
    pos = 0
    while True:
        match = pattern.search(s, pos)
        if match is None:
            # Tail piece: always emitted for explicit separators,
            # only when non-empty for whitespace splitting.
            if keep_empty or pos < len(s):
                yield s[pos:]
            return
        if keep_empty or pos < match.start():
            yield s[pos:match.start()]
        pos = match.end()
westurner/pkgsetcomp
[ 1, 1, 1, 1, 1400665453 ]
def itersplit_to_fields(_str, fsep=DEFAULT_FSEP, revtuple=None, fields=(), preparse=None):
    """Split *_str* on *fsep* and pair the pieces with field names.

    Parameters
    ----------
    _str : str
        Line to split.
    fsep : str
        Field separator handed to :func:`itersplit`.
    revtuple : namedtuple class, optional
        When given, the split values are packed into an instance of it;
        missing trailing values become None.
    fields : sequence, optional
        Field names used when *revtuple* is absent. Default changed
        from ``[]`` to ``()`` to avoid the mutable-default pitfall
        (behavior is identical: it is only iterated).
    preparse : callable, optional
        Applied to *_str* before splitting.

    Returns
    -------
    revtuple instance, or a tuple of ``(name, value)`` pairs padded
    with None on the shorter side.
    """
    if preparse:
        _str = preparse(_str)
    _fields = itersplit(_str, fsep)
    if revtuple is not None:
        # Catch Exception rather than a bare except so SystemExit /
        # KeyboardInterrupt are never intercepted on the way out.
        try:
            values = (t[1] for t in izip_longest(revtuple._fields, _fields))
            return revtuple(*values)
        except Exception:
            log.error(revtuple)
            log.error(_fields)
            raise
    return tuple(izip_longest(fields, _fields, fillvalue=None))
westurner/pkgsetcomp
[ 1, 1, 1, 1, 1400665453 ]
def foo(self):
    """Stand-in for an expensive computation; always yields 42."""
    answer = 42
    return answer
westurner/pkgsetcomp
[ 1, 1, 1, 1, 1400665453 ]
def __init__(self, func, name=None, doc=None):
    """Wrap *func*, copying its metadata onto this descriptor.

    Parameters
    ----------
    func : callable
        Function whose result will be served by the descriptor.
    name : str, optional
        Attribute name override (defaults to ``func.__name__``).
    doc : str, optional
        Docstring override (defaults to ``func.__doc__``).
    """
    self.func = func
    self.__name__ = name if name else func.__name__
    self.__doc__ = doc if doc else func.__doc__
    self.__module__ = func.__module__
westurner/pkgsetcomp
[ 1, 1, 1, 1, 1400665453 ]
def sh(cmd, ignore_error=False, cwd=None, *args, **kwargs):
    """Run *cmd* through the shell and return its captured output.

    Parameters
    ----------
    cmd : str
        Shell command line to execute.
    ignore_error : bool
        When False, a nonzero exit status raises an Exception.
    cwd : str, optional
        Working directory for the subprocess.

    Returns
    -------
    The command's stdout (stderr is merged into it).

    Raises
    ------
    Exception
        On a nonzero return code, unless *ignore_error* is set.
    """
    # Force shell execution with merged stderr and captured stdout,
    # overriding any conflicting keys the caller supplied.
    kwargs.update({
        'shell': True,
        'cwd': cwd,
        'stderr': subprocess.STDOUT,
        'stdout': subprocess.PIPE,
    })
    log.debug('cmd: %s %s' % (cmd, kwargs))
    process = subprocess.Popen(cmd, **kwargs)
    output = process.communicate()[0]
    if process.returncode and not ignore_error:
        raise Exception("Subprocess return code: %d\n%r\n%r" % (
            process.returncode, cmd, output))
    return output
westurner/pkgsetcomp
[ 1, 1, 1, 1, 1400665453 ]
def __init__(self, fpath):
    """Store the absolutized *fpath* and an empty symlink list."""
    self.symlinks = []
    self.fpath = os.path.abspath(fpath)
westurner/pkgsetcomp
[ 1, 1, 1, 1, 1400665453 ]
def relpath(self):
    """Path of ``self.fpath`` relative to the current working directory."""
    here = os.path.abspath(os.path.curdir)
    return os.path.relpath(self.fpath, start=here)
westurner/pkgsetcomp
[ 1, 1, 1, 1, 1400665453 ]