function stringlengths 11 56k | repo_name stringlengths 5 60 | features list |
|---|---|---|
def test_shakearound_user_shake_event(self):
    """Parse a ShakearoundUserShake push XML and check the decoded beacons.

    Verifies event type, the chosen_beacon dict fields, and that both
    entries under <AroundBeacons> are collected.
    """
    from wechatpy.events import ShakearoundUserShakeEvent
    xml = """<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[fromUser]]></FromUserName>
<CreateTime>1433332012</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[ShakearoundUserShake]]></Event>
<ChosenBeacon>
<Uuid><![CDATA[uuid]]></Uuid>
<Major>major</Major>
<Minor>minor</Minor>
<Distance>0.057</Distance>
</ChosenBeacon>
<AroundBeacons>
<AroundBeacon>
<Uuid><![CDATA[uuid]]></Uuid>
<Major>major</Major>
<Minor>minor</Minor>
<Distance>166.816</Distance>
</AroundBeacon>
<AroundBeacon>
<Uuid><![CDATA[uuid]]></Uuid>
<Major>major</Major>
<Minor>minor</Minor>
<Distance>15.013</Distance>
</AroundBeacon>
</AroundBeacons>
</xml>"""
    event = parse_message(xml)
    self.assertTrue(isinstance(event, ShakearoundUserShakeEvent))
    # Distance is parsed to float; uuid/major/minor stay as strings.
    chosen_beacon = {
        "uuid": "uuid",
        "major": "major",
        "minor": "minor",
        "distance": 0.057,
    }
    self.assertEqual(chosen_beacon, event.chosen_beacon)
    self.assertEqual(2, len(event.around_beacons))
3364,
745,
3364,
44,
1410527008
] |
def test_qualification_verify_success_event(self):
    """Parse a qualification_verify_success event; ExpiredTime → datetime."""
    from wechatpy.events import QualificationVerifySuccessEvent
    xml = """
<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[FromUser]]></FromUserName>
<CreateTime>1442401156</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[qualification_verify_success]]></Event>
<ExpiredTime>1442401156</ExpiredTime>
</xml>"""
    event = parse_message(xml)
    self.assertTrue(isinstance(event, QualificationVerifySuccessEvent))
    # The epoch-seconds ExpiredTime must be converted to a datetime object.
    self.assertTrue(isinstance(event.expired_time, datetime))
3364,
745,
3364,
44,
1410527008
] |
def test_naming_verify_success_event(self):
    """Parse a naming_verify_success event; ExpiredTime → datetime."""
    from wechatpy.events import NamingVerifySuccessEvent
    xml = """
<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[FromUser]]></FromUserName>
<CreateTime>1442401093</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[naming_verify_success]]></Event>
<ExpiredTime>1442401093</ExpiredTime>
</xml>"""
    event = parse_message(xml)
    self.assertTrue(isinstance(event, NamingVerifySuccessEvent))
    self.assertTrue(isinstance(event.expired_time, datetime))
3364,
745,
3364,
44,
1410527008
] |
def test_annual_renew_event(self):
    """Parse an annual_renew event; ExpiredTime → datetime."""
    from wechatpy.events import AnnualRenewEvent
    xml = """
<xml>
<ToUserName><![CDATA[toUser]]></ToUserName>
<FromUserName><![CDATA[FromUser]]></FromUserName>
<CreateTime>1442401004</CreateTime>
<MsgType><![CDATA[event]]></MsgType>
<Event><![CDATA[annual_renew]]></Event>
<ExpiredTime>1442401004</ExpiredTime>
</xml>"""
    event = parse_message(xml)
    self.assertTrue(isinstance(event, AnnualRenewEvent))
    self.assertTrue(isinstance(event.expired_time, datetime))
3364,
745,
3364,
44,
1410527008
] |
def __init__(self, logdir):
    """Set up Beholder state under <logdir>/plugins/<PLUGIN_NAME>.

    Builds the video writer, the TF summary graph pieces used to dump
    frames, and seeds a default config file on first run.
    """
    self.PLUGIN_LOGDIR = logdir + '/plugins/' + PLUGIN_NAME
    self.is_recording = False
    self.video_writer = video_writing.VideoWriter(
        self.PLUGIN_LOGDIR,
        outputs=[
            video_writing.FFmpegVideoOutput,
            video_writing.PNGVideoOutput])
    # One summary op reused for every frame; the placeholder is fed per update.
    self.frame_placeholder = tf.placeholder(tf.uint8, [None, None, None])
    self.summary_op = tf.summary.tensor_summary(TAG_NAME,
                                                self.frame_placeholder)
    self.last_image_shape = []
    self.last_update_time = time.time()
    self.config_last_modified_time = -1
    self.previous_config = dict(DEFAULT_CONFIG)
    # NOTE(review): the existence check hard-codes 'config.pkl' while the
    # write uses CONFIG_FILENAME — presumably the same name; confirm.
    if not tf.gfile.Exists(self.PLUGIN_LOGDIR + '/config.pkl'):
        tf.gfile.MakeDirs(self.PLUGIN_LOGDIR)
        write_pickle(DEFAULT_CONFIG, '{}/{}'.format(self.PLUGIN_LOGDIR,
                                                    CONFIG_FILENAME))
    self.visualizer = Visualizer(self.PLUGIN_LOGDIR)
1086,
234,
1086,
13,
1476901359
] |
def _write_summary(self, session, frame):
    '''Writes the frame to disk as a tensor summary.

    Runs the pre-built summary op with the frame fed into the
    placeholder, then writes the serialized summary to the plugin logdir.
    '''
    summary = session.run(self.summary_op, feed_dict={
        self.frame_placeholder: frame
    })
    path = '{}/{}'.format(self.PLUGIN_LOGDIR, SUMMARY_FILENAME)
    write_file(summary, path)
1086,
234,
1086,
13,
1476901359
] |
def _enough_time_has_passed(self, FPS):
'''For limiting how often frames are computed.'''
if FPS == 0:
return False
else:
earliest_time = self.last_update_time + (1.0 / FPS)
return time.time() >= earliest_time | ryfeus/lambda-packs | [
1086,
234,
1086,
13,
1476901359
] |
def _update_recording(self, frame, config):
    '''Adds a frame to the current video output.

    Starts a recording session when the config flips is_recording on,
    and finalizes the writer when it flips off.
    '''
    # pylint: disable=redefined-variable-type
    should_record = config['is_recording']
    if should_record:
        if not self.is_recording:
            self.is_recording = True
            tf.logging.info(
                'Starting recording using %s',
                self.video_writer.current_output().name())
        self.video_writer.write_frame(frame)
    elif self.is_recording:
        # Config turned recording off while we were recording: close out.
        self.is_recording = False
        self.video_writer.finish()
        tf.logging.info('Finished recording')
1086,
234,
1086,
13,
1476901359
] |
def update(self, session, arrays=None, frame=None):
    '''Creates a frame and writes it to disk.

    Args:
      arrays: a list of np arrays. Use the "custom" option in the client.
      frame: a 2D np array. This way the plugin can be used for video of any
        kind, not just the visualization that comes with the plugin.
        frame can also be a function, which only is evaluated when the
        "frame" option is selected by the client.
    '''
    new_config = self._get_config()
    # Rate-limit with the *previous* config's FPS, then apply the new one.
    if self._enough_time_has_passed(self.previous_config['FPS']):
        self.visualizer.update(new_config)
        self.last_update_time = time.time()
        final_image = self._update_frame(session, arrays, frame, new_config)
        self._update_recording(final_image, new_config)
1086,
234,
1086,
13,
1476901359
] |
def gradient_helper(optimizer, loss, var_list=None):
    '''A helper to get the gradients out at each step.

    Args:
      optimizer: the optimizer op.
      loss: the op that computes your loss value.
      var_list: optional list of variables to differentiate against;
        defaults to all trainable variables.

    Returns: the gradient tensors and the train_step op.
    '''
    if var_list is None:
        var_list = tf.trainable_variables()
    grads_and_vars = optimizer.compute_gradients(loss, var_list=var_list)
    # compute_gradients yields (gradient, variable) pairs; expose gradients.
    grads = [pair[0] for pair in grads_and_vars]
    return grads, optimizer.apply_gradients(grads_and_vars)
1086,
234,
1086,
13,
1476901359
] |
def __init__(self, logdir):
    """Creates new Hook instance

    Args:
      logdir: Directory where Beholder should write data.
    """
    self._logdir = logdir
    # Beholder is created lazily; None until first use.
    self.beholder = None
1086,
234,
1086,
13,
1476901359
] |
def add_project_member(apps, schema_editor):
    """Backfill PublicDataAccess.project_membership from data_source labels.

    RunPython migration helper: for each PublicDataAccess missing a
    project_membership, parse its 'direct-sharing-<id>' data_source into a
    DataRequestProject and link the matching DataRequestProjectMember.
    """
    # Using historical versions as recommended for RunPython
    PublicDataAccess = apps.get_model("public_data", "PublicDataAccess")
    DataRequestProjectMember = apps.get_model(
        "private_sharing", "DataRequestProjectMember"
    )
    DataRequestProject = apps.get_model("private_sharing", "DataRequestProject")
    db_alias = schema_editor.connection.alias

    def id_label_to_project(id_label):
        # 'direct-sharing-<id>' → DataRequestProject; returns None on no match.
        match = re.match(r"direct-sharing-(?P<id>\d+)", id_label)
        if match:
            project = DataRequestProject.objects.using(db_alias).get(
                id=int(match.group("id"))
            )
            return project

    for pda in PublicDataAccess.objects.using(db_alias).filter(project_membership=None):
        project = id_label_to_project(pda.data_source)
        drpm = DataRequestProjectMember.objects.using(db_alias).get(
            project=project, member=pda.participant.member
        )
        pda.project_membership = drpm
        pda.save()
67,
22,
67,
87,
1405539847
] |
def get_country_info(country=None):
    """Return the settings dict for *country*, filling in missing
    date/time format defaults."""
    info = frappe._dict(get_all().get(country, {}))
    fallbacks = (("date_format", "dd-mm-yyyy"), ("time_format", "HH:mm:ss"))
    for key, default in fallbacks:
        if key not in info:
            info[key] = default
    return info
4495,
2418,
4495,
1493,
1307520856
] |
def get_country_timezone_info():
    """Return all country settings together with the known timezone names."""
    result = {}
    result["country_info"] = get_all()
    result["all_timezones"] = get_all_timezones()
    return result
4495,
2418,
4495,
1493,
1307520856
] |
def setUp(self):
    # No fixtures needed for these generated-model tests.
    pass
5,
4,
5,
1,
1436120517
] |
def testTask(self):
    """Test Task"""
    # FIXME: construct object with mandatory attributes with example values
    # model = swagger_client.models.task.Task()  # noqa: E501
    pass
5,
4,
5,
1,
1436120517
] |
def __init__(self):
    # Each instance owns its own XML-RPC proxy to the rtorrent daemon.
    self.proxy = rtorrent.Proxy()
8,
2,
8,
11,
1426291073
] |
def test_accounts_search_permission(app, test_users, test_community,
                                    login_user):
    """Test permission of listing user accounts."""
    def account_search(user, expected_code):
        # GET the users listing as *user* and assert the HTTP status.
        headers = [('Content-Type', 'application/json'),
                   ('Accept', 'application/json')]
        with app.app_context():
            url = url_for('invenio_accounts_rest.users_list')
            if user:
                # Issue a personal OAuth token carrying every scope.
                scopes = current_oauth2server.scope_choices()
                allowed_token = Token.create_personal(
                    'allowed_token', user.id,
                    scopes=[s[0] for s in scopes]
                )
                # application authentication token header
                headers.append(('Authorization',
                                'Bearer {}'.format(allowed_token.access_token)))
            with app.test_client() as client:
                if user is not None:
                    login_user(user, client)
                res = client.get(url, headers=headers)
                assert res.status_code == expected_code
    # anonymous users can't list accounts
    account_search(None, 401)
    # authenticated users can't list other users' account
    account_search(test_users['normal'], 403)
    # community members cannot list all users' accounts
    account_search(test_community.member, 403)
    # community admins can list all users
    account_search(test_community.admin, 200)
    # admin is allowed to list all accounts
    account_search(test_users['admin'], 200)
32,
30,
32,
223,
1359562894
] |
def account_read(user, expected_code):
    # GET `url` (closure variable) as *user* and assert the HTTP status.
    # `headers` also comes from the enclosing test's scope.
    with app.test_client() as client:
        if user is not None:
            login_user(user, client)
        res = client.get(url, headers=headers)
        assert res.status_code == expected_code
32,
30,
32,
223,
1359562894
] |
def test_account_activation_permission(app, test_users, test_community,
                                       login_user):
    """Test deactivating a user account."""
    # Mutable counter so nested closures can allocate unique usernames.
    counter = [0]
    def account_update(user, expected_code, modified_user=None):
        def account_update_sub(patch_content, content_type):
            # PATCH a (fresh or given) account as *user*; assert the status.
            with app.app_context():
                if modified_user is None:
                    test_user = create_user(
                        'test_user{}'.format(counter[0]))
                else:
                    test_user = modified_user
                counter[0] += 1
                url = url_for(
                    'invenio_accounts_rest.user',
                    user_id=test_user.id,
                )
                db.session.commit()
                headers = [('Content-Type', content_type),
                           ('Accept', 'application/json')]
                with app.test_client() as client:
                    if user is not None:
                        login_user(user, client)
                    res = client.patch(url, headers=headers,
                                       data=json.dumps(patch_content))
                    assert res.status_code == expected_code
        # test with a simple JSON
        account_update_sub({'active': False}, 'application/json')
        # test with a JSON patch
        account_update_sub([{
            'op': 'replace', 'path': '/active','value': False
        }], 'application/json-patch+json')
    # anonymous users can't activate/deactivate accounts
    account_update(None, 401)
    # authenticated users can't activate/deactivate other users' account
    account_update(test_users['normal'], 403)
    # users can't deactivate their own accounts
    account_update(test_users['normal'], 403, test_users['normal'])
    # admin is allowed to activate/deactivate accounts
    account_update(test_users['admin'], 200)
32,
30,
32,
223,
1359562894
] |
def roles_read(user, expected_code):
    # GET `url` (closure variable) as *user* and assert the HTTP status.
    # `headers` also comes from the enclosing test's scope.
    with app.test_client() as client:
        if user is not None:
            login_user(user, client)
        res = client.get(url, headers=headers)
        assert res.status_code == expected_code
32,
30,
32,
223,
1359562894
] |
def __init__(self, iface):
    """Build the gdal_grid tool widget and wire its signals (old-style
    PyQt SIGNAL/connect API)."""
    QWidget.__init__(self)
    self.iface = iface
    # Algorithm names map 1:1 onto stackedWidget pages and GDAL '-a' values.
    self.algorithm = ('invdist', 'average', 'nearest', 'datametrics')
    self.datametrics = ('minimum', 'maximum', 'range')
    self.setupUi(self)
    BasePluginWidget.__init__(self, self.iface, "gdal_grid")
    # set the default QSpinBoxes value
    self.invdistPowerSpin.setValue(2.0)
    self.outputFormat = Utils.fillRasterOutputFormat()
    self.lastEncoding = Utils.getLastUsedEncoding()
    # Re-run parameter validation whenever any of these widgets change.
    self.setParamsStatus(
        [
            (self.inputLayerCombo, [SIGNAL("currentIndexChanged(int)"), SIGNAL("editTextChanged(const QString &)")] ),
            (self.outputFileEdit, SIGNAL("textChanged(const QString &)")),
            (self.zfieldCombo, SIGNAL("currentIndexChanged(int)"), self.zfieldCheck),
            (self.algorithmCombo, SIGNAL("currentIndexChanged(int)"), self.algorithmCheck),
            (self.stackedWidget, SIGNAL("currentChanged(int)"), self.algorithmCheck),
            ([self.invdistPowerSpin, self.invdistSmothingSpin, self.invdistRadius1Spin, self.invdistRadius2Spin, self.invdistAngleSpin, self.invdistNoDataSpin], SIGNAL("valueChanged(double)")),
            ([self.invdistMaxPointsSpin, self.invdistMinPointsSpin], SIGNAL("valueChanged(int)")),
            ([self.averageRadius1Spin, self.averageRadius2Spin, self.averageAngleSpin, self.averageNoDataSpin], SIGNAL("valueChanged(double)")),
            (self.averageMinPointsSpin, SIGNAL("valueChanged(int)")),
            ([self.nearestRadius1Spin, self.nearestRadius2Spin, self.nearestAngleSpin, self.nearestNoDataSpin], SIGNAL("valueChanged(double)")),
            (self.datametricsCombo, SIGNAL("currentIndexChanged(int)")),
            ([self.datametricsRadius1Spin, self.datametricsRadius2Spin, self.datametricsAngleSpin, self.datametricsNoDataSpin], SIGNAL("valueChanged(double)")),
            (self.datametricsMinPointsSpin, SIGNAL("valueChanged(int)"))
        ]
    )
    self.connect(self.selectInputFileButton, SIGNAL("clicked()"), self.fillInputFileEdit)
    self.connect(self.selectOutputFileButton, SIGNAL("clicked()"), self.fillOutputFileEdit)
    self.connect(self.inputLayerCombo, SIGNAL("currentIndexChanged(int)"), self.fillFieldsCombo)
    # fill layers combo
    self.fillInputLayerCombo()
11,
3,
11,
1,
1278687507
] |
def fillFieldsCombo(self):
    """Reload the z-field combo from the currently selected input layer."""
    index = self.inputLayerCombo.currentIndex()
    if index < 0:
        # No layer selected (e.g. user typed a path); nothing to load.
        return
    self.lastEncoding = self.layers[index].dataProvider().encoding()
    self.loadFields( self.getInputFileName() )
11,
3,
11,
1,
1278687507
] |
def fillInputFileEdit(self):
    """Open a vector-file dialog and install the chosen file as input."""
    lastUsedFilter = Utils.FileFilter.lastUsedVectorFilter()
    inputFile, encoding = Utils.FileDialog.getOpenFileName(self, self.tr( "Select the input file for Grid" ), Utils.FileFilter.allVectorsFilter(), lastUsedFilter, True)
    if inputFile.isEmpty():
        # Dialog cancelled.
        return
    Utils.FileFilter.setLastUsedVectorFilter(lastUsedFilter)
    # -1 deselects any loaded layer so the free-text path takes effect.
    self.inputLayerCombo.setCurrentIndex(-1)
    self.inputLayerCombo.setEditText(inputFile)
    self.lastEncoding = encoding
    self.loadFields( inputFile )
11,
3,
11,
1,
1278687507
] |
def getArguments(self):
    """Assemble the gdal_grid command-line argument list from dialog state."""
    arguments = QStringList()
    if self.zfieldCheck.isChecked() and self.zfieldCombo.currentIndex() >= 0:
        arguments << "-zfield"
        arguments << self.zfieldCombo.currentText()
    # Layer name: from the selected loaded layer, else from the typed path.
    if self.inputLayerCombo.currentIndex() >= 0:
        arguments << "-l"
        arguments << QFileInfo(self.layers[ self.inputLayerCombo.currentIndex() ].source()).baseName()
    elif not self.inputLayerCombo.currentText().isEmpty():
        arguments << "-l"
        arguments << QFileInfo(self.inputLayerCombo.currentText()).baseName()
    if self.algorithmCheck.isChecked() and self.algorithmCombo.currentIndex() >= 0:
        arguments << "-a"
        arguments << self.algorithmArguments(self.algorithmCombo.currentIndex())
    if not self.outputFileEdit.text().isEmpty():
        arguments << "-of"
        arguments << self.outputFormat
    # Positional args: input source then destination file.
    arguments << self.getInputFileName()
    arguments << self.outputFileEdit.text()
    return arguments
11,
3,
11,
1,
1278687507
] |
def getOutputFileName(self):
    """Return the destination raster path as typed in the output edit."""
    return self.outputFileEdit.text()
11,
3,
11,
1,
1278687507
] |
def algorithmArguments(self, index):
    """Build the colon-joined gdal_grid '-a' algorithm spec for *index*.

    index selects into self.algorithm; for 'datametrics' the concrete
    metric comes from the datametrics combo instead.
    """
    algorithm = self.algorithm[index]
    arguments = QStringList()
    if algorithm == "invdist":
        arguments.append(algorithm)
        arguments.append("power=" + str(self.invdistPowerSpin.value()))
        # Bug fix: GDAL's invdist parameter is "smoothing"; the previous
        # "smothing" spelling is not recognized by gdal_grid.
        arguments.append("smoothing=" + str(self.invdistSmothingSpin.value()))
        arguments.append("radius1=" + str(self.invdistRadius1Spin.value()))
        arguments.append("radius2=" + str(self.invdistRadius2Spin.value()))
        arguments.append("angle=" + str(self.invdistAngleSpin.value()))
        arguments.append("max_points=" + str(self.invdistMaxPointsSpin.value()))
        arguments.append("min_points=" + str(self.invdistMinPointsSpin.value()))
        arguments.append("nodata=" + str(self.invdistNoDataSpin.value()))
    elif algorithm == "average":
        arguments.append(algorithm)
        arguments.append("radius1=" + str(self.averageRadius1Spin.value()))
        arguments.append("radius2=" + str(self.averageRadius2Spin.value()))
        arguments.append("angle=" + str(self.averageAngleSpin.value()))
        arguments.append("min_points=" + str(self.averageMinPointsSpin.value()))
        arguments.append("nodata=" + str(self.averageNoDataSpin.value()))
    elif algorithm == "nearest":
        arguments.append(algorithm)
        arguments.append("radius1=" + str(self.nearestRadius1Spin.value()))
        arguments.append("radius2=" + str(self.nearestRadius2Spin.value()))
        arguments.append("angle=" + str(self.nearestAngleSpin.value()))
        arguments.append("nodata=" + str(self.nearestNoDataSpin.value()))
    else:
        # Data-metrics family: algorithm name is the chosen metric itself.
        arguments.append(self.datametrics[self.datametricsCombo.currentIndex()])
        arguments.append("radius1=" + str(self.datametricsRadius1Spin.value()))
        arguments.append("radius2=" + str(self.datametricsRadius2Spin.value()))
        arguments.append("angle=" + str(self.datametricsAngleSpin.value()))
        arguments.append("min_points=" + str(self.datametricsMinPointsSpin.value()))
        arguments.append("nodata=" + str(self.datametricsNoDataSpin.value()))
    return arguments.join(":")
11,
3,
11,
1,
1278687507
] |
def print_memory_access( type, address, data):
    """Print one memory access: kind, address and data in hex/dec, plus the
    data byte rendered as ASCII when printable (0x20..0x7E), else '.'.

    NOTE(review): the `type` parameter shadows the builtin; kept for
    caller compatibility.
    """
    ch = '%s' % chr(data) if ( 0x1F < data < 0x7F) else '.'
    print( "%5s: Address : 0x%04x (%5d) : Data : 0x%04x (%5d) %s" % (type,address,address,data,data,ch))
74,
10,
74,
3,
1490822441
] |
def __init__(self, *args, **kw):
    """Initialize the field set; total size comes from the res_space
    field, converted from bytes to bits (hachoir sizes are in bits)."""
    FieldSet.__init__(self, *args, **kw)
    self._size = self["res_space"].value * 8
574,
83,
574,
2,
1415773777
] |
def validate(self):
    """Validate the file header.

    Returns True when both the magic number and the recorded file size
    (stream size is in bits, hence // 8) match; otherwise an error string.
    """
    if self["magic"].value != 0x00035F3F:
        return "Invalid magic"
    if self["filesize"].value != self.stream.size // 8:
        # Bug fix: this branch checks the size field, but previously
        # returned the copy-pasted message "Invalid magic".
        return "Invalid file size"
    return True
574,
83,
574,
2,
1415773777
] |
def __init__(self,bot):
    # Keep a reference to the Red bot instance for command handlers.
    self.bot = bot
13,
10,
13,
2,
1461776634
] |
def check_folders():
    """Create the cog's data directory (module-level DIR_DATA) if missing."""
    if not os.path.exists(DIR_DATA):
        print("Creating {} folder...".format(DIR_DATA))
        os.makedirs(DIR_DATA)
13,
10,
13,
2,
1461776634
] |
def __init__(self, m):
    # Store the message payload for later retrieval.
    self.message = m
13,
10,
13,
2,
1461776634
] |
def setup(bot):
    """Cog entry point; declares lazily-imported optional dependencies as
    module globals (geotiler, Wand imaging names, BeautifulSoup)."""
    global geotiler
    global Color, Drawing, display, Image, Color, Image, COMPOSITE_OPERATORS
    global BeautifulSoup
13,
10,
13,
2,
1461776634
] |
def detail_view(self, request, pk=None, slug=None):
    """Serve a page detail looked up by pk or, when given, by slug.

    If several pages share the slug, redirect to the listing filtered
    by that slug instead of erroring.
    """
    param = pk
    if slug is not None:
        self.lookup_field = 'slug'
        param = slug
    try:
        return super().detail_view(request, param)
    except MultipleObjectsReturned:
        # Redirect to the listing view, filtered by the relevant slug
        # The router is registered with the `wagtailapi` namespace,
        # `pages` is our endpoint namespace and `listing` is the listing view url name.
        return redirect(
            reverse('wagtailapi:pages:listing') + f'?{self.lookup_field}={param}'
        )
89,
11,
89,
10,
1446045194
] |
def get_urlpatterns(cls):
    """
    This returns a list of URL patterns for the endpoint.

    Bug fix: 'find/' must be registered before '<slug:slug>/'. Django
    resolves patterns in order and the slug converter matches the literal
    string "find", so the previous ordering made the find view unreachable.
    """
    return [
        path('', cls.as_view({'get': 'listing_view'}), name='listing'),
        path('<int:pk>/', cls.as_view({'get': 'detail_view'}), name='detail'),
        path('find/', cls.as_view({'get': 'find_view'}), name='find'),
        path('<slug:slug>/', cls.as_view({'get': 'detail_view'}), name='detail'),
    ]
89,
11,
89,
10,
1446045194
] |
def __init__(self, credentials):
    """Open an OpenERP XML-RPC connection from a credentials dict.

    Expects keys: db_name, user_id, host_name, host_port, user_pwd.
    Stores the proxy in OErpModel.openErpConnection['super'] and logs in
    when the database exists; exits the process on network errors.
    NOTE: Python 2 syntax (print statements).
    """
    db = credentials['db_name']
    user_id = credentials['user_id']
    host_name = credentials['host_name']
    host_port = credentials['host_port']
    user_pwd = credentials['user_pwd']
    print "Getting connection to {} for {}".format(db, user_id)
    try:
        oerp = oerplib.OERP(
            server=host_name, protocol='xmlrpc', port=host_port)
        OErpModel.openErpConnection['super'] = oerp
        if db in oerp.db.list():
            db_connect(user_id, user_pwd, db)
        else:
            print "There is no database called : {}".format(db)
    except socket.gaierror:
        # DNS resolution failed — bad host name.
        sys.exit(
            "Is this the correct URL : {}".format(host_name))
    except socket.error:
        # Connection refused/unreachable — bad port or server down.
        sys.exit(
            "Is this the correct port number : {}".format(host_port))
4,
3,
4,
1,
1364139528
] |
def user_menu(m): | thomashuang/Lilac | [
7,
5,
7,
1,
1418724813
] |
def index(self, page=1):
    """Render the paginated user list; non-root users are bounced to
    their own edit page instead."""
    user = ctx.request.user
    if user.role != 'root':
        raise exc.HTTPFound(location='/user/%d/edit' % (user.uid))
    page = int(page)
    # 10 users per page.
    users = Backend('user').paginate(page, 10)
    return render_template('user.index.html', users=users)
7,
5,
7,
1,
1418724813
] |
def userinfo(self):
    """Return the currently authenticated user from the request context."""
    return ctx.request.user
7,
5,
7,
1,
1418724813
] |
def login(self, username='', password=''):
    """Authenticate a user; on success set the auth cookie and redirect
    to /task, otherwise re-render the login form."""
    # NOTE(review): logging the attempted username at ERROR level looks
    # like leftover debugging — consider DEBUG.
    LOGGER.error('username=%s', username)
    username = username.strip()
    password = password.strip()
    user = Backend('user').find_by_username(username)
    if user and user.check(password):
        set_secure_cookie('auth', str(user.uid))
        LOGGER.info('success')
        raise exc.HTTPFound(location='/task')
    return render_template('login.html')
7,
5,
7,
1,
1418724813
] |
def add_page(self):
    """Render the user-creation form with the selectable statuses/roles."""
    return render_template('user.add.html', statuses=USER_STATUSES, roles=ROLES)
7,
5,
7,
1,
1418724813
] |
def add(self, username, email, real_name, password, status='', role='user'):
    """Create a user after validating name/password patterns and email
    uniqueness; returns a {'status', 'msg'} result dict."""
    username, real_name = username.strip(), real_name.strip()
    if not re.match(r'^[A-Za-z0-9_]{4,16}$', username):
        return {'status' : 'error', 'msg' : 'user name: %s must be the ^[A-Za-z0-9_]{4,16}$ pattern' %(username)}
    if not re.match(r'^[A-Za-z0-9_ ]{4,16}$', real_name):
        return {'status' : 'error', 'msg' : 'real name: %s must be the [A-Za-z0-9_]{4,16} pattern' %(real_name)}
    if not re.match(r'^[A-Za-z0-9@#$%^&+=]{4,16}$', password):
        return {'status' : 'error', 'msg' : 'password: %s must be the ^[A-Za-z0-9@#$%^&+=]{4,16}$ pattern' %(password)}
    # Unknown status/role values silently fall back to safe defaults.
    if status not in USER_STATUSES:
        status = 'actived'
    if role not in ROLES:
        role = 'user'
    if len(email) > 7 and re.match("^.+\\@(\\[?)[a-zA-Z0-9\\-\\.]+\\.([a-zA-Z]{2,3}|[0-9]{1,3})(\\]?)$", email):
        if Backend('user').find_by_email(email):
            return {'status' : 'error', 'msg' : 'email:%s is used' %(email)}
    # NOTE(review): an email failing the regex above is silently accepted
    # unchecked here — confirm whether that is intended.
    if Backend('user').find_by_username(username):
        return {'status' : 'error', 'msg' : 'user name:%s is used' %(username)}
    user = User(username, email, real_name, password, status, role)
    Backend('user').save(user)
    return {'status' : 'info', 'msg' : 'saved'}
7,
5,
7,
1,
1418724813
] |
def edit_page(self, uid):
    """Render the edit form for user *uid*; 404 when the user is unknown."""
    uid = int(uid)
    user = Backend('user').find(uid)
    if not user:
        raise exc.HTTPNotFound('Not Found')
    return render_template('user.edit.html', statuses=USER_STATUSES, roles=ROLES, user=user)
7,
5,
7,
1,
1418724813
] |
def edit(self, uid, email, real_name, password, newpass1, newpass2, status, role='user'):
    """Update a user's password/email/role/status/real name with per-field
    validation; returns a {'status', 'msg'} result dict."""
    real_name, newpass1, newpass2 = real_name.strip(), newpass1.strip(), newpass2.strip()
    uid = int(uid)
    user = Backend('user').find(uid)
    if not user:
        raise exc.HTTPNotFound('user not found')
    me = ctx.request.user
    # Password may only be changed by the account owner, and only when the
    # old password is supplied and both new entries match.
    if me.uid == user.uid:
        if re.match(r'[A-Za-z0-9@#$%^&+=]{4,16}', newpass1):
            if password and newpass1 and newpass1 == newpass2:
                user.password = newpass1
        elif newpass1:
            return {'status' : 'error', 'msg' : 'password: %s must be the [A-Za-z0-9_]{4,16} pattern' %(newpass1)}
    if len(email) > 7 and re.match("^.+\\@(\\[?)[a-zA-Z0-9\\-\\.]+\\.([a-zA-Z]{2,3}|[0-9]{1,3})(\\]?)$", email):
        user_ = Backend('user').find_by_email(email)
        if user_ and user_.uid != user.uid:
            return {'status' : 'error', 'msg' : 'email:%s is used' %(email)}
        else:
            user.email = email
    # Only the primary admin (uid 1) may change roles, never its own.
    if me.uid == 1 and user.uid != 1:
        if role in (ADMIN, USER):
            user.role = role
    if user.status != status and status in USER_STATUSES:
        user.status = status
    if re.match(r'^[A-Za-z0-9_ ]{4,16}$', real_name):
        if user.real_name != real_name:
            user.real_name = real_name
    Backend('user').save(user)
    return {'status' : 'info', 'msg' : 'updated'}
7,
5,
7,
1,
1418724813
] |
def valid_args_list(args):
    """argparse type helper: split a comma-separated string and require
    every element to parse as an integer.

    Returns the list of (string) elements; raises
    argparse.ArgumentTypeError on any non-integer element.
    """
    args_list = args.split(",")
    for arg in args_list:
        try:
            int(arg)
        except ValueError:
            # Bug fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            raise argparse.ArgumentTypeError("must be valid integer")
    return args_list
60,
12,
60,
1,
1439928302
] |
def find_tid(tgt_dir, tgt_vcpu):
    """Scan a /proc-style task directory for the thread whose comm file
    contains *tgt_vcpu*.

    Returns the tid (directory-name string) of the first match, or -1
    when no thread matches.
    """
    for tid in os.listdir(tgt_dir):
        path = tgt_dir + "/" + tid + "/comm"
        # Fix: close the file handle (was leaked in the original loop).
        with open(path, "r") as fp:
            comm = fp.read()
        if tgt_vcpu in comm:
            return tid
    return -1
60,
12,
60,
1,
1439928302
] |
def format_ts_from_float(ts):
    """Convert a seconds timestamp to whole nanoseconds (fraction truncated)."""
    whole_seconds = int(ts)
    return whole_seconds * 10 ** 9
8,
10,
8,
2,
1453219380
] |
def format_ts_from_str(ts, pattern='%Y-%m-%d %H:%M:%S'):
    """Parse *ts* with *pattern* and delegate to format_ts_from_date
    (sibling helper) for the nanosecond conversion."""
    return format_ts_from_date(datetime.strptime(ts, pattern))
8,
10,
8,
2,
1453219380
] |
def _process_automations(center, config):
    """Process automations from config.

    For each configured block, builds the action sequence, the condition
    checker and a deferred trigger-attacher, then stores the Automation
    by name in center.data.
    """
    automations = center.data.setdefault(DATA_AUTOMATIONS, {})
    conf = config[CONF_AUTOMATIONS]
    for block in conf:
        name = block[CONF_NAME]
        _LOGGER.debug("Setting up automation %s", name)
        action_sequence = _get_actions(center, block[CONF_ACTION])
        cond_func = _process_condition(center, block[CONF_CONDITION])
        # use partial to get a function with args to call later
        attach_triggers = partial(_process_trigger, center, block[CONF_TRIGGER])
        automations[name] = Automation(
            center, name, attach_triggers, cond_func, action_sequence
        )
4,
1,
4,
1,
1431267814
] |
def _process_condition(center, config_block):
    """Return a function that parses the condition.

    A block with CONF_TYPE is a composite (and/or) over nested conditions,
    handled recursively; otherwise the condition is a single template that
    gets rendered at check time.
    """
    if CONF_TYPE in config_block:
        checks = []
        condition_type = config_block[CONF_TYPE]
        conditions = config_block[CONF_CONDITIONS]
        for cond in conditions:
            check = _process_condition(center, cond)
            checks.append(check)
        return make_checker(condition_type, checks)
    data = config_block[CONF_CONDITION]
    template = make_template(center, data)
    return partial(render_template, template)
4,
1,
4,
1,
1431267814
] |
def check_condition(variables):
    """Return True if all or any condition(s) pass.

    `condition_type`, `checks` and `template_check` are closure variables
    from the enclosing scope; unknown types evaluate to False.
    """
    if condition_type.lower() == "and":
        return all(template_check(check(variables)) for check in checks)
    if condition_type.lower() == "or":
        return any(template_check(check(variables)) for check in checks)
    return False
4,
1,
4,
1,
1431267814
] |
def template_check(value):
    """Check if a rendered template string equals true.

    Non-string values are passed through unchanged; strings are compared
    case-insensitively against "true".
    """
    if not isinstance(value, str):
        return value
    return value.lower() == "true"
4,
1,
4,
1,
1431267814
] |
def remove_triggers():
    """Remove attached triggers.

    `remove_funcs` is a closure variable holding the detach callbacks
    returned when the triggers were attached.
    """
    for remove in remove_funcs:
        remove()
4,
1,
4,
1,
1431267814
] |
def __init__(
self, center, name, attach_triggers, cond_func, action_sequence, enabled=True | CellProfiling/cam_acq | [
4,
1,
4,
1,
1431267814
] |
def __repr__(self):
    """Return the representation."""
    return (
        f"Automation(center={self._center}, name={self.name}, "
        f"attach_triggers={self._attach_triggers}, cond_func={self._cond_func}, "
        f"action_sequence={self._action_sequence}, enabled={self.enabled})"
    )
4,
1,
4,
1,
1431267814
] |
def disable(self):
    """Disable automation.

    Idempotent: a second call is a no-op. Detaches the trigger listeners
    (if any) before flipping the enabled flag.
    """
    if not self.enabled:
        return
    if self._detach_triggers is not None:
        self._detach_triggers()
    self._detach_triggers = None
    self.enabled = False
4,
1,
4,
1,
1431267814
] |
def __init__(self, center, actions):
    """Set up instance."""
    self._center = center
    self.actions = list(actions)  # copy to list to make sure it's a list
4,
1,
4,
1,
1431267814
] |
def delay(self, seconds, variables, waiting):
    """Delay action sequence.

    Parameters
    ----------
    seconds : float
        A time interval to delay the pending action sequence.
    variables : dict
        A dict of template variables.
    waiting : collection
        The pending actions; cleared here and rescheduled as a new
        ActionSequence after the delay.
    """
    sequence = ActionSequence(self._center, waiting)
    callback = partial(self._center.create_task, sequence(variables))
    waiting.clear()
    _LOGGER.info("Action delay for %s seconds", seconds)
    # Rebind `callback` to the timer handle so it can be cancelled below.
    callback = self._center.loop.call_later(seconds, callback)

    async def cancel_pending_actions(center, event):
        """Cancel pending actions."""
        callback.cancel()

    # Make sure the delayed work is cancelled on shutdown.
    self._center.bus.register(CAMACQ_STOP_EVENT, cancel_pending_actions)
4,
1,
4,
1,
1431267814
] |
def __init__(self, center, action_conf):
    """Set up instance.

    Reads id, type and template data from the action config block; the
    data template is rendered later with runtime variables.
    """
    self._center = center
    self.action_id = action_conf[CONF_ID]
    self.action_type = action_conf[CONF_TYPE]
    action_data = action_conf[CONF_DATA]
    self.template = make_template(center, action_data)
4,
1,
4,
1,
1431267814
] |
def _import(
session: Session, configs: Dict[str, Any], overwrite: bool = False | apache/incubator-superset | [
50904,
10257,
50904,
1280,
1437504934
] |
def test_send_mail(db):
    """send_mail with a To address: subject is prefixed with the site,
    and cc/bcc/attachments stay empty."""
    send_mail('Subject', 'Message', to=['user@example.com'])
    assert len(mail.outbox) == 1
    assert mail.outbox[0].subject == '[example.com] Subject'
    assert mail.outbox[0].body == 'Message'
    assert mail.outbox[0].from_email == settings.DEFAULT_FROM_EMAIL
    assert mail.outbox[0].to == ['user@example.com']
    assert mail.outbox[0].cc == []
    assert mail.outbox[0].bcc == []
    assert mail.outbox[0].attachments == []
80,
41,
80,
122,
1438336991
] |
def test_send_mail_bcc(db):
    """send_mail with only a Bcc address: To/Cc stay empty and the
    recipient lands in bcc."""
    send_mail('Subject', 'Message', bcc=['user2@example.com'])
    assert len(mail.outbox) == 1
    assert mail.outbox[0].subject == '[example.com] Subject'
    assert mail.outbox[0].body == 'Message'
    assert mail.outbox[0].from_email == settings.DEFAULT_FROM_EMAIL
    assert mail.outbox[0].to == []
    assert mail.outbox[0].cc == []
    assert mail.outbox[0].bcc == ['user2@example.com']
    assert mail.outbox[0].attachments == []
80,
41,
80,
122,
1438336991
] |
def test_fixture_create_without_context(self) -> None:
    '''torment.fixtures.Fixture() → TypeError'''
    # Fixture requires a context argument; calling without one must fail.
    self.assertRaises(TypeError, fixtures.Fixture)
5,
6,
5,
4,
1431370794
] |
def setUp(self) -> None:
    # A plain TestCase doubles as the fixture context in these tests.
    self.c = unittest.TestCase()
    self.f = fixtures.Fixture(self.c)
5,
6,
5,
4,
1431370794
] |
def test_fixture_description(self) -> None:
    '''torment.fixtures.Fixture(context).description == '94d7c58f6ee44683936c21cb84d1e458—torment.fixtures' '''
    self.f.context.module = 'fixtures'
    self.f.uuid = uuid.UUID('94d7c58f6ee44683936c21cb84d1e458')
    # description is '<uuid hex>—<context module>'.
    self.assertEqual(self.f.description, '94d7c58f6ee44683936c21cb84d1e458—fixtures')
5,
6,
5,
4,
1431370794
] |
def test_error_fixture_description(self) -> None:
    '''torment.fixtures.ErrorFixture(context).description == 'expected → failure' '''
    class fixture(fixtures.Fixture):
        @property
        def description(self) -> str:
            return 'expected'

    class error_fixture(fixtures.ErrorFixture, fixture):
        def __init__(self, *args, **kwargs) -> None:
            super().__init__(*args, **kwargs)
            self.error = RuntimeError('failure')

    c = unittest.TestCase()
    e = error_fixture(c)
    # ErrorFixture appends the expected error to the wrapped description.
    self.assertEqual(e.description, 'expected → failure')
5,
6,
5,
4,
1431370794
] |
def test_error_fixture_run(self) -> None:
    '''torment.fixtures.ErrorFixture(context).run()'''
    class fixture(fixtures.Fixture):
        def run(self):
            raise RuntimeError('failure')

    class error_fixture(fixtures.ErrorFixture, fixture):
        def __init__(self, *args, **kwargs) -> None:
            super().__init__(*args, **kwargs)
            self.error = RuntimeError('failure')

    c = unittest.TestCase()
    e = error_fixture(c)
    # run() must capture the expected exception rather than propagate it.
    e.run()
    self.assertIsInstance(e.exception, RuntimeError)
    self.assertEqual(e.exception.args, ( 'failure', ))
5,
6,
5,
4,
1431370794
] |
def test_of_zero(self) -> None:
    '''torment.fixtures.of(()) == []'''
    # No fixture classes → no instances.
    self.assertEqual(len(fixtures.of(())), 0)
5,
6,
5,
4,
1431370794
] |
def __init__(self, context) -> None:
    # Stub constructor: accepts the context without storing it.
    pass
5,
6,
5,
4,
1431370794
] |
def test_of_many_with_subclasses(self) -> None:
    '''torment.fixtures.of(( FixtureA, )) == [ fixture_a, ]'''
    class FixtureA(object):
        def __init__(self, context) -> None:
            pass

    class FixtureB(FixtureA):
        pass

    result = fixtures.of(( FixtureA, ))
    self.assertEqual(len(result), 1)
    # of() instantiates the leaf subclass, not the listed base class.
    self.assertIsInstance(result[0], FixtureB)
5,
6,
5,
4,
1431370794
] |
def setUp(self) -> None:
    # Patch torment.fixtures.inspect so registration sees a deterministic caller stack.
    _ = unittest.mock.patch('torment.fixtures.inspect')
    mocked_inspect = _.start()
    self.addCleanup(_.stop)

    # Pass the real isclass/isfunction through so type dispatch still works.
    mocked_inspect.configure_mock(**{ 'isclass': inspect.isclass, 'isfunction': inspect.isfunction, })

    # Fake the caller's filename; presumably register() derives the fixture UUID
    # from it (it matches self.class_name below) — TODO confirm against register().
    mocked_inspect.stack.return_value = ( None, ( None, 'test_unit/test_d43830e2e9624dd19c438b15250c5818.py', ), )

    class ContextStub(object):
        pass

    self.context = ContextStub()
    self.context.module = mocked_inspect.getmodule.return_value = 'stack'

    # Namespace that register() populates with the generated fixture class.
    self.ns = {}  # type: Dict[str, Any]
    self.class_name = 'f_d43830e2e9624dd19c438b15250c5818'
5,
6,
5,
4,
1431370794
] |
def test_one_literal_properties(self) -> None:
    '''torment.fixtures.register({}, (), { 'a': 'a', })'''

    fixtures.register(self.ns, ( fixtures.Fixture, ), { 'a': 'a', })

    instance = self.ns[self.class_name](self.context)

    # Literal property values are carried onto the generated fixture verbatim.
    self.assertEqual(instance.a, 'a')
5,
6,
5,
4,
1431370794
] |
def test_one_fixture_class_properties(self) -> None:
    '''torment.fixtures.register({}, (), { 'a': fixture_class, })'''

    class A(fixtures.Fixture):
        pass

    fixtures.register(self.ns, ( fixtures.Fixture, ), { 'a': A, })

    instance = self.ns[self.class_name](self.context)

    # Fixture-class property values are instantiated with the owning context.
    self.assertIsInstance(instance.a, A)
    self.assertEqual(instance.a.context, self.context)
5,
6,
5,
4,
1431370794
] |
def a(self) -> None:
    # Intentionally empty stub method.
    pass
5,
6,
5,
4,
1431370794
] |
def test_description_property(self) -> None:
    '''torment.fixtures.register({}, (), { 'description': 'needle', })'''

    fixtures.register(self.ns, ( fixtures.Fixture, ), { 'description': 'needle', })

    instance = self.ns[self.class_name](self.context)

    # The supplied description is appended after the '<uuid>—<module>' prefix.
    self.assertEqual(instance.description, 'd43830e2e9624dd19c438b15250c5818—stack—needle')
5,
6,
5,
4,
1431370794
] |
def test_mocks_mock_property(self) -> None:
    '''torment.fixtures.register({}, (), { 'mocks': { 'symbol': …, }, }).setup()'''

    _ = unittest.mock.patch('torment.fixtures._find_mocker')
    mocked_fixtures_find_mocker = _.start()
    self.addCleanup(_.stop)

    # Return a truthy mocker so setup() proceeds to prepare the mock.
    mocked_fixtures_find_mocker.return_value = lambda: True

    _ = unittest.mock.patch('torment.fixtures._prepare_mock')
    mocked_fixtures_prepare_mock = _.start()
    self.addCleanup(_.stop)

    fixtures.register(self.ns, ( fixtures.Fixture, ), { 'mocks': { 'symbol': {}, }, })

    _ = self.ns[self.class_name](self.context)
    _.setup()

    # setup() should resolve the mocker for 'symbol' and prepare it exactly once.
    mocked_fixtures_find_mocker.assert_called_once_with('symbol', self.context)
    mocked_fixtures_prepare_mock.assert_called_once_with(self.context, 'symbol')
5,
6,
5,
4,
1431370794
] |
def setUp(self) -> None:
    # Context carrying a pre-existing mocked symbol for the _prepare_mock tests.
    class ContextStub(contexts.TestContext):
        mocked_symbol = unittest.mock.MagicMock(name = 'ContextStub.mocked_symbol')

    self.context = ContextStub()
5,
6,
5,
4,
1431370794
] |
def test_prepare_mock_side_effect_zero_dots(self) -> None:
    '''torment.fixtures._prepare_mock(ContextStub, 'symbol', side_effect = range(2))'''

    fixtures._prepare_mock(self.context, 'symbol', side_effect = range(2))

    # side_effect yields each value in turn, then raises StopIteration when exhausted.
    self.assertEqual(self.context.mocked_symbol(), 0)
    self.assertEqual(self.context.mocked_symbol(), 1)
    self.assertRaises(StopIteration, self.context.mocked_symbol)
5,
6,
5,
4,
1431370794
] |
def test_prepare_mock_return_value_one_dots(self) -> None:
    '''torment.fixtures._prepare_mock(ContextStub, 'symbol.Sub', return_value = 'a')'''

    fixtures._prepare_mock(self.context, 'symbol.Sub', return_value = 'a')

    # One dotted level: the Sub attribute is configured on the existing mocked_symbol.
    self.assertEqual(self.context.mocked_symbol.Sub(), 'a')
5,
6,
5,
4,
1431370794
] |
def test_prepare_mock_return_value_many_dots_second_level(self) -> None:
    '''torment.fixtures._prepare_mock(ContextStub, 'symbol.sub.a.b.c', return_value = 'a')'''

    class ContextStub(contexts.TestContext):
        mocked_symbol_sub = unittest.mock.MagicMock(name = 'ContextStub.mocked_symbol_sub')

    c = ContextStub()

    fixtures._prepare_mock(c, 'symbol.sub.a.b.c', return_value = 'a')

    # The deepest existing mock (mocked_symbol_sub) anchors the remaining chain a.b.c.
    self.assertEqual(c.mocked_symbol_sub.a.b.c(), 'a')
5,
6,
5,
4,
1431370794
] |
def test_find_mocker_found_zero_levels(self) -> None:
    '''torment.fixtures._find_mocker('symbol', ContextStub) == mock_symbol'''

    class ContextStub(contexts.TestContext):
        def mock_symbol(self):
            pass

    c = ContextStub()

    method = fixtures._find_mocker('symbol', c)

    # An undotted symbol resolves directly to the context's mock_<symbol> method.
    self.assertEqual(method, c.mock_symbol)
5,
6,
5,
4,
1431370794
] |
def mock_symbol_sub(self):
    # Intentionally empty mocker stub; presumably only its name matters to
    # _find_mocker — confirm against the _find_mocker implementation.
    pass
5,
6,
5,
4,
1431370794
] |
def test_find_mocker_found_many_levels(self) -> None:
    '''torment.fixtures._find_mocker('symbol.sub.a.b', ContextStub) == mock_symbol_sub_a_b'''

    class ContextStub(contexts.TestContext):
        def mock_symbol_sub_a_b(self):
            pass

    c = ContextStub()

    method = fixtures._find_mocker('symbol.sub.a.b', c)

    # Dots in the symbol are flattened to underscores when locating the mocker method.
    self.assertEqual(method, c.mock_symbol_sub_a_b)
5,
6,
5,
4,
1431370794
] |
def setUp(self) -> None:
    # Minimal stand-in fixture plus an independent deep copy to compare against.
    class StubFixture(object):
        pass

    self.f = StubFixture()
    self.f.name = 'testing_fixture_stub'

    self.o = copy.deepcopy(self.f)
5,
6,
5,
4,
1431370794
] |
def test_one_functions_without_parameters(self) -> None:
    '''torment.fixtures._resolve_functions({ 'a': ø → None, }, fixture)'''

    def a() -> None:
        pass

    fixtures._resolve_functions({ 'a': a, }, self.f)

    # A zero-argument function is attached to the fixture unchanged (same object).
    self.assertIs(self.f.a, a)
5,
6,
5,
4,
1431370794
] |
def a(self) -> None:
    # Intentionally empty stub method.
    pass
5,
6,
5,
4,
1431370794
] |
def test_one_functions_with_self_parameter_raises_attributeerror(self) -> None:
    '''torment.fixtures._resolve_functions({ 'a': self → self.b, }, fixture)'''

    def a(self):
        return self.b

    fixtures._resolve_functions({ 'a': a, }, self.f)

    # NOTE(review): the method name promises an AttributeError, but the body only
    # asserts the function is attached unchanged — confirm which is intended.
    self.assertEqual(id(self.f.a), id(a))
5,
6,
5,
4,
1431370794
] |
def a(self) -> None:
    # NOTE(review): returns self.b despite the declared -> None annotation —
    # the annotation looks inaccurate for this stub.
    return self.b
5,
6,
5,
4,
1431370794
] |
def setUp(self) -> None:
    # Fresh random UUID per test; the tests build class names from its hex form.
    self.uuid = uuid.uuid4()
5,
6,
5,
4,
1431370794
] |
def test_one_namespace(self) -> None:
    '''torment.fixtures._unique_class_name({ 'f_{uuid}': None, }, uuid) == 'f_{uuid}_1' '''

    taken = 'f_' + self.uuid.hex
    generated = fixtures._unique_class_name({ taken: None, }, self.uuid)

    # A collision with the base name is resolved by appending a '_1' suffix.
    self.assertEqual(generated, taken + '_1')
5,
6,
5,
4,
1431370794
] |
def __init__(self, container, command, *args, **kwargs):
    """Store the target container and command, then continue cooperative init.

    :param container: container this mixin will execute in — presumably a Docker
        container handle or name; confirm against callers.
    :param command: command to execute.
    """
    self._container = container
    self._command = command
    # Forward remaining arguments along the MRO (cooperative multiple inheritance).
    super(DockerExecMixin, self).__init__(*args, **kwargs)
2,
15,
2,
4,
1452032873
] |
def run_detector(self) -> None:
    """Run the breaking change detector to detect rule violations

    This method should populate the detector's internal data such
    that `is_breaking` does not require any additional invocations
    to the breaking change detector.
    """
    # Abstract-style stub: concrete detectors override this.
    pass
21587,
4174,
21587,
1421,
1470668844
] |
def get_breaking_changes(self) -> List[str]:
    """Return a list of strings containing breaking changes output by the tool"""
    # Abstract-style stub: concrete detectors override this.
    pass
21587,
4174,
21587,
1421,
1470668844
] |
def __init__(
        self,
        path_to_changed_dir: str,
        git_ref: str,
        git_path: str,
        subdir: str = None,
        buf_path: str = None,
        config_file_loc: str = None,
        additional_args: List[str] = None) -> None:
    """Initialize the configuration of buf

    This function sets up any necessary config without actually
    running buf against any proto files.

    BufWrapper takes a path to a directory containing proto files
    as input, and it checks if these proto files break any changes
    from a given initial state.

    The initial state is input as a git ref. The constructor expects
    a git ref string, as well as an absolute path to a .git folder
    for the repository.

    Args:
        path_to_changed_dir {str} -- absolute path to a directory containing proto files in the after state
        buf_path {str} -- path to the buf binary (default: "buf")
        git_ref {str} -- git reference to use for the initial state of the protos (typically a commit hash)
        git_path {str} -- absolute path to .git folder for the repository of interest
        subdir {str} -- subdirectory within git repository from which to search for .proto files (default: None, e.g. stay in root)
        additional_args {List[str]} -- additional arguments passed into the buf binary invocations
        config_file_loc {str} -- absolute path to buf.yaml configuration file (if not provided, uses default buf configuration)

    Raises:
        ValueError: if path_to_changed_dir is not a directory or is not under the cwd
        ChangeDetectorError: if git_path does not exist
    """
    # NOTE(review): the optional parameters default to None but are annotated as
    # plain str / List[str]; Optional[...] would be more accurate — left untouched
    # since `Optional` is not necessarily imported in this module.
    if not Path(path_to_changed_dir).is_dir():
        raise ValueError(f"path_to_changed_dir {path_to_changed_dir} is not a valid directory")

    # Guard: the changed directory must live somewhere under the current working directory.
    if Path.cwd() not in Path(path_to_changed_dir).parents:
        raise ValueError(
            f"path_to_changed_dir {path_to_changed_dir} must be a subdirectory of the cwd ({ Path.cwd() })"
        )

    if not Path(git_path).exists():
        raise ChangeDetectorError(f'path to .git folder {git_path} does not exist')

    self._path_to_changed_dir = path_to_changed_dir
    self._additional_args = additional_args
    self._buf_path = buf_path or "buf"
    self._config_file_loc = config_file_loc
    self._git_ref = git_ref
    self._git_path = git_path
    self._subdir = subdir
    # Populated by run_detector(); None until the detector has been run.
    self._final_result = None

    # Pull buf dependencies immediately at construction time.
    pull_buf_deps(
        self._buf_path,
        self._path_to_changed_dir,
        config_file_loc=self._config_file_loc,
        additional_args=self._additional_args)
21587,
4174,
21587,
1421,
1470668844
] |
def is_breaking(self) -> bool:
    """Return True when the recorded buf run reported a breaking change.

    Requires run_detector() to have populated self._final_result first.
    """
    if not self._final_result:
        raise ChangeDetectorError("Must invoke run_detector() before checking if is_breaking()")

    exit_code, out_lines, err_lines = self._final_result
    stdout = '\n'.join(out_lines)
    stderr = '\n'.join(err_lines)

    if stderr != "":
        raise ChangeDetectorError(f"Error from buf: {stderr}")

    # Any non-zero exit status or any stdout output from buf indicates breakage.
    return exit_code != 0 or stdout != ""
21587,
4174,
21587,
1421,
1470668844
] |
def SecondsToMicroseconds(seconds):
  """Convert seconds to microseconds.

  Args:
    seconds: number
  Returns:
    The equivalent number of microseconds.
  """
  return _MICROSECONDS_PER_SECOND * seconds
46,
9,
46,
1,
1366927053
] |
def _GetCurrentTimeMicros():
  """Get the current time in microseconds, in UTC.

  Returns:
    The number of microseconds since the epoch, as an int.
  """
  # time.time() returns seconds since the epoch; truncate to whole microseconds.
  return int(SecondsToMicroseconds(time.time()))
46,
9,
46,
1,
1366927053
] |
def GetTimeMicros(time_tuple):
  """Get a time in microseconds.

  Arguments:
    time_tuple: A (year, month, day, hour, minute, second) tuple (the python
      time tuple format) in the UTC time zone.

  Returns:
    The number of microseconds since the epoch represented by the input tuple,
    as an int.
  """
  return int(SecondsToMicroseconds(GetSecondsSinceEpoch(time_tuple)))
46,
9,
46,
1,
1366927053
] |
def DatetimeToUTCMillis(date):
  """Converts a datetime object to milliseconds since the epoch in UTC.

  Args:
    date: A datetime to convert.
  Returns:
    The number of milliseconds since the epoch, in UTC, represented by the input
    datetime, as an integer.
  """
  # Use floor division so the result stays an integer: under Python 3's true
  # division the original `/ 1000` would return a float, unlike the sibling
  # micro-precision helpers which all return ints. Python 2's int / int already
  # floored, so behavior there is unchanged.
  return DatetimeToUTCMicros(date) // 1000
46,
9,
46,
1,
1366927053
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.