id
int32 0
252k
| repo
stringlengths 7
55
| path
stringlengths 4
127
| func_name
stringlengths 1
88
| original_string
stringlengths 75
19.8k
| language
stringclasses 1
value | code
stringlengths 75
19.8k
| code_tokens
list | docstring
stringlengths 3
17.3k
| docstring_tokens
list | sha
stringlengths 40
40
| url
stringlengths 87
242
|
|---|---|---|---|---|---|---|---|---|---|---|---|
245,900
|
pythonkc/pythonkc-meetups
|
pythonkc_meetups/parsers.py
|
parse_member_from_rsvp
|
def parse_member_from_rsvp(data):
"""
Parse a ``MeetupMember`` from the given RSVP response data.
Returns
-------
A ``pythonkc_meetups.types.MeetupMember``.
"""
return MeetupMember(
id=data['member'].get('member_id', None),
name=data['member'].get('name', None),
photo=(parse_photo(data['member_photo'])
if 'member_photo' in data else None)
)
|
python
|
def parse_member_from_rsvp(data):
"""
Parse a ``MeetupMember`` from the given RSVP response data.
Returns
-------
A ``pythonkc_meetups.types.MeetupMember``.
"""
return MeetupMember(
id=data['member'].get('member_id', None),
name=data['member'].get('name', None),
photo=(parse_photo(data['member_photo'])
if 'member_photo' in data else None)
)
|
[
"def",
"parse_member_from_rsvp",
"(",
"data",
")",
":",
"return",
"MeetupMember",
"(",
"id",
"=",
"data",
"[",
"'member'",
"]",
".",
"get",
"(",
"'member_id'",
",",
"None",
")",
",",
"name",
"=",
"data",
"[",
"'member'",
"]",
".",
"get",
"(",
"'name'",
",",
"None",
")",
",",
"photo",
"=",
"(",
"parse_photo",
"(",
"data",
"[",
"'member_photo'",
"]",
")",
"if",
"'member_photo'",
"in",
"data",
"else",
"None",
")",
")"
] |
Parse a ``MeetupMember`` from the given RSVP response data.
Returns
-------
A ``pythonkc_meetups.types.MeetupMember``.
|
[
"Parse",
"a",
"MeetupMember",
"from",
"the",
"given",
"RSVP",
"response",
"data",
"."
] |
54b5062b2825011c87c303256f59c6c13d395ee7
|
https://github.com/pythonkc/pythonkc-meetups/blob/54b5062b2825011c87c303256f59c6c13d395ee7/pythonkc_meetups/parsers.py#L68-L82
|
245,901
|
pythonkc/pythonkc-meetups
|
pythonkc_meetups/parsers.py
|
parse_photo
|
def parse_photo(data):
"""
Parse a ``MeetupPhoto`` from the given response data.
Returns
-------
A `pythonkc_meetups.types.`MeetupPhoto``.
"""
return MeetupPhoto(
id=data.get('photo_id', data.get('id', None)),
url=data.get('photo_link', None),
highres_url=data.get('highres_link', None),
thumb_url=data.get('thumb_link', None)
)
|
python
|
def parse_photo(data):
"""
Parse a ``MeetupPhoto`` from the given response data.
Returns
-------
A `pythonkc_meetups.types.`MeetupPhoto``.
"""
return MeetupPhoto(
id=data.get('photo_id', data.get('id', None)),
url=data.get('photo_link', None),
highres_url=data.get('highres_link', None),
thumb_url=data.get('thumb_link', None)
)
|
[
"def",
"parse_photo",
"(",
"data",
")",
":",
"return",
"MeetupPhoto",
"(",
"id",
"=",
"data",
".",
"get",
"(",
"'photo_id'",
",",
"data",
".",
"get",
"(",
"'id'",
",",
"None",
")",
")",
",",
"url",
"=",
"data",
".",
"get",
"(",
"'photo_link'",
",",
"None",
")",
",",
"highres_url",
"=",
"data",
".",
"get",
"(",
"'highres_link'",
",",
"None",
")",
",",
"thumb_url",
"=",
"data",
".",
"get",
"(",
"'thumb_link'",
",",
"None",
")",
")"
] |
Parse a ``MeetupPhoto`` from the given response data.
Returns
-------
A `pythonkc_meetups.types.`MeetupPhoto``.
|
[
"Parse",
"a",
"MeetupPhoto",
"from",
"the",
"given",
"response",
"data",
"."
] |
54b5062b2825011c87c303256f59c6c13d395ee7
|
https://github.com/pythonkc/pythonkc-meetups/blob/54b5062b2825011c87c303256f59c6c13d395ee7/pythonkc_meetups/parsers.py#L85-L99
|
245,902
|
PlotWatt/sql_query_dict
|
sql_query_dict.py
|
_mysql_isval
|
def _mysql_isval(val):
""" These types should either be ignored or have already been
inserted into the SQL directly and dont need sqlalchemy to do
it for us. """
if _mysql_is_list(val):
return False
elif isinstance(val, mysql_col):
return False
elif val in [None, mysql_now, mysql_ignore]:
return False
return True
|
python
|
def _mysql_isval(val):
""" These types should either be ignored or have already been
inserted into the SQL directly and dont need sqlalchemy to do
it for us. """
if _mysql_is_list(val):
return False
elif isinstance(val, mysql_col):
return False
elif val in [None, mysql_now, mysql_ignore]:
return False
return True
|
[
"def",
"_mysql_isval",
"(",
"val",
")",
":",
"if",
"_mysql_is_list",
"(",
"val",
")",
":",
"return",
"False",
"elif",
"isinstance",
"(",
"val",
",",
"mysql_col",
")",
":",
"return",
"False",
"elif",
"val",
"in",
"[",
"None",
",",
"mysql_now",
",",
"mysql_ignore",
"]",
":",
"return",
"False",
"return",
"True"
] |
These types should either be ignored or have already been
inserted into the SQL directly and dont need sqlalchemy to do
it for us.
|
[
"These",
"types",
"should",
"either",
"be",
"ignored",
"or",
"have",
"already",
"been",
"inserted",
"into",
"the",
"SQL",
"directly",
"and",
"dont",
"need",
"sqlalchemy",
"to",
"do",
"it",
"for",
"us",
"."
] |
38ca66258a0715b01b5c90c0e42bfa17a1cc8f16
|
https://github.com/PlotWatt/sql_query_dict/blob/38ca66258a0715b01b5c90c0e42bfa17a1cc8f16/sql_query_dict.py#L51-L61
|
245,903
|
heikomuller/sco-models
|
scomodels/__init__.py
|
init_registry_from_json
|
def init_registry_from_json(mongo, filename, clear_collection=False):
"""Initialize a model registry with a list of model definitions that are
stored in a given file in Json format.
Parameters
----------
mongo : scodata.MongoDBFactory
Connector for MongoDB
filename : string
Path to file containing model definitions
clear_collection : boolean
If true, collection will be dropped before models are created
"""
# Read model definition file (JSON)
with open(filename, 'r') as f:
models = json.load(f)
init_registry(mongo, models, clear_collection)
|
python
|
def init_registry_from_json(mongo, filename, clear_collection=False):
"""Initialize a model registry with a list of model definitions that are
stored in a given file in Json format.
Parameters
----------
mongo : scodata.MongoDBFactory
Connector for MongoDB
filename : string
Path to file containing model definitions
clear_collection : boolean
If true, collection will be dropped before models are created
"""
# Read model definition file (JSON)
with open(filename, 'r') as f:
models = json.load(f)
init_registry(mongo, models, clear_collection)
|
[
"def",
"init_registry_from_json",
"(",
"mongo",
",",
"filename",
",",
"clear_collection",
"=",
"False",
")",
":",
"# Read model definition file (JSON)",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"f",
":",
"models",
"=",
"json",
".",
"load",
"(",
"f",
")",
"init_registry",
"(",
"mongo",
",",
"models",
",",
"clear_collection",
")"
] |
Initialize a model registry with a list of model definitions that are
stored in a given file in Json format.
Parameters
----------
mongo : scodata.MongoDBFactory
Connector for MongoDB
filename : string
Path to file containing model definitions
clear_collection : boolean
If true, collection will be dropped before models are created
|
[
"Initialize",
"a",
"model",
"registry",
"with",
"a",
"list",
"of",
"model",
"definitions",
"that",
"are",
"stored",
"in",
"a",
"given",
"file",
"in",
"Json",
"format",
"."
] |
0584322a50ae16d5c32c224e8bca516bd363371f
|
https://github.com/heikomuller/sco-models/blob/0584322a50ae16d5c32c224e8bca516bd363371f/scomodels/__init__.py#L371-L387
|
245,904
|
heikomuller/sco-models
|
scomodels/__init__.py
|
DefaultModelRegistry.from_json
|
def from_json(self, document):
"""Create a model database object from a given Json document.
Parameters
----------
document : JSON
Json representation of the object
Returns
ModelHandle
"""
# The timestamp is optional (e.g., in cases where model definitions are
# loaded from file).
if 'timestamp' in document:
timestamp = datetime.datetime.strptime(
document['timestamp'],
'%Y-%m-%dT%H:%M:%S.%f'
)
else:
timestamp = None
# Create handle for database object
return ModelHandle(
document['_id'],
document['properties'],
[AttributeDefinition.from_json(el) for el in document['parameters']],
ModelOutputs.from_json(document['outputs']),
timestamp=timestamp
)
|
python
|
def from_json(self, document):
"""Create a model database object from a given Json document.
Parameters
----------
document : JSON
Json representation of the object
Returns
ModelHandle
"""
# The timestamp is optional (e.g., in cases where model definitions are
# loaded from file).
if 'timestamp' in document:
timestamp = datetime.datetime.strptime(
document['timestamp'],
'%Y-%m-%dT%H:%M:%S.%f'
)
else:
timestamp = None
# Create handle for database object
return ModelHandle(
document['_id'],
document['properties'],
[AttributeDefinition.from_json(el) for el in document['parameters']],
ModelOutputs.from_json(document['outputs']),
timestamp=timestamp
)
|
[
"def",
"from_json",
"(",
"self",
",",
"document",
")",
":",
"# The timestamp is optional (e.g., in cases where model definitions are",
"# loaded from file).",
"if",
"'timestamp'",
"in",
"document",
":",
"timestamp",
"=",
"datetime",
".",
"datetime",
".",
"strptime",
"(",
"document",
"[",
"'timestamp'",
"]",
",",
"'%Y-%m-%dT%H:%M:%S.%f'",
")",
"else",
":",
"timestamp",
"=",
"None",
"# Create handle for database object",
"return",
"ModelHandle",
"(",
"document",
"[",
"'_id'",
"]",
",",
"document",
"[",
"'properties'",
"]",
",",
"[",
"AttributeDefinition",
".",
"from_json",
"(",
"el",
")",
"for",
"el",
"in",
"document",
"[",
"'parameters'",
"]",
"]",
",",
"ModelOutputs",
".",
"from_json",
"(",
"document",
"[",
"'outputs'",
"]",
")",
",",
"timestamp",
"=",
"timestamp",
")"
] |
Create a model database object from a given Json document.
Parameters
----------
document : JSON
Json representation of the object
Returns
ModelHandle
|
[
"Create",
"a",
"model",
"database",
"object",
"from",
"a",
"given",
"Json",
"document",
"."
] |
0584322a50ae16d5c32c224e8bca516bd363371f
|
https://github.com/heikomuller/sco-models/blob/0584322a50ae16d5c32c224e8bca516bd363371f/scomodels/__init__.py#L225-L252
|
245,905
|
heikomuller/sco-models
|
scomodels/__init__.py
|
DefaultModelRegistry.list_models
|
def list_models(self, limit=-1, offset=-1):
"""List models in the database. Takes optional parameters limit and
offset for pagination.
Parameters
----------
limit : int
Limit number of models in the result set
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
"""
return self.list_objects(limit=limit, offset=offset)
|
python
|
def list_models(self, limit=-1, offset=-1):
"""List models in the database. Takes optional parameters limit and
offset for pagination.
Parameters
----------
limit : int
Limit number of models in the result set
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
"""
return self.list_objects(limit=limit, offset=offset)
|
[
"def",
"list_models",
"(",
"self",
",",
"limit",
"=",
"-",
"1",
",",
"offset",
"=",
"-",
"1",
")",
":",
"return",
"self",
".",
"list_objects",
"(",
"limit",
"=",
"limit",
",",
"offset",
"=",
"offset",
")"
] |
List models in the database. Takes optional parameters limit and
offset for pagination.
Parameters
----------
limit : int
Limit number of models in the result set
offset : int
Set offset in list (order as defined by object store)
Returns
-------
ObjectListing
|
[
"List",
"models",
"in",
"the",
"database",
".",
"Takes",
"optional",
"parameters",
"limit",
"and",
"offset",
"for",
"pagination",
"."
] |
0584322a50ae16d5c32c224e8bca516bd363371f
|
https://github.com/heikomuller/sco-models/blob/0584322a50ae16d5c32c224e8bca516bd363371f/scomodels/__init__.py#L270-L285
|
245,906
|
heikomuller/sco-models
|
scomodels/__init__.py
|
DefaultModelRegistry.to_json
|
def to_json(self, model):
"""Create a Json-like object for a model.
Parameters
----------
model : ModelHandle
Returns
-------
dict
Json-like object representation
"""
# Get the basic Json object from the super class
obj = super(DefaultModelRegistry, self).to_json(model)
# Add model parameter
obj['parameters'] = [
para.to_json() for para in model.parameters
]
obj['outputs'] = model.outputs.to_json()
return obj
|
python
|
def to_json(self, model):
"""Create a Json-like object for a model.
Parameters
----------
model : ModelHandle
Returns
-------
dict
Json-like object representation
"""
# Get the basic Json object from the super class
obj = super(DefaultModelRegistry, self).to_json(model)
# Add model parameter
obj['parameters'] = [
para.to_json() for para in model.parameters
]
obj['outputs'] = model.outputs.to_json()
return obj
|
[
"def",
"to_json",
"(",
"self",
",",
"model",
")",
":",
"# Get the basic Json object from the super class",
"obj",
"=",
"super",
"(",
"DefaultModelRegistry",
",",
"self",
")",
".",
"to_json",
"(",
"model",
")",
"# Add model parameter",
"obj",
"[",
"'parameters'",
"]",
"=",
"[",
"para",
".",
"to_json",
"(",
")",
"for",
"para",
"in",
"model",
".",
"parameters",
"]",
"obj",
"[",
"'outputs'",
"]",
"=",
"model",
".",
"outputs",
".",
"to_json",
"(",
")",
"return",
"obj"
] |
Create a Json-like object for a model.
Parameters
----------
model : ModelHandle
Returns
-------
dict
Json-like object representation
|
[
"Create",
"a",
"Json",
"-",
"like",
"object",
"for",
"a",
"model",
"."
] |
0584322a50ae16d5c32c224e8bca516bd363371f
|
https://github.com/heikomuller/sco-models/blob/0584322a50ae16d5c32c224e8bca516bd363371f/scomodels/__init__.py#L315-L334
|
245,907
|
knagra/farnsworth
|
managers/views.py
|
end_anonymous_session_view
|
def end_anonymous_session_view(request):
''' End the anonymous session if the user is a superuser. '''
request.session['ANONYMOUS_SESSION'] = False
messages.add_message(request, messages.INFO, MESSAGES['ANONYMOUS_SESSION_ENDED'])
return HttpResponseRedirect(reverse('utilities'))
|
python
|
def end_anonymous_session_view(request):
''' End the anonymous session if the user is a superuser. '''
request.session['ANONYMOUS_SESSION'] = False
messages.add_message(request, messages.INFO, MESSAGES['ANONYMOUS_SESSION_ENDED'])
return HttpResponseRedirect(reverse('utilities'))
|
[
"def",
"end_anonymous_session_view",
"(",
"request",
")",
":",
"request",
".",
"session",
"[",
"'ANONYMOUS_SESSION'",
"]",
"=",
"False",
"messages",
".",
"add_message",
"(",
"request",
",",
"messages",
".",
"INFO",
",",
"MESSAGES",
"[",
"'ANONYMOUS_SESSION_ENDED'",
"]",
")",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'utilities'",
")",
")"
] |
End the anonymous session if the user is a superuser.
|
[
"End",
"the",
"anonymous",
"session",
"if",
"the",
"user",
"is",
"a",
"superuser",
"."
] |
1b6589f0d9fea154f0a1e2231ed906764ed26d26
|
https://github.com/knagra/farnsworth/blob/1b6589f0d9fea154f0a1e2231ed906764ed26d26/managers/views.py#L92-L96
|
245,908
|
knagra/farnsworth
|
managers/views.py
|
list_managers_view
|
def list_managers_view(request):
''' Show a list of manager positions with links to view in detail. '''
managerset = Manager.objects.filter(active=True)
return render_to_response('list_managers.html', {
'page_name': "Managers",
'managerset': managerset,
}, context_instance=RequestContext(request))
|
python
|
def list_managers_view(request):
''' Show a list of manager positions with links to view in detail. '''
managerset = Manager.objects.filter(active=True)
return render_to_response('list_managers.html', {
'page_name': "Managers",
'managerset': managerset,
}, context_instance=RequestContext(request))
|
[
"def",
"list_managers_view",
"(",
"request",
")",
":",
"managerset",
"=",
"Manager",
".",
"objects",
".",
"filter",
"(",
"active",
"=",
"True",
")",
"return",
"render_to_response",
"(",
"'list_managers.html'",
",",
"{",
"'page_name'",
":",
"\"Managers\"",
",",
"'managerset'",
":",
"managerset",
",",
"}",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
")"
] |
Show a list of manager positions with links to view in detail.
|
[
"Show",
"a",
"list",
"of",
"manager",
"positions",
"with",
"links",
"to",
"view",
"in",
"detail",
"."
] |
1b6589f0d9fea154f0a1e2231ed906764ed26d26
|
https://github.com/knagra/farnsworth/blob/1b6589f0d9fea154f0a1e2231ed906764ed26d26/managers/views.py#L99-L105
|
245,909
|
knagra/farnsworth
|
managers/views.py
|
meta_manager_view
|
def meta_manager_view(request):
'''
A manager of managers. Display a list of current managers, with links to modify them.
Also display a link to add a new manager. Restricted to presidents and superadmins.
'''
managers = Manager.objects.all()
return render_to_response('meta_manager.html', {
'page_name': "Admin - Meta-Manager",
'managerset': managers,
}, context_instance=RequestContext(request))
|
python
|
def meta_manager_view(request):
'''
A manager of managers. Display a list of current managers, with links to modify them.
Also display a link to add a new manager. Restricted to presidents and superadmins.
'''
managers = Manager.objects.all()
return render_to_response('meta_manager.html', {
'page_name': "Admin - Meta-Manager",
'managerset': managers,
}, context_instance=RequestContext(request))
|
[
"def",
"meta_manager_view",
"(",
"request",
")",
":",
"managers",
"=",
"Manager",
".",
"objects",
".",
"all",
"(",
")",
"return",
"render_to_response",
"(",
"'meta_manager.html'",
",",
"{",
"'page_name'",
":",
"\"Admin - Meta-Manager\"",
",",
"'managerset'",
":",
"managers",
",",
"}",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
")"
] |
A manager of managers. Display a list of current managers, with links to modify them.
Also display a link to add a new manager. Restricted to presidents and superadmins.
|
[
"A",
"manager",
"of",
"managers",
".",
"Display",
"a",
"list",
"of",
"current",
"managers",
"with",
"links",
"to",
"modify",
"them",
".",
"Also",
"display",
"a",
"link",
"to",
"add",
"a",
"new",
"manager",
".",
"Restricted",
"to",
"presidents",
"and",
"superadmins",
"."
] |
1b6589f0d9fea154f0a1e2231ed906764ed26d26
|
https://github.com/knagra/farnsworth/blob/1b6589f0d9fea154f0a1e2231ed906764ed26d26/managers/views.py#L126-L135
|
245,910
|
knagra/farnsworth
|
managers/views.py
|
add_manager_view
|
def add_manager_view(request):
''' View to add a new manager position. Restricted to superadmins and presidents. '''
form = ManagerForm(request.POST or None)
if form.is_valid():
manager = form.save()
messages.add_message(request, messages.SUCCESS,
MESSAGES['MANAGER_ADDED'].format(managerTitle=manager.title))
return HttpResponseRedirect(reverse('managers:add_manager'))
return render_to_response('edit_manager.html', {
'page_name': "Admin - Add Manager",
'managerset': Manager.objects.all(),
'form': form,
}, context_instance=RequestContext(request))
|
python
|
def add_manager_view(request):
''' View to add a new manager position. Restricted to superadmins and presidents. '''
form = ManagerForm(request.POST or None)
if form.is_valid():
manager = form.save()
messages.add_message(request, messages.SUCCESS,
MESSAGES['MANAGER_ADDED'].format(managerTitle=manager.title))
return HttpResponseRedirect(reverse('managers:add_manager'))
return render_to_response('edit_manager.html', {
'page_name': "Admin - Add Manager",
'managerset': Manager.objects.all(),
'form': form,
}, context_instance=RequestContext(request))
|
[
"def",
"add_manager_view",
"(",
"request",
")",
":",
"form",
"=",
"ManagerForm",
"(",
"request",
".",
"POST",
"or",
"None",
")",
"if",
"form",
".",
"is_valid",
"(",
")",
":",
"manager",
"=",
"form",
".",
"save",
"(",
")",
"messages",
".",
"add_message",
"(",
"request",
",",
"messages",
".",
"SUCCESS",
",",
"MESSAGES",
"[",
"'MANAGER_ADDED'",
"]",
".",
"format",
"(",
"managerTitle",
"=",
"manager",
".",
"title",
")",
")",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'managers:add_manager'",
")",
")",
"return",
"render_to_response",
"(",
"'edit_manager.html'",
",",
"{",
"'page_name'",
":",
"\"Admin - Add Manager\"",
",",
"'managerset'",
":",
"Manager",
".",
"objects",
".",
"all",
"(",
")",
",",
"'form'",
":",
"form",
",",
"}",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
")"
] |
View to add a new manager position. Restricted to superadmins and presidents.
|
[
"View",
"to",
"add",
"a",
"new",
"manager",
"position",
".",
"Restricted",
"to",
"superadmins",
"and",
"presidents",
"."
] |
1b6589f0d9fea154f0a1e2231ed906764ed26d26
|
https://github.com/knagra/farnsworth/blob/1b6589f0d9fea154f0a1e2231ed906764ed26d26/managers/views.py#L138-L150
|
245,911
|
knagra/farnsworth
|
managers/views.py
|
manage_request_types_view
|
def manage_request_types_view(request):
''' Manage requests. Display a list of request types with links to edit them.
Also display a link to add a new request type. Restricted to presidents and superadmins.
'''
request_types = RequestType.objects.all()
return render_to_response('manage_request_types.html', {
'page_name': "Admin - Manage Request Types",
'request_types': request_types
}, context_instance=RequestContext(request))
|
python
|
def manage_request_types_view(request):
''' Manage requests. Display a list of request types with links to edit them.
Also display a link to add a new request type. Restricted to presidents and superadmins.
'''
request_types = RequestType.objects.all()
return render_to_response('manage_request_types.html', {
'page_name': "Admin - Manage Request Types",
'request_types': request_types
}, context_instance=RequestContext(request))
|
[
"def",
"manage_request_types_view",
"(",
"request",
")",
":",
"request_types",
"=",
"RequestType",
".",
"objects",
".",
"all",
"(",
")",
"return",
"render_to_response",
"(",
"'manage_request_types.html'",
",",
"{",
"'page_name'",
":",
"\"Admin - Manage Request Types\"",
",",
"'request_types'",
":",
"request_types",
"}",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
")"
] |
Manage requests. Display a list of request types with links to edit them.
Also display a link to add a new request type. Restricted to presidents and superadmins.
|
[
"Manage",
"requests",
".",
"Display",
"a",
"list",
"of",
"request",
"types",
"with",
"links",
"to",
"edit",
"them",
".",
"Also",
"display",
"a",
"link",
"to",
"add",
"a",
"new",
"request",
"type",
".",
"Restricted",
"to",
"presidents",
"and",
"superadmins",
"."
] |
1b6589f0d9fea154f0a1e2231ed906764ed26d26
|
https://github.com/knagra/farnsworth/blob/1b6589f0d9fea154f0a1e2231ed906764ed26d26/managers/views.py#L177-L185
|
245,912
|
knagra/farnsworth
|
managers/views.py
|
add_request_type_view
|
def add_request_type_view(request):
''' View to add a new request type. Restricted to presidents and superadmins. '''
form = RequestTypeForm(request.POST or None)
if form.is_valid():
rtype = form.save()
messages.add_message(request, messages.SUCCESS,
MESSAGES['REQUEST_TYPE_ADDED'].format(typeName=rtype.name))
return HttpResponseRedirect(reverse('managers:manage_request_types'))
return render_to_response('edit_request_type.html', {
'page_name': "Admin - Add Request Type",
'request_types': RequestType.objects.all(),
'form': form,
}, context_instance=RequestContext(request))
|
python
|
def add_request_type_view(request):
''' View to add a new request type. Restricted to presidents and superadmins. '''
form = RequestTypeForm(request.POST or None)
if form.is_valid():
rtype = form.save()
messages.add_message(request, messages.SUCCESS,
MESSAGES['REQUEST_TYPE_ADDED'].format(typeName=rtype.name))
return HttpResponseRedirect(reverse('managers:manage_request_types'))
return render_to_response('edit_request_type.html', {
'page_name': "Admin - Add Request Type",
'request_types': RequestType.objects.all(),
'form': form,
}, context_instance=RequestContext(request))
|
[
"def",
"add_request_type_view",
"(",
"request",
")",
":",
"form",
"=",
"RequestTypeForm",
"(",
"request",
".",
"POST",
"or",
"None",
")",
"if",
"form",
".",
"is_valid",
"(",
")",
":",
"rtype",
"=",
"form",
".",
"save",
"(",
")",
"messages",
".",
"add_message",
"(",
"request",
",",
"messages",
".",
"SUCCESS",
",",
"MESSAGES",
"[",
"'REQUEST_TYPE_ADDED'",
"]",
".",
"format",
"(",
"typeName",
"=",
"rtype",
".",
"name",
")",
")",
"return",
"HttpResponseRedirect",
"(",
"reverse",
"(",
"'managers:manage_request_types'",
")",
")",
"return",
"render_to_response",
"(",
"'edit_request_type.html'",
",",
"{",
"'page_name'",
":",
"\"Admin - Add Request Type\"",
",",
"'request_types'",
":",
"RequestType",
".",
"objects",
".",
"all",
"(",
")",
",",
"'form'",
":",
"form",
",",
"}",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
")"
] |
View to add a new request type. Restricted to presidents and superadmins.
|
[
"View",
"to",
"add",
"a",
"new",
"request",
"type",
".",
"Restricted",
"to",
"presidents",
"and",
"superadmins",
"."
] |
1b6589f0d9fea154f0a1e2231ed906764ed26d26
|
https://github.com/knagra/farnsworth/blob/1b6589f0d9fea154f0a1e2231ed906764ed26d26/managers/views.py#L188-L200
|
245,913
|
knagra/farnsworth
|
managers/views.py
|
all_requests_view
|
def all_requests_view(request):
'''
Show user a list of enabled request types, the number of requests of each
type and a link to see them all.
'''
# Pseudo-dictionary, actually a list with items of form
# (request_type.name.title(), number_of_type_requests, name, enabled,
# glyphicon)
types_dict = list()
for request_type in RequestType.objects.all():
requests = Request.objects.filter(request_type=request_type)
# Hide the count for private requests
if not request_type.managers.filter(incumbent__user=request.user):
requests = requests.exclude(
~Q(owner__user=request.user), private=True,
)
number_of_requests = requests.count()
types_dict.append((
request_type.name.title(), number_of_requests,
request_type.url_name, request_type.enabled,
request_type.glyphicon,
))
return render_to_response('all_requests.html', {
'page_name': "Archives - All Requests",
'types_dict': types_dict,
}, context_instance=RequestContext(request))
|
python
|
def all_requests_view(request):
'''
Show user a list of enabled request types, the number of requests of each
type and a link to see them all.
'''
# Pseudo-dictionary, actually a list with items of form
# (request_type.name.title(), number_of_type_requests, name, enabled,
# glyphicon)
types_dict = list()
for request_type in RequestType.objects.all():
requests = Request.objects.filter(request_type=request_type)
# Hide the count for private requests
if not request_type.managers.filter(incumbent__user=request.user):
requests = requests.exclude(
~Q(owner__user=request.user), private=True,
)
number_of_requests = requests.count()
types_dict.append((
request_type.name.title(), number_of_requests,
request_type.url_name, request_type.enabled,
request_type.glyphicon,
))
return render_to_response('all_requests.html', {
'page_name': "Archives - All Requests",
'types_dict': types_dict,
}, context_instance=RequestContext(request))
|
[
"def",
"all_requests_view",
"(",
"request",
")",
":",
"# Pseudo-dictionary, actually a list with items of form",
"# (request_type.name.title(), number_of_type_requests, name, enabled,",
"# glyphicon)",
"types_dict",
"=",
"list",
"(",
")",
"for",
"request_type",
"in",
"RequestType",
".",
"objects",
".",
"all",
"(",
")",
":",
"requests",
"=",
"Request",
".",
"objects",
".",
"filter",
"(",
"request_type",
"=",
"request_type",
")",
"# Hide the count for private requests",
"if",
"not",
"request_type",
".",
"managers",
".",
"filter",
"(",
"incumbent__user",
"=",
"request",
".",
"user",
")",
":",
"requests",
"=",
"requests",
".",
"exclude",
"(",
"~",
"Q",
"(",
"owner__user",
"=",
"request",
".",
"user",
")",
",",
"private",
"=",
"True",
",",
")",
"number_of_requests",
"=",
"requests",
".",
"count",
"(",
")",
"types_dict",
".",
"append",
"(",
"(",
"request_type",
".",
"name",
".",
"title",
"(",
")",
",",
"number_of_requests",
",",
"request_type",
".",
"url_name",
",",
"request_type",
".",
"enabled",
",",
"request_type",
".",
"glyphicon",
",",
")",
")",
"return",
"render_to_response",
"(",
"'all_requests.html'",
",",
"{",
"'page_name'",
":",
"\"Archives - All Requests\"",
",",
"'types_dict'",
":",
"types_dict",
",",
"}",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
")"
] |
Show user a list of enabled request types, the number of requests of each
type and a link to see them all.
|
[
"Show",
"user",
"a",
"list",
"of",
"enabled",
"request",
"types",
"the",
"number",
"of",
"requests",
"of",
"each",
"type",
"and",
"a",
"link",
"to",
"see",
"them",
"all",
"."
] |
1b6589f0d9fea154f0a1e2231ed906764ed26d26
|
https://github.com/knagra/farnsworth/blob/1b6589f0d9fea154f0a1e2231ed906764ed26d26/managers/views.py#L409-L435
|
245,914
|
knagra/farnsworth
|
managers/views.py
|
list_all_requests_view
|
def list_all_requests_view(request, requestType):
'''
Show all the requests for a given type in list form.
'''
request_type = get_object_or_404(RequestType, url_name=requestType)
requests = Request.objects.filter(request_type=request_type)
# Hide the count for private requests
if not request_type.managers.filter(incumbent__user=request.user):
requests = requests.exclude(
~Q(owner__user=request.user), private=True,
)
page_name = "Archives - All {0} Requests".format(request_type.name.title())
return render_to_response('list_requests.html', {
'page_name': page_name,
'requests': requests,
'request_type': request_type,
}, context_instance=RequestContext(request))
|
python
|
def list_all_requests_view(request, requestType):
'''
Show all the requests for a given type in list form.
'''
request_type = get_object_or_404(RequestType, url_name=requestType)
requests = Request.objects.filter(request_type=request_type)
# Hide the count for private requests
if not request_type.managers.filter(incumbent__user=request.user):
requests = requests.exclude(
~Q(owner__user=request.user), private=True,
)
page_name = "Archives - All {0} Requests".format(request_type.name.title())
return render_to_response('list_requests.html', {
'page_name': page_name,
'requests': requests,
'request_type': request_type,
}, context_instance=RequestContext(request))
|
[
"def",
"list_all_requests_view",
"(",
"request",
",",
"requestType",
")",
":",
"request_type",
"=",
"get_object_or_404",
"(",
"RequestType",
",",
"url_name",
"=",
"requestType",
")",
"requests",
"=",
"Request",
".",
"objects",
".",
"filter",
"(",
"request_type",
"=",
"request_type",
")",
"# Hide the count for private requests",
"if",
"not",
"request_type",
".",
"managers",
".",
"filter",
"(",
"incumbent__user",
"=",
"request",
".",
"user",
")",
":",
"requests",
"=",
"requests",
".",
"exclude",
"(",
"~",
"Q",
"(",
"owner__user",
"=",
"request",
".",
"user",
")",
",",
"private",
"=",
"True",
",",
")",
"page_name",
"=",
"\"Archives - All {0} Requests\"",
".",
"format",
"(",
"request_type",
".",
"name",
".",
"title",
"(",
")",
")",
"return",
"render_to_response",
"(",
"'list_requests.html'",
",",
"{",
"'page_name'",
":",
"page_name",
",",
"'requests'",
":",
"requests",
",",
"'request_type'",
":",
"request_type",
",",
"}",
",",
"context_instance",
"=",
"RequestContext",
"(",
"request",
")",
")"
] |
Show all the requests for a given type in list form.
|
[
"Show",
"all",
"the",
"requests",
"for",
"a",
"given",
"type",
"in",
"list",
"form",
"."
] |
1b6589f0d9fea154f0a1e2231ed906764ed26d26
|
https://github.com/knagra/farnsworth/blob/1b6589f0d9fea154f0a1e2231ed906764ed26d26/managers/views.py#L438-L455
|
245,915
|
jmgilman/Neolib
|
neolib/http/browser/ChromeCookies.py
|
ChromeCookies.installed
|
def installed():
""" Returns whether or not Google Chrome is installed
Determines the application data path for Google Chrome
and checks if the path exists. If so, returns True, otherwise
it will return False.
Returns
bool - True if Chrome is installed
"""
try:
path = ChromeCookies._getPath()
with open(path) as f: pass
return True
except Exception as e:
return False
|
python
|
def installed():
""" Returns whether or not Google Chrome is installed
Determines the application data path for Google Chrome
and checks if the path exists. If so, returns True, otherwise
it will return False.
Returns
bool - True if Chrome is installed
"""
try:
path = ChromeCookies._getPath()
with open(path) as f: pass
return True
except Exception as e:
return False
|
[
"def",
"installed",
"(",
")",
":",
"try",
":",
"path",
"=",
"ChromeCookies",
".",
"_getPath",
"(",
")",
"with",
"open",
"(",
"path",
")",
"as",
"f",
":",
"pass",
"return",
"True",
"except",
"Exception",
"as",
"e",
":",
"return",
"False"
] |
Returns whether or not Google Chrome is installed
Determines the application data path for Google Chrome
and checks if the path exists. If so, returns True, otherwise
it will return False.
Returns
bool - True if Chrome is installed
|
[
"Returns",
"whether",
"or",
"not",
"Google",
"Chrome",
"is",
"installed",
"Determines",
"the",
"application",
"data",
"path",
"for",
"Google",
"Chrome",
"and",
"checks",
"if",
"the",
"path",
"exists",
".",
"If",
"so",
"returns",
"True",
"otherwise",
"it",
"will",
"return",
"False",
".",
"Returns",
"bool",
"-",
"True",
"if",
"Chrome",
"is",
"installed"
] |
228fafeaed0f3195676137732384a14820ae285c
|
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/http/browser/ChromeCookies.py#L34-L50
|
245,916
|
jmgilman/Neolib
|
neolib/http/browser/ChromeCookies.py
|
ChromeCookies._getPath
|
def _getPath():
""" Returns Chrome's cookie database path
Returns
str - Google Chrome's cookie database path
"""
if os.name == "posix":
path = os.getenv("HOME") + "/.config/google-chrome/Default/Cookies"
return path
import _winreg
key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, 'Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders')
path = _winreg.QueryValueEx(key, 'Local AppData')[0]
path = os.path.join(path, 'Google\\Chrome\\User Data\\Default\\Cookies')
return path
|
python
|
def _getPath():
""" Returns Chrome's cookie database path
Returns
str - Google Chrome's cookie database path
"""
if os.name == "posix":
path = os.getenv("HOME") + "/.config/google-chrome/Default/Cookies"
return path
import _winreg
key = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, 'Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders')
path = _winreg.QueryValueEx(key, 'Local AppData')[0]
path = os.path.join(path, 'Google\\Chrome\\User Data\\Default\\Cookies')
return path
|
[
"def",
"_getPath",
"(",
")",
":",
"if",
"os",
".",
"name",
"==",
"\"posix\"",
":",
"path",
"=",
"os",
".",
"getenv",
"(",
"\"HOME\"",
")",
"+",
"\"/.config/google-chrome/Default/Cookies\"",
"return",
"path",
"import",
"_winreg",
"key",
"=",
"_winreg",
".",
"OpenKey",
"(",
"_winreg",
".",
"HKEY_CURRENT_USER",
",",
"'Software\\\\Microsoft\\\\Windows\\\\CurrentVersion\\\\Explorer\\\\Shell Folders'",
")",
"path",
"=",
"_winreg",
".",
"QueryValueEx",
"(",
"key",
",",
"'Local AppData'",
")",
"[",
"0",
"]",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"'Google\\\\Chrome\\\\User Data\\\\Default\\\\Cookies'",
")",
"return",
"path"
] |
Returns Chrome's cookie database path
Returns
str - Google Chrome's cookie database path
|
[
"Returns",
"Chrome",
"s",
"cookie",
"database",
"path",
"Returns",
"str",
"-",
"Google",
"Chrome",
"s",
"cookie",
"database",
"path"
] |
228fafeaed0f3195676137732384a14820ae285c
|
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/http/browser/ChromeCookies.py#L115-L131
|
245,917
|
ramrod-project/database-brain
|
schema/brain/decorators.py
|
deprecated_function
|
def deprecated_function(func_, replacement="(see docs)",
*args, **kwargs):
"""
decorator to annotate deprecated functions
usage @decorator(replacement="brain.whatever.new_function")
:param func_: <callable>
:param replacement: <str>
:param args: positional arguments
:param kwargs:
:return: <func_'s return value>
"""
msg = "{} is deprecated, use {}\n".format(func_.__name__,
replacement)
stderr.write(msg)
return func_(*args, **kwargs)
|
python
|
def deprecated_function(func_, replacement="(see docs)",
*args, **kwargs):
"""
decorator to annotate deprecated functions
usage @decorator(replacement="brain.whatever.new_function")
:param func_: <callable>
:param replacement: <str>
:param args: positional arguments
:param kwargs:
:return: <func_'s return value>
"""
msg = "{} is deprecated, use {}\n".format(func_.__name__,
replacement)
stderr.write(msg)
return func_(*args, **kwargs)
|
[
"def",
"deprecated_function",
"(",
"func_",
",",
"replacement",
"=",
"\"(see docs)\"",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"msg",
"=",
"\"{} is deprecated, use {}\\n\"",
".",
"format",
"(",
"func_",
".",
"__name__",
",",
"replacement",
")",
"stderr",
".",
"write",
"(",
"msg",
")",
"return",
"func_",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
decorator to annotate deprecated functions
usage @decorator(replacement="brain.whatever.new_function")
:param func_: <callable>
:param replacement: <str>
:param args: positional arguments
:param kwargs:
:return: <func_'s return value>
|
[
"decorator",
"to",
"annotate",
"deprecated",
"functions"
] |
b024cb44f34cabb9d80af38271ddb65c25767083
|
https://github.com/ramrod-project/database-brain/blob/b024cb44f34cabb9d80af38271ddb65c25767083/schema/brain/decorators.py#L45-L61
|
245,918
|
etscrivner/nose-perfdump
|
perfdump/html.py
|
HtmlReport.write
|
def write(cls, html_file):
"""Writes the HTML report to the given file."""
f = open(html_file, 'w')
f.write('<html>')
f.write('<head>')
f.write('</head>')
f.write('<body>')
f.write('<h1>Test times</h1>')
fmt_test = '<tr><td>{:.05f}</td><td>{}</td></tr><tr><td> </td><td>{}</td></tr>'
f.write('<table>')
f.write('<tr><th>Time</th><th>Test info</th></tr>')
for row in TestTime.get_slowest_tests(10):
f.write(fmt_test.format(row['elapsed'], row['file'], '{}.{}.{}'.format(row['module'], row['class'], row['func'])))
f.write('</table>')
fmt_file = '<tr><td>{:.05f}</td><td>{}</td></tr>'
f.write('<table>')
f.write('<tr><th>Time</th><th>Test info</th></tr>')
for row in TestTime.get_slowest_files(10):
f.write(fmt_file.format(row['sum_elapsed'], row['file']))
f.write('</table>')
f.write('<h1>Setup times</h1>')
f.write('<table>')
f.write('<tr><th>Time</th><th>Test info</th></tr>')
for row in SetupTime.get_slowest_tests(10):
f.write(fmt_test.format(row['elapsed'], row['file'], '{}.{}.{}'.format(row['module'], row['class'], row['func'])))
f.write('</table>')
f.write('<table>')
f.write('<tr><th>Time</th><th>Test info</th></tr>')
for row in SetupTime.get_slowest_files(10):
f.write(fmt_file.format(row['sum_elapsed'], row['file']))
f.write('</table>')
f.write('</body>')
f.write('</html>')
f.close()
|
python
|
def write(cls, html_file):
"""Writes the HTML report to the given file."""
f = open(html_file, 'w')
f.write('<html>')
f.write('<head>')
f.write('</head>')
f.write('<body>')
f.write('<h1>Test times</h1>')
fmt_test = '<tr><td>{:.05f}</td><td>{}</td></tr><tr><td> </td><td>{}</td></tr>'
f.write('<table>')
f.write('<tr><th>Time</th><th>Test info</th></tr>')
for row in TestTime.get_slowest_tests(10):
f.write(fmt_test.format(row['elapsed'], row['file'], '{}.{}.{}'.format(row['module'], row['class'], row['func'])))
f.write('</table>')
fmt_file = '<tr><td>{:.05f}</td><td>{}</td></tr>'
f.write('<table>')
f.write('<tr><th>Time</th><th>Test info</th></tr>')
for row in TestTime.get_slowest_files(10):
f.write(fmt_file.format(row['sum_elapsed'], row['file']))
f.write('</table>')
f.write('<h1>Setup times</h1>')
f.write('<table>')
f.write('<tr><th>Time</th><th>Test info</th></tr>')
for row in SetupTime.get_slowest_tests(10):
f.write(fmt_test.format(row['elapsed'], row['file'], '{}.{}.{}'.format(row['module'], row['class'], row['func'])))
f.write('</table>')
f.write('<table>')
f.write('<tr><th>Time</th><th>Test info</th></tr>')
for row in SetupTime.get_slowest_files(10):
f.write(fmt_file.format(row['sum_elapsed'], row['file']))
f.write('</table>')
f.write('</body>')
f.write('</html>')
f.close()
|
[
"def",
"write",
"(",
"cls",
",",
"html_file",
")",
":",
"f",
"=",
"open",
"(",
"html_file",
",",
"'w'",
")",
"f",
".",
"write",
"(",
"'<html>'",
")",
"f",
".",
"write",
"(",
"'<head>'",
")",
"f",
".",
"write",
"(",
"'</head>'",
")",
"f",
".",
"write",
"(",
"'<body>'",
")",
"f",
".",
"write",
"(",
"'<h1>Test times</h1>'",
")",
"fmt_test",
"=",
"'<tr><td>{:.05f}</td><td>{}</td></tr><tr><td> </td><td>{}</td></tr>'",
"f",
".",
"write",
"(",
"'<table>'",
")",
"f",
".",
"write",
"(",
"'<tr><th>Time</th><th>Test info</th></tr>'",
")",
"for",
"row",
"in",
"TestTime",
".",
"get_slowest_tests",
"(",
"10",
")",
":",
"f",
".",
"write",
"(",
"fmt_test",
".",
"format",
"(",
"row",
"[",
"'elapsed'",
"]",
",",
"row",
"[",
"'file'",
"]",
",",
"'{}.{}.{}'",
".",
"format",
"(",
"row",
"[",
"'module'",
"]",
",",
"row",
"[",
"'class'",
"]",
",",
"row",
"[",
"'func'",
"]",
")",
")",
")",
"f",
".",
"write",
"(",
"'</table>'",
")",
"fmt_file",
"=",
"'<tr><td>{:.05f}</td><td>{}</td></tr>'",
"f",
".",
"write",
"(",
"'<table>'",
")",
"f",
".",
"write",
"(",
"'<tr><th>Time</th><th>Test info</th></tr>'",
")",
"for",
"row",
"in",
"TestTime",
".",
"get_slowest_files",
"(",
"10",
")",
":",
"f",
".",
"write",
"(",
"fmt_file",
".",
"format",
"(",
"row",
"[",
"'sum_elapsed'",
"]",
",",
"row",
"[",
"'file'",
"]",
")",
")",
"f",
".",
"write",
"(",
"'</table>'",
")",
"f",
".",
"write",
"(",
"'<h1>Setup times</h1>'",
")",
"f",
".",
"write",
"(",
"'<table>'",
")",
"f",
".",
"write",
"(",
"'<tr><th>Time</th><th>Test info</th></tr>'",
")",
"for",
"row",
"in",
"SetupTime",
".",
"get_slowest_tests",
"(",
"10",
")",
":",
"f",
".",
"write",
"(",
"fmt_test",
".",
"format",
"(",
"row",
"[",
"'elapsed'",
"]",
",",
"row",
"[",
"'file'",
"]",
",",
"'{}.{}.{}'",
".",
"format",
"(",
"row",
"[",
"'module'",
"]",
",",
"row",
"[",
"'class'",
"]",
",",
"row",
"[",
"'func'",
"]",
")",
")",
")",
"f",
".",
"write",
"(",
"'</table>'",
")",
"f",
".",
"write",
"(",
"'<table>'",
")",
"f",
".",
"write",
"(",
"'<tr><th>Time</th><th>Test info</th></tr>'",
")",
"for",
"row",
"in",
"SetupTime",
".",
"get_slowest_files",
"(",
"10",
")",
":",
"f",
".",
"write",
"(",
"fmt_file",
".",
"format",
"(",
"row",
"[",
"'sum_elapsed'",
"]",
",",
"row",
"[",
"'file'",
"]",
")",
")",
"f",
".",
"write",
"(",
"'</table>'",
")",
"f",
".",
"write",
"(",
"'</body>'",
")",
"f",
".",
"write",
"(",
"'</html>'",
")",
"f",
".",
"close",
"(",
")"
] |
Writes the HTML report to the given file.
|
[
"Writes",
"the",
"HTML",
"report",
"to",
"the",
"given",
"file",
"."
] |
a203a68495d30346fab43fb903cb60cd29b17d49
|
https://github.com/etscrivner/nose-perfdump/blob/a203a68495d30346fab43fb903cb60cd29b17d49/perfdump/html.py#L36-L81
|
245,919
|
dmckeone/frosty
|
frosty/includes.py
|
build_includes
|
def build_includes(include_packages, freezer=None, optional=None):
"""
Iterate the list of packages to build a complete list of those packages as well as all subpackages.
:param include_packages: list of package names
:type: include_pacakges: list of basestr
:param freezer: The freezer to use (See FREEZER constants)
:param optional: Optional pacakge names to include (will only issue a warning if they don't exist)
:return: complete set of package includes
"""
freezer = resolve_freezer(freezer)
# Import (or get reference to) all listed packages to ensure that they exist.
package_references = _import_packages(include_packages, optional=optional)
# Find all includes for the given freezer type
includes = freezer.build_includes(package_references)
return includes
|
python
|
def build_includes(include_packages, freezer=None, optional=None):
"""
Iterate the list of packages to build a complete list of those packages as well as all subpackages.
:param include_packages: list of package names
:type: include_pacakges: list of basestr
:param freezer: The freezer to use (See FREEZER constants)
:param optional: Optional pacakge names to include (will only issue a warning if they don't exist)
:return: complete set of package includes
"""
freezer = resolve_freezer(freezer)
# Import (or get reference to) all listed packages to ensure that they exist.
package_references = _import_packages(include_packages, optional=optional)
# Find all includes for the given freezer type
includes = freezer.build_includes(package_references)
return includes
|
[
"def",
"build_includes",
"(",
"include_packages",
",",
"freezer",
"=",
"None",
",",
"optional",
"=",
"None",
")",
":",
"freezer",
"=",
"resolve_freezer",
"(",
"freezer",
")",
"# Import (or get reference to) all listed packages to ensure that they exist.",
"package_references",
"=",
"_import_packages",
"(",
"include_packages",
",",
"optional",
"=",
"optional",
")",
"# Find all includes for the given freezer type",
"includes",
"=",
"freezer",
".",
"build_includes",
"(",
"package_references",
")",
"return",
"includes"
] |
Iterate the list of packages to build a complete list of those packages as well as all subpackages.
:param include_packages: list of package names
:type: include_pacakges: list of basestr
:param freezer: The freezer to use (See FREEZER constants)
:param optional: Optional pacakge names to include (will only issue a warning if they don't exist)
:return: complete set of package includes
|
[
"Iterate",
"the",
"list",
"of",
"packages",
"to",
"build",
"a",
"complete",
"list",
"of",
"those",
"packages",
"as",
"well",
"as",
"all",
"subpackages",
"."
] |
868d81e72b6c8e354af3697531c20f116cd1fc9a
|
https://github.com/dmckeone/frosty/blob/868d81e72b6c8e354af3697531c20f116cd1fc9a/frosty/includes.py#L52-L70
|
245,920
|
emencia/emencia_paste_djangocms_3
|
emencia_paste_djangocms_3/django_buildout/project/utils/templatetags/i18n_addons.py
|
locale_title
|
def locale_title(locale_name):
"""
Giving a locale name return its title, taken from the settings.EXTRA_COUNTRY_LOCALES
If the locale is not in the settings.EXTRA_COUNTRY_LOCALES, return it unchanged
"""
l = dict(settings.EXTRA_COUNTRY_LOCALES)
if locale_name not in l:
return locale_name
return l.get(locale_name)
|
python
|
def locale_title(locale_name):
"""
Giving a locale name return its title, taken from the settings.EXTRA_COUNTRY_LOCALES
If the locale is not in the settings.EXTRA_COUNTRY_LOCALES, return it unchanged
"""
l = dict(settings.EXTRA_COUNTRY_LOCALES)
if locale_name not in l:
return locale_name
return l.get(locale_name)
|
[
"def",
"locale_title",
"(",
"locale_name",
")",
":",
"l",
"=",
"dict",
"(",
"settings",
".",
"EXTRA_COUNTRY_LOCALES",
")",
"if",
"locale_name",
"not",
"in",
"l",
":",
"return",
"locale_name",
"return",
"l",
".",
"get",
"(",
"locale_name",
")"
] |
Giving a locale name return its title, taken from the settings.EXTRA_COUNTRY_LOCALES
If the locale is not in the settings.EXTRA_COUNTRY_LOCALES, return it unchanged
|
[
"Giving",
"a",
"locale",
"name",
"return",
"its",
"title",
"taken",
"from",
"the",
"settings",
".",
"EXTRA_COUNTRY_LOCALES",
"If",
"the",
"locale",
"is",
"not",
"in",
"the",
"settings",
".",
"EXTRA_COUNTRY_LOCALES",
"return",
"it",
"unchanged"
] |
29eabbcb17e21996a6e1d99592fc719dc8833b59
|
https://github.com/emencia/emencia_paste_djangocms_3/blob/29eabbcb17e21996a6e1d99592fc719dc8833b59/emencia_paste_djangocms_3/django_buildout/project/utils/templatetags/i18n_addons.py#L17-L26
|
245,921
|
shad7/tvdbapi_client
|
tvdbapi_client/options.py
|
_make_opt_list
|
def _make_opt_list(opts, group):
"""Generate a list of tuple containing group, options
:param opts: option lists associated with a group
:type opts: list
:param group: name of an option group
:type group: str
:return: a list of (group_name, opts) tuples
:rtype: list
"""
import copy
import itertools
_opts = [(group, list(itertools.chain(*opts)))]
return [(g, copy.deepcopy(o)) for g, o in _opts]
|
python
|
def _make_opt_list(opts, group):
"""Generate a list of tuple containing group, options
:param opts: option lists associated with a group
:type opts: list
:param group: name of an option group
:type group: str
:return: a list of (group_name, opts) tuples
:rtype: list
"""
import copy
import itertools
_opts = [(group, list(itertools.chain(*opts)))]
return [(g, copy.deepcopy(o)) for g, o in _opts]
|
[
"def",
"_make_opt_list",
"(",
"opts",
",",
"group",
")",
":",
"import",
"copy",
"import",
"itertools",
"_opts",
"=",
"[",
"(",
"group",
",",
"list",
"(",
"itertools",
".",
"chain",
"(",
"*",
"opts",
")",
")",
")",
"]",
"return",
"[",
"(",
"g",
",",
"copy",
".",
"deepcopy",
"(",
"o",
")",
")",
"for",
"g",
",",
"o",
"in",
"_opts",
"]"
] |
Generate a list of tuple containing group, options
:param opts: option lists associated with a group
:type opts: list
:param group: name of an option group
:type group: str
:return: a list of (group_name, opts) tuples
:rtype: list
|
[
"Generate",
"a",
"list",
"of",
"tuple",
"containing",
"group",
"options"
] |
edf1771184122f4db42af7fc087407a3e6a4e377
|
https://github.com/shad7/tvdbapi_client/blob/edf1771184122f4db42af7fc087407a3e6a4e377/tvdbapi_client/options.py#L32-L46
|
245,922
|
boisei0/pynoramio
|
pynoramio/__init__.py
|
Pynoramio._request
|
def _request(self, lat_min, lon_min, lat_max, lon_max, start, end, picture_size=None, set_=None, map_filter=None):
"""
Internal method to send requests to the Panoramio data API.
:param lat_min:
Minimum latitude of the bounding box
:type lat_min: float
:param lon_min:
Minimum longitude of the bounding box
:type lon_min: float
:param lat_max:
Maximum latitude of the bounding box
:type lat_max: float
:param lon_max:
Maximum longitude of the bounding box
:type lon_max: float
:param start:
Start number of the number of photo's to retrieve, where 0 is the most popular picture
:type start: int
:param end:
Last number of the number of photo's to retrieve, where 0 is the most popular picture
:type end: int
:param picture_size:
This can be: original, medium (*default*), small, thumbnail, square, mini_square
:type picture_size: basestring
:param set_:
This can be: public, popular or user-id; where user-id is the specific id of a user (as integer)
:type set_: basestring/int
:param map_filter:
Whether to return photos that look better together; when True, tries to avoid returning photos of the same
location
:type map_filter: bool
:return: JSON response of the request formatted as a dictionary.
"""
if not isinstance(lat_min, float):
raise PynoramioException(
'{0}._request requires the lat_min parameter to be a float.'.format(self.__class__.__name__))
if not isinstance(lon_min, float):
raise PynoramioException(
'{0}._request requires the lon_min parameter to be a float.'.format(self.__class__.__name__))
if not isinstance(lat_max, float):
raise PynoramioException(
'{0}._request requires the lat_max parameter to be a float.'.format(self.__class__.__name__))
if not isinstance(lon_max, float):
raise PynoramioException(
'{0}._request requires the lon_max parameter to be a float.'.format(self.__class__.__name__))
if not isinstance(start, int):
raise PynoramioException(
'{0}._request requires the start parameter to be an int.'.format(self.__class__.__name__))
if not isinstance(end, int):
raise PynoramioException(
'{0}._request requires the end parameter to be an int.'.format(self.__class__.__name__))
url = self.base_url + '&minx={0}&miny={1}&maxx={2}&maxy={3}&from={4}&to={5}'.format(lon_min, lat_min,
lon_max, lat_max,
start, end)
if picture_size is not None and isinstance(picture_size, basestring) \
and picture_size in ['original', 'medium', 'small', 'thumbnail', 'square', 'mini_square']:
url += '&size={0}'.format(picture_size)
if set_ is not None and (isinstance(set_, basestring) and set_ in ['public', 'full']) \
or (isinstance(set_, int)):
url += '&set={0}'.format(set_)
else:
url += '&set=public'
if map_filter is not None and isinstance(map_filter, bool) and not map_filter:
url += '&map_filter=false'
r = requests.get(url)
try:
return r.json()
except ValueError:
# add your debugging lines here, for example, print(r.url)
raise PynoramioException(
'An invalid or malformed url was passed to {0}._request'.format(self.__class__.__name__))
|
python
|
def _request(self, lat_min, lon_min, lat_max, lon_max, start, end, picture_size=None, set_=None, map_filter=None):
"""
Internal method to send requests to the Panoramio data API.
:param lat_min:
Minimum latitude of the bounding box
:type lat_min: float
:param lon_min:
Minimum longitude of the bounding box
:type lon_min: float
:param lat_max:
Maximum latitude of the bounding box
:type lat_max: float
:param lon_max:
Maximum longitude of the bounding box
:type lon_max: float
:param start:
Start number of the number of photo's to retrieve, where 0 is the most popular picture
:type start: int
:param end:
Last number of the number of photo's to retrieve, where 0 is the most popular picture
:type end: int
:param picture_size:
This can be: original, medium (*default*), small, thumbnail, square, mini_square
:type picture_size: basestring
:param set_:
This can be: public, popular or user-id; where user-id is the specific id of a user (as integer)
:type set_: basestring/int
:param map_filter:
Whether to return photos that look better together; when True, tries to avoid returning photos of the same
location
:type map_filter: bool
:return: JSON response of the request formatted as a dictionary.
"""
if not isinstance(lat_min, float):
raise PynoramioException(
'{0}._request requires the lat_min parameter to be a float.'.format(self.__class__.__name__))
if not isinstance(lon_min, float):
raise PynoramioException(
'{0}._request requires the lon_min parameter to be a float.'.format(self.__class__.__name__))
if not isinstance(lat_max, float):
raise PynoramioException(
'{0}._request requires the lat_max parameter to be a float.'.format(self.__class__.__name__))
if not isinstance(lon_max, float):
raise PynoramioException(
'{0}._request requires the lon_max parameter to be a float.'.format(self.__class__.__name__))
if not isinstance(start, int):
raise PynoramioException(
'{0}._request requires the start parameter to be an int.'.format(self.__class__.__name__))
if not isinstance(end, int):
raise PynoramioException(
'{0}._request requires the end parameter to be an int.'.format(self.__class__.__name__))
url = self.base_url + '&minx={0}&miny={1}&maxx={2}&maxy={3}&from={4}&to={5}'.format(lon_min, lat_min,
lon_max, lat_max,
start, end)
if picture_size is not None and isinstance(picture_size, basestring) \
and picture_size in ['original', 'medium', 'small', 'thumbnail', 'square', 'mini_square']:
url += '&size={0}'.format(picture_size)
if set_ is not None and (isinstance(set_, basestring) and set_ in ['public', 'full']) \
or (isinstance(set_, int)):
url += '&set={0}'.format(set_)
else:
url += '&set=public'
if map_filter is not None and isinstance(map_filter, bool) and not map_filter:
url += '&map_filter=false'
r = requests.get(url)
try:
return r.json()
except ValueError:
# add your debugging lines here, for example, print(r.url)
raise PynoramioException(
'An invalid or malformed url was passed to {0}._request'.format(self.__class__.__name__))
|
[
"def",
"_request",
"(",
"self",
",",
"lat_min",
",",
"lon_min",
",",
"lat_max",
",",
"lon_max",
",",
"start",
",",
"end",
",",
"picture_size",
"=",
"None",
",",
"set_",
"=",
"None",
",",
"map_filter",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"lat_min",
",",
"float",
")",
":",
"raise",
"PynoramioException",
"(",
"'{0}._request requires the lat_min parameter to be a float.'",
".",
"format",
"(",
"self",
".",
"__class__",
".",
"__name__",
")",
")",
"if",
"not",
"isinstance",
"(",
"lon_min",
",",
"float",
")",
":",
"raise",
"PynoramioException",
"(",
"'{0}._request requires the lon_min parameter to be a float.'",
".",
"format",
"(",
"self",
".",
"__class__",
".",
"__name__",
")",
")",
"if",
"not",
"isinstance",
"(",
"lat_max",
",",
"float",
")",
":",
"raise",
"PynoramioException",
"(",
"'{0}._request requires the lat_max parameter to be a float.'",
".",
"format",
"(",
"self",
".",
"__class__",
".",
"__name__",
")",
")",
"if",
"not",
"isinstance",
"(",
"lon_max",
",",
"float",
")",
":",
"raise",
"PynoramioException",
"(",
"'{0}._request requires the lon_max parameter to be a float.'",
".",
"format",
"(",
"self",
".",
"__class__",
".",
"__name__",
")",
")",
"if",
"not",
"isinstance",
"(",
"start",
",",
"int",
")",
":",
"raise",
"PynoramioException",
"(",
"'{0}._request requires the start parameter to be an int.'",
".",
"format",
"(",
"self",
".",
"__class__",
".",
"__name__",
")",
")",
"if",
"not",
"isinstance",
"(",
"end",
",",
"int",
")",
":",
"raise",
"PynoramioException",
"(",
"'{0}._request requires the end parameter to be an int.'",
".",
"format",
"(",
"self",
".",
"__class__",
".",
"__name__",
")",
")",
"url",
"=",
"self",
".",
"base_url",
"+",
"'&minx={0}&miny={1}&maxx={2}&maxy={3}&from={4}&to={5}'",
".",
"format",
"(",
"lon_min",
",",
"lat_min",
",",
"lon_max",
",",
"lat_max",
",",
"start",
",",
"end",
")",
"if",
"picture_size",
"is",
"not",
"None",
"and",
"isinstance",
"(",
"picture_size",
",",
"basestring",
")",
"and",
"picture_size",
"in",
"[",
"'original'",
",",
"'medium'",
",",
"'small'",
",",
"'thumbnail'",
",",
"'square'",
",",
"'mini_square'",
"]",
":",
"url",
"+=",
"'&size={0}'",
".",
"format",
"(",
"picture_size",
")",
"if",
"set_",
"is",
"not",
"None",
"and",
"(",
"isinstance",
"(",
"set_",
",",
"basestring",
")",
"and",
"set_",
"in",
"[",
"'public'",
",",
"'full'",
"]",
")",
"or",
"(",
"isinstance",
"(",
"set_",
",",
"int",
")",
")",
":",
"url",
"+=",
"'&set={0}'",
".",
"format",
"(",
"set_",
")",
"else",
":",
"url",
"+=",
"'&set=public'",
"if",
"map_filter",
"is",
"not",
"None",
"and",
"isinstance",
"(",
"map_filter",
",",
"bool",
")",
"and",
"not",
"map_filter",
":",
"url",
"+=",
"'&map_filter=false'",
"r",
"=",
"requests",
".",
"get",
"(",
"url",
")",
"try",
":",
"return",
"r",
".",
"json",
"(",
")",
"except",
"ValueError",
":",
"# add your debugging lines here, for example, print(r.url)",
"raise",
"PynoramioException",
"(",
"'An invalid or malformed url was passed to {0}._request'",
".",
"format",
"(",
"self",
".",
"__class__",
".",
"__name__",
")",
")"
] |
Internal method to send requests to the Panoramio data API.
:param lat_min:
Minimum latitude of the bounding box
:type lat_min: float
:param lon_min:
Minimum longitude of the bounding box
:type lon_min: float
:param lat_max:
Maximum latitude of the bounding box
:type lat_max: float
:param lon_max:
Maximum longitude of the bounding box
:type lon_max: float
:param start:
Start number of the number of photo's to retrieve, where 0 is the most popular picture
:type start: int
:param end:
Last number of the number of photo's to retrieve, where 0 is the most popular picture
:type end: int
:param picture_size:
This can be: original, medium (*default*), small, thumbnail, square, mini_square
:type picture_size: basestring
:param set_:
This can be: public, popular or user-id; where user-id is the specific id of a user (as integer)
:type set_: basestring/int
:param map_filter:
Whether to return photos that look better together; when True, tries to avoid returning photos of the same
location
:type map_filter: bool
:return: JSON response of the request formatted as a dictionary.
|
[
"Internal",
"method",
"to",
"send",
"requests",
"to",
"the",
"Panoramio",
"data",
"API",
"."
] |
d87a1edd691e9bed0527c7b3fe43239687e61546
|
https://github.com/boisei0/pynoramio/blob/d87a1edd691e9bed0527c7b3fe43239687e61546/pynoramio/__init__.py#L18-L95
|
245,923
|
boisei0/pynoramio
|
pynoramio/__init__.py
|
Pynoramio.get_from_area
|
def get_from_area(self, lat_min, lon_min, lat_max, lon_max, picture_size=None, set_=None, map_filter=None):
"""
Get all available photos for a specific bounding box
:param lat_min:
Minimum latitude of the bounding box
:type lat_min: float
:param lon_min:
Minimum longitude of the bounding box
:type lon_min: float
:param lat_max:
Maximum latitude of the bounding box
:type lat_max: float
:param lon_max:
Maximum longitude of the bounding box
:type lon_max: float
:param picture_size:
This can be: original, medium (*default*), small, thumbnail, square, mini_square
:type picture_size: basestring
:param set_:
This can be: public, popular or user-id; where user-id is the specific id of a user (as integer)
:type set_: basestring/int
:param map_filter:
Whether to return photos that look better together; when True, tries to avoid returning photos of the same
location
:type map_filter: bool
:return: Returns the full dataset of all available photos
"""
page_size = 100
page = 0
result = self._request(lat_min, lon_min, lat_max, lon_max, page * page_size, (page + 1) * page_size,
picture_size, set_, map_filter)
total_photos = result['count']
if total_photos < page_size:
return result
page += 1
pages = (total_photos / page_size) + 1
while page < pages:
new_result = self._request(lat_min, lon_min, lat_max, lon_max, page * page_size, (page + 1) * page_size,
picture_size, set_, map_filter)
result['photos'].extend(new_result['photos'])
page += 1
return result
|
python
|
def get_from_area(self, lat_min, lon_min, lat_max, lon_max, picture_size=None, set_=None, map_filter=None):
"""
Get all available photos for a specific bounding box
:param lat_min:
Minimum latitude of the bounding box
:type lat_min: float
:param lon_min:
Minimum longitude of the bounding box
:type lon_min: float
:param lat_max:
Maximum latitude of the bounding box
:type lat_max: float
:param lon_max:
Maximum longitude of the bounding box
:type lon_max: float
:param picture_size:
This can be: original, medium (*default*), small, thumbnail, square, mini_square
:type picture_size: basestring
:param set_:
This can be: public, popular or user-id; where user-id is the specific id of a user (as integer)
:type set_: basestring/int
:param map_filter:
Whether to return photos that look better together; when True, tries to avoid returning photos of the same
location
:type map_filter: bool
:return: Returns the full dataset of all available photos
"""
page_size = 100
page = 0
result = self._request(lat_min, lon_min, lat_max, lon_max, page * page_size, (page + 1) * page_size,
picture_size, set_, map_filter)
total_photos = result['count']
if total_photos < page_size:
return result
page += 1
pages = (total_photos / page_size) + 1
while page < pages:
new_result = self._request(lat_min, lon_min, lat_max, lon_max, page * page_size, (page + 1) * page_size,
picture_size, set_, map_filter)
result['photos'].extend(new_result['photos'])
page += 1
return result
|
[
"def",
"get_from_area",
"(",
"self",
",",
"lat_min",
",",
"lon_min",
",",
"lat_max",
",",
"lon_max",
",",
"picture_size",
"=",
"None",
",",
"set_",
"=",
"None",
",",
"map_filter",
"=",
"None",
")",
":",
"page_size",
"=",
"100",
"page",
"=",
"0",
"result",
"=",
"self",
".",
"_request",
"(",
"lat_min",
",",
"lon_min",
",",
"lat_max",
",",
"lon_max",
",",
"page",
"*",
"page_size",
",",
"(",
"page",
"+",
"1",
")",
"*",
"page_size",
",",
"picture_size",
",",
"set_",
",",
"map_filter",
")",
"total_photos",
"=",
"result",
"[",
"'count'",
"]",
"if",
"total_photos",
"<",
"page_size",
":",
"return",
"result",
"page",
"+=",
"1",
"pages",
"=",
"(",
"total_photos",
"/",
"page_size",
")",
"+",
"1",
"while",
"page",
"<",
"pages",
":",
"new_result",
"=",
"self",
".",
"_request",
"(",
"lat_min",
",",
"lon_min",
",",
"lat_max",
",",
"lon_max",
",",
"page",
"*",
"page_size",
",",
"(",
"page",
"+",
"1",
")",
"*",
"page_size",
",",
"picture_size",
",",
"set_",
",",
"map_filter",
")",
"result",
"[",
"'photos'",
"]",
".",
"extend",
"(",
"new_result",
"[",
"'photos'",
"]",
")",
"page",
"+=",
"1",
"return",
"result"
] |
Get all available photos for a specific bounding box
:param lat_min:
Minimum latitude of the bounding box
:type lat_min: float
:param lon_min:
Minimum longitude of the bounding box
:type lon_min: float
:param lat_max:
Maximum latitude of the bounding box
:type lat_max: float
:param lon_max:
Maximum longitude of the bounding box
:type lon_max: float
:param picture_size:
This can be: original, medium (*default*), small, thumbnail, square, mini_square
:type picture_size: basestring
:param set_:
This can be: public, popular or user-id; where user-id is the specific id of a user (as integer)
:type set_: basestring/int
:param map_filter:
Whether to return photos that look better together; when True, tries to avoid returning photos of the same
location
:type map_filter: bool
:return: Returns the full dataset of all available photos
|
[
"Get",
"all",
"available",
"photos",
"for",
"a",
"specific",
"bounding",
"box"
] |
d87a1edd691e9bed0527c7b3fe43239687e61546
|
https://github.com/boisei0/pynoramio/blob/d87a1edd691e9bed0527c7b3fe43239687e61546/pynoramio/__init__.py#L97-L146
|
245,924
|
boisei0/pynoramio
|
pynoramio/__init__.py
|
Pynoramio.get_all_pictures_cursor
|
def get_all_pictures_cursor(self, lat_min, lon_min, lat_max, lon_max, picture_size=None, set_=None,
map_filter=None):
"""
Generator to get all available photos for a given bounding box
:param lat_min:
Minimum latitude of the bounding box
:type lat_min: float
:param lon_min:
Minimum longitude of the bounding box
:type lon_min: float
:param lat_max:
Maximum latitude of the bounding box
:type lat_max: float
:param lon_max:
Maximum longitude of the bounding box
:type lon_max: float
:param picture_size:
This can be: original, medium (*default*), small, thumbnail, square, mini_square
:type picture_size: basestring
:param set_:
This can be: public, popular or user-id; where user-id is the specific id of a user (as integer)
:type set_: basestring/int
:param map_filter:
Whether to return photos that look better together; when True, tries to avoid returning photos of the same
location
:type map_filter: bool
:return: Yields individual dicts of photos
"""
page_size = 100
page = 0
result = self._request(lat_min, lon_min, lat_max, lon_max, page * page_size, (page + 1) * page_size,
picture_size, set_, map_filter)
total_photos = result['count']
for photo in result['photos']:
yield photo
if total_photos < page_size:
raise StopIteration()
page += 1
pages = (total_photos / page_size) + 1
while page < pages:
result = self._request(lat_min, lon_min, lat_max, lon_max, page * page_size, (page + 1) * page_size,
picture_size, set_, map_filter)
for photo in result['photos']:
yield photo
page += 1
raise StopIteration()
|
python
|
def get_all_pictures_cursor(self, lat_min, lon_min, lat_max, lon_max, picture_size=None, set_=None,
map_filter=None):
"""
Generator to get all available photos for a given bounding box
:param lat_min:
Minimum latitude of the bounding box
:type lat_min: float
:param lon_min:
Minimum longitude of the bounding box
:type lon_min: float
:param lat_max:
Maximum latitude of the bounding box
:type lat_max: float
:param lon_max:
Maximum longitude of the bounding box
:type lon_max: float
:param picture_size:
This can be: original, medium (*default*), small, thumbnail, square, mini_square
:type picture_size: basestring
:param set_:
This can be: public, popular or user-id; where user-id is the specific id of a user (as integer)
:type set_: basestring/int
:param map_filter:
Whether to return photos that look better together; when True, tries to avoid returning photos of the same
location
:type map_filter: bool
:return: Yields individual dicts of photos
"""
page_size = 100
page = 0
result = self._request(lat_min, lon_min, lat_max, lon_max, page * page_size, (page + 1) * page_size,
picture_size, set_, map_filter)
total_photos = result['count']
for photo in result['photos']:
yield photo
if total_photos < page_size:
raise StopIteration()
page += 1
pages = (total_photos / page_size) + 1
while page < pages:
result = self._request(lat_min, lon_min, lat_max, lon_max, page * page_size, (page + 1) * page_size,
picture_size, set_, map_filter)
for photo in result['photos']:
yield photo
page += 1
raise StopIteration()
|
[
"def",
"get_all_pictures_cursor",
"(",
"self",
",",
"lat_min",
",",
"lon_min",
",",
"lat_max",
",",
"lon_max",
",",
"picture_size",
"=",
"None",
",",
"set_",
"=",
"None",
",",
"map_filter",
"=",
"None",
")",
":",
"page_size",
"=",
"100",
"page",
"=",
"0",
"result",
"=",
"self",
".",
"_request",
"(",
"lat_min",
",",
"lon_min",
",",
"lat_max",
",",
"lon_max",
",",
"page",
"*",
"page_size",
",",
"(",
"page",
"+",
"1",
")",
"*",
"page_size",
",",
"picture_size",
",",
"set_",
",",
"map_filter",
")",
"total_photos",
"=",
"result",
"[",
"'count'",
"]",
"for",
"photo",
"in",
"result",
"[",
"'photos'",
"]",
":",
"yield",
"photo",
"if",
"total_photos",
"<",
"page_size",
":",
"raise",
"StopIteration",
"(",
")",
"page",
"+=",
"1",
"pages",
"=",
"(",
"total_photos",
"/",
"page_size",
")",
"+",
"1",
"while",
"page",
"<",
"pages",
":",
"result",
"=",
"self",
".",
"_request",
"(",
"lat_min",
",",
"lon_min",
",",
"lat_max",
",",
"lon_max",
",",
"page",
"*",
"page_size",
",",
"(",
"page",
"+",
"1",
")",
"*",
"page_size",
",",
"picture_size",
",",
"set_",
",",
"map_filter",
")",
"for",
"photo",
"in",
"result",
"[",
"'photos'",
"]",
":",
"yield",
"photo",
"page",
"+=",
"1",
"raise",
"StopIteration",
"(",
")"
] |
Generator to get all available photos for a given bounding box
:param lat_min:
Minimum latitude of the bounding box
:type lat_min: float
:param lon_min:
Minimum longitude of the bounding box
:type lon_min: float
:param lat_max:
Maximum latitude of the bounding box
:type lat_max: float
:param lon_max:
Maximum longitude of the bounding box
:type lon_max: float
:param picture_size:
This can be: original, medium (*default*), small, thumbnail, square, mini_square
:type picture_size: basestring
:param set_:
This can be: public, popular or user-id; where user-id is the specific id of a user (as integer)
:type set_: basestring/int
:param map_filter:
Whether to return photos that look better together; when True, tries to avoid returning photos of the same
location
:type map_filter: bool
:return: Yields individual dicts of photos
|
[
"Generator",
"to",
"get",
"all",
"available",
"photos",
"for",
"a",
"given",
"bounding",
"box"
] |
d87a1edd691e9bed0527c7b3fe43239687e61546
|
https://github.com/boisei0/pynoramio/blob/d87a1edd691e9bed0527c7b3fe43239687e61546/pynoramio/__init__.py#L148-L203
|
245,925
|
pjuren/pyokit
|
src/pyokit/datastruct/multipleAlignment.py
|
MultipleSequenceAlignment.get_column
|
def get_column(self, position, missing_seqs=MissingSequenceHandler.SKIP):
"""
return a column from an alignment as a dictionary indexed by seq. name.
:param position: the index to extract; these are in alignment
co-ordinates, which are one-based, so the first column
has index 1, and the final column has
index == size(self).
:param missing_seqs: how to treat sequence with no actual sequence data for
the column.
:return: dictionary where keys are sequence names and values are
nucleotides (raw strings).
"""
res = {}
for k in self.sequences:
if isinstance(self.sequences[k], UnknownSequence):
if missing_seqs is MissingSequenceHandler.TREAT_AS_ALL_GAPS:
res[k] = "-"
elif missing_seqs is MissingSequenceHandler.SKIP:
continue
else:
res[k] = self.sequences[k][position - 1]
return res
|
python
|
def get_column(self, position, missing_seqs=MissingSequenceHandler.SKIP):
"""
return a column from an alignment as a dictionary indexed by seq. name.
:param position: the index to extract; these are in alignment
co-ordinates, which are one-based, so the first column
has index 1, and the final column has
index == size(self).
:param missing_seqs: how to treat sequence with no actual sequence data for
the column.
:return: dictionary where keys are sequence names and values are
nucleotides (raw strings).
"""
res = {}
for k in self.sequences:
if isinstance(self.sequences[k], UnknownSequence):
if missing_seqs is MissingSequenceHandler.TREAT_AS_ALL_GAPS:
res[k] = "-"
elif missing_seqs is MissingSequenceHandler.SKIP:
continue
else:
res[k] = self.sequences[k][position - 1]
return res
|
[
"def",
"get_column",
"(",
"self",
",",
"position",
",",
"missing_seqs",
"=",
"MissingSequenceHandler",
".",
"SKIP",
")",
":",
"res",
"=",
"{",
"}",
"for",
"k",
"in",
"self",
".",
"sequences",
":",
"if",
"isinstance",
"(",
"self",
".",
"sequences",
"[",
"k",
"]",
",",
"UnknownSequence",
")",
":",
"if",
"missing_seqs",
"is",
"MissingSequenceHandler",
".",
"TREAT_AS_ALL_GAPS",
":",
"res",
"[",
"k",
"]",
"=",
"\"-\"",
"elif",
"missing_seqs",
"is",
"MissingSequenceHandler",
".",
"SKIP",
":",
"continue",
"else",
":",
"res",
"[",
"k",
"]",
"=",
"self",
".",
"sequences",
"[",
"k",
"]",
"[",
"position",
"-",
"1",
"]",
"return",
"res"
] |
return a column from an alignment as a dictionary indexed by seq. name.
:param position: the index to extract; these are in alignment
co-ordinates, which are one-based, so the first column
has index 1, and the final column has
index == size(self).
:param missing_seqs: how to treat sequence with no actual sequence data for
the column.
:return: dictionary where keys are sequence names and values are
nucleotides (raw strings).
|
[
"return",
"a",
"column",
"from",
"an",
"alignment",
"as",
"a",
"dictionary",
"indexed",
"by",
"seq",
".",
"name",
"."
] |
fddae123b5d817daa39496183f19c000d9c3791f
|
https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/datastruct/multipleAlignment.py#L149-L171
|
245,926
|
pjuren/pyokit
|
src/pyokit/datastruct/multipleAlignment.py
|
MultipleSequenceAlignment.liftover
|
def liftover(self, origin, dest, o_start, o_end, trim=False):
"""liftover interval in one seq. of this pairwise alignment to the other.
:param origin: name of the origin seq (seq the input coordinates are for)
:param dest: name of the dest. seq (seq the result will be for)
:param o_start: start of the interval (in sequence co-ordinates) to lift.
:param o_end: end of the interval (in seq. coords) to lift.
"""
alig_cols = self.sequence_to_alignment_coords(origin, o_start,
o_end, trim=trim)
res = []
for s, e in alig_cols:
t = self.alignment_to_sequence_coords(dest, s, e)
if t is None:
continue
res.append(t)
return res
|
python
|
def liftover(self, origin, dest, o_start, o_end, trim=False):
"""liftover interval in one seq. of this pairwise alignment to the other.
:param origin: name of the origin seq (seq the input coordinates are for)
:param dest: name of the dest. seq (seq the result will be for)
:param o_start: start of the interval (in sequence co-ordinates) to lift.
:param o_end: end of the interval (in seq. coords) to lift.
"""
alig_cols = self.sequence_to_alignment_coords(origin, o_start,
o_end, trim=trim)
res = []
for s, e in alig_cols:
t = self.alignment_to_sequence_coords(dest, s, e)
if t is None:
continue
res.append(t)
return res
|
[
"def",
"liftover",
"(",
"self",
",",
"origin",
",",
"dest",
",",
"o_start",
",",
"o_end",
",",
"trim",
"=",
"False",
")",
":",
"alig_cols",
"=",
"self",
".",
"sequence_to_alignment_coords",
"(",
"origin",
",",
"o_start",
",",
"o_end",
",",
"trim",
"=",
"trim",
")",
"res",
"=",
"[",
"]",
"for",
"s",
",",
"e",
"in",
"alig_cols",
":",
"t",
"=",
"self",
".",
"alignment_to_sequence_coords",
"(",
"dest",
",",
"s",
",",
"e",
")",
"if",
"t",
"is",
"None",
":",
"continue",
"res",
".",
"append",
"(",
"t",
")",
"return",
"res"
] |
liftover interval in one seq. of this pairwise alignment to the other.
:param origin: name of the origin seq (seq the input coordinates are for)
:param dest: name of the dest. seq (seq the result will be for)
:param o_start: start of the interval (in sequence co-ordinates) to lift.
:param o_end: end of the interval (in seq. coords) to lift.
|
[
"liftover",
"interval",
"in",
"one",
"seq",
".",
"of",
"this",
"pairwise",
"alignment",
"to",
"the",
"other",
"."
] |
fddae123b5d817daa39496183f19c000d9c3791f
|
https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/datastruct/multipleAlignment.py#L338-L354
|
245,927
|
ourway/marmoolak
|
marmoolak/__init__.py
|
Machine._after_event
|
def _after_event(self, e):
'''
Checks to see if the callback is registered for, after this event is completed.
'''
''' my patch, serialize to redis '''
self.r.set(self.rhname, self.current)
for fnname in ['onafter' + e.event, 'on' + e.event]:
if hasattr(self, fnname):
return getattr(self, fnname)(e)
|
python
|
def _after_event(self, e):
'''
Checks to see if the callback is registered for, after this event is completed.
'''
''' my patch, serialize to redis '''
self.r.set(self.rhname, self.current)
for fnname in ['onafter' + e.event, 'on' + e.event]:
if hasattr(self, fnname):
return getattr(self, fnname)(e)
|
[
"def",
"_after_event",
"(",
"self",
",",
"e",
")",
":",
"''' my patch, serialize to redis '''",
"self",
".",
"r",
".",
"set",
"(",
"self",
".",
"rhname",
",",
"self",
".",
"current",
")",
"for",
"fnname",
"in",
"[",
"'onafter'",
"+",
"e",
".",
"event",
",",
"'on'",
"+",
"e",
".",
"event",
"]",
":",
"if",
"hasattr",
"(",
"self",
",",
"fnname",
")",
":",
"return",
"getattr",
"(",
"self",
",",
"fnname",
")",
"(",
"e",
")"
] |
Checks to see if the callback is registered for, after this event is completed.
|
[
"Checks",
"to",
"see",
"if",
"the",
"callback",
"is",
"registered",
"for",
"after",
"this",
"event",
"is",
"completed",
"."
] |
02ce76101ca16d3b1c85d88a92bb7b15dc295ca3
|
https://github.com/ourway/marmoolak/blob/02ce76101ca16d3b1c85d88a92bb7b15dc295ca3/marmoolak/__init__.py#L36-L44
|
245,928
|
EventTeam/beliefs
|
src/beliefs/cells/strings.py
|
StringCell.coerce
|
def coerce(value):
"""
Turns value into a string
"""
if isinstance(value, StringCell):
return value
elif isinstance(value, (str, unicode)):
return StringCell(value)
else:
raise CoercionFailure("Cannot coerce %s to StringCell" % (value))
|
python
|
def coerce(value):
"""
Turns value into a string
"""
if isinstance(value, StringCell):
return value
elif isinstance(value, (str, unicode)):
return StringCell(value)
else:
raise CoercionFailure("Cannot coerce %s to StringCell" % (value))
|
[
"def",
"coerce",
"(",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"StringCell",
")",
":",
"return",
"value",
"elif",
"isinstance",
"(",
"value",
",",
"(",
"str",
",",
"unicode",
")",
")",
":",
"return",
"StringCell",
"(",
"value",
")",
"else",
":",
"raise",
"CoercionFailure",
"(",
"\"Cannot coerce %s to StringCell\"",
"%",
"(",
"value",
")",
")"
] |
Turns value into a string
|
[
"Turns",
"value",
"into",
"a",
"string"
] |
c07d22b61bebeede74a72800030dde770bf64208
|
https://github.com/EventTeam/beliefs/blob/c07d22b61bebeede74a72800030dde770bf64208/src/beliefs/cells/strings.py#L17-L26
|
245,929
|
EventTeam/beliefs
|
src/beliefs/cells/strings.py
|
StringCell.is_contradictory
|
def is_contradictory(self, other):
"""
Can these two strings coexist ?
"""
other = StringCell.coerce(other)
if self.value is None or other.value is None:
# None = empty, and won't contradict anything
return False
def sequence_in(s1, s2):
"""Does `s1` appear in sequence in `s2`?"""
return bool(re.search(".*".join(s1), s2))
return not sequence_in(self.value, other.value) and \
not sequence_in(other.value, self.value)
|
python
|
def is_contradictory(self, other):
"""
Can these two strings coexist ?
"""
other = StringCell.coerce(other)
if self.value is None or other.value is None:
# None = empty, and won't contradict anything
return False
def sequence_in(s1, s2):
"""Does `s1` appear in sequence in `s2`?"""
return bool(re.search(".*".join(s1), s2))
return not sequence_in(self.value, other.value) and \
not sequence_in(other.value, self.value)
|
[
"def",
"is_contradictory",
"(",
"self",
",",
"other",
")",
":",
"other",
"=",
"StringCell",
".",
"coerce",
"(",
"other",
")",
"if",
"self",
".",
"value",
"is",
"None",
"or",
"other",
".",
"value",
"is",
"None",
":",
"# None = empty, and won't contradict anything",
"return",
"False",
"def",
"sequence_in",
"(",
"s1",
",",
"s2",
")",
":",
"\"\"\"Does `s1` appear in sequence in `s2`?\"\"\"",
"return",
"bool",
"(",
"re",
".",
"search",
"(",
"\".*\"",
".",
"join",
"(",
"s1",
")",
",",
"s2",
")",
")",
"return",
"not",
"sequence_in",
"(",
"self",
".",
"value",
",",
"other",
".",
"value",
")",
"and",
"not",
"sequence_in",
"(",
"other",
".",
"value",
",",
"self",
".",
"value",
")"
] |
Can these two strings coexist ?
|
[
"Can",
"these",
"two",
"strings",
"coexist",
"?"
] |
c07d22b61bebeede74a72800030dde770bf64208
|
https://github.com/EventTeam/beliefs/blob/c07d22b61bebeede74a72800030dde770bf64208/src/beliefs/cells/strings.py#L28-L43
|
245,930
|
EventTeam/beliefs
|
src/beliefs/cells/strings.py
|
StringCell.is_equal
|
def is_equal(self, other):
"""
Whether two strings are equal
"""
other = StringCell.coerce(other)
empties = [None,'']
if self.value in empties and other.value in empties:
return True
return self.value == other.value
|
python
|
def is_equal(self, other):
"""
Whether two strings are equal
"""
other = StringCell.coerce(other)
empties = [None,'']
if self.value in empties and other.value in empties:
return True
return self.value == other.value
|
[
"def",
"is_equal",
"(",
"self",
",",
"other",
")",
":",
"other",
"=",
"StringCell",
".",
"coerce",
"(",
"other",
")",
"empties",
"=",
"[",
"None",
",",
"''",
"]",
"if",
"self",
".",
"value",
"in",
"empties",
"and",
"other",
".",
"value",
"in",
"empties",
":",
"return",
"True",
"return",
"self",
".",
"value",
"==",
"other",
".",
"value"
] |
Whether two strings are equal
|
[
"Whether",
"two",
"strings",
"are",
"equal"
] |
c07d22b61bebeede74a72800030dde770bf64208
|
https://github.com/EventTeam/beliefs/blob/c07d22b61bebeede74a72800030dde770bf64208/src/beliefs/cells/strings.py#L60-L68
|
245,931
|
EventTeam/beliefs
|
src/beliefs/cells/strings.py
|
StringCell.merge
|
def merge(self, other):
"""
Merges two strings
"""
other = StringCell.coerce(other)
if self.is_equal(other):
# pick among dependencies
return self
elif other.is_entailed_by(self):
return self
elif self.is_entailed_by(other):
self.value = other.value
elif self.is_contradictory(other):
raise Contradiction("Cannot merge string '%s' with '%s'" % \
(self, other))
else:
self._perform_merge(other)
return self
|
python
|
def merge(self, other):
"""
Merges two strings
"""
other = StringCell.coerce(other)
if self.is_equal(other):
# pick among dependencies
return self
elif other.is_entailed_by(self):
return self
elif self.is_entailed_by(other):
self.value = other.value
elif self.is_contradictory(other):
raise Contradiction("Cannot merge string '%s' with '%s'" % \
(self, other))
else:
self._perform_merge(other)
return self
|
[
"def",
"merge",
"(",
"self",
",",
"other",
")",
":",
"other",
"=",
"StringCell",
".",
"coerce",
"(",
"other",
")",
"if",
"self",
".",
"is_equal",
"(",
"other",
")",
":",
"# pick among dependencies",
"return",
"self",
"elif",
"other",
".",
"is_entailed_by",
"(",
"self",
")",
":",
"return",
"self",
"elif",
"self",
".",
"is_entailed_by",
"(",
"other",
")",
":",
"self",
".",
"value",
"=",
"other",
".",
"value",
"elif",
"self",
".",
"is_contradictory",
"(",
"other",
")",
":",
"raise",
"Contradiction",
"(",
"\"Cannot merge string '%s' with '%s'\"",
"%",
"(",
"self",
",",
"other",
")",
")",
"else",
":",
"self",
".",
"_perform_merge",
"(",
"other",
")",
"return",
"self"
] |
Merges two strings
|
[
"Merges",
"two",
"strings"
] |
c07d22b61bebeede74a72800030dde770bf64208
|
https://github.com/EventTeam/beliefs/blob/c07d22b61bebeede74a72800030dde770bf64208/src/beliefs/cells/strings.py#L70-L87
|
245,932
|
EventTeam/beliefs
|
src/beliefs/cells/strings.py
|
StringCell._perform_merge
|
def _perform_merge(self, other):
"""
Merges the longer string
"""
if len(other.value) > len(self.value):
self.value = other.value[:]
return True
|
python
|
def _perform_merge(self, other):
"""
Merges the longer string
"""
if len(other.value) > len(self.value):
self.value = other.value[:]
return True
|
[
"def",
"_perform_merge",
"(",
"self",
",",
"other",
")",
":",
"if",
"len",
"(",
"other",
".",
"value",
")",
">",
"len",
"(",
"self",
".",
"value",
")",
":",
"self",
".",
"value",
"=",
"other",
".",
"value",
"[",
":",
"]",
"return",
"True"
] |
Merges the longer string
|
[
"Merges",
"the",
"longer",
"string"
] |
c07d22b61bebeede74a72800030dde770bf64208
|
https://github.com/EventTeam/beliefs/blob/c07d22b61bebeede74a72800030dde770bf64208/src/beliefs/cells/strings.py#L89-L95
|
245,933
|
Code4SA/wazimap-mapit
|
wazimap_mapit/geo.py
|
GeoData.get_locations_from_coords
|
def get_locations_from_coords(self, longitude, latitude, levels=None):
"""
Returns a list of geographies containing this point.
"""
resp = requests.get(SETTINGS['url'] + '/point/4326/%s,%s?generation=%s' % (longitude, latitude, SETTINGS['generation']))
resp.raise_for_status()
geos = []
for feature in resp.json().itervalues():
try:
geo = self.get_geography(feature['codes']['MDB'],
feature['type_name'].lower())
if not levels or geo.geo_level in levels:
geos.append(geo)
except LocationNotFound as e:
log.warn("Couldn't find geo that Mapit gave us: %s" % feature, exc_info=e)
return geos
|
python
|
def get_locations_from_coords(self, longitude, latitude, levels=None):
"""
Returns a list of geographies containing this point.
"""
resp = requests.get(SETTINGS['url'] + '/point/4326/%s,%s?generation=%s' % (longitude, latitude, SETTINGS['generation']))
resp.raise_for_status()
geos = []
for feature in resp.json().itervalues():
try:
geo = self.get_geography(feature['codes']['MDB'],
feature['type_name'].lower())
if not levels or geo.geo_level in levels:
geos.append(geo)
except LocationNotFound as e:
log.warn("Couldn't find geo that Mapit gave us: %s" % feature, exc_info=e)
return geos
|
[
"def",
"get_locations_from_coords",
"(",
"self",
",",
"longitude",
",",
"latitude",
",",
"levels",
"=",
"None",
")",
":",
"resp",
"=",
"requests",
".",
"get",
"(",
"SETTINGS",
"[",
"'url'",
"]",
"+",
"'/point/4326/%s,%s?generation=%s'",
"%",
"(",
"longitude",
",",
"latitude",
",",
"SETTINGS",
"[",
"'generation'",
"]",
")",
")",
"resp",
".",
"raise_for_status",
"(",
")",
"geos",
"=",
"[",
"]",
"for",
"feature",
"in",
"resp",
".",
"json",
"(",
")",
".",
"itervalues",
"(",
")",
":",
"try",
":",
"geo",
"=",
"self",
".",
"get_geography",
"(",
"feature",
"[",
"'codes'",
"]",
"[",
"'MDB'",
"]",
",",
"feature",
"[",
"'type_name'",
"]",
".",
"lower",
"(",
")",
")",
"if",
"not",
"levels",
"or",
"geo",
".",
"geo_level",
"in",
"levels",
":",
"geos",
".",
"append",
"(",
"geo",
")",
"except",
"LocationNotFound",
"as",
"e",
":",
"log",
".",
"warn",
"(",
"\"Couldn't find geo that Mapit gave us: %s\"",
"%",
"feature",
",",
"exc_info",
"=",
"e",
")",
"return",
"geos"
] |
Returns a list of geographies containing this point.
|
[
"Returns",
"a",
"list",
"of",
"geographies",
"containing",
"this",
"point",
"."
] |
e72640f760978fa5e63b24a82402d1be1713cee8
|
https://github.com/Code4SA/wazimap-mapit/blob/e72640f760978fa5e63b24a82402d1be1713cee8/wazimap_mapit/geo.py#L55-L73
|
245,934
|
openbermuda/ripl
|
ripl/imagefind.py
|
ImageFind.add_folder
|
def add_folder(self, folder):
""" Add a folder scan images there """
if folder in self.folders:
return
self.folders.add(folder)
for subfolder, junk, filenames in os.walk(folder):
for filename in filenames:
name, ext = os.path.splitext(filename)
if ext in self.exts:
self.images.append(
os.path.join(subfolder, filename))
|
python
|
def add_folder(self, folder):
""" Add a folder scan images there """
if folder in self.folders:
return
self.folders.add(folder)
for subfolder, junk, filenames in os.walk(folder):
for filename in filenames:
name, ext = os.path.splitext(filename)
if ext in self.exts:
self.images.append(
os.path.join(subfolder, filename))
|
[
"def",
"add_folder",
"(",
"self",
",",
"folder",
")",
":",
"if",
"folder",
"in",
"self",
".",
"folders",
":",
"return",
"self",
".",
"folders",
".",
"add",
"(",
"folder",
")",
"for",
"subfolder",
",",
"junk",
",",
"filenames",
"in",
"os",
".",
"walk",
"(",
"folder",
")",
":",
"for",
"filename",
"in",
"filenames",
":",
"name",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"filename",
")",
"if",
"ext",
"in",
"self",
".",
"exts",
":",
"self",
".",
"images",
".",
"append",
"(",
"os",
".",
"path",
".",
"join",
"(",
"subfolder",
",",
"filename",
")",
")"
] |
Add a folder scan images there
|
[
"Add",
"a",
"folder",
"scan",
"images",
"there"
] |
4886b1a697e4b81c2202db9cb977609e034f8e70
|
https://github.com/openbermuda/ripl/blob/4886b1a697e4b81c2202db9cb977609e034f8e70/ripl/imagefind.py#L28-L40
|
245,935
|
openbermuda/ripl
|
ripl/imagefind.py
|
ImageFind.interpret
|
def interpret(self, msg):
""" Try and find the image file
some magic here would be good.
FIXME move elsewhere and make so everyone can use.
interpreter that finds things?
"""
for gallery in msg.get('galleries', []):
self.add_folder(gallery)
image_file = msg.get('image')
if not image_file: return
return self.find_image(image_file)
|
python
|
def interpret(self, msg):
""" Try and find the image file
some magic here would be good.
FIXME move elsewhere and make so everyone can use.
interpreter that finds things?
"""
for gallery in msg.get('galleries', []):
self.add_folder(gallery)
image_file = msg.get('image')
if not image_file: return
return self.find_image(image_file)
|
[
"def",
"interpret",
"(",
"self",
",",
"msg",
")",
":",
"for",
"gallery",
"in",
"msg",
".",
"get",
"(",
"'galleries'",
",",
"[",
"]",
")",
":",
"self",
".",
"add_folder",
"(",
"gallery",
")",
"image_file",
"=",
"msg",
".",
"get",
"(",
"'image'",
")",
"if",
"not",
"image_file",
":",
"return",
"return",
"self",
".",
"find_image",
"(",
"image_file",
")"
] |
Try and find the image file
some magic here would be good.
FIXME move elsewhere and make so everyone can use.
interpreter that finds things?
|
[
"Try",
"and",
"find",
"the",
"image",
"file"
] |
4886b1a697e4b81c2202db9cb977609e034f8e70
|
https://github.com/openbermuda/ripl/blob/4886b1a697e4b81c2202db9cb977609e034f8e70/ripl/imagefind.py#L44-L59
|
245,936
|
lbusoni/plico
|
plico/rpc/zmq_remote_procedure_call.py
|
ZmqRemoteProcedureCall.subscriberSocket
|
def subscriberSocket(self, host, port, filt=b'', conflate=False):
'''
Create a SUB-style socket for data receivers
'''
socket = self._context.socket(zmq.SUB)
if conflate:
socket.setsockopt(zmq.CONFLATE, 1)
socket.connect(self.tcpAddress(host, port))
socket.setsockopt(zmq.SUBSCRIBE, filt)
return socket
|
python
|
def subscriberSocket(self, host, port, filt=b'', conflate=False):
'''
Create a SUB-style socket for data receivers
'''
socket = self._context.socket(zmq.SUB)
if conflate:
socket.setsockopt(zmq.CONFLATE, 1)
socket.connect(self.tcpAddress(host, port))
socket.setsockopt(zmq.SUBSCRIBE, filt)
return socket
|
[
"def",
"subscriberSocket",
"(",
"self",
",",
"host",
",",
"port",
",",
"filt",
"=",
"b''",
",",
"conflate",
"=",
"False",
")",
":",
"socket",
"=",
"self",
".",
"_context",
".",
"socket",
"(",
"zmq",
".",
"SUB",
")",
"if",
"conflate",
":",
"socket",
".",
"setsockopt",
"(",
"zmq",
".",
"CONFLATE",
",",
"1",
")",
"socket",
".",
"connect",
"(",
"self",
".",
"tcpAddress",
"(",
"host",
",",
"port",
")",
")",
"socket",
".",
"setsockopt",
"(",
"zmq",
".",
"SUBSCRIBE",
",",
"filt",
")",
"return",
"socket"
] |
Create a SUB-style socket for data receivers
|
[
"Create",
"a",
"SUB",
"-",
"style",
"socket",
"for",
"data",
"receivers"
] |
08a29da8f06e920470516838878a51ac83bab847
|
https://github.com/lbusoni/plico/blob/08a29da8f06e920470516838878a51ac83bab847/plico/rpc/zmq_remote_procedure_call.py#L91-L101
|
245,937
|
lbusoni/plico
|
plico/rpc/zmq_remote_procedure_call.py
|
ZmqRemoteProcedureCall.replySocket
|
def replySocket(self, port, host='*'):
'''
Create a REP-style socket for servers
'''
try:
socket = self._context.socket(zmq.REP)
socket.bind(self.tcpAddress(host, port))
except Exception as e:
newMsg= str("%s %s:%d" % (str(e), host, port))
raise (type(e))(newMsg)
return socket
|
python
|
def replySocket(self, port, host='*'):
'''
Create a REP-style socket for servers
'''
try:
socket = self._context.socket(zmq.REP)
socket.bind(self.tcpAddress(host, port))
except Exception as e:
newMsg= str("%s %s:%d" % (str(e), host, port))
raise (type(e))(newMsg)
return socket
|
[
"def",
"replySocket",
"(",
"self",
",",
"port",
",",
"host",
"=",
"'*'",
")",
":",
"try",
":",
"socket",
"=",
"self",
".",
"_context",
".",
"socket",
"(",
"zmq",
".",
"REP",
")",
"socket",
".",
"bind",
"(",
"self",
".",
"tcpAddress",
"(",
"host",
",",
"port",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"newMsg",
"=",
"str",
"(",
"\"%s %s:%d\"",
"%",
"(",
"str",
"(",
"e",
")",
",",
"host",
",",
"port",
")",
")",
"raise",
"(",
"type",
"(",
"e",
")",
")",
"(",
"newMsg",
")",
"return",
"socket"
] |
Create a REP-style socket for servers
|
[
"Create",
"a",
"REP",
"-",
"style",
"socket",
"for",
"servers"
] |
08a29da8f06e920470516838878a51ac83bab847
|
https://github.com/lbusoni/plico/blob/08a29da8f06e920470516838878a51ac83bab847/plico/rpc/zmq_remote_procedure_call.py#L122-L132
|
245,938
|
lbusoni/plico
|
plico/rpc/zmq_remote_procedure_call.py
|
ZmqRemoteProcedureCall.requestSocket
|
def requestSocket(self, host, port):
'''
Create a REQ-style socket for clients
'''
socket = self._context.socket(zmq.REQ)
socket.connect(self.tcpAddress(host, port))
return socket
|
python
|
def requestSocket(self, host, port):
'''
Create a REQ-style socket for clients
'''
socket = self._context.socket(zmq.REQ)
socket.connect(self.tcpAddress(host, port))
return socket
|
[
"def",
"requestSocket",
"(",
"self",
",",
"host",
",",
"port",
")",
":",
"socket",
"=",
"self",
".",
"_context",
".",
"socket",
"(",
"zmq",
".",
"REQ",
")",
"socket",
".",
"connect",
"(",
"self",
".",
"tcpAddress",
"(",
"host",
",",
"port",
")",
")",
"return",
"socket"
] |
Create a REQ-style socket for clients
|
[
"Create",
"a",
"REQ",
"-",
"style",
"socket",
"for",
"clients"
] |
08a29da8f06e920470516838878a51ac83bab847
|
https://github.com/lbusoni/plico/blob/08a29da8f06e920470516838878a51ac83bab847/plico/rpc/zmq_remote_procedure_call.py#L136-L142
|
245,939
|
klen/muffin-babel
|
muffin_babel.py
|
parse_accept_header
|
def parse_accept_header(header):
"""Parse accept headers."""
result = []
for match in accept_re.finditer(header):
quality = match.group(2)
if not quality:
quality = 1
else:
quality = max(min(float(quality), 1), 0)
result.append((match.group(1), quality))
return result
|
python
|
def parse_accept_header(header):
"""Parse accept headers."""
result = []
for match in accept_re.finditer(header):
quality = match.group(2)
if not quality:
quality = 1
else:
quality = max(min(float(quality), 1), 0)
result.append((match.group(1), quality))
return result
|
[
"def",
"parse_accept_header",
"(",
"header",
")",
":",
"result",
"=",
"[",
"]",
"for",
"match",
"in",
"accept_re",
".",
"finditer",
"(",
"header",
")",
":",
"quality",
"=",
"match",
".",
"group",
"(",
"2",
")",
"if",
"not",
"quality",
":",
"quality",
"=",
"1",
"else",
":",
"quality",
"=",
"max",
"(",
"min",
"(",
"float",
"(",
"quality",
")",
",",
"1",
")",
",",
"0",
")",
"result",
".",
"append",
"(",
"(",
"match",
".",
"group",
"(",
"1",
")",
",",
"quality",
")",
")",
"return",
"result"
] |
Parse accept headers.
|
[
"Parse",
"accept",
"headers",
"."
] |
f48ebbbf7806c6c727f66d8d0df331b29f6ead08
|
https://github.com/klen/muffin-babel/blob/f48ebbbf7806c6c727f66d8d0df331b29f6ead08/muffin_babel.py#L44-L54
|
245,940
|
klen/muffin-babel
|
muffin_babel.py
|
Plugin.startup
|
async def startup(self, app):
"""Initialize a local namespace and setup Jinja2."""
self.local = slocal(app.loop)
if self.cfg.configure_jinja2 and 'jinja2' in app.ps:
app.ps.jinja2.env.add_extension('jinja2.ext.i18n')
app.ps.jinja2.env.install_gettext_callables(
lambda x: self.get_translations().ugettext(x),
lambda s, p, n: self.get_translations().ungettext(s, p, n),
newstyle=True
)
if self.locale_selector_func:
app.middlewares.append(self._middleware)
|
python
|
async def startup(self, app):
"""Initialize a local namespace and setup Jinja2."""
self.local = slocal(app.loop)
if self.cfg.configure_jinja2 and 'jinja2' in app.ps:
app.ps.jinja2.env.add_extension('jinja2.ext.i18n')
app.ps.jinja2.env.install_gettext_callables(
lambda x: self.get_translations().ugettext(x),
lambda s, p, n: self.get_translations().ungettext(s, p, n),
newstyle=True
)
if self.locale_selector_func:
app.middlewares.append(self._middleware)
|
[
"async",
"def",
"startup",
"(",
"self",
",",
"app",
")",
":",
"self",
".",
"local",
"=",
"slocal",
"(",
"app",
".",
"loop",
")",
"if",
"self",
".",
"cfg",
".",
"configure_jinja2",
"and",
"'jinja2'",
"in",
"app",
".",
"ps",
":",
"app",
".",
"ps",
".",
"jinja2",
".",
"env",
".",
"add_extension",
"(",
"'jinja2.ext.i18n'",
")",
"app",
".",
"ps",
".",
"jinja2",
".",
"env",
".",
"install_gettext_callables",
"(",
"lambda",
"x",
":",
"self",
".",
"get_translations",
"(",
")",
".",
"ugettext",
"(",
"x",
")",
",",
"lambda",
"s",
",",
"p",
",",
"n",
":",
"self",
".",
"get_translations",
"(",
")",
".",
"ungettext",
"(",
"s",
",",
"p",
",",
"n",
")",
",",
"newstyle",
"=",
"True",
")",
"if",
"self",
".",
"locale_selector_func",
":",
"app",
".",
"middlewares",
".",
"append",
"(",
"self",
".",
"_middleware",
")"
] |
Initialize a local namespace and setup Jinja2.
|
[
"Initialize",
"a",
"local",
"namespace",
"and",
"setup",
"Jinja2",
"."
] |
f48ebbbf7806c6c727f66d8d0df331b29f6ead08
|
https://github.com/klen/muffin-babel/blob/f48ebbbf7806c6c727f66d8d0df331b29f6ead08/muffin_babel.py#L158-L170
|
245,941
|
klen/muffin-babel
|
muffin_babel.py
|
Plugin.get_translations
|
def get_translations(self, domain=None, locale=None):
"""Load translations for given or configuration domain.
:param domain: Messages domain (str)
:param locale: Locale object
"""
if locale is None:
if self.locale is None:
return support.NullTranslations()
locale = self.locale
if domain is None:
domain = self.cfg.domain
if (domain, locale.language) not in self.translations:
translations = None
for locales_dir in reversed(self.cfg.locales_dirs):
trans = support.Translations.load(
locales_dir, locales=locale, domain=domain)
if translations:
translations._catalog.update(trans._catalog)
else:
translations = trans
self.translations[(domain, locale.language)] = translations
return self.translations[(domain, locale.language)]
|
python
|
def get_translations(self, domain=None, locale=None):
"""Load translations for given or configuration domain.
:param domain: Messages domain (str)
:param locale: Locale object
"""
if locale is None:
if self.locale is None:
return support.NullTranslations()
locale = self.locale
if domain is None:
domain = self.cfg.domain
if (domain, locale.language) not in self.translations:
translations = None
for locales_dir in reversed(self.cfg.locales_dirs):
trans = support.Translations.load(
locales_dir, locales=locale, domain=domain)
if translations:
translations._catalog.update(trans._catalog)
else:
translations = trans
self.translations[(domain, locale.language)] = translations
return self.translations[(domain, locale.language)]
|
[
"def",
"get_translations",
"(",
"self",
",",
"domain",
"=",
"None",
",",
"locale",
"=",
"None",
")",
":",
"if",
"locale",
"is",
"None",
":",
"if",
"self",
".",
"locale",
"is",
"None",
":",
"return",
"support",
".",
"NullTranslations",
"(",
")",
"locale",
"=",
"self",
".",
"locale",
"if",
"domain",
"is",
"None",
":",
"domain",
"=",
"self",
".",
"cfg",
".",
"domain",
"if",
"(",
"domain",
",",
"locale",
".",
"language",
")",
"not",
"in",
"self",
".",
"translations",
":",
"translations",
"=",
"None",
"for",
"locales_dir",
"in",
"reversed",
"(",
"self",
".",
"cfg",
".",
"locales_dirs",
")",
":",
"trans",
"=",
"support",
".",
"Translations",
".",
"load",
"(",
"locales_dir",
",",
"locales",
"=",
"locale",
",",
"domain",
"=",
"domain",
")",
"if",
"translations",
":",
"translations",
".",
"_catalog",
".",
"update",
"(",
"trans",
".",
"_catalog",
")",
"else",
":",
"translations",
"=",
"trans",
"self",
".",
"translations",
"[",
"(",
"domain",
",",
"locale",
".",
"language",
")",
"]",
"=",
"translations",
"return",
"self",
".",
"translations",
"[",
"(",
"domain",
",",
"locale",
".",
"language",
")",
"]"
] |
Load translations for given or configuration domain.
:param domain: Messages domain (str)
:param locale: Locale object
|
[
"Load",
"translations",
"for",
"given",
"or",
"configuration",
"domain",
"."
] |
f48ebbbf7806c6c727f66d8d0df331b29f6ead08
|
https://github.com/klen/muffin-babel/blob/f48ebbbf7806c6c727f66d8d0df331b29f6ead08/muffin_babel.py#L179-L207
|
245,942
|
klen/muffin-babel
|
muffin_babel.py
|
Plugin.locale
|
def locale(self, value):
"""Set current locale."""
if not isinstance(value, Locale):
value = Locale.parse(value)
self.local.babel_locale = value
|
python
|
def locale(self, value):
"""Set current locale."""
if not isinstance(value, Locale):
value = Locale.parse(value)
self.local.babel_locale = value
|
[
"def",
"locale",
"(",
"self",
",",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"Locale",
")",
":",
"value",
"=",
"Locale",
".",
"parse",
"(",
"value",
")",
"self",
".",
"local",
".",
"babel_locale",
"=",
"value"
] |
Set current locale.
|
[
"Set",
"current",
"locale",
"."
] |
f48ebbbf7806c6c727f66d8d0df331b29f6ead08
|
https://github.com/klen/muffin-babel/blob/f48ebbbf7806c6c727f66d8d0df331b29f6ead08/muffin_babel.py#L219-L223
|
245,943
|
klen/muffin-babel
|
muffin_babel.py
|
Plugin.select_locale_by_request
|
def select_locale_by_request(self, request, locales=()):
"""Choose an user's locales by request."""
default_locale = locales and locales[0] or self.cfg.default_locale
if len(locales) == 1 or 'ACCEPT-LANGUAGE' not in request.headers:
return default_locale
ulocales = [
(q, locale_delim_re.split(v)[0])
for v, q in parse_accept_header(request.headers['ACCEPT-LANGUAGE'])
]
ulocales.sort()
ulocales.reverse()
for locale in locales:
for _, ulocale in ulocales:
ulocale = locale_delim_re.split(ulocale)[0]
if ulocale.lower() == locale.lower():
return ulocale
return ulocales[0][1]
|
python
|
def select_locale_by_request(self, request, locales=()):
"""Choose an user's locales by request."""
default_locale = locales and locales[0] or self.cfg.default_locale
if len(locales) == 1 or 'ACCEPT-LANGUAGE' not in request.headers:
return default_locale
ulocales = [
(q, locale_delim_re.split(v)[0])
for v, q in parse_accept_header(request.headers['ACCEPT-LANGUAGE'])
]
ulocales.sort()
ulocales.reverse()
for locale in locales:
for _, ulocale in ulocales:
ulocale = locale_delim_re.split(ulocale)[0]
if ulocale.lower() == locale.lower():
return ulocale
return ulocales[0][1]
|
[
"def",
"select_locale_by_request",
"(",
"self",
",",
"request",
",",
"locales",
"=",
"(",
")",
")",
":",
"default_locale",
"=",
"locales",
"and",
"locales",
"[",
"0",
"]",
"or",
"self",
".",
"cfg",
".",
"default_locale",
"if",
"len",
"(",
"locales",
")",
"==",
"1",
"or",
"'ACCEPT-LANGUAGE'",
"not",
"in",
"request",
".",
"headers",
":",
"return",
"default_locale",
"ulocales",
"=",
"[",
"(",
"q",
",",
"locale_delim_re",
".",
"split",
"(",
"v",
")",
"[",
"0",
"]",
")",
"for",
"v",
",",
"q",
"in",
"parse_accept_header",
"(",
"request",
".",
"headers",
"[",
"'ACCEPT-LANGUAGE'",
"]",
")",
"]",
"ulocales",
".",
"sort",
"(",
")",
"ulocales",
".",
"reverse",
"(",
")",
"for",
"locale",
"in",
"locales",
":",
"for",
"_",
",",
"ulocale",
"in",
"ulocales",
":",
"ulocale",
"=",
"locale_delim_re",
".",
"split",
"(",
"ulocale",
")",
"[",
"0",
"]",
"if",
"ulocale",
".",
"lower",
"(",
")",
"==",
"locale",
".",
"lower",
"(",
")",
":",
"return",
"ulocale",
"return",
"ulocales",
"[",
"0",
"]",
"[",
"1",
"]"
] |
Choose an user's locales by request.
|
[
"Choose",
"an",
"user",
"s",
"locales",
"by",
"request",
"."
] |
f48ebbbf7806c6c727f66d8d0df331b29f6ead08
|
https://github.com/klen/muffin-babel/blob/f48ebbbf7806c6c727f66d8d0df331b29f6ead08/muffin_babel.py#L225-L245
|
245,944
|
klen/muffin-babel
|
muffin_babel.py
|
Plugin.gettext
|
def gettext(self, string, domain=None, **variables):
"""Translate a string with the current locale."""
t = self.get_translations(domain)
return t.ugettext(string) % variables
|
python
|
def gettext(self, string, domain=None, **variables):
"""Translate a string with the current locale."""
t = self.get_translations(domain)
return t.ugettext(string) % variables
|
[
"def",
"gettext",
"(",
"self",
",",
"string",
",",
"domain",
"=",
"None",
",",
"*",
"*",
"variables",
")",
":",
"t",
"=",
"self",
".",
"get_translations",
"(",
"domain",
")",
"return",
"t",
".",
"ugettext",
"(",
"string",
")",
"%",
"variables"
] |
Translate a string with the current locale.
|
[
"Translate",
"a",
"string",
"with",
"the",
"current",
"locale",
"."
] |
f48ebbbf7806c6c727f66d8d0df331b29f6ead08
|
https://github.com/klen/muffin-babel/blob/f48ebbbf7806c6c727f66d8d0df331b29f6ead08/muffin_babel.py#L247-L250
|
245,945
|
klen/muffin-babel
|
muffin_babel.py
|
Plugin.ngettext
|
def ngettext(self, singular, plural, num, domain=None, **variables):
"""Translate a string wity the current locale.
The `num` parameter is used to dispatch between singular and various plural forms of the
message.
"""
variables.setdefault('num', num)
t = self.get_translations(domain)
return t.ungettext(singular, plural, num) % variables
|
python
|
def ngettext(self, singular, plural, num, domain=None, **variables):
"""Translate a string wity the current locale.
The `num` parameter is used to dispatch between singular and various plural forms of the
message.
"""
variables.setdefault('num', num)
t = self.get_translations(domain)
return t.ungettext(singular, plural, num) % variables
|
[
"def",
"ngettext",
"(",
"self",
",",
"singular",
",",
"plural",
",",
"num",
",",
"domain",
"=",
"None",
",",
"*",
"*",
"variables",
")",
":",
"variables",
".",
"setdefault",
"(",
"'num'",
",",
"num",
")",
"t",
"=",
"self",
".",
"get_translations",
"(",
"domain",
")",
"return",
"t",
".",
"ungettext",
"(",
"singular",
",",
"plural",
",",
"num",
")",
"%",
"variables"
] |
Translate a string wity the current locale.
The `num` parameter is used to dispatch between singular and various plural forms of the
message.
|
[
"Translate",
"a",
"string",
"wity",
"the",
"current",
"locale",
"."
] |
f48ebbbf7806c6c727f66d8d0df331b29f6ead08
|
https://github.com/klen/muffin-babel/blob/f48ebbbf7806c6c727f66d8d0df331b29f6ead08/muffin_babel.py#L252-L261
|
245,946
|
KnowledgeLinks/rdfframework
|
rdfframework/datamanager/defmanager.py
|
DefinitionManager.load_vocab
|
def load_vocab(self, vocab_name, **kwargs):
""" loads a vocabulary into the defintion triplestore
args:
vocab_name: the prefix, uri or filename of a vocabulary
"""
log.setLevel(kwargs.get("log_level", self.log_level))
vocab = self.get_vocab(vocab_name , **kwargs)
if vocab['filename'] in self.loaded:
if self.loaded_times.get(vocab['filename'],
datetime.datetime(2001,1,1)).timestamp() \
< vocab['modified']:
self.drop_file(vocab['filename'], **kwargs)
else:
return
conn = kwargs.get("conn", self.conn)
conn.load_data(graph=getattr(__NSM__.kdr, vocab['filename']).clean_uri,
data=vocab['data'],
datatype=vocab['filename'].split(".")[-1],
log_level=logging.WARNING)
self.__update_time__(vocab['filename'], **kwargs)
log.warning("\n\tvocab: '%s' loaded \n\tconn: '%s'",
vocab['filename'],
conn)
self.loaded.append(vocab['filename'])
|
python
|
def load_vocab(self, vocab_name, **kwargs):
""" loads a vocabulary into the defintion triplestore
args:
vocab_name: the prefix, uri or filename of a vocabulary
"""
log.setLevel(kwargs.get("log_level", self.log_level))
vocab = self.get_vocab(vocab_name , **kwargs)
if vocab['filename'] in self.loaded:
if self.loaded_times.get(vocab['filename'],
datetime.datetime(2001,1,1)).timestamp() \
< vocab['modified']:
self.drop_file(vocab['filename'], **kwargs)
else:
return
conn = kwargs.get("conn", self.conn)
conn.load_data(graph=getattr(__NSM__.kdr, vocab['filename']).clean_uri,
data=vocab['data'],
datatype=vocab['filename'].split(".")[-1],
log_level=logging.WARNING)
self.__update_time__(vocab['filename'], **kwargs)
log.warning("\n\tvocab: '%s' loaded \n\tconn: '%s'",
vocab['filename'],
conn)
self.loaded.append(vocab['filename'])
|
[
"def",
"load_vocab",
"(",
"self",
",",
"vocab_name",
",",
"*",
"*",
"kwargs",
")",
":",
"log",
".",
"setLevel",
"(",
"kwargs",
".",
"get",
"(",
"\"log_level\"",
",",
"self",
".",
"log_level",
")",
")",
"vocab",
"=",
"self",
".",
"get_vocab",
"(",
"vocab_name",
",",
"*",
"*",
"kwargs",
")",
"if",
"vocab",
"[",
"'filename'",
"]",
"in",
"self",
".",
"loaded",
":",
"if",
"self",
".",
"loaded_times",
".",
"get",
"(",
"vocab",
"[",
"'filename'",
"]",
",",
"datetime",
".",
"datetime",
"(",
"2001",
",",
"1",
",",
"1",
")",
")",
".",
"timestamp",
"(",
")",
"<",
"vocab",
"[",
"'modified'",
"]",
":",
"self",
".",
"drop_file",
"(",
"vocab",
"[",
"'filename'",
"]",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"return",
"conn",
"=",
"kwargs",
".",
"get",
"(",
"\"conn\"",
",",
"self",
".",
"conn",
")",
"conn",
".",
"load_data",
"(",
"graph",
"=",
"getattr",
"(",
"__NSM__",
".",
"kdr",
",",
"vocab",
"[",
"'filename'",
"]",
")",
".",
"clean_uri",
",",
"data",
"=",
"vocab",
"[",
"'data'",
"]",
",",
"datatype",
"=",
"vocab",
"[",
"'filename'",
"]",
".",
"split",
"(",
"\".\"",
")",
"[",
"-",
"1",
"]",
",",
"log_level",
"=",
"logging",
".",
"WARNING",
")",
"self",
".",
"__update_time__",
"(",
"vocab",
"[",
"'filename'",
"]",
",",
"*",
"*",
"kwargs",
")",
"log",
".",
"warning",
"(",
"\"\\n\\tvocab: '%s' loaded \\n\\tconn: '%s'\"",
",",
"vocab",
"[",
"'filename'",
"]",
",",
"conn",
")",
"self",
".",
"loaded",
".",
"append",
"(",
"vocab",
"[",
"'filename'",
"]",
")"
] |
loads a vocabulary into the defintion triplestore
args:
vocab_name: the prefix, uri or filename of a vocabulary
|
[
"loads",
"a",
"vocabulary",
"into",
"the",
"defintion",
"triplestore"
] |
9ec32dcc4bed51650a4b392cc5c15100fef7923a
|
https://github.com/KnowledgeLinks/rdfframework/blob/9ec32dcc4bed51650a4b392cc5c15100fef7923a/rdfframework/datamanager/defmanager.py#L194-L219
|
245,947
|
KnowledgeLinks/rdfframework
|
rdfframework/datamanager/defmanager.py
|
DefinitionManager.get_vocab
|
def get_vocab(self, vocab_name, **kwargs):
""" Returns data stream of an rdf vocabulary
args:
vocab_name: the name or uri of the vocab to return
"""
vocab_dict = self.__get_vocab_dict__(vocab_name, **kwargs)
filepaths = list(set([os.path.join(self.cache_dir,
vocab_dict['filename']),
os.path.join(self.vocab_dir,
vocab_dict['filename'])]))
for path in filepaths:
if os.path.exists(path):
with open(path, 'rb') as f_obj:
vocab_dict.update({"name": vocab_name,
"data": f_obj.read(),
"modified": os.path.getmtime(path)})
return vocab_dict
download_locs = make_list(vocab_dict.get('download',[]))
for loc in download_locs:
loc_web = urllib.request.urlopen(loc)
# loc_file_date = date_parse(loc_web.info()['Last-Modified'])
urllib.request.urlretrieve(loc, filepaths[0])
with open(filepaths[0], 'rb') as f_obj:
vocab_dict.update({"name": vocab_name,
"data": f_obj.read(),
"modified": os.path.getmtime(filepaths[0])})
return vocab_dict
|
python
|
def get_vocab(self, vocab_name, **kwargs):
""" Returns data stream of an rdf vocabulary
args:
vocab_name: the name or uri of the vocab to return
"""
vocab_dict = self.__get_vocab_dict__(vocab_name, **kwargs)
filepaths = list(set([os.path.join(self.cache_dir,
vocab_dict['filename']),
os.path.join(self.vocab_dir,
vocab_dict['filename'])]))
for path in filepaths:
if os.path.exists(path):
with open(path, 'rb') as f_obj:
vocab_dict.update({"name": vocab_name,
"data": f_obj.read(),
"modified": os.path.getmtime(path)})
return vocab_dict
download_locs = make_list(vocab_dict.get('download',[]))
for loc in download_locs:
loc_web = urllib.request.urlopen(loc)
# loc_file_date = date_parse(loc_web.info()['Last-Modified'])
urllib.request.urlretrieve(loc, filepaths[0])
with open(filepaths[0], 'rb') as f_obj:
vocab_dict.update({"name": vocab_name,
"data": f_obj.read(),
"modified": os.path.getmtime(filepaths[0])})
return vocab_dict
|
[
"def",
"get_vocab",
"(",
"self",
",",
"vocab_name",
",",
"*",
"*",
"kwargs",
")",
":",
"vocab_dict",
"=",
"self",
".",
"__get_vocab_dict__",
"(",
"vocab_name",
",",
"*",
"*",
"kwargs",
")",
"filepaths",
"=",
"list",
"(",
"set",
"(",
"[",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"cache_dir",
",",
"vocab_dict",
"[",
"'filename'",
"]",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"vocab_dir",
",",
"vocab_dict",
"[",
"'filename'",
"]",
")",
"]",
")",
")",
"for",
"path",
"in",
"filepaths",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"with",
"open",
"(",
"path",
",",
"'rb'",
")",
"as",
"f_obj",
":",
"vocab_dict",
".",
"update",
"(",
"{",
"\"name\"",
":",
"vocab_name",
",",
"\"data\"",
":",
"f_obj",
".",
"read",
"(",
")",
",",
"\"modified\"",
":",
"os",
".",
"path",
".",
"getmtime",
"(",
"path",
")",
"}",
")",
"return",
"vocab_dict",
"download_locs",
"=",
"make_list",
"(",
"vocab_dict",
".",
"get",
"(",
"'download'",
",",
"[",
"]",
")",
")",
"for",
"loc",
"in",
"download_locs",
":",
"loc_web",
"=",
"urllib",
".",
"request",
".",
"urlopen",
"(",
"loc",
")",
"# loc_file_date = date_parse(loc_web.info()['Last-Modified'])",
"urllib",
".",
"request",
".",
"urlretrieve",
"(",
"loc",
",",
"filepaths",
"[",
"0",
"]",
")",
"with",
"open",
"(",
"filepaths",
"[",
"0",
"]",
",",
"'rb'",
")",
"as",
"f_obj",
":",
"vocab_dict",
".",
"update",
"(",
"{",
"\"name\"",
":",
"vocab_name",
",",
"\"data\"",
":",
"f_obj",
".",
"read",
"(",
")",
",",
"\"modified\"",
":",
"os",
".",
"path",
".",
"getmtime",
"(",
"filepaths",
"[",
"0",
"]",
")",
"}",
")",
"return",
"vocab_dict"
] |
Returns data stream of an rdf vocabulary
args:
vocab_name: the name or uri of the vocab to return
|
[
"Returns",
"data",
"stream",
"of",
"an",
"rdf",
"vocabulary"
] |
9ec32dcc4bed51650a4b392cc5c15100fef7923a
|
https://github.com/KnowledgeLinks/rdfframework/blob/9ec32dcc4bed51650a4b392cc5c15100fef7923a/rdfframework/datamanager/defmanager.py#L236-L265
|
245,948
|
KnowledgeLinks/rdfframework
|
rdfframework/datamanager/defmanager.py
|
DefinitionManager.drop_vocab
|
def drop_vocab(self, vocab_name, **kwargs):
""" Removes the vocab from the definiton triplestore
args:
vocab_name: the name or uri of the vocab to return
"""
vocab_dict = self.__get_vocab_dict__(vocab_name, **kwargs)
return self.drop_file(vocab_dict['filename'], **kwargs)
|
python
|
def drop_vocab(self, vocab_name, **kwargs):
""" Removes the vocab from the definiton triplestore
args:
vocab_name: the name or uri of the vocab to return
"""
vocab_dict = self.__get_vocab_dict__(vocab_name, **kwargs)
return self.drop_file(vocab_dict['filename'], **kwargs)
|
[
"def",
"drop_vocab",
"(",
"self",
",",
"vocab_name",
",",
"*",
"*",
"kwargs",
")",
":",
"vocab_dict",
"=",
"self",
".",
"__get_vocab_dict__",
"(",
"vocab_name",
",",
"*",
"*",
"kwargs",
")",
"return",
"self",
".",
"drop_file",
"(",
"vocab_dict",
"[",
"'filename'",
"]",
",",
"*",
"*",
"kwargs",
")"
] |
Removes the vocab from the definiton triplestore
args:
vocab_name: the name or uri of the vocab to return
|
[
"Removes",
"the",
"vocab",
"from",
"the",
"definiton",
"triplestore"
] |
9ec32dcc4bed51650a4b392cc5c15100fef7923a
|
https://github.com/KnowledgeLinks/rdfframework/blob/9ec32dcc4bed51650a4b392cc5c15100fef7923a/rdfframework/datamanager/defmanager.py#L267-L275
|
245,949
|
djangothon/ab
|
ab/decorators.py
|
quick
|
def quick(config, mysterious=None, only_authenticated=None):
"""
Decides whether this user is allowed to access this view or not.
:param config - Decides if the setting is on globally.
:callable_name - The function which will return the list of users which are
eligible for proceeding further after this action.
"""
def decorator(func):
@wraps(func)
def _quick(request, *args, **kwargs):
# Check if the request is ajax.
is_ajax = request.is_ajax()
# Check if the config is available globally and return '' or raise
# 404 as per the nature of the request.
if not config:
return _return_blank_or_raise_404(is_ajax)
callable_name = None
_only_authenticated = None
if mysterious is not None:
if type(mysterious) == bool:
_only_authenticated = mysterious
else:
callable_name = mysterious
elif only_authenticated is not None:
_only_authenticated = only_authenticated
user = request.user
if callable_name is None:
if (_only_authenticated is not None and
_only_authenticated and
not user.is_authenticated()):
return _return_blank_or_raise_404(is_ajax)
else:
return func(request, *args, **kwargs)
else:
if not user.is_authenticated():
return _return_blank_or_raise_404(is_ajax)
else:
_callable = function_from_string(callable_name)
if user.id in _callable():
return func(request, *args, **kwargs)
return _return_blank_or_raise_404(is_ajax)
return _return_blank_or_raise_404(is_ajax)
return _quick
return decorator
|
python
|
def quick(config, mysterious=None, only_authenticated=None):
"""
Decides whether this user is allowed to access this view or not.
:param config - Decides if the setting is on globally.
:callable_name - The function which will return the list of users which are
eligible for proceeding further after this action.
"""
def decorator(func):
@wraps(func)
def _quick(request, *args, **kwargs):
# Check if the request is ajax.
is_ajax = request.is_ajax()
# Check if the config is available globally and return '' or raise
# 404 as per the nature of the request.
if not config:
return _return_blank_or_raise_404(is_ajax)
callable_name = None
_only_authenticated = None
if mysterious is not None:
if type(mysterious) == bool:
_only_authenticated = mysterious
else:
callable_name = mysterious
elif only_authenticated is not None:
_only_authenticated = only_authenticated
user = request.user
if callable_name is None:
if (_only_authenticated is not None and
_only_authenticated and
not user.is_authenticated()):
return _return_blank_or_raise_404(is_ajax)
else:
return func(request, *args, **kwargs)
else:
if not user.is_authenticated():
return _return_blank_or_raise_404(is_ajax)
else:
_callable = function_from_string(callable_name)
if user.id in _callable():
return func(request, *args, **kwargs)
return _return_blank_or_raise_404(is_ajax)
return _return_blank_or_raise_404(is_ajax)
return _quick
return decorator
|
[
"def",
"quick",
"(",
"config",
",",
"mysterious",
"=",
"None",
",",
"only_authenticated",
"=",
"None",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"@",
"wraps",
"(",
"func",
")",
"def",
"_quick",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# Check if the request is ajax.",
"is_ajax",
"=",
"request",
".",
"is_ajax",
"(",
")",
"# Check if the config is available globally and return '' or raise",
"# 404 as per the nature of the request.",
"if",
"not",
"config",
":",
"return",
"_return_blank_or_raise_404",
"(",
"is_ajax",
")",
"callable_name",
"=",
"None",
"_only_authenticated",
"=",
"None",
"if",
"mysterious",
"is",
"not",
"None",
":",
"if",
"type",
"(",
"mysterious",
")",
"==",
"bool",
":",
"_only_authenticated",
"=",
"mysterious",
"else",
":",
"callable_name",
"=",
"mysterious",
"elif",
"only_authenticated",
"is",
"not",
"None",
":",
"_only_authenticated",
"=",
"only_authenticated",
"user",
"=",
"request",
".",
"user",
"if",
"callable_name",
"is",
"None",
":",
"if",
"(",
"_only_authenticated",
"is",
"not",
"None",
"and",
"_only_authenticated",
"and",
"not",
"user",
".",
"is_authenticated",
"(",
")",
")",
":",
"return",
"_return_blank_or_raise_404",
"(",
"is_ajax",
")",
"else",
":",
"return",
"func",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"if",
"not",
"user",
".",
"is_authenticated",
"(",
")",
":",
"return",
"_return_blank_or_raise_404",
"(",
"is_ajax",
")",
"else",
":",
"_callable",
"=",
"function_from_string",
"(",
"callable_name",
")",
"if",
"user",
".",
"id",
"in",
"_callable",
"(",
")",
":",
"return",
"func",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"_return_blank_or_raise_404",
"(",
"is_ajax",
")",
"return",
"_return_blank_or_raise_404",
"(",
"is_ajax",
")",
"return",
"_quick",
"return",
"decorator"
] |
Decides whether this user is allowed to access this view or not.
:param config - Decides if the setting is on globally.
:callable_name - The function which will return the list of users which are
eligible for proceeding further after this action.
|
[
"Decides",
"whether",
"this",
"user",
"is",
"allowed",
"to",
"access",
"this",
"view",
"or",
"not",
"."
] |
2075d27805ad94b11535a11439027ce8d0c8ab83
|
https://github.com/djangothon/ab/blob/2075d27805ad94b11535a11439027ce8d0c8ab83/ab/decorators.py#L18-L67
|
245,950
|
cogniteev/docido-python-sdk
|
docido_sdk/toolbox/ha.py
|
HA.catch
|
def catch(cls, catch_exception, config='default'):
"""Decorator class method catching exceptions raised by the wrapped
member function. When exception is caught, the decorator waits
for an amount of time specified in the `ha_config`.
:param catch_exception: Exception class or tuple of exception classes.
"""
def wrap(method):
@functools.wraps(method)
def wrapped_method(self, *args, **kwargs):
assert isinstance(self, HA)
delay_policy = self.ha_get_delay_policy(config)
max_retries = self.ha_get_config(config).max_retries
for retries in itertools.count():
try:
return method(self, *args, **kwargs)
except catch_exception as e:
res = self.ha_on_error(method, e, args, kwargs)
if res is not None:
args, kwargs = res
if max_retries and retries >= max_retries:
raise
tts = next(delay_policy)
time.sleep(tts)
return wrapped_method
return wrap
|
python
|
def catch(cls, catch_exception, config='default'):
"""Decorator class method catching exceptions raised by the wrapped
member function. When exception is caught, the decorator waits
for an amount of time specified in the `ha_config`.
:param catch_exception: Exception class or tuple of exception classes.
"""
def wrap(method):
@functools.wraps(method)
def wrapped_method(self, *args, **kwargs):
assert isinstance(self, HA)
delay_policy = self.ha_get_delay_policy(config)
max_retries = self.ha_get_config(config).max_retries
for retries in itertools.count():
try:
return method(self, *args, **kwargs)
except catch_exception as e:
res = self.ha_on_error(method, e, args, kwargs)
if res is not None:
args, kwargs = res
if max_retries and retries >= max_retries:
raise
tts = next(delay_policy)
time.sleep(tts)
return wrapped_method
return wrap
|
[
"def",
"catch",
"(",
"cls",
",",
"catch_exception",
",",
"config",
"=",
"'default'",
")",
":",
"def",
"wrap",
"(",
"method",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"method",
")",
"def",
"wrapped_method",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"assert",
"isinstance",
"(",
"self",
",",
"HA",
")",
"delay_policy",
"=",
"self",
".",
"ha_get_delay_policy",
"(",
"config",
")",
"max_retries",
"=",
"self",
".",
"ha_get_config",
"(",
"config",
")",
".",
"max_retries",
"for",
"retries",
"in",
"itertools",
".",
"count",
"(",
")",
":",
"try",
":",
"return",
"method",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"catch_exception",
"as",
"e",
":",
"res",
"=",
"self",
".",
"ha_on_error",
"(",
"method",
",",
"e",
",",
"args",
",",
"kwargs",
")",
"if",
"res",
"is",
"not",
"None",
":",
"args",
",",
"kwargs",
"=",
"res",
"if",
"max_retries",
"and",
"retries",
">=",
"max_retries",
":",
"raise",
"tts",
"=",
"next",
"(",
"delay_policy",
")",
"time",
".",
"sleep",
"(",
"tts",
")",
"return",
"wrapped_method",
"return",
"wrap"
] |
Decorator class method catching exceptions raised by the wrapped
member function. When exception is caught, the decorator waits
for an amount of time specified in the `ha_config`.
:param catch_exception: Exception class or tuple of exception classes.
|
[
"Decorator",
"class",
"method",
"catching",
"exceptions",
"raised",
"by",
"the",
"wrapped",
"member",
"function",
".",
"When",
"exception",
"is",
"caught",
"the",
"decorator",
"waits",
"for",
"an",
"amount",
"of",
"time",
"specified",
"in",
"the",
"ha_config",
"."
] |
58ecb6c6f5757fd40c0601657ab18368da7ddf33
|
https://github.com/cogniteev/docido-python-sdk/blob/58ecb6c6f5757fd40c0601657ab18368da7ddf33/docido_sdk/toolbox/ha.py#L74-L99
|
245,951
|
cogniteev/docido-python-sdk
|
docido_sdk/toolbox/ha.py
|
HA.ha_get_delay_policy
|
def ha_get_delay_policy(self, config_name):
"""Build generator of delays to wait between each call
:param string config_name: configuration name
"""
config = self.ha_get_config(config_name)
delay_policy_conf = config.delay_config
delay_policy_conf = copy.deepcopy(delay_policy_conf)
delay_policy_conf.update(delay=config.delay)
return self._retry_delays_class.get(
config.delay_policy,
**delay_policy_conf
)
|
python
|
def ha_get_delay_policy(self, config_name):
"""Build generator of delays to wait between each call
:param string config_name: configuration name
"""
config = self.ha_get_config(config_name)
delay_policy_conf = config.delay_config
delay_policy_conf = copy.deepcopy(delay_policy_conf)
delay_policy_conf.update(delay=config.delay)
return self._retry_delays_class.get(
config.delay_policy,
**delay_policy_conf
)
|
[
"def",
"ha_get_delay_policy",
"(",
"self",
",",
"config_name",
")",
":",
"config",
"=",
"self",
".",
"ha_get_config",
"(",
"config_name",
")",
"delay_policy_conf",
"=",
"config",
".",
"delay_config",
"delay_policy_conf",
"=",
"copy",
".",
"deepcopy",
"(",
"delay_policy_conf",
")",
"delay_policy_conf",
".",
"update",
"(",
"delay",
"=",
"config",
".",
"delay",
")",
"return",
"self",
".",
"_retry_delays_class",
".",
"get",
"(",
"config",
".",
"delay_policy",
",",
"*",
"*",
"delay_policy_conf",
")"
] |
Build generator of delays to wait between each call
:param string config_name: configuration name
|
[
"Build",
"generator",
"of",
"delays",
"to",
"wait",
"between",
"each",
"call"
] |
58ecb6c6f5757fd40c0601657ab18368da7ddf33
|
https://github.com/cogniteev/docido-python-sdk/blob/58ecb6c6f5757fd40c0601657ab18368da7ddf33/docido_sdk/toolbox/ha.py#L104-L116
|
245,952
|
wooyek/django-powerbank
|
src/django_powerbank/db/models/fields/__init__.py
|
JSONField.validate
|
def validate(self, value, model_instance):
"""Check value is a valid JSON string, raise ValidationError on
error."""
if isinstance(value, six.string_types):
super(JSONField, self).validate(value, model_instance)
try:
json.loads(value)
except Exception as err:
raise ValidationError(str(err))
|
python
|
def validate(self, value, model_instance):
"""Check value is a valid JSON string, raise ValidationError on
error."""
if isinstance(value, six.string_types):
super(JSONField, self).validate(value, model_instance)
try:
json.loads(value)
except Exception as err:
raise ValidationError(str(err))
|
[
"def",
"validate",
"(",
"self",
",",
"value",
",",
"model_instance",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"six",
".",
"string_types",
")",
":",
"super",
"(",
"JSONField",
",",
"self",
")",
".",
"validate",
"(",
"value",
",",
"model_instance",
")",
"try",
":",
"json",
".",
"loads",
"(",
"value",
")",
"except",
"Exception",
"as",
"err",
":",
"raise",
"ValidationError",
"(",
"str",
"(",
"err",
")",
")"
] |
Check value is a valid JSON string, raise ValidationError on
error.
|
[
"Check",
"value",
"is",
"a",
"valid",
"JSON",
"string",
"raise",
"ValidationError",
"on",
"error",
"."
] |
df91189f2ac18bacc545ccf3c81c4465fb993949
|
https://github.com/wooyek/django-powerbank/blob/df91189f2ac18bacc545ccf3c81c4465fb993949/src/django_powerbank/db/models/fields/__init__.py#L179-L187
|
245,953
|
wooyek/django-powerbank
|
src/django_powerbank/db/models/fields/__init__.py
|
JSONField.value_to_string
|
def value_to_string(self, obj):
"""Return value from object converted to string properly"""
value = getattr(obj, self.attname)
return self.get_prep_value(value)
|
python
|
def value_to_string(self, obj):
"""Return value from object converted to string properly"""
value = getattr(obj, self.attname)
return self.get_prep_value(value)
|
[
"def",
"value_to_string",
"(",
"self",
",",
"obj",
")",
":",
"value",
"=",
"getattr",
"(",
"obj",
",",
"self",
".",
"attname",
")",
"return",
"self",
".",
"get_prep_value",
"(",
"value",
")"
] |
Return value from object converted to string properly
|
[
"Return",
"value",
"from",
"object",
"converted",
"to",
"string",
"properly"
] |
df91189f2ac18bacc545ccf3c81c4465fb993949
|
https://github.com/wooyek/django-powerbank/blob/df91189f2ac18bacc545ccf3c81c4465fb993949/src/django_powerbank/db/models/fields/__init__.py#L196-L199
|
245,954
|
dcolish/Refugee
|
refugee/cli.py
|
RefugeeCmd.do_down
|
def do_down(self, arg):
"""Run down migration with name or numeric id matching arg"""
print "running down migration"
self.manager.run_one(arg, Direction.DOWN)
|
python
|
def do_down(self, arg):
"""Run down migration with name or numeric id matching arg"""
print "running down migration"
self.manager.run_one(arg, Direction.DOWN)
|
[
"def",
"do_down",
"(",
"self",
",",
"arg",
")",
":",
"print",
"\"running down migration\"",
"self",
".",
"manager",
".",
"run_one",
"(",
"arg",
",",
"Direction",
".",
"DOWN",
")"
] |
Run down migration with name or numeric id matching arg
|
[
"Run",
"down",
"migration",
"with",
"name",
"or",
"numeric",
"id",
"matching",
"arg"
] |
b98391cb3127d09b15b59c7c25dab07a968062fa
|
https://github.com/dcolish/Refugee/blob/b98391cb3127d09b15b59c7c25dab07a968062fa/refugee/cli.py#L36-L39
|
245,955
|
dcolish/Refugee
|
refugee/cli.py
|
RefugeeCmd.do_up
|
def do_up(self, arg):
"""Run up migration with name or numeric id matching arg"""
print "running up migration"
self.manager.run(arg, Direction.UP)
|
python
|
def do_up(self, arg):
"""Run up migration with name or numeric id matching arg"""
print "running up migration"
self.manager.run(arg, Direction.UP)
|
[
"def",
"do_up",
"(",
"self",
",",
"arg",
")",
":",
"print",
"\"running up migration\"",
"self",
".",
"manager",
".",
"run",
"(",
"arg",
",",
"Direction",
".",
"UP",
")"
] |
Run up migration with name or numeric id matching arg
|
[
"Run",
"up",
"migration",
"with",
"name",
"or",
"numeric",
"id",
"matching",
"arg"
] |
b98391cb3127d09b15b59c7c25dab07a968062fa
|
https://github.com/dcolish/Refugee/blob/b98391cb3127d09b15b59c7c25dab07a968062fa/refugee/cli.py#L78-L81
|
245,956
|
dstufft/crust
|
crust/utils.py
|
subclass_exception
|
def subclass_exception(name, parents, module, attached_to=None):
"""
Create exception subclass.
If 'attached_to' is supplied, the exception will be created in a way that
allows it to be pickled, assuming the returned exception class will be added
as an attribute to the 'attached_to' class.
"""
class_dict = {'__module__': module}
if attached_to is not None:
def __reduce__(self):
# Exceptions are special - they've got state that isn't
# in self.__dict__. We assume it is all in self.args.
return (unpickle_inner_exception, (attached_to, name), self.args)
def __setstate__(self, args):
self.args = args
class_dict['__reduce__'] = __reduce__
class_dict['__setstate__'] = __setstate__
return type(name, parents, class_dict)
|
python
|
def subclass_exception(name, parents, module, attached_to=None):
"""
Create exception subclass.
If 'attached_to' is supplied, the exception will be created in a way that
allows it to be pickled, assuming the returned exception class will be added
as an attribute to the 'attached_to' class.
"""
class_dict = {'__module__': module}
if attached_to is not None:
def __reduce__(self):
# Exceptions are special - they've got state that isn't
# in self.__dict__. We assume it is all in self.args.
return (unpickle_inner_exception, (attached_to, name), self.args)
def __setstate__(self, args):
self.args = args
class_dict['__reduce__'] = __reduce__
class_dict['__setstate__'] = __setstate__
return type(name, parents, class_dict)
|
[
"def",
"subclass_exception",
"(",
"name",
",",
"parents",
",",
"module",
",",
"attached_to",
"=",
"None",
")",
":",
"class_dict",
"=",
"{",
"'__module__'",
":",
"module",
"}",
"if",
"attached_to",
"is",
"not",
"None",
":",
"def",
"__reduce__",
"(",
"self",
")",
":",
"# Exceptions are special - they've got state that isn't",
"# in self.__dict__. We assume it is all in self.args.",
"return",
"(",
"unpickle_inner_exception",
",",
"(",
"attached_to",
",",
"name",
")",
",",
"self",
".",
"args",
")",
"def",
"__setstate__",
"(",
"self",
",",
"args",
")",
":",
"self",
".",
"args",
"=",
"args",
"class_dict",
"[",
"'__reduce__'",
"]",
"=",
"__reduce__",
"class_dict",
"[",
"'__setstate__'",
"]",
"=",
"__setstate__",
"return",
"type",
"(",
"name",
",",
"parents",
",",
"class_dict",
")"
] |
Create exception subclass.
If 'attached_to' is supplied, the exception will be created in a way that
allows it to be pickled, assuming the returned exception class will be added
as an attribute to the 'attached_to' class.
|
[
"Create",
"exception",
"subclass",
"."
] |
5d4011ecace12fd3f68a03a17dbefb78390a9fc0
|
https://github.com/dstufft/crust/blob/5d4011ecace12fd3f68a03a17dbefb78390a9fc0/crust/utils.py#L7-L28
|
245,957
|
delfick/gitmit
|
gitmit/repo.py
|
Repo.all_files
|
def all_files(self):
"""Return a set of all the files under git control"""
return set([entry.decode() for entry, _ in self.git.open_index().items()])
|
python
|
def all_files(self):
"""Return a set of all the files under git control"""
return set([entry.decode() for entry, _ in self.git.open_index().items()])
|
[
"def",
"all_files",
"(",
"self",
")",
":",
"return",
"set",
"(",
"[",
"entry",
".",
"decode",
"(",
")",
"for",
"entry",
",",
"_",
"in",
"self",
".",
"git",
".",
"open_index",
"(",
")",
".",
"items",
"(",
")",
"]",
")"
] |
Return a set of all the files under git control
|
[
"Return",
"a",
"set",
"of",
"all",
"the",
"files",
"under",
"git",
"control"
] |
ae0aef14a06b25ad2811f8f47cc97e68a0910eae
|
https://github.com/delfick/gitmit/blob/ae0aef14a06b25ad2811f8f47cc97e68a0910eae/gitmit/repo.py#L45-L47
|
245,958
|
delfick/gitmit
|
gitmit/repo.py
|
Repo.file_commit_times
|
def file_commit_times(self, use_files_paths, debug=False):
"""
Traverse the commits in the repository, starting from HEAD until we have
found the commit times for all the files we care about.
Yield each file once, only when it is found to be changed in some commit.
If self.debug is true, also output log.debug for the speed we are going
through commits (output commits/second every 1000 commits and every
100000 commits)
"""
prefixes = PrefixTree()
prefixes.fill(use_files_paths)
for entry in self.git.get_walker():
# Commit time taking into account the timezone
commit_time = entry.commit.commit_time - entry.commit.commit_timezone
# Get us the two different tree structures between parents and current
cf_and_pf, changes = self.tree_structures_for(()
, entry.commit.tree
, [self.git.get_object(oid).tree for oid in entry.commit.parents]
, prefixes
)
# Deep dive into any differences
difference = []
if changes:
cfs_and_pfs = [(cf_and_pf, changes)]
while cfs_and_pfs:
nxt, changes = cfs_and_pfs.pop(0)
for thing, changes, is_path in self.differences_between(nxt[0], nxt[1], changes, prefixes):
if is_path:
found = prefixes.remove(thing[:-1], thing[-1])
if found:
difference.append('/'.join(thing))
else:
cfs_and_pfs.append((thing, changes))
# Only yield if there was a difference
if difference:
yield entry.commit.sha().hexdigest(), commit_time, difference
# If nothing remains, then break!
if not prefixes:
break
|
python
|
def file_commit_times(self, use_files_paths, debug=False):
"""
Traverse the commits in the repository, starting from HEAD until we have
found the commit times for all the files we care about.
Yield each file once, only when it is found to be changed in some commit.
If self.debug is true, also output log.debug for the speed we are going
through commits (output commits/second every 1000 commits and every
100000 commits)
"""
prefixes = PrefixTree()
prefixes.fill(use_files_paths)
for entry in self.git.get_walker():
# Commit time taking into account the timezone
commit_time = entry.commit.commit_time - entry.commit.commit_timezone
# Get us the two different tree structures between parents and current
cf_and_pf, changes = self.tree_structures_for(()
, entry.commit.tree
, [self.git.get_object(oid).tree for oid in entry.commit.parents]
, prefixes
)
# Deep dive into any differences
difference = []
if changes:
cfs_and_pfs = [(cf_and_pf, changes)]
while cfs_and_pfs:
nxt, changes = cfs_and_pfs.pop(0)
for thing, changes, is_path in self.differences_between(nxt[0], nxt[1], changes, prefixes):
if is_path:
found = prefixes.remove(thing[:-1], thing[-1])
if found:
difference.append('/'.join(thing))
else:
cfs_and_pfs.append((thing, changes))
# Only yield if there was a difference
if difference:
yield entry.commit.sha().hexdigest(), commit_time, difference
# If nothing remains, then break!
if not prefixes:
break
|
[
"def",
"file_commit_times",
"(",
"self",
",",
"use_files_paths",
",",
"debug",
"=",
"False",
")",
":",
"prefixes",
"=",
"PrefixTree",
"(",
")",
"prefixes",
".",
"fill",
"(",
"use_files_paths",
")",
"for",
"entry",
"in",
"self",
".",
"git",
".",
"get_walker",
"(",
")",
":",
"# Commit time taking into account the timezone",
"commit_time",
"=",
"entry",
".",
"commit",
".",
"commit_time",
"-",
"entry",
".",
"commit",
".",
"commit_timezone",
"# Get us the two different tree structures between parents and current",
"cf_and_pf",
",",
"changes",
"=",
"self",
".",
"tree_structures_for",
"(",
"(",
")",
",",
"entry",
".",
"commit",
".",
"tree",
",",
"[",
"self",
".",
"git",
".",
"get_object",
"(",
"oid",
")",
".",
"tree",
"for",
"oid",
"in",
"entry",
".",
"commit",
".",
"parents",
"]",
",",
"prefixes",
")",
"# Deep dive into any differences",
"difference",
"=",
"[",
"]",
"if",
"changes",
":",
"cfs_and_pfs",
"=",
"[",
"(",
"cf_and_pf",
",",
"changes",
")",
"]",
"while",
"cfs_and_pfs",
":",
"nxt",
",",
"changes",
"=",
"cfs_and_pfs",
".",
"pop",
"(",
"0",
")",
"for",
"thing",
",",
"changes",
",",
"is_path",
"in",
"self",
".",
"differences_between",
"(",
"nxt",
"[",
"0",
"]",
",",
"nxt",
"[",
"1",
"]",
",",
"changes",
",",
"prefixes",
")",
":",
"if",
"is_path",
":",
"found",
"=",
"prefixes",
".",
"remove",
"(",
"thing",
"[",
":",
"-",
"1",
"]",
",",
"thing",
"[",
"-",
"1",
"]",
")",
"if",
"found",
":",
"difference",
".",
"append",
"(",
"'/'",
".",
"join",
"(",
"thing",
")",
")",
"else",
":",
"cfs_and_pfs",
".",
"append",
"(",
"(",
"thing",
",",
"changes",
")",
")",
"# Only yield if there was a difference",
"if",
"difference",
":",
"yield",
"entry",
".",
"commit",
".",
"sha",
"(",
")",
".",
"hexdigest",
"(",
")",
",",
"commit_time",
",",
"difference",
"# If nothing remains, then break!",
"if",
"not",
"prefixes",
":",
"break"
] |
Traverse the commits in the repository, starting from HEAD until we have
found the commit times for all the files we care about.
Yield each file once, only when it is found to be changed in some commit.
If self.debug is true, also output log.debug for the speed we are going
through commits (output commits/second every 1000 commits and every
100000 commits)
|
[
"Traverse",
"the",
"commits",
"in",
"the",
"repository",
"starting",
"from",
"HEAD",
"until",
"we",
"have",
"found",
"the",
"commit",
"times",
"for",
"all",
"the",
"files",
"we",
"care",
"about",
"."
] |
ae0aef14a06b25ad2811f8f47cc97e68a0910eae
|
https://github.com/delfick/gitmit/blob/ae0aef14a06b25ad2811f8f47cc97e68a0910eae/gitmit/repo.py#L54-L99
|
245,959
|
delfick/gitmit
|
gitmit/repo.py
|
Repo.entries_in_tree_oid
|
def entries_in_tree_oid(self, prefix, tree_oid):
"""Find the tree at this oid and return entries prefixed with ``prefix``"""
try:
tree = self.git.get_object(tree_oid)
except KeyError:
log.warning("Couldn't find object {0}".format(tree_oid))
return empty
else:
return frozenset(self.entries_in_tree(prefix, tree))
|
python
|
def entries_in_tree_oid(self, prefix, tree_oid):
"""Find the tree at this oid and return entries prefixed with ``prefix``"""
try:
tree = self.git.get_object(tree_oid)
except KeyError:
log.warning("Couldn't find object {0}".format(tree_oid))
return empty
else:
return frozenset(self.entries_in_tree(prefix, tree))
|
[
"def",
"entries_in_tree_oid",
"(",
"self",
",",
"prefix",
",",
"tree_oid",
")",
":",
"try",
":",
"tree",
"=",
"self",
".",
"git",
".",
"get_object",
"(",
"tree_oid",
")",
"except",
"KeyError",
":",
"log",
".",
"warning",
"(",
"\"Couldn't find object {0}\"",
".",
"format",
"(",
"tree_oid",
")",
")",
"return",
"empty",
"else",
":",
"return",
"frozenset",
"(",
"self",
".",
"entries_in_tree",
"(",
"prefix",
",",
"tree",
")",
")"
] |
Find the tree at this oid and return entries prefixed with ``prefix``
|
[
"Find",
"the",
"tree",
"at",
"this",
"oid",
"and",
"return",
"entries",
"prefixed",
"with",
"prefix"
] |
ae0aef14a06b25ad2811f8f47cc97e68a0910eae
|
https://github.com/delfick/gitmit/blob/ae0aef14a06b25ad2811f8f47cc97e68a0910eae/gitmit/repo.py#L101-L109
|
245,960
|
pjuren/pyokit
|
src/pyokit/scripts/genomicIntJaccard.py
|
main
|
def main(args):
"""
main entry point for the GenomicIntJaccard script.
:param args: the arguments for this script, as a list of string. Should
already have had things like the script name stripped. That
is, if there are no args provided, this should be an empty
list.
"""
# get options and arguments
ui = getUI(args)
if ui.optionIsSet("test"):
# just run unit tests
unittest.main(argv=[sys.argv[0]])
elif ui.optionIsSet("help"):
# just show help
ui.usage()
else:
verbose = ui.optionIsSet("verbose")
stranded = ui.optionIsSet("stranded")
if stranded:
sys.stderr.write("Sorry, stranded mode hasn't been implemented yet.")
sys.exit()
# we required two input files, so we know these will be present...
regions_1 = [e for e in BEDIterator(ui.getArgument(0), verbose=verbose)]
regions_2 = [e for e in BEDIterator(ui.getArgument(1), verbose=verbose)]
print jaccardIndex(regions_1, regions_2)
|
python
|
def main(args):
"""
main entry point for the GenomicIntJaccard script.
:param args: the arguments for this script, as a list of string. Should
already have had things like the script name stripped. That
is, if there are no args provided, this should be an empty
list.
"""
# get options and arguments
ui = getUI(args)
if ui.optionIsSet("test"):
# just run unit tests
unittest.main(argv=[sys.argv[0]])
elif ui.optionIsSet("help"):
# just show help
ui.usage()
else:
verbose = ui.optionIsSet("verbose")
stranded = ui.optionIsSet("stranded")
if stranded:
sys.stderr.write("Sorry, stranded mode hasn't been implemented yet.")
sys.exit()
# we required two input files, so we know these will be present...
regions_1 = [e for e in BEDIterator(ui.getArgument(0), verbose=verbose)]
regions_2 = [e for e in BEDIterator(ui.getArgument(1), verbose=verbose)]
print jaccardIndex(regions_1, regions_2)
|
[
"def",
"main",
"(",
"args",
")",
":",
"# get options and arguments",
"ui",
"=",
"getUI",
"(",
"args",
")",
"if",
"ui",
".",
"optionIsSet",
"(",
"\"test\"",
")",
":",
"# just run unit tests",
"unittest",
".",
"main",
"(",
"argv",
"=",
"[",
"sys",
".",
"argv",
"[",
"0",
"]",
"]",
")",
"elif",
"ui",
".",
"optionIsSet",
"(",
"\"help\"",
")",
":",
"# just show help",
"ui",
".",
"usage",
"(",
")",
"else",
":",
"verbose",
"=",
"ui",
".",
"optionIsSet",
"(",
"\"verbose\"",
")",
"stranded",
"=",
"ui",
".",
"optionIsSet",
"(",
"\"stranded\"",
")",
"if",
"stranded",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"Sorry, stranded mode hasn't been implemented yet.\"",
")",
"sys",
".",
"exit",
"(",
")",
"# we required two input files, so we know these will be present...",
"regions_1",
"=",
"[",
"e",
"for",
"e",
"in",
"BEDIterator",
"(",
"ui",
".",
"getArgument",
"(",
"0",
")",
",",
"verbose",
"=",
"verbose",
")",
"]",
"regions_2",
"=",
"[",
"e",
"for",
"e",
"in",
"BEDIterator",
"(",
"ui",
".",
"getArgument",
"(",
"1",
")",
",",
"verbose",
"=",
"verbose",
")",
"]",
"print",
"jaccardIndex",
"(",
"regions_1",
",",
"regions_2",
")"
] |
main entry point for the GenomicIntJaccard script.
:param args: the arguments for this script, as a list of string. Should
already have had things like the script name stripped. That
is, if there are no args provided, this should be an empty
list.
|
[
"main",
"entry",
"point",
"for",
"the",
"GenomicIntJaccard",
"script",
"."
] |
fddae123b5d817daa39496183f19c000d9c3791f
|
https://github.com/pjuren/pyokit/blob/fddae123b5d817daa39496183f19c000d9c3791f/src/pyokit/scripts/genomicIntJaccard.py#L80-L110
|
245,961
|
unbservices/clams
|
clams/__init__.py
|
_parse_doc
|
def _parse_doc(doc=''):
"""Parse a docstring into title and description.
Args
----
doc : str
A docstring, optionally with a title line, separated from a description
line by at least one blank line.
Returns
-------
title : str
The first line of the docstring.
description : str
The rest of a docstring.
"""
title, description = '', ''
if doc:
sp = doc.split('\n', 1)
title = sp[0].strip()
if len(sp) > 1:
description = textwrap.dedent(sp[1]).strip()
return (title, description)
|
python
|
def _parse_doc(doc=''):
"""Parse a docstring into title and description.
Args
----
doc : str
A docstring, optionally with a title line, separated from a description
line by at least one blank line.
Returns
-------
title : str
The first line of the docstring.
description : str
The rest of a docstring.
"""
title, description = '', ''
if doc:
sp = doc.split('\n', 1)
title = sp[0].strip()
if len(sp) > 1:
description = textwrap.dedent(sp[1]).strip()
return (title, description)
|
[
"def",
"_parse_doc",
"(",
"doc",
"=",
"''",
")",
":",
"title",
",",
"description",
"=",
"''",
",",
"''",
"if",
"doc",
":",
"sp",
"=",
"doc",
".",
"split",
"(",
"'\\n'",
",",
"1",
")",
"title",
"=",
"sp",
"[",
"0",
"]",
".",
"strip",
"(",
")",
"if",
"len",
"(",
"sp",
")",
">",
"1",
":",
"description",
"=",
"textwrap",
".",
"dedent",
"(",
"sp",
"[",
"1",
"]",
")",
".",
"strip",
"(",
")",
"return",
"(",
"title",
",",
"description",
")"
] |
Parse a docstring into title and description.
Args
----
doc : str
A docstring, optionally with a title line, separated from a description
line by at least one blank line.
Returns
-------
title : str
The first line of the docstring.
description : str
The rest of a docstring.
|
[
"Parse",
"a",
"docstring",
"into",
"title",
"and",
"description",
"."
] |
2ae0a36eb8f82a153d27f74ef37688f976952789
|
https://github.com/unbservices/clams/blob/2ae0a36eb8f82a153d27f74ef37688f976952789/clams/__init__.py#L154-L177
|
245,962
|
unbservices/clams
|
clams/__init__.py
|
command
|
def command(name):
"""Create a command, using the wrapped function as the handler.
Args
----
name : str
Name given to the created Command instance.
Returns
-------
Command
A new instance of Command, with handler set to the wrapped function.
"""
# TODO(nick): It would be nice if this didn't transform the handler. That
# way, handlers could be used and tested independently of this system.
# Unfortunately that's one of the better properties of the previous
# system that wasn't preserved in this rewrite.
def wrapper(func):
title, description = _parse_doc(func.__doc__)
command = Command(name=name, title=title, description=description)
command.add_handler(func)
argparse_args_list = getattr(func, 'ARGPARSE_ARGS_LIST', [])
for args, kwargs in argparse_args_list:
command.add_argument_tuple((args, kwargs))
return command
return wrapper
|
python
|
def command(name):
"""Create a command, using the wrapped function as the handler.
Args
----
name : str
Name given to the created Command instance.
Returns
-------
Command
A new instance of Command, with handler set to the wrapped function.
"""
# TODO(nick): It would be nice if this didn't transform the handler. That
# way, handlers could be used and tested independently of this system.
# Unfortunately that's one of the better properties of the previous
# system that wasn't preserved in this rewrite.
def wrapper(func):
title, description = _parse_doc(func.__doc__)
command = Command(name=name, title=title, description=description)
command.add_handler(func)
argparse_args_list = getattr(func, 'ARGPARSE_ARGS_LIST', [])
for args, kwargs in argparse_args_list:
command.add_argument_tuple((args, kwargs))
return command
return wrapper
|
[
"def",
"command",
"(",
"name",
")",
":",
"# TODO(nick): It would be nice if this didn't transform the handler. That",
"# way, handlers could be used and tested independently of this system.",
"# Unfortunately that's one of the better properties of the previous",
"# system that wasn't preserved in this rewrite.",
"def",
"wrapper",
"(",
"func",
")",
":",
"title",
",",
"description",
"=",
"_parse_doc",
"(",
"func",
".",
"__doc__",
")",
"command",
"=",
"Command",
"(",
"name",
"=",
"name",
",",
"title",
"=",
"title",
",",
"description",
"=",
"description",
")",
"command",
".",
"add_handler",
"(",
"func",
")",
"argparse_args_list",
"=",
"getattr",
"(",
"func",
",",
"'ARGPARSE_ARGS_LIST'",
",",
"[",
"]",
")",
"for",
"args",
",",
"kwargs",
"in",
"argparse_args_list",
":",
"command",
".",
"add_argument_tuple",
"(",
"(",
"args",
",",
"kwargs",
")",
")",
"return",
"command",
"return",
"wrapper"
] |
Create a command, using the wrapped function as the handler.
Args
----
name : str
Name given to the created Command instance.
Returns
-------
Command
A new instance of Command, with handler set to the wrapped function.
|
[
"Create",
"a",
"command",
"using",
"the",
"wrapped",
"function",
"as",
"the",
"handler",
"."
] |
2ae0a36eb8f82a153d27f74ef37688f976952789
|
https://github.com/unbservices/clams/blob/2ae0a36eb8f82a153d27f74ef37688f976952789/clams/__init__.py#L180-L206
|
245,963
|
unbservices/clams
|
clams/__init__.py
|
register_command
|
def register_command(parent_command, name):
"""Create and register a command with a parent command.
Args
----
parent_comand : Command
The parent command.
name : str
Name given to the created Command instance.
Example
-------
.. testcode::
mygit = Command(name='status')
@register_command(mygit, 'status')
def status():
print 'Nothing to commit.'
.. doctest::
:hide:
>>> mygit.init()
>>> mygit.parse_args(['status'])
Nothing to commit.
"""
def wrapper(func):
c = command(name)(func)
parent_command.add_subcommand(c)
return wrapper
|
python
|
def register_command(parent_command, name):
"""Create and register a command with a parent command.
Args
----
parent_comand : Command
The parent command.
name : str
Name given to the created Command instance.
Example
-------
.. testcode::
mygit = Command(name='status')
@register_command(mygit, 'status')
def status():
print 'Nothing to commit.'
.. doctest::
:hide:
>>> mygit.init()
>>> mygit.parse_args(['status'])
Nothing to commit.
"""
def wrapper(func):
c = command(name)(func)
parent_command.add_subcommand(c)
return wrapper
|
[
"def",
"register_command",
"(",
"parent_command",
",",
"name",
")",
":",
"def",
"wrapper",
"(",
"func",
")",
":",
"c",
"=",
"command",
"(",
"name",
")",
"(",
"func",
")",
"parent_command",
".",
"add_subcommand",
"(",
"c",
")",
"return",
"wrapper"
] |
Create and register a command with a parent command.
Args
----
parent_comand : Command
The parent command.
name : str
Name given to the created Command instance.
Example
-------
.. testcode::
mygit = Command(name='status')
@register_command(mygit, 'status')
def status():
print 'Nothing to commit.'
.. doctest::
:hide:
>>> mygit.init()
>>> mygit.parse_args(['status'])
Nothing to commit.
|
[
"Create",
"and",
"register",
"a",
"command",
"with",
"a",
"parent",
"command",
"."
] |
2ae0a36eb8f82a153d27f74ef37688f976952789
|
https://github.com/unbservices/clams/blob/2ae0a36eb8f82a153d27f74ef37688f976952789/clams/__init__.py#L246-L277
|
245,964
|
unbservices/clams
|
clams/__init__.py
|
Command._attach_arguments
|
def _attach_arguments(self):
"""Add the registered arguments to the parser."""
for arg in self.arguments:
self.parser.add_argument(*arg[0], **arg[1])
|
python
|
def _attach_arguments(self):
"""Add the registered arguments to the parser."""
for arg in self.arguments:
self.parser.add_argument(*arg[0], **arg[1])
|
[
"def",
"_attach_arguments",
"(",
"self",
")",
":",
"for",
"arg",
"in",
"self",
".",
"arguments",
":",
"self",
".",
"parser",
".",
"add_argument",
"(",
"*",
"arg",
"[",
"0",
"]",
",",
"*",
"*",
"arg",
"[",
"1",
"]",
")"
] |
Add the registered arguments to the parser.
|
[
"Add",
"the",
"registered",
"arguments",
"to",
"the",
"parser",
"."
] |
2ae0a36eb8f82a153d27f74ef37688f976952789
|
https://github.com/unbservices/clams/blob/2ae0a36eb8f82a153d27f74ef37688f976952789/clams/__init__.py#L360-L363
|
245,965
|
unbservices/clams
|
clams/__init__.py
|
Command._attach_subcommands
|
def _attach_subcommands(self):
"""Create a subparser and add the registered commands to it.
This will also call ``_init`` on each subcommand (in turn invoking its
``_attach_subcommands`` method).
"""
if self.subcommands:
self.subparsers = self.parser.add_subparsers()
for subcommand in self.subcommands:
subparser = self.subparsers.add_parser(subcommand.name,
help=subcommand.title)
if subcommand.handler:
self._register_handler(subparser, subcommand.handler)
subcommand._init(subparser)
|
python
|
def _attach_subcommands(self):
"""Create a subparser and add the registered commands to it.
This will also call ``_init`` on each subcommand (in turn invoking its
``_attach_subcommands`` method).
"""
if self.subcommands:
self.subparsers = self.parser.add_subparsers()
for subcommand in self.subcommands:
subparser = self.subparsers.add_parser(subcommand.name,
help=subcommand.title)
if subcommand.handler:
self._register_handler(subparser, subcommand.handler)
subcommand._init(subparser)
|
[
"def",
"_attach_subcommands",
"(",
"self",
")",
":",
"if",
"self",
".",
"subcommands",
":",
"self",
".",
"subparsers",
"=",
"self",
".",
"parser",
".",
"add_subparsers",
"(",
")",
"for",
"subcommand",
"in",
"self",
".",
"subcommands",
":",
"subparser",
"=",
"self",
".",
"subparsers",
".",
"add_parser",
"(",
"subcommand",
".",
"name",
",",
"help",
"=",
"subcommand",
".",
"title",
")",
"if",
"subcommand",
".",
"handler",
":",
"self",
".",
"_register_handler",
"(",
"subparser",
",",
"subcommand",
".",
"handler",
")",
"subcommand",
".",
"_init",
"(",
"subparser",
")"
] |
Create a subparser and add the registered commands to it.
This will also call ``_init`` on each subcommand (in turn invoking its
``_attach_subcommands`` method).
|
[
"Create",
"a",
"subparser",
"and",
"add",
"the",
"registered",
"commands",
"to",
"it",
"."
] |
2ae0a36eb8f82a153d27f74ef37688f976952789
|
https://github.com/unbservices/clams/blob/2ae0a36eb8f82a153d27f74ef37688f976952789/clams/__init__.py#L365-L379
|
245,966
|
unbservices/clams
|
clams/__init__.py
|
Command.parse_args
|
def parse_args(self, args=None, namespace=None):
"""Parse the command-line arguments and call the associated handler.
The signature is the same as `argparse.ArgumentParser.parse_args
<https://docs.python.org/2/library/argparse.html#argparse.ArgumentParser.parse_args>`_.
Args
----
args : list
A list of argument strings. If ``None`` the list is taken from
``sys.argv``.
namespace : argparse.Namespace
A Namespace instance. Defaults to a new empty Namespace.
Returns
-------
The return value of the handler called with the populated Namespace as
kwargs.
"""
assert self.initialized, '`init` must be called before `parse_args`.'
namespace = self.parser.parse_args(args, namespace)
handler = self._get_handler(namespace, remove_handler=True)
if handler:
return handler(**vars(namespace))
|
python
|
def parse_args(self, args=None, namespace=None):
"""Parse the command-line arguments and call the associated handler.
The signature is the same as `argparse.ArgumentParser.parse_args
<https://docs.python.org/2/library/argparse.html#argparse.ArgumentParser.parse_args>`_.
Args
----
args : list
A list of argument strings. If ``None`` the list is taken from
``sys.argv``.
namespace : argparse.Namespace
A Namespace instance. Defaults to a new empty Namespace.
Returns
-------
The return value of the handler called with the populated Namespace as
kwargs.
"""
assert self.initialized, '`init` must be called before `parse_args`.'
namespace = self.parser.parse_args(args, namespace)
handler = self._get_handler(namespace, remove_handler=True)
if handler:
return handler(**vars(namespace))
|
[
"def",
"parse_args",
"(",
"self",
",",
"args",
"=",
"None",
",",
"namespace",
"=",
"None",
")",
":",
"assert",
"self",
".",
"initialized",
",",
"'`init` must be called before `parse_args`.'",
"namespace",
"=",
"self",
".",
"parser",
".",
"parse_args",
"(",
"args",
",",
"namespace",
")",
"handler",
"=",
"self",
".",
"_get_handler",
"(",
"namespace",
",",
"remove_handler",
"=",
"True",
")",
"if",
"handler",
":",
"return",
"handler",
"(",
"*",
"*",
"vars",
"(",
"namespace",
")",
")"
] |
Parse the command-line arguments and call the associated handler.
The signature is the same as `argparse.ArgumentParser.parse_args
<https://docs.python.org/2/library/argparse.html#argparse.ArgumentParser.parse_args>`_.
Args
----
args : list
A list of argument strings. If ``None`` the list is taken from
``sys.argv``.
namespace : argparse.Namespace
A Namespace instance. Defaults to a new empty Namespace.
Returns
-------
The return value of the handler called with the populated Namespace as
kwargs.
|
[
"Parse",
"the",
"command",
"-",
"line",
"arguments",
"and",
"call",
"the",
"associated",
"handler",
"."
] |
2ae0a36eb8f82a153d27f74ef37688f976952789
|
https://github.com/unbservices/clams/blob/2ae0a36eb8f82a153d27f74ef37688f976952789/clams/__init__.py#L414-L438
|
245,967
|
jut-io/jut-python-tools
|
jut/commands/jobs.py
|
_print_jobs
|
def _print_jobs(jobs, token_manager, app_url, options):
"""
internal method to print the provided jobs array in a nice tabular
format
"""
accountids = set()
for job in jobs:
if job['user'] != 'jut.internal.user':
accountids.add(job['user'])
account_lookup = {
'jut.internal.user': {
'username': 'Jut Internal'
}
}
if accountids:
accounts_details = accounts.get_accounts(accountids,
token_manager=token_manager,
app_url=app_url)
for account in accounts_details['accounts']:
account_lookup[account['id']] = account
if options.format == 'text':
labels = OrderedDict()
labels['id'] = 'Job ID'
labels['alias'] = 'Juttle Name'
labels['username'] = 'Owner'
labels['_start_time'] = 'Start Date'
labels['persistent'] = 'Persistent'
max_lengths = {
'id': 0,
'alias': 0,
'username': 0,
'_start_time': 0,
'persistent': 0,
}
for key in max_lengths.keys():
max_lengths[key] = len(labels[key]) + 1
# retrieve username and fix up persistent marker
for job in jobs:
job['username'] = account_lookup[job['user']]['username']
job['persistent'] = 'YES' if job['timeout'] == 0 else 'NO'
# calculate max length of each column
for job in jobs:
for key in labels.keys():
if max_lengths[key] < len(job[key]):
max_lengths[key] = len(job[key]) + 1
# print labels
header = ''
for key in labels.keys():
header += (labels[key] + ' ' * (max_lengths[key] - len(labels[key])))
info(header)
for job in jobs:
line = ''
for key in labels.keys():
line += (job[key] + ' ' * (max_lengths[key] - len(job[key])))
info(line)
elif options.format == 'table':
headers = ['Job ID', 'Juttle Name', 'Owner', 'Start Date', 'Persistent']
table = []
for job in jobs:
owner = account_lookup[job['user']]['username']
persistent = 'YES' if job['timeout'] == 0 else 'NO'
name = ''
if 'alias' in job:
name = job['alias']
table.append([job['id'],
name,
owner,
job['_start_time'],
persistent])
info(tabulate.tabulate(table, headers, tablefmt="orgtbl"))
else:
raise JutException('Unsupported output format "%s"' %
options.format)
|
python
|
def _print_jobs(jobs, token_manager, app_url, options):
"""
internal method to print the provided jobs array in a nice tabular
format
"""
accountids = set()
for job in jobs:
if job['user'] != 'jut.internal.user':
accountids.add(job['user'])
account_lookup = {
'jut.internal.user': {
'username': 'Jut Internal'
}
}
if accountids:
accounts_details = accounts.get_accounts(accountids,
token_manager=token_manager,
app_url=app_url)
for account in accounts_details['accounts']:
account_lookup[account['id']] = account
if options.format == 'text':
labels = OrderedDict()
labels['id'] = 'Job ID'
labels['alias'] = 'Juttle Name'
labels['username'] = 'Owner'
labels['_start_time'] = 'Start Date'
labels['persistent'] = 'Persistent'
max_lengths = {
'id': 0,
'alias': 0,
'username': 0,
'_start_time': 0,
'persistent': 0,
}
for key in max_lengths.keys():
max_lengths[key] = len(labels[key]) + 1
# retrieve username and fix up persistent marker
for job in jobs:
job['username'] = account_lookup[job['user']]['username']
job['persistent'] = 'YES' if job['timeout'] == 0 else 'NO'
# calculate max length of each column
for job in jobs:
for key in labels.keys():
if max_lengths[key] < len(job[key]):
max_lengths[key] = len(job[key]) + 1
# print labels
header = ''
for key in labels.keys():
header += (labels[key] + ' ' * (max_lengths[key] - len(labels[key])))
info(header)
for job in jobs:
line = ''
for key in labels.keys():
line += (job[key] + ' ' * (max_lengths[key] - len(job[key])))
info(line)
elif options.format == 'table':
headers = ['Job ID', 'Juttle Name', 'Owner', 'Start Date', 'Persistent']
table = []
for job in jobs:
owner = account_lookup[job['user']]['username']
persistent = 'YES' if job['timeout'] == 0 else 'NO'
name = ''
if 'alias' in job:
name = job['alias']
table.append([job['id'],
name,
owner,
job['_start_time'],
persistent])
info(tabulate.tabulate(table, headers, tablefmt="orgtbl"))
else:
raise JutException('Unsupported output format "%s"' %
options.format)
|
[
"def",
"_print_jobs",
"(",
"jobs",
",",
"token_manager",
",",
"app_url",
",",
"options",
")",
":",
"accountids",
"=",
"set",
"(",
")",
"for",
"job",
"in",
"jobs",
":",
"if",
"job",
"[",
"'user'",
"]",
"!=",
"'jut.internal.user'",
":",
"accountids",
".",
"add",
"(",
"job",
"[",
"'user'",
"]",
")",
"account_lookup",
"=",
"{",
"'jut.internal.user'",
":",
"{",
"'username'",
":",
"'Jut Internal'",
"}",
"}",
"if",
"accountids",
":",
"accounts_details",
"=",
"accounts",
".",
"get_accounts",
"(",
"accountids",
",",
"token_manager",
"=",
"token_manager",
",",
"app_url",
"=",
"app_url",
")",
"for",
"account",
"in",
"accounts_details",
"[",
"'accounts'",
"]",
":",
"account_lookup",
"[",
"account",
"[",
"'id'",
"]",
"]",
"=",
"account",
"if",
"options",
".",
"format",
"==",
"'text'",
":",
"labels",
"=",
"OrderedDict",
"(",
")",
"labels",
"[",
"'id'",
"]",
"=",
"'Job ID'",
"labels",
"[",
"'alias'",
"]",
"=",
"'Juttle Name'",
"labels",
"[",
"'username'",
"]",
"=",
"'Owner'",
"labels",
"[",
"'_start_time'",
"]",
"=",
"'Start Date'",
"labels",
"[",
"'persistent'",
"]",
"=",
"'Persistent'",
"max_lengths",
"=",
"{",
"'id'",
":",
"0",
",",
"'alias'",
":",
"0",
",",
"'username'",
":",
"0",
",",
"'_start_time'",
":",
"0",
",",
"'persistent'",
":",
"0",
",",
"}",
"for",
"key",
"in",
"max_lengths",
".",
"keys",
"(",
")",
":",
"max_lengths",
"[",
"key",
"]",
"=",
"len",
"(",
"labels",
"[",
"key",
"]",
")",
"+",
"1",
"# retrieve username and fix up persistent marker",
"for",
"job",
"in",
"jobs",
":",
"job",
"[",
"'username'",
"]",
"=",
"account_lookup",
"[",
"job",
"[",
"'user'",
"]",
"]",
"[",
"'username'",
"]",
"job",
"[",
"'persistent'",
"]",
"=",
"'YES'",
"if",
"job",
"[",
"'timeout'",
"]",
"==",
"0",
"else",
"'NO'",
"# calculate max length of each column",
"for",
"job",
"in",
"jobs",
":",
"for",
"key",
"in",
"labels",
".",
"keys",
"(",
")",
":",
"if",
"max_lengths",
"[",
"key",
"]",
"<",
"len",
"(",
"job",
"[",
"key",
"]",
")",
":",
"max_lengths",
"[",
"key",
"]",
"=",
"len",
"(",
"job",
"[",
"key",
"]",
")",
"+",
"1",
"# print labels",
"header",
"=",
"''",
"for",
"key",
"in",
"labels",
".",
"keys",
"(",
")",
":",
"header",
"+=",
"(",
"labels",
"[",
"key",
"]",
"+",
"' '",
"*",
"(",
"max_lengths",
"[",
"key",
"]",
"-",
"len",
"(",
"labels",
"[",
"key",
"]",
")",
")",
")",
"info",
"(",
"header",
")",
"for",
"job",
"in",
"jobs",
":",
"line",
"=",
"''",
"for",
"key",
"in",
"labels",
".",
"keys",
"(",
")",
":",
"line",
"+=",
"(",
"job",
"[",
"key",
"]",
"+",
"' '",
"*",
"(",
"max_lengths",
"[",
"key",
"]",
"-",
"len",
"(",
"job",
"[",
"key",
"]",
")",
")",
")",
"info",
"(",
"line",
")",
"elif",
"options",
".",
"format",
"==",
"'table'",
":",
"headers",
"=",
"[",
"'Job ID'",
",",
"'Juttle Name'",
",",
"'Owner'",
",",
"'Start Date'",
",",
"'Persistent'",
"]",
"table",
"=",
"[",
"]",
"for",
"job",
"in",
"jobs",
":",
"owner",
"=",
"account_lookup",
"[",
"job",
"[",
"'user'",
"]",
"]",
"[",
"'username'",
"]",
"persistent",
"=",
"'YES'",
"if",
"job",
"[",
"'timeout'",
"]",
"==",
"0",
"else",
"'NO'",
"name",
"=",
"''",
"if",
"'alias'",
"in",
"job",
":",
"name",
"=",
"job",
"[",
"'alias'",
"]",
"table",
".",
"append",
"(",
"[",
"job",
"[",
"'id'",
"]",
",",
"name",
",",
"owner",
",",
"job",
"[",
"'_start_time'",
"]",
",",
"persistent",
"]",
")",
"info",
"(",
"tabulate",
".",
"tabulate",
"(",
"table",
",",
"headers",
",",
"tablefmt",
"=",
"\"orgtbl\"",
")",
")",
"else",
":",
"raise",
"JutException",
"(",
"'Unsupported output format \"%s\"'",
"%",
"options",
".",
"format",
")"
] |
internal method to print the provided jobs array in a nice tabular
format
|
[
"internal",
"method",
"to",
"print",
"the",
"provided",
"jobs",
"array",
"in",
"a",
"nice",
"tabular",
"format"
] |
65574d23f51a7bbced9bb25010d02da5ca5d906f
|
https://github.com/jut-io/jut-python-tools/blob/65574d23f51a7bbced9bb25010d02da5ca5d906f/jut/commands/jobs.py#L22-L114
|
245,968
|
jut-io/jut-python-tools
|
jut/commands/jobs.py
|
list
|
def list(options):
    """
    Show all currently running jobs for the selected deployment.

    Arguments:
        options: parsed CLI options; ``options.deployment`` optionally
                 overrides the configured default deployment, and
                 ``options.format`` controls how jobs are printed.
    """
    configuration = config.get_default()
    app_url = configuration['app_url']
    # The CLI flag wins over the configured default deployment.
    if options.deployment is not None:
        deployment_name = options.deployment
    else:
        deployment_name = configuration['deployment_name']
    client_id = configuration['client_id']
    client_secret = configuration['client_secret']
    token_manager = auth.TokenManager(client_id=client_id,
                                      client_secret=client_secret,
                                      app_url=app_url)
    jobs = data_engine.get_jobs(deployment_name,
                                token_manager=token_manager,
                                app_url=app_url)
    if not jobs:
        error('No running jobs')
    else:
        _print_jobs(jobs, token_manager, app_url, options)
|
python
|
def list(options):
"""
show all currently running jobs
"""
configuration = config.get_default()
app_url = configuration['app_url']
if options.deployment != None:
deployment_name = options.deployment
else:
deployment_name = configuration['deployment_name']
client_id = configuration['client_id']
client_secret = configuration['client_secret']
token_manager = auth.TokenManager(client_id=client_id,
client_secret=client_secret,
app_url=app_url)
jobs = data_engine.get_jobs(deployment_name,
token_manager=token_manager,
app_url=app_url)
if len(jobs) == 0:
error('No running jobs')
else:
_print_jobs(jobs, token_manager, app_url, options)
|
[
"def",
"list",
"(",
"options",
")",
":",
"configuration",
"=",
"config",
".",
"get_default",
"(",
")",
"app_url",
"=",
"configuration",
"[",
"'app_url'",
"]",
"if",
"options",
".",
"deployment",
"!=",
"None",
":",
"deployment_name",
"=",
"options",
".",
"deployment",
"else",
":",
"deployment_name",
"=",
"configuration",
"[",
"'deployment_name'",
"]",
"client_id",
"=",
"configuration",
"[",
"'client_id'",
"]",
"client_secret",
"=",
"configuration",
"[",
"'client_secret'",
"]",
"token_manager",
"=",
"auth",
".",
"TokenManager",
"(",
"client_id",
"=",
"client_id",
",",
"client_secret",
"=",
"client_secret",
",",
"app_url",
"=",
"app_url",
")",
"jobs",
"=",
"data_engine",
".",
"get_jobs",
"(",
"deployment_name",
",",
"token_manager",
"=",
"token_manager",
",",
"app_url",
"=",
"app_url",
")",
"if",
"len",
"(",
"jobs",
")",
"==",
"0",
":",
"error",
"(",
"'No running jobs'",
")",
"else",
":",
"_print_jobs",
"(",
"jobs",
",",
"token_manager",
",",
"app_url",
",",
"options",
")"
] |
show all currently running jobs
|
[
"show",
"all",
"currently",
"running",
"jobs"
] |
65574d23f51a7bbced9bb25010d02da5ca5d906f
|
https://github.com/jut-io/jut-python-tools/blob/65574d23f51a7bbced9bb25010d02da5ca5d906f/jut/commands/jobs.py#L116-L145
|
245,969
|
jut-io/jut-python-tools
|
jut/commands/jobs.py
|
kill
|
def kill(options):
    """
    Kill a specific job by id, prompting for confirmation unless
    ``options.yes`` is set.

    Arguments:
        options: parsed CLI options; uses ``options.job_id``,
                 ``options.deployment`` (optional override) and
                 ``options.yes`` (skip the confirmation prompt).

    Raises:
        JutException: if the confirmation answer is anything but 'Y'.
    """
    configuration = config.get_default()
    app_url = configuration['app_url']
    # The CLI flag wins over the configured default deployment.
    if options.deployment is not None:
        deployment_name = options.deployment
    else:
        deployment_name = configuration['deployment_name']
    client_id = configuration['client_id']
    client_secret = configuration['client_secret']
    token_manager = auth.TokenManager(client_id=client_id,
                                      client_secret=client_secret,
                                      app_url=app_url)
    job_details = data_engine.get_job_details(options.job_id,
                                              deployment_name,
                                              token_manager=token_manager,
                                              app_url=app_url)
    # Force table output for the confirmation display.
    options.format = 'table'
    if options.yes:
        decision = 'Y'
    else:
        _print_jobs([job_details], token_manager, app_url, options)
        decision = prompt('Are you sure you want to delete the above job? (Y/N)')
    if decision == 'Y':
        data_engine.delete_job(options.job_id.strip(),
                               deployment_name,
                               token_manager=token_manager,
                               app_url=app_url)
    else:
        raise JutException('Unexpected option "%s"' % decision)
|
python
|
def kill(options):
"""
kill a specific job by id
"""
configuration = config.get_default()
app_url = configuration['app_url']
if options.deployment != None:
deployment_name = options.deployment
else:
deployment_name = configuration['deployment_name']
client_id = configuration['client_id']
client_secret = configuration['client_secret']
token_manager = auth.TokenManager(client_id=client_id,
client_secret=client_secret,
app_url=app_url)
job_details = data_engine.get_job_details(options.job_id,
deployment_name,
token_manager=token_manager,
app_url=app_url)
options.format = 'table'
if options.yes:
decision = 'Y'
else:
_print_jobs([job_details], token_manager, app_url, options)
decision = prompt('Are you sure you want to delete the above job? (Y/N)')
if decision == 'Y':
data_engine.delete_job(options.job_id.strip(),
deployment_name,
token_manager=token_manager,
app_url=app_url)
else:
raise JutException('Unexpected option "%s"' % decision)
|
[
"def",
"kill",
"(",
"options",
")",
":",
"configuration",
"=",
"config",
".",
"get_default",
"(",
")",
"app_url",
"=",
"configuration",
"[",
"'app_url'",
"]",
"if",
"options",
".",
"deployment",
"!=",
"None",
":",
"deployment_name",
"=",
"options",
".",
"deployment",
"else",
":",
"deployment_name",
"=",
"configuration",
"[",
"'deployment_name'",
"]",
"client_id",
"=",
"configuration",
"[",
"'client_id'",
"]",
"client_secret",
"=",
"configuration",
"[",
"'client_secret'",
"]",
"token_manager",
"=",
"auth",
".",
"TokenManager",
"(",
"client_id",
"=",
"client_id",
",",
"client_secret",
"=",
"client_secret",
",",
"app_url",
"=",
"app_url",
")",
"job_details",
"=",
"data_engine",
".",
"get_job_details",
"(",
"options",
".",
"job_id",
",",
"deployment_name",
",",
"token_manager",
"=",
"token_manager",
",",
"app_url",
"=",
"app_url",
")",
"options",
".",
"format",
"=",
"'table'",
"if",
"options",
".",
"yes",
":",
"decision",
"=",
"'Y'",
"else",
":",
"_print_jobs",
"(",
"[",
"job_details",
"]",
",",
"token_manager",
",",
"app_url",
",",
"options",
")",
"decision",
"=",
"prompt",
"(",
"'Are you sure you want to delete the above job? (Y/N)'",
")",
"if",
"decision",
"==",
"'Y'",
":",
"data_engine",
".",
"delete_job",
"(",
"options",
".",
"job_id",
".",
"strip",
"(",
")",
",",
"deployment_name",
",",
"token_manager",
"=",
"token_manager",
",",
"app_url",
"=",
"app_url",
")",
"else",
":",
"raise",
"JutException",
"(",
"'Unexpected option \"%s\"'",
"%",
"decision",
")"
] |
kill a specific job by id
|
[
"kill",
"a",
"specific",
"job",
"by",
"id"
] |
65574d23f51a7bbced9bb25010d02da5ca5d906f
|
https://github.com/jut-io/jut-python-tools/blob/65574d23f51a7bbced9bb25010d02da5ca5d906f/jut/commands/jobs.py#L148-L188
|
245,970
|
xtrementl/focus
|
focus/registry.py
|
Registry.get
|
def get(self, key):
    """ Executes the callable registered at the specified key and returns
    its value. Subsequent queries are cached internally.

    `key`
        String key for a previously stored callable.

    Returns ``None`` when no callable is registered under ``key``.
    """
    if key not in self._actions:
        return None
    # Lazily evaluate the registered callable and memoize its result.
    if key not in self._cache:
        self._cache[key] = self._actions[key]()
    return self._cache[key]
|
python
|
def get(self, key):
""" Executes the callable registered at the specified key and returns
its value. Subsequent queries are cached internally.
`key`
String key for a previously stored callable.
"""
if not key in self._actions:
return None
if not key in self._cache:
self._cache[key] = self._actions[key]()
return self._cache[key]
|
[
"def",
"get",
"(",
"self",
",",
"key",
")",
":",
"if",
"not",
"key",
"in",
"self",
".",
"_actions",
":",
"return",
"None",
"if",
"not",
"key",
"in",
"self",
".",
"_cache",
":",
"self",
".",
"_cache",
"[",
"key",
"]",
"=",
"self",
".",
"_actions",
"[",
"key",
"]",
"(",
")",
"return",
"self",
".",
"_cache",
"[",
"key",
"]"
] |
Executes the callable registered at the specified key and returns
its value. Subsequent queries are cached internally.
`key`
String key for a previously stored callable.
|
[
"Executes",
"the",
"callable",
"registered",
"at",
"the",
"specified",
"key",
"and",
"returns",
"its",
"value",
".",
"Subsequent",
"queries",
"are",
"cached",
"internally",
"."
] |
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
|
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/registry.py#L30-L43
|
245,971
|
xtrementl/focus
|
focus/registry.py
|
Registry.register
|
def register(self, key, value):
    """ Stores ``value`` (a callable) under ``key``, replacing any
    previous registration.

    `key`
        String key to identify a callable.

    `value`
        Callable object.
    """
    self._actions[key] = value
    # Drop any memoized result so the next get() re-evaluates the callable.
    self._cache.pop(key, None)
|
python
|
def register(self, key, value):
""" Registers a callable with the specified key.
`key`
String key to identify a callable.
`value`
Callable object.
"""
self._actions[key] = value
# invalidate cache of results for existing key
if key in self._cache:
del self._cache[key]
|
[
"def",
"register",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"self",
".",
"_actions",
"[",
"key",
"]",
"=",
"value",
"# invalidate cache of results for existing key",
"if",
"key",
"in",
"self",
".",
"_cache",
":",
"del",
"self",
".",
"_cache",
"[",
"key",
"]"
] |
Registers a callable with the specified key.
`key`
String key to identify a callable.
`value`
Callable object.
|
[
"Registers",
"a",
"callable",
"with",
"the",
"specified",
"key",
"."
] |
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
|
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/registry.py#L45-L58
|
245,972
|
xtrementl/focus
|
focus/registry.py
|
ExtRegistry.get
|
def get(self, key):
    """ Executes the callable registered at the specified key and returns
    its value together with its registered type info. Results are cached
    by the base registry.

    `key`
        String key for a previously stored callable.

    Returns ``None`` when nothing is registered under ``key``, otherwise
    a ``(value, type_info)`` tuple.
    """
    value = super(ExtRegistry, self).get(key)
    if value is None:
        return None
    return value, self._type_info.get(key)
|
python
|
def get(self, key):
""" Executes the callable registered at the specified key and returns
its value along with type info. Subsequent queries are cached
internally.
`key`
String key for a previously stored callable.
"""
obj = super(ExtRegistry, self).get(key)
if obj is None:
return obj
return (obj, self._type_info.get(key))
|
[
"def",
"get",
"(",
"self",
",",
"key",
")",
":",
"obj",
"=",
"super",
"(",
"ExtRegistry",
",",
"self",
")",
".",
"get",
"(",
"key",
")",
"if",
"obj",
"is",
"None",
":",
"return",
"obj",
"return",
"(",
"obj",
",",
"self",
".",
"_type_info",
".",
"get",
"(",
"key",
")",
")"
] |
Executes the callable registered at the specified key and returns
its value along with type info. Subsequent queries are cached
internally.
`key`
String key for a previously stored callable.
|
[
"Executes",
"the",
"callable",
"registered",
"at",
"the",
"specified",
"key",
"and",
"returns",
"its",
"value",
"along",
"with",
"type",
"info",
".",
"Subsequent",
"queries",
"are",
"cached",
"internally",
"."
] |
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
|
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/registry.py#L95-L108
|
245,973
|
xtrementl/focus
|
focus/registry.py
|
ExtRegistry.register
|
def register(self, key, value, type_info):
    """ Registers a callable under ``key`` together with its type info.

    `key`
        String key to identify a callable.

    `value`
        Callable object.

    `type_info`
        Dictionary with type information about the value provided.
    """
    # When re-registering the same callable, merge into the existing
    # type info; otherwise start from a fresh copy of ``type_info``.
    same_action = self._actions.get(key) == value
    if same_action and key in self._type_info:
        self._type_info[key].update(type_info)
    else:
        self._type_info[key] = dict(type_info)
    super(ExtRegistry, self).register(key, value)
|
python
|
def register(self, key, value, type_info):
""" Registers a callable with the specified key and type info.
`key`
String key to identify a callable.
`value`
Callable object.
`type_info`
Dictionary with type information about the value provided.
"""
# check for existing action
old_action = self._actions.get(key)
# update existing type info if value hasn't changed
if old_action == value and key in self._type_info:
self._type_info[key].update(type_info)
else:
self._type_info[key] = dict(type_info)
super(ExtRegistry, self).register(key, value)
|
[
"def",
"register",
"(",
"self",
",",
"key",
",",
"value",
",",
"type_info",
")",
":",
"# check for existing action",
"old_action",
"=",
"self",
".",
"_actions",
".",
"get",
"(",
"key",
")",
"# update existing type info if value hasn't changed",
"if",
"old_action",
"==",
"value",
"and",
"key",
"in",
"self",
".",
"_type_info",
":",
"self",
".",
"_type_info",
"[",
"key",
"]",
".",
"update",
"(",
"type_info",
")",
"else",
":",
"self",
".",
"_type_info",
"[",
"key",
"]",
"=",
"dict",
"(",
"type_info",
")",
"super",
"(",
"ExtRegistry",
",",
"self",
")",
".",
"register",
"(",
"key",
",",
"value",
")"
] |
Registers a callable with the specified key and type info.
`key`
String key to identify a callable.
`value`
Callable object.
`type_info`
Dictionary with type information about the value provided.
|
[
"Registers",
"a",
"callable",
"with",
"the",
"specified",
"key",
"and",
"type",
"info",
"."
] |
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
|
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/registry.py#L110-L130
|
245,974
|
KnowledgeLinks/rdfframework
|
rdfframework/ingesters/xmlingester.py
|
run_extractor
|
def run_extractor(*args, **kwargs):
    """
    Builds an Extractor from the given arguments and immediately runs it.
    """
    Extractor(*args, **kwargs).run(**kwargs)
|
python
|
def run_extractor(*args, **kwargs):
"""
Initializes and runs an extractor
"""
# pdb.set_trace()
extractor = Extractor(*args, **kwargs)
result = extractor.run(**kwargs)
|
[
"def",
"run_extractor",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# pdb.set_trace()",
"extractor",
"=",
"Extractor",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"result",
"=",
"extractor",
".",
"run",
"(",
"*",
"*",
"kwargs",
")"
] |
Initializes and runs an extractor
|
[
"Initializes",
"and",
"runs",
"an",
"extractor"
] |
9ec32dcc4bed51650a4b392cc5c15100fef7923a
|
https://github.com/KnowledgeLinks/rdfframework/blob/9ec32dcc4bed51650a4b392cc5c15100fef7923a/rdfframework/ingesters/xmlingester.py#L96-L102
|
245,975
|
KnowledgeLinks/rdfframework
|
rdfframework/ingesters/xmlingester.py
|
Extractor.run
|
def run(self, tag=None, output=None, **kwargs):
    """
    Runs the extractor: iterates the XML source and counts the elements
    whose rdf:type resource matches ``self.filter_val``, printing the
    total and elapsed time when done.

    Args:
    -----
        tag: optional element tag (URI string); when given, iteration is
             restricted to elements with that tag.
        output: ['filepath', None]
        **kwargs: unused; accepted for interface compatibility.
    """
    start = datetime.datetime.now()
    count = 0
    if tag:
        tag = Uri(tag)
        xml_generator = etree.iterparse(self.source, tag=tag.etree)
    else:
        xml_generator = etree.iterparse(self.source)
    for event, element in xml_generator:
        # Collect the rdf:resource values from the element's rdf:type tags.
        type_tags = element.findall(_RDF_TYPE_TAG)
        rdf_types = [el.get(_RES_TAG)
                     for el in type_tags
                     if el.get(_RES_TAG)]
        # Removed a leftover pdb.set_trace() that halted the run on every
        # matching element.
        if str(self.filter_val) in rdf_types:
            count += 1
        # Free the parsed element to keep memory bounded on large files.
        element.clear()
    print("Found '{}' items in {}".format(count,
                                          (datetime.datetime.now() - start)))
|
python
|
def run(self, tag=None, output=None, **kwargs):
"""
runs the extractor
Args:
-----
output: ['filepath', None]
"""
start = datetime.datetime.now()
count = 0
if tag:
tag = Uri(tag)
xml_generator = etree.iterparse(self.source,
#events=("start", "end"),
tag=tag.etree)
else:
xml_generator = etree.iterparse(self.source) #,
#events=("start", "end"))
i = 0
for event, element in xml_generator:
type_tags = element.findall(_RDF_TYPE_TAG)
rdf_types = [el.get(_RES_TAG)
for el in type_tags
if el.get(_RES_TAG)]
# print(rdf_types)
if str(self.filter_val) in rdf_types:
pdb.set_trace()
# print("%s - %s - %s - %s" % (event,
# element.tag,
# element.attrib,
# element.text))
count += 1
# if i == 100:
# break
i += 1
element.clear()
print("Found '{}' items in {}".format(count,
(datetime.datetime.now() - start)))
|
[
"def",
"run",
"(",
"self",
",",
"tag",
"=",
"None",
",",
"output",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"start",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"count",
"=",
"0",
"if",
"tag",
":",
"tag",
"=",
"Uri",
"(",
"tag",
")",
"xml_generator",
"=",
"etree",
".",
"iterparse",
"(",
"self",
".",
"source",
",",
"#events=(\"start\", \"end\"),",
"tag",
"=",
"tag",
".",
"etree",
")",
"else",
":",
"xml_generator",
"=",
"etree",
".",
"iterparse",
"(",
"self",
".",
"source",
")",
"#,",
"#events=(\"start\", \"end\"))",
"i",
"=",
"0",
"for",
"event",
",",
"element",
"in",
"xml_generator",
":",
"type_tags",
"=",
"element",
".",
"findall",
"(",
"_RDF_TYPE_TAG",
")",
"rdf_types",
"=",
"[",
"el",
".",
"get",
"(",
"_RES_TAG",
")",
"for",
"el",
"in",
"type_tags",
"if",
"el",
".",
"get",
"(",
"_RES_TAG",
")",
"]",
"# print(rdf_types)",
"if",
"str",
"(",
"self",
".",
"filter_val",
")",
"in",
"rdf_types",
":",
"pdb",
".",
"set_trace",
"(",
")",
"# print(\"%s - %s - %s - %s\" % (event,",
"# element.tag,",
"# element.attrib,",
"# element.text))",
"count",
"+=",
"1",
"# if i == 100:",
"# break",
"i",
"+=",
"1",
"element",
".",
"clear",
"(",
")",
"print",
"(",
"\"Found '{}' items in {}\"",
".",
"format",
"(",
"count",
",",
"(",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"-",
"start",
")",
")",
")"
] |
runs the extractor
Args:
-----
output: ['filepath', None]
|
[
"runs",
"the",
"extractor"
] |
9ec32dcc4bed51650a4b392cc5c15100fef7923a
|
https://github.com/KnowledgeLinks/rdfframework/blob/9ec32dcc4bed51650a4b392cc5c15100fef7923a/rdfframework/ingesters/xmlingester.py#L39-L77
|
245,976
|
KnowledgeLinks/rdfframework
|
rdfframework/datasets/RemoteException.py
|
withTracebackPrint
|
def withTracebackPrint(ErrorType, thrownError, _traceback):
    '''returns an Exception object for the given ErrorType of the thrownError
    and the _traceback

    can be used like withTracebackPrint(*sys.exc_info())'''
    # Render the traceback into a string buffer; the local is named `buf`
    # to avoid shadowing the builtin `file`.
    buf = StringIO.StringIO()
    traceback.print_exception(ErrorType, thrownError, _traceback, file=buf)
    return _loadError(ErrorType, thrownError, buf.getvalue())
|
python
|
def withTracebackPrint(ErrorType, thrownError, _traceback):
'''returns an Exception object for the given ErrorType of the thrownError
and the _traceback
can be used like withTracebackPrint(*sys.exc_info())'''
file = StringIO.StringIO()
traceback.print_exception(ErrorType, thrownError, _traceback, file = file)
return _loadError(ErrorType, thrownError, file.getvalue())
|
[
"def",
"withTracebackPrint",
"(",
"ErrorType",
",",
"thrownError",
",",
"_traceback",
")",
":",
"file",
"=",
"StringIO",
".",
"StringIO",
"(",
")",
"traceback",
".",
"print_exception",
"(",
"ErrorType",
",",
"thrownError",
",",
"_traceback",
",",
"file",
"=",
"file",
")",
"return",
"_loadError",
"(",
"ErrorType",
",",
"thrownError",
",",
"file",
".",
"getvalue",
"(",
")",
")"
] |
returns an Exception object for the given ErrorType of the thrownError
and the _traceback
can be used like withTracebackPrint(*sys.exc_info())
|
[
"returns",
"an",
"Exception",
"object",
"for",
"the",
"given",
"ErrorType",
"of",
"the",
"thrownError",
"and",
"the",
"_traceback"
] |
9ec32dcc4bed51650a4b392cc5c15100fef7923a
|
https://github.com/KnowledgeLinks/rdfframework/blob/9ec32dcc4bed51650a4b392cc5c15100fef7923a/rdfframework/datasets/RemoteException.py#L16-L23
|
245,977
|
KnowledgeLinks/rdfframework
|
rdfframework/datasets/RemoteException.py
|
_newRemoteException
|
def _newRemoteException(ErrorType):
    '''create a new RemoteExceptionType from a given errortype'''
    # Build a base type that subclasses ErrorType through the custom
    # metaclass, so isinstance checks against ErrorType still succeed.
    RemoteErrorBaseType = _RemoteExceptionMeta('', (ErrorType,), {})
    class RemoteException(RemoteErrorBaseType):
        # The original exception type this remote wrapper stands in for.
        BaseExceptionType = ErrorType
        def __init__(self, thrownError, tracebackString):
            self.thrownError = thrownError
            self.tracebackString = tracebackString
            # Forward the original args so str()/args behave like the base.
            RemoteErrorBaseType.__init__(self, *thrownError.args)
        # Reconstruction hook referenced by __reduce__ when unpickling.
        loadError = staticmethod(_loadError)
        def __str__(self):
            # Show the captured remote traceback, then the original error.
            return '\n%s\n%s' % (self.tracebackString, self.thrownError)
        def __reduce__(self):
            # Pickle as (loadError, args) so the exception can be rebuilt
            # on the receiving side of a process boundary.
            args = (ErrorType, self.thrownError, self.tracebackString)
            return self.loadError, args
    RemoteException.__name__ = 'Remote' + ErrorType.__name__
    return RemoteException
|
python
|
def _newRemoteException(ErrorType):
'''create a new RemoteExceptionType from a given errortype'''
RemoteErrorBaseType = _RemoteExceptionMeta('', (ErrorType,), {})
class RemoteException(RemoteErrorBaseType):
BaseExceptionType = ErrorType
def __init__(self, thrownError, tracebackString):
self.thrownError = thrownError
self.tracebackString = tracebackString
RemoteErrorBaseType.__init__(self, *thrownError.args)
loadError = staticmethod(_loadError)
def __str__(self):
return '\n%s\n%s' % (self.tracebackString, self.thrownError)
def __reduce__(self):
args = (ErrorType, self.thrownError, self.tracebackString)
return self.loadError, args
RemoteException.__name__ = 'Remote' + ErrorType.__name__
return RemoteException
|
[
"def",
"_newRemoteException",
"(",
"ErrorType",
")",
":",
"RemoteErrorBaseType",
"=",
"_RemoteExceptionMeta",
"(",
"''",
",",
"(",
"ErrorType",
",",
")",
",",
"{",
"}",
")",
"class",
"RemoteException",
"(",
"RemoteErrorBaseType",
")",
":",
"BaseExceptionType",
"=",
"ErrorType",
"def",
"__init__",
"(",
"self",
",",
"thrownError",
",",
"tracebackString",
")",
":",
"self",
".",
"thrownError",
"=",
"thrownError",
"self",
".",
"tracebackString",
"=",
"tracebackString",
"RemoteErrorBaseType",
".",
"__init__",
"(",
"self",
",",
"*",
"thrownError",
".",
"args",
")",
"loadError",
"=",
"staticmethod",
"(",
"_loadError",
")",
"def",
"__str__",
"(",
"self",
")",
":",
"return",
"'\\n%s\\n%s'",
"%",
"(",
"self",
".",
"tracebackString",
",",
"self",
".",
"thrownError",
")",
"def",
"__reduce__",
"(",
"self",
")",
":",
"args",
"=",
"(",
"ErrorType",
",",
"self",
".",
"thrownError",
",",
"self",
".",
"tracebackString",
")",
"return",
"self",
".",
"loadError",
",",
"args",
"RemoteException",
".",
"__name__",
"=",
"'Remote'",
"+",
"ErrorType",
".",
"__name__",
"return",
"RemoteException"
] |
create a new RemoteExceptionType from a given errortype
|
[
"create",
"a",
"new",
"RemoteExceptionType",
"from",
"a",
"given",
"errortype"
] |
9ec32dcc4bed51650a4b392cc5c15100fef7923a
|
https://github.com/KnowledgeLinks/rdfframework/blob/9ec32dcc4bed51650a4b392cc5c15100fef7923a/rdfframework/datasets/RemoteException.py#L41-L61
|
245,978
|
KnowledgeLinks/rdfframework
|
rdfframework/datasets/RemoteException.py
|
_loadError
|
def _loadError(ErrorType, thrownError, tracebackString):
    '''constructor of RemoteExceptions'''
    # Resolve the RemoteException subtype for ErrorType and instantiate it.
    return asRemoteException(ErrorType)(thrownError, tracebackString)
|
python
|
def _loadError(ErrorType, thrownError, tracebackString):
'''constructor of RemoteExceptions'''
RemoteException = asRemoteException(ErrorType)
return RemoteException(thrownError, tracebackString)
|
[
"def",
"_loadError",
"(",
"ErrorType",
",",
"thrownError",
",",
"tracebackString",
")",
":",
"RemoteException",
"=",
"asRemoteException",
"(",
"ErrorType",
")",
"return",
"RemoteException",
"(",
"thrownError",
",",
"tracebackString",
")"
] |
constructor of RemoteExceptions
|
[
"constructor",
"of",
"RemoteExceptions"
] |
9ec32dcc4bed51650a4b392cc5c15100fef7923a
|
https://github.com/KnowledgeLinks/rdfframework/blob/9ec32dcc4bed51650a4b392cc5c15100fef7923a/rdfframework/datasets/RemoteException.py#L85-L88
|
245,979
|
c0ntrol-x/p4rr0t007
|
p4rr0t007/web.py
|
Application.handle_exception
|
def handle_exception(self, e):
    """called by flask when an exception happens. p4rr0t007 always returns
    a 500.html response that must exist under the given
    ``template_folder`` constructor param.
    """
    sys.stderr.write("p4rr0t007 handled an error:")
    # format_exc() takes no exception argument: its positional parameter is
    # `limit`, so passing `e` breaks on Python 3. It formats the exception
    # currently being handled, which is exactly `e` here.
    sys.stderr.write(traceback.format_exc())
    sys.stderr.flush()
    self.log.exception('failed to handle {} {}'.format(request.method, request.url))
    try:
        return self.template_response(self.error_template_name, code=500)
    except TemplateError:
        # Rendering the error template itself failed; report it without
        # shadowing the original exception `e`, then fall back to text.
        sys.stderr.write('failed render the {}/{}'.format(self.template_folder, self.error_template_name))
        sys.stderr.write(traceback.format_exc())
        sys.stderr.flush()
        return self.text_response('5ERV3R 3RR0R')
|
python
|
def handle_exception(self, e):
"""called by flask when an exception happens. p4rr0t007 always returns
a 500.html response that must exist under the given
``template_folder`` constructor param.
"""
sys.stderr.write("p4rr0t007 handled an error:")
sys.stderr.write(traceback.format_exc(e))
sys.stderr.flush()
self.log.exception('failed to handle {} {}'.format(request.method, request.url))
try:
return self.template_response(self.error_template_name, code=500)
except TemplateError as e:
sys.stderr.write('failed render the {}/{}'.format(self.template_folder, self.error_template_name))
sys.stderr.write(traceback.format_exc(e))
sys.stderr.flush()
return self.text_response('5ERV3R 3RR0R')
|
[
"def",
"handle_exception",
"(",
"self",
",",
"e",
")",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"p4rr0t007 handled an error:\"",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"traceback",
".",
"format_exc",
"(",
"e",
")",
")",
"sys",
".",
"stderr",
".",
"flush",
"(",
")",
"self",
".",
"log",
".",
"exception",
"(",
"'failed to handle {} {}'",
".",
"format",
"(",
"request",
".",
"method",
",",
"request",
".",
"url",
")",
")",
"try",
":",
"return",
"self",
".",
"template_response",
"(",
"self",
".",
"error_template_name",
",",
"code",
"=",
"500",
")",
"except",
"TemplateError",
"as",
"e",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'failed render the {}/{}'",
".",
"format",
"(",
"self",
".",
"template_folder",
",",
"self",
".",
"error_template_name",
")",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"traceback",
".",
"format_exc",
"(",
"e",
")",
")",
"sys",
".",
"stderr",
".",
"flush",
"(",
")",
"return",
"self",
".",
"text_response",
"(",
"'5ERV3R 3RR0R'",
")"
] |
called by flask when an exception happens. p4rr0t007 always returns
a 500.html response that must exist under the given
``template_folder`` constructor param.
|
[
"called",
"by",
"flask",
"when",
"an",
"exception",
"happens",
".",
"p4rr0t007",
"always",
"returns",
"a",
"500",
".",
"html",
"response",
"that",
"must",
"exist",
"under",
"the",
"given",
"template_folder",
"constructor",
"param",
"."
] |
6fe88ec1231a778b9f1d13bc61332581715d646e
|
https://github.com/c0ntrol-x/p4rr0t007/blob/6fe88ec1231a778b9f1d13bc61332581715d646e/p4rr0t007/web.py#L140-L157
|
245,980
|
rohithpr/py-timed-dialog
|
ptd/dialog.py
|
TimedDialog.button_input
|
def button_input(self, title, message, buttons, default, timeout=None, dimensions=None):
'''
Function to accept input in the form of a button click.
'''
# Create the dialog box
self.response = default
self.top = tkinter.Tk()
self.top.title(title)
# Use dimensions if passes
if dimensions is not None:
self.top.minsize(width=dimensions[0], height=dimensions[1])
self.top.maxsize(width=dimensions[0], height=dimensions[1])
# Display a message
labelString = tkinter.StringVar()
labelString.set(message)
label = tkinter.Label(self.top, textvariable=labelString, relief=tkinter.RAISED)
label.pack(ipadx=100, ipady=10)
# Populate dialog box with buttons
for key in buttons.keys():
button = tkinter.Button(self.top, text=buttons[key], command=lambda key=key: self.selected(key))
button.pack(fill='both', pady=5, padx=10)
# Destroy the dialog box if there has been no button click within the timeout period
if timeout != None:
try:
self.top.after(timeout, lambda: self.top.destroy())
except:
pass
self.top.mainloop()
return self.response
|
python
|
def button_input(self, title, message, buttons, default, timeout=None, dimensions=None):
'''
Function to accept input in the form of a button click.
'''
# Create the dialog box
self.response = default
self.top = tkinter.Tk()
self.top.title(title)
# Use dimensions if passes
if dimensions is not None:
self.top.minsize(width=dimensions[0], height=dimensions[1])
self.top.maxsize(width=dimensions[0], height=dimensions[1])
# Display a message
labelString = tkinter.StringVar()
labelString.set(message)
label = tkinter.Label(self.top, textvariable=labelString, relief=tkinter.RAISED)
label.pack(ipadx=100, ipady=10)
# Populate dialog box with buttons
for key in buttons.keys():
button = tkinter.Button(self.top, text=buttons[key], command=lambda key=key: self.selected(key))
button.pack(fill='both', pady=5, padx=10)
# Destroy the dialog box if there has been no button click within the timeout period
if timeout != None:
try:
self.top.after(timeout, lambda: self.top.destroy())
except:
pass
self.top.mainloop()
return self.response
|
[
"def",
"button_input",
"(",
"self",
",",
"title",
",",
"message",
",",
"buttons",
",",
"default",
",",
"timeout",
"=",
"None",
",",
"dimensions",
"=",
"None",
")",
":",
"# Create the dialog box",
"self",
".",
"response",
"=",
"default",
"self",
".",
"top",
"=",
"tkinter",
".",
"Tk",
"(",
")",
"self",
".",
"top",
".",
"title",
"(",
"title",
")",
"# Use dimensions if passes",
"if",
"dimensions",
"is",
"not",
"None",
":",
"self",
".",
"top",
".",
"minsize",
"(",
"width",
"=",
"dimensions",
"[",
"0",
"]",
",",
"height",
"=",
"dimensions",
"[",
"1",
"]",
")",
"self",
".",
"top",
".",
"maxsize",
"(",
"width",
"=",
"dimensions",
"[",
"0",
"]",
",",
"height",
"=",
"dimensions",
"[",
"1",
"]",
")",
"# Display a message",
"labelString",
"=",
"tkinter",
".",
"StringVar",
"(",
")",
"labelString",
".",
"set",
"(",
"message",
")",
"label",
"=",
"tkinter",
".",
"Label",
"(",
"self",
".",
"top",
",",
"textvariable",
"=",
"labelString",
",",
"relief",
"=",
"tkinter",
".",
"RAISED",
")",
"label",
".",
"pack",
"(",
"ipadx",
"=",
"100",
",",
"ipady",
"=",
"10",
")",
"# Populate dialog box with buttons",
"for",
"key",
"in",
"buttons",
".",
"keys",
"(",
")",
":",
"button",
"=",
"tkinter",
".",
"Button",
"(",
"self",
".",
"top",
",",
"text",
"=",
"buttons",
"[",
"key",
"]",
",",
"command",
"=",
"lambda",
"key",
"=",
"key",
":",
"self",
".",
"selected",
"(",
"key",
")",
")",
"button",
".",
"pack",
"(",
"fill",
"=",
"'both'",
",",
"pady",
"=",
"5",
",",
"padx",
"=",
"10",
")",
"# Destroy the dialog box if there has been no button click within the timeout period",
"if",
"timeout",
"!=",
"None",
":",
"try",
":",
"self",
".",
"top",
".",
"after",
"(",
"timeout",
",",
"lambda",
":",
"self",
".",
"top",
".",
"destroy",
"(",
")",
")",
"except",
":",
"pass",
"self",
".",
"top",
".",
"mainloop",
"(",
")",
"return",
"self",
".",
"response"
] |
Function to accept input in the form of a button click.
|
[
"Function",
"to",
"accept",
"input",
"in",
"the",
"form",
"of",
"a",
"button",
"click",
"."
] |
6ca2d8d3ea4da5bac016d4b2f56c3c3a822ecf56
|
https://github.com/rohithpr/py-timed-dialog/blob/6ca2d8d3ea4da5bac016d4b2f56c3c3a822ecf56/ptd/dialog.py#L25-L59
|
245,981
|
sysr-q/chrw
|
chrw/v1.py
|
rate_limited
|
def rate_limited(max_per_second):
""" Sort of based off of an answer about
rate limiting on Stack Overflow.
Definitely **not** thread safe, so
don't even think about it, buddy.
"""
import datetime
min_request_time = datetime.timedelta(seconds=max_per_second)
last_time_called = [None]
def decorate(func):
def rate_limited_function(*args, **kwargs):
if last_time_called[0]:
delta = datetime.datetime.now() - last_time_called[0]
if delta < datetime.timedelta.min:
raise chrw.exceptions.TimeIsBackToFront, "Call the Doc!"
elif delta < min_request_time:
msg = "Last request was {0}, should be at least {1}".format(
delta, min_request_time
)
raise chrw.exceptions.RequestRateTooHigh, msg
ret = func(*args, **kwargs)
last_time_called[0] = datetime.datetime.now()
return ret
return functools.update_wrapper(rate_limited_function, func)
return decorate
|
python
|
def rate_limited(max_per_second):
""" Sort of based off of an answer about
rate limiting on Stack Overflow.
Definitely **not** thread safe, so
don't even think about it, buddy.
"""
import datetime
min_request_time = datetime.timedelta(seconds=max_per_second)
last_time_called = [None]
def decorate(func):
def rate_limited_function(*args, **kwargs):
if last_time_called[0]:
delta = datetime.datetime.now() - last_time_called[0]
if delta < datetime.timedelta.min:
raise chrw.exceptions.TimeIsBackToFront, "Call the Doc!"
elif delta < min_request_time:
msg = "Last request was {0}, should be at least {1}".format(
delta, min_request_time
)
raise chrw.exceptions.RequestRateTooHigh, msg
ret = func(*args, **kwargs)
last_time_called[0] = datetime.datetime.now()
return ret
return functools.update_wrapper(rate_limited_function, func)
return decorate
|
[
"def",
"rate_limited",
"(",
"max_per_second",
")",
":",
"import",
"datetime",
"min_request_time",
"=",
"datetime",
".",
"timedelta",
"(",
"seconds",
"=",
"max_per_second",
")",
"last_time_called",
"=",
"[",
"None",
"]",
"def",
"decorate",
"(",
"func",
")",
":",
"def",
"rate_limited_function",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"last_time_called",
"[",
"0",
"]",
":",
"delta",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"-",
"last_time_called",
"[",
"0",
"]",
"if",
"delta",
"<",
"datetime",
".",
"timedelta",
".",
"min",
":",
"raise",
"chrw",
".",
"exceptions",
".",
"TimeIsBackToFront",
",",
"\"Call the Doc!\"",
"elif",
"delta",
"<",
"min_request_time",
":",
"msg",
"=",
"\"Last request was {0}, should be at least {1}\"",
".",
"format",
"(",
"delta",
",",
"min_request_time",
")",
"raise",
"chrw",
".",
"exceptions",
".",
"RequestRateTooHigh",
",",
"msg",
"ret",
"=",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"last_time_called",
"[",
"0",
"]",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
"return",
"ret",
"return",
"functools",
".",
"update_wrapper",
"(",
"rate_limited_function",
",",
"func",
")",
"return",
"decorate"
] |
Sort of based off of an answer about
rate limiting on Stack Overflow.
Definitely **not** thread safe, so
don't even think about it, buddy.
|
[
"Sort",
"of",
"based",
"off",
"of",
"an",
"answer",
"about",
"rate",
"limiting",
"on",
"Stack",
"Overflow",
"."
] |
071b53f5bd4fdea06599e6b95997db53ed930f7b
|
https://github.com/sysr-q/chrw/blob/071b53f5bd4fdea06599e6b95997db53ed930f7b/chrw/v1.py#L14-L39
|
245,982
|
sysr-q/chrw
|
chrw/v1.py
|
wrapper.shorten
|
def shorten(self, url, custom=None, give_delete=True):
"""
Sends a URL shorten request to the API.
:param url: the URL to shrink
:type url: str
:param custom: a custom URL to request
:type custom: str
:param give_delete: would we like a deletion key to be returned?
:type give_delete: bool
:return: the API response JSON dict
:rtype: dict
"""
data = self.fetch("/submit", {
"long": url,
"short": custom if custom else "",
"delete": "true" if give_delete else ""
})
return data
|
python
|
def shorten(self, url, custom=None, give_delete=True):
"""
Sends a URL shorten request to the API.
:param url: the URL to shrink
:type url: str
:param custom: a custom URL to request
:type custom: str
:param give_delete: would we like a deletion key to be returned?
:type give_delete: bool
:return: the API response JSON dict
:rtype: dict
"""
data = self.fetch("/submit", {
"long": url,
"short": custom if custom else "",
"delete": "true" if give_delete else ""
})
return data
|
[
"def",
"shorten",
"(",
"self",
",",
"url",
",",
"custom",
"=",
"None",
",",
"give_delete",
"=",
"True",
")",
":",
"data",
"=",
"self",
".",
"fetch",
"(",
"\"/submit\"",
",",
"{",
"\"long\"",
":",
"url",
",",
"\"short\"",
":",
"custom",
"if",
"custom",
"else",
"\"\"",
",",
"\"delete\"",
":",
"\"true\"",
"if",
"give_delete",
"else",
"\"\"",
"}",
")",
"return",
"data"
] |
Sends a URL shorten request to the API.
:param url: the URL to shrink
:type url: str
:param custom: a custom URL to request
:type custom: str
:param give_delete: would we like a deletion key to be returned?
:type give_delete: bool
:return: the API response JSON dict
:rtype: dict
|
[
"Sends",
"a",
"URL",
"shorten",
"request",
"to",
"the",
"API",
"."
] |
071b53f5bd4fdea06599e6b95997db53ed930f7b
|
https://github.com/sysr-q/chrw/blob/071b53f5bd4fdea06599e6b95997db53ed930f7b/chrw/v1.py#L78-L97
|
245,983
|
sysr-q/chrw
|
chrw/v1.py
|
wrapper.delete
|
def delete(self, url, code):
"""
Request a URL be deleted.
This will only work if you supply the valid deletion code.
:param url: the shortened url to delete
:type url: str
:param code: the deletion code given to you on URL shorten
:type code: str
:return: the deletion request's reply dict
:rtype: dict
"""
data = self.fetch("/delete", {
"short": url,
"delete": code
})
return data
|
python
|
def delete(self, url, code):
"""
Request a URL be deleted.
This will only work if you supply the valid deletion code.
:param url: the shortened url to delete
:type url: str
:param code: the deletion code given to you on URL shorten
:type code: str
:return: the deletion request's reply dict
:rtype: dict
"""
data = self.fetch("/delete", {
"short": url,
"delete": code
})
return data
|
[
"def",
"delete",
"(",
"self",
",",
"url",
",",
"code",
")",
":",
"data",
"=",
"self",
".",
"fetch",
"(",
"\"/delete\"",
",",
"{",
"\"short\"",
":",
"url",
",",
"\"delete\"",
":",
"code",
"}",
")",
"return",
"data"
] |
Request a URL be deleted.
This will only work if you supply the valid deletion code.
:param url: the shortened url to delete
:type url: str
:param code: the deletion code given to you on URL shorten
:type code: str
:return: the deletion request's reply dict
:rtype: dict
|
[
"Request",
"a",
"URL",
"be",
"deleted",
"."
] |
071b53f5bd4fdea06599e6b95997db53ed930f7b
|
https://github.com/sysr-q/chrw/blob/071b53f5bd4fdea06599e6b95997db53ed930f7b/chrw/v1.py#L114-L132
|
245,984
|
sysr-q/chrw
|
chrw/v1.py
|
wrapper.fetch
|
def fetch(self, url, pdata, store_to_self=True):
"""
This does the bulk of the work for the wrapper.
It will send a POST request, to the API URL, with
all required data, as well as the api_key given,
and will handle various replies, raising exceptions
as required.
:param url: the url segment to POST to (unbuilt url, e.g., /submit, /expand)
:type url: str
:param pdata: a dictionary of data to POST
:type pdata: dict
:param store_to_self: should we store the reply (if any) to self.reply?
:type store_to_self: bool
:return: the API reply data
:rtype: dict
:raises: chrw.exceptions.ApiDisabled,
chrw.exceptions.InvalidApiKey,
chrw.exceptions.PartialFormData,
chrw.exceptions.NonZeroException
"""
url = self.schema + '://' + self.base + url
post = dict(pdata.items() + {
"api_key": self.api_key
}.items())
self.post = post
res = requests.post(url, post, headers={"User-Agent": self.user_agent})
if self.require_200 and res.status_code != requests.codes.ok:
raise chrw.exceptions.RequestFailed, "Got HTTP reply {0}, needed {1}".format(res.status_code, requests.codes.ok)
if not res.json:
raise chrw.exceptions.InvalidDataReturned, "Invalid JSON data was returned"
if store_to_self:
self.reply = res.json
if res.json["enum"] == chrw.codes.api_disabled:
raise chrw.exceptions.ApiDisabled, res.json["message"]
elif res.json["enum"] == chrw.codes.no_such_key:
raise chrw.exceptions.InvalidApiKey, res.json["message"]
elif res.json["enum"] == chrw.codes.partial_form_data:
raise chrw.exceptions.PartialFormData, res.json["message"]
elif res.json["enum"] != chrw.codes.success:
__ = "Non-zero reply {0}: {1}".format(res.json["enum"], res.json["message"])
raise chrw.exceptions.NonZeroReply, __
return res.json
|
python
|
def fetch(self, url, pdata, store_to_self=True):
"""
This does the bulk of the work for the wrapper.
It will send a POST request, to the API URL, with
all required data, as well as the api_key given,
and will handle various replies, raising exceptions
as required.
:param url: the url segment to POST to (unbuilt url, e.g., /submit, /expand)
:type url: str
:param pdata: a dictionary of data to POST
:type pdata: dict
:param store_to_self: should we store the reply (if any) to self.reply?
:type store_to_self: bool
:return: the API reply data
:rtype: dict
:raises: chrw.exceptions.ApiDisabled,
chrw.exceptions.InvalidApiKey,
chrw.exceptions.PartialFormData,
chrw.exceptions.NonZeroException
"""
url = self.schema + '://' + self.base + url
post = dict(pdata.items() + {
"api_key": self.api_key
}.items())
self.post = post
res = requests.post(url, post, headers={"User-Agent": self.user_agent})
if self.require_200 and res.status_code != requests.codes.ok:
raise chrw.exceptions.RequestFailed, "Got HTTP reply {0}, needed {1}".format(res.status_code, requests.codes.ok)
if not res.json:
raise chrw.exceptions.InvalidDataReturned, "Invalid JSON data was returned"
if store_to_self:
self.reply = res.json
if res.json["enum"] == chrw.codes.api_disabled:
raise chrw.exceptions.ApiDisabled, res.json["message"]
elif res.json["enum"] == chrw.codes.no_such_key:
raise chrw.exceptions.InvalidApiKey, res.json["message"]
elif res.json["enum"] == chrw.codes.partial_form_data:
raise chrw.exceptions.PartialFormData, res.json["message"]
elif res.json["enum"] != chrw.codes.success:
__ = "Non-zero reply {0}: {1}".format(res.json["enum"], res.json["message"])
raise chrw.exceptions.NonZeroReply, __
return res.json
|
[
"def",
"fetch",
"(",
"self",
",",
"url",
",",
"pdata",
",",
"store_to_self",
"=",
"True",
")",
":",
"url",
"=",
"self",
".",
"schema",
"+",
"'://'",
"+",
"self",
".",
"base",
"+",
"url",
"post",
"=",
"dict",
"(",
"pdata",
".",
"items",
"(",
")",
"+",
"{",
"\"api_key\"",
":",
"self",
".",
"api_key",
"}",
".",
"items",
"(",
")",
")",
"self",
".",
"post",
"=",
"post",
"res",
"=",
"requests",
".",
"post",
"(",
"url",
",",
"post",
",",
"headers",
"=",
"{",
"\"User-Agent\"",
":",
"self",
".",
"user_agent",
"}",
")",
"if",
"self",
".",
"require_200",
"and",
"res",
".",
"status_code",
"!=",
"requests",
".",
"codes",
".",
"ok",
":",
"raise",
"chrw",
".",
"exceptions",
".",
"RequestFailed",
",",
"\"Got HTTP reply {0}, needed {1}\"",
".",
"format",
"(",
"res",
".",
"status_code",
",",
"requests",
".",
"codes",
".",
"ok",
")",
"if",
"not",
"res",
".",
"json",
":",
"raise",
"chrw",
".",
"exceptions",
".",
"InvalidDataReturned",
",",
"\"Invalid JSON data was returned\"",
"if",
"store_to_self",
":",
"self",
".",
"reply",
"=",
"res",
".",
"json",
"if",
"res",
".",
"json",
"[",
"\"enum\"",
"]",
"==",
"chrw",
".",
"codes",
".",
"api_disabled",
":",
"raise",
"chrw",
".",
"exceptions",
".",
"ApiDisabled",
",",
"res",
".",
"json",
"[",
"\"message\"",
"]",
"elif",
"res",
".",
"json",
"[",
"\"enum\"",
"]",
"==",
"chrw",
".",
"codes",
".",
"no_such_key",
":",
"raise",
"chrw",
".",
"exceptions",
".",
"InvalidApiKey",
",",
"res",
".",
"json",
"[",
"\"message\"",
"]",
"elif",
"res",
".",
"json",
"[",
"\"enum\"",
"]",
"==",
"chrw",
".",
"codes",
".",
"partial_form_data",
":",
"raise",
"chrw",
".",
"exceptions",
".",
"PartialFormData",
",",
"res",
".",
"json",
"[",
"\"message\"",
"]",
"elif",
"res",
".",
"json",
"[",
"\"enum\"",
"]",
"!=",
"chrw",
".",
"codes",
".",
"success",
":",
"__",
"=",
"\"Non-zero reply {0}: {1}\"",
".",
"format",
"(",
"res",
".",
"json",
"[",
"\"enum\"",
"]",
",",
"res",
".",
"json",
"[",
"\"message\"",
"]",
")",
"raise",
"chrw",
".",
"exceptions",
".",
"NonZeroReply",
",",
"__",
"return",
"res",
".",
"json"
] |
This does the bulk of the work for the wrapper.
It will send a POST request, to the API URL, with
all required data, as well as the api_key given,
and will handle various replies, raising exceptions
as required.
:param url: the url segment to POST to (unbuilt url, e.g., /submit, /expand)
:type url: str
:param pdata: a dictionary of data to POST
:type pdata: dict
:param store_to_self: should we store the reply (if any) to self.reply?
:type store_to_self: bool
:return: the API reply data
:rtype: dict
:raises: chrw.exceptions.ApiDisabled,
chrw.exceptions.InvalidApiKey,
chrw.exceptions.PartialFormData,
chrw.exceptions.NonZeroException
|
[
"This",
"does",
"the",
"bulk",
"of",
"the",
"work",
"for",
"the",
"wrapper",
"."
] |
071b53f5bd4fdea06599e6b95997db53ed930f7b
|
https://github.com/sysr-q/chrw/blob/071b53f5bd4fdea06599e6b95997db53ed930f7b/chrw/v1.py#L151-L198
|
245,985
|
Ceasar/easywatch
|
easywatch/easywatch.py
|
watch
|
def watch(path, handler):
"""Watch a directory for events.
- path should be the directory to watch
- handler should a function which takes an event_type and src_path
and does something interesting. event_type will be one of 'created',
'deleted', 'modified', or 'moved'. src_path will be the absolute
path to the file that triggered the event.
"""
# let the user just deal with events
@functools.wraps(handler)
def wrapper(self, event):
if not event.is_directory:
return handler(event.event_type, event.src_path)
attrs = {'on_any_event': wrapper}
EventHandler = type("EventHandler", (FileSystemEventHandler,), attrs)
observer = Observer()
observer.schedule(EventHandler(), path=path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
|
python
|
def watch(path, handler):
"""Watch a directory for events.
- path should be the directory to watch
- handler should a function which takes an event_type and src_path
and does something interesting. event_type will be one of 'created',
'deleted', 'modified', or 'moved'. src_path will be the absolute
path to the file that triggered the event.
"""
# let the user just deal with events
@functools.wraps(handler)
def wrapper(self, event):
if not event.is_directory:
return handler(event.event_type, event.src_path)
attrs = {'on_any_event': wrapper}
EventHandler = type("EventHandler", (FileSystemEventHandler,), attrs)
observer = Observer()
observer.schedule(EventHandler(), path=path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
|
[
"def",
"watch",
"(",
"path",
",",
"handler",
")",
":",
"# let the user just deal with events",
"@",
"functools",
".",
"wraps",
"(",
"handler",
")",
"def",
"wrapper",
"(",
"self",
",",
"event",
")",
":",
"if",
"not",
"event",
".",
"is_directory",
":",
"return",
"handler",
"(",
"event",
".",
"event_type",
",",
"event",
".",
"src_path",
")",
"attrs",
"=",
"{",
"'on_any_event'",
":",
"wrapper",
"}",
"EventHandler",
"=",
"type",
"(",
"\"EventHandler\"",
",",
"(",
"FileSystemEventHandler",
",",
")",
",",
"attrs",
")",
"observer",
"=",
"Observer",
"(",
")",
"observer",
".",
"schedule",
"(",
"EventHandler",
"(",
")",
",",
"path",
"=",
"path",
",",
"recursive",
"=",
"True",
")",
"observer",
".",
"start",
"(",
")",
"try",
":",
"while",
"True",
":",
"time",
".",
"sleep",
"(",
"1",
")",
"except",
"KeyboardInterrupt",
":",
"observer",
".",
"stop",
"(",
")",
"observer",
".",
"join",
"(",
")"
] |
Watch a directory for events.
- path should be the directory to watch
- handler should a function which takes an event_type and src_path
and does something interesting. event_type will be one of 'created',
'deleted', 'modified', or 'moved'. src_path will be the absolute
path to the file that triggered the event.
|
[
"Watch",
"a",
"directory",
"for",
"events",
".",
"-",
"path",
"should",
"be",
"the",
"directory",
"to",
"watch",
"-",
"handler",
"should",
"a",
"function",
"which",
"takes",
"an",
"event_type",
"and",
"src_path",
"and",
"does",
"something",
"interesting",
".",
"event_type",
"will",
"be",
"one",
"of",
"created",
"deleted",
"modified",
"or",
"moved",
".",
"src_path",
"will",
"be",
"the",
"absolute",
"path",
"to",
"the",
"file",
"that",
"triggered",
"the",
"event",
"."
] |
1dd464d2acca5932473759b187dec4eb63dab2d9
|
https://github.com/Ceasar/easywatch/blob/1dd464d2acca5932473759b187dec4eb63dab2d9/easywatch/easywatch.py#L13-L36
|
245,986
|
cfobel/clutter-webcam-viewer
|
clutter_webcam_viewer/warp.py
|
WarpActor.rotate
|
def rotate(self, shift):
'''
Rotate 90 degrees clockwise `shift` times. If `shift` is negative,
rotate counter-clockwise.
'''
self.child_corners.values[:] = np.roll(self.child_corners
.values, shift, axis=0)
self.update_transform()
|
python
|
def rotate(self, shift):
'''
Rotate 90 degrees clockwise `shift` times. If `shift` is negative,
rotate counter-clockwise.
'''
self.child_corners.values[:] = np.roll(self.child_corners
.values, shift, axis=0)
self.update_transform()
|
[
"def",
"rotate",
"(",
"self",
",",
"shift",
")",
":",
"self",
".",
"child_corners",
".",
"values",
"[",
":",
"]",
"=",
"np",
".",
"roll",
"(",
"self",
".",
"child_corners",
".",
"values",
",",
"shift",
",",
"axis",
"=",
"0",
")",
"self",
".",
"update_transform",
"(",
")"
] |
Rotate 90 degrees clockwise `shift` times. If `shift` is negative,
rotate counter-clockwise.
|
[
"Rotate",
"90",
"degrees",
"clockwise",
"shift",
"times",
".",
"If",
"shift",
"is",
"negative",
"rotate",
"counter",
"-",
"clockwise",
"."
] |
b227d2ae02d750194e65c13bcf178550755c3afc
|
https://github.com/cfobel/clutter-webcam-viewer/blob/b227d2ae02d750194e65c13bcf178550755c3afc/clutter_webcam_viewer/warp.py#L109-L116
|
245,987
|
ymotongpoo/pysuddendeath
|
suddendeath/__init__.py
|
message_length
|
def message_length(message):
'''
message_length returns visual length of message.
Ascii chars are counted as 1, non-asciis are 2.
:param str message: random unicode mixed text
:rtype: int
'''
length = 0
for char in map(east_asian_width, message):
if char == 'W':
length += 2
elif char == 'Na':
length += 1
return length
|
python
|
def message_length(message):
'''
message_length returns visual length of message.
Ascii chars are counted as 1, non-asciis are 2.
:param str message: random unicode mixed text
:rtype: int
'''
length = 0
for char in map(east_asian_width, message):
if char == 'W':
length += 2
elif char == 'Na':
length += 1
return length
|
[
"def",
"message_length",
"(",
"message",
")",
":",
"length",
"=",
"0",
"for",
"char",
"in",
"map",
"(",
"east_asian_width",
",",
"message",
")",
":",
"if",
"char",
"==",
"'W'",
":",
"length",
"+=",
"2",
"elif",
"char",
"==",
"'Na'",
":",
"length",
"+=",
"1",
"return",
"length"
] |
message_length returns visual length of message.
Ascii chars are counted as 1, non-asciis are 2.
:param str message: random unicode mixed text
:rtype: int
|
[
"message_length",
"returns",
"visual",
"length",
"of",
"message",
".",
"Ascii",
"chars",
"are",
"counted",
"as",
"1",
"non",
"-",
"asciis",
"are",
"2",
"."
] |
2ce26d3229e60ce1f1fd5f032a7b0512dec25c5a
|
https://github.com/ymotongpoo/pysuddendeath/blob/2ce26d3229e60ce1f1fd5f032a7b0512dec25c5a/suddendeath/__init__.py#L32-L47
|
245,988
|
openpermissions/perch
|
perch/migrate.py
|
migration
|
def migration(resource, version, previous_version=''):
"""Register a migration function"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
migrated = func(*args, **kwargs)
return migrated
m = Migration(wrapper, resource, version, previous_version)
m.register()
return m
return decorator
|
python
|
def migration(resource, version, previous_version=''):
"""Register a migration function"""
def decorator(func):
@wraps(func)
def wrapper(*args, **kwargs):
migrated = func(*args, **kwargs)
return migrated
m = Migration(wrapper, resource, version, previous_version)
m.register()
return m
return decorator
|
[
"def",
"migration",
"(",
"resource",
",",
"version",
",",
"previous_version",
"=",
"''",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"@",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"migrated",
"=",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"migrated",
"m",
"=",
"Migration",
"(",
"wrapper",
",",
"resource",
",",
"version",
",",
"previous_version",
")",
"m",
".",
"register",
"(",
")",
"return",
"m",
"return",
"decorator"
] |
Register a migration function
|
[
"Register",
"a",
"migration",
"function"
] |
36d78994133918f3c52c187f19e50132960a0156
|
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/migrate.py#L101-L115
|
245,989
|
openpermissions/perch
|
perch/migrate.py
|
create
|
def create(resource_path, previous_version=None, package='perch.migrations'):
"""Create a new migration"""
pkg, obj = resource_path.rsplit('.', 1)
module = importlib.import_module(pkg)
resource = getattr(module, obj)
version = uuid4().hex
target_module = importlib.import_module(package)
target_dir = os.path.dirname(target_module.__file__)
target_file = os.path.join(target_dir, resource.resource_type + '_' + version + '.py')
with open(target_file, 'w') as f:
f.write(MIGRATION_TEMPLATE.format(
resource_path=resource_path,
resource_type=resource.resource_type,
version=version,
previous_version=previous_version or '',
))
return target_file
|
python
|
def create(resource_path, previous_version=None, package='perch.migrations'):
"""Create a new migration"""
pkg, obj = resource_path.rsplit('.', 1)
module = importlib.import_module(pkg)
resource = getattr(module, obj)
version = uuid4().hex
target_module = importlib.import_module(package)
target_dir = os.path.dirname(target_module.__file__)
target_file = os.path.join(target_dir, resource.resource_type + '_' + version + '.py')
with open(target_file, 'w') as f:
f.write(MIGRATION_TEMPLATE.format(
resource_path=resource_path,
resource_type=resource.resource_type,
version=version,
previous_version=previous_version or '',
))
return target_file
|
[
"def",
"create",
"(",
"resource_path",
",",
"previous_version",
"=",
"None",
",",
"package",
"=",
"'perch.migrations'",
")",
":",
"pkg",
",",
"obj",
"=",
"resource_path",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"module",
"=",
"importlib",
".",
"import_module",
"(",
"pkg",
")",
"resource",
"=",
"getattr",
"(",
"module",
",",
"obj",
")",
"version",
"=",
"uuid4",
"(",
")",
".",
"hex",
"target_module",
"=",
"importlib",
".",
"import_module",
"(",
"package",
")",
"target_dir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"target_module",
".",
"__file__",
")",
"target_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"target_dir",
",",
"resource",
".",
"resource_type",
"+",
"'_'",
"+",
"version",
"+",
"'.py'",
")",
"with",
"open",
"(",
"target_file",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"MIGRATION_TEMPLATE",
".",
"format",
"(",
"resource_path",
"=",
"resource_path",
",",
"resource_type",
"=",
"resource",
".",
"resource_type",
",",
"version",
"=",
"version",
",",
"previous_version",
"=",
"previous_version",
"or",
"''",
",",
")",
")",
"return",
"target_file"
] |
Create a new migration
|
[
"Create",
"a",
"new",
"migration"
] |
36d78994133918f3c52c187f19e50132960a0156
|
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/migrate.py#L118-L136
|
245,990
|
openpermissions/perch
|
perch/migrate.py
|
collect
|
def collect(package='perch.migrations'):
"""
Import all modules inside the perch.migrations package and return the
registered migrations
"""
package = importlib.import_module(package)
for loader, name, is_pkg in pkgutil.walk_packages(package.__path__):
importlib.import_module(package.__name__ + '.' + name)
return _migrations
|
python
|
def collect(package='perch.migrations'):
"""
Import all modules inside the perch.migrations package and return the
registered migrations
"""
package = importlib.import_module(package)
for loader, name, is_pkg in pkgutil.walk_packages(package.__path__):
importlib.import_module(package.__name__ + '.' + name)
return _migrations
|
[
"def",
"collect",
"(",
"package",
"=",
"'perch.migrations'",
")",
":",
"package",
"=",
"importlib",
".",
"import_module",
"(",
"package",
")",
"for",
"loader",
",",
"name",
",",
"is_pkg",
"in",
"pkgutil",
".",
"walk_packages",
"(",
"package",
".",
"__path__",
")",
":",
"importlib",
".",
"import_module",
"(",
"package",
".",
"__name__",
"+",
"'.'",
"+",
"name",
")",
"return",
"_migrations"
] |
Import all modules inside the perch.migrations package and return the
registered migrations
|
[
"Import",
"all",
"modules",
"inside",
"the",
"perch",
".",
"migrations",
"package",
"and",
"return",
"the",
"registered",
"migrations"
] |
36d78994133918f3c52c187f19e50132960a0156
|
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/migrate.py#L139-L149
|
245,991
|
openpermissions/perch
|
perch/migrate.py
|
run_migrations
|
def run_migrations(migrations):
"""
Run migrations for a resource type
:param: a dicitionary of migrations
"""
for resource, resource_migrations in migrations.items():
for version in resource_migrations:
to_migrate = yield resource_version.get(
key=[resource.resource_type, version],
include_docs=True)
for x in to_migrate['rows']:
instance = resource(**x['doc'])
instance = _migrate_resource(
instance,
resource_migrations,
version
)
yield instance._save()
|
python
|
def run_migrations(migrations):
"""
Run migrations for a resource type
:param: a dicitionary of migrations
"""
for resource, resource_migrations in migrations.items():
for version in resource_migrations:
to_migrate = yield resource_version.get(
key=[resource.resource_type, version],
include_docs=True)
for x in to_migrate['rows']:
instance = resource(**x['doc'])
instance = _migrate_resource(
instance,
resource_migrations,
version
)
yield instance._save()
|
[
"def",
"run_migrations",
"(",
"migrations",
")",
":",
"for",
"resource",
",",
"resource_migrations",
"in",
"migrations",
".",
"items",
"(",
")",
":",
"for",
"version",
"in",
"resource_migrations",
":",
"to_migrate",
"=",
"yield",
"resource_version",
".",
"get",
"(",
"key",
"=",
"[",
"resource",
".",
"resource_type",
",",
"version",
"]",
",",
"include_docs",
"=",
"True",
")",
"for",
"x",
"in",
"to_migrate",
"[",
"'rows'",
"]",
":",
"instance",
"=",
"resource",
"(",
"*",
"*",
"x",
"[",
"'doc'",
"]",
")",
"instance",
"=",
"_migrate_resource",
"(",
"instance",
",",
"resource_migrations",
",",
"version",
")",
"yield",
"instance",
".",
"_save",
"(",
")"
] |
Run migrations for a resource type
:param: a dicitionary of migrations
|
[
"Run",
"migrations",
"for",
"a",
"resource",
"type"
] |
36d78994133918f3c52c187f19e50132960a0156
|
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/migrate.py#L153-L173
|
245,992
|
openpermissions/perch
|
perch/migrate.py
|
_migrate_resource
|
def _migrate_resource(instance, migrations, version=''):
"""
Migrate a resource instance
Subresources are migrated first, then the resource is recursively migrated
:param instance: a perch.Document instance
:param migrations: the migrations for a resource
:param version: the current resource version to migrate
"""
if version not in migrations:
return instance
instance = _migrate_subresources(
instance,
migrations[version]['subresources']
)
for migration in migrations[version]['migrations']:
instance = migration(instance)
instance._resource['doc_version'] = unicode(migration.version)
instance = _migrate_resource(
instance,
migrations,
version=migration.version
)
return instance
|
python
|
def _migrate_resource(instance, migrations, version=''):
"""
Migrate a resource instance
Subresources are migrated first, then the resource is recursively migrated
:param instance: a perch.Document instance
:param migrations: the migrations for a resource
:param version: the current resource version to migrate
"""
if version not in migrations:
return instance
instance = _migrate_subresources(
instance,
migrations[version]['subresources']
)
for migration in migrations[version]['migrations']:
instance = migration(instance)
instance._resource['doc_version'] = unicode(migration.version)
instance = _migrate_resource(
instance,
migrations,
version=migration.version
)
return instance
|
[
"def",
"_migrate_resource",
"(",
"instance",
",",
"migrations",
",",
"version",
"=",
"''",
")",
":",
"if",
"version",
"not",
"in",
"migrations",
":",
"return",
"instance",
"instance",
"=",
"_migrate_subresources",
"(",
"instance",
",",
"migrations",
"[",
"version",
"]",
"[",
"'subresources'",
"]",
")",
"for",
"migration",
"in",
"migrations",
"[",
"version",
"]",
"[",
"'migrations'",
"]",
":",
"instance",
"=",
"migration",
"(",
"instance",
")",
"instance",
".",
"_resource",
"[",
"'doc_version'",
"]",
"=",
"unicode",
"(",
"migration",
".",
"version",
")",
"instance",
"=",
"_migrate_resource",
"(",
"instance",
",",
"migrations",
",",
"version",
"=",
"migration",
".",
"version",
")",
"return",
"instance"
] |
Migrate a resource instance
Subresources are migrated first, then the resource is recursively migrated
:param instance: a perch.Document instance
:param migrations: the migrations for a resource
:param version: the current resource version to migrate
|
[
"Migrate",
"a",
"resource",
"instance"
] |
36d78994133918f3c52c187f19e50132960a0156
|
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/migrate.py#L176-L204
|
245,993
|
openpermissions/perch
|
perch/migrate.py
|
_migrate_subresources
|
def _migrate_subresources(parent, migrations):
"""
Migrate a resource's subresources
:param parent: the parent perch.Document instance
:param migrations: the migrations for a resource
"""
for subresource, resource_migrations in migrations.items():
parent = _migrate_subresource(
subresource,
parent,
resource_migrations
)
return parent
|
python
|
def _migrate_subresources(parent, migrations):
"""
Migrate a resource's subresources
:param parent: the parent perch.Document instance
:param migrations: the migrations for a resource
"""
for subresource, resource_migrations in migrations.items():
parent = _migrate_subresource(
subresource,
parent,
resource_migrations
)
return parent
|
[
"def",
"_migrate_subresources",
"(",
"parent",
",",
"migrations",
")",
":",
"for",
"subresource",
",",
"resource_migrations",
"in",
"migrations",
".",
"items",
"(",
")",
":",
"parent",
"=",
"_migrate_subresource",
"(",
"subresource",
",",
"parent",
",",
"resource_migrations",
")",
"return",
"parent"
] |
Migrate a resource's subresources
:param parent: the parent perch.Document instance
:param migrations: the migrations for a resource
|
[
"Migrate",
"a",
"resource",
"s",
"subresources"
] |
36d78994133918f3c52c187f19e50132960a0156
|
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/migrate.py#L207-L221
|
245,994
|
openpermissions/perch
|
perch/migrate.py
|
_migrate_subresource
|
def _migrate_subresource(subresource, parent, migrations):
"""
Migrate a resource's subresource
:param subresource: the perch.SubResource instance
:param parent: the parent perch.Document instance
:param migrations: the migrations for a resource
"""
for key, doc in getattr(parent, subresource.parent_key, {}).items():
for migration in migrations['migrations']:
instance = migration(subresource(id=key, **doc))
parent._resource['doc_version'] = unicode(migration.version)
instance = _migrate_subresources(
instance,
migrations['subresources']
)
doc = instance._resource
doc.pop('id', None)
doc.pop(instance.resource_type + '_id', None)
getattr(parent, subresource.parent_key)[key] = doc
return parent
|
python
|
def _migrate_subresource(subresource, parent, migrations):
"""
Migrate a resource's subresource
:param subresource: the perch.SubResource instance
:param parent: the parent perch.Document instance
:param migrations: the migrations for a resource
"""
for key, doc in getattr(parent, subresource.parent_key, {}).items():
for migration in migrations['migrations']:
instance = migration(subresource(id=key, **doc))
parent._resource['doc_version'] = unicode(migration.version)
instance = _migrate_subresources(
instance,
migrations['subresources']
)
doc = instance._resource
doc.pop('id', None)
doc.pop(instance.resource_type + '_id', None)
getattr(parent, subresource.parent_key)[key] = doc
return parent
|
[
"def",
"_migrate_subresource",
"(",
"subresource",
",",
"parent",
",",
"migrations",
")",
":",
"for",
"key",
",",
"doc",
"in",
"getattr",
"(",
"parent",
",",
"subresource",
".",
"parent_key",
",",
"{",
"}",
")",
".",
"items",
"(",
")",
":",
"for",
"migration",
"in",
"migrations",
"[",
"'migrations'",
"]",
":",
"instance",
"=",
"migration",
"(",
"subresource",
"(",
"id",
"=",
"key",
",",
"*",
"*",
"doc",
")",
")",
"parent",
".",
"_resource",
"[",
"'doc_version'",
"]",
"=",
"unicode",
"(",
"migration",
".",
"version",
")",
"instance",
"=",
"_migrate_subresources",
"(",
"instance",
",",
"migrations",
"[",
"'subresources'",
"]",
")",
"doc",
"=",
"instance",
".",
"_resource",
"doc",
".",
"pop",
"(",
"'id'",
",",
"None",
")",
"doc",
".",
"pop",
"(",
"instance",
".",
"resource_type",
"+",
"'_id'",
",",
"None",
")",
"getattr",
"(",
"parent",
",",
"subresource",
".",
"parent_key",
")",
"[",
"key",
"]",
"=",
"doc",
"return",
"parent"
] |
Migrate a resource's subresource
:param subresource: the perch.SubResource instance
:param parent: the parent perch.Document instance
:param migrations: the migrations for a resource
|
[
"Migrate",
"a",
"resource",
"s",
"subresource"
] |
36d78994133918f3c52c187f19e50132960a0156
|
https://github.com/openpermissions/perch/blob/36d78994133918f3c52c187f19e50132960a0156/perch/migrate.py#L224-L248
|
245,995
|
msfrank/cifparser
|
cifparser/parser.py
|
ParserContext.pop_frame
|
def pop_frame(self):
"""
Remove and return the frame at the top of the stack.
:returns: The top frame
:rtype: Frame
:raises Exception: If there are no frames on the stack
"""
self.frames.pop(0)
if len(self.frames) == 0:
raise Exception("stack is exhausted")
return self.frames[0]
|
python
|
def pop_frame(self):
"""
Remove and return the frame at the top of the stack.
:returns: The top frame
:rtype: Frame
:raises Exception: If there are no frames on the stack
"""
self.frames.pop(0)
if len(self.frames) == 0:
raise Exception("stack is exhausted")
return self.frames[0]
|
[
"def",
"pop_frame",
"(",
"self",
")",
":",
"self",
".",
"frames",
".",
"pop",
"(",
"0",
")",
"if",
"len",
"(",
"self",
".",
"frames",
")",
"==",
"0",
":",
"raise",
"Exception",
"(",
"\"stack is exhausted\"",
")",
"return",
"self",
".",
"frames",
"[",
"0",
"]"
] |
Remove and return the frame at the top of the stack.
:returns: The top frame
:rtype: Frame
:raises Exception: If there are no frames on the stack
|
[
"Remove",
"and",
"return",
"the",
"frame",
"at",
"the",
"top",
"of",
"the",
"stack",
"."
] |
ecd899ba2e7b990e2cec62b115742d830e7e4384
|
https://github.com/msfrank/cifparser/blob/ecd899ba2e7b990e2cec62b115742d830e7e4384/cifparser/parser.py#L191-L202
|
245,996
|
tylerbutler/propane
|
propane/importing.py
|
dict_to_querystring
|
def dict_to_querystring(dictionary):
"""Converts a dict to a querystring suitable to be appended to a URL."""
s = u""
for d in dictionary.keys():
s = unicode.format(u"{0}{1}={2}&", s, d, dictionary[d])
return s[:-1]
|
python
|
def dict_to_querystring(dictionary):
"""Converts a dict to a querystring suitable to be appended to a URL."""
s = u""
for d in dictionary.keys():
s = unicode.format(u"{0}{1}={2}&", s, d, dictionary[d])
return s[:-1]
|
[
"def",
"dict_to_querystring",
"(",
"dictionary",
")",
":",
"s",
"=",
"u\"\"",
"for",
"d",
"in",
"dictionary",
".",
"keys",
"(",
")",
":",
"s",
"=",
"unicode",
".",
"format",
"(",
"u\"{0}{1}={2}&\"",
",",
"s",
",",
"d",
",",
"dictionary",
"[",
"d",
"]",
")",
"return",
"s",
"[",
":",
"-",
"1",
"]"
] |
Converts a dict to a querystring suitable to be appended to a URL.
|
[
"Converts",
"a",
"dict",
"to",
"a",
"querystring",
"suitable",
"to",
"be",
"appended",
"to",
"a",
"URL",
"."
] |
6c404285ab8d78865b7175a5c8adf8fae12d6be5
|
https://github.com/tylerbutler/propane/blob/6c404285ab8d78865b7175a5c8adf8fae12d6be5/propane/importing.py#L22-L27
|
245,997
|
nickmilon/Hellas
|
Hellas/Delphi.py
|
visualise
|
def visualise(seq, sort=lambda x: x[0]):
"""visualises as seq or dictionary"""
frmt = "{:6} {:8,d} {}"
if isinstance(seq, dict):
seq = seq.items()
if sort:
seq = sorted(seq, key=sort)
mx, mn = max([i[1] for i in seq]), min([i[1] for i in seq])
range = mx - mn
for i in seq:
v = int((i[1] * 100) / range)
print (frmt.format(i[0], i[1], "*" * v))
|
python
|
def visualise(seq, sort=lambda x: x[0]):
"""visualises as seq or dictionary"""
frmt = "{:6} {:8,d} {}"
if isinstance(seq, dict):
seq = seq.items()
if sort:
seq = sorted(seq, key=sort)
mx, mn = max([i[1] for i in seq]), min([i[1] for i in seq])
range = mx - mn
for i in seq:
v = int((i[1] * 100) / range)
print (frmt.format(i[0], i[1], "*" * v))
|
[
"def",
"visualise",
"(",
"seq",
",",
"sort",
"=",
"lambda",
"x",
":",
"x",
"[",
"0",
"]",
")",
":",
"frmt",
"=",
"\"{:6} {:8,d} {}\"",
"if",
"isinstance",
"(",
"seq",
",",
"dict",
")",
":",
"seq",
"=",
"seq",
".",
"items",
"(",
")",
"if",
"sort",
":",
"seq",
"=",
"sorted",
"(",
"seq",
",",
"key",
"=",
"sort",
")",
"mx",
",",
"mn",
"=",
"max",
"(",
"[",
"i",
"[",
"1",
"]",
"for",
"i",
"in",
"seq",
"]",
")",
",",
"min",
"(",
"[",
"i",
"[",
"1",
"]",
"for",
"i",
"in",
"seq",
"]",
")",
"range",
"=",
"mx",
"-",
"mn",
"for",
"i",
"in",
"seq",
":",
"v",
"=",
"int",
"(",
"(",
"i",
"[",
"1",
"]",
"*",
"100",
")",
"/",
"range",
")",
"print",
"(",
"frmt",
".",
"format",
"(",
"i",
"[",
"0",
"]",
",",
"i",
"[",
"1",
"]",
",",
"\"*\"",
"*",
"v",
")",
")"
] |
visualises as seq or dictionary
|
[
"visualises",
"as",
"seq",
"or",
"dictionary"
] |
542e4778692fbec90753942946f20100412ec9ee
|
https://github.com/nickmilon/Hellas/blob/542e4778692fbec90753942946f20100412ec9ee/Hellas/Delphi.py#L269-L280
|
245,998
|
nickmilon/Hellas
|
Hellas/Delphi.py
|
Color.help
|
def help(cls):
"""prints named colors"""
print("for named colors use :")
for c in sorted(list(cls.colors.items())):
print("{:10} {}".format(*c))
|
python
|
def help(cls):
"""prints named colors"""
print("for named colors use :")
for c in sorted(list(cls.colors.items())):
print("{:10} {}".format(*c))
|
[
"def",
"help",
"(",
"cls",
")",
":",
"print",
"(",
"\"for named colors use :\"",
")",
"for",
"c",
"in",
"sorted",
"(",
"list",
"(",
"cls",
".",
"colors",
".",
"items",
"(",
")",
")",
")",
":",
"print",
"(",
"\"{:10} {}\"",
".",
"format",
"(",
"*",
"c",
")",
")"
] |
prints named colors
|
[
"prints",
"named",
"colors"
] |
542e4778692fbec90753942946f20100412ec9ee
|
https://github.com/nickmilon/Hellas/blob/542e4778692fbec90753942946f20100412ec9ee/Hellas/Delphi.py#L47-L51
|
245,999
|
nickmilon/Hellas
|
Hellas/Delphi.py
|
Color.printc
|
def printc(cls, txt, color=colors.red):
"""Print in color."""
print(cls.color_txt(txt, color))
|
python
|
def printc(cls, txt, color=colors.red):
"""Print in color."""
print(cls.color_txt(txt, color))
|
[
"def",
"printc",
"(",
"cls",
",",
"txt",
",",
"color",
"=",
"colors",
".",
"red",
")",
":",
"print",
"(",
"cls",
".",
"color_txt",
"(",
"txt",
",",
"color",
")",
")"
] |
Print in color.
|
[
"Print",
"in",
"color",
"."
] |
542e4778692fbec90753942946f20100412ec9ee
|
https://github.com/nickmilon/Hellas/blob/542e4778692fbec90753942946f20100412ec9ee/Hellas/Delphi.py#L73-L75
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.