repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1
value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
T-002/pycast | pycast/optimization/gridsearch.py | GridSearch.optimize_forecasting_method | def optimize_forecasting_method(self, timeSeries, forecastingMethod):
"""Optimizes the parameters for the given timeSeries and forecastingMethod.
:param TimeSeries timeSeries: TimeSeries instance, containing hte original data.
:param BaseForecastingMethod forecastingMethod: ForecastingMethod that is used to optimize the parameters.
:return: Returns a tuple containing only the smallest BaseErrorMeasure instance as defined in
:py:meth:`BaseOptimizationMethod.__init__` and the forecastingMethods parameter.
:rtype: tuple
"""
tuneableParameters = forecastingMethod.get_optimizable_parameters()
remainingParameters = []
for tuneableParameter in tuneableParameters:
remainingParameters.append([tuneableParameter, [item for item in self._generate_next_parameter_value(tuneableParameter, forecastingMethod)]])
# Collect the forecasting results
forecastingResults = self.optimization_loop(timeSeries, forecastingMethod, remainingParameters)
# Debugging GridSearchTest.inner_optimization_result_test
#print ""
#print "GridSearch"
#print "Instance / SMAPE / Alpha"
#for item in forecastingResults:
# print "%s / %s / %s" % (
# str(item[0])[-12:-1],
# str(item[0].get_error(self._startingPercentage, self._endPercentage))[:8],
# item[1]["smoothingFactor"]
#)
#print ""
# Collect the parameters that resulted in the smallest error
bestForecastingResult = min(forecastingResults, key=lambda item: item[0].get_error(self._startingPercentage, self._endPercentage))
# return the determined parameters
return bestForecastingResult | python | def optimize_forecasting_method(self, timeSeries, forecastingMethod):
"""Optimizes the parameters for the given timeSeries and forecastingMethod.
:param TimeSeries timeSeries: TimeSeries instance, containing hte original data.
:param BaseForecastingMethod forecastingMethod: ForecastingMethod that is used to optimize the parameters.
:return: Returns a tuple containing only the smallest BaseErrorMeasure instance as defined in
:py:meth:`BaseOptimizationMethod.__init__` and the forecastingMethods parameter.
:rtype: tuple
"""
tuneableParameters = forecastingMethod.get_optimizable_parameters()
remainingParameters = []
for tuneableParameter in tuneableParameters:
remainingParameters.append([tuneableParameter, [item for item in self._generate_next_parameter_value(tuneableParameter, forecastingMethod)]])
# Collect the forecasting results
forecastingResults = self.optimization_loop(timeSeries, forecastingMethod, remainingParameters)
# Debugging GridSearchTest.inner_optimization_result_test
#print ""
#print "GridSearch"
#print "Instance / SMAPE / Alpha"
#for item in forecastingResults:
# print "%s / %s / %s" % (
# str(item[0])[-12:-1],
# str(item[0].get_error(self._startingPercentage, self._endPercentage))[:8],
# item[1]["smoothingFactor"]
#)
#print ""
# Collect the parameters that resulted in the smallest error
bestForecastingResult = min(forecastingResults, key=lambda item: item[0].get_error(self._startingPercentage, self._endPercentage))
# return the determined parameters
return bestForecastingResult | [
"def",
"optimize_forecasting_method",
"(",
"self",
",",
"timeSeries",
",",
"forecastingMethod",
")",
":",
"tuneableParameters",
"=",
"forecastingMethod",
".",
"get_optimizable_parameters",
"(",
")",
"remainingParameters",
"=",
"[",
"]",
"for",
"tuneableParameter",
"in",... | Optimizes the parameters for the given timeSeries and forecastingMethod.
:param TimeSeries timeSeries: TimeSeries instance, containing hte original data.
:param BaseForecastingMethod forecastingMethod: ForecastingMethod that is used to optimize the parameters.
:return: Returns a tuple containing only the smallest BaseErrorMeasure instance as defined in
:py:meth:`BaseOptimizationMethod.__init__` and the forecastingMethods parameter.
:rtype: tuple | [
"Optimizes",
"the",
"parameters",
"for",
"the",
"given",
"timeSeries",
"and",
"forecastingMethod",
"."
] | 8a53505c6d8367e0ea572e8af768e80b29e1cc41 | https://github.com/T-002/pycast/blob/8a53505c6d8367e0ea572e8af768e80b29e1cc41/pycast/optimization/gridsearch.py#L100-L135 | train | 36,900 |
T-002/pycast | pycast/optimization/gridsearch.py | GridSearch.optimization_loop | def optimization_loop(self, timeSeries, forecastingMethod, remainingParameters, currentParameterValues=None):
"""The optimization loop.
This function is called recursively, until all parameter values were evaluated.
:param TimeSeries timeSeries: TimeSeries instance that requires an optimized forecast.
:param BaseForecastingMethod forecastingMethod: ForecastingMethod that is used to optimize the parameters.
:param list remainingParameters: List containing all parameters with their corresponding values that still
need to be evaluated.
When this list is empty, the most inner optimization loop is reached.
:param dictionary currentParameterValues: The currently evaluated forecast parameter combination.
:return: Returns a list containing a BaseErrorMeasure instance as defined in
:py:meth:`BaseOptimizationMethod.__init__` and the forecastingMethods parameter.
:rtype: list
"""
if currentParameterValues is None:
currentParameterValues = {}
# The most inner loop is reached
if 0 == len(remainingParameters):
# set the forecasting parameters
for parameter in currentParameterValues:
forecastingMethod.set_parameter(parameter, currentParameterValues[parameter])
# calculate the forecast
forecast = timeSeries.apply(forecastingMethod)
# create and initialize the ErrorMeasure
error = self._errorClass(**self._errorMeasureKWArgs)
# when the error could not be calculated, return an empty result
if not error.initialize(timeSeries, forecast):
return []
# Debugging GridSearchTest.inner_optimization_result_test
#print "Instance / SMAPE / Alpha: %s / %s / %s" % (
# str(error)[-12:-1],
# str(error.get_error(self._startingPercentage, self._endPercentage))[:8],
# currentParameterValues["smoothingFactor"]
#)
# return the result
return [[error, dict(currentParameterValues)]]
# If this is not the most inner loop than extract an additional parameter
localParameter = remainingParameters[-1]
localParameterName = localParameter[0]
localParameterValues = localParameter[1]
# initialize the result
results = []
# check the next level for each existing parameter
for value in localParameterValues:
currentParameterValues[localParameterName] = value
remainingParameters = remainingParameters[:-1]
results += self.optimization_loop(timeSeries, forecastingMethod, remainingParameters, currentParameterValues)
return results | python | def optimization_loop(self, timeSeries, forecastingMethod, remainingParameters, currentParameterValues=None):
"""The optimization loop.
This function is called recursively, until all parameter values were evaluated.
:param TimeSeries timeSeries: TimeSeries instance that requires an optimized forecast.
:param BaseForecastingMethod forecastingMethod: ForecastingMethod that is used to optimize the parameters.
:param list remainingParameters: List containing all parameters with their corresponding values that still
need to be evaluated.
When this list is empty, the most inner optimization loop is reached.
:param dictionary currentParameterValues: The currently evaluated forecast parameter combination.
:return: Returns a list containing a BaseErrorMeasure instance as defined in
:py:meth:`BaseOptimizationMethod.__init__` and the forecastingMethods parameter.
:rtype: list
"""
if currentParameterValues is None:
currentParameterValues = {}
# The most inner loop is reached
if 0 == len(remainingParameters):
# set the forecasting parameters
for parameter in currentParameterValues:
forecastingMethod.set_parameter(parameter, currentParameterValues[parameter])
# calculate the forecast
forecast = timeSeries.apply(forecastingMethod)
# create and initialize the ErrorMeasure
error = self._errorClass(**self._errorMeasureKWArgs)
# when the error could not be calculated, return an empty result
if not error.initialize(timeSeries, forecast):
return []
# Debugging GridSearchTest.inner_optimization_result_test
#print "Instance / SMAPE / Alpha: %s / %s / %s" % (
# str(error)[-12:-1],
# str(error.get_error(self._startingPercentage, self._endPercentage))[:8],
# currentParameterValues["smoothingFactor"]
#)
# return the result
return [[error, dict(currentParameterValues)]]
# If this is not the most inner loop than extract an additional parameter
localParameter = remainingParameters[-1]
localParameterName = localParameter[0]
localParameterValues = localParameter[1]
# initialize the result
results = []
# check the next level for each existing parameter
for value in localParameterValues:
currentParameterValues[localParameterName] = value
remainingParameters = remainingParameters[:-1]
results += self.optimization_loop(timeSeries, forecastingMethod, remainingParameters, currentParameterValues)
return results | [
"def",
"optimization_loop",
"(",
"self",
",",
"timeSeries",
",",
"forecastingMethod",
",",
"remainingParameters",
",",
"currentParameterValues",
"=",
"None",
")",
":",
"if",
"currentParameterValues",
"is",
"None",
":",
"currentParameterValues",
"=",
"{",
"}",
"# The... | The optimization loop.
This function is called recursively, until all parameter values were evaluated.
:param TimeSeries timeSeries: TimeSeries instance that requires an optimized forecast.
:param BaseForecastingMethod forecastingMethod: ForecastingMethod that is used to optimize the parameters.
:param list remainingParameters: List containing all parameters with their corresponding values that still
need to be evaluated.
When this list is empty, the most inner optimization loop is reached.
:param dictionary currentParameterValues: The currently evaluated forecast parameter combination.
:return: Returns a list containing a BaseErrorMeasure instance as defined in
:py:meth:`BaseOptimizationMethod.__init__` and the forecastingMethods parameter.
:rtype: list | [
"The",
"optimization",
"loop",
"."
] | 8a53505c6d8367e0ea572e8af768e80b29e1cc41 | https://github.com/T-002/pycast/blob/8a53505c6d8367e0ea572e8af768e80b29e1cc41/pycast/optimization/gridsearch.py#L137-L198 | train | 36,901 |
T-002/pycast | bin/examples/LivingRoomEnergy/server.py | energy_data | def energy_data():
"""
Connects to the database and loads Readings for device 8.
"""
cur = db.cursor().execute("""SELECT timestamp, current FROM Readings""")
original = TimeSeries()
original.initialize_from_sql_cursor(cur)
original.normalize("day", fusionMethod = "sum")
return itty.Response(json.dumps(original, cls=PycastEncoder), content_type='application/json') | python | def energy_data():
"""
Connects to the database and loads Readings for device 8.
"""
cur = db.cursor().execute("""SELECT timestamp, current FROM Readings""")
original = TimeSeries()
original.initialize_from_sql_cursor(cur)
original.normalize("day", fusionMethod = "sum")
return itty.Response(json.dumps(original, cls=PycastEncoder), content_type='application/json') | [
"def",
"energy_data",
"(",
")",
":",
"cur",
"=",
"db",
".",
"cursor",
"(",
")",
".",
"execute",
"(",
"\"\"\"SELECT timestamp, current FROM Readings\"\"\"",
")",
"original",
"=",
"TimeSeries",
"(",
")",
"original",
".",
"initialize_from_sql_cursor",
"(",
"cur",
"... | Connects to the database and loads Readings for device 8. | [
"Connects",
"to",
"the",
"database",
"and",
"loads",
"Readings",
"for",
"device",
"8",
"."
] | 8a53505c6d8367e0ea572e8af768e80b29e1cc41 | https://github.com/T-002/pycast/blob/8a53505c6d8367e0ea572e8af768e80b29e1cc41/bin/examples/LivingRoomEnergy/server.py#L43-L51 | train | 36,902 |
Staffjoy/client_python | staffjoy/resource.py | Resource.get | def get(cls, parent=None, id=None, data=None):
"""Inherit info from parent and return new object"""
# TODO - allow fetching of parent based on child?
if parent is not None:
route = copy(parent.route)
else:
route = {}
if id is not None and cls.ID_NAME is not None:
route[cls.ID_NAME] = id
obj = cls(key=parent.key, route=route, config=parent.config)
if data:
# This is used in "get all" queries
obj.data = data
else:
obj.fetch()
return obj | python | def get(cls, parent=None, id=None, data=None):
"""Inherit info from parent and return new object"""
# TODO - allow fetching of parent based on child?
if parent is not None:
route = copy(parent.route)
else:
route = {}
if id is not None and cls.ID_NAME is not None:
route[cls.ID_NAME] = id
obj = cls(key=parent.key, route=route, config=parent.config)
if data:
# This is used in "get all" queries
obj.data = data
else:
obj.fetch()
return obj | [
"def",
"get",
"(",
"cls",
",",
"parent",
"=",
"None",
",",
"id",
"=",
"None",
",",
"data",
"=",
"None",
")",
":",
"# TODO - allow fetching of parent based on child?",
"if",
"parent",
"is",
"not",
"None",
":",
"route",
"=",
"copy",
"(",
"parent",
".",
"ro... | Inherit info from parent and return new object | [
"Inherit",
"info",
"from",
"parent",
"and",
"return",
"new",
"object"
] | e8811b0c06651a15e691c96cbfd41e7da4f7f213 | https://github.com/Staffjoy/client_python/blob/e8811b0c06651a15e691c96cbfd41e7da4f7f213/staffjoy/resource.py#L48-L68 | train | 36,903 |
Staffjoy/client_python | staffjoy/resource.py | Resource._url | def _url(self):
"""Get the URL for the resource"""
if self.ID_NAME not in self.route.keys() and "id" in self.data.keys():
self.route[self.ID_NAME] = self.data["id"]
return self.config.BASE + self.PATH.format(**self.route) | python | def _url(self):
"""Get the URL for the resource"""
if self.ID_NAME not in self.route.keys() and "id" in self.data.keys():
self.route[self.ID_NAME] = self.data["id"]
return self.config.BASE + self.PATH.format(**self.route) | [
"def",
"_url",
"(",
"self",
")",
":",
"if",
"self",
".",
"ID_NAME",
"not",
"in",
"self",
".",
"route",
".",
"keys",
"(",
")",
"and",
"\"id\"",
"in",
"self",
".",
"data",
".",
"keys",
"(",
")",
":",
"self",
".",
"route",
"[",
"self",
".",
"ID_NA... | Get the URL for the resource | [
"Get",
"the",
"URL",
"for",
"the",
"resource"
] | e8811b0c06651a15e691c96cbfd41e7da4f7f213 | https://github.com/Staffjoy/client_python/blob/e8811b0c06651a15e691c96cbfd41e7da4f7f213/staffjoy/resource.py#L106-L110 | train | 36,904 |
Staffjoy/client_python | staffjoy/resource.py | Resource._handle_request_exception | def _handle_request_exception(request):
"""Raise the proper exception based on the response"""
try:
data = request.json()
except:
data = {}
code = request.status_code
if code == requests.codes.bad:
raise BadRequestException(response=data)
if code == requests.codes.unauthorized:
raise UnauthorizedException(response=data)
if code == requests.codes.not_found:
raise NotFoundException(response=data)
# Generic error fallback
request.raise_for_status() | python | def _handle_request_exception(request):
"""Raise the proper exception based on the response"""
try:
data = request.json()
except:
data = {}
code = request.status_code
if code == requests.codes.bad:
raise BadRequestException(response=data)
if code == requests.codes.unauthorized:
raise UnauthorizedException(response=data)
if code == requests.codes.not_found:
raise NotFoundException(response=data)
# Generic error fallback
request.raise_for_status() | [
"def",
"_handle_request_exception",
"(",
"request",
")",
":",
"try",
":",
"data",
"=",
"request",
".",
"json",
"(",
")",
"except",
":",
"data",
"=",
"{",
"}",
"code",
"=",
"request",
".",
"status_code",
"if",
"code",
"==",
"requests",
".",
"codes",
"."... | Raise the proper exception based on the response | [
"Raise",
"the",
"proper",
"exception",
"based",
"on",
"the",
"response"
] | e8811b0c06651a15e691c96cbfd41e7da4f7f213 | https://github.com/Staffjoy/client_python/blob/e8811b0c06651a15e691c96cbfd41e7da4f7f213/staffjoy/resource.py#L113-L131 | train | 36,905 |
Staffjoy/client_python | staffjoy/resource.py | Resource._process_meta | def _process_meta(self, response):
"""Process additional data sent in response"""
for key in self.META_ENVELOPES:
self.meta[key] = response.get(key) | python | def _process_meta(self, response):
"""Process additional data sent in response"""
for key in self.META_ENVELOPES:
self.meta[key] = response.get(key) | [
"def",
"_process_meta",
"(",
"self",
",",
"response",
")",
":",
"for",
"key",
"in",
"self",
".",
"META_ENVELOPES",
":",
"self",
".",
"meta",
"[",
"key",
"]",
"=",
"response",
".",
"get",
"(",
"key",
")"
] | Process additional data sent in response | [
"Process",
"additional",
"data",
"sent",
"in",
"response"
] | e8811b0c06651a15e691c96cbfd41e7da4f7f213 | https://github.com/Staffjoy/client_python/blob/e8811b0c06651a15e691c96cbfd41e7da4f7f213/staffjoy/resource.py#L151-L154 | train | 36,906 |
Staffjoy/client_python | staffjoy/resource.py | Resource.patch | def patch(self, **kwargs):
"""Change attributes of the item"""
start = datetime.now()
r = requests.patch(self._url(), auth=(self.key, ""), data=kwargs)
self._delay_for_ratelimits(start)
if r.status_code not in self.TRUTHY_CODES:
return self._handle_request_exception(r)
# Refetch for safety. We could modify based on response,
# but I'm afraid of some edge cases and marshal functions.
self.fetch() | python | def patch(self, **kwargs):
"""Change attributes of the item"""
start = datetime.now()
r = requests.patch(self._url(), auth=(self.key, ""), data=kwargs)
self._delay_for_ratelimits(start)
if r.status_code not in self.TRUTHY_CODES:
return self._handle_request_exception(r)
# Refetch for safety. We could modify based on response,
# but I'm afraid of some edge cases and marshal functions.
self.fetch() | [
"def",
"patch",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"start",
"=",
"datetime",
".",
"now",
"(",
")",
"r",
"=",
"requests",
".",
"patch",
"(",
"self",
".",
"_url",
"(",
")",
",",
"auth",
"=",
"(",
"self",
".",
"key",
",",
"\"\"",
")... | Change attributes of the item | [
"Change",
"attributes",
"of",
"the",
"item"
] | e8811b0c06651a15e691c96cbfd41e7da4f7f213 | https://github.com/Staffjoy/client_python/blob/e8811b0c06651a15e691c96cbfd41e7da4f7f213/staffjoy/resource.py#L166-L177 | train | 36,907 |
Staffjoy/client_python | staffjoy/resource.py | Resource.create | def create(cls, parent=None, **kwargs):
"""Create an object and return it"""
if parent is None:
raise Exception("Parent class is required")
route = copy(parent.route)
if cls.ID_NAME is not None:
route[cls.ID_NAME] = ""
obj = cls(key=parent.key, route=route, config=parent.config)
start = datetime.now()
response = requests.post(obj._url(), auth=(obj.key, ""), data=kwargs)
cls._delay_for_ratelimits(start)
if response.status_code not in cls.TRUTHY_CODES:
return cls._handle_request_exception(response)
# No envelope on post requests
data = response.json()
obj.route[obj.ID_NAME] = data.get("id", data.get(obj.ID_NAME))
obj.data = data
return obj | python | def create(cls, parent=None, **kwargs):
"""Create an object and return it"""
if parent is None:
raise Exception("Parent class is required")
route = copy(parent.route)
if cls.ID_NAME is not None:
route[cls.ID_NAME] = ""
obj = cls(key=parent.key, route=route, config=parent.config)
start = datetime.now()
response = requests.post(obj._url(), auth=(obj.key, ""), data=kwargs)
cls._delay_for_ratelimits(start)
if response.status_code not in cls.TRUTHY_CODES:
return cls._handle_request_exception(response)
# No envelope on post requests
data = response.json()
obj.route[obj.ID_NAME] = data.get("id", data.get(obj.ID_NAME))
obj.data = data
return obj | [
"def",
"create",
"(",
"cls",
",",
"parent",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"parent",
"is",
"None",
":",
"raise",
"Exception",
"(",
"\"Parent class is required\"",
")",
"route",
"=",
"copy",
"(",
"parent",
".",
"route",
")",
"if",... | Create an object and return it | [
"Create",
"an",
"object",
"and",
"return",
"it"
] | e8811b0c06651a15e691c96cbfd41e7da4f7f213 | https://github.com/Staffjoy/client_python/blob/e8811b0c06651a15e691c96cbfd41e7da4f7f213/staffjoy/resource.py#L180-L204 | train | 36,908 |
Staffjoy/client_python | staffjoy/resource.py | Resource._delay_for_ratelimits | def _delay_for_ratelimits(cls, start):
"""If request was shorter than max request time, delay"""
stop = datetime.now()
duration_microseconds = (stop - start).microseconds
if duration_microseconds < cls.REQUEST_TIME_MICROSECONDS:
time.sleep((cls.REQUEST_TIME_MICROSECONDS - duration_microseconds)
/ MICROSECONDS_PER_SECOND) | python | def _delay_for_ratelimits(cls, start):
"""If request was shorter than max request time, delay"""
stop = datetime.now()
duration_microseconds = (stop - start).microseconds
if duration_microseconds < cls.REQUEST_TIME_MICROSECONDS:
time.sleep((cls.REQUEST_TIME_MICROSECONDS - duration_microseconds)
/ MICROSECONDS_PER_SECOND) | [
"def",
"_delay_for_ratelimits",
"(",
"cls",
",",
"start",
")",
":",
"stop",
"=",
"datetime",
".",
"now",
"(",
")",
"duration_microseconds",
"=",
"(",
"stop",
"-",
"start",
")",
".",
"microseconds",
"if",
"duration_microseconds",
"<",
"cls",
".",
"REQUEST_TIM... | If request was shorter than max request time, delay | [
"If",
"request",
"was",
"shorter",
"than",
"max",
"request",
"time",
"delay"
] | e8811b0c06651a15e691c96cbfd41e7da4f7f213 | https://github.com/Staffjoy/client_python/blob/e8811b0c06651a15e691c96cbfd41e7da4f7f213/staffjoy/resource.py#L210-L216 | train | 36,909 |
twisted/txaws | txaws/client/discover/command.py | Command.run | def run(self):
"""
Run the configured method and write the HTTP response status and text
to the output stream.
"""
region = AWSServiceRegion(access_key=self.key, secret_key=self.secret,
uri=self.endpoint)
query = self.query_factory(action=self.action, creds=region.creds,
endpoint=region.ec2_endpoint,
other_params=self.parameters)
def write_response(response):
print >> self.output, "URL: %s" % query.client.url
print >> self.output
print >> self.output, "HTTP status code: %s" % query.client.status
print >> self.output
print >> self.output, response
def write_error(failure):
if failure.check(AWSError):
message = failure.value.original
else:
message = failure.getErrorMessage()
if message.startswith("Error Message: "):
message = message[len("Error Message: "):]
print >> self.output, "URL: %s" % query.client.url
print >> self.output
if getattr(query.client, "status", None) is not None:
print >> self.output, "HTTP status code: %s" % (
query.client.status,)
print >> self.output
print >> self.output, message
if getattr(failure.value, "response", None) is not None:
print >> self.output
print >> self.output, failure.value.response
deferred = query.submit()
deferred.addCallback(write_response)
deferred.addErrback(write_error)
return deferred | python | def run(self):
"""
Run the configured method and write the HTTP response status and text
to the output stream.
"""
region = AWSServiceRegion(access_key=self.key, secret_key=self.secret,
uri=self.endpoint)
query = self.query_factory(action=self.action, creds=region.creds,
endpoint=region.ec2_endpoint,
other_params=self.parameters)
def write_response(response):
print >> self.output, "URL: %s" % query.client.url
print >> self.output
print >> self.output, "HTTP status code: %s" % query.client.status
print >> self.output
print >> self.output, response
def write_error(failure):
if failure.check(AWSError):
message = failure.value.original
else:
message = failure.getErrorMessage()
if message.startswith("Error Message: "):
message = message[len("Error Message: "):]
print >> self.output, "URL: %s" % query.client.url
print >> self.output
if getattr(query.client, "status", None) is not None:
print >> self.output, "HTTP status code: %s" % (
query.client.status,)
print >> self.output
print >> self.output, message
if getattr(failure.value, "response", None) is not None:
print >> self.output
print >> self.output, failure.value.response
deferred = query.submit()
deferred.addCallback(write_response)
deferred.addErrback(write_error)
return deferred | [
"def",
"run",
"(",
"self",
")",
":",
"region",
"=",
"AWSServiceRegion",
"(",
"access_key",
"=",
"self",
".",
"key",
",",
"secret_key",
"=",
"self",
".",
"secret",
",",
"uri",
"=",
"self",
".",
"endpoint",
")",
"query",
"=",
"self",
".",
"query_factory"... | Run the configured method and write the HTTP response status and text
to the output stream. | [
"Run",
"the",
"configured",
"method",
"and",
"write",
"the",
"HTTP",
"response",
"status",
"and",
"text",
"to",
"the",
"output",
"stream",
"."
] | 5c3317376cd47e536625027e38c3b37840175ce0 | https://github.com/twisted/txaws/blob/5c3317376cd47e536625027e38c3b37840175ce0/txaws/client/discover/command.py#L46-L87 | train | 36,910 |
twisted/txaws | txaws/service.py | AWSServiceEndpoint.get_canonical_host | def get_canonical_host(self):
"""
Return the canonical host as for the Host HTTP header specification.
"""
host = self.host.lower()
if self.port is not None:
host = "%s:%s" % (host, self.port)
return host | python | def get_canonical_host(self):
"""
Return the canonical host as for the Host HTTP header specification.
"""
host = self.host.lower()
if self.port is not None:
host = "%s:%s" % (host, self.port)
return host | [
"def",
"get_canonical_host",
"(",
"self",
")",
":",
"host",
"=",
"self",
".",
"host",
".",
"lower",
"(",
")",
"if",
"self",
".",
"port",
"is",
"not",
"None",
":",
"host",
"=",
"\"%s:%s\"",
"%",
"(",
"host",
",",
"self",
".",
"port",
")",
"return",
... | Return the canonical host as for the Host HTTP header specification. | [
"Return",
"the",
"canonical",
"host",
"as",
"for",
"the",
"Host",
"HTTP",
"header",
"specification",
"."
] | 5c3317376cd47e536625027e38c3b37840175ce0 | https://github.com/twisted/txaws/blob/5c3317376cd47e536625027e38c3b37840175ce0/txaws/service.py#L75-L82 | train | 36,911 |
twisted/txaws | txaws/service.py | AWSServiceEndpoint.set_canonical_host | def set_canonical_host(self, canonical_host):
"""
Set host and port from a canonical host string as for the Host HTTP
header specification.
"""
parts = canonical_host.lower().split(":")
self.host = parts[0]
if len(parts) > 1 and parts[1]:
self.port = int(parts[1])
else:
self.port = None | python | def set_canonical_host(self, canonical_host):
"""
Set host and port from a canonical host string as for the Host HTTP
header specification.
"""
parts = canonical_host.lower().split(":")
self.host = parts[0]
if len(parts) > 1 and parts[1]:
self.port = int(parts[1])
else:
self.port = None | [
"def",
"set_canonical_host",
"(",
"self",
",",
"canonical_host",
")",
":",
"parts",
"=",
"canonical_host",
".",
"lower",
"(",
")",
".",
"split",
"(",
"\":\"",
")",
"self",
".",
"host",
"=",
"parts",
"[",
"0",
"]",
"if",
"len",
"(",
"parts",
")",
">",... | Set host and port from a canonical host string as for the Host HTTP
header specification. | [
"Set",
"host",
"and",
"port",
"from",
"a",
"canonical",
"host",
"string",
"as",
"for",
"the",
"Host",
"HTTP",
"header",
"specification",
"."
] | 5c3317376cd47e536625027e38c3b37840175ce0 | https://github.com/twisted/txaws/blob/5c3317376cd47e536625027e38c3b37840175ce0/txaws/service.py#L84-L94 | train | 36,912 |
twisted/txaws | txaws/service.py | AWSServiceEndpoint.get_uri | def get_uri(self):
"""Get a URL representation of the service."""
uri = "%s://%s%s" % (self.scheme, self.get_canonical_host(), self.path)
return uri | python | def get_uri(self):
"""Get a URL representation of the service."""
uri = "%s://%s%s" % (self.scheme, self.get_canonical_host(), self.path)
return uri | [
"def",
"get_uri",
"(",
"self",
")",
":",
"uri",
"=",
"\"%s://%s%s\"",
"%",
"(",
"self",
".",
"scheme",
",",
"self",
".",
"get_canonical_host",
"(",
")",
",",
"self",
".",
"path",
")",
"return",
"uri"
] | Get a URL representation of the service. | [
"Get",
"a",
"URL",
"representation",
"of",
"the",
"service",
"."
] | 5c3317376cd47e536625027e38c3b37840175ce0 | https://github.com/twisted/txaws/blob/5c3317376cd47e536625027e38c3b37840175ce0/txaws/service.py#L99-L102 | train | 36,913 |
T-002/pycast | pycast/common/decorators.py | optimized | def optimized(fn):
"""Decorator that will call the optimized c++ version
of a pycast function if available rather than theo
original pycast function
:param function fn: original pycast function
:return: return the wrapped function
:rtype: function
"""
def _optimized(self, *args, **kwargs):
""" This method calls the pycastC function if
optimization is enabled and the pycastC function
is available.
:param: PyCastObject self: reference to the calling object.
Needs to be passed to the pycastC function,
so that all uts members are available.
:param: list *args: list of arguments the function is called with.
:param: dict **kwargs: dictionary of parameter names and values the function has been called with.
:return result of the function call either from pycast or pycastC module.
:rtype: function
"""
if self.optimizationEnabled:
class_name = self.__class__.__name__
module = self.__module__.replace("pycast", "pycastC")
try:
imported = __import__("%s.%s" % (module, class_name), globals(), locals(), [fn.__name__])
function = getattr(imported, fn.__name__)
return function(self, *args, **kwargs)
except ImportError:
print "[WARNING] Could not enable optimization for %s, %s" % (fn.__name__, self)
return fn(self, *args, **kwargs)
else:
return fn(self, *args, **kwargs)
setattr(_optimized, "__name__", fn.__name__)
setattr(_optimized, "__repr__", fn.__repr__)
setattr(_optimized, "__str__", fn.__str__)
setattr(_optimized, "__doc__", fn.__doc__)
def optimized(fn):
    """Decorator that calls the optimized C++ version of a pycast function.

    If a compiled ``pycastC`` counterpart of the decorated function is
    available (and optimization is enabled on the calling object) it is
    used rather than the original pycast implementation.

    :param function fn: original pycast function.
    :return: the wrapped function.
    :rtype: function
    """
    def _optimized(self, *args, **kwargs):
        """Call the pycastC function if optimization is enabled and available.

        :param PyCastObject self: reference to the calling object.
            Needs to be passed to the pycastC function, so that all its
            members are available.
        :param list args: list of arguments the function is called with.
        :param dict kwargs: dictionary of parameter names and values the
            function has been called with.
        :return: result of the function call either from the pycast or
            pycastC module.
        """
        if self.optimizationEnabled:
            class_name = self.__class__.__name__
            module = self.__module__.replace("pycast", "pycastC")
            try:
                imported = __import__("%s.%s" % (module, class_name),
                                      globals(), locals(), [fn.__name__])
                function = getattr(imported, fn.__name__)
                return function(self, *args, **kwargs)
            except ImportError:
                # No compiled counterpart could be imported: warn and fall
                # back to the pure Python implementation.
                # Fix: use the print() function instead of the Python 2
                # print statement, which is a SyntaxError under Python 3.
                print("[WARNING] Could not enable optimization for %s, %s"
                      % (fn.__name__, self))
                return fn(self, *args, **kwargs)
        else:
            return fn(self, *args, **kwargs)
    # Make the wrapper look like the wrapped function for introspection.
    setattr(_optimized, "__name__", fn.__name__)
    setattr(_optimized, "__repr__", fn.__repr__)
    setattr(_optimized, "__str__", fn.__str__)
    setattr(_optimized, "__doc__", fn.__doc__)
    return _optimized
def optimize(self, timeSeries, forecastingMethods=None, startingPercentage=0.0, endPercentage=100.0):
    """Run the optimization on the given TimeSeries.

    :param TimeSeries timeSeries: TimeSeries instance that requires an
        optimized forecast.
    :param list forecastingMethods: forecasting methods that will be used
        for optimization.
    :param float startingPercentage: start of the error interval, in
        [0.0, 100.0]. A value of 25.0 means the first 25% of all
        calculated errors are ignored.
    :param float endPercentage: end of the error interval, in
        [0.0, 100.0]. A value of 90.0 means the last 10% of all local
        errors are ignored.
    :return: the optimized forecasting method with the smallest error.
    :rtype: (BaseForecastingMethod, Dictionary)
    :raise: Raises a :py:exc:`ValueError` if forecastingMethods is empty.
    """
    # Reject a missing or empty list of forecasting methods up front.
    if forecastingMethods is None or not len(forecastingMethods):
        raise ValueError("forecastingMethods cannot be empty.")
def method(method_class):
    """Mark an API method class for registration.

    Classes decorated with this function are picked up when
    L{Registry.scan} is invoked and added to the registry.

    @param method_class: The L{Method} class to register.
    """
    def callback(scanner, name, method_class):
        # Fall back to the class name when no actions are declared, and
        # to a single version-less entry when no versions are declared.
        if method_class.actions is not None:
            actions = method_class.actions
        else:
            actions = [name]
        if method_class.versions is not None:
            versions = method_class.versions
        else:
            versions = [None]
        # Register every (action, version) combination.
        for action in actions:
            for version in versions:
                scanner.registry.add(method_class,
                                     action=action,
                                     version=version)

    from venusian import attach
    attach(method_class, callback, category="method")
    return method_class
def hexlify(script, minify=False):
    """
    Return a hex encoded version of the given Python script bytes.

    Based on the hexlify script in the microbit-micropython repository.
    """
    if not script:
        return ''
    # Normalise line endings in case the file was created on Windows.
    script = script.replace(b'\r\n', b'\n').replace(b'\r', b'\n')
    if minify:
        if not can_minify:
            raise ValueError("No minifier is available")
        script = nudatus.mangle(script.decode('utf-8')).encode('utf-8')
    # Prefix the header ("MP" plus the script length), pad to a multiple
    # of 16 bytes with null bytes (2/3 compatible).
    payload = b'MP' + struct.pack('<H', len(script)) + script
    payload = payload + (b'\x00' * (16 - len(payload) % 16))
    if len(payload) > _MAX_SIZE:
        # 'MP' = 2 bytes, script length is another 2 bytes.
        raise ValueError("Python script must be less than 8188 bytes.")
    # Convert to .hex format, starting with an extended linear address
    # record for 0x0003.
    records = [':020000040003F7']
    address = _SCRIPT_ADDR
    for offset in range(0, len(payload), 16):
        block = payload[offset:min(offset + 16, len(payload))]
        block = struct.pack('>BHB', len(block), address & 0xffff, 0) + block
        checksum = (-(sum(bytearray(block)))) & 0xff
        records.append(':%s%02X' % (strfunc(binascii.hexlify(block)).upper(),
                                    checksum))
        address += 16
    return '\n'.join(records)
def unhexlify(blob):
    """
    Take a hexlified script and turn it back into a string of Python code.

    :param blob: string holding the hex records produced by ``hexlify``.
    :return: the embedded Python source, or an empty string if the blob
        holds no data records, carries no ``MP`` header or does not decode
        as UTF-8.
    """
    lines = blob.split('\n')[1:]
    # Guard against an empty blob or one with only the address record:
    # the original implementation raised an IndexError below instead of
    # returning the documented empty string.
    if not lines:
        return ''
    output = []
    for line in lines:
        # Discard the length/address/type fields and the checksum, then
        # reverse the hexlification of the 16-byte payload.
        output.append(binascii.unhexlify(line[9:-2]))
    # Check the header is correct ("MP<size>").
    if output[0][0:2].decode('utf-8') != u'MP':
        return ''
    # Strip off the 4-byte header ...
    output[0] = output[0][4:]
    # ... and any padding null bytes from the end.
    output[-1] = output[-1].strip(b'\x00')
    script = b''.join(output)
    try:
        return script.decode('utf-8')
    except UnicodeDecodeError:
        # Return an empty string because in certain rare circumstances
        # (where the source hex doesn't include any embedded Python code)
        # this function may be passed in "raw" bytes from MicroPython.
        return ''
def embed_hex(runtime_hex, python_hex=None):
    """
    Embed a hex encoded Python script into the MicroPython runtime hex.

    Returns a string representation of the resulting combination.

    Will raise a ValueError if the runtime_hex is missing.

    If the python_hex is missing, it will return the unmodified
    runtime_hex.
    """
    if not runtime_hex:
        raise ValueError('MicroPython runtime hex required.')
    if not python_hex:
        return runtime_hex
    runtime_lines = runtime_hex.split()
    # Splice the Python records into the runtime five lines from its end,
    # keeping the runtime's trailing records in place.
    combined = runtime_lines[:-5] + python_hex.split() + runtime_lines[-5:]
    return '\n'.join(combined) + '\n'
def extract_script(embedded_hex):
    """
    Extract the embedded Python script from a hex file that contains the
    MicroPython runtime.

    Returns a string containing the original embedded script.
    """
    lines = embedded_hex.split('\n')
    # Upper 16 bits of the script address (as used in extended linear
    # address records) and lower 16 bits (as used in data records).
    addr_high = hex((_SCRIPT_ADDR >> 16) & 0xffff)[2:].upper().zfill(4)
    addr_low = hex(_SCRIPT_ADDR & 0xffff)[2:].upper().zfill(4)
    script_start = None
    in_script_segment = False
    # Look for the data record at the script start address.
    for index, record in enumerate(lines):
        if record[0:9] == ':02000004':
            # Reached an extended address record; note whether the
            # following records live in the script's segment.
            in_script_segment = record[9:13].upper() == addr_high
        elif (in_script_segment and record[0:3] == ':10'
                and record[3:7].upper() == addr_low):
            script_start = index
            break
    if not script_start:
        return ''
    # Find the end of the script: a record fully padded with 0xFF.
    script_end = None
    for offset, record in enumerate(lines[script_start:]):
        if record[9:41] == 'F' * 32:
            script_end = script_start + offset
            break
    # Pass the extracted hex (including the record preceding the script
    # start) through unhexlify.
    return unhexlify('\n'.join(
        lines[script_start - 1:script_end if script_end else -6]))
def save_hex(hex_file, path):
    """
    Copy the string representation of a hex file to the specified path,
    thus causing the device mounted at that point to be flashed.

    If the hex_file is empty it will raise a ValueError.

    If the filename at the end of the path does not end in '.hex' it will
    raise a ValueError.
    """
    if not hex_file:
        raise ValueError('Cannot flash an empty .hex file.')
    if not path.endswith('.hex'):
        raise ValueError('The path to flash must be for a .hex file.')
    # Write as ASCII bytes; hex records are plain ASCII.
    with open(path, 'wb') as target:
        target.write(hex_file.encode('ascii'))
def extract(path_to_hex, output_path=None):
    """
    Attempt to extract the embedded script from the hex file at
    ``path_to_hex`` and save it either to ``output_path`` or stdout.
    """
    with open(path_to_hex, 'r') as hex_file:
        recovered = extract_script(hex_file.read())
        if output_path:
            # Persist the recovered source to the requested file.
            with open(output_path, 'w') as output_file:
                output_file.write(recovered)
        else:
            print(recovered)
def main(argv=None):
    """
    Entry point for the command line tool 'uflash'.

    Will print help text if the optional first argument is "help".
    Otherwise it will ensure the optional first argument ends in ".py"
    (the source Python script).

    An optional second argument is used to reference the path to the
    micro:bit device. Any more arguments are ignored.

    Exceptions are caught and printed for the user.
    """
    if not argv:
        argv = sys.argv[1:]
    # Build the command line interface.
    parser = argparse.ArgumentParser(description=_HELP_TEXT)
    parser.add_argument('source', nargs='?', default=None)
    parser.add_argument('target', nargs='*', default=None)
    parser.add_argument('-r', '--runtime', default=None,
                        help="Use the referenced MicroPython runtime.")
    parser.add_argument('-e', '--extract', action='store_true',
                        help=("Extract python source from a hex file"
                              " instead of creating the hex file."), )
    parser.add_argument('-w', '--watch', action='store_true',
                        help='Watch the source file for changes.')
    parser.add_argument('-m', '--minify', action='store_true',
                        help='Minify the source')
    parser.add_argument('--version', action='version',
                        version='%(prog)s ' + get_version())
    args = parser.parse_args(argv)

    if args.extract:
        # Pull the embedded script out of an existing hex file.
        try:
            extract(args.source, args.target)
        except Exception as ex:
            print("Error extracting {source}: {error!s}".format(
                source=args.source, error=ex), file=sys.stderr)
            sys.exit(1)
    elif args.watch:
        # Re-flash whenever the source file changes.
        try:
            watch_file(args.source, flash,
                       path_to_python=args.source,
                       paths_to_microbits=args.target,
                       path_to_runtime=args.runtime)
        except Exception as ex:
            print("Error watching {source}: {error!s}".format(
                source=args.source, error=ex), file=sys.stderr)
            sys.exit(1)
    else:
        # Default behaviour: flash the script onto the device(s).
        try:
            flash(path_to_python=args.source, paths_to_microbits=args.target,
                  path_to_runtime=args.runtime, minify=args.minify)
        except Exception as ex:
            target = args.target if args.target else "microbit"
            if args.runtime:
                runtime = "with runtime {runtime}".format(runtime=args.runtime)
            else:
                runtime = ""
            print("Error flashing {source} to {target}{runtime}: {error!s}"
                  .format(source=args.source, target=target,
                          runtime=runtime, error=ex),
                  file=sys.stderr)
            sys.exit(1)
def timing(func):
    """Decorator that measures and prints a function call's execution time."""
    @functools.wraps(func)
    def wrapped(*args, **kwargs):
        # Wall-clock the call, report it, then hand back the result.
        started = time()
        result = func(*args, **kwargs)
        elapsed = time() - started
        print('func:%r args:[%r, %r] took: %2.4f sec' %
              (func.__name__, args, kwargs, elapsed))
        return result
    return wrapped
MartinThoma/mpu | mpu/units/__init__.py | get_currency | def get_currency(currency_str):
"""
Convert an identifier for a currency into a currency object.
Parameters
----------
currency_str : str
Returns
-------
currency : Currency
"""
path = 'units/currencies.csv' # always use slash in Python packages
filepath = pkg_resources.resource_filename('mpu', path)
with open(filepath, 'r') as fp:
reader = csv.reader(fp, delimiter=',', quotechar='"')
next(reader, None) # skip the headers
for row in reader:
is_currency = currency_str in [row[0], row[1], row[2]]
if is_currency:
entity = row[0]
name = row[1]
code = row[2]
numeric_code = row[3]
symbol = row[4]
if len(row[5]) == 0:
exponent = None
else:
exponent = int(row[5])
if len(row[6]) > 0:
withdrawal_date = row[6]
else:
withdrawal_date = None
subunits = row[7]
return Currency(name=name,
code=code,
numeric_code=numeric_code,
symbol=symbol,
exponent=exponent,
entities=[entity],
withdrawal_date=withdrawal_date,
subunits=subunits)
raise ValueError('Could not find currency \'{}\''.format(currency_str)) | python | def get_currency(currency_str):
"""
Convert an identifier for a currency into a currency object.
Parameters
----------
currency_str : str
Returns
-------
currency : Currency
"""
path = 'units/currencies.csv' # always use slash in Python packages
filepath = pkg_resources.resource_filename('mpu', path)
with open(filepath, 'r') as fp:
reader = csv.reader(fp, delimiter=',', quotechar='"')
next(reader, None) # skip the headers
for row in reader:
is_currency = currency_str in [row[0], row[1], row[2]]
if is_currency:
entity = row[0]
name = row[1]
code = row[2]
numeric_code = row[3]
symbol = row[4]
if len(row[5]) == 0:
exponent = None
else:
exponent = int(row[5])
if len(row[6]) > 0:
withdrawal_date = row[6]
else:
withdrawal_date = None
subunits = row[7]
return Currency(name=name,
code=code,
numeric_code=numeric_code,
symbol=symbol,
exponent=exponent,
entities=[entity],
withdrawal_date=withdrawal_date,
subunits=subunits)
raise ValueError('Could not find currency \'{}\''.format(currency_str)) | [
"def",
"get_currency",
"(",
"currency_str",
")",
":",
"path",
"=",
"'units/currencies.csv'",
"# always use slash in Python packages",
"filepath",
"=",
"pkg_resources",
".",
"resource_filename",
"(",
"'mpu'",
",",
"path",
")",
"with",
"open",
"(",
"filepath",
",",
"'... | Convert an identifier for a currency into a currency object.
Parameters
----------
currency_str : str
Returns
-------
currency : Currency | [
"Convert",
"an",
"identifier",
"for",
"a",
"currency",
"into",
"a",
"currency",
"object",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/units/__init__.py#L236-L278 | train | 36,925 |
MartinThoma/mpu | mpu/units/__init__.py | Currency.from_json | def from_json(cls, json):
"""Create a Currency object from a JSON dump."""
obj = cls(name=json['name'],
code=json['code'],
numeric_code=json['numeric_code'],
symbol=json['symbol'],
exponent=json['exponent'],
entities=json['entities'],
withdrawal_date=json['withdrawal_date'],
subunits=json['subunits'])
return obj | python | def from_json(cls, json):
"""Create a Currency object from a JSON dump."""
obj = cls(name=json['name'],
code=json['code'],
numeric_code=json['numeric_code'],
symbol=json['symbol'],
exponent=json['exponent'],
entities=json['entities'],
withdrawal_date=json['withdrawal_date'],
subunits=json['subunits'])
return obj | [
"def",
"from_json",
"(",
"cls",
",",
"json",
")",
":",
"obj",
"=",
"cls",
"(",
"name",
"=",
"json",
"[",
"'name'",
"]",
",",
"code",
"=",
"json",
"[",
"'code'",
"]",
",",
"numeric_code",
"=",
"json",
"[",
"'numeric_code'",
"]",
",",
"symbol",
"=",
... | Create a Currency object from a JSON dump. | [
"Create",
"a",
"Currency",
"object",
"from",
"a",
"JSON",
"dump",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/units/__init__.py#L348-L358 | train | 36,926 |
MartinThoma/mpu | mpu/string.py | is_email | def is_email(potential_email_address):
"""
Check if potential_email_address is a valid e-mail address.
Please note that this function has no false-negatives but many
false-positives. So if it returns that the input is not a valid
e-mail adress, it certainly isn't. If it returns True, it might still be
invalid. For example, the domain could not be registered.
Parameters
----------
potential_email_address : str
Returns
-------
is_email : bool
Examples
--------
>>> is_email('')
False
>>> is_email('info@martin-thoma.de')
True
>>> is_email('info@math.martin-thoma.de')
True
>>> is_email('Martin Thoma <info@martin-thoma.de>')
False
>>> is_email('info@martin-thoma')
False
"""
context, mail = parseaddr(potential_email_address)
first_condition = len(context) == 0 and len(mail) != 0
dot_after_at = ('@' in potential_email_address and
'.' in potential_email_address.split('@')[1])
return first_condition and dot_after_at | python | def is_email(potential_email_address):
"""
Check if potential_email_address is a valid e-mail address.
Please note that this function has no false-negatives but many
false-positives. So if it returns that the input is not a valid
e-mail adress, it certainly isn't. If it returns True, it might still be
invalid. For example, the domain could not be registered.
Parameters
----------
potential_email_address : str
Returns
-------
is_email : bool
Examples
--------
>>> is_email('')
False
>>> is_email('info@martin-thoma.de')
True
>>> is_email('info@math.martin-thoma.de')
True
>>> is_email('Martin Thoma <info@martin-thoma.de>')
False
>>> is_email('info@martin-thoma')
False
"""
context, mail = parseaddr(potential_email_address)
first_condition = len(context) == 0 and len(mail) != 0
dot_after_at = ('@' in potential_email_address and
'.' in potential_email_address.split('@')[1])
return first_condition and dot_after_at | [
"def",
"is_email",
"(",
"potential_email_address",
")",
":",
"context",
",",
"mail",
"=",
"parseaddr",
"(",
"potential_email_address",
")",
"first_condition",
"=",
"len",
"(",
"context",
")",
"==",
"0",
"and",
"len",
"(",
"mail",
")",
"!=",
"0",
"dot_after_a... | Check if potential_email_address is a valid e-mail address.
Please note that this function has no false-negatives but many
false-positives. So if it returns that the input is not a valid
e-mail adress, it certainly isn't. If it returns True, it might still be
invalid. For example, the domain could not be registered.
Parameters
----------
potential_email_address : str
Returns
-------
is_email : bool
Examples
--------
>>> is_email('')
False
>>> is_email('info@martin-thoma.de')
True
>>> is_email('info@math.martin-thoma.de')
True
>>> is_email('Martin Thoma <info@martin-thoma.de>')
False
>>> is_email('info@martin-thoma')
False | [
"Check",
"if",
"potential_email_address",
"is",
"a",
"valid",
"e",
"-",
"mail",
"address",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/string.py#L19-L53 | train | 36,927 |
MartinThoma/mpu | mpu/string.py | str2bool | def str2bool(string_, default='raise'):
"""
Convert a string to a bool.
Parameters
----------
string_ : str
default : {'raise', False}
Default behaviour if none of the "true" strings is detected.
Returns
-------
boolean : bool
Examples
--------
>>> str2bool('True')
True
>>> str2bool('1')
True
>>> str2bool('0')
False
"""
true = ['true', 't', '1', 'y', 'yes', 'enabled', 'enable', 'on']
false = ['false', 'f', '0', 'n', 'no', 'disabled', 'disable', 'off']
if string_.lower() in true:
return True
elif string_.lower() in false or (not default):
return False
else:
raise ValueError('The value \'{}\' cannot be mapped to boolean.'
.format(string_)) | python | def str2bool(string_, default='raise'):
"""
Convert a string to a bool.
Parameters
----------
string_ : str
default : {'raise', False}
Default behaviour if none of the "true" strings is detected.
Returns
-------
boolean : bool
Examples
--------
>>> str2bool('True')
True
>>> str2bool('1')
True
>>> str2bool('0')
False
"""
true = ['true', 't', '1', 'y', 'yes', 'enabled', 'enable', 'on']
false = ['false', 'f', '0', 'n', 'no', 'disabled', 'disable', 'off']
if string_.lower() in true:
return True
elif string_.lower() in false or (not default):
return False
else:
raise ValueError('The value \'{}\' cannot be mapped to boolean.'
.format(string_)) | [
"def",
"str2bool",
"(",
"string_",
",",
"default",
"=",
"'raise'",
")",
":",
"true",
"=",
"[",
"'true'",
",",
"'t'",
",",
"'1'",
",",
"'y'",
",",
"'yes'",
",",
"'enabled'",
",",
"'enable'",
",",
"'on'",
"]",
"false",
"=",
"[",
"'false'",
",",
"'f'"... | Convert a string to a bool.
Parameters
----------
string_ : str
default : {'raise', False}
Default behaviour if none of the "true" strings is detected.
Returns
-------
boolean : bool
Examples
--------
>>> str2bool('True')
True
>>> str2bool('1')
True
>>> str2bool('0')
False | [
"Convert",
"a",
"string",
"to",
"a",
"bool",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/string.py#L124-L155 | train | 36,928 |
MartinThoma/mpu | mpu/string.py | str2bool_or_none | def str2bool_or_none(string_, default='raise'):
"""
Convert a string to a bool or to None.
Parameters
----------
string_ : str
default : {'raise', False}
Default behaviour if none of the "true" or "none" strings is detected.
Returns
-------
bool_or_none : bool or None
Examples
--------
>>> str2bool_or_none('True')
True
>>> str2bool_or_none('1')
True
>>> str2bool_or_none('0')
False
>>> str2bool_or_none('undefined')
"""
if is_none(string_, default=False):
return None
else:
return str2bool(string_, default) | python | def str2bool_or_none(string_, default='raise'):
"""
Convert a string to a bool or to None.
Parameters
----------
string_ : str
default : {'raise', False}
Default behaviour if none of the "true" or "none" strings is detected.
Returns
-------
bool_or_none : bool or None
Examples
--------
>>> str2bool_or_none('True')
True
>>> str2bool_or_none('1')
True
>>> str2bool_or_none('0')
False
>>> str2bool_or_none('undefined')
"""
if is_none(string_, default=False):
return None
else:
return str2bool(string_, default) | [
"def",
"str2bool_or_none",
"(",
"string_",
",",
"default",
"=",
"'raise'",
")",
":",
"if",
"is_none",
"(",
"string_",
",",
"default",
"=",
"False",
")",
":",
"return",
"None",
"else",
":",
"return",
"str2bool",
"(",
"string_",
",",
"default",
")"
] | Convert a string to a bool or to None.
Parameters
----------
string_ : str
default : {'raise', False}
Default behaviour if none of the "true" or "none" strings is detected.
Returns
-------
bool_or_none : bool or None
Examples
--------
>>> str2bool_or_none('True')
True
>>> str2bool_or_none('1')
True
>>> str2bool_or_none('0')
False
>>> str2bool_or_none('undefined') | [
"Convert",
"a",
"string",
"to",
"a",
"bool",
"or",
"to",
"None",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/string.py#L186-L213 | train | 36,929 |
MartinThoma/mpu | mpu/string.py | is_none | def is_none(string_, default='raise'):
"""
Check if a string is equivalent to None.
Parameters
----------
string_ : str
default : {'raise', False}
Default behaviour if none of the "None" strings is detected.
Returns
-------
is_none : bool
Examples
--------
>>> is_none('2', default=False)
False
>>> is_none('undefined', default=False)
True
"""
none = ['none', 'undefined', 'unknown', 'null', '']
if string_.lower() in none:
return True
elif not default:
return False
else:
raise ValueError('The value \'{}\' cannot be mapped to none.'
.format(string_)) | python | def is_none(string_, default='raise'):
"""
Check if a string is equivalent to None.
Parameters
----------
string_ : str
default : {'raise', False}
Default behaviour if none of the "None" strings is detected.
Returns
-------
is_none : bool
Examples
--------
>>> is_none('2', default=False)
False
>>> is_none('undefined', default=False)
True
"""
none = ['none', 'undefined', 'unknown', 'null', '']
if string_.lower() in none:
return True
elif not default:
return False
else:
raise ValueError('The value \'{}\' cannot be mapped to none.'
.format(string_)) | [
"def",
"is_none",
"(",
"string_",
",",
"default",
"=",
"'raise'",
")",
":",
"none",
"=",
"[",
"'none'",
",",
"'undefined'",
",",
"'unknown'",
",",
"'null'",
",",
"''",
"]",
"if",
"string_",
".",
"lower",
"(",
")",
"in",
"none",
":",
"return",
"True",... | Check if a string is equivalent to None.
Parameters
----------
string_ : str
default : {'raise', False}
Default behaviour if none of the "None" strings is detected.
Returns
-------
is_none : bool
Examples
--------
>>> is_none('2', default=False)
False
>>> is_none('undefined', default=False)
True | [
"Check",
"if",
"a",
"string",
"is",
"equivalent",
"to",
"None",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/string.py#L266-L294 | train | 36,930 |
MartinThoma/mpu | mpu/string.py | is_iban | def is_iban(potential_iban):
"""
Check if a string is a valid IBAN number.
IBAN is described in ISO 13616-1:2007 Part 1.
Spaces are ignored.
# CODE
0 = always zero
b = BIC or National Bank code
c = Account number
i = holder's kennitala (national identification number)
k = IBAN check digits
n = Branch number
t = Account type
x = National check digit or character
Examples
--------
>>> is_iban('DE89 3704 0044 0532 0130 00')
True
>>> is_iban('DE89 3704 0044 0532 0130 01')
False
"""
path = 'data/iban.csv' # always use slash in Python packages
filepath = pkg_resources.resource_filename('mpu', path)
data = mpu.io.read(filepath, delimiter=';', format='dicts')
potential_iban = potential_iban.replace(' ', '') # Remove spaces
if len(potential_iban) < min([int(el['length']) for el in data]):
return False
country = None
for element in data:
if element['iban_fields'][:2] == potential_iban[:2]:
country = element
break
if country is None:
return False
if len(potential_iban) != int(country['length']):
return False
if country['country_en'] == 'Germany':
checksum_val = [value
for field_type, value in
zip(country['iban_fields'], potential_iban)
if field_type == 'k']
checksum_val = ''.join(checksum_val)
checksum_exp = _calculate_german_iban_checksum(potential_iban,
country['iban_fields'])
return checksum_val == checksum_exp
return True | python | def is_iban(potential_iban):
"""
Check if a string is a valid IBAN number.
IBAN is described in ISO 13616-1:2007 Part 1.
Spaces are ignored.
# CODE
0 = always zero
b = BIC or National Bank code
c = Account number
i = holder's kennitala (national identification number)
k = IBAN check digits
n = Branch number
t = Account type
x = National check digit or character
Examples
--------
>>> is_iban('DE89 3704 0044 0532 0130 00')
True
>>> is_iban('DE89 3704 0044 0532 0130 01')
False
"""
path = 'data/iban.csv' # always use slash in Python packages
filepath = pkg_resources.resource_filename('mpu', path)
data = mpu.io.read(filepath, delimiter=';', format='dicts')
potential_iban = potential_iban.replace(' ', '') # Remove spaces
if len(potential_iban) < min([int(el['length']) for el in data]):
return False
country = None
for element in data:
if element['iban_fields'][:2] == potential_iban[:2]:
country = element
break
if country is None:
return False
if len(potential_iban) != int(country['length']):
return False
if country['country_en'] == 'Germany':
checksum_val = [value
for field_type, value in
zip(country['iban_fields'], potential_iban)
if field_type == 'k']
checksum_val = ''.join(checksum_val)
checksum_exp = _calculate_german_iban_checksum(potential_iban,
country['iban_fields'])
return checksum_val == checksum_exp
return True | [
"def",
"is_iban",
"(",
"potential_iban",
")",
":",
"path",
"=",
"'data/iban.csv'",
"# always use slash in Python packages",
"filepath",
"=",
"pkg_resources",
".",
"resource_filename",
"(",
"'mpu'",
",",
"path",
")",
"data",
"=",
"mpu",
".",
"io",
".",
"read",
"(... | Check if a string is a valid IBAN number.
IBAN is described in ISO 13616-1:2007 Part 1.
Spaces are ignored.
# CODE
0 = always zero
b = BIC or National Bank code
c = Account number
i = holder's kennitala (national identification number)
k = IBAN check digits
n = Branch number
t = Account type
x = National check digit or character
Examples
--------
>>> is_iban('DE89 3704 0044 0532 0130 00')
True
>>> is_iban('DE89 3704 0044 0532 0130 01')
False | [
"Check",
"if",
"a",
"string",
"is",
"a",
"valid",
"IBAN",
"number",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/string.py#L297-L346 | train | 36,931 |
MartinThoma/mpu | mpu/string.py | _calculate_german_iban_checksum | def _calculate_german_iban_checksum(iban,
iban_fields='DEkkbbbbbbbbcccccccccc'):
"""
Calculate the checksam of the German IBAN format.
Examples
--------
>>> iban = 'DE41500105170123456789'
>>> _calculate_german_iban_checksum(iban)
'41'
"""
number = [value
for field_type, value in zip(iban_fields, iban)
if field_type in ['b', 'c']]
translate = {'0': '0', '1': '1', '2': '2', '3': '3', '4': '4', '5': '5',
'6': '6', '7': '7', '8': '8', '9': '9'}
for i in range(ord('A'), ord('Z') + 1):
translate[chr(i)] = str(i - ord('A') + 10)
for val in 'DE00':
translated = translate[val]
for char in translated:
number.append(char)
number = sum(int(value) * 10**i for i, value in enumerate(number[::-1]))
checksum = 98 - (number % 97)
return str(checksum) | python | def _calculate_german_iban_checksum(iban,
iban_fields='DEkkbbbbbbbbcccccccccc'):
"""
Calculate the checksam of the German IBAN format.
Examples
--------
>>> iban = 'DE41500105170123456789'
>>> _calculate_german_iban_checksum(iban)
'41'
"""
number = [value
for field_type, value in zip(iban_fields, iban)
if field_type in ['b', 'c']]
translate = {'0': '0', '1': '1', '2': '2', '3': '3', '4': '4', '5': '5',
'6': '6', '7': '7', '8': '8', '9': '9'}
for i in range(ord('A'), ord('Z') + 1):
translate[chr(i)] = str(i - ord('A') + 10)
for val in 'DE00':
translated = translate[val]
for char in translated:
number.append(char)
number = sum(int(value) * 10**i for i, value in enumerate(number[::-1]))
checksum = 98 - (number % 97)
return str(checksum) | [
"def",
"_calculate_german_iban_checksum",
"(",
"iban",
",",
"iban_fields",
"=",
"'DEkkbbbbbbbbcccccccccc'",
")",
":",
"number",
"=",
"[",
"value",
"for",
"field_type",
",",
"value",
"in",
"zip",
"(",
"iban_fields",
",",
"iban",
")",
"if",
"field_type",
"in",
"... | Calculate the checksam of the German IBAN format.
Examples
--------
>>> iban = 'DE41500105170123456789'
>>> _calculate_german_iban_checksum(iban)
'41' | [
"Calculate",
"the",
"checksam",
"of",
"the",
"German",
"IBAN",
"format",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/string.py#L349-L373 | train | 36,932 |
MartinThoma/mpu | mpu/string.py | human_readable_bytes | def human_readable_bytes(nb_bytes, suffix='B'):
"""
Convert a byte number into a human readable format.
Parameters
----------
nb_bytes : number
suffix : str, optional (default: "B")
Returns
-------
size_str : str
Examples
--------
>>> human_readable_bytes(123)
'123.0 B'
>>> human_readable_bytes(1025)
'1.0 KiB'
>>> human_readable_bytes(9671406556917033397649423)
'8.0 YiB'
"""
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(nb_bytes) < 1024.0:
return '%3.1f %s%s' % (nb_bytes, unit, suffix)
nb_bytes /= 1024.0
return '%.1f %s%s' % (nb_bytes, 'Yi', suffix) | python | def human_readable_bytes(nb_bytes, suffix='B'):
"""
Convert a byte number into a human readable format.
Parameters
----------
nb_bytes : number
suffix : str, optional (default: "B")
Returns
-------
size_str : str
Examples
--------
>>> human_readable_bytes(123)
'123.0 B'
>>> human_readable_bytes(1025)
'1.0 KiB'
>>> human_readable_bytes(9671406556917033397649423)
'8.0 YiB'
"""
for unit in ['', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi']:
if abs(nb_bytes) < 1024.0:
return '%3.1f %s%s' % (nb_bytes, unit, suffix)
nb_bytes /= 1024.0
return '%.1f %s%s' % (nb_bytes, 'Yi', suffix) | [
"def",
"human_readable_bytes",
"(",
"nb_bytes",
",",
"suffix",
"=",
"'B'",
")",
":",
"for",
"unit",
"in",
"[",
"''",
",",
"'Ki'",
",",
"'Mi'",
",",
"'Gi'",
",",
"'Ti'",
",",
"'Pi'",
",",
"'Ei'",
",",
"'Zi'",
"]",
":",
"if",
"abs",
"(",
"nb_bytes",
... | Convert a byte number into a human readable format.
Parameters
----------
nb_bytes : number
suffix : str, optional (default: "B")
Returns
-------
size_str : str
Examples
--------
>>> human_readable_bytes(123)
'123.0 B'
>>> human_readable_bytes(1025)
'1.0 KiB'
>>> human_readable_bytes(9671406556917033397649423)
'8.0 YiB' | [
"Convert",
"a",
"byte",
"number",
"into",
"a",
"human",
"readable",
"format",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/string.py#L376-L404 | train | 36,933 |
MartinThoma/mpu | mpu/aws.py | list_files | def list_files(bucket, profile_name=None):
"""
List up to 1000 files in a bucket.
Parameters
----------
bucket : str
profile_name : str, optional
AWS profile
Returns
-------
s3_paths : List[str]
"""
session = boto3.Session(profile_name=profile_name)
conn = session.client('s3')
keys = []
ret = conn.list_objects(Bucket=bucket)
print(ret)
if 'Contents' not in ret:
return []
# Make this a generator in future and use the marker:
# https://boto3.readthedocs.io/en/latest/reference/services/
# s3.html#S3.Client.list_objects
for key in conn.list_objects(Bucket=bucket)['Contents']:
keys.append('s3://' + bucket + '/' + key['Key'])
return keys | python | def list_files(bucket, profile_name=None):
"""
List up to 1000 files in a bucket.
Parameters
----------
bucket : str
profile_name : str, optional
AWS profile
Returns
-------
s3_paths : List[str]
"""
session = boto3.Session(profile_name=profile_name)
conn = session.client('s3')
keys = []
ret = conn.list_objects(Bucket=bucket)
print(ret)
if 'Contents' not in ret:
return []
# Make this a generator in future and use the marker:
# https://boto3.readthedocs.io/en/latest/reference/services/
# s3.html#S3.Client.list_objects
for key in conn.list_objects(Bucket=bucket)['Contents']:
keys.append('s3://' + bucket + '/' + key['Key'])
return keys | [
"def",
"list_files",
"(",
"bucket",
",",
"profile_name",
"=",
"None",
")",
":",
"session",
"=",
"boto3",
".",
"Session",
"(",
"profile_name",
"=",
"profile_name",
")",
"conn",
"=",
"session",
".",
"client",
"(",
"'s3'",
")",
"keys",
"=",
"[",
"]",
"ret... | List up to 1000 files in a bucket.
Parameters
----------
bucket : str
profile_name : str, optional
AWS profile
Returns
-------
s3_paths : List[str] | [
"List",
"up",
"to",
"1000",
"files",
"in",
"a",
"bucket",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/aws.py#L15-L41 | train | 36,934 |
MartinThoma/mpu | mpu/aws.py | s3_read | def s3_read(source, profile_name=None):
"""
Read a file from an S3 source.
Parameters
----------
source : str
Path starting with s3://, e.g. 's3://bucket-name/key/foo.bar'
profile_name : str, optional
AWS profile
Returns
-------
content : bytes
Raises
------
botocore.exceptions.NoCredentialsError
Botocore is not able to find your credentials. Either specify
profile_name or add the environment variables AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY and AWS_SESSION_TOKEN.
See https://boto3.readthedocs.io/en/latest/guide/configuration.html
"""
session = boto3.Session(profile_name=profile_name)
s3 = session.client('s3')
bucket_name, key = _s3_path_split(source)
s3_object = s3.get_object(Bucket=bucket_name, Key=key)
body = s3_object['Body']
return body.read() | python | def s3_read(source, profile_name=None):
"""
Read a file from an S3 source.
Parameters
----------
source : str
Path starting with s3://, e.g. 's3://bucket-name/key/foo.bar'
profile_name : str, optional
AWS profile
Returns
-------
content : bytes
Raises
------
botocore.exceptions.NoCredentialsError
Botocore is not able to find your credentials. Either specify
profile_name or add the environment variables AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY and AWS_SESSION_TOKEN.
See https://boto3.readthedocs.io/en/latest/guide/configuration.html
"""
session = boto3.Session(profile_name=profile_name)
s3 = session.client('s3')
bucket_name, key = _s3_path_split(source)
s3_object = s3.get_object(Bucket=bucket_name, Key=key)
body = s3_object['Body']
return body.read() | [
"def",
"s3_read",
"(",
"source",
",",
"profile_name",
"=",
"None",
")",
":",
"session",
"=",
"boto3",
".",
"Session",
"(",
"profile_name",
"=",
"profile_name",
")",
"s3",
"=",
"session",
".",
"client",
"(",
"'s3'",
")",
"bucket_name",
",",
"key",
"=",
... | Read a file from an S3 source.
Parameters
----------
source : str
Path starting with s3://, e.g. 's3://bucket-name/key/foo.bar'
profile_name : str, optional
AWS profile
Returns
-------
content : bytes
Raises
------
botocore.exceptions.NoCredentialsError
Botocore is not able to find your credentials. Either specify
profile_name or add the environment variables AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY and AWS_SESSION_TOKEN.
See https://boto3.readthedocs.io/en/latest/guide/configuration.html | [
"Read",
"a",
"file",
"from",
"an",
"S3",
"source",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/aws.py#L44-L72 | train | 36,935 |
MartinThoma/mpu | mpu/aws.py | s3_download | def s3_download(source, destination,
exists_strategy=ExistsStrategy.RAISE,
profile_name=None):
"""
Copy a file from an S3 source to a local destination.
Parameters
----------
source : str
Path starting with s3://, e.g. 's3://bucket-name/key/foo.bar'
destination : str
exists_strategy : {'raise', 'replace', 'abort'}
What is done when the destination already exists?
* `ExistsStrategy.RAISE` means a RuntimeError is raised,
* `ExistsStrategy.REPLACE` means the local file is replaced,
* `ExistsStrategy.ABORT` means the download is not done.
profile_name : str, optional
AWS profile
Raises
------
botocore.exceptions.NoCredentialsError
Botocore is not able to find your credentials. Either specify
profile_name or add the environment variables AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY and AWS_SESSION_TOKEN.
See https://boto3.readthedocs.io/en/latest/guide/configuration.html
"""
if not isinstance(exists_strategy, ExistsStrategy):
raise ValueError('exists_strategy \'{}\' is not in {}'
.format(exists_strategy, ExistsStrategy))
session = boto3.Session(profile_name=profile_name)
s3 = session.resource('s3')
bucket_name, key = _s3_path_split(source)
if os.path.isfile(destination):
if exists_strategy is ExistsStrategy.RAISE:
raise RuntimeError('File \'{}\' already exists.'
.format(destination))
elif exists_strategy is ExistsStrategy.ABORT:
return
s3.Bucket(bucket_name).download_file(key, destination) | python | def s3_download(source, destination,
exists_strategy=ExistsStrategy.RAISE,
profile_name=None):
"""
Copy a file from an S3 source to a local destination.
Parameters
----------
source : str
Path starting with s3://, e.g. 's3://bucket-name/key/foo.bar'
destination : str
exists_strategy : {'raise', 'replace', 'abort'}
What is done when the destination already exists?
* `ExistsStrategy.RAISE` means a RuntimeError is raised,
* `ExistsStrategy.REPLACE` means the local file is replaced,
* `ExistsStrategy.ABORT` means the download is not done.
profile_name : str, optional
AWS profile
Raises
------
botocore.exceptions.NoCredentialsError
Botocore is not able to find your credentials. Either specify
profile_name or add the environment variables AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY and AWS_SESSION_TOKEN.
See https://boto3.readthedocs.io/en/latest/guide/configuration.html
"""
if not isinstance(exists_strategy, ExistsStrategy):
raise ValueError('exists_strategy \'{}\' is not in {}'
.format(exists_strategy, ExistsStrategy))
session = boto3.Session(profile_name=profile_name)
s3 = session.resource('s3')
bucket_name, key = _s3_path_split(source)
if os.path.isfile(destination):
if exists_strategy is ExistsStrategy.RAISE:
raise RuntimeError('File \'{}\' already exists.'
.format(destination))
elif exists_strategy is ExistsStrategy.ABORT:
return
s3.Bucket(bucket_name).download_file(key, destination) | [
"def",
"s3_download",
"(",
"source",
",",
"destination",
",",
"exists_strategy",
"=",
"ExistsStrategy",
".",
"RAISE",
",",
"profile_name",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"exists_strategy",
",",
"ExistsStrategy",
")",
":",
"raise",
"Valu... | Copy a file from an S3 source to a local destination.
Parameters
----------
source : str
Path starting with s3://, e.g. 's3://bucket-name/key/foo.bar'
destination : str
exists_strategy : {'raise', 'replace', 'abort'}
What is done when the destination already exists?
* `ExistsStrategy.RAISE` means a RuntimeError is raised,
* `ExistsStrategy.REPLACE` means the local file is replaced,
* `ExistsStrategy.ABORT` means the download is not done.
profile_name : str, optional
AWS profile
Raises
------
botocore.exceptions.NoCredentialsError
Botocore is not able to find your credentials. Either specify
profile_name or add the environment variables AWS_ACCESS_KEY_ID,
AWS_SECRET_ACCESS_KEY and AWS_SESSION_TOKEN.
See https://boto3.readthedocs.io/en/latest/guide/configuration.html | [
"Copy",
"a",
"file",
"from",
"an",
"S3",
"source",
"to",
"a",
"local",
"destination",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/aws.py#L83-L122 | train | 36,936 |
MartinThoma/mpu | mpu/aws.py | s3_upload | def s3_upload(source, destination, profile_name=None):
"""
Copy a file from a local source to an S3 destination.
Parameters
----------
source : str
destination : str
Path starting with s3://, e.g. 's3://bucket-name/key/foo.bar'
profile_name : str, optional
AWS profile
"""
session = boto3.Session(profile_name=profile_name)
s3 = session.resource('s3')
bucket_name, key = _s3_path_split(destination)
with open(source, 'rb') as data:
s3.Bucket(bucket_name).put_object(Key=key, Body=data) | python | def s3_upload(source, destination, profile_name=None):
"""
Copy a file from a local source to an S3 destination.
Parameters
----------
source : str
destination : str
Path starting with s3://, e.g. 's3://bucket-name/key/foo.bar'
profile_name : str, optional
AWS profile
"""
session = boto3.Session(profile_name=profile_name)
s3 = session.resource('s3')
bucket_name, key = _s3_path_split(destination)
with open(source, 'rb') as data:
s3.Bucket(bucket_name).put_object(Key=key, Body=data) | [
"def",
"s3_upload",
"(",
"source",
",",
"destination",
",",
"profile_name",
"=",
"None",
")",
":",
"session",
"=",
"boto3",
".",
"Session",
"(",
"profile_name",
"=",
"profile_name",
")",
"s3",
"=",
"session",
".",
"resource",
"(",
"'s3'",
")",
"bucket_name... | Copy a file from a local source to an S3 destination.
Parameters
----------
source : str
destination : str
Path starting with s3://, e.g. 's3://bucket-name/key/foo.bar'
profile_name : str, optional
AWS profile | [
"Copy",
"a",
"file",
"from",
"a",
"local",
"source",
"to",
"an",
"S3",
"destination",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/aws.py#L125-L141 | train | 36,937 |
MartinThoma/mpu | mpu/aws.py | _s3_path_split | def _s3_path_split(s3_path):
"""
Split an S3 path into bucket and key.
Parameters
----------
s3_path : str
Returns
-------
splitted : (str, str)
(bucket, key)
Examples
--------
>>> _s3_path_split('s3://my-bucket/foo/bar.jpg')
S3Path(bucket_name='my-bucket', key='foo/bar.jpg')
"""
if not s3_path.startswith('s3://'):
raise ValueError('s3_path is expected to start with \'s3://\', '
'but was {}'.format(s3_path))
bucket_key = s3_path[len('s3://'):]
bucket_name, key = bucket_key.split('/', 1)
return S3Path(bucket_name, key) | python | def _s3_path_split(s3_path):
"""
Split an S3 path into bucket and key.
Parameters
----------
s3_path : str
Returns
-------
splitted : (str, str)
(bucket, key)
Examples
--------
>>> _s3_path_split('s3://my-bucket/foo/bar.jpg')
S3Path(bucket_name='my-bucket', key='foo/bar.jpg')
"""
if not s3_path.startswith('s3://'):
raise ValueError('s3_path is expected to start with \'s3://\', '
'but was {}'.format(s3_path))
bucket_key = s3_path[len('s3://'):]
bucket_name, key = bucket_key.split('/', 1)
return S3Path(bucket_name, key) | [
"def",
"_s3_path_split",
"(",
"s3_path",
")",
":",
"if",
"not",
"s3_path",
".",
"startswith",
"(",
"'s3://'",
")",
":",
"raise",
"ValueError",
"(",
"'s3_path is expected to start with \\'s3://\\', '",
"'but was {}'",
".",
"format",
"(",
"s3_path",
")",
")",
"bucke... | Split an S3 path into bucket and key.
Parameters
----------
s3_path : str
Returns
-------
splitted : (str, str)
(bucket, key)
Examples
--------
>>> _s3_path_split('s3://my-bucket/foo/bar.jpg')
S3Path(bucket_name='my-bucket', key='foo/bar.jpg') | [
"Split",
"an",
"S3",
"path",
"into",
"bucket",
"and",
"key",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/aws.py#L147-L170 | train | 36,938 |
MartinThoma/mpu | mpu/image.py | get_meta | def get_meta(filepath):
"""
Get meta-information of an image.
Parameters
----------
filepath : str
Returns
-------
meta : dict
"""
meta = {}
try:
from PIL import Image
with Image.open(filepath) as img:
width, height = img.size
meta['width'] = width
meta['height'] = height
meta['channels'] = len(img.mode) # RGB, RGBA - does this always work?
except ImportError:
pass
# Get times - creation, last edit, last open
meta['file'] = mpu.io.get_file_meta(filepath)
return meta | python | def get_meta(filepath):
"""
Get meta-information of an image.
Parameters
----------
filepath : str
Returns
-------
meta : dict
"""
meta = {}
try:
from PIL import Image
with Image.open(filepath) as img:
width, height = img.size
meta['width'] = width
meta['height'] = height
meta['channels'] = len(img.mode) # RGB, RGBA - does this always work?
except ImportError:
pass
# Get times - creation, last edit, last open
meta['file'] = mpu.io.get_file_meta(filepath)
return meta | [
"def",
"get_meta",
"(",
"filepath",
")",
":",
"meta",
"=",
"{",
"}",
"try",
":",
"from",
"PIL",
"import",
"Image",
"with",
"Image",
".",
"open",
"(",
"filepath",
")",
"as",
"img",
":",
"width",
",",
"height",
"=",
"img",
".",
"size",
"meta",
"[",
... | Get meta-information of an image.
Parameters
----------
filepath : str
Returns
-------
meta : dict | [
"Get",
"meta",
"-",
"information",
"of",
"an",
"image",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/image.py#L10-L35 | train | 36,939 |
MartinThoma/mpu | mpu/datastructures.py | flatten | def flatten(iterable, string_flattening=False):
"""
Flatten an given iterable of iterables into one list.
Parameters
----------
iterable : iterable
string_flattening : bool
If this is False, then strings are NOT flattened
Returns
-------
flat_list : List
Examples
--------
>>> flatten([1, [2, [3]]])
[1, 2, 3]
>>> flatten(((1, 2), (3, 4), (5, 6)))
[1, 2, 3, 4, 5, 6]
>>> flatten(EList([EList([1, 2]), (3, [4, [[5]]])]))
[1, 2, 3, 4, 5]
"""
flat_list = []
for item in iterable:
is_iterable = (isinstance(item, collections.Iterable) and
(string_flattening or
(not string_flattening and not isinstance(item, str))
))
if is_iterable:
flat_list.extend(flatten(item))
else:
flat_list.append(item)
return flat_list | python | def flatten(iterable, string_flattening=False):
"""
Flatten an given iterable of iterables into one list.
Parameters
----------
iterable : iterable
string_flattening : bool
If this is False, then strings are NOT flattened
Returns
-------
flat_list : List
Examples
--------
>>> flatten([1, [2, [3]]])
[1, 2, 3]
>>> flatten(((1, 2), (3, 4), (5, 6)))
[1, 2, 3, 4, 5, 6]
>>> flatten(EList([EList([1, 2]), (3, [4, [[5]]])]))
[1, 2, 3, 4, 5]
"""
flat_list = []
for item in iterable:
is_iterable = (isinstance(item, collections.Iterable) and
(string_flattening or
(not string_flattening and not isinstance(item, str))
))
if is_iterable:
flat_list.extend(flatten(item))
else:
flat_list.append(item)
return flat_list | [
"def",
"flatten",
"(",
"iterable",
",",
"string_flattening",
"=",
"False",
")",
":",
"flat_list",
"=",
"[",
"]",
"for",
"item",
"in",
"iterable",
":",
"is_iterable",
"=",
"(",
"isinstance",
"(",
"item",
",",
"collections",
".",
"Iterable",
")",
"and",
"(... | Flatten an given iterable of iterables into one list.
Parameters
----------
iterable : iterable
string_flattening : bool
If this is False, then strings are NOT flattened
Returns
-------
flat_list : List
Examples
--------
>>> flatten([1, [2, [3]]])
[1, 2, 3]
>>> flatten(((1, 2), (3, 4), (5, 6)))
[1, 2, 3, 4, 5, 6]
>>> flatten(EList([EList([1, 2]), (3, [4, [[5]]])]))
[1, 2, 3, 4, 5] | [
"Flatten",
"an",
"given",
"iterable",
"of",
"iterables",
"into",
"one",
"list",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/datastructures.py#L57-L92 | train | 36,940 |
MartinThoma/mpu | mpu/datastructures.py | dict_merge | def dict_merge(dict_left, dict_right, merge_method='take_left_shallow'):
"""
Merge two dictionaries.
This method does NOT modify dict_left or dict_right!
Apply this method multiple times if the dictionary is nested.
Parameters
----------
dict_left : dict
dict_right: dict
merge_method : {'take_left_shallow', 'take_left_deep', \
'take_right_shallow', 'take_right_deep', \
'sum'}
* take_left_shallow: Use both dictinaries. If both have the same key,
take the value of dict_left
* take_left_deep : If both dictionaries have the same key and the value
is a dict for both again, then merge those sub-dictionaries
* take_right_shallow : See take_left_shallow
* take_right_deep : See take_left_deep
* sum : sum up both dictionaries. If one does not have a value for a
key of the other, assume the missing value to be zero.
Returns
-------
merged_dict : dict
Examples
--------
>>> dict_merge({'a': 1, 'b': 2}, {'c': 3}) == {'a': 1, 'b': 2, 'c': 3}
True
>>> out = dict_merge({'a': {'A': 1}},
... {'a': {'A': 2, 'B': 3}}, 'take_left_deep')
>>> expected = {'a': {'A': 1, 'B': 3}}
>>> out == expected
True
>>> out = dict_merge({'a': {'A': 1}},
... {'a': {'A': 2, 'B': 3}}, 'take_left_shallow')
>>> expected = {'a': {'A': 1}}
>>> out == expected
True
>>> out = dict_merge({'a': 1, 'b': {'c': 2}},
... {'b': {'c': 3, 'd': 4}},
... 'sum')
>>> expected = {'a': 1, 'b': {'c': 5, 'd': 4}}
>>> out == expected
True
"""
new_dict = {}
if merge_method in ['take_right_shallow', 'take_right_deep']:
return _dict_merge_right(dict_left, dict_right, merge_method)
elif merge_method == 'take_left_shallow':
return dict_merge(dict_right, dict_left, 'take_right_shallow')
elif merge_method == 'take_left_deep':
return dict_merge(dict_right, dict_left, 'take_right_deep')
elif merge_method == 'sum':
new_dict = deepcopy(dict_left)
for key, value in dict_right.items():
if key not in new_dict:
new_dict[key] = value
else:
recurse = isinstance(value, dict)
if recurse:
new_dict[key] = dict_merge(dict_left[key],
dict_right[key],
merge_method='sum')
else:
new_dict[key] = dict_left[key] + dict_right[key]
return new_dict
else:
raise NotImplementedError('merge_method=\'{}\' is not known.'
.format(merge_method)) | python | def dict_merge(dict_left, dict_right, merge_method='take_left_shallow'):
"""
Merge two dictionaries.
This method does NOT modify dict_left or dict_right!
Apply this method multiple times if the dictionary is nested.
Parameters
----------
dict_left : dict
dict_right: dict
merge_method : {'take_left_shallow', 'take_left_deep', \
'take_right_shallow', 'take_right_deep', \
'sum'}
* take_left_shallow: Use both dictinaries. If both have the same key,
take the value of dict_left
* take_left_deep : If both dictionaries have the same key and the value
is a dict for both again, then merge those sub-dictionaries
* take_right_shallow : See take_left_shallow
* take_right_deep : See take_left_deep
* sum : sum up both dictionaries. If one does not have a value for a
key of the other, assume the missing value to be zero.
Returns
-------
merged_dict : dict
Examples
--------
>>> dict_merge({'a': 1, 'b': 2}, {'c': 3}) == {'a': 1, 'b': 2, 'c': 3}
True
>>> out = dict_merge({'a': {'A': 1}},
... {'a': {'A': 2, 'B': 3}}, 'take_left_deep')
>>> expected = {'a': {'A': 1, 'B': 3}}
>>> out == expected
True
>>> out = dict_merge({'a': {'A': 1}},
... {'a': {'A': 2, 'B': 3}}, 'take_left_shallow')
>>> expected = {'a': {'A': 1}}
>>> out == expected
True
>>> out = dict_merge({'a': 1, 'b': {'c': 2}},
... {'b': {'c': 3, 'd': 4}},
... 'sum')
>>> expected = {'a': 1, 'b': {'c': 5, 'd': 4}}
>>> out == expected
True
"""
new_dict = {}
if merge_method in ['take_right_shallow', 'take_right_deep']:
return _dict_merge_right(dict_left, dict_right, merge_method)
elif merge_method == 'take_left_shallow':
return dict_merge(dict_right, dict_left, 'take_right_shallow')
elif merge_method == 'take_left_deep':
return dict_merge(dict_right, dict_left, 'take_right_deep')
elif merge_method == 'sum':
new_dict = deepcopy(dict_left)
for key, value in dict_right.items():
if key not in new_dict:
new_dict[key] = value
else:
recurse = isinstance(value, dict)
if recurse:
new_dict[key] = dict_merge(dict_left[key],
dict_right[key],
merge_method='sum')
else:
new_dict[key] = dict_left[key] + dict_right[key]
return new_dict
else:
raise NotImplementedError('merge_method=\'{}\' is not known.'
.format(merge_method)) | [
"def",
"dict_merge",
"(",
"dict_left",
",",
"dict_right",
",",
"merge_method",
"=",
"'take_left_shallow'",
")",
":",
"new_dict",
"=",
"{",
"}",
"if",
"merge_method",
"in",
"[",
"'take_right_shallow'",
",",
"'take_right_deep'",
"]",
":",
"return",
"_dict_merge_righ... | Merge two dictionaries.
This method does NOT modify dict_left or dict_right!
Apply this method multiple times if the dictionary is nested.
Parameters
----------
dict_left : dict
dict_right: dict
merge_method : {'take_left_shallow', 'take_left_deep', \
'take_right_shallow', 'take_right_deep', \
'sum'}
* take_left_shallow: Use both dictinaries. If both have the same key,
take the value of dict_left
* take_left_deep : If both dictionaries have the same key and the value
is a dict for both again, then merge those sub-dictionaries
* take_right_shallow : See take_left_shallow
* take_right_deep : See take_left_deep
* sum : sum up both dictionaries. If one does not have a value for a
key of the other, assume the missing value to be zero.
Returns
-------
merged_dict : dict
Examples
--------
>>> dict_merge({'a': 1, 'b': 2}, {'c': 3}) == {'a': 1, 'b': 2, 'c': 3}
True
>>> out = dict_merge({'a': {'A': 1}},
... {'a': {'A': 2, 'B': 3}}, 'take_left_deep')
>>> expected = {'a': {'A': 1, 'B': 3}}
>>> out == expected
True
>>> out = dict_merge({'a': {'A': 1}},
... {'a': {'A': 2, 'B': 3}}, 'take_left_shallow')
>>> expected = {'a': {'A': 1}}
>>> out == expected
True
>>> out = dict_merge({'a': 1, 'b': {'c': 2}},
... {'b': {'c': 3, 'd': 4}},
... 'sum')
>>> expected = {'a': 1, 'b': {'c': 5, 'd': 4}}
>>> out == expected
True | [
"Merge",
"two",
"dictionaries",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/datastructures.py#L95-L170 | train | 36,941 |
MartinThoma/mpu | mpu/datastructures.py | _dict_merge_right | def _dict_merge_right(dict_left, dict_right, merge_method):
"""See documentation of mpu.datastructures.dict_merge."""
new_dict = deepcopy(dict_left)
for key, value in dict_right.items():
if key not in new_dict:
new_dict[key] = value
else:
recurse = (merge_method == 'take_right_deep' and
isinstance(dict_left[key], dict) and
isinstance(dict_right[key], dict))
if recurse:
new_dict[key] = dict_merge(dict_left[key],
dict_right[key],
merge_method='take_right_deep')
else:
new_dict[key] = value
return new_dict | python | def _dict_merge_right(dict_left, dict_right, merge_method):
"""See documentation of mpu.datastructures.dict_merge."""
new_dict = deepcopy(dict_left)
for key, value in dict_right.items():
if key not in new_dict:
new_dict[key] = value
else:
recurse = (merge_method == 'take_right_deep' and
isinstance(dict_left[key], dict) and
isinstance(dict_right[key], dict))
if recurse:
new_dict[key] = dict_merge(dict_left[key],
dict_right[key],
merge_method='take_right_deep')
else:
new_dict[key] = value
return new_dict | [
"def",
"_dict_merge_right",
"(",
"dict_left",
",",
"dict_right",
",",
"merge_method",
")",
":",
"new_dict",
"=",
"deepcopy",
"(",
"dict_left",
")",
"for",
"key",
",",
"value",
"in",
"dict_right",
".",
"items",
"(",
")",
":",
"if",
"key",
"not",
"in",
"ne... | See documentation of mpu.datastructures.dict_merge. | [
"See",
"documentation",
"of",
"mpu",
".",
"datastructures",
".",
"dict_merge",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/datastructures.py#L173-L189 | train | 36,942 |
MartinThoma/mpu | mpu/datastructures.py | does_keychain_exist | def does_keychain_exist(dict_, list_):
"""
Check if a sequence of keys exist in a nested dictionary.
Parameters
----------
dict_ : Dict[str/int/tuple, Any]
list_ : List[str/int/tuple]
Returns
-------
keychain_exists : bool
Examples
--------
>>> d = {'a': {'b': {'c': 'd'}}}
>>> l_exists = ['a', 'b']
>>> does_keychain_exist(d, l_exists)
True
>>> l_no_existant = ['a', 'c']
>>> does_keychain_exist(d, l_no_existant)
False
"""
for key in list_:
if key not in dict_:
return False
dict_ = dict_[key]
return True | python | def does_keychain_exist(dict_, list_):
"""
Check if a sequence of keys exist in a nested dictionary.
Parameters
----------
dict_ : Dict[str/int/tuple, Any]
list_ : List[str/int/tuple]
Returns
-------
keychain_exists : bool
Examples
--------
>>> d = {'a': {'b': {'c': 'd'}}}
>>> l_exists = ['a', 'b']
>>> does_keychain_exist(d, l_exists)
True
>>> l_no_existant = ['a', 'c']
>>> does_keychain_exist(d, l_no_existant)
False
"""
for key in list_:
if key not in dict_:
return False
dict_ = dict_[key]
return True | [
"def",
"does_keychain_exist",
"(",
"dict_",
",",
"list_",
")",
":",
"for",
"key",
"in",
"list_",
":",
"if",
"key",
"not",
"in",
"dict_",
":",
"return",
"False",
"dict_",
"=",
"dict_",
"[",
"key",
"]",
"return",
"True"
] | Check if a sequence of keys exist in a nested dictionary.
Parameters
----------
dict_ : Dict[str/int/tuple, Any]
list_ : List[str/int/tuple]
Returns
-------
keychain_exists : bool
Examples
--------
>>> d = {'a': {'b': {'c': 'd'}}}
>>> l_exists = ['a', 'b']
>>> does_keychain_exist(d, l_exists)
True
>>> l_no_existant = ['a', 'c']
>>> does_keychain_exist(d, l_no_existant)
False | [
"Check",
"if",
"a",
"sequence",
"of",
"keys",
"exist",
"in",
"a",
"nested",
"dictionary",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/datastructures.py#L224-L252 | train | 36,943 |
MartinThoma/mpu | mpu/datastructures.py | EList.remove_indices | def remove_indices(self, indices):
"""
Remove rows by which have the given indices.
Parameters
----------
indices : list
Returns
-------
filtered_list : EList
"""
new_list = []
for index, element in enumerate(self):
if index not in indices:
new_list.append(element)
return EList(new_list) | python | def remove_indices(self, indices):
"""
Remove rows by which have the given indices.
Parameters
----------
indices : list
Returns
-------
filtered_list : EList
"""
new_list = []
for index, element in enumerate(self):
if index not in indices:
new_list.append(element)
return EList(new_list) | [
"def",
"remove_indices",
"(",
"self",
",",
"indices",
")",
":",
"new_list",
"=",
"[",
"]",
"for",
"index",
",",
"element",
"in",
"enumerate",
"(",
"self",
")",
":",
"if",
"index",
"not",
"in",
"indices",
":",
"new_list",
".",
"append",
"(",
"element",
... | Remove rows by which have the given indices.
Parameters
----------
indices : list
Returns
-------
filtered_list : EList | [
"Remove",
"rows",
"by",
"which",
"have",
"the",
"given",
"indices",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/datastructures.py#L38-L54 | train | 36,944 |
MartinThoma/mpu | mpu/path.py | get_all_files | def get_all_files(root, followlinks=False):
"""
Get all files within the given root directory.
Note that this list is not ordered.
Parameters
----------
root : str
Path to a directory
followlinks : bool, optional (default: False)
Returns
-------
filepaths : list
List of absolute paths to files
"""
filepaths = []
for path, _, files in os.walk(root, followlinks=followlinks):
for name in files:
filepaths.append(os.path.abspath(os.path.join(path, name)))
return filepaths | python | def get_all_files(root, followlinks=False):
"""
Get all files within the given root directory.
Note that this list is not ordered.
Parameters
----------
root : str
Path to a directory
followlinks : bool, optional (default: False)
Returns
-------
filepaths : list
List of absolute paths to files
"""
filepaths = []
for path, _, files in os.walk(root, followlinks=followlinks):
for name in files:
filepaths.append(os.path.abspath(os.path.join(path, name)))
return filepaths | [
"def",
"get_all_files",
"(",
"root",
",",
"followlinks",
"=",
"False",
")",
":",
"filepaths",
"=",
"[",
"]",
"for",
"path",
",",
"_",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"root",
",",
"followlinks",
"=",
"followlinks",
")",
":",
"for",
"name",... | Get all files within the given root directory.
Note that this list is not ordered.
Parameters
----------
root : str
Path to a directory
followlinks : bool, optional (default: False)
Returns
-------
filepaths : list
List of absolute paths to files | [
"Get",
"all",
"files",
"within",
"the",
"given",
"root",
"directory",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/path.py#L11-L32 | train | 36,945 |
MartinThoma/mpu | mpu/path.py | get_from_package | def get_from_package(package_name, path):
"""
Get the absolute path to a file in a package.
Parameters
----------
package_name : str
e.g. 'mpu'
path : str
Path within a package
Returns
-------
filepath : str
"""
filepath = pkg_resources.resource_filename(package_name, path)
return os.path.abspath(filepath) | python | def get_from_package(package_name, path):
"""
Get the absolute path to a file in a package.
Parameters
----------
package_name : str
e.g. 'mpu'
path : str
Path within a package
Returns
-------
filepath : str
"""
filepath = pkg_resources.resource_filename(package_name, path)
return os.path.abspath(filepath) | [
"def",
"get_from_package",
"(",
"package_name",
",",
"path",
")",
":",
"filepath",
"=",
"pkg_resources",
".",
"resource_filename",
"(",
"package_name",
",",
"path",
")",
"return",
"os",
".",
"path",
".",
"abspath",
"(",
"filepath",
")"
] | Get the absolute path to a file in a package.
Parameters
----------
package_name : str
e.g. 'mpu'
path : str
Path within a package
Returns
-------
filepath : str | [
"Get",
"the",
"absolute",
"path",
"to",
"a",
"file",
"in",
"a",
"package",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/path.py#L35-L51 | train | 36,946 |
MartinThoma/mpu | mpu/pd.py | example_df | def example_df():
"""Create an example dataframe."""
country_names = ['Germany',
'France',
'Indonesia',
'Ireland',
'Spain',
'Vatican']
population = [82521653, 66991000, 255461700, 4761865, 46549045, None]
population_time = [dt.datetime(2016, 12, 1),
dt.datetime(2017, 1, 1),
dt.datetime(2017, 1, 1),
None, # Ireland
dt.datetime(2017, 6, 1), # Spain
None,
]
euro = [True, True, False, True, True, True]
df = pd.DataFrame({'country': country_names,
'population': population,
'population_time': population_time,
'EUR': euro})
df = df[['country', 'population', 'population_time', 'EUR']]
return df | python | def example_df():
"""Create an example dataframe."""
country_names = ['Germany',
'France',
'Indonesia',
'Ireland',
'Spain',
'Vatican']
population = [82521653, 66991000, 255461700, 4761865, 46549045, None]
population_time = [dt.datetime(2016, 12, 1),
dt.datetime(2017, 1, 1),
dt.datetime(2017, 1, 1),
None, # Ireland
dt.datetime(2017, 6, 1), # Spain
None,
]
euro = [True, True, False, True, True, True]
df = pd.DataFrame({'country': country_names,
'population': population,
'population_time': population_time,
'EUR': euro})
df = df[['country', 'population', 'population_time', 'EUR']]
return df | [
"def",
"example_df",
"(",
")",
":",
"country_names",
"=",
"[",
"'Germany'",
",",
"'France'",
",",
"'Indonesia'",
",",
"'Ireland'",
",",
"'Spain'",
",",
"'Vatican'",
"]",
"population",
"=",
"[",
"82521653",
",",
"66991000",
",",
"255461700",
",",
"4761865",
... | Create an example dataframe. | [
"Create",
"an",
"example",
"dataframe",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/pd.py#L24-L46 | train | 36,947 |
MartinThoma/mpu | mpu/pd.py | describe | def describe(df, dtype=None):
"""
Print a description of a Pandas dataframe.
Parameters
----------
df : Pandas.DataFrame
dtype : dict
Maps column names to types
"""
if dtype is None:
dtype = {}
print('Number of datapoints: {datapoints}'.format(datapoints=len(df)))
column_info, column_info_meta = _get_column_info(df, dtype)
if len(column_info['int']) > 0:
_describe_int(df, column_info)
if len(column_info['float']) > 0:
_describe_float(df, column_info)
if len(column_info['category']) > 0:
_describe_category(df, column_info, column_info_meta)
if len(column_info['time']) > 0:
_describe_time(df, column_info, column_info_meta)
if len(column_info['other']) > 0:
_describe_other(df, column_info, column_info_meta)
column_types = {}
for column_type, columns in column_info.items():
for column_name in columns:
if column_type == 'other':
column_type = 'str'
column_types[column_name] = column_type
return column_types | python | def describe(df, dtype=None):
"""
Print a description of a Pandas dataframe.
Parameters
----------
df : Pandas.DataFrame
dtype : dict
Maps column names to types
"""
if dtype is None:
dtype = {}
print('Number of datapoints: {datapoints}'.format(datapoints=len(df)))
column_info, column_info_meta = _get_column_info(df, dtype)
if len(column_info['int']) > 0:
_describe_int(df, column_info)
if len(column_info['float']) > 0:
_describe_float(df, column_info)
if len(column_info['category']) > 0:
_describe_category(df, column_info, column_info_meta)
if len(column_info['time']) > 0:
_describe_time(df, column_info, column_info_meta)
if len(column_info['other']) > 0:
_describe_other(df, column_info, column_info_meta)
column_types = {}
for column_type, columns in column_info.items():
for column_name in columns:
if column_type == 'other':
column_type = 'str'
column_types[column_name] = column_type
return column_types | [
"def",
"describe",
"(",
"df",
",",
"dtype",
"=",
"None",
")",
":",
"if",
"dtype",
"is",
"None",
":",
"dtype",
"=",
"{",
"}",
"print",
"(",
"'Number of datapoints: {datapoints}'",
".",
"format",
"(",
"datapoints",
"=",
"len",
"(",
"df",
")",
")",
")",
... | Print a description of a Pandas dataframe.
Parameters
----------
df : Pandas.DataFrame
dtype : dict
Maps column names to types | [
"Print",
"a",
"description",
"of",
"a",
"Pandas",
"dataframe",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/pd.py#L49-L85 | train | 36,948 |
MartinThoma/mpu | mpu/ml.py | indices2one_hot | def indices2one_hot(indices, nb_classes):
"""
Convert an iterable of indices to one-hot encoded list.
You might also be interested in sklearn.preprocessing.OneHotEncoder
Parameters
----------
indices : iterable
iterable of indices
nb_classes : int
Number of classes
dtype : type
Returns
-------
one_hot : list
Examples
--------
>>> indices2one_hot([0, 1, 1], 3)
[[1, 0, 0], [0, 1, 0], [0, 1, 0]]
>>> indices2one_hot([0, 1, 1], 2)
[[1, 0], [0, 1], [0, 1]]
"""
if nb_classes < 1:
raise ValueError('nb_classes={}, but positive number expected'
.format(nb_classes))
one_hot = []
for index in indices:
one_hot.append([0] * nb_classes)
one_hot[-1][index] = 1
return one_hot | python | def indices2one_hot(indices, nb_classes):
"""
Convert an iterable of indices to one-hot encoded list.
You might also be interested in sklearn.preprocessing.OneHotEncoder
Parameters
----------
indices : iterable
iterable of indices
nb_classes : int
Number of classes
dtype : type
Returns
-------
one_hot : list
Examples
--------
>>> indices2one_hot([0, 1, 1], 3)
[[1, 0, 0], [0, 1, 0], [0, 1, 0]]
>>> indices2one_hot([0, 1, 1], 2)
[[1, 0], [0, 1], [0, 1]]
"""
if nb_classes < 1:
raise ValueError('nb_classes={}, but positive number expected'
.format(nb_classes))
one_hot = []
for index in indices:
one_hot.append([0] * nb_classes)
one_hot[-1][index] = 1
return one_hot | [
"def",
"indices2one_hot",
"(",
"indices",
",",
"nb_classes",
")",
":",
"if",
"nb_classes",
"<",
"1",
":",
"raise",
"ValueError",
"(",
"'nb_classes={}, but positive number expected'",
".",
"format",
"(",
"nb_classes",
")",
")",
"one_hot",
"=",
"[",
"]",
"for",
... | Convert an iterable of indices to one-hot encoded list.
You might also be interested in sklearn.preprocessing.OneHotEncoder
Parameters
----------
indices : iterable
iterable of indices
nb_classes : int
Number of classes
dtype : type
Returns
-------
one_hot : list
Examples
--------
>>> indices2one_hot([0, 1, 1], 3)
[[1, 0, 0], [0, 1, 0], [0, 1, 0]]
>>> indices2one_hot([0, 1, 1], 2)
[[1, 0], [0, 1], [0, 1]] | [
"Convert",
"an",
"iterable",
"of",
"indices",
"to",
"one",
"-",
"hot",
"encoded",
"list",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/ml.py#L11-L44 | train | 36,949 |
MartinThoma/mpu | mpu/ml.py | one_hot2indices | def one_hot2indices(one_hots):
"""
Convert an iterable of one-hot encoded targets to a list of indices.
Parameters
----------
one_hot : list
Returns
-------
indices : list
Examples
--------
>>> one_hot2indices([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
[0, 1, 2]
>>> one_hot2indices([[1, 0], [1, 0], [0, 1]])
[0, 0, 1]
"""
indices = []
for one_hot in one_hots:
indices.append(argmax(one_hot))
return indices | python | def one_hot2indices(one_hots):
"""
Convert an iterable of one-hot encoded targets to a list of indices.
Parameters
----------
one_hot : list
Returns
-------
indices : list
Examples
--------
>>> one_hot2indices([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
[0, 1, 2]
>>> one_hot2indices([[1, 0], [1, 0], [0, 1]])
[0, 0, 1]
"""
indices = []
for one_hot in one_hots:
indices.append(argmax(one_hot))
return indices | [
"def",
"one_hot2indices",
"(",
"one_hots",
")",
":",
"indices",
"=",
"[",
"]",
"for",
"one_hot",
"in",
"one_hots",
":",
"indices",
".",
"append",
"(",
"argmax",
"(",
"one_hot",
")",
")",
"return",
"indices"
] | Convert an iterable of one-hot encoded targets to a list of indices.
Parameters
----------
one_hot : list
Returns
-------
indices : list
Examples
--------
>>> one_hot2indices([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
[0, 1, 2]
>>> one_hot2indices([[1, 0], [1, 0], [0, 1]])
[0, 0, 1] | [
"Convert",
"an",
"iterable",
"of",
"one",
"-",
"hot",
"encoded",
"targets",
"to",
"a",
"list",
"of",
"indices",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/ml.py#L47-L70 | train | 36,950 |
MartinThoma/mpu | mpu/math.py | factorize | def factorize(number):
"""
Get the prime factors of an integer except for 1.
Parameters
----------
number : int
Returns
-------
primes : iterable
Examples
--------
>>> factorize(-17)
[-1, 17]
>>> factorize(8)
[2, 2, 2]
>>> factorize(3**25)
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3]
>>> factorize(1)
[1]
"""
if not isinstance(number, int):
raise ValueError('integer expected, but type(number)={}'
.format(type(number)))
if number < 0:
return [-1] + factorize(number * (-1))
elif number == 0:
raise ValueError('All primes are prime factors of 0.')
else:
for i in range(2, int(math_stl.ceil(number**0.5)) + 1):
if number % i == 0:
if i == number:
return [i]
else:
return [i] + factorize(int(number / i))
return [number] | python | def factorize(number):
"""
Get the prime factors of an integer except for 1.
Parameters
----------
number : int
Returns
-------
primes : iterable
Examples
--------
>>> factorize(-17)
[-1, 17]
>>> factorize(8)
[2, 2, 2]
>>> factorize(3**25)
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3]
>>> factorize(1)
[1]
"""
if not isinstance(number, int):
raise ValueError('integer expected, but type(number)={}'
.format(type(number)))
if number < 0:
return [-1] + factorize(number * (-1))
elif number == 0:
raise ValueError('All primes are prime factors of 0.')
else:
for i in range(2, int(math_stl.ceil(number**0.5)) + 1):
if number % i == 0:
if i == number:
return [i]
else:
return [i] + factorize(int(number / i))
return [number] | [
"def",
"factorize",
"(",
"number",
")",
":",
"if",
"not",
"isinstance",
"(",
"number",
",",
"int",
")",
":",
"raise",
"ValueError",
"(",
"'integer expected, but type(number)={}'",
".",
"format",
"(",
"type",
"(",
"number",
")",
")",
")",
"if",
"number",
"<... | Get the prime factors of an integer except for 1.
Parameters
----------
number : int
Returns
-------
primes : iterable
Examples
--------
>>> factorize(-17)
[-1, 17]
>>> factorize(8)
[2, 2, 2]
>>> factorize(3**25)
[3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3]
>>> factorize(1)
[1] | [
"Get",
"the",
"prime",
"factors",
"of",
"an",
"integer",
"except",
"for",
"1",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/math.py#L64-L101 | train | 36,951 |
MartinThoma/mpu | mpu/math.py | argmax | def argmax(iterable):
"""
Find the first index of the biggest value in the iterable.
Parameters
----------
iterable : iterable
Returns
-------
argmax : int
Examples
--------
>>> argmax([0, 0, 0])
0
>>> argmax([1, 0, 0])
0
>>> argmax([0, 1, 0])
1
>>> argmax([])
"""
max_value = None
max_index = None
for index, value in enumerate(iterable):
if (max_value is None) or max_value < value:
max_value = value
max_index = index
return max_index | python | def argmax(iterable):
"""
Find the first index of the biggest value in the iterable.
Parameters
----------
iterable : iterable
Returns
-------
argmax : int
Examples
--------
>>> argmax([0, 0, 0])
0
>>> argmax([1, 0, 0])
0
>>> argmax([0, 1, 0])
1
>>> argmax([])
"""
max_value = None
max_index = None
for index, value in enumerate(iterable):
if (max_value is None) or max_value < value:
max_value = value
max_index = index
return max_index | [
"def",
"argmax",
"(",
"iterable",
")",
":",
"max_value",
"=",
"None",
"max_index",
"=",
"None",
"for",
"index",
",",
"value",
"in",
"enumerate",
"(",
"iterable",
")",
":",
"if",
"(",
"max_value",
"is",
"None",
")",
"or",
"max_value",
"<",
"value",
":",... | Find the first index of the biggest value in the iterable.
Parameters
----------
iterable : iterable
Returns
-------
argmax : int
Examples
--------
>>> argmax([0, 0, 0])
0
>>> argmax([1, 0, 0])
0
>>> argmax([0, 1, 0])
1
>>> argmax([]) | [
"Find",
"the",
"first",
"index",
"of",
"the",
"biggest",
"value",
"in",
"the",
"iterable",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/math.py#L152-L180 | train | 36,952 |
MartinThoma/mpu | mpu/math.py | round_down | def round_down(x, decimal_places):
"""
Round a float down to decimal_places.
Parameters
----------
x : float
decimal_places : int
Returns
-------
rounded_float : float
Examples
--------
>>> round_down(1.23456, 3)
1.234
>>> round_down(1.23456, 2)
1.23
"""
from math import floor
d = int('1' + ('0' * decimal_places))
return floor(x * d) / d | python | def round_down(x, decimal_places):
"""
Round a float down to decimal_places.
Parameters
----------
x : float
decimal_places : int
Returns
-------
rounded_float : float
Examples
--------
>>> round_down(1.23456, 3)
1.234
>>> round_down(1.23456, 2)
1.23
"""
from math import floor
d = int('1' + ('0' * decimal_places))
return floor(x * d) / d | [
"def",
"round_down",
"(",
"x",
",",
"decimal_places",
")",
":",
"from",
"math",
"import",
"floor",
"d",
"=",
"int",
"(",
"'1'",
"+",
"(",
"'0'",
"*",
"decimal_places",
")",
")",
"return",
"floor",
"(",
"x",
"*",
"d",
")",
"/",
"d"
] | Round a float down to decimal_places.
Parameters
----------
x : float
decimal_places : int
Returns
-------
rounded_float : float
Examples
--------
>>> round_down(1.23456, 3)
1.234
>>> round_down(1.23456, 2)
1.23 | [
"Round",
"a",
"float",
"down",
"to",
"decimal_places",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/math.py#L206-L228 | train | 36,953 |
MartinThoma/mpu | mpu/datetime.py | add_time | def add_time(datetime_obj, days=0, hours=0, minutes=0, seconds=0):
"""
Add time to a timezone-aware datetime object.
This keeps the timezone correct, even if it changes due to daylight
saving time (DST).
Parameters
----------
datetime_obj : datetime.datetime
days : int
hours : int
minutes : int
seconds : int
Returns
-------
datetime : datetime.datetime
"""
seconds += minutes * 60
seconds += hours * 60**2
seconds += days * 24 * 60**2
t14 = datetime_obj + dt.timedelta(seconds=seconds) # Invalid timezone!
t14 = t14.astimezone(pytz.utc).astimezone(t14.tzinfo) # Fix the timezone
return t14 | python | def add_time(datetime_obj, days=0, hours=0, minutes=0, seconds=0):
"""
Add time to a timezone-aware datetime object.
This keeps the timezone correct, even if it changes due to daylight
saving time (DST).
Parameters
----------
datetime_obj : datetime.datetime
days : int
hours : int
minutes : int
seconds : int
Returns
-------
datetime : datetime.datetime
"""
seconds += minutes * 60
seconds += hours * 60**2
seconds += days * 24 * 60**2
t14 = datetime_obj + dt.timedelta(seconds=seconds) # Invalid timezone!
t14 = t14.astimezone(pytz.utc).astimezone(t14.tzinfo) # Fix the timezone
return t14 | [
"def",
"add_time",
"(",
"datetime_obj",
",",
"days",
"=",
"0",
",",
"hours",
"=",
"0",
",",
"minutes",
"=",
"0",
",",
"seconds",
"=",
"0",
")",
":",
"seconds",
"+=",
"minutes",
"*",
"60",
"seconds",
"+=",
"hours",
"*",
"60",
"**",
"2",
"seconds",
... | Add time to a timezone-aware datetime object.
This keeps the timezone correct, even if it changes due to daylight
saving time (DST).
Parameters
----------
datetime_obj : datetime.datetime
days : int
hours : int
minutes : int
seconds : int
Returns
-------
datetime : datetime.datetime | [
"Add",
"time",
"to",
"a",
"timezone",
"-",
"aware",
"datetime",
"object",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/datetime.py#L16-L40 | train | 36,954 |
MartinThoma/mpu | mpu/datetime.py | generate | def generate(minimum, maximum, local_random=random.Random()):
"""
Generate a random date.
The generated dates are uniformly distributed.
Parameters
----------
minimum : datetime object
maximum : datetime object
local_random : random.Random
Returns
-------
generated_date : datetime object
Examples
--------
>>> import random; r = random.Random(); r.seed(0)
>>> from datetime import datetime
>>> generate(datetime(2018, 1, 1), datetime(2018, 1, 2), local_random=r)
datetime.datetime(2018, 1, 1, 20, 15, 58, 47972)
>>> generate(datetime(2018, 1, 1), datetime(2018, 1, 2), local_random=r)
datetime.datetime(2018, 1, 1, 18, 11, 27, 260414)
"""
if not (minimum < maximum):
raise ValueError('{} is not smaller than {}'.format(minimum, maximum))
# Python 3 allows direct multiplication of timedelta with a float, but
# Python 2.7 does not. Hence this work-around.
time_d = maximum - minimum
time_d_float = time_d.total_seconds()
time_d_rand = dt.timedelta(seconds=time_d_float * local_random.random())
generated = minimum + time_d_rand
return generated | python | def generate(minimum, maximum, local_random=random.Random()):
"""
Generate a random date.
The generated dates are uniformly distributed.
Parameters
----------
minimum : datetime object
maximum : datetime object
local_random : random.Random
Returns
-------
generated_date : datetime object
Examples
--------
>>> import random; r = random.Random(); r.seed(0)
>>> from datetime import datetime
>>> generate(datetime(2018, 1, 1), datetime(2018, 1, 2), local_random=r)
datetime.datetime(2018, 1, 1, 20, 15, 58, 47972)
>>> generate(datetime(2018, 1, 1), datetime(2018, 1, 2), local_random=r)
datetime.datetime(2018, 1, 1, 18, 11, 27, 260414)
"""
if not (minimum < maximum):
raise ValueError('{} is not smaller than {}'.format(minimum, maximum))
# Python 3 allows direct multiplication of timedelta with a float, but
# Python 2.7 does not. Hence this work-around.
time_d = maximum - minimum
time_d_float = time_d.total_seconds()
time_d_rand = dt.timedelta(seconds=time_d_float * local_random.random())
generated = minimum + time_d_rand
return generated | [
"def",
"generate",
"(",
"minimum",
",",
"maximum",
",",
"local_random",
"=",
"random",
".",
"Random",
"(",
")",
")",
":",
"if",
"not",
"(",
"minimum",
"<",
"maximum",
")",
":",
"raise",
"ValueError",
"(",
"'{} is not smaller than {}'",
".",
"format",
"(",
... | Generate a random date.
The generated dates are uniformly distributed.
Parameters
----------
minimum : datetime object
maximum : datetime object
local_random : random.Random
Returns
-------
generated_date : datetime object
Examples
--------
>>> import random; r = random.Random(); r.seed(0)
>>> from datetime import datetime
>>> generate(datetime(2018, 1, 1), datetime(2018, 1, 2), local_random=r)
datetime.datetime(2018, 1, 1, 20, 15, 58, 47972)
>>> generate(datetime(2018, 1, 1), datetime(2018, 1, 2), local_random=r)
datetime.datetime(2018, 1, 1, 18, 11, 27, 260414) | [
"Generate",
"a",
"random",
"date",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/datetime.py#L43-L79 | train | 36,955 |
MartinThoma/mpu | mpu/package/cli.py | run_init | def run_init(args):
"""
Run project initialization.
This will ask the user for input.
Parameters
----------
args : argparse named arguments
"""
root = args.root
if root is None:
root = '.'
root = os.path.abspath(root)
project_data = _get_package_data()
project_name = project_data['project_name']
directories = [os.path.join(root, 'bin'),
os.path.join(root, 'docs'),
os.path.join(root, 'tests'),
os.path.join(root, project_name),
]
for dir_path in directories:
if not os.path.exists(dir_path):
os.makedirs(dir_path)
script_paths = [os.path.join(root, 'README.md'),
os.path.join(root, 'tests/__init__.py'),
]
for script_path in script_paths:
if not os.path.exists(script_path):
os.mknod(script_path)
copy_samples = [(resource_filename('mpu', 'package/templates/tox.ini.txt'),
os.path.join(root, 'tox.ini')),
(resource_filename('mpu',
'package/templates/setup.cfg.txt'),
os.path.join(root, 'setup.cfg')),
(resource_filename('mpu',
'package/templates/setup.py.txt'),
os.path.join(root, 'setup.py')),
(resource_filename('mpu',
'package/templates/_version.py.txt'),
os.path.join(root, project_name + '/_version.py')),
(resource_filename('mpu',
'package/templates/coveragerc.txt'),
os.path.join(root, '.coveragerc')),
(resource_filename('mpu', 'package/templates/init.py.txt'),
os.path.join(root, project_name + '/__init__.py')),
]
translate = {'[[project_name]]': project_data['project_name'],
'[[license]]': project_data['license'],
'[[author]]': project_data['author'],
'[[email]]': project_data['email'],
}
for source, destination in copy_samples:
if not os.path.exists(destination):
copyfile(source, destination)
_adjust_template(destination, translate) | python | def run_init(args):
"""
Run project initialization.
This will ask the user for input.
Parameters
----------
args : argparse named arguments
"""
root = args.root
if root is None:
root = '.'
root = os.path.abspath(root)
project_data = _get_package_data()
project_name = project_data['project_name']
directories = [os.path.join(root, 'bin'),
os.path.join(root, 'docs'),
os.path.join(root, 'tests'),
os.path.join(root, project_name),
]
for dir_path in directories:
if not os.path.exists(dir_path):
os.makedirs(dir_path)
script_paths = [os.path.join(root, 'README.md'),
os.path.join(root, 'tests/__init__.py'),
]
for script_path in script_paths:
if not os.path.exists(script_path):
os.mknod(script_path)
copy_samples = [(resource_filename('mpu', 'package/templates/tox.ini.txt'),
os.path.join(root, 'tox.ini')),
(resource_filename('mpu',
'package/templates/setup.cfg.txt'),
os.path.join(root, 'setup.cfg')),
(resource_filename('mpu',
'package/templates/setup.py.txt'),
os.path.join(root, 'setup.py')),
(resource_filename('mpu',
'package/templates/_version.py.txt'),
os.path.join(root, project_name + '/_version.py')),
(resource_filename('mpu',
'package/templates/coveragerc.txt'),
os.path.join(root, '.coveragerc')),
(resource_filename('mpu', 'package/templates/init.py.txt'),
os.path.join(root, project_name + '/__init__.py')),
]
translate = {'[[project_name]]': project_data['project_name'],
'[[license]]': project_data['license'],
'[[author]]': project_data['author'],
'[[email]]': project_data['email'],
}
for source, destination in copy_samples:
if not os.path.exists(destination):
copyfile(source, destination)
_adjust_template(destination, translate) | [
"def",
"run_init",
"(",
"args",
")",
":",
"root",
"=",
"args",
".",
"root",
"if",
"root",
"is",
"None",
":",
"root",
"=",
"'.'",
"root",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"root",
")",
"project_data",
"=",
"_get_package_data",
"(",
")",
"... | Run project initialization.
This will ask the user for input.
Parameters
----------
args : argparse named arguments | [
"Run",
"project",
"initialization",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/package/cli.py#L16-L75 | train | 36,956 |
MartinThoma/mpu | mpu/package/cli.py | _multiple_replace | def _multiple_replace(text, search_replace_dict):
"""
Replace multiple things at once in a text.
Parameters
----------
text : str
search_replace_dict : dict
Returns
-------
replaced_text : str
Examples
--------
>>> d = {'a': 'b', 'b': 'c', 'c': 'd', 'd': 'e'}
>>> _multiple_replace('abcdefghijklm', d)
'bcdeefghijklm'
"""
# Create a regular expression from all of the dictionary keys
regex = re.compile("|".join(map(re.escape, search_replace_dict.keys())))
# For each match, look up the corresponding value in the dictionary
return regex.sub(lambda match: search_replace_dict[match.group(0)], text) | python | def _multiple_replace(text, search_replace_dict):
"""
Replace multiple things at once in a text.
Parameters
----------
text : str
search_replace_dict : dict
Returns
-------
replaced_text : str
Examples
--------
>>> d = {'a': 'b', 'b': 'c', 'c': 'd', 'd': 'e'}
>>> _multiple_replace('abcdefghijklm', d)
'bcdeefghijklm'
"""
# Create a regular expression from all of the dictionary keys
regex = re.compile("|".join(map(re.escape, search_replace_dict.keys())))
# For each match, look up the corresponding value in the dictionary
return regex.sub(lambda match: search_replace_dict[match.group(0)], text) | [
"def",
"_multiple_replace",
"(",
"text",
",",
"search_replace_dict",
")",
":",
"# Create a regular expression from all of the dictionary keys",
"regex",
"=",
"re",
".",
"compile",
"(",
"\"|\"",
".",
"join",
"(",
"map",
"(",
"re",
".",
"escape",
",",
"search_replace_... | Replace multiple things at once in a text.
Parameters
----------
text : str
search_replace_dict : dict
Returns
-------
replaced_text : str
Examples
--------
>>> d = {'a': 'b', 'b': 'c', 'c': 'd', 'd': 'e'}
>>> _multiple_replace('abcdefghijklm', d)
'bcdeefghijklm' | [
"Replace",
"multiple",
"things",
"at",
"once",
"in",
"a",
"text",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/package/cli.py#L88-L111 | train | 36,957 |
MartinThoma/mpu | mpu/package/cli.py | _adjust_template | def _adjust_template(filepath, translate):
"""
Search and replace contents of a filepath.
Parameters
----------
filepath : str
translate : dict
"""
with open(filepath, 'r') as file:
filedata = file.read()
filedata = _multiple_replace(filedata, translate)
with open(filepath, 'w') as file:
file.write(filedata) | python | def _adjust_template(filepath, translate):
"""
Search and replace contents of a filepath.
Parameters
----------
filepath : str
translate : dict
"""
with open(filepath, 'r') as file:
filedata = file.read()
filedata = _multiple_replace(filedata, translate)
with open(filepath, 'w') as file:
file.write(filedata) | [
"def",
"_adjust_template",
"(",
"filepath",
",",
"translate",
")",
":",
"with",
"open",
"(",
"filepath",
",",
"'r'",
")",
"as",
"file",
":",
"filedata",
"=",
"file",
".",
"read",
"(",
")",
"filedata",
"=",
"_multiple_replace",
"(",
"filedata",
",",
"tran... | Search and replace contents of a filepath.
Parameters
----------
filepath : str
translate : dict | [
"Search",
"and",
"replace",
"contents",
"of",
"a",
"filepath",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/package/cli.py#L114-L129 | train | 36,958 |
MartinThoma/mpu | mpu/__init__.py | parallel_for | def parallel_for(loop_function, parameters, nb_threads=100):
"""
Execute the loop body in parallel.
.. note:: Race-Conditions
Executing code in parallel can cause an error class called
"race-condition".
Parameters
----------
loop_function : Python function which takes a tuple as input
parameters : List of tuples
Each element here should be executed in parallel.
Returns
-------
return_values : list of return values
"""
import multiprocessing.pool
from contextlib import closing
with closing(multiprocessing.pool.ThreadPool(nb_threads)) as pool:
return pool.map(loop_function, parameters) | python | def parallel_for(loop_function, parameters, nb_threads=100):
"""
Execute the loop body in parallel.
.. note:: Race-Conditions
Executing code in parallel can cause an error class called
"race-condition".
Parameters
----------
loop_function : Python function which takes a tuple as input
parameters : List of tuples
Each element here should be executed in parallel.
Returns
-------
return_values : list of return values
"""
import multiprocessing.pool
from contextlib import closing
with closing(multiprocessing.pool.ThreadPool(nb_threads)) as pool:
return pool.map(loop_function, parameters) | [
"def",
"parallel_for",
"(",
"loop_function",
",",
"parameters",
",",
"nb_threads",
"=",
"100",
")",
":",
"import",
"multiprocessing",
".",
"pool",
"from",
"contextlib",
"import",
"closing",
"with",
"closing",
"(",
"multiprocessing",
".",
"pool",
".",
"ThreadPool... | Execute the loop body in parallel.
.. note:: Race-Conditions
Executing code in parallel can cause an error class called
"race-condition".
Parameters
----------
loop_function : Python function which takes a tuple as input
parameters : List of tuples
Each element here should be executed in parallel.
Returns
-------
return_values : list of return values | [
"Execute",
"the",
"loop",
"body",
"in",
"parallel",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/__init__.py#L16-L37 | train | 36,959 |
MartinThoma/mpu | mpu/__init__.py | consistent_shuffle | def consistent_shuffle(*lists):
"""
Shuffle lists consistently.
Parameters
----------
*lists
Variable length number of lists
Returns
-------
shuffled_lists : tuple of lists
All of the lists are shuffled consistently
Examples
--------
>>> import mpu, random; random.seed(8)
>>> mpu.consistent_shuffle([1,2,3], ['a', 'b', 'c'], ['A', 'B', 'C'])
([3, 2, 1], ['c', 'b', 'a'], ['C', 'B', 'A'])
"""
perm = list(range(len(lists[0])))
random.shuffle(perm)
lists = tuple([sublist[index] for index in perm]
for sublist in lists)
return lists | python | def consistent_shuffle(*lists):
"""
Shuffle lists consistently.
Parameters
----------
*lists
Variable length number of lists
Returns
-------
shuffled_lists : tuple of lists
All of the lists are shuffled consistently
Examples
--------
>>> import mpu, random; random.seed(8)
>>> mpu.consistent_shuffle([1,2,3], ['a', 'b', 'c'], ['A', 'B', 'C'])
([3, 2, 1], ['c', 'b', 'a'], ['C', 'B', 'A'])
"""
perm = list(range(len(lists[0])))
random.shuffle(perm)
lists = tuple([sublist[index] for index in perm]
for sublist in lists)
return lists | [
"def",
"consistent_shuffle",
"(",
"*",
"lists",
")",
":",
"perm",
"=",
"list",
"(",
"range",
"(",
"len",
"(",
"lists",
"[",
"0",
"]",
")",
")",
")",
"random",
".",
"shuffle",
"(",
"perm",
")",
"lists",
"=",
"tuple",
"(",
"[",
"sublist",
"[",
"ind... | Shuffle lists consistently.
Parameters
----------
*lists
Variable length number of lists
Returns
-------
shuffled_lists : tuple of lists
All of the lists are shuffled consistently
Examples
--------
>>> import mpu, random; random.seed(8)
>>> mpu.consistent_shuffle([1,2,3], ['a', 'b', 'c'], ['A', 'B', 'C'])
([3, 2, 1], ['c', 'b', 'a'], ['C', 'B', 'A']) | [
"Shuffle",
"lists",
"consistently",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/__init__.py#L66-L90 | train | 36,960 |
MartinThoma/mpu | mpu/__init__.py | haversine_distance | def haversine_distance(origin, destination):
"""
Calculate the Haversine distance.
Parameters
----------
origin : tuple of float
(lat, long)
destination : tuple of float
(lat, long)
Returns
-------
distance_in_km : float
Examples
--------
>>> munich = (48.1372, 11.5756)
>>> berlin = (52.5186, 13.4083)
>>> round(haversine_distance(munich, berlin), 1)
504.2
>>> new_york_city = (40.712777777778, -74.005833333333) # NYC
>>> round(haversine_distance(berlin, new_york_city), 1)
6385.3
"""
lat1, lon1 = origin
lat2, lon2 = destination
if not (-90.0 <= lat1 <= 90):
raise ValueError('lat1={:2.2f}, but must be in [-90,+90]'.format(lat1))
if not (-90.0 <= lat2 <= 90):
raise ValueError('lat2={:2.2f}, but must be in [-90,+90]'.format(lat2))
if not (-180.0 <= lon1 <= 180):
raise ValueError('lon1={:2.2f}, but must be in [-180,+180]'
.format(lat1))
if not (-180.0 <= lon2 <= 180):
raise ValueError('lon1={:2.2f}, but must be in [-180,+180]'
.format(lat1))
radius = 6371 # km
dlat = math_stl.radians(lat2 - lat1)
dlon = math_stl.radians(lon2 - lon1)
a = (math_stl.sin(dlat / 2) * math_stl.sin(dlat / 2) +
math_stl.cos(math_stl.radians(lat1)) *
math_stl.cos(math_stl.radians(lat2)) *
math_stl.sin(dlon / 2) * math_stl.sin(dlon / 2))
c = 2 * math_stl.atan2(math_stl.sqrt(a), math_stl.sqrt(1 - a))
d = radius * c
return d | python | def haversine_distance(origin, destination):
"""
Calculate the Haversine distance.
Parameters
----------
origin : tuple of float
(lat, long)
destination : tuple of float
(lat, long)
Returns
-------
distance_in_km : float
Examples
--------
>>> munich = (48.1372, 11.5756)
>>> berlin = (52.5186, 13.4083)
>>> round(haversine_distance(munich, berlin), 1)
504.2
>>> new_york_city = (40.712777777778, -74.005833333333) # NYC
>>> round(haversine_distance(berlin, new_york_city), 1)
6385.3
"""
lat1, lon1 = origin
lat2, lon2 = destination
if not (-90.0 <= lat1 <= 90):
raise ValueError('lat1={:2.2f}, but must be in [-90,+90]'.format(lat1))
if not (-90.0 <= lat2 <= 90):
raise ValueError('lat2={:2.2f}, but must be in [-90,+90]'.format(lat2))
if not (-180.0 <= lon1 <= 180):
raise ValueError('lon1={:2.2f}, but must be in [-180,+180]'
.format(lat1))
if not (-180.0 <= lon2 <= 180):
raise ValueError('lon1={:2.2f}, but must be in [-180,+180]'
.format(lat1))
radius = 6371 # km
dlat = math_stl.radians(lat2 - lat1)
dlon = math_stl.radians(lon2 - lon1)
a = (math_stl.sin(dlat / 2) * math_stl.sin(dlat / 2) +
math_stl.cos(math_stl.radians(lat1)) *
math_stl.cos(math_stl.radians(lat2)) *
math_stl.sin(dlon / 2) * math_stl.sin(dlon / 2))
c = 2 * math_stl.atan2(math_stl.sqrt(a), math_stl.sqrt(1 - a))
d = radius * c
return d | [
"def",
"haversine_distance",
"(",
"origin",
",",
"destination",
")",
":",
"lat1",
",",
"lon1",
"=",
"origin",
"lat2",
",",
"lon2",
"=",
"destination",
"if",
"not",
"(",
"-",
"90.0",
"<=",
"lat1",
"<=",
"90",
")",
":",
"raise",
"ValueError",
"(",
"'lat1... | Calculate the Haversine distance.
Parameters
----------
origin : tuple of float
(lat, long)
destination : tuple of float
(lat, long)
Returns
-------
distance_in_km : float
Examples
--------
>>> munich = (48.1372, 11.5756)
>>> berlin = (52.5186, 13.4083)
>>> round(haversine_distance(munich, berlin), 1)
504.2
>>> new_york_city = (40.712777777778, -74.005833333333) # NYC
>>> round(haversine_distance(berlin, new_york_city), 1)
6385.3 | [
"Calculate",
"the",
"Haversine",
"distance",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/__init__.py#L161-L210 | train | 36,961 |
MartinThoma/mpu | mpu/__init__.py | is_in_intervall | def is_in_intervall(value, min_value, max_value, name='variable'):
"""
Raise an exception if value is not in an interval.
Parameters
----------
value : orderable
min_value : orderable
max_value : orderable
name : str
Name of the variable to print in exception.
"""
if not (min_value <= value <= max_value):
raise ValueError('{}={} is not in [{}, {}]'
.format(name, value, min_value, max_value)) | python | def is_in_intervall(value, min_value, max_value, name='variable'):
"""
Raise an exception if value is not in an interval.
Parameters
----------
value : orderable
min_value : orderable
max_value : orderable
name : str
Name of the variable to print in exception.
"""
if not (min_value <= value <= max_value):
raise ValueError('{}={} is not in [{}, {}]'
.format(name, value, min_value, max_value)) | [
"def",
"is_in_intervall",
"(",
"value",
",",
"min_value",
",",
"max_value",
",",
"name",
"=",
"'variable'",
")",
":",
"if",
"not",
"(",
"min_value",
"<=",
"value",
"<=",
"max_value",
")",
":",
"raise",
"ValueError",
"(",
"'{}={} is not in [{}, {}]'",
".",
"f... | Raise an exception if value is not in an interval.
Parameters
----------
value : orderable
min_value : orderable
max_value : orderable
name : str
Name of the variable to print in exception. | [
"Raise",
"an",
"exception",
"if",
"value",
"is",
"not",
"in",
"an",
"interval",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/__init__.py#L213-L227 | train | 36,962 |
MartinThoma/mpu | mpu/__init__.py | exception_logging | def exception_logging(exctype, value, tb):
"""
Log exception by using the root logger.
Use it as `sys.excepthook = exception_logging`.
Parameters
----------
exctype : type
value : NameError
tb : traceback
"""
write_val = {'exception_type': str(exctype),
'message': str(traceback.format_tb(tb, 10))}
logging.exception(str(write_val)) | python | def exception_logging(exctype, value, tb):
"""
Log exception by using the root logger.
Use it as `sys.excepthook = exception_logging`.
Parameters
----------
exctype : type
value : NameError
tb : traceback
"""
write_val = {'exception_type': str(exctype),
'message': str(traceback.format_tb(tb, 10))}
logging.exception(str(write_val)) | [
"def",
"exception_logging",
"(",
"exctype",
",",
"value",
",",
"tb",
")",
":",
"write_val",
"=",
"{",
"'exception_type'",
":",
"str",
"(",
"exctype",
")",
",",
"'message'",
":",
"str",
"(",
"traceback",
".",
"format_tb",
"(",
"tb",
",",
"10",
")",
")",... | Log exception by using the root logger.
Use it as `sys.excepthook = exception_logging`.
Parameters
----------
exctype : type
value : NameError
tb : traceback | [
"Log",
"exception",
"by",
"using",
"the",
"root",
"logger",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/__init__.py#L230-L244 | train | 36,963 |
MartinThoma/mpu | mpu/__init__.py | Location.latitude | def latitude(self, latitude):
"""Setter for latiutde."""
if not (-90 <= latitude <= 90):
raise ValueError('latitude was {}, but has to be in [-90, 90]'
.format(latitude))
self._latitude = latitude | python | def latitude(self, latitude):
"""Setter for latiutde."""
if not (-90 <= latitude <= 90):
raise ValueError('latitude was {}, but has to be in [-90, 90]'
.format(latitude))
self._latitude = latitude | [
"def",
"latitude",
"(",
"self",
",",
"latitude",
")",
":",
"if",
"not",
"(",
"-",
"90",
"<=",
"latitude",
"<=",
"90",
")",
":",
"raise",
"ValueError",
"(",
"'latitude was {}, but has to be in [-90, 90]'",
".",
"format",
"(",
"latitude",
")",
")",
"self",
"... | Setter for latiutde. | [
"Setter",
"for",
"latiutde",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/__init__.py#L120-L125 | train | 36,964 |
MartinThoma/mpu | mpu/__init__.py | Location.longitude | def longitude(self, longitude):
"""Setter for longitude."""
if not (-180 <= longitude <= 180):
raise ValueError('longitude was {}, but has to be in [-180, 180]'
.format(longitude))
self._longitude = longitude | python | def longitude(self, longitude):
"""Setter for longitude."""
if not (-180 <= longitude <= 180):
raise ValueError('longitude was {}, but has to be in [-180, 180]'
.format(longitude))
self._longitude = longitude | [
"def",
"longitude",
"(",
"self",
",",
"longitude",
")",
":",
"if",
"not",
"(",
"-",
"180",
"<=",
"longitude",
"<=",
"180",
")",
":",
"raise",
"ValueError",
"(",
"'longitude was {}, but has to be in [-180, 180]'",
".",
"format",
"(",
"longitude",
")",
")",
"s... | Setter for longitude. | [
"Setter",
"for",
"longitude",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/__init__.py#L128-L133 | train | 36,965 |
MartinThoma/mpu | mpu/__init__.py | Location.distance | def distance(self, there):
"""
Calculate the distance from this location to there.
Parameters
----------
there : Location
Returns
-------
distance_in_m : float
"""
return haversine_distance((self.latitude, self.longitude),
(there.latitude, there.longitude)) | python | def distance(self, there):
"""
Calculate the distance from this location to there.
Parameters
----------
there : Location
Returns
-------
distance_in_m : float
"""
return haversine_distance((self.latitude, self.longitude),
(there.latitude, there.longitude)) | [
"def",
"distance",
"(",
"self",
",",
"there",
")",
":",
"return",
"haversine_distance",
"(",
"(",
"self",
".",
"latitude",
",",
"self",
".",
"longitude",
")",
",",
"(",
"there",
".",
"latitude",
",",
"there",
".",
"longitude",
")",
")"
] | Calculate the distance from this location to there.
Parameters
----------
there : Location
Returns
-------
distance_in_m : float | [
"Calculate",
"the",
"distance",
"from",
"this",
"location",
"to",
"there",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/__init__.py#L140-L153 | train | 36,966 |
MartinThoma/mpu | mpu/_cli.py | main | def main():
"""Command line interface of mpu."""
parser = get_parser()
args = parser.parse_args()
if hasattr(args, 'func') and args.func:
args.func(args)
else:
parser.print_help() | python | def main():
"""Command line interface of mpu."""
parser = get_parser()
args = parser.parse_args()
if hasattr(args, 'func') and args.func:
args.func(args)
else:
parser.print_help() | [
"def",
"main",
"(",
")",
":",
"parser",
"=",
"get_parser",
"(",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"if",
"hasattr",
"(",
"args",
",",
"'func'",
")",
"and",
"args",
".",
"func",
":",
"args",
".",
"func",
"(",
"args",
")",
"el... | Command line interface of mpu. | [
"Command",
"line",
"interface",
"of",
"mpu",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/_cli.py#L13-L20 | train | 36,967 |
MartinThoma/mpu | mpu/io.py | urlread | def urlread(url, encoding='utf8'):
"""
Read the content of an URL.
Parameters
----------
url : str
Returns
-------
content : str
"""
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
response = urlopen(url)
content = response.read()
content = content.decode(encoding)
return content | python | def urlread(url, encoding='utf8'):
"""
Read the content of an URL.
Parameters
----------
url : str
Returns
-------
content : str
"""
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
response = urlopen(url)
content = response.read()
content = content.decode(encoding)
return content | [
"def",
"urlread",
"(",
"url",
",",
"encoding",
"=",
"'utf8'",
")",
":",
"try",
":",
"from",
"urllib",
".",
"request",
"import",
"urlopen",
"except",
"ImportError",
":",
"from",
"urllib2",
"import",
"urlopen",
"response",
"=",
"urlopen",
"(",
"url",
")",
... | Read the content of an URL.
Parameters
----------
url : str
Returns
-------
content : str | [
"Read",
"the",
"content",
"of",
"an",
"URL",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/io.py#L234-L253 | train | 36,968 |
MartinThoma/mpu | mpu/io.py | hash | def hash(filepath, method='sha1', buffer_size=65536):
"""
Calculate a hash of a local file.
Parameters
----------
filepath : str
method : {'sha1', 'md5'}
buffer_size : int, optional (default: 65536 byte = 64 KiB)
in byte
Returns
-------
hash : str
"""
if method == 'sha1':
hash_function = hashlib.sha1()
elif method == 'md5':
hash_function = hashlib.md5()
else:
raise NotImplementedError('Only md5 and sha1 hashes are known, but '
' \'{}\' was specified.'.format(method))
with open(filepath, 'rb') as fp:
while True:
data = fp.read(buffer_size)
if not data:
break
hash_function.update(data)
return hash_function.hexdigest() | python | def hash(filepath, method='sha1', buffer_size=65536):
"""
Calculate a hash of a local file.
Parameters
----------
filepath : str
method : {'sha1', 'md5'}
buffer_size : int, optional (default: 65536 byte = 64 KiB)
in byte
Returns
-------
hash : str
"""
if method == 'sha1':
hash_function = hashlib.sha1()
elif method == 'md5':
hash_function = hashlib.md5()
else:
raise NotImplementedError('Only md5 and sha1 hashes are known, but '
' \'{}\' was specified.'.format(method))
with open(filepath, 'rb') as fp:
while True:
data = fp.read(buffer_size)
if not data:
break
hash_function.update(data)
return hash_function.hexdigest() | [
"def",
"hash",
"(",
"filepath",
",",
"method",
"=",
"'sha1'",
",",
"buffer_size",
"=",
"65536",
")",
":",
"if",
"method",
"==",
"'sha1'",
":",
"hash_function",
"=",
"hashlib",
".",
"sha1",
"(",
")",
"elif",
"method",
"==",
"'md5'",
":",
"hash_function",
... | Calculate a hash of a local file.
Parameters
----------
filepath : str
method : {'sha1', 'md5'}
buffer_size : int, optional (default: 65536 byte = 64 KiB)
in byte
Returns
-------
hash : str | [
"Calculate",
"a",
"hash",
"of",
"a",
"local",
"file",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/io.py#L277-L306 | train | 36,969 |
MartinThoma/mpu | mpu/io.py | get_creation_datetime | def get_creation_datetime(filepath):
"""
Get the date that a file was created.
Parameters
----------
filepath : str
Returns
-------
creation_datetime : datetime.datetime or None
"""
if platform.system() == 'Windows':
return datetime.fromtimestamp(os.path.getctime(filepath))
else:
stat = os.stat(filepath)
try:
return datetime.fromtimestamp(stat.st_birthtime)
except AttributeError:
# We're probably on Linux. No easy way to get creation dates here,
# so we'll settle for when its content was last modified.
return None | python | def get_creation_datetime(filepath):
"""
Get the date that a file was created.
Parameters
----------
filepath : str
Returns
-------
creation_datetime : datetime.datetime or None
"""
if platform.system() == 'Windows':
return datetime.fromtimestamp(os.path.getctime(filepath))
else:
stat = os.stat(filepath)
try:
return datetime.fromtimestamp(stat.st_birthtime)
except AttributeError:
# We're probably on Linux. No easy way to get creation dates here,
# so we'll settle for when its content was last modified.
return None | [
"def",
"get_creation_datetime",
"(",
"filepath",
")",
":",
"if",
"platform",
".",
"system",
"(",
")",
"==",
"'Windows'",
":",
"return",
"datetime",
".",
"fromtimestamp",
"(",
"os",
".",
"path",
".",
"getctime",
"(",
"filepath",
")",
")",
"else",
":",
"st... | Get the date that a file was created.
Parameters
----------
filepath : str
Returns
-------
creation_datetime : datetime.datetime or None | [
"Get",
"the",
"date",
"that",
"a",
"file",
"was",
"created",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/io.py#L309-L330 | train | 36,970 |
MartinThoma/mpu | mpu/io.py | get_modification_datetime | def get_modification_datetime(filepath):
"""
Get the datetime that a file was last modified.
Parameters
----------
filepath : str
Returns
-------
modification_datetime : datetime.datetime
"""
import tzlocal
timezone = tzlocal.get_localzone()
mtime = datetime.fromtimestamp(os.path.getmtime(filepath))
return mtime.replace(tzinfo=timezone) | python | def get_modification_datetime(filepath):
"""
Get the datetime that a file was last modified.
Parameters
----------
filepath : str
Returns
-------
modification_datetime : datetime.datetime
"""
import tzlocal
timezone = tzlocal.get_localzone()
mtime = datetime.fromtimestamp(os.path.getmtime(filepath))
return mtime.replace(tzinfo=timezone) | [
"def",
"get_modification_datetime",
"(",
"filepath",
")",
":",
"import",
"tzlocal",
"timezone",
"=",
"tzlocal",
".",
"get_localzone",
"(",
")",
"mtime",
"=",
"datetime",
".",
"fromtimestamp",
"(",
"os",
".",
"path",
".",
"getmtime",
"(",
"filepath",
")",
")"... | Get the datetime that a file was last modified.
Parameters
----------
filepath : str
Returns
-------
modification_datetime : datetime.datetime | [
"Get",
"the",
"datetime",
"that",
"a",
"file",
"was",
"last",
"modified",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/io.py#L333-L349 | train | 36,971 |
MartinThoma/mpu | mpu/io.py | get_access_datetime | def get_access_datetime(filepath):
"""
Get the last time filepath was accessed.
Parameters
----------
filepath : str
Returns
-------
access_datetime : datetime.datetime
"""
import tzlocal
tz = tzlocal.get_localzone()
mtime = datetime.fromtimestamp(os.path.getatime(filepath))
return mtime.replace(tzinfo=tz) | python | def get_access_datetime(filepath):
"""
Get the last time filepath was accessed.
Parameters
----------
filepath : str
Returns
-------
access_datetime : datetime.datetime
"""
import tzlocal
tz = tzlocal.get_localzone()
mtime = datetime.fromtimestamp(os.path.getatime(filepath))
return mtime.replace(tzinfo=tz) | [
"def",
"get_access_datetime",
"(",
"filepath",
")",
":",
"import",
"tzlocal",
"tz",
"=",
"tzlocal",
".",
"get_localzone",
"(",
")",
"mtime",
"=",
"datetime",
".",
"fromtimestamp",
"(",
"os",
".",
"path",
".",
"getatime",
"(",
"filepath",
")",
")",
"return"... | Get the last time filepath was accessed.
Parameters
----------
filepath : str
Returns
-------
access_datetime : datetime.datetime | [
"Get",
"the",
"last",
"time",
"filepath",
"was",
"accessed",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/io.py#L352-L367 | train | 36,972 |
MartinThoma/mpu | mpu/io.py | get_file_meta | def get_file_meta(filepath):
"""
Get meta-information about a file.
Parameters
----------
filepath : str
Returns
-------
meta : dict
"""
meta = {}
meta['filepath'] = os.path.abspath(filepath)
meta['creation_datetime'] = get_creation_datetime(filepath)
meta['last_access_datetime'] = get_access_datetime(filepath)
meta['modification_datetime'] = get_modification_datetime(filepath)
try:
import magic
f_mime = magic.Magic(mime=True, uncompress=True)
f_other = magic.Magic(mime=False, uncompress=True)
meta['mime'] = f_mime.from_file(meta['filepath'])
meta['magic-type'] = f_other.from_file(meta['filepath'])
except ImportError:
pass
return meta | python | def get_file_meta(filepath):
"""
Get meta-information about a file.
Parameters
----------
filepath : str
Returns
-------
meta : dict
"""
meta = {}
meta['filepath'] = os.path.abspath(filepath)
meta['creation_datetime'] = get_creation_datetime(filepath)
meta['last_access_datetime'] = get_access_datetime(filepath)
meta['modification_datetime'] = get_modification_datetime(filepath)
try:
import magic
f_mime = magic.Magic(mime=True, uncompress=True)
f_other = magic.Magic(mime=False, uncompress=True)
meta['mime'] = f_mime.from_file(meta['filepath'])
meta['magic-type'] = f_other.from_file(meta['filepath'])
except ImportError:
pass
return meta | [
"def",
"get_file_meta",
"(",
"filepath",
")",
":",
"meta",
"=",
"{",
"}",
"meta",
"[",
"'filepath'",
"]",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"filepath",
")",
"meta",
"[",
"'creation_datetime'",
"]",
"=",
"get_creation_datetime",
"(",
"filepath",
... | Get meta-information about a file.
Parameters
----------
filepath : str
Returns
-------
meta : dict | [
"Get",
"meta",
"-",
"information",
"about",
"a",
"file",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/io.py#L370-L395 | train | 36,973 |
MartinThoma/mpu | mpu/io.py | gzip_file | def gzip_file(source, sink):
"""
Create a GZIP file from a source file.
Parameters
----------
source : str
Filepath
sink : str
Filepath
"""
import gzip
with open(source, 'rb') as f_in, gzip.open(sink, 'wb') as f_out:
f_out.writelines(f_in) | python | def gzip_file(source, sink):
"""
Create a GZIP file from a source file.
Parameters
----------
source : str
Filepath
sink : str
Filepath
"""
import gzip
with open(source, 'rb') as f_in, gzip.open(sink, 'wb') as f_out:
f_out.writelines(f_in) | [
"def",
"gzip_file",
"(",
"source",
",",
"sink",
")",
":",
"import",
"gzip",
"with",
"open",
"(",
"source",
",",
"'rb'",
")",
"as",
"f_in",
",",
"gzip",
".",
"open",
"(",
"sink",
",",
"'wb'",
")",
"as",
"f_out",
":",
"f_out",
".",
"writelines",
"(",... | Create a GZIP file from a source file.
Parameters
----------
source : str
Filepath
sink : str
Filepath | [
"Create",
"a",
"GZIP",
"file",
"from",
"a",
"source",
"file",
"."
] | 61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6 | https://github.com/MartinThoma/mpu/blob/61bc36d0192ca90c0bcf9b8a5d7d0d8520e20ff6/mpu/io.py#L398-L411 | train | 36,974 |
fulfilio/fulfil-python-api | fulfil_client/contrib/mocking.py | MockFulfil.start | def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
) | python | def start(self):
"""
Start the patch
"""
self._patcher = mock.patch(target=self.target)
MockClient = self._patcher.start()
instance = MockClient.return_value
instance.model.side_effect = mock.Mock(
side_effect=self.model
) | [
"def",
"start",
"(",
"self",
")",
":",
"self",
".",
"_patcher",
"=",
"mock",
".",
"patch",
"(",
"target",
"=",
"self",
".",
"target",
")",
"MockClient",
"=",
"self",
".",
"_patcher",
".",
"start",
"(",
")",
"instance",
"=",
"MockClient",
".",
"return... | Start the patch | [
"Start",
"the",
"patch"
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/contrib/mocking.py#L38-L47 | train | 36,975 |
fulfilio/fulfil-python-api | fulfil_client/oauth.py | Session.setup | def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret | python | def setup(cls, client_id, client_secret):
"""Configure client in session
"""
cls.client_id = client_id
cls.client_secret = client_secret | [
"def",
"setup",
"(",
"cls",
",",
"client_id",
",",
"client_secret",
")",
":",
"cls",
".",
"client_id",
"=",
"client_id",
"cls",
".",
"client_secret",
"=",
"client_secret"
] | Configure client in session | [
"Configure",
"client",
"in",
"session"
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/oauth.py#L18-L22 | train | 36,976 |
MrTango/RISparser | RISparser/parser.py | read | def read(filelines, mapping=None, wok=False):
"""Parse a ris lines and return a list of entries.
Entries are codified as dictionaries whose keys are the
different tags. For single line and singly occurring tags,
the content is codified as a string. In the case of multiline
or multiple key occurrences, the content is returned as a list
of strings.
Keyword arguments:
bibliography_file -- ris filehandle
mapping -- custom RIS tags mapping
wok -- flag, Web of Knowledge format is used if True, otherwise
Refman's RIS specifications are used.
"""
if wok:
if not mapping:
mapping = WOK_TAG_KEY_MAPPING
return Wok(filelines, mapping).parse()
else:
if not mapping:
mapping = TAG_KEY_MAPPING
return Ris(filelines, mapping).parse() | python | def read(filelines, mapping=None, wok=False):
"""Parse a ris lines and return a list of entries.
Entries are codified as dictionaries whose keys are the
different tags. For single line and singly occurring tags,
the content is codified as a string. In the case of multiline
or multiple key occurrences, the content is returned as a list
of strings.
Keyword arguments:
bibliography_file -- ris filehandle
mapping -- custom RIS tags mapping
wok -- flag, Web of Knowledge format is used if True, otherwise
Refman's RIS specifications are used.
"""
if wok:
if not mapping:
mapping = WOK_TAG_KEY_MAPPING
return Wok(filelines, mapping).parse()
else:
if not mapping:
mapping = TAG_KEY_MAPPING
return Ris(filelines, mapping).parse() | [
"def",
"read",
"(",
"filelines",
",",
"mapping",
"=",
"None",
",",
"wok",
"=",
"False",
")",
":",
"if",
"wok",
":",
"if",
"not",
"mapping",
":",
"mapping",
"=",
"WOK_TAG_KEY_MAPPING",
"return",
"Wok",
"(",
"filelines",
",",
"mapping",
")",
".",
"parse"... | Parse a ris lines and return a list of entries.
Entries are codified as dictionaries whose keys are the
different tags. For single line and singly occurring tags,
the content is codified as a string. In the case of multiline
or multiple key occurrences, the content is returned as a list
of strings.
Keyword arguments:
bibliography_file -- ris filehandle
mapping -- custom RIS tags mapping
wok -- flag, Web of Knowledge format is used if True, otherwise
Refman's RIS specifications are used. | [
"Parse",
"a",
"ris",
"lines",
"and",
"return",
"a",
"list",
"of",
"entries",
"."
] | d133d74022d3edbbdec19ef72bd34c8902a0bad1 | https://github.com/MrTango/RISparser/blob/d133d74022d3edbbdec19ef72bd34c8902a0bad1/RISparser/parser.py#L180-L204 | train | 36,977 |
fulfilio/fulfil-python-api | fulfil_client/client.py | Client.refresh_context | def refresh_context(self):
"""
Get the default context of the user and save it
"""
User = self.model('res.user')
self.context = User.get_preferences(True)
return self.context | python | def refresh_context(self):
"""
Get the default context of the user and save it
"""
User = self.model('res.user')
self.context = User.get_preferences(True)
return self.context | [
"def",
"refresh_context",
"(",
"self",
")",
":",
"User",
"=",
"self",
".",
"model",
"(",
"'res.user'",
")",
"self",
".",
"context",
"=",
"User",
".",
"get_preferences",
"(",
"True",
")",
"return",
"self",
".",
"context"
] | Get the default context of the user and save it | [
"Get",
"the",
"default",
"context",
"of",
"the",
"user",
"and",
"save",
"it"
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/client.py#L144-L151 | train | 36,978 |
fulfilio/fulfil-python-api | fulfil_client/client.py | Client.login | def login(self, login, password, set_auth=False):
"""
Attempts a login to the remote server
and on success returns user id and session
or None
Warning: Do not depend on this. This will be deprecated
with SSO.
param set_auth: sets the authentication on the client
"""
rv = self.session.post(
self.host,
dumps({
"method": "common.db.login",
"params": [login, password]
}),
)
rv = loads(rv.content)['result']
if set_auth:
self.set_auth(
SessionAuth(login, *rv)
)
return rv | python | def login(self, login, password, set_auth=False):
"""
Attempts a login to the remote server
and on success returns user id and session
or None
Warning: Do not depend on this. This will be deprecated
with SSO.
param set_auth: sets the authentication on the client
"""
rv = self.session.post(
self.host,
dumps({
"method": "common.db.login",
"params": [login, password]
}),
)
rv = loads(rv.content)['result']
if set_auth:
self.set_auth(
SessionAuth(login, *rv)
)
return rv | [
"def",
"login",
"(",
"self",
",",
"login",
",",
"password",
",",
"set_auth",
"=",
"False",
")",
":",
"rv",
"=",
"self",
".",
"session",
".",
"post",
"(",
"self",
".",
"host",
",",
"dumps",
"(",
"{",
"\"method\"",
":",
"\"common.db.login\"",
",",
"\"p... | Attempts a login to the remote server
and on success returns user id and session
or None
Warning: Do not depend on this. This will be deprecated
with SSO.
param set_auth: sets the authentication on the client | [
"Attempts",
"a",
"login",
"to",
"the",
"remote",
"server",
"and",
"on",
"success",
"returns",
"user",
"id",
"and",
"session",
"or",
"None"
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/client.py#L173-L196 | train | 36,979 |
fulfilio/fulfil-python-api | fulfil_client/client.py | Client.is_auth_alive | def is_auth_alive(self):
"Return true if the auth is not expired, else false"
model = self.model('ir.model')
try:
model.search([], None, 1, None)
except ClientError as err:
if err and err.message['code'] == 403:
return False
raise
except Exception:
raise
else:
return True | python | def is_auth_alive(self):
"Return true if the auth is not expired, else false"
model = self.model('ir.model')
try:
model.search([], None, 1, None)
except ClientError as err:
if err and err.message['code'] == 403:
return False
raise
except Exception:
raise
else:
return True | [
"def",
"is_auth_alive",
"(",
"self",
")",
":",
"model",
"=",
"self",
".",
"model",
"(",
"'ir.model'",
")",
"try",
":",
"model",
".",
"search",
"(",
"[",
"]",
",",
"None",
",",
"1",
",",
"None",
")",
"except",
"ClientError",
"as",
"err",
":",
"if",
... | Return true if the auth is not expired, else false | [
"Return",
"true",
"if",
"the",
"auth",
"is",
"not",
"expired",
"else",
"false"
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/client.py#L198-L210 | train | 36,980 |
fulfilio/fulfil-python-api | fulfil_client/client.py | Record.update | def update(self, data=None, **kwargs):
"""
Update the record right away.
:param data: dictionary of changes
:param kwargs: possibly a list of keyword args to change
"""
if data is None:
data = {}
data.update(kwargs)
return self.model.write([self.id], data) | python | def update(self, data=None, **kwargs):
"""
Update the record right away.
:param data: dictionary of changes
:param kwargs: possibly a list of keyword args to change
"""
if data is None:
data = {}
data.update(kwargs)
return self.model.write([self.id], data) | [
"def",
"update",
"(",
"self",
",",
"data",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"data",
"is",
"None",
":",
"data",
"=",
"{",
"}",
"data",
".",
"update",
"(",
"kwargs",
")",
"return",
"self",
".",
"model",
".",
"write",
"(",
"["... | Update the record right away.
:param data: dictionary of changes
:param kwargs: possibly a list of keyword args to change | [
"Update",
"the",
"record",
"right",
"away",
"."
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/client.py#L333-L343 | train | 36,981 |
fulfilio/fulfil-python-api | fulfil_client/client.py | Model.search_read_all | def search_read_all(self, domain, order, fields, batch_size=500,
context=None, offset=0, limit=None):
"""
An endless iterator that iterates over records.
:param domain: A search domain
:param order: The order clause for search read
:param fields: The fields argument for search_read
:param batch_size: The optimal batch size when sending paginated
requests
"""
if context is None:
context = {}
if limit is None:
# When no limit is specified, all the records
# should be fetched.
record_count = self.search_count(domain, context=context)
end = record_count + offset
else:
end = limit + offset
for page_offset in range(offset, end, batch_size):
if page_offset + batch_size > end:
batch_size = end - page_offset
for record in self.search_read(
domain, page_offset, batch_size,
order, fields, context=context):
yield record | python | def search_read_all(self, domain, order, fields, batch_size=500,
context=None, offset=0, limit=None):
"""
An endless iterator that iterates over records.
:param domain: A search domain
:param order: The order clause for search read
:param fields: The fields argument for search_read
:param batch_size: The optimal batch size when sending paginated
requests
"""
if context is None:
context = {}
if limit is None:
# When no limit is specified, all the records
# should be fetched.
record_count = self.search_count(domain, context=context)
end = record_count + offset
else:
end = limit + offset
for page_offset in range(offset, end, batch_size):
if page_offset + batch_size > end:
batch_size = end - page_offset
for record in self.search_read(
domain, page_offset, batch_size,
order, fields, context=context):
yield record | [
"def",
"search_read_all",
"(",
"self",
",",
"domain",
",",
"order",
",",
"fields",
",",
"batch_size",
"=",
"500",
",",
"context",
"=",
"None",
",",
"offset",
"=",
"0",
",",
"limit",
"=",
"None",
")",
":",
"if",
"context",
"is",
"None",
":",
"context"... | An endless iterator that iterates over records.
:param domain: A search domain
:param order: The order clause for search read
:param fields: The fields argument for search_read
:param batch_size: The optimal batch size when sending paginated
requests | [
"An",
"endless",
"iterator",
"that",
"iterates",
"over",
"records",
"."
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/client.py#L390-L418 | train | 36,982 |
fulfilio/fulfil-python-api | fulfil_client/client.py | Model.find | def find(self, filter=None, page=1, per_page=10, fields=None, context=None):
"""
Find records that match the filter.
Pro Tip: The fields could have nested fields names if the field is
a relationship type. For example if you were looking up an order
and also want to get the shipping address country then fields would be:
`['shipment_address', 'shipment_address.country']`
but country in this case is the ID of the country which is not very
useful if you don't already have a map. You can fetch the country code
by adding `'shipment_address.country.code'` to the fields.
:param filter: A domain expression (Refer docs for domain syntax)
:param page: The page to fetch to get paginated results
:param per_page: The number of records to fetch per page
:param fields: A list of field names to fetch.
:param context: Any overrides to the context.
"""
if filter is None:
filter = []
rv = self.client.session.get(
self.path,
params={
'filter': dumps(filter or []),
'page': page,
'per_page': per_page,
'field': fields,
'context': dumps(context or self.client.context),
}
)
response_received.send(rv)
return rv | python | def find(self, filter=None, page=1, per_page=10, fields=None, context=None):
"""
Find records that match the filter.
Pro Tip: The fields could have nested fields names if the field is
a relationship type. For example if you were looking up an order
and also want to get the shipping address country then fields would be:
`['shipment_address', 'shipment_address.country']`
but country in this case is the ID of the country which is not very
useful if you don't already have a map. You can fetch the country code
by adding `'shipment_address.country.code'` to the fields.
:param filter: A domain expression (Refer docs for domain syntax)
:param page: The page to fetch to get paginated results
:param per_page: The number of records to fetch per page
:param fields: A list of field names to fetch.
:param context: Any overrides to the context.
"""
if filter is None:
filter = []
rv = self.client.session.get(
self.path,
params={
'filter': dumps(filter or []),
'page': page,
'per_page': per_page,
'field': fields,
'context': dumps(context or self.client.context),
}
)
response_received.send(rv)
return rv | [
"def",
"find",
"(",
"self",
",",
"filter",
"=",
"None",
",",
"page",
"=",
"1",
",",
"per_page",
"=",
"10",
",",
"fields",
"=",
"None",
",",
"context",
"=",
"None",
")",
":",
"if",
"filter",
"is",
"None",
":",
"filter",
"=",
"[",
"]",
"rv",
"=",... | Find records that match the filter.
Pro Tip: The fields could have nested fields names if the field is
a relationship type. For example if you were looking up an order
and also want to get the shipping address country then fields would be:
`['shipment_address', 'shipment_address.country']`
but country in this case is the ID of the country which is not very
useful if you don't already have a map. You can fetch the country code
by adding `'shipment_address.country.code'` to the fields.
:param filter: A domain expression (Refer docs for domain syntax)
:param page: The page to fetch to get paginated results
:param per_page: The number of records to fetch per page
:param fields: A list of field names to fetch.
:param context: Any overrides to the context. | [
"Find",
"records",
"that",
"match",
"the",
"filter",
"."
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/client.py#L421-L454 | train | 36,983 |
fulfilio/fulfil-python-api | fulfil_client/client.py | Model.attach | def attach(self, id, filename, url):
"""Add an attachmemt to record from url
:param id: ID of record
:param filename: File name of attachment
:param url: Public url to download file from.
"""
Attachment = self.client.model('ir.attachment')
return Attachment.add_attachment_from_url(
filename, url, '%s,%s' % (self.model_name, id)
) | python | def attach(self, id, filename, url):
"""Add an attachmemt to record from url
:param id: ID of record
:param filename: File name of attachment
:param url: Public url to download file from.
"""
Attachment = self.client.model('ir.attachment')
return Attachment.add_attachment_from_url(
filename, url, '%s,%s' % (self.model_name, id)
) | [
"def",
"attach",
"(",
"self",
",",
"id",
",",
"filename",
",",
"url",
")",
":",
"Attachment",
"=",
"self",
".",
"client",
".",
"model",
"(",
"'ir.attachment'",
")",
"return",
"Attachment",
".",
"add_attachment_from_url",
"(",
"filename",
",",
"url",
",",
... | Add an attachmemt to record from url
:param id: ID of record
:param filename: File name of attachment
:param url: Public url to download file from. | [
"Add",
"an",
"attachmemt",
"to",
"record",
"from",
"url"
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/client.py#L456-L466 | train | 36,984 |
fulfilio/fulfil-python-api | fulfil_client/client.py | AsyncResult.refresh_if_needed | def refresh_if_needed(self):
"""
Refresh the status of the task from server if required.
"""
if self.state in (self.PENDING, self.STARTED):
try:
response, = self._fetch_result()['tasks']
except (KeyError, ValueError):
raise Exception(
"Unable to find results for task."
)
if 'error' in response:
self.state == self.FAILURE
raise ServerError(response['error'])
if 'state' in response:
self.state = response['state']
self.result = response['result'] | python | def refresh_if_needed(self):
"""
Refresh the status of the task from server if required.
"""
if self.state in (self.PENDING, self.STARTED):
try:
response, = self._fetch_result()['tasks']
except (KeyError, ValueError):
raise Exception(
"Unable to find results for task."
)
if 'error' in response:
self.state == self.FAILURE
raise ServerError(response['error'])
if 'state' in response:
self.state = response['state']
self.result = response['result'] | [
"def",
"refresh_if_needed",
"(",
"self",
")",
":",
"if",
"self",
".",
"state",
"in",
"(",
"self",
".",
"PENDING",
",",
"self",
".",
"STARTED",
")",
":",
"try",
":",
"response",
",",
"=",
"self",
".",
"_fetch_result",
"(",
")",
"[",
"'tasks'",
"]",
... | Refresh the status of the task from server if required. | [
"Refresh",
"the",
"status",
"of",
"the",
"task",
"from",
"server",
"if",
"required",
"."
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/client.py#L572-L590 | train | 36,985 |
fulfilio/fulfil-python-api | examples/create-sale-order.py | get_customer | def get_customer(code):
"""
Fetch a customer with the code.
Returns None if the customer is not found.
"""
Party = client.model('party.party')
results = Party.find([('code', '=', code)])
if results:
return results[0]['id'] | python | def get_customer(code):
"""
Fetch a customer with the code.
Returns None if the customer is not found.
"""
Party = client.model('party.party')
results = Party.find([('code', '=', code)])
if results:
return results[0]['id'] | [
"def",
"get_customer",
"(",
"code",
")",
":",
"Party",
"=",
"client",
".",
"model",
"(",
"'party.party'",
")",
"results",
"=",
"Party",
".",
"find",
"(",
"[",
"(",
"'code'",
",",
"'='",
",",
"code",
")",
"]",
")",
"if",
"results",
":",
"return",
"r... | Fetch a customer with the code.
Returns None if the customer is not found. | [
"Fetch",
"a",
"customer",
"with",
"the",
"code",
".",
"Returns",
"None",
"if",
"the",
"customer",
"is",
"not",
"found",
"."
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/examples/create-sale-order.py#L67-L75 | train | 36,986 |
fulfilio/fulfil-python-api | examples/create-sale-order.py | get_address | def get_address(customer_id, data):
"""
Easier to fetch the addresses of customer and then check one by one.
You can get fancy by using some validation mechanism too
"""
Address = client.model('party.address')
addresses = Address.find(
[('party', '=', customer_id)],
fields=[
'name', 'street', 'street_bis', 'city', 'zip',
'subdivision.code', 'country.code'
]
)
for address in addresses:
if (
address['name'] == data['name'] and
address['street'] == data['street'] and
address['street_bis'] == data['street_bis'] and
address['city'] == data['city'] and
address['zip'] == data['zip'] and
address['subdivision.code'].endswith(data['state']) and
address['country.code'] == data['country']):
return address['id'] | python | def get_address(customer_id, data):
"""
Easier to fetch the addresses of customer and then check one by one.
You can get fancy by using some validation mechanism too
"""
Address = client.model('party.address')
addresses = Address.find(
[('party', '=', customer_id)],
fields=[
'name', 'street', 'street_bis', 'city', 'zip',
'subdivision.code', 'country.code'
]
)
for address in addresses:
if (
address['name'] == data['name'] and
address['street'] == data['street'] and
address['street_bis'] == data['street_bis'] and
address['city'] == data['city'] and
address['zip'] == data['zip'] and
address['subdivision.code'].endswith(data['state']) and
address['country.code'] == data['country']):
return address['id'] | [
"def",
"get_address",
"(",
"customer_id",
",",
"data",
")",
":",
"Address",
"=",
"client",
".",
"model",
"(",
"'party.address'",
")",
"addresses",
"=",
"Address",
".",
"find",
"(",
"[",
"(",
"'party'",
",",
"'='",
",",
"customer_id",
")",
"]",
",",
"fi... | Easier to fetch the addresses of customer and then check one by one.
You can get fancy by using some validation mechanism too | [
"Easier",
"to",
"fetch",
"the",
"addresses",
"of",
"customer",
"and",
"then",
"check",
"one",
"by",
"one",
"."
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/examples/create-sale-order.py#L78-L102 | train | 36,987 |
fulfilio/fulfil-python-api | examples/create-sale-order.py | create_address | def create_address(customer_id, data):
"""
Create an address and return the id
"""
Address = client.model('party.address')
Country = client.model('country.country')
Subdivision = client.model('country.subdivision')
country, = Country.find([('code', '=', data['country'])])
state, = Subdivision.find([
('code', 'ilike', '%-' + data['state']), # state codes are US-CA, IN-KL
('country', '=', country['id'])
])
address, = Address.create([{
'party': customer_id,
'name': data['name'],
'street': data['street'],
'street_bis': data['street_bis'],
'city': data['city'],
'zip': data['zip'],
'country': country['id'],
'subdivision': state['id'],
}])
return address['id'] | python | def create_address(customer_id, data):
"""
Create an address and return the id
"""
Address = client.model('party.address')
Country = client.model('country.country')
Subdivision = client.model('country.subdivision')
country, = Country.find([('code', '=', data['country'])])
state, = Subdivision.find([
('code', 'ilike', '%-' + data['state']), # state codes are US-CA, IN-KL
('country', '=', country['id'])
])
address, = Address.create([{
'party': customer_id,
'name': data['name'],
'street': data['street'],
'street_bis': data['street_bis'],
'city': data['city'],
'zip': data['zip'],
'country': country['id'],
'subdivision': state['id'],
}])
return address['id'] | [
"def",
"create_address",
"(",
"customer_id",
",",
"data",
")",
":",
"Address",
"=",
"client",
".",
"model",
"(",
"'party.address'",
")",
"Country",
"=",
"client",
".",
"model",
"(",
"'country.country'",
")",
"Subdivision",
"=",
"client",
".",
"model",
"(",
... | Create an address and return the id | [
"Create",
"an",
"address",
"and",
"return",
"the",
"id"
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/examples/create-sale-order.py#L105-L129 | train | 36,988 |
fulfilio/fulfil-python-api | examples/create-sale-order.py | create_customer | def create_customer(name, email, phone):
"""
Create a customer with the name.
Then attach the email and phone as contact methods
"""
Party = client.model('party.party')
ContactMechanism = client.model('party.contact_mechanism')
party, = Party.create([{'name': name}])
# Bulk create the email and phone
ContactMechanism.create([
{'type': 'email', 'value': email, 'party': party},
{'type': 'phone', 'value': phone, 'party': party},
])
return party | python | def create_customer(name, email, phone):
"""
Create a customer with the name.
Then attach the email and phone as contact methods
"""
Party = client.model('party.party')
ContactMechanism = client.model('party.contact_mechanism')
party, = Party.create([{'name': name}])
# Bulk create the email and phone
ContactMechanism.create([
{'type': 'email', 'value': email, 'party': party},
{'type': 'phone', 'value': phone, 'party': party},
])
return party | [
"def",
"create_customer",
"(",
"name",
",",
"email",
",",
"phone",
")",
":",
"Party",
"=",
"client",
".",
"model",
"(",
"'party.party'",
")",
"ContactMechanism",
"=",
"client",
".",
"model",
"(",
"'party.contact_mechanism'",
")",
"party",
",",
"=",
"Party",
... | Create a customer with the name.
Then attach the email and phone as contact methods | [
"Create",
"a",
"customer",
"with",
"the",
"name",
".",
"Then",
"attach",
"the",
"email",
"and",
"phone",
"as",
"contact",
"methods"
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/examples/create-sale-order.py#L132-L148 | train | 36,989 |
fulfilio/fulfil-python-api | examples/create-sale-order.py | create_order | def create_order(order):
"""
Create an order on fulfil from order_details.
See the calling function below for an example of the order_details
"""
SaleOrder = client.model('sale.sale')
SaleOrderLine = client.model('sale.line')
# Check if customer exists, if not create one
customer_id = get_customer(order['customer']['code'])
if not customer_id:
customer_id = create_customer(
order['customer']['name'],
order['customer']['email'],
order['customer']['phone'],
)
# No check if there is a matching address
invoice_address = get_address(
customer_id,
order['invoice_address']
)
if not invoice_address:
invoice_address = create_address(
customer_id,
order['invoice_address']
)
# See if the shipping address exists, if not create it
shipment_address = get_address(
customer_id,
order['shipment_address']
)
if not shipment_address:
shipment_address = create_address(
customer_id,
order['shipment_address']
)
sale_order_id, = SaleOrder.create([{
'reference': order['number'],
'sale_date': order['date'],
'party': customer_id,
'invoice_address': invoice_address,
'shipment_address': shipment_address,
}])
# fetch inventory of all the products before we create lines
warehouses = get_warehouses()
warehouse_ids = [warehouse['id'] for warehouse in warehouses]
lines = []
for item in order['items']:
# get the product. We assume ti already exists.
product = get_product(item['product'])
# find the first location that has inventory
product_inventory = get_product_inventory(product, warehouse_ids)
for location, quantities in product_inventory.items():
if quantities['quantity_available'] >= item['quantity']:
break
lines.append({
'sale': sale_order_id,
'product': product,
'quantity': item['quantity'],
'unit_price': item['unit_price'],
'warehouse': location,
})
SaleOrderLine.create(lines)
SaleOrder.quote([sale_order_id])
SaleOrder.confirm([sale_order_id]) | python | def create_order(order):
"""
Create an order on fulfil from order_details.
See the calling function below for an example of the order_details
"""
SaleOrder = client.model('sale.sale')
SaleOrderLine = client.model('sale.line')
# Check if customer exists, if not create one
customer_id = get_customer(order['customer']['code'])
if not customer_id:
customer_id = create_customer(
order['customer']['name'],
order['customer']['email'],
order['customer']['phone'],
)
# No check if there is a matching address
invoice_address = get_address(
customer_id,
order['invoice_address']
)
if not invoice_address:
invoice_address = create_address(
customer_id,
order['invoice_address']
)
# See if the shipping address exists, if not create it
shipment_address = get_address(
customer_id,
order['shipment_address']
)
if not shipment_address:
shipment_address = create_address(
customer_id,
order['shipment_address']
)
sale_order_id, = SaleOrder.create([{
'reference': order['number'],
'sale_date': order['date'],
'party': customer_id,
'invoice_address': invoice_address,
'shipment_address': shipment_address,
}])
# fetch inventory of all the products before we create lines
warehouses = get_warehouses()
warehouse_ids = [warehouse['id'] for warehouse in warehouses]
lines = []
for item in order['items']:
# get the product. We assume ti already exists.
product = get_product(item['product'])
# find the first location that has inventory
product_inventory = get_product_inventory(product, warehouse_ids)
for location, quantities in product_inventory.items():
if quantities['quantity_available'] >= item['quantity']:
break
lines.append({
'sale': sale_order_id,
'product': product,
'quantity': item['quantity'],
'unit_price': item['unit_price'],
'warehouse': location,
})
SaleOrderLine.create(lines)
SaleOrder.quote([sale_order_id])
SaleOrder.confirm([sale_order_id]) | [
"def",
"create_order",
"(",
"order",
")",
":",
"SaleOrder",
"=",
"client",
".",
"model",
"(",
"'sale.sale'",
")",
"SaleOrderLine",
"=",
"client",
".",
"model",
"(",
"'sale.line'",
")",
"# Check if customer exists, if not create one",
"customer_id",
"=",
"get_custome... | Create an order on fulfil from order_details.
See the calling function below for an example of the order_details | [
"Create",
"an",
"order",
"on",
"fulfil",
"from",
"order_details",
".",
"See",
"the",
"calling",
"function",
"below",
"for",
"an",
"example",
"of",
"the",
"order_details"
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/examples/create-sale-order.py#L162-L235 | train | 36,990 |
fulfilio/fulfil-python-api | fulfil_client/model.py | model_base | def model_base(fulfil_client, cache_backend=None, cache_expire=10 * 60):
"""
Return a Base Model class that binds to the fulfil client instance and
the cache instance.
This design is inspired by the declarative base pattern in SQL Alchemy.
"""
return type(
'BaseModel',
(Model,),
{
'fulfil_client': fulfil_client,
'cache_backend': cache_backend,
'cache_expire': cache_expire,
'__abstract__': True,
'__modelregistry__': {},
},
) | python | def model_base(fulfil_client, cache_backend=None, cache_expire=10 * 60):
"""
Return a Base Model class that binds to the fulfil client instance and
the cache instance.
This design is inspired by the declarative base pattern in SQL Alchemy.
"""
return type(
'BaseModel',
(Model,),
{
'fulfil_client': fulfil_client,
'cache_backend': cache_backend,
'cache_expire': cache_expire,
'__abstract__': True,
'__modelregistry__': {},
},
) | [
"def",
"model_base",
"(",
"fulfil_client",
",",
"cache_backend",
"=",
"None",
",",
"cache_expire",
"=",
"10",
"*",
"60",
")",
":",
"return",
"type",
"(",
"'BaseModel'",
",",
"(",
"Model",
",",
")",
",",
"{",
"'fulfil_client'",
":",
"fulfil_client",
",",
... | Return a Base Model class that binds to the fulfil client instance and
the cache instance.
This design is inspired by the declarative base pattern in SQL Alchemy. | [
"Return",
"a",
"Base",
"Model",
"class",
"that",
"binds",
"to",
"the",
"fulfil",
"client",
"instance",
"and",
"the",
"cache",
"instance",
"."
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/model.py#L746-L763 | train | 36,991 |
fulfilio/fulfil-python-api | fulfil_client/model.py | Query.all | def all(self):
"""
Return the results represented by this Query as a list.
.. versionchanged:: 0.10.0
Returns an iterator that lazily loads
records instead of fetching thousands
of records at once.
"""
return self.rpc_model.search_read_all(
self.domain,
self._order_by,
self.fields,
context=self.context,
offset=self._offset or 0,
limit=self._limit,
) | python | def all(self):
"""
Return the results represented by this Query as a list.
.. versionchanged:: 0.10.0
Returns an iterator that lazily loads
records instead of fetching thousands
of records at once.
"""
return self.rpc_model.search_read_all(
self.domain,
self._order_by,
self.fields,
context=self.context,
offset=self._offset or 0,
limit=self._limit,
) | [
"def",
"all",
"(",
"self",
")",
":",
"return",
"self",
".",
"rpc_model",
".",
"search_read_all",
"(",
"self",
".",
"domain",
",",
"self",
".",
"_order_by",
",",
"self",
".",
"fields",
",",
"context",
"=",
"self",
".",
"context",
",",
"offset",
"=",
"... | Return the results represented by this Query as a list.
.. versionchanged:: 0.10.0
Returns an iterator that lazily loads
records instead of fetching thousands
of records at once. | [
"Return",
"the",
"results",
"represented",
"by",
"this",
"Query",
"as",
"a",
"list",
"."
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/model.py#L346-L363 | train | 36,992 |
fulfilio/fulfil-python-api | fulfil_client/model.py | Query.count | def count(self):
"Return a count of rows this Query would return."
return self.rpc_model.search_count(
self.domain, context=self.context
) | python | def count(self):
"Return a count of rows this Query would return."
return self.rpc_model.search_count(
self.domain, context=self.context
) | [
"def",
"count",
"(",
"self",
")",
":",
"return",
"self",
".",
"rpc_model",
".",
"search_count",
"(",
"self",
".",
"domain",
",",
"context",
"=",
"self",
".",
"context",
")"
] | Return a count of rows this Query would return. | [
"Return",
"a",
"count",
"of",
"rows",
"this",
"Query",
"would",
"return",
"."
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/model.py#L365-L369 | train | 36,993 |
fulfilio/fulfil-python-api | fulfil_client/model.py | Query.exists | def exists(self):
"""
A convenience method that returns True if a record
satisfying the query exists
"""
return self.rpc_model.search_count(
self.domain, context=self.context
) > 0 | python | def exists(self):
"""
A convenience method that returns True if a record
satisfying the query exists
"""
return self.rpc_model.search_count(
self.domain, context=self.context
) > 0 | [
"def",
"exists",
"(",
"self",
")",
":",
"return",
"self",
".",
"rpc_model",
".",
"search_count",
"(",
"self",
".",
"domain",
",",
"context",
"=",
"self",
".",
"context",
")",
">",
"0"
] | A convenience method that returns True if a record
satisfying the query exists | [
"A",
"convenience",
"method",
"that",
"returns",
"True",
"if",
"a",
"record",
"satisfying",
"the",
"query",
"exists"
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/model.py#L371-L378 | train | 36,994 |
fulfilio/fulfil-python-api | fulfil_client/model.py | Query.show_active_only | def show_active_only(self, state):
"""
Set active only to true or false on a copy of this query
"""
query = self._copy()
query.active_only = state
return query | python | def show_active_only(self, state):
"""
Set active only to true or false on a copy of this query
"""
query = self._copy()
query.active_only = state
return query | [
"def",
"show_active_only",
"(",
"self",
",",
"state",
")",
":",
"query",
"=",
"self",
".",
"_copy",
"(",
")",
"query",
".",
"active_only",
"=",
"state",
"return",
"query"
] | Set active only to true or false on a copy of this query | [
"Set",
"active",
"only",
"to",
"true",
"or",
"false",
"on",
"a",
"copy",
"of",
"this",
"query"
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/model.py#L380-L386 | train | 36,995 |
fulfilio/fulfil-python-api | fulfil_client/model.py | Query.filter_by | def filter_by(self, **kwargs):
"""
Apply the given filtering criterion to a copy of this Query, using
keyword expressions.
"""
query = self._copy()
for field, value in kwargs.items():
query.domain.append(
(field, '=', value)
)
return query | python | def filter_by(self, **kwargs):
"""
Apply the given filtering criterion to a copy of this Query, using
keyword expressions.
"""
query = self._copy()
for field, value in kwargs.items():
query.domain.append(
(field, '=', value)
)
return query | [
"def",
"filter_by",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"query",
"=",
"self",
".",
"_copy",
"(",
")",
"for",
"field",
",",
"value",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"query",
".",
"domain",
".",
"append",
"(",
"(",
"field",... | Apply the given filtering criterion to a copy of this Query, using
keyword expressions. | [
"Apply",
"the",
"given",
"filtering",
"criterion",
"to",
"a",
"copy",
"of",
"this",
"Query",
"using",
"keyword",
"expressions",
"."
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/model.py#L388-L398 | train | 36,996 |
fulfilio/fulfil-python-api | fulfil_client/model.py | Query.filter_by_domain | def filter_by_domain(self, domain):
"""
Apply the given domain to a copy of this query
"""
query = self._copy()
query.domain = domain
return query | python | def filter_by_domain(self, domain):
"""
Apply the given domain to a copy of this query
"""
query = self._copy()
query.domain = domain
return query | [
"def",
"filter_by_domain",
"(",
"self",
",",
"domain",
")",
":",
"query",
"=",
"self",
".",
"_copy",
"(",
")",
"query",
".",
"domain",
"=",
"domain",
"return",
"query"
] | Apply the given domain to a copy of this query | [
"Apply",
"the",
"given",
"domain",
"to",
"a",
"copy",
"of",
"this",
"query"
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/model.py#L400-L406 | train | 36,997 |
fulfilio/fulfil-python-api | fulfil_client/model.py | Query.first | def first(self):
"""
Return the first result of this Query or None if the result
doesn't contain any row.
"""
results = self.rpc_model.search_read(
self.domain, None, 1, self._order_by, self.fields,
context=self.context
)
return results and results[0] or None | python | def first(self):
"""
Return the first result of this Query or None if the result
doesn't contain any row.
"""
results = self.rpc_model.search_read(
self.domain, None, 1, self._order_by, self.fields,
context=self.context
)
return results and results[0] or None | [
"def",
"first",
"(",
"self",
")",
":",
"results",
"=",
"self",
".",
"rpc_model",
".",
"search_read",
"(",
"self",
".",
"domain",
",",
"None",
",",
"1",
",",
"self",
".",
"_order_by",
",",
"self",
".",
"fields",
",",
"context",
"=",
"self",
".",
"co... | Return the first result of this Query or None if the result
doesn't contain any row. | [
"Return",
"the",
"first",
"result",
"of",
"this",
"Query",
"or",
"None",
"if",
"the",
"result",
"doesn",
"t",
"contain",
"any",
"row",
"."
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/model.py#L409-L418 | train | 36,998 |
fulfilio/fulfil-python-api | fulfil_client/model.py | Query.get | def get(self, id):
"""
Return an instance based on the given primary key identifier,
or None if not found.
This returns a record whether active or not.
"""
ctx = self.context.copy()
ctx['active_test'] = False
results = self.rpc_model.search_read(
[('id', '=', id)],
None, None, None, self.fields,
context=ctx
)
return results and results[0] or None | python | def get(self, id):
"""
Return an instance based on the given primary key identifier,
or None if not found.
This returns a record whether active or not.
"""
ctx = self.context.copy()
ctx['active_test'] = False
results = self.rpc_model.search_read(
[('id', '=', id)],
None, None, None, self.fields,
context=ctx
)
return results and results[0] or None | [
"def",
"get",
"(",
"self",
",",
"id",
")",
":",
"ctx",
"=",
"self",
".",
"context",
".",
"copy",
"(",
")",
"ctx",
"[",
"'active_test'",
"]",
"=",
"False",
"results",
"=",
"self",
".",
"rpc_model",
".",
"search_read",
"(",
"[",
"(",
"'id'",
",",
"... | Return an instance based on the given primary key identifier,
or None if not found.
This returns a record whether active or not. | [
"Return",
"an",
"instance",
"based",
"on",
"the",
"given",
"primary",
"key",
"identifier",
"or",
"None",
"if",
"not",
"found",
"."
] | 180ac969c427b1292439a0371866aa5f169ffa6b | https://github.com/fulfilio/fulfil-python-api/blob/180ac969c427b1292439a0371866aa5f169ffa6b/fulfil_client/model.py#L421-L435 | train | 36,999 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.