repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1 value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1 value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
mapbox/mapbox-sdk-py | mapbox/services/base.py | Session | def Session(access_token=None, env=None):
"""Create an HTTP session.
Parameters
----------
access_token : str
Mapbox access token string (optional).
env : dict, optional
A dict that subsitutes for os.environ.
Returns
-------
requests.Session
"""
if env is None:
env = os.environ.copy()
access_token = (
access_token or
env.get('MapboxAccessToken') or
env.get('MAPBOX_ACCESS_TOKEN'))
session = requests.Session()
session.params.update(access_token=access_token)
session.headers.update({
'User-Agent': 'mapbox-sdk-py/{0} {1}'.format(
__version__, requests.utils.default_user_agent())})
return session | python | def Session(access_token=None, env=None):
"""Create an HTTP session.
Parameters
----------
access_token : str
Mapbox access token string (optional).
env : dict, optional
A dict that subsitutes for os.environ.
Returns
-------
requests.Session
"""
if env is None:
env = os.environ.copy()
access_token = (
access_token or
env.get('MapboxAccessToken') or
env.get('MAPBOX_ACCESS_TOKEN'))
session = requests.Session()
session.params.update(access_token=access_token)
session.headers.update({
'User-Agent': 'mapbox-sdk-py/{0} {1}'.format(
__version__, requests.utils.default_user_agent())})
return session | [
"def",
"Session",
"(",
"access_token",
"=",
"None",
",",
"env",
"=",
"None",
")",
":",
"if",
"env",
"is",
"None",
":",
"env",
"=",
"os",
".",
"environ",
".",
"copy",
"(",
")",
"access_token",
"=",
"(",
"access_token",
"or",
"env",
".",
"get",
"(",
... | Create an HTTP session.
Parameters
----------
access_token : str
Mapbox access token string (optional).
env : dict, optional
A dict that subsitutes for os.environ.
Returns
-------
requests.Session | [
"Create",
"an",
"HTTP",
"session",
"."
] | 72d19dbcf2d254a6ea08129a726471fd21f13023 | https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/base.py#L14-L39 | train | 219,800 |
mapbox/mapbox-sdk-py | mapbox/services/base.py | Service.username | def username(self):
"""The username in the service's access token
Returns
-------
str
"""
token = self.session.params.get('access_token')
if not token:
raise errors.TokenError(
"session does not have a valid access_token param")
data = token.split('.')[1]
# replace url chars and add padding
# (https://gist.github.com/perrygeo/ee7c65bb1541ff6ac770)
data = data.replace('-', '+').replace('_', '/') + "==="
try:
return json.loads(base64.b64decode(data).decode('utf-8'))['u']
except (ValueError, KeyError):
raise errors.TokenError(
"access_token does not contain username") | python | def username(self):
"""The username in the service's access token
Returns
-------
str
"""
token = self.session.params.get('access_token')
if not token:
raise errors.TokenError(
"session does not have a valid access_token param")
data = token.split('.')[1]
# replace url chars and add padding
# (https://gist.github.com/perrygeo/ee7c65bb1541ff6ac770)
data = data.replace('-', '+').replace('_', '/') + "==="
try:
return json.loads(base64.b64decode(data).decode('utf-8'))['u']
except (ValueError, KeyError):
raise errors.TokenError(
"access_token does not contain username") | [
"def",
"username",
"(",
"self",
")",
":",
"token",
"=",
"self",
".",
"session",
".",
"params",
".",
"get",
"(",
"'access_token'",
")",
"if",
"not",
"token",
":",
"raise",
"errors",
".",
"TokenError",
"(",
"\"session does not have a valid access_token param\"",
... | The username in the service's access token
Returns
-------
str | [
"The",
"username",
"in",
"the",
"service",
"s",
"access",
"token"
] | 72d19dbcf2d254a6ea08129a726471fd21f13023 | https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/base.py#L101-L120 | train | 219,801 |
mapbox/mapbox-sdk-py | mapbox/services/base.py | Service.handle_http_error | def handle_http_error(self, response, custom_messages=None,
raise_for_status=False):
"""Converts service errors to Python exceptions
Parameters
----------
response : requests.Response
A service response.
custom_messages : dict, optional
A mapping of custom exception messages to HTTP status codes.
raise_for_status : bool, optional
If True, the requests library provides Python exceptions.
Returns
-------
None
"""
if not custom_messages:
custom_messages = {}
if response.status_code in custom_messages.keys():
raise errors.HTTPError(custom_messages[response.status_code])
if raise_for_status:
response.raise_for_status() | python | def handle_http_error(self, response, custom_messages=None,
raise_for_status=False):
"""Converts service errors to Python exceptions
Parameters
----------
response : requests.Response
A service response.
custom_messages : dict, optional
A mapping of custom exception messages to HTTP status codes.
raise_for_status : bool, optional
If True, the requests library provides Python exceptions.
Returns
-------
None
"""
if not custom_messages:
custom_messages = {}
if response.status_code in custom_messages.keys():
raise errors.HTTPError(custom_messages[response.status_code])
if raise_for_status:
response.raise_for_status() | [
"def",
"handle_http_error",
"(",
"self",
",",
"response",
",",
"custom_messages",
"=",
"None",
",",
"raise_for_status",
"=",
"False",
")",
":",
"if",
"not",
"custom_messages",
":",
"custom_messages",
"=",
"{",
"}",
"if",
"response",
".",
"status_code",
"in",
... | Converts service errors to Python exceptions
Parameters
----------
response : requests.Response
A service response.
custom_messages : dict, optional
A mapping of custom exception messages to HTTP status codes.
raise_for_status : bool, optional
If True, the requests library provides Python exceptions.
Returns
-------
None | [
"Converts",
"service",
"errors",
"to",
"Python",
"exceptions"
] | 72d19dbcf2d254a6ea08129a726471fd21f13023 | https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/base.py#L122-L144 | train | 219,802 |
mapbox/mapbox-sdk-py | mapbox/services/mapmatching.py | MapMatcher.match | def match(self, feature, gps_precision=None, profile='mapbox.driving'):
"""Match features to OpenStreetMap data."""
profile = self._validate_profile(profile)
feature = self._validate_feature(feature)
geojson_line_feature = json.dumps(feature)
uri = URITemplate(self.baseuri + '/{profile}.json').expand(
profile=profile)
params = None
if gps_precision:
params = {'gps_precision': gps_precision}
res = self.session.post(uri, data=geojson_line_feature, params=params,
headers={'Content-Type': 'application/json'})
self.handle_http_error(res)
def geojson():
return res.json()
res.geojson = geojson
return res | python | def match(self, feature, gps_precision=None, profile='mapbox.driving'):
"""Match features to OpenStreetMap data."""
profile = self._validate_profile(profile)
feature = self._validate_feature(feature)
geojson_line_feature = json.dumps(feature)
uri = URITemplate(self.baseuri + '/{profile}.json').expand(
profile=profile)
params = None
if gps_precision:
params = {'gps_precision': gps_precision}
res = self.session.post(uri, data=geojson_line_feature, params=params,
headers={'Content-Type': 'application/json'})
self.handle_http_error(res)
def geojson():
return res.json()
res.geojson = geojson
return res | [
"def",
"match",
"(",
"self",
",",
"feature",
",",
"gps_precision",
"=",
"None",
",",
"profile",
"=",
"'mapbox.driving'",
")",
":",
"profile",
"=",
"self",
".",
"_validate_profile",
"(",
"profile",
")",
"feature",
"=",
"self",
".",
"_validate_feature",
"(",
... | Match features to OpenStreetMap data. | [
"Match",
"features",
"to",
"OpenStreetMap",
"data",
"."
] | 72d19dbcf2d254a6ea08129a726471fd21f13023 | https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/mapmatching.py#L33-L55 | train | 219,803 |
mapbox/mapbox-sdk-py | mapbox/services/tilequery.py | Tilequery._validate_geometry | def _validate_geometry(self, geometry):
"""Validates geometry, raising error if invalid."""
if geometry is not None and geometry not in self.valid_geometries:
raise InvalidParameterError("{} is not a valid geometry".format(geometry))
return geometry | python | def _validate_geometry(self, geometry):
"""Validates geometry, raising error if invalid."""
if geometry is not None and geometry not in self.valid_geometries:
raise InvalidParameterError("{} is not a valid geometry".format(geometry))
return geometry | [
"def",
"_validate_geometry",
"(",
"self",
",",
"geometry",
")",
":",
"if",
"geometry",
"is",
"not",
"None",
"and",
"geometry",
"not",
"in",
"self",
".",
"valid_geometries",
":",
"raise",
"InvalidParameterError",
"(",
"\"{} is not a valid geometry\"",
".",
"format"... | Validates geometry, raising error if invalid. | [
"Validates",
"geometry",
"raising",
"error",
"if",
"invalid",
"."
] | 72d19dbcf2d254a6ea08129a726471fd21f13023 | https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/tilequery.py#L72-L78 | train | 219,804 |
mapbox/mapbox-sdk-py | mapbox/services/tilequery.py | Tilequery.tilequery | def tilequery(
self,
map_id,
lon=None,
lat=None,
radius=None,
limit=None,
dedupe=None,
geometry=None,
layers=None,
):
"""Returns data about specific features
from a vector tileset.
Parameters
----------
map_id : str or list
The tileset's unique identifier in the
format username.id.
map_id may be either a str with one value
or a list with multiple values.
lon : float
The longitude to query, where -180
is the minimum value and 180 is the
maximum value.
lat : float
The latitude to query, where -85.0511
is the minimum value and 85.0511 is the
maximum value.
radius : int, optional
The approximate distance in meters to
query, where 0 is the minimum value.
(There is no maximum value.)
If None, the default value is 0.
limit : int, optional
The number of features to return, where
1 is the minimum value and 50 is the
maximum value.
If None, the default value is 5.
dedupe : bool, optional
Whether to remove duplicate results.
If None, the default value is True.
geometry : str, optional
The geometry type to query.
layers : list, optional
The list of layers to query.
If a specified layer does not exist,
then the Tilequery API will skip it.
If no layers exist, then the API will
return an empty GeoJSON FeatureCollection.
Returns
-------
request.Response
The response object with a GeoJSON
FeatureCollection of features at or near
the specified longitude and latitude.
"""
# If map_id is a list, then convert it to a str
# of comma-separated values.
if isinstance(map_id, list):
map_id = ",".join(map_id)
# Validate lon and lat.
lon = self._validate_lon(lon)
lat = self._validate_lat(lat)
# Create dict to assist in building URI resource path.
path_values = dict(
api_name=self.api_name, lon=lon, lat=lat
)
# Build URI resource path.
path_part = "/" + map_id + "/{api_name}/{lon},{lat}.json"
uri = URITemplate(self.base_uri + path_part).expand(**path_values)
# Build URI query_parameters.
query_parameters = dict()
if radius is not None:
radius = self._validate_radius(radius)
query_parameters["radius"] = radius
if limit is not None:
limit = self._validate_limit(limit)
query_parameters["limit"] = limit
if dedupe is not None:
query_parameters["dedupe"] = "true" if True else "false"
if geometry is not None:
geometry = self._validate_geometry(geometry)
query_parameters["geometry"] = geometry
if layers is not None:
query_parameters["layers"] = ",".join(layers)
# Send HTTP GET request.
response = self.session.get(uri, params=query_parameters)
self.handle_http_error(response)
# To be consistent with other services,
# add geojson method to response object.
def geojson():
return response.json()
response.geojson = geojson
return response | python | def tilequery(
self,
map_id,
lon=None,
lat=None,
radius=None,
limit=None,
dedupe=None,
geometry=None,
layers=None,
):
"""Returns data about specific features
from a vector tileset.
Parameters
----------
map_id : str or list
The tileset's unique identifier in the
format username.id.
map_id may be either a str with one value
or a list with multiple values.
lon : float
The longitude to query, where -180
is the minimum value and 180 is the
maximum value.
lat : float
The latitude to query, where -85.0511
is the minimum value and 85.0511 is the
maximum value.
radius : int, optional
The approximate distance in meters to
query, where 0 is the minimum value.
(There is no maximum value.)
If None, the default value is 0.
limit : int, optional
The number of features to return, where
1 is the minimum value and 50 is the
maximum value.
If None, the default value is 5.
dedupe : bool, optional
Whether to remove duplicate results.
If None, the default value is True.
geometry : str, optional
The geometry type to query.
layers : list, optional
The list of layers to query.
If a specified layer does not exist,
then the Tilequery API will skip it.
If no layers exist, then the API will
return an empty GeoJSON FeatureCollection.
Returns
-------
request.Response
The response object with a GeoJSON
FeatureCollection of features at or near
the specified longitude and latitude.
"""
# If map_id is a list, then convert it to a str
# of comma-separated values.
if isinstance(map_id, list):
map_id = ",".join(map_id)
# Validate lon and lat.
lon = self._validate_lon(lon)
lat = self._validate_lat(lat)
# Create dict to assist in building URI resource path.
path_values = dict(
api_name=self.api_name, lon=lon, lat=lat
)
# Build URI resource path.
path_part = "/" + map_id + "/{api_name}/{lon},{lat}.json"
uri = URITemplate(self.base_uri + path_part).expand(**path_values)
# Build URI query_parameters.
query_parameters = dict()
if radius is not None:
radius = self._validate_radius(radius)
query_parameters["radius"] = radius
if limit is not None:
limit = self._validate_limit(limit)
query_parameters["limit"] = limit
if dedupe is not None:
query_parameters["dedupe"] = "true" if True else "false"
if geometry is not None:
geometry = self._validate_geometry(geometry)
query_parameters["geometry"] = geometry
if layers is not None:
query_parameters["layers"] = ",".join(layers)
# Send HTTP GET request.
response = self.session.get(uri, params=query_parameters)
self.handle_http_error(response)
# To be consistent with other services,
# add geojson method to response object.
def geojson():
return response.json()
response.geojson = geojson
return response | [
"def",
"tilequery",
"(",
"self",
",",
"map_id",
",",
"lon",
"=",
"None",
",",
"lat",
"=",
"None",
",",
"radius",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"dedupe",
"=",
"None",
",",
"geometry",
"=",
"None",
",",
"layers",
"=",
"None",
",",
")... | Returns data about specific features
from a vector tileset.
Parameters
----------
map_id : str or list
The tileset's unique identifier in the
format username.id.
map_id may be either a str with one value
or a list with multiple values.
lon : float
The longitude to query, where -180
is the minimum value and 180 is the
maximum value.
lat : float
The latitude to query, where -85.0511
is the minimum value and 85.0511 is the
maximum value.
radius : int, optional
The approximate distance in meters to
query, where 0 is the minimum value.
(There is no maximum value.)
If None, the default value is 0.
limit : int, optional
The number of features to return, where
1 is the minimum value and 50 is the
maximum value.
If None, the default value is 5.
dedupe : bool, optional
Whether to remove duplicate results.
If None, the default value is True.
geometry : str, optional
The geometry type to query.
layers : list, optional
The list of layers to query.
If a specified layer does not exist,
then the Tilequery API will skip it.
If no layers exist, then the API will
return an empty GeoJSON FeatureCollection.
Returns
-------
request.Response
The response object with a GeoJSON
FeatureCollection of features at or near
the specified longitude and latitude. | [
"Returns",
"data",
"about",
"specific",
"features",
"from",
"a",
"vector",
"tileset",
"."
] | 72d19dbcf2d254a6ea08129a726471fd21f13023 | https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/tilequery.py#L80-L209 | train | 219,805 |
mapbox/mapbox-sdk-py | mapbox/services/maps.py | Maps._validate_file_format | def _validate_file_format(self, file_format):
"""Validates file format, raising error if invalid."""
if file_format not in self.valid_file_formats:
raise InvalidFileFormatError(
"{} is not a valid file format".format(file_format)
)
return file_format | python | def _validate_file_format(self, file_format):
"""Validates file format, raising error if invalid."""
if file_format not in self.valid_file_formats:
raise InvalidFileFormatError(
"{} is not a valid file format".format(file_format)
)
return file_format | [
"def",
"_validate_file_format",
"(",
"self",
",",
"file_format",
")",
":",
"if",
"file_format",
"not",
"in",
"self",
".",
"valid_file_formats",
":",
"raise",
"InvalidFileFormatError",
"(",
"\"{} is not a valid file format\"",
".",
"format",
"(",
"file_format",
")",
... | Validates file format, raising error if invalid. | [
"Validates",
"file",
"format",
"raising",
"error",
"if",
"invalid",
"."
] | 72d19dbcf2d254a6ea08129a726471fd21f13023 | https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/maps.py#L122-L130 | train | 219,806 |
mapbox/mapbox-sdk-py | mapbox/services/maps.py | Maps._validate_feature_format | def _validate_feature_format(self, feature_format):
"""Validates feature format, raising error if invalid."""
if feature_format not in self.valid_feature_formats:
raise InvalidFeatureFormatError(
"{} is not a valid feature format".format(feature_format)
)
return feature_format | python | def _validate_feature_format(self, feature_format):
"""Validates feature format, raising error if invalid."""
if feature_format not in self.valid_feature_formats:
raise InvalidFeatureFormatError(
"{} is not a valid feature format".format(feature_format)
)
return feature_format | [
"def",
"_validate_feature_format",
"(",
"self",
",",
"feature_format",
")",
":",
"if",
"feature_format",
"not",
"in",
"self",
".",
"valid_feature_formats",
":",
"raise",
"InvalidFeatureFormatError",
"(",
"\"{} is not a valid feature format\"",
".",
"format",
"(",
"featu... | Validates feature format, raising error if invalid. | [
"Validates",
"feature",
"format",
"raising",
"error",
"if",
"invalid",
"."
] | 72d19dbcf2d254a6ea08129a726471fd21f13023 | https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/maps.py#L144-L152 | train | 219,807 |
mapbox/mapbox-sdk-py | mapbox/services/maps.py | Maps._validate_marker_name | def _validate_marker_name(self, marker_name):
"""Validates marker name, raising error if invalid."""
if marker_name not in self.valid_marker_names:
raise InvalidMarkerNameError(
"{} is not a valid marker name".format(marker_name)
)
return marker_name | python | def _validate_marker_name(self, marker_name):
"""Validates marker name, raising error if invalid."""
if marker_name not in self.valid_marker_names:
raise InvalidMarkerNameError(
"{} is not a valid marker name".format(marker_name)
)
return marker_name | [
"def",
"_validate_marker_name",
"(",
"self",
",",
"marker_name",
")",
":",
"if",
"marker_name",
"not",
"in",
"self",
".",
"valid_marker_names",
":",
"raise",
"InvalidMarkerNameError",
"(",
"\"{} is not a valid marker name\"",
".",
"format",
"(",
"marker_name",
")",
... | Validates marker name, raising error if invalid. | [
"Validates",
"marker",
"name",
"raising",
"error",
"if",
"invalid",
"."
] | 72d19dbcf2d254a6ea08129a726471fd21f13023 | https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/maps.py#L154-L162 | train | 219,808 |
mapbox/mapbox-sdk-py | mapbox/services/maps.py | Maps._validate_label | def _validate_label(self, label):
"""Validates label, raising error if invalid."""
letter_pattern = compile("^[a-z]{1}$")
number_pattern = compile("^[0]{1}$|^[1-9]{1,2}$")
icon_pattern = compile("^[a-zA-Z ]{1,}$")
if not match(letter_pattern, label)\
and not match(number_pattern, label)\
and not match(icon_pattern, label):
raise InvalidLabelError(
"{} is not a valid label".format(label)
)
return label | python | def _validate_label(self, label):
"""Validates label, raising error if invalid."""
letter_pattern = compile("^[a-z]{1}$")
number_pattern = compile("^[0]{1}$|^[1-9]{1,2}$")
icon_pattern = compile("^[a-zA-Z ]{1,}$")
if not match(letter_pattern, label)\
and not match(number_pattern, label)\
and not match(icon_pattern, label):
raise InvalidLabelError(
"{} is not a valid label".format(label)
)
return label | [
"def",
"_validate_label",
"(",
"self",
",",
"label",
")",
":",
"letter_pattern",
"=",
"compile",
"(",
"\"^[a-z]{1}$\"",
")",
"number_pattern",
"=",
"compile",
"(",
"\"^[0]{1}$|^[1-9]{1,2}$\"",
")",
"icon_pattern",
"=",
"compile",
"(",
"\"^[a-zA-Z ]{1,}$\"",
")",
"... | Validates label, raising error if invalid. | [
"Validates",
"label",
"raising",
"error",
"if",
"invalid",
"."
] | 72d19dbcf2d254a6ea08129a726471fd21f13023 | https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/maps.py#L164-L178 | train | 219,809 |
mapbox/mapbox-sdk-py | mapbox/services/maps.py | Maps._validate_color | def _validate_color(self, color):
"""Validates color, raising error if invalid."""
three_digit_pattern = compile("^[a-f0-9]{3}$")
six_digit_pattern = compile("^[a-f0-9]{6}$")
if not match(three_digit_pattern, color)\
and not match(six_digit_pattern, color):
raise InvalidColorError(
"{} is not a valid color".format(color)
)
return color | python | def _validate_color(self, color):
"""Validates color, raising error if invalid."""
three_digit_pattern = compile("^[a-f0-9]{3}$")
six_digit_pattern = compile("^[a-f0-9]{6}$")
if not match(three_digit_pattern, color)\
and not match(six_digit_pattern, color):
raise InvalidColorError(
"{} is not a valid color".format(color)
)
return color | [
"def",
"_validate_color",
"(",
"self",
",",
"color",
")",
":",
"three_digit_pattern",
"=",
"compile",
"(",
"\"^[a-f0-9]{3}$\"",
")",
"six_digit_pattern",
"=",
"compile",
"(",
"\"^[a-f0-9]{6}$\"",
")",
"if",
"not",
"match",
"(",
"three_digit_pattern",
",",
"color",... | Validates color, raising error if invalid. | [
"Validates",
"color",
"raising",
"error",
"if",
"invalid",
"."
] | 72d19dbcf2d254a6ea08129a726471fd21f13023 | https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/maps.py#L180-L192 | train | 219,810 |
mapbox/mapbox-sdk-py | mapbox/services/maps.py | Maps.tile | def tile(self, map_id, x, y, z, retina=False,
file_format="png", style_id=None, timestamp=None):
"""Returns an image tile, vector tile, or UTFGrid
in the specified file format.
Parameters
----------
map_id : str
The tile's unique identifier in the format username.id.
x : int
The tile's column, where 0 is the minimum value
and ((2**z) - 1) is the maximum value.
y : int
The tile's row, where 0 is the minimum value
and ((2**z) - 1) is the maximum value.
z : int
The tile's zoom level, where 0 is the minimum value
and 20 is the maximum value.
retina : bool, optional
The tile's scale, where True indicates Retina scale
(double scale) and False indicates regular scale.
The default value is false.
file_format : str, optional
The tile's file format.
The default value is png.
style_id : str, optional
The tile's style id.
style_id must be used together with timestamp.
timestamp : str, optional
The style id's ISO-formatted timestamp, found by
accessing the "modified" property of a style object.
timestamp must be used together with style_id.
Returns
-------
request.Response
The response object with a tile in the specified format.
"""
# Check x, y, and z.
if x is None or y is None or z is None:
raise ValidationError(
"x, y, and z must be not be None"
)
# Validate x, y, z, retina, and file_format.
x = self._validate_x(x, z)
y = self._validate_y(y, z)
z = self._validate_z(z)
retina = self._validate_retina(retina)
file_format = self._validate_file_format(file_format)
# Create dict to assist in building URI resource path.
path_values = dict(
map_id=map_id,
x=str(x),
y=str(y),
z=str(z)
)
# Start building URI resource path.
path_part = "/{map_id}/{z}/{x}/{y}"
uri = URITemplate(self.base_uri + path_part).expand(**path_values)
# Finish building URI resource path.
# As in static.py, this two-part process avoids
# undesired escaping of "@" in "@2x."
path_part = "{}.{}".format(retina, file_format)
uri += path_part
# Validate timestamp and build URI query parameters.
query_parameters = dict()
if style_id is not None and timestamp is not None:
timestamp = self._validate_timestamp(timestamp)
style = "{}@{}".format(style_id, timestamp)
query_parameters["style"] = style
# Send HTTP GET request.
response = self.session.get(uri, params=query_parameters)
self.handle_http_error(response)
return response | python | def tile(self, map_id, x, y, z, retina=False,
file_format="png", style_id=None, timestamp=None):
"""Returns an image tile, vector tile, or UTFGrid
in the specified file format.
Parameters
----------
map_id : str
The tile's unique identifier in the format username.id.
x : int
The tile's column, where 0 is the minimum value
and ((2**z) - 1) is the maximum value.
y : int
The tile's row, where 0 is the minimum value
and ((2**z) - 1) is the maximum value.
z : int
The tile's zoom level, where 0 is the minimum value
and 20 is the maximum value.
retina : bool, optional
The tile's scale, where True indicates Retina scale
(double scale) and False indicates regular scale.
The default value is false.
file_format : str, optional
The tile's file format.
The default value is png.
style_id : str, optional
The tile's style id.
style_id must be used together with timestamp.
timestamp : str, optional
The style id's ISO-formatted timestamp, found by
accessing the "modified" property of a style object.
timestamp must be used together with style_id.
Returns
-------
request.Response
The response object with a tile in the specified format.
"""
# Check x, y, and z.
if x is None or y is None or z is None:
raise ValidationError(
"x, y, and z must be not be None"
)
# Validate x, y, z, retina, and file_format.
x = self._validate_x(x, z)
y = self._validate_y(y, z)
z = self._validate_z(z)
retina = self._validate_retina(retina)
file_format = self._validate_file_format(file_format)
# Create dict to assist in building URI resource path.
path_values = dict(
map_id=map_id,
x=str(x),
y=str(y),
z=str(z)
)
# Start building URI resource path.
path_part = "/{map_id}/{z}/{x}/{y}"
uri = URITemplate(self.base_uri + path_part).expand(**path_values)
# Finish building URI resource path.
# As in static.py, this two-part process avoids
# undesired escaping of "@" in "@2x."
path_part = "{}.{}".format(retina, file_format)
uri += path_part
# Validate timestamp and build URI query parameters.
query_parameters = dict()
if style_id is not None and timestamp is not None:
timestamp = self._validate_timestamp(timestamp)
style = "{}@{}".format(style_id, timestamp)
query_parameters["style"] = style
# Send HTTP GET request.
response = self.session.get(uri, params=query_parameters)
self.handle_http_error(response)
return response | [
"def",
"tile",
"(",
"self",
",",
"map_id",
",",
"x",
",",
"y",
",",
"z",
",",
"retina",
"=",
"False",
",",
"file_format",
"=",
"\"png\"",
",",
"style_id",
"=",
"None",
",",
"timestamp",
"=",
"None",
")",
":",
"# Check x, y, and z.",
"if",
"x",
"is",
... | Returns an image tile, vector tile, or UTFGrid
in the specified file format.
Parameters
----------
map_id : str
The tile's unique identifier in the format username.id.
x : int
The tile's column, where 0 is the minimum value
and ((2**z) - 1) is the maximum value.
y : int
The tile's row, where 0 is the minimum value
and ((2**z) - 1) is the maximum value.
z : int
The tile's zoom level, where 0 is the minimum value
and 20 is the maximum value.
retina : bool, optional
The tile's scale, where True indicates Retina scale
(double scale) and False indicates regular scale.
The default value is false.
file_format : str, optional
The tile's file format.
The default value is png.
style_id : str, optional
The tile's style id.
style_id must be used together with timestamp.
timestamp : str, optional
The style id's ISO-formatted timestamp, found by
accessing the "modified" property of a style object.
timestamp must be used together with style_id.
Returns
-------
request.Response
The response object with a tile in the specified format. | [
"Returns",
"an",
"image",
"tile",
"vector",
"tile",
"or",
"UTFGrid",
"in",
"the",
"specified",
"file",
"format",
"."
] | 72d19dbcf2d254a6ea08129a726471fd21f13023 | https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/maps.py#L194-L295 | train | 219,811 |
mapbox/mapbox-sdk-py | mapbox/services/maps.py | Maps.features | def features(self, map_id, feature_format="json"):
"""Returns vector features from Mapbox Editor projects
as GeoJSON or KML.
Parameters
----------
map_id : str
The map's unique identifier in the format username.id.
feature_format : str, optional
The vector's feature format.
The default value is json.
Returns
-------
request.Response
The response object with vector features.
"""
# Validate feature_format.
feature_format = self._validate_feature_format(feature_format)
# Create dict to assist in building URI resource path.
path_values = dict(
map_id=map_id,
feature_format=feature_format
)
# Build URI resource path.
path_part = "/{map_id}/features.{feature_format}"
uri = URITemplate(self.base_uri + path_part).expand(**path_values)
# Send HTTP GET request.
response = self.session.get(uri)
self.handle_http_error(response)
return response | python | def features(self, map_id, feature_format="json"):
"""Returns vector features from Mapbox Editor projects
as GeoJSON or KML.
Parameters
----------
map_id : str
The map's unique identifier in the format username.id.
feature_format : str, optional
The vector's feature format.
The default value is json.
Returns
-------
request.Response
The response object with vector features.
"""
# Validate feature_format.
feature_format = self._validate_feature_format(feature_format)
# Create dict to assist in building URI resource path.
path_values = dict(
map_id=map_id,
feature_format=feature_format
)
# Build URI resource path.
path_part = "/{map_id}/features.{feature_format}"
uri = URITemplate(self.base_uri + path_part).expand(**path_values)
# Send HTTP GET request.
response = self.session.get(uri)
self.handle_http_error(response)
return response | [
"def",
"features",
"(",
"self",
",",
"map_id",
",",
"feature_format",
"=",
"\"json\"",
")",
":",
"# Validate feature_format.",
"feature_format",
"=",
"self",
".",
"_validate_feature_format",
"(",
"feature_format",
")",
"# Create dict to assist in building URI resource path.... | Returns vector features from Mapbox Editor projects
as GeoJSON or KML.
Parameters
----------
map_id : str
The map's unique identifier in the format username.id.
feature_format : str, optional
The vector's feature format.
The default value is json.
Returns
-------
request.Response
The response object with vector features. | [
"Returns",
"vector",
"features",
"from",
"Mapbox",
"Editor",
"projects",
"as",
"GeoJSON",
"or",
"KML",
"."
] | 72d19dbcf2d254a6ea08129a726471fd21f13023 | https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/maps.py#L297-L338 | train | 219,812 |
mapbox/mapbox-sdk-py | mapbox/services/maps.py | Maps.metadata | def metadata(self, map_id, secure=False):
"""Returns TileJSON metadata for a tileset.
Parameters
----------
map_id : str
The map's unique identifier in the format username.id.
secure : bool, optional
The representation of the requested resources,
where True indicates representation as HTTPS endpoints.
The default value is False.
Returns
-------
request.Response
The response object with TileJSON metadata for the
specified tileset.
"""
# Create dict to assist in building URI resource path.
path_values = dict(
map_id=map_id
)
# Build URI resource path.
path_part = "/{map_id}.json"
uri = URITemplate(self.base_uri + path_part).expand(**path_values)
# Build URI query parameters.
query_parameters = dict()
if secure:
query_parameters["secure"] = ""
# Send HTTP GET request.
response = self.session.get(uri, params=query_parameters)
self.handle_http_error(response)
return response | python | def metadata(self, map_id, secure=False):
"""Returns TileJSON metadata for a tileset.
Parameters
----------
map_id : str
The map's unique identifier in the format username.id.
secure : bool, optional
The representation of the requested resources,
where True indicates representation as HTTPS endpoints.
The default value is False.
Returns
-------
request.Response
The response object with TileJSON metadata for the
specified tileset.
"""
# Create dict to assist in building URI resource path.
path_values = dict(
map_id=map_id
)
# Build URI resource path.
path_part = "/{map_id}.json"
uri = URITemplate(self.base_uri + path_part).expand(**path_values)
# Build URI query parameters.
query_parameters = dict()
if secure:
query_parameters["secure"] = ""
# Send HTTP GET request.
response = self.session.get(uri, params=query_parameters)
self.handle_http_error(response)
return response | [
"def",
"metadata",
"(",
"self",
",",
"map_id",
",",
"secure",
"=",
"False",
")",
":",
"# Create dict to assist in building URI resource path.",
"path_values",
"=",
"dict",
"(",
"map_id",
"=",
"map_id",
")",
"# Build URI resource path.",
"path_part",
"=",
"\"/{map_id}.... | Returns TileJSON metadata for a tileset.
Parameters
----------
map_id : str
The map's unique identifier in the format username.id.
secure : bool, optional
The representation of the requested resources,
where True indicates representation as HTTPS endpoints.
The default value is False.
Returns
-------
request.Response
The response object with TileJSON metadata for the
specified tileset. | [
"Returns",
"TileJSON",
"metadata",
"for",
"a",
"tileset",
"."
] | 72d19dbcf2d254a6ea08129a726471fd21f13023 | https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/maps.py#L340-L384 | train | 219,813 |
mapbox/mapbox-sdk-py | mapbox/services/maps.py | Maps.marker | def marker(self, marker_name=None, label=None,
color=None, retina=False):
"""Returns a single marker image without any
background map.
Parameters
----------
marker_name : str
The marker's shape and size.
label : str, optional
The marker's alphanumeric label.
Options are a through z, 0 through 99, or the
name of a valid Maki icon.
color : str, optional
The marker's color.
Options are three- or six-digit hexadecimal
color codes.
retina : bool, optional
The marker's scale, where True indicates Retina scale
(double scale) and False indicates regular scale.
The default value is false.
Returns
-------
request.Response
The response object with the specified marker.
"""
# Check for marker_name.
if marker_name is None:
raise ValidationError(
"marker_name is a required argument"
)
# Validate marker_name and retina.
marker_name = self._validate_marker_name(marker_name)
retina = self._validate_retina(retina)
# Create dict and start building URI resource path.
path_values = dict(
marker_name=marker_name
)
path_part = "/marker/{marker_name}"
# Validate label, update dict,
# and continue building URI resource path.
if label is not None:
label = self._validate_label(label)
path_values["label"] = label
path_part += "-{label}"
# Validate color, update dict,
# and continue building URI resource path.
if color is not None:
color = self._validate_color(color)
path_values["color"] = color
path_part += "+{color}"
uri = URITemplate(self.base_uri + path_part).expand(**path_values)
# Finish building URI resource path.
path_part = "{}.png".format(retina)
uri += path_part
# Send HTTP GET request.
response = self.session.get(uri)
self.handle_http_error(response)
return response | python | def marker(self, marker_name=None, label=None,
color=None, retina=False):
"""Returns a single marker image without any
background map.
Parameters
----------
marker_name : str
The marker's shape and size.
label : str, optional
The marker's alphanumeric label.
Options are a through z, 0 through 99, or the
name of a valid Maki icon.
color : str, optional
The marker's color.
Options are three- or six-digit hexadecimal
color codes.
retina : bool, optional
The marker's scale, where True indicates Retina scale
(double scale) and False indicates regular scale.
The default value is false.
Returns
-------
request.Response
The response object with the specified marker.
"""
# Check for marker_name.
if marker_name is None:
raise ValidationError(
"marker_name is a required argument"
)
# Validate marker_name and retina.
marker_name = self._validate_marker_name(marker_name)
retina = self._validate_retina(retina)
# Create dict and start building URI resource path.
path_values = dict(
marker_name=marker_name
)
path_part = "/marker/{marker_name}"
# Validate label, update dict,
# and continue building URI resource path.
if label is not None:
label = self._validate_label(label)
path_values["label"] = label
path_part += "-{label}"
# Validate color, update dict,
# and continue building URI resource path.
if color is not None:
color = self._validate_color(color)
path_values["color"] = color
path_part += "+{color}"
uri = URITemplate(self.base_uri + path_part).expand(**path_values)
# Finish building URI resource path.
path_part = "{}.png".format(retina)
uri += path_part
# Send HTTP GET request.
response = self.session.get(uri)
self.handle_http_error(response)
return response | [
"def",
"marker",
"(",
"self",
",",
"marker_name",
"=",
"None",
",",
"label",
"=",
"None",
",",
"color",
"=",
"None",
",",
"retina",
"=",
"False",
")",
":",
"# Check for marker_name.",
"if",
"marker_name",
"is",
"None",
":",
"raise",
"ValidationError",
"(",... | Returns a single marker image without any
background map.
Parameters
----------
marker_name : str
The marker's shape and size.
label : str, optional
The marker's alphanumeric label.
Options are a through z, 0 through 99, or the
name of a valid Maki icon.
color : str, optional
The marker's color.
Options are three- or six-digit hexadecimal
color codes.
retina : bool, optional
The marker's scale, where True indicates Retina scale
(double scale) and False indicates regular scale.
The default value is false.
Returns
-------
request.Response
The response object with the specified marker. | [
"Returns",
"a",
"single",
"marker",
"image",
"without",
"any",
"background",
"map",
"."
] | 72d19dbcf2d254a6ea08129a726471fd21f13023 | https://github.com/mapbox/mapbox-sdk-py/blob/72d19dbcf2d254a6ea08129a726471fd21f13023/mapbox/services/maps.py#L386-L468 | train | 219,814 |
django-extensions/django-extensions | django_extensions/management/commands/runserver_plus.py | set_werkzeug_log_color | def set_werkzeug_log_color():
"""Try to set color to the werkzeug log."""
from django.core.management.color import color_style
from werkzeug.serving import WSGIRequestHandler
from werkzeug._internal import _log
_style = color_style()
_orig_log = WSGIRequestHandler.log
def werk_log(self, type, message, *args):
try:
msg = '%s - - [%s] %s' % (
self.address_string(),
self.log_date_time_string(),
message % args,
)
http_code = str(args[1])
except Exception:
return _orig_log(type, message, *args)
# Utilize terminal colors, if available
if http_code[0] == '2':
# Put 2XX first, since it should be the common case
msg = _style.HTTP_SUCCESS(msg)
elif http_code[0] == '1':
msg = _style.HTTP_INFO(msg)
elif http_code == '304':
msg = _style.HTTP_NOT_MODIFIED(msg)
elif http_code[0] == '3':
msg = _style.HTTP_REDIRECT(msg)
elif http_code == '404':
msg = _style.HTTP_NOT_FOUND(msg)
elif http_code[0] == '4':
msg = _style.HTTP_BAD_REQUEST(msg)
else:
# Any 5XX, or any other response
msg = _style.HTTP_SERVER_ERROR(msg)
_log(type, msg)
WSGIRequestHandler.log = werk_log | python | def set_werkzeug_log_color():
"""Try to set color to the werkzeug log."""
from django.core.management.color import color_style
from werkzeug.serving import WSGIRequestHandler
from werkzeug._internal import _log
_style = color_style()
_orig_log = WSGIRequestHandler.log
def werk_log(self, type, message, *args):
try:
msg = '%s - - [%s] %s' % (
self.address_string(),
self.log_date_time_string(),
message % args,
)
http_code = str(args[1])
except Exception:
return _orig_log(type, message, *args)
# Utilize terminal colors, if available
if http_code[0] == '2':
# Put 2XX first, since it should be the common case
msg = _style.HTTP_SUCCESS(msg)
elif http_code[0] == '1':
msg = _style.HTTP_INFO(msg)
elif http_code == '304':
msg = _style.HTTP_NOT_MODIFIED(msg)
elif http_code[0] == '3':
msg = _style.HTTP_REDIRECT(msg)
elif http_code == '404':
msg = _style.HTTP_NOT_FOUND(msg)
elif http_code[0] == '4':
msg = _style.HTTP_BAD_REQUEST(msg)
else:
# Any 5XX, or any other response
msg = _style.HTTP_SERVER_ERROR(msg)
_log(type, msg)
WSGIRequestHandler.log = werk_log | [
"def",
"set_werkzeug_log_color",
"(",
")",
":",
"from",
"django",
".",
"core",
".",
"management",
".",
"color",
"import",
"color_style",
"from",
"werkzeug",
".",
"serving",
"import",
"WSGIRequestHandler",
"from",
"werkzeug",
".",
"_internal",
"import",
"_log",
"... | Try to set color to the werkzeug log. | [
"Try",
"to",
"set",
"color",
"to",
"the",
"werkzeug",
"log",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/runserver_plus.py#L423-L463 | train | 219,815 |
django-extensions/django-extensions | django_extensions/db/fields/__init__.py | AutoSlugField._slug_strip | def _slug_strip(self, value):
"""
Clean up a slug by removing slug separator characters that occur at
the beginning or end of a slug.
If an alternate separator is used, it will also replace any instances
of the default '-' separator with the new separator.
"""
re_sep = '(?:-|%s)' % re.escape(self.separator)
value = re.sub('%s+' % re_sep, self.separator, value)
return re.sub(r'^%s+|%s+$' % (re_sep, re_sep), '', value) | python | def _slug_strip(self, value):
"""
Clean up a slug by removing slug separator characters that occur at
the beginning or end of a slug.
If an alternate separator is used, it will also replace any instances
of the default '-' separator with the new separator.
"""
re_sep = '(?:-|%s)' % re.escape(self.separator)
value = re.sub('%s+' % re_sep, self.separator, value)
return re.sub(r'^%s+|%s+$' % (re_sep, re_sep), '', value) | [
"def",
"_slug_strip",
"(",
"self",
",",
"value",
")",
":",
"re_sep",
"=",
"'(?:-|%s)'",
"%",
"re",
".",
"escape",
"(",
"self",
".",
"separator",
")",
"value",
"=",
"re",
".",
"sub",
"(",
"'%s+'",
"%",
"re_sep",
",",
"self",
".",
"separator",
",",
"... | Clean up a slug by removing slug separator characters that occur at
the beginning or end of a slug.
If an alternate separator is used, it will also replace any instances
of the default '-' separator with the new separator. | [
"Clean",
"up",
"a",
"slug",
"by",
"removing",
"slug",
"separator",
"characters",
"that",
"occur",
"at",
"the",
"beginning",
"or",
"end",
"of",
"a",
"slug",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/db/fields/__init__.py#L134-L144 | train | 219,816 |
django-extensions/django-extensions | django_extensions/management/commands/export_emails.py | full_name | def full_name(first_name, last_name, username, **extra):
"""Return full name or username."""
name = " ".join(n for n in [first_name, last_name] if n)
if not name:
return username
return name | python | def full_name(first_name, last_name, username, **extra):
"""Return full name or username."""
name = " ".join(n for n in [first_name, last_name] if n)
if not name:
return username
return name | [
"def",
"full_name",
"(",
"first_name",
",",
"last_name",
",",
"username",
",",
"*",
"*",
"extra",
")",
":",
"name",
"=",
"\" \"",
".",
"join",
"(",
"n",
"for",
"n",
"in",
"[",
"first_name",
",",
"last_name",
"]",
"if",
"n",
")",
"if",
"not",
"name"... | Return full name or username. | [
"Return",
"full",
"name",
"or",
"username",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/export_emails.py#L23-L28 | train | 219,817 |
django-extensions/django-extensions | django_extensions/management/commands/export_emails.py | Command.google | def google(self, qs):
"""CSV format suitable for importing into google GMail"""
csvf = writer(sys.stdout)
csvf.writerow(['Name', 'Email'])
for ent in qs:
csvf.writerow([full_name(**ent), ent['email']]) | python | def google(self, qs):
"""CSV format suitable for importing into google GMail"""
csvf = writer(sys.stdout)
csvf.writerow(['Name', 'Email'])
for ent in qs:
csvf.writerow([full_name(**ent), ent['email']]) | [
"def",
"google",
"(",
"self",
",",
"qs",
")",
":",
"csvf",
"=",
"writer",
"(",
"sys",
".",
"stdout",
")",
"csvf",
".",
"writerow",
"(",
"[",
"'Name'",
",",
"'Email'",
"]",
")",
"for",
"ent",
"in",
"qs",
":",
"csvf",
".",
"writerow",
"(",
"[",
"... | CSV format suitable for importing into google GMail | [
"CSV",
"format",
"suitable",
"for",
"importing",
"into",
"google",
"GMail"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/export_emails.py#L88-L93 | train | 219,818 |
django-extensions/django-extensions | django_extensions/management/commands/export_emails.py | Command.linkedin | def linkedin(self, qs):
"""
CSV format suitable for importing into linkedin Groups.
perfect for pre-approving members of a linkedin group.
"""
csvf = writer(sys.stdout)
csvf.writerow(['First Name', 'Last Name', 'Email'])
for ent in qs:
csvf.writerow([ent['first_name'], ent['last_name'], ent['email']]) | python | def linkedin(self, qs):
"""
CSV format suitable for importing into linkedin Groups.
perfect for pre-approving members of a linkedin group.
"""
csvf = writer(sys.stdout)
csvf.writerow(['First Name', 'Last Name', 'Email'])
for ent in qs:
csvf.writerow([ent['first_name'], ent['last_name'], ent['email']]) | [
"def",
"linkedin",
"(",
"self",
",",
"qs",
")",
":",
"csvf",
"=",
"writer",
"(",
"sys",
".",
"stdout",
")",
"csvf",
".",
"writerow",
"(",
"[",
"'First Name'",
",",
"'Last Name'",
",",
"'Email'",
"]",
")",
"for",
"ent",
"in",
"qs",
":",
"csvf",
".",... | CSV format suitable for importing into linkedin Groups.
perfect for pre-approving members of a linkedin group. | [
"CSV",
"format",
"suitable",
"for",
"importing",
"into",
"linkedin",
"Groups",
".",
"perfect",
"for",
"pre",
"-",
"approving",
"members",
"of",
"a",
"linkedin",
"group",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/export_emails.py#L95-L103 | train | 219,819 |
django-extensions/django-extensions | django_extensions/management/commands/export_emails.py | Command.outlook | def outlook(self, qs):
"""CSV format suitable for importing into outlook"""
csvf = writer(sys.stdout)
columns = ['Name', 'E-mail Address', 'Notes', 'E-mail 2 Address', 'E-mail 3 Address',
'Mobile Phone', 'Pager', 'Company', 'Job Title', 'Home Phone', 'Home Phone 2',
'Home Fax', 'Home Address', 'Business Phone', 'Business Phone 2',
'Business Fax', 'Business Address', 'Other Phone', 'Other Fax', 'Other Address']
csvf.writerow(columns)
empty = [''] * (len(columns) - 2)
for ent in qs:
csvf.writerow([full_name(**ent), ent['email']] + empty) | python | def outlook(self, qs):
"""CSV format suitable for importing into outlook"""
csvf = writer(sys.stdout)
columns = ['Name', 'E-mail Address', 'Notes', 'E-mail 2 Address', 'E-mail 3 Address',
'Mobile Phone', 'Pager', 'Company', 'Job Title', 'Home Phone', 'Home Phone 2',
'Home Fax', 'Home Address', 'Business Phone', 'Business Phone 2',
'Business Fax', 'Business Address', 'Other Phone', 'Other Fax', 'Other Address']
csvf.writerow(columns)
empty = [''] * (len(columns) - 2)
for ent in qs:
csvf.writerow([full_name(**ent), ent['email']] + empty) | [
"def",
"outlook",
"(",
"self",
",",
"qs",
")",
":",
"csvf",
"=",
"writer",
"(",
"sys",
".",
"stdout",
")",
"columns",
"=",
"[",
"'Name'",
",",
"'E-mail Address'",
",",
"'Notes'",
",",
"'E-mail 2 Address'",
",",
"'E-mail 3 Address'",
",",
"'Mobile Phone'",
... | CSV format suitable for importing into outlook | [
"CSV",
"format",
"suitable",
"for",
"importing",
"into",
"outlook"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/export_emails.py#L105-L115 | train | 219,820 |
django-extensions/django-extensions | django_extensions/management/commands/export_emails.py | Command.vcard | def vcard(self, qs):
"""VCARD format."""
try:
import vobject
except ImportError:
print(self.style.ERROR("Please install vobject to use the vcard export format."))
sys.exit(1)
out = sys.stdout
for ent in qs:
card = vobject.vCard()
card.add('fn').value = full_name(**ent)
if not ent['last_name'] and not ent['first_name']:
# fallback to fullname, if both first and lastname are not declared
card.add('n').value = vobject.vcard.Name(full_name(**ent))
else:
card.add('n').value = vobject.vcard.Name(ent['last_name'], ent['first_name'])
emailpart = card.add('email')
emailpart.value = ent['email']
emailpart.type_param = 'INTERNET'
out.write(card.serialize()) | python | def vcard(self, qs):
"""VCARD format."""
try:
import vobject
except ImportError:
print(self.style.ERROR("Please install vobject to use the vcard export format."))
sys.exit(1)
out = sys.stdout
for ent in qs:
card = vobject.vCard()
card.add('fn').value = full_name(**ent)
if not ent['last_name'] and not ent['first_name']:
# fallback to fullname, if both first and lastname are not declared
card.add('n').value = vobject.vcard.Name(full_name(**ent))
else:
card.add('n').value = vobject.vcard.Name(ent['last_name'], ent['first_name'])
emailpart = card.add('email')
emailpart.value = ent['email']
emailpart.type_param = 'INTERNET'
out.write(card.serialize()) | [
"def",
"vcard",
"(",
"self",
",",
"qs",
")",
":",
"try",
":",
"import",
"vobject",
"except",
"ImportError",
":",
"print",
"(",
"self",
".",
"style",
".",
"ERROR",
"(",
"\"Please install vobject to use the vcard export format.\"",
")",
")",
"sys",
".",
"exit",
... | VCARD format. | [
"VCARD",
"format",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/export_emails.py#L117-L138 | train | 219,821 |
django-extensions/django-extensions | django_extensions/management/mysql.py | parse_mysql_cnf | def parse_mysql_cnf(dbinfo):
"""
Attempt to parse mysql database config file for connection settings.
Ideally we would hook into django's code to do this, but read_default_file is handled by the mysql C libs
so we have to emulate the behaviour
Settings that are missing will return ''
returns (user, password, database_name, database_host, database_port)
"""
read_default_file = dbinfo.get('OPTIONS', {}).get('read_default_file')
if read_default_file:
config = configparser.RawConfigParser({
'user': '',
'password': '',
'database': '',
'host': '',
'port': '',
'socket': '',
})
import os
config.read(os.path.expanduser(read_default_file))
try:
user = config.get('client', 'user')
password = config.get('client', 'password')
database_name = config.get('client', 'database')
database_host = config.get('client', 'host')
database_port = config.get('client', 'port')
socket = config.get('client', 'socket')
if database_host == 'localhost' and socket:
# mysql actually uses a socket if host is localhost
database_host = socket
return user, password, database_name, database_host, database_port
except configparser.NoSectionError:
pass
return '', '', '', '', '' | python | def parse_mysql_cnf(dbinfo):
"""
Attempt to parse mysql database config file for connection settings.
Ideally we would hook into django's code to do this, but read_default_file is handled by the mysql C libs
so we have to emulate the behaviour
Settings that are missing will return ''
returns (user, password, database_name, database_host, database_port)
"""
read_default_file = dbinfo.get('OPTIONS', {}).get('read_default_file')
if read_default_file:
config = configparser.RawConfigParser({
'user': '',
'password': '',
'database': '',
'host': '',
'port': '',
'socket': '',
})
import os
config.read(os.path.expanduser(read_default_file))
try:
user = config.get('client', 'user')
password = config.get('client', 'password')
database_name = config.get('client', 'database')
database_host = config.get('client', 'host')
database_port = config.get('client', 'port')
socket = config.get('client', 'socket')
if database_host == 'localhost' and socket:
# mysql actually uses a socket if host is localhost
database_host = socket
return user, password, database_name, database_host, database_port
except configparser.NoSectionError:
pass
return '', '', '', '', '' | [
"def",
"parse_mysql_cnf",
"(",
"dbinfo",
")",
":",
"read_default_file",
"=",
"dbinfo",
".",
"get",
"(",
"'OPTIONS'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'read_default_file'",
")",
"if",
"read_default_file",
":",
"config",
"=",
"configparser",
".",
"RawConf... | Attempt to parse mysql database config file for connection settings.
Ideally we would hook into django's code to do this, but read_default_file is handled by the mysql C libs
so we have to emulate the behaviour
Settings that are missing will return ''
returns (user, password, database_name, database_host, database_port) | [
"Attempt",
"to",
"parse",
"mysql",
"database",
"config",
"file",
"for",
"connection",
"settings",
".",
"Ideally",
"we",
"would",
"hook",
"into",
"django",
"s",
"code",
"to",
"do",
"this",
"but",
"read_default_file",
"is",
"handled",
"by",
"the",
"mysql",
"C"... | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/mysql.py#L5-L43 | train | 219,822 |
django-extensions/django-extensions | django_extensions/management/jobs.py | get_jobs | def get_jobs(when=None, only_scheduled=False):
"""
Return a dictionary mapping of job names together with their respective
application class.
"""
# FIXME: HACK: make sure the project dir is on the path when executed as ./manage.py
try:
cpath = os.path.dirname(os.path.realpath(sys.argv[0]))
ppath = os.path.dirname(cpath)
if ppath not in sys.path:
sys.path.append(ppath)
except Exception:
pass
_jobs = {}
for app_name in [app.name for app in apps.get_app_configs()]:
scandirs = (None, 'minutely', 'quarter_hourly', 'hourly', 'daily', 'weekly', 'monthly', 'yearly')
if when:
scandirs = None, when
for subdir in scandirs:
try:
path = find_job_module(app_name, subdir)
for name in find_jobs(path):
if (app_name, name) in _jobs:
raise JobError("Duplicate job %s" % name)
job = import_job(app_name, name, subdir)
if only_scheduled and job.when is None:
# only include jobs which are scheduled
continue
if when and job.when != when:
# generic job not in same schedule
continue
_jobs[(app_name, name)] = job
except ImportError:
# No job module -- continue scanning
pass
return _jobs | python | def get_jobs(when=None, only_scheduled=False):
"""
Return a dictionary mapping of job names together with their respective
application class.
"""
# FIXME: HACK: make sure the project dir is on the path when executed as ./manage.py
try:
cpath = os.path.dirname(os.path.realpath(sys.argv[0]))
ppath = os.path.dirname(cpath)
if ppath not in sys.path:
sys.path.append(ppath)
except Exception:
pass
_jobs = {}
for app_name in [app.name for app in apps.get_app_configs()]:
scandirs = (None, 'minutely', 'quarter_hourly', 'hourly', 'daily', 'weekly', 'monthly', 'yearly')
if when:
scandirs = None, when
for subdir in scandirs:
try:
path = find_job_module(app_name, subdir)
for name in find_jobs(path):
if (app_name, name) in _jobs:
raise JobError("Duplicate job %s" % name)
job = import_job(app_name, name, subdir)
if only_scheduled and job.when is None:
# only include jobs which are scheduled
continue
if when and job.when != when:
# generic job not in same schedule
continue
_jobs[(app_name, name)] = job
except ImportError:
# No job module -- continue scanning
pass
return _jobs | [
"def",
"get_jobs",
"(",
"when",
"=",
"None",
",",
"only_scheduled",
"=",
"False",
")",
":",
"# FIXME: HACK: make sure the project dir is on the path when executed as ./manage.py",
"try",
":",
"cpath",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
... | Return a dictionary mapping of job names together with their respective
application class. | [
"Return",
"a",
"dictionary",
"mapping",
"of",
"job",
"names",
"together",
"with",
"their",
"respective",
"application",
"class",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/jobs.py#L102-L139 | train | 219,823 |
django-extensions/django-extensions | django_extensions/management/commands/runjobs.py | Command.runjobs_by_signals | def runjobs_by_signals(self, when, options):
""" Run jobs from the signals """
# Thanks for Ian Holsman for the idea and code
from django_extensions.management import signals
from django.conf import settings
verbosity = options["verbosity"]
for app_name in settings.INSTALLED_APPS:
try:
__import__(app_name + '.management', '', '', [''])
except ImportError:
pass
for app in (app.models_module for app in apps.get_app_configs() if app.models_module):
if verbosity > 1:
app_name = '.'.join(app.__name__.rsplit('.')[:-1])
print("Sending %s job signal for: %s" % (when, app_name))
if when == 'minutely':
signals.run_minutely_jobs.send(sender=app, app=app)
elif when == 'quarter_hourly':
signals.run_quarter_hourly_jobs.send(sender=app, app=app)
elif when == 'hourly':
signals.run_hourly_jobs.send(sender=app, app=app)
elif when == 'daily':
signals.run_daily_jobs.send(sender=app, app=app)
elif when == 'weekly':
signals.run_weekly_jobs.send(sender=app, app=app)
elif when == 'monthly':
signals.run_monthly_jobs.send(sender=app, app=app)
elif when == 'yearly':
signals.run_yearly_jobs.send(sender=app, app=app) | python | def runjobs_by_signals(self, when, options):
""" Run jobs from the signals """
# Thanks for Ian Holsman for the idea and code
from django_extensions.management import signals
from django.conf import settings
verbosity = options["verbosity"]
for app_name in settings.INSTALLED_APPS:
try:
__import__(app_name + '.management', '', '', [''])
except ImportError:
pass
for app in (app.models_module for app in apps.get_app_configs() if app.models_module):
if verbosity > 1:
app_name = '.'.join(app.__name__.rsplit('.')[:-1])
print("Sending %s job signal for: %s" % (when, app_name))
if when == 'minutely':
signals.run_minutely_jobs.send(sender=app, app=app)
elif when == 'quarter_hourly':
signals.run_quarter_hourly_jobs.send(sender=app, app=app)
elif when == 'hourly':
signals.run_hourly_jobs.send(sender=app, app=app)
elif when == 'daily':
signals.run_daily_jobs.send(sender=app, app=app)
elif when == 'weekly':
signals.run_weekly_jobs.send(sender=app, app=app)
elif when == 'monthly':
signals.run_monthly_jobs.send(sender=app, app=app)
elif when == 'yearly':
signals.run_yearly_jobs.send(sender=app, app=app) | [
"def",
"runjobs_by_signals",
"(",
"self",
",",
"when",
",",
"options",
")",
":",
"# Thanks for Ian Holsman for the idea and code",
"from",
"django_extensions",
".",
"management",
"import",
"signals",
"from",
"django",
".",
"conf",
"import",
"settings",
"verbosity",
"=... | Run jobs from the signals | [
"Run",
"jobs",
"from",
"the",
"signals"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/runjobs.py#L44-L74 | train | 219,824 |
django-extensions/django-extensions | django_extensions/__init__.py | get_version | def get_version(version):
"""Dynamically calculate the version based on VERSION tuple."""
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
str_version = "%s.%s.%s" % version[:3]
else:
str_version = "%s.%s_%s" % version[:3]
else:
str_version = "%s.%s" % version[:2]
return str_version | python | def get_version(version):
"""Dynamically calculate the version based on VERSION tuple."""
if len(version) > 2 and version[2] is not None:
if isinstance(version[2], int):
str_version = "%s.%s.%s" % version[:3]
else:
str_version = "%s.%s_%s" % version[:3]
else:
str_version = "%s.%s" % version[:2]
return str_version | [
"def",
"get_version",
"(",
"version",
")",
":",
"if",
"len",
"(",
"version",
")",
">",
"2",
"and",
"version",
"[",
"2",
"]",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"version",
"[",
"2",
"]",
",",
"int",
")",
":",
"str_version",
"=",
"\... | Dynamically calculate the version based on VERSION tuple. | [
"Dynamically",
"calculate",
"the",
"version",
"based",
"on",
"VERSION",
"tuple",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/__init__.py#L5-L15 | train | 219,825 |
django-extensions/django-extensions | django_extensions/templatetags/indent_text.py | indentby | def indentby(parser, token):
"""
Add indentation to text between the tags by the given indentation level.
{% indentby <indent_level> [if <statement>] %}
...
{% endindentby %}
Arguments:
indent_level - Number of spaces to indent text with.
statement - Only apply indent_level if the boolean statement evalutates to True.
"""
args = token.split_contents()
largs = len(args)
if largs not in (2, 4):
raise template.TemplateSyntaxError("indentby tag requires 1 or 3 arguments")
indent_level = args[1]
if_statement = None
if largs == 4:
if_statement = args[3]
nodelist = parser.parse(('endindentby', ))
parser.delete_first_token()
return IndentByNode(nodelist, indent_level, if_statement) | python | def indentby(parser, token):
"""
Add indentation to text between the tags by the given indentation level.
{% indentby <indent_level> [if <statement>] %}
...
{% endindentby %}
Arguments:
indent_level - Number of spaces to indent text with.
statement - Only apply indent_level if the boolean statement evalutates to True.
"""
args = token.split_contents()
largs = len(args)
if largs not in (2, 4):
raise template.TemplateSyntaxError("indentby tag requires 1 or 3 arguments")
indent_level = args[1]
if_statement = None
if largs == 4:
if_statement = args[3]
nodelist = parser.parse(('endindentby', ))
parser.delete_first_token()
return IndentByNode(nodelist, indent_level, if_statement) | [
"def",
"indentby",
"(",
"parser",
",",
"token",
")",
":",
"args",
"=",
"token",
".",
"split_contents",
"(",
")",
"largs",
"=",
"len",
"(",
"args",
")",
"if",
"largs",
"not",
"in",
"(",
"2",
",",
"4",
")",
":",
"raise",
"template",
".",
"TemplateSyn... | Add indentation to text between the tags by the given indentation level.
{% indentby <indent_level> [if <statement>] %}
...
{% endindentby %}
Arguments:
indent_level - Number of spaces to indent text with.
statement - Only apply indent_level if the boolean statement evalutates to True. | [
"Add",
"indentation",
"to",
"text",
"between",
"the",
"tags",
"by",
"the",
"given",
"indentation",
"level",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/templatetags/indent_text.py#L33-L55 | train | 219,826 |
django-extensions/django-extensions | django_extensions/management/commands/pipchecker.py | Command._urlopen_as_json | def _urlopen_as_json(self, url, headers=None):
"""Shorcut for return contents as json"""
req = Request(url, headers=headers)
return json.loads(urlopen(req).read()) | python | def _urlopen_as_json(self, url, headers=None):
"""Shorcut for return contents as json"""
req = Request(url, headers=headers)
return json.loads(urlopen(req).read()) | [
"def",
"_urlopen_as_json",
"(",
"self",
",",
"url",
",",
"headers",
"=",
"None",
")",
":",
"req",
"=",
"Request",
"(",
"url",
",",
"headers",
"=",
"headers",
")",
"return",
"json",
".",
"loads",
"(",
"urlopen",
"(",
"req",
")",
".",
"read",
"(",
")... | Shorcut for return contents as json | [
"Shorcut",
"for",
"return",
"contents",
"as",
"json"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/pipchecker.py#L116-L119 | train | 219,827 |
django-extensions/django-extensions | django_extensions/management/commands/pipchecker.py | Command.check_pypi | def check_pypi(self):
"""If the requirement is frozen to pypi, check for a new version."""
for dist in get_installed_distributions():
name = dist.project_name
if name in self.reqs.keys():
self.reqs[name]["dist"] = dist
pypi = ServerProxy("https://pypi.python.org/pypi")
for name, req in list(self.reqs.items()):
if req["url"]:
continue # skipping github packages.
elif "dist" in req:
dist = req["dist"]
dist_version = LooseVersion(dist.version)
available = pypi.package_releases(req["pip_req"].name, True) or pypi.package_releases(req["pip_req"].name.replace('-', '_'), True)
available_version = self._available_version(dist_version, available)
if not available_version:
msg = self.style.WARN("release is not on pypi (check capitalization and/or --extra-index-url)")
elif self.options['show_newer'] and dist_version > available_version:
msg = self.style.INFO("{0} available (newer installed)".format(available_version))
elif available_version > dist_version:
msg = self.style.INFO("{0} available".format(available_version))
else:
msg = "up to date"
del self.reqs[name]
continue
pkg_info = self.style.BOLD("{dist.project_name} {dist.version}".format(dist=dist))
else:
msg = "not installed"
pkg_info = name
self.stdout.write("{pkg_info:40} {msg}".format(pkg_info=pkg_info, msg=msg))
del self.reqs[name] | python | def check_pypi(self):
"""If the requirement is frozen to pypi, check for a new version."""
for dist in get_installed_distributions():
name = dist.project_name
if name in self.reqs.keys():
self.reqs[name]["dist"] = dist
pypi = ServerProxy("https://pypi.python.org/pypi")
for name, req in list(self.reqs.items()):
if req["url"]:
continue # skipping github packages.
elif "dist" in req:
dist = req["dist"]
dist_version = LooseVersion(dist.version)
available = pypi.package_releases(req["pip_req"].name, True) or pypi.package_releases(req["pip_req"].name.replace('-', '_'), True)
available_version = self._available_version(dist_version, available)
if not available_version:
msg = self.style.WARN("release is not on pypi (check capitalization and/or --extra-index-url)")
elif self.options['show_newer'] and dist_version > available_version:
msg = self.style.INFO("{0} available (newer installed)".format(available_version))
elif available_version > dist_version:
msg = self.style.INFO("{0} available".format(available_version))
else:
msg = "up to date"
del self.reqs[name]
continue
pkg_info = self.style.BOLD("{dist.project_name} {dist.version}".format(dist=dist))
else:
msg = "not installed"
pkg_info = name
self.stdout.write("{pkg_info:40} {msg}".format(pkg_info=pkg_info, msg=msg))
del self.reqs[name] | [
"def",
"check_pypi",
"(",
"self",
")",
":",
"for",
"dist",
"in",
"get_installed_distributions",
"(",
")",
":",
"name",
"=",
"dist",
".",
"project_name",
"if",
"name",
"in",
"self",
".",
"reqs",
".",
"keys",
"(",
")",
":",
"self",
".",
"reqs",
"[",
"n... | If the requirement is frozen to pypi, check for a new version. | [
"If",
"the",
"requirement",
"is",
"frozen",
"to",
"pypi",
"check",
"for",
"a",
"new",
"version",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/pipchecker.py#L132-L164 | train | 219,828 |
django-extensions/django-extensions | django_extensions/management/commands/pipchecker.py | Command.check_other | def check_other(self):
"""
If the requirement is frozen somewhere other than pypi or github, skip.
If you have a private pypi or use --extra-index-url, consider contributing
support here.
"""
if self.reqs:
self.stdout.write(self.style.ERROR("\nOnly pypi and github based requirements are supported:"))
for name, req in self.reqs.items():
if "dist" in req:
pkg_info = "{dist.project_name} {dist.version}".format(dist=req["dist"])
elif "url" in req:
pkg_info = "{url}".format(url=req["url"])
else:
pkg_info = "unknown package"
self.stdout.write(self.style.BOLD("{pkg_info:40} is not a pypi or github requirement".format(pkg_info=pkg_info))) | python | def check_other(self):
"""
If the requirement is frozen somewhere other than pypi or github, skip.
If you have a private pypi or use --extra-index-url, consider contributing
support here.
"""
if self.reqs:
self.stdout.write(self.style.ERROR("\nOnly pypi and github based requirements are supported:"))
for name, req in self.reqs.items():
if "dist" in req:
pkg_info = "{dist.project_name} {dist.version}".format(dist=req["dist"])
elif "url" in req:
pkg_info = "{url}".format(url=req["url"])
else:
pkg_info = "unknown package"
self.stdout.write(self.style.BOLD("{pkg_info:40} is not a pypi or github requirement".format(pkg_info=pkg_info))) | [
"def",
"check_other",
"(",
"self",
")",
":",
"if",
"self",
".",
"reqs",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"self",
".",
"style",
".",
"ERROR",
"(",
"\"\\nOnly pypi and github based requirements are supported:\"",
")",
")",
"for",
"name",
",",
"re... | If the requirement is frozen somewhere other than pypi or github, skip.
If you have a private pypi or use --extra-index-url, consider contributing
support here. | [
"If",
"the",
"requirement",
"is",
"frozen",
"somewhere",
"other",
"than",
"pypi",
"or",
"github",
"skip",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/pipchecker.py#L297-L313 | train | 219,829 |
django-extensions/django-extensions | django_extensions/db/fields/encrypted.py | BaseEncryptedField.get_crypt_class | def get_crypt_class(self):
"""
Get the Keyczar class to use.
The class can be customized with the ENCRYPTED_FIELD_MODE setting. By default,
this setting is DECRYPT_AND_ENCRYPT. Set this to ENCRYPT to disable decryption.
This is necessary if you are only providing public keys to Keyczar.
Returns:
keyczar.Encrypter if ENCRYPTED_FIELD_MODE is ENCRYPT.
keyczar.Crypter if ENCRYPTED_FIELD_MODE is DECRYPT_AND_ENCRYPT.
Override this method to customize the type of Keyczar class returned.
"""
crypt_type = getattr(settings, 'ENCRYPTED_FIELD_MODE', 'DECRYPT_AND_ENCRYPT')
if crypt_type == 'ENCRYPT':
crypt_class_name = 'Encrypter'
elif crypt_type == 'DECRYPT_AND_ENCRYPT':
crypt_class_name = 'Crypter'
else:
raise ImproperlyConfigured(
'ENCRYPTED_FIELD_MODE must be either DECRYPT_AND_ENCRYPT '
'or ENCRYPT, not %s.' % crypt_type)
return getattr(keyczar, crypt_class_name) | python | def get_crypt_class(self):
"""
Get the Keyczar class to use.
The class can be customized with the ENCRYPTED_FIELD_MODE setting. By default,
this setting is DECRYPT_AND_ENCRYPT. Set this to ENCRYPT to disable decryption.
This is necessary if you are only providing public keys to Keyczar.
Returns:
keyczar.Encrypter if ENCRYPTED_FIELD_MODE is ENCRYPT.
keyczar.Crypter if ENCRYPTED_FIELD_MODE is DECRYPT_AND_ENCRYPT.
Override this method to customize the type of Keyczar class returned.
"""
crypt_type = getattr(settings, 'ENCRYPTED_FIELD_MODE', 'DECRYPT_AND_ENCRYPT')
if crypt_type == 'ENCRYPT':
crypt_class_name = 'Encrypter'
elif crypt_type == 'DECRYPT_AND_ENCRYPT':
crypt_class_name = 'Crypter'
else:
raise ImproperlyConfigured(
'ENCRYPTED_FIELD_MODE must be either DECRYPT_AND_ENCRYPT '
'or ENCRYPT, not %s.' % crypt_type)
return getattr(keyczar, crypt_class_name) | [
"def",
"get_crypt_class",
"(",
"self",
")",
":",
"crypt_type",
"=",
"getattr",
"(",
"settings",
",",
"'ENCRYPTED_FIELD_MODE'",
",",
"'DECRYPT_AND_ENCRYPT'",
")",
"if",
"crypt_type",
"==",
"'ENCRYPT'",
":",
"crypt_class_name",
"=",
"'Encrypter'",
"elif",
"crypt_type"... | Get the Keyczar class to use.
The class can be customized with the ENCRYPTED_FIELD_MODE setting. By default,
this setting is DECRYPT_AND_ENCRYPT. Set this to ENCRYPT to disable decryption.
This is necessary if you are only providing public keys to Keyczar.
Returns:
keyczar.Encrypter if ENCRYPTED_FIELD_MODE is ENCRYPT.
keyczar.Crypter if ENCRYPTED_FIELD_MODE is DECRYPT_AND_ENCRYPT.
Override this method to customize the type of Keyczar class returned. | [
"Get",
"the",
"Keyczar",
"class",
"to",
"use",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/db/fields/encrypted.py#L46-L69 | train | 219,830 |
django-extensions/django-extensions | django_extensions/management/commands/sqldsn.py | Command._postgresql | def _postgresql(self, dbhost, dbport, dbname, dbuser, dbpass, dsn_style=None): # noqa
"""PostgreSQL psycopg2 driver accepts two syntaxes
Plus a string for .pgpass file
"""
dsn = []
if dsn_style is None or dsn_style == 'all' or dsn_style == 'keyvalue':
dsnstr = "host='{0}' dbname='{2}' user='{3}' password='{4}'"
if dbport is not None:
dsnstr += " port='{1}'"
dsn.append(dsnstr.format(dbhost,
dbport,
dbname,
dbuser,
dbpass,))
if dsn_style == 'all' or dsn_style == 'kwargs':
dsnstr = "host='{0}', database='{2}', user='{3}', password='{4}'"
if dbport is not None:
dsnstr += ", port='{1}'"
dsn.append(dsnstr.format(dbhost,
dbport,
dbname,
dbuser,
dbpass))
if dsn_style == 'all' or dsn_style == 'uri':
dsnstr = "postgresql://{user}:{password}@{host}/{name}"
dsn.append(dsnstr.format(
host="{host}:{port}".format(host=dbhost, port=dbport) if dbport else dbhost, # noqa
name=dbname, user=dbuser, password=dbpass))
if dsn_style == 'all' or dsn_style == 'pgpass':
dsn.append(':'.join(map(str, filter(
None, [dbhost, dbport, dbname, dbuser, dbpass]))))
return dsn | python | def _postgresql(self, dbhost, dbport, dbname, dbuser, dbpass, dsn_style=None): # noqa
"""PostgreSQL psycopg2 driver accepts two syntaxes
Plus a string for .pgpass file
"""
dsn = []
if dsn_style is None or dsn_style == 'all' or dsn_style == 'keyvalue':
dsnstr = "host='{0}' dbname='{2}' user='{3}' password='{4}'"
if dbport is not None:
dsnstr += " port='{1}'"
dsn.append(dsnstr.format(dbhost,
dbport,
dbname,
dbuser,
dbpass,))
if dsn_style == 'all' or dsn_style == 'kwargs':
dsnstr = "host='{0}', database='{2}', user='{3}', password='{4}'"
if dbport is not None:
dsnstr += ", port='{1}'"
dsn.append(dsnstr.format(dbhost,
dbport,
dbname,
dbuser,
dbpass))
if dsn_style == 'all' or dsn_style == 'uri':
dsnstr = "postgresql://{user}:{password}@{host}/{name}"
dsn.append(dsnstr.format(
host="{host}:{port}".format(host=dbhost, port=dbport) if dbport else dbhost, # noqa
name=dbname, user=dbuser, password=dbpass))
if dsn_style == 'all' or dsn_style == 'pgpass':
dsn.append(':'.join(map(str, filter(
None, [dbhost, dbport, dbname, dbuser, dbpass]))))
return dsn | [
"def",
"_postgresql",
"(",
"self",
",",
"dbhost",
",",
"dbport",
",",
"dbname",
",",
"dbuser",
",",
"dbpass",
",",
"dsn_style",
"=",
"None",
")",
":",
"# noqa",
"dsn",
"=",
"[",
"]",
"if",
"dsn_style",
"is",
"None",
"or",
"dsn_style",
"==",
"'all'",
... | PostgreSQL psycopg2 driver accepts two syntaxes
Plus a string for .pgpass file | [
"PostgreSQL",
"psycopg2",
"driver",
"accepts",
"two",
"syntaxes"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/sqldsn.py#L100-L141 | train | 219,831 |
django-extensions/django-extensions | django_extensions/management/commands/dumpscript.py | check_dependencies | def check_dependencies(model, model_queue, avaliable_models):
""" Check that all the depenedencies for this model are already in the queue. """
# A list of allowed links: existing fields, itself and the special case ContentType
allowed_links = [m.model.__name__ for m in model_queue] + [model.__name__, 'ContentType']
# For each ForeignKey or ManyToMany field, check that a link is possible
for field in model._meta.fields:
if not field.remote_field:
continue
if field.remote_field.model.__name__ not in allowed_links:
if field.remote_field.model not in avaliable_models:
continue
return False
for field in model._meta.many_to_many:
if not field.remote_field:
continue
if field.remote_field.model.__name__ not in allowed_links:
return False
return True | python | def check_dependencies(model, model_queue, avaliable_models):
""" Check that all the depenedencies for this model are already in the queue. """
# A list of allowed links: existing fields, itself and the special case ContentType
allowed_links = [m.model.__name__ for m in model_queue] + [model.__name__, 'ContentType']
# For each ForeignKey or ManyToMany field, check that a link is possible
for field in model._meta.fields:
if not field.remote_field:
continue
if field.remote_field.model.__name__ not in allowed_links:
if field.remote_field.model not in avaliable_models:
continue
return False
for field in model._meta.many_to_many:
if not field.remote_field:
continue
if field.remote_field.model.__name__ not in allowed_links:
return False
return True | [
"def",
"check_dependencies",
"(",
"model",
",",
"model_queue",
",",
"avaliable_models",
")",
":",
"# A list of allowed links: existing fields, itself and the special case ContentType",
"allowed_links",
"=",
"[",
"m",
".",
"model",
".",
"__name__",
"for",
"m",
"in",
"model... | Check that all the depenedencies for this model are already in the queue. | [
"Check",
"that",
"all",
"the",
"depenedencies",
"for",
"this",
"model",
"are",
"already",
"in",
"the",
"queue",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/dumpscript.py#L712-L733 | train | 219,832 |
django-extensions/django-extensions | django_extensions/management/commands/dumpscript.py | Code.get_import_lines | def get_import_lines(self):
""" Take the stored imports and converts them to lines """
if self.imports:
return ["from %s import %s" % (value, key) for key, value in self.imports.items()]
else:
return [] | python | def get_import_lines(self):
""" Take the stored imports and converts them to lines """
if self.imports:
return ["from %s import %s" % (value, key) for key, value in self.imports.items()]
else:
return [] | [
"def",
"get_import_lines",
"(",
"self",
")",
":",
"if",
"self",
".",
"imports",
":",
"return",
"[",
"\"from %s import %s\"",
"%",
"(",
"value",
",",
"key",
")",
"for",
"key",
",",
"value",
"in",
"self",
".",
"imports",
".",
"items",
"(",
")",
"]",
"e... | Take the stored imports and converts them to lines | [
"Take",
"the",
"stored",
"imports",
"and",
"converts",
"them",
"to",
"lines"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/dumpscript.py#L178-L183 | train | 219,833 |
django-extensions/django-extensions | django_extensions/management/commands/dumpscript.py | InstanceCode.skip | def skip(self):
"""
Determine whether or not this object should be skipped.
If this model instance is a parent of a single subclassed
instance, skip it. The subclassed instance will create this
parent instance for us.
TODO: Allow the user to force its creation?
"""
if self.skip_me is not None:
return self.skip_me
cls = self.instance.__class__
using = router.db_for_write(cls, instance=self.instance)
collector = Collector(using=using)
collector.collect([self.instance], collect_related=False)
sub_objects = sum([list(i) for i in collector.data.values()], [])
sub_objects_parents = [so._meta.parents for so in sub_objects]
if [self.model in p for p in sub_objects_parents].count(True) == 1:
# since this instance isn't explicitly created, it's variable name
# can't be referenced in the script, so record None in context dict
pk_name = self.instance._meta.pk.name
key = '%s_%s' % (self.model.__name__, getattr(self.instance, pk_name))
self.context[key] = None
self.skip_me = True
else:
self.skip_me = False
return self.skip_me | python | def skip(self):
"""
Determine whether or not this object should be skipped.
If this model instance is a parent of a single subclassed
instance, skip it. The subclassed instance will create this
parent instance for us.
TODO: Allow the user to force its creation?
"""
if self.skip_me is not None:
return self.skip_me
cls = self.instance.__class__
using = router.db_for_write(cls, instance=self.instance)
collector = Collector(using=using)
collector.collect([self.instance], collect_related=False)
sub_objects = sum([list(i) for i in collector.data.values()], [])
sub_objects_parents = [so._meta.parents for so in sub_objects]
if [self.model in p for p in sub_objects_parents].count(True) == 1:
# since this instance isn't explicitly created, it's variable name
# can't be referenced in the script, so record None in context dict
pk_name = self.instance._meta.pk.name
key = '%s_%s' % (self.model.__name__, getattr(self.instance, pk_name))
self.context[key] = None
self.skip_me = True
else:
self.skip_me = False
return self.skip_me | [
"def",
"skip",
"(",
"self",
")",
":",
"if",
"self",
".",
"skip_me",
"is",
"not",
"None",
":",
"return",
"self",
".",
"skip_me",
"cls",
"=",
"self",
".",
"instance",
".",
"__class__",
"using",
"=",
"router",
".",
"db_for_write",
"(",
"cls",
",",
"inst... | Determine whether or not this object should be skipped.
If this model instance is a parent of a single subclassed
instance, skip it. The subclassed instance will create this
parent instance for us.
TODO: Allow the user to force its creation? | [
"Determine",
"whether",
"or",
"not",
"this",
"object",
"should",
"be",
"skipped",
".",
"If",
"this",
"model",
"instance",
"is",
"a",
"parent",
"of",
"a",
"single",
"subclassed",
"instance",
"skip",
"it",
".",
"The",
"subclassed",
"instance",
"will",
"create"... | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/dumpscript.py#L299-L327 | train | 219,834 |
django-extensions/django-extensions | django_extensions/management/commands/dumpscript.py | InstanceCode.instantiate | def instantiate(self):
""" Write lines for instantiation """
# e.g. model_name_35 = Model()
code_lines = []
if not self.instantiated:
code_lines.append("%s = %s()" % (self.variable_name, self.model.__name__))
self.instantiated = True
# Store our variable name for future foreign key references
pk_name = self.instance._meta.pk.name
key = '%s_%s' % (self.model.__name__, getattr(self.instance, pk_name))
self.context[key] = self.variable_name
return code_lines | python | def instantiate(self):
""" Write lines for instantiation """
# e.g. model_name_35 = Model()
code_lines = []
if not self.instantiated:
code_lines.append("%s = %s()" % (self.variable_name, self.model.__name__))
self.instantiated = True
# Store our variable name for future foreign key references
pk_name = self.instance._meta.pk.name
key = '%s_%s' % (self.model.__name__, getattr(self.instance, pk_name))
self.context[key] = self.variable_name
return code_lines | [
"def",
"instantiate",
"(",
"self",
")",
":",
"# e.g. model_name_35 = Model()",
"code_lines",
"=",
"[",
"]",
"if",
"not",
"self",
".",
"instantiated",
":",
"code_lines",
".",
"append",
"(",
"\"%s = %s()\"",
"%",
"(",
"self",
".",
"variable_name",
",",
"self",
... | Write lines for instantiation | [
"Write",
"lines",
"for",
"instantiation"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/dumpscript.py#L329-L343 | train | 219,835 |
django-extensions/django-extensions | django_extensions/management/commands/dumpscript.py | InstanceCode.get_waiting_list | def get_waiting_list(self, force=False):
""" Add lines for any waiting fields that can be completed now. """
code_lines = []
skip_autofield = self.options['skip_autofield']
# Process normal fields
for field in list(self.waiting_list):
try:
# Find the value, add the line, remove from waiting list and move on
value = get_attribute_value(self.instance, field, self.context, force=force, skip_autofield=skip_autofield)
code_lines.append('%s.%s = %s' % (self.variable_name, field.name, value))
self.waiting_list.remove(field)
except SkipValue:
# Remove from the waiting list and move on
self.waiting_list.remove(field)
continue
except DoLater:
# Move on, maybe next time
continue
return code_lines | python | def get_waiting_list(self, force=False):
""" Add lines for any waiting fields that can be completed now. """
code_lines = []
skip_autofield = self.options['skip_autofield']
# Process normal fields
for field in list(self.waiting_list):
try:
# Find the value, add the line, remove from waiting list and move on
value = get_attribute_value(self.instance, field, self.context, force=force, skip_autofield=skip_autofield)
code_lines.append('%s.%s = %s' % (self.variable_name, field.name, value))
self.waiting_list.remove(field)
except SkipValue:
# Remove from the waiting list and move on
self.waiting_list.remove(field)
continue
except DoLater:
# Move on, maybe next time
continue
return code_lines | [
"def",
"get_waiting_list",
"(",
"self",
",",
"force",
"=",
"False",
")",
":",
"code_lines",
"=",
"[",
"]",
"skip_autofield",
"=",
"self",
".",
"options",
"[",
"'skip_autofield'",
"]",
"# Process normal fields",
"for",
"field",
"in",
"list",
"(",
"self",
".",... | Add lines for any waiting fields that can be completed now. | [
"Add",
"lines",
"for",
"any",
"waiting",
"fields",
"that",
"can",
"be",
"completed",
"now",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/dumpscript.py#L345-L366 | train | 219,836 |
django-extensions/django-extensions | django_extensions/management/commands/dumpscript.py | InstanceCode.get_many_to_many_lines | def get_many_to_many_lines(self, force=False):
""" Generate lines that define many to many relations for this instance. """
lines = []
for field, rel_items in self.many_to_many_waiting_list.items():
for rel_item in list(rel_items):
try:
pk_name = rel_item._meta.pk.name
key = '%s_%s' % (rel_item.__class__.__name__, getattr(rel_item, pk_name))
value = "%s" % self.context[key]
lines.append('%s.%s.add(%s)' % (self.variable_name, field.name, value))
self.many_to_many_waiting_list[field].remove(rel_item)
except KeyError:
if force:
item_locator = orm_item_locator(rel_item)
self.context["__extra_imports"][rel_item._meta.object_name] = rel_item.__module__
lines.append('%s.%s.add( %s )' % (self.variable_name, field.name, item_locator))
self.many_to_many_waiting_list[field].remove(rel_item)
if lines:
lines.append("")
return lines | python | def get_many_to_many_lines(self, force=False):
""" Generate lines that define many to many relations for this instance. """
lines = []
for field, rel_items in self.many_to_many_waiting_list.items():
for rel_item in list(rel_items):
try:
pk_name = rel_item._meta.pk.name
key = '%s_%s' % (rel_item.__class__.__name__, getattr(rel_item, pk_name))
value = "%s" % self.context[key]
lines.append('%s.%s.add(%s)' % (self.variable_name, field.name, value))
self.many_to_many_waiting_list[field].remove(rel_item)
except KeyError:
if force:
item_locator = orm_item_locator(rel_item)
self.context["__extra_imports"][rel_item._meta.object_name] = rel_item.__module__
lines.append('%s.%s.add( %s )' % (self.variable_name, field.name, item_locator))
self.many_to_many_waiting_list[field].remove(rel_item)
if lines:
lines.append("")
return lines | [
"def",
"get_many_to_many_lines",
"(",
"self",
",",
"force",
"=",
"False",
")",
":",
"lines",
"=",
"[",
"]",
"for",
"field",
",",
"rel_items",
"in",
"self",
".",
"many_to_many_waiting_list",
".",
"items",
"(",
")",
":",
"for",
"rel_item",
"in",
"list",
"(... | Generate lines that define many to many relations for this instance. | [
"Generate",
"lines",
"that",
"define",
"many",
"to",
"many",
"relations",
"for",
"this",
"instance",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/dumpscript.py#L368-L391 | train | 219,837 |
django-extensions/django-extensions | django_extensions/management/commands/dumpscript.py | Script._queue_models | def _queue_models(self, models, context):
"""
Work an an appropriate ordering for the models.
This isn't essential, but makes the script look nicer because
more instances can be defined on their first try.
"""
model_queue = []
number_remaining_models = len(models)
# Max number of cycles allowed before we call it an infinite loop.
MAX_CYCLES = number_remaining_models
allowed_cycles = MAX_CYCLES
while number_remaining_models > 0:
previous_number_remaining_models = number_remaining_models
model = models.pop(0)
# If the model is ready to be processed, add it to the list
if check_dependencies(model, model_queue, context["__avaliable_models"]):
model_class = ModelCode(model=model, context=context, stdout=self.stdout, stderr=self.stderr, options=self.options)
model_queue.append(model_class)
# Otherwise put the model back at the end of the list
else:
models.append(model)
# Check for infinite loops.
# This means there is a cyclic foreign key structure
# That cannot be resolved by re-ordering
number_remaining_models = len(models)
if number_remaining_models == previous_number_remaining_models:
allowed_cycles -= 1
if allowed_cycles <= 0:
# Add the remaining models, but do not remove them from the model list
missing_models = [ModelCode(model=m, context=context, stdout=self.stdout, stderr=self.stderr, options=self.options) for m in models]
model_queue += missing_models
# Replace the models with the model class objects
# (sure, this is a little bit of hackery)
models[:] = missing_models
break
else:
allowed_cycles = MAX_CYCLES
return model_queue | python | def _queue_models(self, models, context):
"""
Work an an appropriate ordering for the models.
This isn't essential, but makes the script look nicer because
more instances can be defined on their first try.
"""
model_queue = []
number_remaining_models = len(models)
# Max number of cycles allowed before we call it an infinite loop.
MAX_CYCLES = number_remaining_models
allowed_cycles = MAX_CYCLES
while number_remaining_models > 0:
previous_number_remaining_models = number_remaining_models
model = models.pop(0)
# If the model is ready to be processed, add it to the list
if check_dependencies(model, model_queue, context["__avaliable_models"]):
model_class = ModelCode(model=model, context=context, stdout=self.stdout, stderr=self.stderr, options=self.options)
model_queue.append(model_class)
# Otherwise put the model back at the end of the list
else:
models.append(model)
# Check for infinite loops.
# This means there is a cyclic foreign key structure
# That cannot be resolved by re-ordering
number_remaining_models = len(models)
if number_remaining_models == previous_number_remaining_models:
allowed_cycles -= 1
if allowed_cycles <= 0:
# Add the remaining models, but do not remove them from the model list
missing_models = [ModelCode(model=m, context=context, stdout=self.stdout, stderr=self.stderr, options=self.options) for m in models]
model_queue += missing_models
# Replace the models with the model class objects
# (sure, this is a little bit of hackery)
models[:] = missing_models
break
else:
allowed_cycles = MAX_CYCLES
return model_queue | [
"def",
"_queue_models",
"(",
"self",
",",
"models",
",",
"context",
")",
":",
"model_queue",
"=",
"[",
"]",
"number_remaining_models",
"=",
"len",
"(",
"models",
")",
"# Max number of cycles allowed before we call it an infinite loop.",
"MAX_CYCLES",
"=",
"number_remain... | Work an an appropriate ordering for the models.
This isn't essential, but makes the script look nicer because
more instances can be defined on their first try. | [
"Work",
"an",
"an",
"appropriate",
"ordering",
"for",
"the",
"models",
".",
"This",
"isn",
"t",
"essential",
"but",
"makes",
"the",
"script",
"look",
"nicer",
"because",
"more",
"instances",
"can",
"be",
"defined",
"on",
"their",
"first",
"try",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/dumpscript.py#L411-L454 | train | 219,838 |
django-extensions/django-extensions | django_extensions/management/commands/sqldiff.py | SQLDiff.sql_to_dict | def sql_to_dict(self, query, param):
"""
Execute query and return a dict
sql_to_dict(query, param) -> list of dicts
code from snippet at http://www.djangosnippets.org/snippets/1383/
"""
cursor = connection.cursor()
cursor.execute(query, param)
fieldnames = [name[0] for name in cursor.description]
result = []
for row in cursor.fetchall():
rowset = []
for field in zip(fieldnames, row):
rowset.append(field)
result.append(dict(rowset))
return result | python | def sql_to_dict(self, query, param):
"""
Execute query and return a dict
sql_to_dict(query, param) -> list of dicts
code from snippet at http://www.djangosnippets.org/snippets/1383/
"""
cursor = connection.cursor()
cursor.execute(query, param)
fieldnames = [name[0] for name in cursor.description]
result = []
for row in cursor.fetchall():
rowset = []
for field in zip(fieldnames, row):
rowset.append(field)
result.append(dict(rowset))
return result | [
"def",
"sql_to_dict",
"(",
"self",
",",
"query",
",",
"param",
")",
":",
"cursor",
"=",
"connection",
".",
"cursor",
"(",
")",
"cursor",
".",
"execute",
"(",
"query",
",",
"param",
")",
"fieldnames",
"=",
"[",
"name",
"[",
"0",
"]",
"for",
"name",
... | Execute query and return a dict
sql_to_dict(query, param) -> list of dicts
code from snippet at http://www.djangosnippets.org/snippets/1383/ | [
"Execute",
"query",
"and",
"return",
"a",
"dict"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/sqldiff.py#L257-L274 | train | 219,839 |
django-extensions/django-extensions | django_extensions/management/commands/sqldiff.py | SQLDiff.print_diff | def print_diff(self, style=no_style()):
""" Print differences to stdout """
if self.options['sql']:
self.print_diff_sql(style)
else:
self.print_diff_text(style) | python | def print_diff(self, style=no_style()):
""" Print differences to stdout """
if self.options['sql']:
self.print_diff_sql(style)
else:
self.print_diff_text(style) | [
"def",
"print_diff",
"(",
"self",
",",
"style",
"=",
"no_style",
"(",
")",
")",
":",
"if",
"self",
".",
"options",
"[",
"'sql'",
"]",
":",
"self",
".",
"print_diff_sql",
"(",
"style",
")",
"else",
":",
"self",
".",
"print_diff_text",
"(",
"style",
")... | Print differences to stdout | [
"Print",
"differences",
"to",
"stdout"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/sqldiff.py#L676-L681 | train | 219,840 |
django-extensions/django-extensions | django_extensions/templatetags/syntax_color.py | pygments_required | def pygments_required(func):
"""Raise ImportError if pygments is not installed."""
def wrapper(*args, **kwargs):
if not HAS_PYGMENTS: # pragma: no cover
raise ImportError(
"Please install 'pygments' library to use syntax_color.")
rv = func(*args, **kwargs)
return rv
return wrapper | python | def pygments_required(func):
"""Raise ImportError if pygments is not installed."""
def wrapper(*args, **kwargs):
if not HAS_PYGMENTS: # pragma: no cover
raise ImportError(
"Please install 'pygments' library to use syntax_color.")
rv = func(*args, **kwargs)
return rv
return wrapper | [
"def",
"pygments_required",
"(",
"func",
")",
":",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"HAS_PYGMENTS",
":",
"# pragma: no cover",
"raise",
"ImportError",
"(",
"\"Please install 'pygments' library to use syntax_color.\"... | Raise ImportError if pygments is not installed. | [
"Raise",
"ImportError",
"if",
"pygments",
"is",
"not",
"installed",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/templatetags/syntax_color.py#L54-L62 | train | 219,841 |
django-extensions/django-extensions | django_extensions/utils/dia2django.py | addparentstofks | def addparentstofks(rels, fks):
"""
Get a list of relations, between parents and sons and a dict of
clases named in dia, and modifies the fks to add the parent as fk to get
order on the output of classes and replaces the base class of the son, to
put the class parent name.
"""
for j in rels:
son = index(fks, j[1])
parent = index(fks, j[0])
fks[son][2] = fks[son][2].replace("models.Model", parent)
if parent not in fks[son][0]:
fks[son][0].append(parent) | python | def addparentstofks(rels, fks):
"""
Get a list of relations, between parents and sons and a dict of
clases named in dia, and modifies the fks to add the parent as fk to get
order on the output of classes and replaces the base class of the son, to
put the class parent name.
"""
for j in rels:
son = index(fks, j[1])
parent = index(fks, j[0])
fks[son][2] = fks[son][2].replace("models.Model", parent)
if parent not in fks[son][0]:
fks[son][0].append(parent) | [
"def",
"addparentstofks",
"(",
"rels",
",",
"fks",
")",
":",
"for",
"j",
"in",
"rels",
":",
"son",
"=",
"index",
"(",
"fks",
",",
"j",
"[",
"1",
"]",
")",
"parent",
"=",
"index",
"(",
"fks",
",",
"j",
"[",
"0",
"]",
")",
"fks",
"[",
"son",
... | Get a list of relations, between parents and sons and a dict of
clases named in dia, and modifies the fks to add the parent as fk to get
order on the output of classes and replaces the base class of the son, to
put the class parent name. | [
"Get",
"a",
"list",
"of",
"relations",
"between",
"parents",
"and",
"sons",
"and",
"a",
"dict",
"of",
"clases",
"named",
"in",
"dia",
"and",
"modifies",
"the",
"fks",
"to",
"add",
"the",
"parent",
"as",
"fk",
"to",
"get",
"order",
"on",
"the",
"output"... | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/utils/dia2django.py#L57-L69 | train | 219,842 |
django-extensions/django-extensions | django_extensions/management/shells.py | get_app_name | def get_app_name(mod_name):
"""
Retrieve application name from models.py module path
>>> get_app_name('testapp.models.foo')
'testapp'
'testapp' instead of 'some.testapp' for compatibility:
>>> get_app_name('some.testapp.models.foo')
'testapp'
>>> get_app_name('some.models.testapp.models.foo')
'testapp'
>>> get_app_name('testapp.foo')
'testapp'
>>> get_app_name('some.testapp.foo')
'testapp'
"""
rparts = list(reversed(mod_name.split('.')))
try:
try:
return rparts[rparts.index(MODELS_MODULE_NAME) + 1]
except ValueError:
# MODELS_MODULE_NAME ('models' string) is not found
return rparts[1]
except IndexError:
# Some weird model naming scheme like in Sentry.
return mod_name | python | def get_app_name(mod_name):
"""
Retrieve application name from models.py module path
>>> get_app_name('testapp.models.foo')
'testapp'
'testapp' instead of 'some.testapp' for compatibility:
>>> get_app_name('some.testapp.models.foo')
'testapp'
>>> get_app_name('some.models.testapp.models.foo')
'testapp'
>>> get_app_name('testapp.foo')
'testapp'
>>> get_app_name('some.testapp.foo')
'testapp'
"""
rparts = list(reversed(mod_name.split('.')))
try:
try:
return rparts[rparts.index(MODELS_MODULE_NAME) + 1]
except ValueError:
# MODELS_MODULE_NAME ('models' string) is not found
return rparts[1]
except IndexError:
# Some weird model naming scheme like in Sentry.
return mod_name | [
"def",
"get_app_name",
"(",
"mod_name",
")",
":",
"rparts",
"=",
"list",
"(",
"reversed",
"(",
"mod_name",
".",
"split",
"(",
"'.'",
")",
")",
")",
"try",
":",
"try",
":",
"return",
"rparts",
"[",
"rparts",
".",
"index",
"(",
"MODELS_MODULE_NAME",
")",... | Retrieve application name from models.py module path
>>> get_app_name('testapp.models.foo')
'testapp'
'testapp' instead of 'some.testapp' for compatibility:
>>> get_app_name('some.testapp.models.foo')
'testapp'
>>> get_app_name('some.models.testapp.models.foo')
'testapp'
>>> get_app_name('testapp.foo')
'testapp'
>>> get_app_name('some.testapp.foo')
'testapp' | [
"Retrieve",
"application",
"name",
"from",
"models",
".",
"py",
"module",
"path"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/shells.py#L46-L72 | train | 219,843 |
django-extensions/django-extensions | django_extensions/management/commands/merge_model_instances.py | get_generic_fields | def get_generic_fields():
"""Return a list of all GenericForeignKeys in all models."""
generic_fields = []
for model in apps.get_models():
for field_name, field in model.__dict__.items():
if isinstance(field, GenericForeignKey):
generic_fields.append(field)
return generic_fields | python | def get_generic_fields():
"""Return a list of all GenericForeignKeys in all models."""
generic_fields = []
for model in apps.get_models():
for field_name, field in model.__dict__.items():
if isinstance(field, GenericForeignKey):
generic_fields.append(field)
return generic_fields | [
"def",
"get_generic_fields",
"(",
")",
":",
"generic_fields",
"=",
"[",
"]",
"for",
"model",
"in",
"apps",
".",
"get_models",
"(",
")",
":",
"for",
"field_name",
",",
"field",
"in",
"model",
".",
"__dict__",
".",
"items",
"(",
")",
":",
"if",
"isinstan... | Return a list of all GenericForeignKeys in all models. | [
"Return",
"a",
"list",
"of",
"all",
"GenericForeignKeys",
"in",
"all",
"models",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/merge_model_instances.py#L78-L85 | train | 219,844 |
django-extensions/django-extensions | django_extensions/management/commands/merge_model_instances.py | Command.merge_model_instances | def merge_model_instances(self, primary_object, alias_objects):
"""
Merge several model instances into one, the `primary_object`.
Use this function to merge model objects and migrate all of the related
fields from the alias objects the primary object.
"""
generic_fields = get_generic_fields()
# get related fields
related_fields = list(filter(
lambda x: x.is_relation is True,
primary_object._meta.get_fields()))
many_to_many_fields = list(filter(
lambda x: x.many_to_many is True, related_fields))
related_fields = list(filter(
lambda x: x.many_to_many is False, related_fields))
# Loop through all alias objects and migrate their references to the
# primary object
deleted_objects = []
deleted_objects_count = 0
for alias_object in alias_objects:
# Migrate all foreign key references from alias object to primary
# object.
for many_to_many_field in many_to_many_fields:
alias_varname = many_to_many_field.name
related_objects = getattr(alias_object, alias_varname)
for obj in related_objects.all():
try:
# Handle regular M2M relationships.
getattr(alias_object, alias_varname).remove(obj)
getattr(primary_object, alias_varname).add(obj)
except AttributeError:
# Handle M2M relationships with a 'through' model.
# This does not delete the 'through model.
# TODO: Allow the user to delete a duplicate 'through' model.
through_model = getattr(alias_object, alias_varname).through
kwargs = {
many_to_many_field.m2m_reverse_field_name(): obj,
many_to_many_field.m2m_field_name(): alias_object,
}
through_model_instances = through_model.objects.filter(**kwargs)
for instance in through_model_instances:
# Re-attach the through model to the primary_object
setattr(
instance,
many_to_many_field.m2m_field_name(),
primary_object)
instance.save()
# TODO: Here, try to delete duplicate instances that are
# disallowed by a unique_together constraint
for related_field in related_fields:
if related_field.one_to_many:
alias_varname = related_field.get_accessor_name()
related_objects = getattr(alias_object, alias_varname)
for obj in related_objects.all():
field_name = related_field.field.name
setattr(obj, field_name, primary_object)
obj.save()
elif related_field.one_to_one or related_field.many_to_one:
alias_varname = related_field.name
related_object = getattr(alias_object, alias_varname)
primary_related_object = getattr(primary_object, alias_varname)
if primary_related_object is None:
setattr(primary_object, alias_varname, related_object)
primary_object.save()
elif related_field.one_to_one:
self.stdout.write("Deleted {} with id {}\n".format(
related_object, related_object.id))
related_object.delete()
for field in generic_fields:
filter_kwargs = {}
filter_kwargs[field.fk_field] = alias_object._get_pk_val()
filter_kwargs[field.ct_field] = field.get_content_type(alias_object)
related_objects = field.model.objects.filter(**filter_kwargs)
for generic_related_object in related_objects:
setattr(generic_related_object, field.name, primary_object)
generic_related_object.save()
if alias_object.id:
deleted_objects += [alias_object]
self.stdout.write("Deleted {} with id {}\n".format(
alias_object, alias_object.id))
alias_object.delete()
deleted_objects_count += 1
return primary_object, deleted_objects, deleted_objects_count | python | def merge_model_instances(self, primary_object, alias_objects):
"""
Merge several model instances into one, the `primary_object`.
Use this function to merge model objects and migrate all of the related
fields from the alias objects the primary object.
"""
generic_fields = get_generic_fields()
# get related fields
related_fields = list(filter(
lambda x: x.is_relation is True,
primary_object._meta.get_fields()))
many_to_many_fields = list(filter(
lambda x: x.many_to_many is True, related_fields))
related_fields = list(filter(
lambda x: x.many_to_many is False, related_fields))
# Loop through all alias objects and migrate their references to the
# primary object
deleted_objects = []
deleted_objects_count = 0
for alias_object in alias_objects:
# Migrate all foreign key references from alias object to primary
# object.
for many_to_many_field in many_to_many_fields:
alias_varname = many_to_many_field.name
related_objects = getattr(alias_object, alias_varname)
for obj in related_objects.all():
try:
# Handle regular M2M relationships.
getattr(alias_object, alias_varname).remove(obj)
getattr(primary_object, alias_varname).add(obj)
except AttributeError:
# Handle M2M relationships with a 'through' model.
# This does not delete the 'through model.
# TODO: Allow the user to delete a duplicate 'through' model.
through_model = getattr(alias_object, alias_varname).through
kwargs = {
many_to_many_field.m2m_reverse_field_name(): obj,
many_to_many_field.m2m_field_name(): alias_object,
}
through_model_instances = through_model.objects.filter(**kwargs)
for instance in through_model_instances:
# Re-attach the through model to the primary_object
setattr(
instance,
many_to_many_field.m2m_field_name(),
primary_object)
instance.save()
# TODO: Here, try to delete duplicate instances that are
# disallowed by a unique_together constraint
for related_field in related_fields:
if related_field.one_to_many:
alias_varname = related_field.get_accessor_name()
related_objects = getattr(alias_object, alias_varname)
for obj in related_objects.all():
field_name = related_field.field.name
setattr(obj, field_name, primary_object)
obj.save()
elif related_field.one_to_one or related_field.many_to_one:
alias_varname = related_field.name
related_object = getattr(alias_object, alias_varname)
primary_related_object = getattr(primary_object, alias_varname)
if primary_related_object is None:
setattr(primary_object, alias_varname, related_object)
primary_object.save()
elif related_field.one_to_one:
self.stdout.write("Deleted {} with id {}\n".format(
related_object, related_object.id))
related_object.delete()
for field in generic_fields:
filter_kwargs = {}
filter_kwargs[field.fk_field] = alias_object._get_pk_val()
filter_kwargs[field.ct_field] = field.get_content_type(alias_object)
related_objects = field.model.objects.filter(**filter_kwargs)
for generic_related_object in related_objects:
setattr(generic_related_object, field.name, primary_object)
generic_related_object.save()
if alias_object.id:
deleted_objects += [alias_object]
self.stdout.write("Deleted {} with id {}\n".format(
alias_object, alias_object.id))
alias_object.delete()
deleted_objects_count += 1
return primary_object, deleted_objects, deleted_objects_count | [
"def",
"merge_model_instances",
"(",
"self",
",",
"primary_object",
",",
"alias_objects",
")",
":",
"generic_fields",
"=",
"get_generic_fields",
"(",
")",
"# get related fields",
"related_fields",
"=",
"list",
"(",
"filter",
"(",
"lambda",
"x",
":",
"x",
".",
"i... | Merge several model instances into one, the `primary_object`.
Use this function to merge model objects and migrate all of the related
fields from the alias objects the primary object. | [
"Merge",
"several",
"model",
"instances",
"into",
"one",
"the",
"primary_object",
".",
"Use",
"this",
"function",
"to",
"merge",
"model",
"objects",
"and",
"migrate",
"all",
"of",
"the",
"related",
"fields",
"from",
"the",
"alias",
"objects",
"the",
"primary",... | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/merge_model_instances.py#L132-L222 | train | 219,845 |
django-extensions/django-extensions | django_extensions/management/commands/graph_models.py | Command.add_arguments | def add_arguments(self, parser):
"""Unpack self.arguments for parser.add_arguments."""
parser.add_argument('app_label', nargs='*')
for argument in self.arguments:
parser.add_argument(*argument.split(' '), **self.arguments[argument]) | python | def add_arguments(self, parser):
"""Unpack self.arguments for parser.add_arguments."""
parser.add_argument('app_label', nargs='*')
for argument in self.arguments:
parser.add_argument(*argument.split(' '), **self.arguments[argument]) | [
"def",
"add_arguments",
"(",
"self",
",",
"parser",
")",
":",
"parser",
".",
"add_argument",
"(",
"'app_label'",
",",
"nargs",
"=",
"'*'",
")",
"for",
"argument",
"in",
"self",
".",
"arguments",
":",
"parser",
".",
"add_argument",
"(",
"*",
"argument",
"... | Unpack self.arguments for parser.add_arguments. | [
"Unpack",
"self",
".",
"arguments",
"for",
"parser",
".",
"add_arguments",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/graph_models.py#L169-L173 | train | 219,846 |
django-extensions/django-extensions | django_extensions/management/commands/graph_models.py | Command.render_output_json | def render_output_json(self, graph_data, output_file=None):
"""Write model data to file or stdout in JSON format."""
if output_file:
with open(output_file, 'wt') as json_output_f:
json.dump(graph_data, json_output_f)
else:
self.stdout.write(json.dumps(graph_data)) | python | def render_output_json(self, graph_data, output_file=None):
"""Write model data to file or stdout in JSON format."""
if output_file:
with open(output_file, 'wt') as json_output_f:
json.dump(graph_data, json_output_f)
else:
self.stdout.write(json.dumps(graph_data)) | [
"def",
"render_output_json",
"(",
"self",
",",
"graph_data",
",",
"output_file",
"=",
"None",
")",
":",
"if",
"output_file",
":",
"with",
"open",
"(",
"output_file",
",",
"'wt'",
")",
"as",
"json_output_f",
":",
"json",
".",
"dump",
"(",
"graph_data",
",",... | Write model data to file or stdout in JSON format. | [
"Write",
"model",
"data",
"to",
"file",
"or",
"stdout",
"in",
"JSON",
"format",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/graph_models.py#L246-L252 | train | 219,847 |
django-extensions/django-extensions | django_extensions/management/commands/graph_models.py | Command.render_output_pygraphviz | def render_output_pygraphviz(self, dotdata, **kwargs):
"""Render model data as image using pygraphviz"""
if not HAS_PYGRAPHVIZ:
raise CommandError("You need to install pygraphviz python module")
version = pygraphviz.__version__.rstrip("-svn")
try:
if tuple(int(v) for v in version.split('.')) < (0, 36):
# HACK around old/broken AGraph before version 0.36 (ubuntu ships with this old version)
tmpfile = tempfile.NamedTemporaryFile()
tmpfile.write(dotdata)
tmpfile.seek(0)
dotdata = tmpfile.name
except ValueError:
pass
graph = pygraphviz.AGraph(dotdata)
graph.layout(prog=kwargs['layout'])
graph.draw(kwargs['outputfile']) | python | def render_output_pygraphviz(self, dotdata, **kwargs):
"""Render model data as image using pygraphviz"""
if not HAS_PYGRAPHVIZ:
raise CommandError("You need to install pygraphviz python module")
version = pygraphviz.__version__.rstrip("-svn")
try:
if tuple(int(v) for v in version.split('.')) < (0, 36):
# HACK around old/broken AGraph before version 0.36 (ubuntu ships with this old version)
tmpfile = tempfile.NamedTemporaryFile()
tmpfile.write(dotdata)
tmpfile.seek(0)
dotdata = tmpfile.name
except ValueError:
pass
graph = pygraphviz.AGraph(dotdata)
graph.layout(prog=kwargs['layout'])
graph.draw(kwargs['outputfile']) | [
"def",
"render_output_pygraphviz",
"(",
"self",
",",
"dotdata",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"HAS_PYGRAPHVIZ",
":",
"raise",
"CommandError",
"(",
"\"You need to install pygraphviz python module\"",
")",
"version",
"=",
"pygraphviz",
".",
"__versio... | Render model data as image using pygraphviz | [
"Render",
"model",
"data",
"as",
"image",
"using",
"pygraphviz"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/graph_models.py#L254-L272 | train | 219,848 |
django-extensions/django-extensions | django_extensions/management/commands/graph_models.py | Command.render_output_pydot | def render_output_pydot(self, dotdata, **kwargs):
"""Render model data as image using pydot"""
if not HAS_PYDOT:
raise CommandError("You need to install pydot python module")
graph = pydot.graph_from_dot_data(dotdata)
if not graph:
raise CommandError("pydot returned an error")
if isinstance(graph, (list, tuple)):
if len(graph) > 1:
sys.stderr.write("Found more then one graph, rendering only the first one.\n")
graph = graph[0]
output_file = kwargs['outputfile']
formats = [
'bmp', 'canon', 'cmap', 'cmapx', 'cmapx_np', 'dot', 'dia', 'emf',
'em', 'fplus', 'eps', 'fig', 'gd', 'gd2', 'gif', 'gv', 'imap',
'imap_np', 'ismap', 'jpe', 'jpeg', 'jpg', 'metafile', 'pdf',
'pic', 'plain', 'plain-ext', 'png', 'pov', 'ps', 'ps2', 'svg',
'svgz', 'tif', 'tiff', 'tk', 'vml', 'vmlz', 'vrml', 'wbmp', 'xdot',
]
ext = output_file[output_file.rfind('.') + 1:]
format_ = ext if ext in formats else 'raw'
graph.write(output_file, format=format_) | python | def render_output_pydot(self, dotdata, **kwargs):
"""Render model data as image using pydot"""
if not HAS_PYDOT:
raise CommandError("You need to install pydot python module")
graph = pydot.graph_from_dot_data(dotdata)
if not graph:
raise CommandError("pydot returned an error")
if isinstance(graph, (list, tuple)):
if len(graph) > 1:
sys.stderr.write("Found more then one graph, rendering only the first one.\n")
graph = graph[0]
output_file = kwargs['outputfile']
formats = [
'bmp', 'canon', 'cmap', 'cmapx', 'cmapx_np', 'dot', 'dia', 'emf',
'em', 'fplus', 'eps', 'fig', 'gd', 'gd2', 'gif', 'gv', 'imap',
'imap_np', 'ismap', 'jpe', 'jpeg', 'jpg', 'metafile', 'pdf',
'pic', 'plain', 'plain-ext', 'png', 'pov', 'ps', 'ps2', 'svg',
'svgz', 'tif', 'tiff', 'tk', 'vml', 'vmlz', 'vrml', 'wbmp', 'xdot',
]
ext = output_file[output_file.rfind('.') + 1:]
format_ = ext if ext in formats else 'raw'
graph.write(output_file, format=format_) | [
"def",
"render_output_pydot",
"(",
"self",
",",
"dotdata",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"HAS_PYDOT",
":",
"raise",
"CommandError",
"(",
"\"You need to install pydot python module\"",
")",
"graph",
"=",
"pydot",
".",
"graph_from_dot_data",
"(",
... | Render model data as image using pydot | [
"Render",
"model",
"data",
"as",
"image",
"using",
"pydot"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/graph_models.py#L274-L297 | train | 219,849 |
django-extensions/django-extensions | django_extensions/management/commands/show_urls.py | Command.extract_views_from_urlpatterns | def extract_views_from_urlpatterns(self, urlpatterns, base='', namespace=None):
"""
Return a list of views from a list of urlpatterns.
Each object in the returned list is a three-tuple: (view_func, regex, name)
"""
views = []
for p in urlpatterns:
if isinstance(p, (URLPattern, RegexURLPattern)):
try:
if not p.name:
name = p.name
elif namespace:
name = '{0}:{1}'.format(namespace, p.name)
else:
name = p.name
pattern = describe_pattern(p)
views.append((p.callback, base + pattern, name))
except ViewDoesNotExist:
continue
elif isinstance(p, (URLResolver, RegexURLResolver)):
try:
patterns = p.url_patterns
except ImportError:
continue
if namespace and p.namespace:
_namespace = '{0}:{1}'.format(namespace, p.namespace)
else:
_namespace = (p.namespace or namespace)
pattern = describe_pattern(p)
if isinstance(p, LocaleRegexURLResolver):
for language in self.LANGUAGES:
with translation.override(language[0]):
views.extend(self.extract_views_from_urlpatterns(patterns, base + pattern, namespace=_namespace))
else:
views.extend(self.extract_views_from_urlpatterns(patterns, base + pattern, namespace=_namespace))
elif hasattr(p, '_get_callback'):
try:
views.append((p._get_callback(), base + describe_pattern(p), p.name))
except ViewDoesNotExist:
continue
elif hasattr(p, 'url_patterns') or hasattr(p, '_get_url_patterns'):
try:
patterns = p.url_patterns
except ImportError:
continue
views.extend(self.extract_views_from_urlpatterns(patterns, base + describe_pattern(p), namespace=namespace))
else:
raise TypeError("%s does not appear to be a urlpattern object" % p)
return views | python | def extract_views_from_urlpatterns(self, urlpatterns, base='', namespace=None):
"""
Return a list of views from a list of urlpatterns.
Each object in the returned list is a three-tuple: (view_func, regex, name)
"""
views = []
for p in urlpatterns:
if isinstance(p, (URLPattern, RegexURLPattern)):
try:
if not p.name:
name = p.name
elif namespace:
name = '{0}:{1}'.format(namespace, p.name)
else:
name = p.name
pattern = describe_pattern(p)
views.append((p.callback, base + pattern, name))
except ViewDoesNotExist:
continue
elif isinstance(p, (URLResolver, RegexURLResolver)):
try:
patterns = p.url_patterns
except ImportError:
continue
if namespace and p.namespace:
_namespace = '{0}:{1}'.format(namespace, p.namespace)
else:
_namespace = (p.namespace or namespace)
pattern = describe_pattern(p)
if isinstance(p, LocaleRegexURLResolver):
for language in self.LANGUAGES:
with translation.override(language[0]):
views.extend(self.extract_views_from_urlpatterns(patterns, base + pattern, namespace=_namespace))
else:
views.extend(self.extract_views_from_urlpatterns(patterns, base + pattern, namespace=_namespace))
elif hasattr(p, '_get_callback'):
try:
views.append((p._get_callback(), base + describe_pattern(p), p.name))
except ViewDoesNotExist:
continue
elif hasattr(p, 'url_patterns') or hasattr(p, '_get_url_patterns'):
try:
patterns = p.url_patterns
except ImportError:
continue
views.extend(self.extract_views_from_urlpatterns(patterns, base + describe_pattern(p), namespace=namespace))
else:
raise TypeError("%s does not appear to be a urlpattern object" % p)
return views | [
"def",
"extract_views_from_urlpatterns",
"(",
"self",
",",
"urlpatterns",
",",
"base",
"=",
"''",
",",
"namespace",
"=",
"None",
")",
":",
"views",
"=",
"[",
"]",
"for",
"p",
"in",
"urlpatterns",
":",
"if",
"isinstance",
"(",
"p",
",",
"(",
"URLPattern",... | Return a list of views from a list of urlpatterns.
Each object in the returned list is a three-tuple: (view_func, regex, name) | [
"Return",
"a",
"list",
"of",
"views",
"from",
"a",
"list",
"of",
"urlpatterns",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/show_urls.py#L196-L245 | train | 219,850 |
django-extensions/django-extensions | django_extensions/admin/__init__.py | ForeignKeyAutocompleteAdminMixin.foreignkey_autocomplete | def foreignkey_autocomplete(self, request):
"""
Search in the fields of the given related model and returns the
result as a simple string to be used by the jQuery Autocomplete plugin
"""
query = request.GET.get('q', None)
app_label = request.GET.get('app_label', None)
model_name = request.GET.get('model_name', None)
search_fields = request.GET.get('search_fields', None)
object_pk = request.GET.get('object_pk', None)
try:
to_string_function = self.related_string_functions[model_name]
except KeyError:
if six.PY3:
to_string_function = lambda x: x.__str__()
else:
to_string_function = lambda x: x.__unicode__()
if search_fields and app_label and model_name and (query or object_pk):
def construct_search(field_name):
# use different lookup methods depending on the notation
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
model = apps.get_model(app_label, model_name)
queryset = model._default_manager.all()
data = ''
if query:
for bit in query.split():
or_queries = [models.Q(**{construct_search(smart_str(field_name)): smart_str(bit)}) for field_name in search_fields.split(',')]
other_qs = QuerySet(model)
other_qs.query.select_related = queryset.query.select_related
other_qs = other_qs.filter(reduce(operator.or_, or_queries))
queryset = queryset & other_qs
additional_filter = self.get_related_filter(model, request)
if additional_filter:
queryset = queryset.filter(additional_filter)
if self.autocomplete_limit:
queryset = queryset[:self.autocomplete_limit]
data = ''.join([six.u('%s|%s\n') % (to_string_function(f), f.pk) for f in queryset])
elif object_pk:
try:
obj = queryset.get(pk=object_pk)
except Exception: # FIXME: use stricter exception checking
pass
else:
data = to_string_function(obj)
return HttpResponse(data, content_type='text/plain')
return HttpResponseNotFound() | python | def foreignkey_autocomplete(self, request):
"""
Search in the fields of the given related model and returns the
result as a simple string to be used by the jQuery Autocomplete plugin
"""
query = request.GET.get('q', None)
app_label = request.GET.get('app_label', None)
model_name = request.GET.get('model_name', None)
search_fields = request.GET.get('search_fields', None)
object_pk = request.GET.get('object_pk', None)
try:
to_string_function = self.related_string_functions[model_name]
except KeyError:
if six.PY3:
to_string_function = lambda x: x.__str__()
else:
to_string_function = lambda x: x.__unicode__()
if search_fields and app_label and model_name and (query or object_pk):
def construct_search(field_name):
# use different lookup methods depending on the notation
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
model = apps.get_model(app_label, model_name)
queryset = model._default_manager.all()
data = ''
if query:
for bit in query.split():
or_queries = [models.Q(**{construct_search(smart_str(field_name)): smart_str(bit)}) for field_name in search_fields.split(',')]
other_qs = QuerySet(model)
other_qs.query.select_related = queryset.query.select_related
other_qs = other_qs.filter(reduce(operator.or_, or_queries))
queryset = queryset & other_qs
additional_filter = self.get_related_filter(model, request)
if additional_filter:
queryset = queryset.filter(additional_filter)
if self.autocomplete_limit:
queryset = queryset[:self.autocomplete_limit]
data = ''.join([six.u('%s|%s\n') % (to_string_function(f), f.pk) for f in queryset])
elif object_pk:
try:
obj = queryset.get(pk=object_pk)
except Exception: # FIXME: use stricter exception checking
pass
else:
data = to_string_function(obj)
return HttpResponse(data, content_type='text/plain')
return HttpResponseNotFound() | [
"def",
"foreignkey_autocomplete",
"(",
"self",
",",
"request",
")",
":",
"query",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'q'",
",",
"None",
")",
"app_label",
"=",
"request",
".",
"GET",
".",
"get",
"(",
"'app_label'",
",",
"None",
")",
"model_na... | Search in the fields of the given related model and returns the
result as a simple string to be used by the jQuery Autocomplete plugin | [
"Search",
"in",
"the",
"fields",
"of",
"the",
"given",
"related",
"model",
"and",
"returns",
"the",
"result",
"as",
"a",
"simple",
"string",
"to",
"be",
"used",
"by",
"the",
"jQuery",
"Autocomplete",
"plugin"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/admin/__init__.py#L67-L126 | train | 219,851 |
django-extensions/django-extensions | django_extensions/admin/__init__.py | ForeignKeyAutocompleteAdminMixin.formfield_for_dbfield | def formfield_for_dbfield(self, db_field, **kwargs):
"""
Override the default widget for Foreignkey fields if they are
specified in the related_search_fields class attribute.
"""
if isinstance(db_field, models.ForeignKey) and db_field.name in self.related_search_fields:
help_text = self.get_help_text(db_field.name, db_field.remote_field.model._meta.object_name)
if kwargs.get('help_text'):
help_text = six.u('%s %s' % (kwargs['help_text'], help_text))
kwargs['widget'] = ForeignKeySearchInput(db_field.remote_field, self.related_search_fields[db_field.name])
kwargs['help_text'] = help_text
return super(ForeignKeyAutocompleteAdminMixin, self).formfield_for_dbfield(db_field, **kwargs) | python | def formfield_for_dbfield(self, db_field, **kwargs):
"""
Override the default widget for Foreignkey fields if they are
specified in the related_search_fields class attribute.
"""
if isinstance(db_field, models.ForeignKey) and db_field.name in self.related_search_fields:
help_text = self.get_help_text(db_field.name, db_field.remote_field.model._meta.object_name)
if kwargs.get('help_text'):
help_text = six.u('%s %s' % (kwargs['help_text'], help_text))
kwargs['widget'] = ForeignKeySearchInput(db_field.remote_field, self.related_search_fields[db_field.name])
kwargs['help_text'] = help_text
return super(ForeignKeyAutocompleteAdminMixin, self).formfield_for_dbfield(db_field, **kwargs) | [
"def",
"formfield_for_dbfield",
"(",
"self",
",",
"db_field",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"isinstance",
"(",
"db_field",
",",
"models",
".",
"ForeignKey",
")",
"and",
"db_field",
".",
"name",
"in",
"self",
".",
"related_search_fields",
":",
"h... | Override the default widget for Foreignkey fields if they are
specified in the related_search_fields class attribute. | [
"Override",
"the",
"default",
"widget",
"for",
"Foreignkey",
"fields",
"if",
"they",
"are",
"specified",
"in",
"the",
"related_search_fields",
"class",
"attribute",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/admin/__init__.py#L147-L158 | train | 219,852 |
django-extensions/django-extensions | django_extensions/management/technical_response.py | null_technical_500_response | def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""
Alternative function for django.views.debug.technical_500_response.
Django's convert_exception_to_response() wrapper is called on each 'Middleware' object to avoid
leaking exceptions. If an uncaught exception is raised, the wrapper calls technical_500_response()
to create a response for django's debug view.
Runserver_plus overrides the django debug view's technical_500_response() function to allow for
an enhanced WSGI debugger view to be displayed. However, because Django calls
convert_exception_to_response() on each object in the stack of Middleware objects, re-raising an
error quickly pollutes the traceback displayed.
Runserver_plus only needs needs traceback frames relevant to WSGIHandler Middleware objects, so
only store the traceback if it is for a WSGIHandler. If an exception is not raised here, Django
eventually throws an error for not getting a valid response object for its debug view.
"""
try:
# Store the most recent tb for WSGI requests. The class can be found in the second frame of the tb
if isinstance(tb.tb_next.tb_frame.f_locals.get('self'), WSGIHandler):
tld.wsgi_tb = tb
elif tld.wsgi_tb:
tb = tld.wsgi_tb
except AttributeError:
pass
six.reraise(exc_type, exc_value, tb) | python | def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""
Alternative function for django.views.debug.technical_500_response.
Django's convert_exception_to_response() wrapper is called on each 'Middleware' object to avoid
leaking exceptions. If an uncaught exception is raised, the wrapper calls technical_500_response()
to create a response for django's debug view.
Runserver_plus overrides the django debug view's technical_500_response() function to allow for
an enhanced WSGI debugger view to be displayed. However, because Django calls
convert_exception_to_response() on each object in the stack of Middleware objects, re-raising an
error quickly pollutes the traceback displayed.
Runserver_plus only needs needs traceback frames relevant to WSGIHandler Middleware objects, so
only store the traceback if it is for a WSGIHandler. If an exception is not raised here, Django
eventually throws an error for not getting a valid response object for its debug view.
"""
try:
# Store the most recent tb for WSGI requests. The class can be found in the second frame of the tb
if isinstance(tb.tb_next.tb_frame.f_locals.get('self'), WSGIHandler):
tld.wsgi_tb = tb
elif tld.wsgi_tb:
tb = tld.wsgi_tb
except AttributeError:
pass
six.reraise(exc_type, exc_value, tb) | [
"def",
"null_technical_500_response",
"(",
"request",
",",
"exc_type",
",",
"exc_value",
",",
"tb",
",",
"status_code",
"=",
"500",
")",
":",
"try",
":",
"# Store the most recent tb for WSGI requests. The class can be found in the second frame of the tb",
"if",
"isinstance",
... | Alternative function for django.views.debug.technical_500_response.
Django's convert_exception_to_response() wrapper is called on each 'Middleware' object to avoid
leaking exceptions. If an uncaught exception is raised, the wrapper calls technical_500_response()
to create a response for django's debug view.
Runserver_plus overrides the django debug view's technical_500_response() function to allow for
an enhanced WSGI debugger view to be displayed. However, because Django calls
convert_exception_to_response() on each object in the stack of Middleware objects, re-raising an
error quickly pollutes the traceback displayed.
Runserver_plus only needs needs traceback frames relevant to WSGIHandler Middleware objects, so
only store the traceback if it is for a WSGIHandler. If an exception is not raised here, Django
eventually throws an error for not getting a valid response object for its debug view. | [
"Alternative",
"function",
"for",
"django",
".",
"views",
".",
"debug",
".",
"technical_500_response",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/technical_response.py#L11-L37 | train | 219,853 |
django-extensions/django-extensions | django_extensions/management/commands/sync_s3.py | Command.invalidate_objects_cf | def invalidate_objects_cf(self):
"""Split the invalidation request in groups of 1000 objects"""
if not self.AWS_CLOUDFRONT_DISTRIBUTION:
raise CommandError(
'An object invalidation was requested but the variable '
'AWS_CLOUDFRONT_DISTRIBUTION is not present in your settings.')
# We can't send more than 1000 objects in the same invalidation
# request.
chunk = 1000
# Connecting to CloudFront
conn = self.open_cf()
# Splitting the object list
objs = self.uploaded_files
chunks = [objs[i:i + chunk] for i in range(0, len(objs), chunk)]
# Invalidation requests
for paths in chunks:
conn.create_invalidation_request(
self.AWS_CLOUDFRONT_DISTRIBUTION, paths) | python | def invalidate_objects_cf(self):
"""Split the invalidation request in groups of 1000 objects"""
if not self.AWS_CLOUDFRONT_DISTRIBUTION:
raise CommandError(
'An object invalidation was requested but the variable '
'AWS_CLOUDFRONT_DISTRIBUTION is not present in your settings.')
# We can't send more than 1000 objects in the same invalidation
# request.
chunk = 1000
# Connecting to CloudFront
conn = self.open_cf()
# Splitting the object list
objs = self.uploaded_files
chunks = [objs[i:i + chunk] for i in range(0, len(objs), chunk)]
# Invalidation requests
for paths in chunks:
conn.create_invalidation_request(
self.AWS_CLOUDFRONT_DISTRIBUTION, paths) | [
"def",
"invalidate_objects_cf",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"AWS_CLOUDFRONT_DISTRIBUTION",
":",
"raise",
"CommandError",
"(",
"'An object invalidation was requested but the variable '",
"'AWS_CLOUDFRONT_DISTRIBUTION is not present in your settings.'",
")",
"#... | Split the invalidation request in groups of 1000 objects | [
"Split",
"the",
"invalidation",
"request",
"in",
"groups",
"of",
"1000",
"objects"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/sync_s3.py#L253-L274 | train | 219,854 |
django-extensions/django-extensions | django_extensions/management/commands/sync_s3.py | Command.open_s3 | def open_s3(self):
"""Open connection to S3 returning bucket and key"""
conn = boto.connect_s3(
self.AWS_ACCESS_KEY_ID,
self.AWS_SECRET_ACCESS_KEY,
**self.get_s3connection_kwargs())
try:
bucket = conn.get_bucket(self.AWS_BUCKET_NAME)
except boto.exception.S3ResponseError:
bucket = conn.create_bucket(self.AWS_BUCKET_NAME)
return bucket, boto.s3.key.Key(bucket) | python | def open_s3(self):
"""Open connection to S3 returning bucket and key"""
conn = boto.connect_s3(
self.AWS_ACCESS_KEY_ID,
self.AWS_SECRET_ACCESS_KEY,
**self.get_s3connection_kwargs())
try:
bucket = conn.get_bucket(self.AWS_BUCKET_NAME)
except boto.exception.S3ResponseError:
bucket = conn.create_bucket(self.AWS_BUCKET_NAME)
return bucket, boto.s3.key.Key(bucket) | [
"def",
"open_s3",
"(",
"self",
")",
":",
"conn",
"=",
"boto",
".",
"connect_s3",
"(",
"self",
".",
"AWS_ACCESS_KEY_ID",
",",
"self",
".",
"AWS_SECRET_ACCESS_KEY",
",",
"*",
"*",
"self",
".",
"get_s3connection_kwargs",
"(",
")",
")",
"try",
":",
"bucket",
... | Open connection to S3 returning bucket and key | [
"Open",
"connection",
"to",
"S3",
"returning",
"bucket",
"and",
"key"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/sync_s3.py#L298-L308 | train | 219,855 |
django-extensions/django-extensions | django_extensions/management/commands/shell_plus.py | Command.set_application_name | def set_application_name(self, options):
"""
Set the application_name on PostgreSQL connection
Use the fallback_application_name to let the user override
it with PGAPPNAME env variable
http://www.postgresql.org/docs/9.4/static/libpq-connect.html#LIBPQ-PARAMKEYWORDS # noqa
"""
supported_backends = ['django.db.backends.postgresql',
'django.db.backends.postgresql_psycopg2']
opt_name = 'fallback_application_name'
default_app_name = 'django_shell'
app_name = default_app_name
dbs = getattr(settings, 'DATABASES', [])
# lookup over all the databases entry
for db in dbs.keys():
if dbs[db]['ENGINE'] in supported_backends:
try:
options = dbs[db]['OPTIONS']
except KeyError:
options = {}
# dot not override a defined value
if opt_name in options.keys():
app_name = dbs[db]['OPTIONS'][opt_name]
else:
dbs[db].setdefault('OPTIONS', {}).update({opt_name: default_app_name})
app_name = default_app_name
return app_name | python | def set_application_name(self, options):
"""
Set the application_name on PostgreSQL connection
Use the fallback_application_name to let the user override
it with PGAPPNAME env variable
http://www.postgresql.org/docs/9.4/static/libpq-connect.html#LIBPQ-PARAMKEYWORDS # noqa
"""
supported_backends = ['django.db.backends.postgresql',
'django.db.backends.postgresql_psycopg2']
opt_name = 'fallback_application_name'
default_app_name = 'django_shell'
app_name = default_app_name
dbs = getattr(settings, 'DATABASES', [])
# lookup over all the databases entry
for db in dbs.keys():
if dbs[db]['ENGINE'] in supported_backends:
try:
options = dbs[db]['OPTIONS']
except KeyError:
options = {}
# dot not override a defined value
if opt_name in options.keys():
app_name = dbs[db]['OPTIONS'][opt_name]
else:
dbs[db].setdefault('OPTIONS', {}).update({opt_name: default_app_name})
app_name = default_app_name
return app_name | [
"def",
"set_application_name",
"(",
"self",
",",
"options",
")",
":",
"supported_backends",
"=",
"[",
"'django.db.backends.postgresql'",
",",
"'django.db.backends.postgresql_psycopg2'",
"]",
"opt_name",
"=",
"'fallback_application_name'",
"default_app_name",
"=",
"'django_she... | Set the application_name on PostgreSQL connection
Use the fallback_application_name to let the user override
it with PGAPPNAME env variable
http://www.postgresql.org/docs/9.4/static/libpq-connect.html#LIBPQ-PARAMKEYWORDS # noqa | [
"Set",
"the",
"application_name",
"on",
"PostgreSQL",
"connection"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/shell_plus.py#L394-L425 | train | 219,856 |
django-extensions/django-extensions | django_extensions/management/commands/mail_debug.py | ExtensionDebuggingServer.process_message | def process_message(self, peer, mailfrom, rcpttos, data, **kwargs):
"""Output will be sent to the module logger at INFO level."""
inheaders = 1
lines = data.split('\n')
logger.info('---------- MESSAGE FOLLOWS ----------')
for line in lines:
# headers first
if inheaders and not line:
logger.info('X-Peer: %s' % peer[0])
inheaders = 0
logger.info(line)
logger.info('------------ END MESSAGE ------------') | python | def process_message(self, peer, mailfrom, rcpttos, data, **kwargs):
"""Output will be sent to the module logger at INFO level."""
inheaders = 1
lines = data.split('\n')
logger.info('---------- MESSAGE FOLLOWS ----------')
for line in lines:
# headers first
if inheaders and not line:
logger.info('X-Peer: %s' % peer[0])
inheaders = 0
logger.info(line)
logger.info('------------ END MESSAGE ------------') | [
"def",
"process_message",
"(",
"self",
",",
"peer",
",",
"mailfrom",
",",
"rcpttos",
",",
"data",
",",
"*",
"*",
"kwargs",
")",
":",
"inheaders",
"=",
"1",
"lines",
"=",
"data",
".",
"split",
"(",
"'\\n'",
")",
"logger",
".",
"info",
"(",
"'---------... | Output will be sent to the module logger at INFO level. | [
"Output",
"will",
"be",
"sent",
"to",
"the",
"module",
"logger",
"at",
"INFO",
"level",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/commands/mail_debug.py#L20-L31 | train | 219,857 |
django-extensions/django-extensions | django_extensions/management/email_notifications.py | EmailNotificationCommand.run_from_argv | def run_from_argv(self, argv):
"""Overriden in order to access the command line arguments."""
self.argv_string = ' '.join(argv)
super(EmailNotificationCommand, self).run_from_argv(argv) | python | def run_from_argv(self, argv):
"""Overriden in order to access the command line arguments."""
self.argv_string = ' '.join(argv)
super(EmailNotificationCommand, self).run_from_argv(argv) | [
"def",
"run_from_argv",
"(",
"self",
",",
"argv",
")",
":",
"self",
".",
"argv_string",
"=",
"' '",
".",
"join",
"(",
"argv",
")",
"super",
"(",
"EmailNotificationCommand",
",",
"self",
")",
".",
"run_from_argv",
"(",
"argv",
")"
] | Overriden in order to access the command line arguments. | [
"Overriden",
"in",
"order",
"to",
"access",
"the",
"command",
"line",
"arguments",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/email_notifications.py#L62-L65 | train | 219,858 |
django-extensions/django-extensions | django_extensions/management/email_notifications.py | EmailNotificationCommand.execute | def execute(self, *args, **options):
"""
Overriden in order to send emails on unhandled exception.
If an unhandled exception in ``def handle(self, *args, **options)``
occurs and `--email-exception` is set or `self.email_exception` is
set to True send an email to ADMINS with the traceback and then
reraise the exception.
"""
try:
super(EmailNotificationCommand, self).execute(*args, **options)
except Exception:
if options['email_exception'] or getattr(self, 'email_exception', False):
self.send_email_notification(include_traceback=True)
raise | python | def execute(self, *args, **options):
"""
Overriden in order to send emails on unhandled exception.
If an unhandled exception in ``def handle(self, *args, **options)``
occurs and `--email-exception` is set or `self.email_exception` is
set to True send an email to ADMINS with the traceback and then
reraise the exception.
"""
try:
super(EmailNotificationCommand, self).execute(*args, **options)
except Exception:
if options['email_exception'] or getattr(self, 'email_exception', False):
self.send_email_notification(include_traceback=True)
raise | [
"def",
"execute",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"options",
")",
":",
"try",
":",
"super",
"(",
"EmailNotificationCommand",
",",
"self",
")",
".",
"execute",
"(",
"*",
"args",
",",
"*",
"*",
"options",
")",
"except",
"Exception",
":",
... | Overriden in order to send emails on unhandled exception.
If an unhandled exception in ``def handle(self, *args, **options)``
occurs and `--email-exception` is set or `self.email_exception` is
set to True send an email to ADMINS with the traceback and then
reraise the exception. | [
"Overriden",
"in",
"order",
"to",
"send",
"emails",
"on",
"unhandled",
"exception",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/email_notifications.py#L67-L81 | train | 219,859 |
django-extensions/django-extensions | django_extensions/management/email_notifications.py | EmailNotificationCommand.send_email_notification | def send_email_notification(self, notification_id=None, include_traceback=False, verbosity=1):
"""
Send email notifications.
Reads settings from settings.EMAIL_NOTIFICATIONS dict, if available,
using ``notification_id`` as a key or else provides reasonable
defaults.
"""
# Load email notification settings if available
if notification_id is not None:
try:
email_settings = settings.EMAIL_NOTIFICATIONS.get(notification_id, {})
except AttributeError:
email_settings = {}
else:
email_settings = {}
# Exit if no traceback found and not in 'notify always' mode
if not include_traceback and not email_settings.get('notification_level', 0):
print(self.style.ERROR("Exiting, not in 'notify always' mode."))
return
# Set email fields.
subject = email_settings.get('subject', "Django extensions email notification.")
command_name = self.__module__.split('.')[-1]
body = email_settings.get(
'body',
"Reporting execution of command: '%s'" % command_name
)
# Include traceback
if include_traceback and not email_settings.get('no_traceback', False):
try:
exc_type, exc_value, exc_traceback = sys.exc_info()
trb = ''.join(traceback.format_tb(exc_traceback))
body += "\n\nTraceback:\n\n%s\n" % trb
finally:
del exc_traceback
# Set from address
from_email = email_settings.get('from_email', settings.DEFAULT_FROM_EMAIL)
# Calculate recipients
recipients = list(email_settings.get('recipients', []))
if not email_settings.get('no_admins', False):
recipients.extend(settings.ADMINS)
if not recipients:
if verbosity > 0:
print(self.style.ERROR("No email recipients available."))
return
# Send email...
send_mail(subject, body, from_email, recipients,
fail_silently=email_settings.get('fail_silently', True)) | python | def send_email_notification(self, notification_id=None, include_traceback=False, verbosity=1):
"""
Send email notifications.
Reads settings from settings.EMAIL_NOTIFICATIONS dict, if available,
using ``notification_id`` as a key or else provides reasonable
defaults.
"""
# Load email notification settings if available
if notification_id is not None:
try:
email_settings = settings.EMAIL_NOTIFICATIONS.get(notification_id, {})
except AttributeError:
email_settings = {}
else:
email_settings = {}
# Exit if no traceback found and not in 'notify always' mode
if not include_traceback and not email_settings.get('notification_level', 0):
print(self.style.ERROR("Exiting, not in 'notify always' mode."))
return
# Set email fields.
subject = email_settings.get('subject', "Django extensions email notification.")
command_name = self.__module__.split('.')[-1]
body = email_settings.get(
'body',
"Reporting execution of command: '%s'" % command_name
)
# Include traceback
if include_traceback and not email_settings.get('no_traceback', False):
try:
exc_type, exc_value, exc_traceback = sys.exc_info()
trb = ''.join(traceback.format_tb(exc_traceback))
body += "\n\nTraceback:\n\n%s\n" % trb
finally:
del exc_traceback
# Set from address
from_email = email_settings.get('from_email', settings.DEFAULT_FROM_EMAIL)
# Calculate recipients
recipients = list(email_settings.get('recipients', []))
if not email_settings.get('no_admins', False):
recipients.extend(settings.ADMINS)
if not recipients:
if verbosity > 0:
print(self.style.ERROR("No email recipients available."))
return
# Send email...
send_mail(subject, body, from_email, recipients,
fail_silently=email_settings.get('fail_silently', True)) | [
"def",
"send_email_notification",
"(",
"self",
",",
"notification_id",
"=",
"None",
",",
"include_traceback",
"=",
"False",
",",
"verbosity",
"=",
"1",
")",
":",
"# Load email notification settings if available",
"if",
"notification_id",
"is",
"not",
"None",
":",
"t... | Send email notifications.
Reads settings from settings.EMAIL_NOTIFICATIONS dict, if available,
using ``notification_id`` as a key or else provides reasonable
defaults. | [
"Send",
"email",
"notifications",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/email_notifications.py#L83-L140 | train | 219,860 |
django-extensions/django-extensions | django_extensions/compat.py | load_tag_library | def load_tag_library(libname):
"""
Load a templatetag library on multiple Django versions.
Returns None if the library isn't loaded.
"""
from django.template.backends.django import get_installed_libraries
from django.template.library import InvalidTemplateLibrary
try:
lib = get_installed_libraries()[libname]
lib = importlib.import_module(lib).register
return lib
except (InvalidTemplateLibrary, KeyError):
return None | python | def load_tag_library(libname):
"""
Load a templatetag library on multiple Django versions.
Returns None if the library isn't loaded.
"""
from django.template.backends.django import get_installed_libraries
from django.template.library import InvalidTemplateLibrary
try:
lib = get_installed_libraries()[libname]
lib = importlib.import_module(lib).register
return lib
except (InvalidTemplateLibrary, KeyError):
return None | [
"def",
"load_tag_library",
"(",
"libname",
")",
":",
"from",
"django",
".",
"template",
".",
"backends",
".",
"django",
"import",
"get_installed_libraries",
"from",
"django",
".",
"template",
".",
"library",
"import",
"InvalidTemplateLibrary",
"try",
":",
"lib",
... | Load a templatetag library on multiple Django versions.
Returns None if the library isn't loaded. | [
"Load",
"a",
"templatetag",
"library",
"on",
"multiple",
"Django",
"versions",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/compat.py#L17-L30 | train | 219,861 |
django-extensions/django-extensions | django_extensions/compat.py | get_template_setting | def get_template_setting(template_key, default=None):
""" Read template settings """
templates_var = getattr(settings, 'TEMPLATES', None)
if templates_var:
for tdict in templates_var:
if template_key in tdict:
return tdict[template_key]
return default | python | def get_template_setting(template_key, default=None):
""" Read template settings """
templates_var = getattr(settings, 'TEMPLATES', None)
if templates_var:
for tdict in templates_var:
if template_key in tdict:
return tdict[template_key]
return default | [
"def",
"get_template_setting",
"(",
"template_key",
",",
"default",
"=",
"None",
")",
":",
"templates_var",
"=",
"getattr",
"(",
"settings",
",",
"'TEMPLATES'",
",",
"None",
")",
"if",
"templates_var",
":",
"for",
"tdict",
"in",
"templates_var",
":",
"if",
"... | Read template settings | [
"Read",
"template",
"settings"
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/compat.py#L33-L40 | train | 219,862 |
def use_model(self, model_name):
    """
    Decide whether to use a model, based on the model name and the lists of
    models to exclude and include.

    Precedence: when `exclude_models` is non-empty it is checked first and a
    match excludes the model; otherwise `include_models` is checked and a
    match includes it. Patterns may contain `*` wildcards. When no pattern
    matches, the model is used unless an include list was given.
    """
    def _matches(patterns):
        # Translate shell-style '*' wildcards into anchored regexes.
        return any(
            re.search('^%s$' % pattern.replace('*', '.*'), model_name)
            for pattern in patterns
        )

    # Check against the exclude list.
    if self.exclude_models:
        if _matches(self.exclude_models):
            return False
    # Check against the include list.
    elif self.include_models:
        if _matches(self.include_models):
            return True
    # Return `True` if `include_models` is falsey, otherwise return `False`.
    return not self.include_models
"""
Decide whether to use a model, based on the model name and the lists of
models to exclude and include.
"""
# Check against exclude list.
if self.exclude_models:
for model_pattern in self.exclude_models:
model_pattern = '^%s$' % model_pattern.replace('*', '.*')
if re.search(model_pattern, model_name):
return False
# Check against exclude list.
elif self.include_models:
for model_pattern in self.include_models:
model_pattern = '^%s$' % model_pattern.replace('*', '.*')
if re.search(model_pattern, model_name):
return True
# Return `True` if `include_models` is falsey, otherwise return `False`.
return not self.include_models | [
"def",
"use_model",
"(",
"self",
",",
"model_name",
")",
":",
"# Check against exclude list.",
"if",
"self",
".",
"exclude_models",
":",
"for",
"model_pattern",
"in",
"self",
".",
"exclude_models",
":",
"model_pattern",
"=",
"'^%s$'",
"%",
"model_pattern",
".",
... | Decide whether to use a model, based on the model name and the lists of
models to exclude and include. | [
"Decide",
"whether",
"to",
"use",
"a",
"model",
"based",
"on",
"the",
"model",
"name",
"and",
"the",
"lists",
"of",
"models",
"to",
"exclude",
"and",
"include",
"."
] | 7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8 | https://github.com/django-extensions/django-extensions/blob/7e0bef97ea6cb7f9eea5e2528e3a985a83a7b9b8/django_extensions/management/modelviz.py#L359-L377 | train | 219,863 |
def GetClosestPoint(x, a, b):
    """Return the point on the great-circle segment ab closest to x.

    All three arguments must be unit-length Points.
    """
    assert x.IsUnitLength()
    assert a.IsUnitLength()
    assert b.IsUnitLength()
    normal = a.RobustCrossProd(b)
    # Project x onto the plane of the great circle through a and b.
    projection = x.Minus(
        normal.Times(x.DotProd(normal) / normal.Norm2()))
    # If the projection falls within the arc from a to b, it is the answer.
    if SimpleCCW(normal, a, projection) and SimpleCCW(projection, b, normal):
        return projection.Normalize()
    # Otherwise the closest point is whichever endpoint is nearer to x.
    if x.Minus(a).Norm2() <= x.Minus(b).Norm2():
        return a
    return b
"""
Returns the point on the great circle segment ab closest to x.
"""
assert(x.IsUnitLength())
assert(a.IsUnitLength())
assert(b.IsUnitLength())
a_cross_b = a.RobustCrossProd(b)
# project to the great circle going through a and b
p = x.Minus(
a_cross_b.Times(
x.DotProd(a_cross_b) / a_cross_b.Norm2()))
# if p lies between a and b, return it
if SimpleCCW(a_cross_b, a, p) and SimpleCCW(p, b, a_cross_b):
return p.Normalize()
# otherwise return the closer of a or b
if x.Minus(a).Norm2() <= x.Minus(b).Norm2():
return a
else:
return b | [
"def",
"GetClosestPoint",
"(",
"x",
",",
"a",
",",
"b",
")",
":",
"assert",
"(",
"x",
".",
"IsUnitLength",
"(",
")",
")",
"assert",
"(",
"a",
".",
"IsUnitLength",
"(",
")",
")",
"assert",
"(",
"b",
".",
"IsUnitLength",
"(",
")",
")",
"a_cross_b",
... | Returns the point on the great circle segment ab closest to x. | [
"Returns",
"the",
"point",
"on",
"the",
"great",
"circle",
"segment",
"ab",
"closest",
"to",
"x",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L221-L243 | train | 219,864 |
def Plus(self, other):
    """Return a new Point that is the component-wise sum of self and other."""
    return Point(self.x + other.x, self.y + other.y, self.z + other.z)
"""
Returns a new point which is the pointwise sum of self and other.
"""
return Point(self.x + other.x,
self.y + other.y,
self.z + other.z) | [
"def",
"Plus",
"(",
"self",
",",
"other",
")",
":",
"return",
"Point",
"(",
"self",
".",
"x",
"+",
"other",
".",
"x",
",",
"self",
".",
"y",
"+",
"other",
".",
"y",
",",
"self",
".",
"z",
"+",
"other",
".",
"z",
")"
] | Returns a new point which is the pointwise sum of self and other. | [
"Returns",
"a",
"new",
"point",
"which",
"is",
"the",
"pointwise",
"sum",
"of",
"self",
"and",
"other",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L79-L85 | train | 219,865 |
def Minus(self, other):
    """Return a new Point that is the component-wise difference self - other."""
    return Point(self.x - other.x, self.y - other.y, self.z - other.z)
"""
Returns a new point which is the pointwise subtraction of other from
self.
"""
return Point(self.x - other.x,
self.y - other.y,
self.z - other.z) | [
"def",
"Minus",
"(",
"self",
",",
"other",
")",
":",
"return",
"Point",
"(",
"self",
".",
"x",
"-",
"other",
".",
"x",
",",
"self",
".",
"y",
"-",
"other",
".",
"y",
",",
"self",
".",
"z",
"-",
"other",
".",
"z",
")"
] | Returns a new point which is the pointwise subtraction of other from
self. | [
"Returns",
"a",
"new",
"point",
"which",
"is",
"the",
"pointwise",
"subtraction",
"of",
"other",
"from",
"self",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L87-L94 | train | 219,866 |
def Times(self, val):
    """Return a new Point with every component of self scaled by val."""
    scale = val
    return Point(self.x * scale, self.y * scale, self.z * scale)
"""
Returns a new point which is pointwise multiplied by val.
"""
return Point(self.x * val, self.y * val, self.z * val) | [
"def",
"Times",
"(",
"self",
",",
"val",
")",
":",
"return",
"Point",
"(",
"self",
".",
"x",
"*",
"val",
",",
"self",
".",
"y",
"*",
"val",
",",
"self",
".",
"z",
"*",
"val",
")"
] | Returns a new point which is pointwise multiplied by val. | [
"Returns",
"a",
"new",
"point",
"which",
"is",
"pointwise",
"multiplied",
"by",
"val",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L102-L106 | train | 219,867 |
def Ortho(self):
    """Return a unit-length Point orthogonal to this Point."""
    (largest, _) = self.LargestComponent()
    # Pick the axis cyclically "before" the largest component so the cross
    # product below cannot vanish.
    axis = (largest - 1) % 3
    # Slightly perturbed base vector avoids degenerate alignment.
    temp = Point(0.012, 0.053, 0.00457)
    if axis == 0:
        temp.x = 1
    elif axis == 1:
        temp.y = 1
    else:
        temp.z = 1
    return self.CrossProd(temp).Normalize()
"""Returns a unit-length point orthogonal to this point"""
(index, val) = self.LargestComponent()
index = index - 1
if index < 0:
index = 2
temp = Point(0.012, 0.053, 0.00457)
if index == 0:
temp.x = 1
elif index == 1:
temp.y = 1
elif index == 2:
temp.z = 1
return self.CrossProd(temp).Normalize() | [
"def",
"Ortho",
"(",
"self",
")",
":",
"(",
"index",
",",
"val",
")",
"=",
"self",
".",
"LargestComponent",
"(",
")",
"index",
"=",
"index",
"-",
"1",
"if",
"index",
"<",
"0",
":",
"index",
"=",
"2",
"temp",
"=",
"Point",
"(",
"0.012",
",",
"0.... | Returns a unit-length point orthogonal to this point | [
"Returns",
"a",
"unit",
"-",
"length",
"point",
"orthogonal",
"to",
"this",
"point"
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L145-L158 | train | 219,868 |
def CrossProd(self, other):
    """Return the cross product of self and other as a new Point."""
    cx = self.y * other.z - self.z * other.y
    cy = self.z * other.x - self.x * other.z
    cz = self.x * other.y - self.y * other.x
    return Point(cx, cy, cz)
"""
Returns the cross product of self and other.
"""
return Point(
self.y * other.z - self.z * other.y,
self.z * other.x - self.x * other.z,
self.x * other.y - self.y * other.x) | [
"def",
"CrossProd",
"(",
"self",
",",
"other",
")",
":",
"return",
"Point",
"(",
"self",
".",
"y",
"*",
"other",
".",
"z",
"-",
"self",
".",
"z",
"*",
"other",
".",
"y",
",",
"self",
".",
"z",
"*",
"other",
".",
"x",
"-",
"self",
".",
"x",
... | Returns the cross product of self and other. | [
"Returns",
"the",
"cross",
"product",
"of",
"self",
"and",
"other",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L160-L167 | train | 219,869 |
def Equals(self, other):
    """Return True if self and other are approximately equal component-wise."""
    pairs = ((self.x, other.x), (self.y, other.y), (self.z, other.z))
    for mine, theirs in pairs:
        if not self._approxEq(mine, theirs):
            return False
    return True
"""
Returns true of self and other are approximately equal.
"""
return (self._approxEq(self.x, other.x)
and self._approxEq(self.y, other.y)
and self._approxEq(self.z, other.z)) | [
"def",
"Equals",
"(",
"self",
",",
"other",
")",
":",
"return",
"(",
"self",
".",
"_approxEq",
"(",
"self",
".",
"x",
",",
"other",
".",
"x",
")",
"and",
"self",
".",
"_approxEq",
"(",
"self",
".",
"y",
",",
"other",
".",
"y",
")",
"and",
"self... | Returns true of self and other are approximately equal. | [
"Returns",
"true",
"of",
"self",
"and",
"other",
"are",
"approximately",
"equal",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L173-L179 | train | 219,870 |
def Angle(self, other):
    """Return the angle in radians between self and other."""
    sine = self.CrossProd(other).Norm2()
    cosine = self.DotProd(other)
    return math.atan2(sine, cosine)
"""
Returns the angle in radians between self and other.
"""
return math.atan2(self.CrossProd(other).Norm2(),
self.DotProd(other)) | [
"def",
"Angle",
"(",
"self",
",",
"other",
")",
":",
"return",
"math",
".",
"atan2",
"(",
"self",
".",
"CrossProd",
"(",
"other",
")",
".",
"Norm2",
"(",
")",
",",
"self",
".",
"DotProd",
"(",
"other",
")",
")"
] | Returns the angle in radians between self and other. | [
"Returns",
"the",
"angle",
"in",
"radians",
"between",
"self",
"and",
"other",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L181-L186 | train | 219,871 |
def ToLatLng(self):
    """Return the (latitude, longitude) in degrees that this point
    represents under a spherical Earth model."""
    horizontal = math.sqrt(self.x * self.x + self.y * self.y)
    lat_radians = math.atan2(self.z, horizontal)
    lng_radians = math.atan2(self.y, self.x)
    return (lat_radians * 180.0 / math.pi, lng_radians * 180.0 / math.pi)
return (rad_lat * 180.0 / math.pi, rad_lng * 180.0 / math.pi) | python | def ToLatLng(self):
"""
Returns that latitude and longitude that this point represents
under a spherical Earth model.
"""
rad_lat = math.atan2(self.z, math.sqrt(self.x * self.x + self.y * self.y))
rad_lng = math.atan2(self.y, self.x)
return (rad_lat * 180.0 / math.pi, rad_lng * 180.0 / math.pi) | [
"def",
"ToLatLng",
"(",
"self",
")",
":",
"rad_lat",
"=",
"math",
".",
"atan2",
"(",
"self",
".",
"z",
",",
"math",
".",
"sqrt",
"(",
"self",
".",
"x",
"*",
"self",
".",
"x",
"+",
"self",
".",
"y",
"*",
"self",
".",
"y",
")",
")",
"rad_lng",
... | Returns that latitude and longitude that this point represents
under a spherical Earth model. | [
"Returns",
"that",
"latitude",
"and",
"longitude",
"that",
"this",
"point",
"represents",
"under",
"a",
"spherical",
"Earth",
"model",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L188-L195 | train | 219,872 |
def FromLatLng(lat, lng):
    """Return a new Point for this latitude and longitude (in degrees)
    under a spherical Earth model."""
    lat_radians = lat * (math.pi / 180.0)
    lng_radians = lng * (math.pi / 180.0)
    cos_lat = math.cos(lat_radians)
    return Point(math.cos(lng_radians) * cos_lat,
                 math.sin(lng_radians) * cos_lat,
                 math.sin(lat_radians))
"""
Returns a new point representing this latitude and longitude under
a spherical Earth model.
"""
phi = lat * (math.pi / 180.0)
theta = lng * (math.pi / 180.0)
cosphi = math.cos(phi)
return Point(math.cos(theta) * cosphi,
math.sin(theta) * cosphi,
math.sin(phi)) | [
"def",
"FromLatLng",
"(",
"lat",
",",
"lng",
")",
":",
"phi",
"=",
"lat",
"*",
"(",
"math",
".",
"pi",
"/",
"180.0",
")",
"theta",
"=",
"lng",
"*",
"(",
"math",
".",
"pi",
"/",
"180.0",
")",
"cosphi",
"=",
"math",
".",
"cos",
"(",
"phi",
")",... | Returns a new point representing this latitude and longitude under
a spherical Earth model. | [
"Returns",
"a",
"new",
"point",
"representing",
"this",
"latitude",
"and",
"longitude",
"under",
"a",
"spherical",
"Earth",
"model",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L198-L208 | train | 219,873 |
def LengthMeters(self):
    """Return the length of this polyline in meters."""
    assert len(self._points) > 0
    total = 0
    # Sum the great-circle distance of each consecutive pair of points.
    for first, second in zip(self._points[:-1], self._points[1:]):
        total += first.GetDistanceMeters(second)
    return total
"""Return length of this polyline in meters."""
assert(len(self._points) > 0)
length = 0
for i in range(0, len(self._points) - 1):
length += self._points[i].GetDistanceMeters(self._points[i+1])
return length | [
"def",
"LengthMeters",
"(",
"self",
")",
":",
"assert",
"(",
"len",
"(",
"self",
".",
"_points",
")",
">",
"0",
")",
"length",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"self",
".",
"_points",
")",
"-",
"1",
")",
":",
"l... | Return length of this polyline in meters. | [
"Return",
"length",
"of",
"this",
"polyline",
"in",
"meters",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L299-L305 | train | 219,874 |
def CutAtClosestPoint(self, p):
    """Split this polyline at the point closest to p.

    Let x be the point on the polyline closest to p. Returns a pair of new
    polylines: the first runs from the beginning up to x, the second starts
    at x (its first point) and continues to the end.
    """
    (closest, index) = self.GetClosestPoint(p)
    before = Poly(self._points[:index + 1])
    after = Poly([closest] + self._points[index + 1:])
    return (before, after)
"""
Let x be the point on the polyline closest to p. Then
CutAtClosestPoint returns two new polylines, one representing
the polyline from the beginning up to x, and one representing
x onwards to the end of the polyline. x is the first point
returned in the second polyline.
"""
(closest, i) = self.GetClosestPoint(p)
tmp = [closest]
tmp.extend(self._points[i+1:])
return (Poly(self._points[0:i+1]),
Poly(tmp)) | [
"def",
"CutAtClosestPoint",
"(",
"self",
",",
"p",
")",
":",
"(",
"closest",
",",
"i",
")",
"=",
"self",
".",
"GetClosestPoint",
"(",
"p",
")",
"tmp",
"=",
"[",
"closest",
"]",
"tmp",
".",
"extend",
"(",
"self",
".",
"_points",
"[",
"i",
"+",
"1"... | Let x be the point on the polyline closest to p. Then
CutAtClosestPoint returns two new polylines, one representing
the polyline from the beginning up to x, and one representing
x onwards to the end of the polyline. x is the first point
returned in the second polyline. | [
"Let",
"x",
"be",
"the",
"point",
"on",
"the",
"polyline",
"closest",
"to",
"p",
".",
"Then",
"CutAtClosestPoint",
"returns",
"two",
"new",
"polylines",
"one",
"representing",
"the",
"polyline",
"from",
"the",
"beginning",
"up",
"to",
"x",
"and",
"one",
"r... | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L311-L324 | train | 219,875 |
def GreedyPolyMatchDist(self, shape):
    """
    Tries a greedy matching algorithm to match self to the
    given shape. Returns the maximum distance in meters of
    any point in self to its matched point in shape under the
    algorithm.

    Args:
      shape: a Poly object.
    """
    remaining = Poly(shape.GetPoints())
    max_radius = 0
    for point in self._points:
        # Cut at the closest point so successive matches advance
        # monotonically along the shape.
        remaining = remaining.CutAtClosestPoint(point)[1]
        dist = remaining.GetPoint(0).GetDistanceMeters(point)
        max_radius = max(max_radius, dist)
    return max_radius
"""
Tries a greedy matching algorithm to match self to the
given shape. Returns the maximum distance in meters of
any point in self to its matched point in shape under the
algorithm.
Args: shape, a Poly object.
"""
tmp_shape = Poly(shape.GetPoints())
max_radius = 0
for (i, point) in enumerate(self._points):
tmp_shape = tmp_shape.CutAtClosestPoint(point)[1]
dist = tmp_shape.GetPoint(0).GetDistanceMeters(point)
max_radius = max(max_radius, dist)
return max_radius | [
"def",
"GreedyPolyMatchDist",
"(",
"self",
",",
"shape",
")",
":",
"tmp_shape",
"=",
"Poly",
"(",
"shape",
".",
"GetPoints",
"(",
")",
")",
"max_radius",
"=",
"0",
"for",
"(",
"i",
",",
"point",
")",
"in",
"enumerate",
"(",
"self",
".",
"_points",
")... | Tries a greedy matching algorithm to match self to the
given shape. Returns the maximum distance in meters of
any point in self to its matched point in shape under the
algorithm.
Args: shape, a Poly object. | [
"Tries",
"a",
"greedy",
"matching",
"algorithm",
"to",
"match",
"self",
"to",
"the",
"given",
"shape",
".",
"Returns",
"the",
"maximum",
"distance",
"in",
"meters",
"of",
"any",
"point",
"in",
"self",
"to",
"its",
"matched",
"point",
"in",
"shape",
"under"... | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L326-L341 | train | 219,876 |
def AddPoly(self, poly, smart_duplicate_handling=True):
    """Add a new polyline to the collection.

    A duplicate shape id raises ShapeError when smart_duplicate_handling is
    False. Otherwise a near-exact duplicate keeps the existing name (the new
    shape still replaces the stored one), while a genuinely different shape
    is stored under a uniquified name.
    """
    shape_id = poly.GetName()
    inserted_name = shape_id
    if shape_id in self._name_to_shape:
        if not smart_duplicate_handling:
            raise ShapeError("Duplicate shape found: " + shape_id)
        print ("Warning: duplicate shape id being added to collection: " +
           shape_id)
        if poly.GreedyPolyMatchDist(self._name_to_shape[shape_id]) < 10:
            print("  (Skipping as it apears to be an exact duplicate)")
        else:
            print("  (Adding new shape variant with uniquified name)")
            inserted_name = "%s-%d" % (inserted_name, len(self._name_to_shape))
    self._name_to_shape[inserted_name] = poly
"""
Adds a new polyline to the collection.
"""
inserted_name = poly.GetName()
if poly.GetName() in self._name_to_shape:
if not smart_duplicate_handling:
raise ShapeError("Duplicate shape found: " + poly.GetName())
print ("Warning: duplicate shape id being added to collection: " +
poly.GetName())
if poly.GreedyPolyMatchDist(self._name_to_shape[poly.GetName()]) < 10:
print(" (Skipping as it apears to be an exact duplicate)")
else:
print(" (Adding new shape variant with uniquified name)")
inserted_name = "%s-%d" % (inserted_name, len(self._name_to_shape))
self._name_to_shape[inserted_name] = poly | [
"def",
"AddPoly",
"(",
"self",
",",
"poly",
",",
"smart_duplicate_handling",
"=",
"True",
")",
":",
"inserted_name",
"=",
"poly",
".",
"GetName",
"(",
")",
"if",
"poly",
".",
"GetName",
"(",
")",
"in",
"self",
".",
"_name_to_shape",
":",
"if",
"not",
"... | Adds a new polyline to the collection. | [
"Adds",
"a",
"new",
"polyline",
"to",
"the",
"collection",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L392-L408 | train | 219,877 |
def FindMatchingPolys(self, start_point, end_point, max_radius=150):
    """
    Return the polylines in the collection whose first endpoint is within
    max_radius meters of start_point and whose last endpoint is within
    max_radius meters of end_point.
    """
    return [shape
            for shape in self._name_to_shape.itervalues()
            if start_point.GetDistanceMeters(shape.GetPoint(0)) < max_radius
            and end_point.GetDistanceMeters(shape.GetPoint(-1)) < max_radius]
"""
Returns a list of polylines in the collection that have endpoints
within max_radius of the given start and end points.
"""
matches = []
for shape in self._name_to_shape.itervalues():
if start_point.GetDistanceMeters(shape.GetPoint(0)) < max_radius and \
end_point.GetDistanceMeters(shape.GetPoint(-1)) < max_radius:
matches.append(shape)
return matches | [
"def",
"FindMatchingPolys",
"(",
"self",
",",
"start_point",
",",
"end_point",
",",
"max_radius",
"=",
"150",
")",
":",
"matches",
"=",
"[",
"]",
"for",
"shape",
"in",
"self",
".",
"_name_to_shape",
".",
"itervalues",
"(",
")",
":",
"if",
"start_point",
... | Returns a list of polylines in the collection that have endpoints
within max_radius of the given start and end points. | [
"Returns",
"a",
"list",
"of",
"polylines",
"in",
"the",
"collection",
"that",
"have",
"endpoints",
"within",
"max_radius",
"of",
"the",
"given",
"start",
"and",
"end",
"points",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L413-L423 | train | 219,878 |
def _ReconstructPath(self, came_from, current_node):
    """
    Helper method for ShortestPath, to reconstruct path.

    Arguments:
      came_from: a dictionary mapping Point to (Point, Poly) tuples.
          This dictionary keeps track of the previous neighbor to a node, and
          the edge used to get from the previous neighbor to the node.
      current_node: the current Point in the path.
    Returns:
      A Poly that represents the path through the graph from the start of the
      search to current_node.
    """
    if current_node not in came_from:
        # Reached the start of the search: empty path.
        return Poly([], '')
    (prev_node, prev_edge) = came_from[current_node]
    # Orient the edge so it ends at current_node.
    if prev_edge.GetPoint(0) == current_node:
        prev_edge = prev_edge.Reversed()
    prefix = self._ReconstructPath(came_from, prev_node)
    return Poly.MergePolys([prefix, prev_edge], merge_point_threshold=0)
return Poly([], '') | python | def _ReconstructPath(self, came_from, current_node):
"""
Helper method for ShortestPath, to reconstruct path.
Arguments:
came_from: a dictionary mapping Point to (Point, Poly) tuples.
This dictionary keeps track of the previous neighbor to a node, and
the edge used to get from the previous neighbor to the node.
current_node: the current Point in the path.
Returns:
A Poly that represents the path through the graph from the start of the
search to current_node.
"""
if current_node in came_from:
(previous_node, previous_edge) = came_from[current_node]
if previous_edge.GetPoint(0) == current_node:
previous_edge = previous_edge.Reversed()
p = self._ReconstructPath(came_from, previous_node)
return Poly.MergePolys([p, previous_edge], merge_point_threshold=0)
else:
return Poly([], '') | [
"def",
"_ReconstructPath",
"(",
"self",
",",
"came_from",
",",
"current_node",
")",
":",
"if",
"current_node",
"in",
"came_from",
":",
"(",
"previous_node",
",",
"previous_edge",
")",
"=",
"came_from",
"[",
"current_node",
"]",
"if",
"previous_edge",
".",
"Get... | Helper method for ShortestPath, to reconstruct path.
Arguments:
came_from: a dictionary mapping Point to (Point, Poly) tuples.
This dictionary keeps track of the previous neighbor to a node, and
the edge used to get from the previous neighbor to the node.
current_node: the current Point in the path.
Returns:
A Poly that represents the path through the graph from the start of the
search to current_node. | [
"Helper",
"method",
"for",
"ShortestPath",
"to",
"reconstruct",
"path",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/shapelib.py#L505-L526 | train | 219,879 |
def AddTableColumn(self, table, column):
    """Append column to table's column list unless it is already present."""
    columns = self._table_columns[table]
    if column not in columns:
        columns.append(column)
"""Add column to table if it is not already there."""
if column not in self._table_columns[table]:
self._table_columns[table].append(column) | [
"def",
"AddTableColumn",
"(",
"self",
",",
"table",
",",
"column",
")",
":",
"if",
"column",
"not",
"in",
"self",
".",
"_table_columns",
"[",
"table",
"]",
":",
"self",
".",
"_table_columns",
"[",
"table",
"]",
".",
"append",
"(",
"column",
")"
] | Add column to table if it is not already there. | [
"Add",
"column",
"to",
"table",
"if",
"it",
"is",
"not",
"already",
"there",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L87-L90 | train | 219,880 |
def AddTableColumns(self, table, columns):
    """Add columns to table if they are not already there.

    Args:
      table: table name as a string
      columns: an iterable of column names
    """
    existing = self._table_columns.setdefault(table, [])
    for column in columns:
        if column not in existing:
            existing.append(column)
"""Add columns to table if they are not already there.
Args:
table: table name as a string
columns: an iterable of column names"""
table_columns = self._table_columns.setdefault(table, [])
for attr in columns:
if attr not in table_columns:
table_columns.append(attr) | [
"def",
"AddTableColumns",
"(",
"self",
",",
"table",
",",
"columns",
")",
":",
"table_columns",
"=",
"self",
".",
"_table_columns",
".",
"setdefault",
"(",
"table",
",",
"[",
"]",
")",
"for",
"attr",
"in",
"columns",
":",
"if",
"attr",
"not",
"in",
"ta... | Add columns to table if they are not already there.
Args:
table: table name as a string
columns: an iterable of column names | [
"Add",
"columns",
"to",
"table",
"if",
"they",
"are",
"not",
"already",
"there",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L92-L101 | train | 219,881 |
def AddAgency(self, name, url, timezone, agency_id=None):
    """Create an Agency with the given fields, add it to this schedule and
    return it."""
    new_agency = self._gtfs_factory.Agency(name, url, timezone, agency_id)
    self.AddAgencyObject(new_agency)
    return new_agency
"""Adds an agency to this schedule."""
agency = self._gtfs_factory.Agency(name, url, timezone, agency_id)
self.AddAgencyObject(agency)
return agency | [
"def",
"AddAgency",
"(",
"self",
",",
"name",
",",
"url",
",",
"timezone",
",",
"agency_id",
"=",
"None",
")",
":",
"agency",
"=",
"self",
".",
"_gtfs_factory",
".",
"Agency",
"(",
"name",
",",
"url",
",",
"timezone",
",",
"agency_id",
")",
"self",
"... | Adds an agency to this schedule. | [
"Adds",
"an",
"agency",
"to",
"this",
"schedule",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L157-L161 | train | 219,882 |
def GetDefaultAgency(self):
    """Return the default Agency.

    If no default has been set: with zero agencies a new one is created and
    made the default, with exactly one agency it becomes the default, and
    with more than one agency None is returned.
    """
    if not self._default_agency:
        agency_count = len(self._agencies)
        if agency_count == 0:
            self.NewDefaultAgency()
        elif agency_count == 1:
            self._default_agency = self._agencies.values()[0]
    return self._default_agency
"""Return the default Agency. If no default Agency has been set select the
default depending on how many Agency objects are in the Schedule. If there
are 0 make a new Agency the default, if there is 1 it becomes the default,
if there is more than 1 then return None.
"""
if not self._default_agency:
if len(self._agencies) == 0:
self.NewDefaultAgency()
elif len(self._agencies) == 1:
self._default_agency = self._agencies.values()[0]
return self._default_agency | [
"def",
"GetDefaultAgency",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_default_agency",
":",
"if",
"len",
"(",
"self",
".",
"_agencies",
")",
"==",
"0",
":",
"self",
".",
"NewDefaultAgency",
"(",
")",
"elif",
"len",
"(",
"self",
".",
"_agencies"... | Return the default Agency. If no default Agency has been set select the
default depending on how many Agency objects are in the Schedule. If there
are 0 make a new Agency the default, if there is 1 it becomes the default,
if there is more than 1 then return None. | [
"Return",
"the",
"default",
"Agency",
".",
"If",
"no",
"default",
"Agency",
"has",
"been",
"set",
"select",
"the",
"default",
"depending",
"on",
"how",
"many",
"Agency",
"objects",
"are",
"in",
"the",
"Schedule",
".",
"If",
"there",
"are",
"0",
"make",
"... | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L184-L195 | train | 219,883 |
def NewDefaultAgency(self, **kwargs):
    """Create a new Agency, make it this Schedule's default and return it."""
    new_agency = self._gtfs_factory.Agency(**kwargs)
    if not new_agency.agency_id:
        new_agency.agency_id = util.FindUniqueId(self._agencies)
    self._default_agency = new_agency
    # A freshly created agency may be blank, so skip validation here.
    self.SetDefaultAgency(new_agency, validate=False)
    return new_agency
"""Create a new Agency object and make it the default agency for this Schedule"""
agency = self._gtfs_factory.Agency(**kwargs)
if not agency.agency_id:
agency.agency_id = util.FindUniqueId(self._agencies)
self._default_agency = agency
self.SetDefaultAgency(agency, validate=False) # Blank agency won't validate
return agency | [
"def",
"NewDefaultAgency",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"agency",
"=",
"self",
".",
"_gtfs_factory",
".",
"Agency",
"(",
"*",
"*",
"kwargs",
")",
"if",
"not",
"agency",
".",
"agency_id",
":",
"agency",
".",
"agency_id",
"=",
"util",
... | Create a new Agency object and make it the default agency for this Schedule | [
"Create",
"a",
"new",
"Agency",
"object",
"and",
"make",
"it",
"the",
"default",
"agency",
"for",
"this",
"Schedule"
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L197-L204 | train | 219,884 |
google/transitfeed | transitfeed/schedule.py | Schedule.SetDefaultAgency | def SetDefaultAgency(self, agency, validate=True):
"""Make agency the default and add it to the schedule if not already added"""
assert isinstance(agency, self._gtfs_factory.Agency)
self._default_agency = agency
if agency.agency_id not in self._agencies:
self.AddAgencyObject(agency, validate=validate) | python | def SetDefaultAgency(self, agency, validate=True):
"""Make agency the default and add it to the schedule if not already added"""
assert isinstance(agency, self._gtfs_factory.Agency)
self._default_agency = agency
if agency.agency_id not in self._agencies:
self.AddAgencyObject(agency, validate=validate) | [
"def",
"SetDefaultAgency",
"(",
"self",
",",
"agency",
",",
"validate",
"=",
"True",
")",
":",
"assert",
"isinstance",
"(",
"agency",
",",
"self",
".",
"_gtfs_factory",
".",
"Agency",
")",
"self",
".",
"_default_agency",
"=",
"agency",
"if",
"agency",
".",... | Make agency the default and add it to the schedule if not already added | [
"Make",
"agency",
"the",
"default",
"and",
"add",
"it",
"to",
"the",
"schedule",
"if",
"not",
"already",
"added"
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L206-L211 | train | 219,885 |
google/transitfeed | transitfeed/schedule.py | Schedule.GetDefaultServicePeriod | def GetDefaultServicePeriod(self):
"""Return the default ServicePeriod. If no default ServicePeriod has been
set select the default depending on how many ServicePeriod objects are in
the Schedule. If there are 0 make a new ServicePeriod the default, if there
is 1 it becomes the default, if there is more than 1 then return None.
"""
if not self._default_service_period:
if len(self.service_periods) == 0:
self.NewDefaultServicePeriod()
elif len(self.service_periods) == 1:
self._default_service_period = self.service_periods.values()[0]
return self._default_service_period | python | def GetDefaultServicePeriod(self):
"""Return the default ServicePeriod. If no default ServicePeriod has been
set select the default depending on how many ServicePeriod objects are in
the Schedule. If there are 0 make a new ServicePeriod the default, if there
is 1 it becomes the default, if there is more than 1 then return None.
"""
if not self._default_service_period:
if len(self.service_periods) == 0:
self.NewDefaultServicePeriod()
elif len(self.service_periods) == 1:
self._default_service_period = self.service_periods.values()[0]
return self._default_service_period | [
"def",
"GetDefaultServicePeriod",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_default_service_period",
":",
"if",
"len",
"(",
"self",
".",
"service_periods",
")",
"==",
"0",
":",
"self",
".",
"NewDefaultServicePeriod",
"(",
")",
"elif",
"len",
"(",
... | Return the default ServicePeriod. If no default ServicePeriod has been
set select the default depending on how many ServicePeriod objects are in
the Schedule. If there are 0 make a new ServicePeriod the default, if there
is 1 it becomes the default, if there is more than 1 then return None. | [
"Return",
"the",
"default",
"ServicePeriod",
".",
"If",
"no",
"default",
"ServicePeriod",
"has",
"been",
"set",
"select",
"the",
"default",
"depending",
"on",
"how",
"many",
"ServicePeriod",
"objects",
"are",
"in",
"the",
"Schedule",
".",
"If",
"there",
"are",... | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L221-L232 | train | 219,886 |
google/transitfeed | transitfeed/schedule.py | Schedule.NewDefaultServicePeriod | def NewDefaultServicePeriod(self):
"""Create a new ServicePeriod object, make it the default service period and
return it. The default service period is used when you create a trip without
providing an explict service period. """
service_period = self._gtfs_factory.ServicePeriod()
service_period.service_id = util.FindUniqueId(self.service_periods)
# blank service won't validate in AddServicePeriodObject
self.SetDefaultServicePeriod(service_period, validate=False)
return service_period | python | def NewDefaultServicePeriod(self):
"""Create a new ServicePeriod object, make it the default service period and
return it. The default service period is used when you create a trip without
providing an explict service period. """
service_period = self._gtfs_factory.ServicePeriod()
service_period.service_id = util.FindUniqueId(self.service_periods)
# blank service won't validate in AddServicePeriodObject
self.SetDefaultServicePeriod(service_period, validate=False)
return service_period | [
"def",
"NewDefaultServicePeriod",
"(",
"self",
")",
":",
"service_period",
"=",
"self",
".",
"_gtfs_factory",
".",
"ServicePeriod",
"(",
")",
"service_period",
".",
"service_id",
"=",
"util",
".",
"FindUniqueId",
"(",
"self",
".",
"service_periods",
")",
"# blan... | Create a new ServicePeriod object, make it the default service period and
return it. The default service period is used when you create a trip without
providing an explict service period. | [
"Create",
"a",
"new",
"ServicePeriod",
"object",
"make",
"it",
"the",
"default",
"service",
"period",
"and",
"return",
"it",
".",
"The",
"default",
"service",
"period",
"is",
"used",
"when",
"you",
"create",
"a",
"trip",
"without",
"providing",
"an",
"explic... | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L234-L242 | train | 219,887 |
google/transitfeed | transitfeed/schedule.py | Schedule.AddStop | def AddStop(self, lat, lng, name, stop_id=None):
"""Add a stop to this schedule.
Args:
lat: Latitude of the stop as a float or string
lng: Longitude of the stop as a float or string
name: Name of the stop, which will appear in the feed
stop_id: stop_id of the stop or None, in which case a unique id is picked
Returns:
A new Stop object
"""
if stop_id is None:
stop_id = util.FindUniqueId(self.stops)
stop = self._gtfs_factory.Stop(stop_id=stop_id, lat=lat, lng=lng, name=name)
self.AddStopObject(stop)
return stop | python | def AddStop(self, lat, lng, name, stop_id=None):
"""Add a stop to this schedule.
Args:
lat: Latitude of the stop as a float or string
lng: Longitude of the stop as a float or string
name: Name of the stop, which will appear in the feed
stop_id: stop_id of the stop or None, in which case a unique id is picked
Returns:
A new Stop object
"""
if stop_id is None:
stop_id = util.FindUniqueId(self.stops)
stop = self._gtfs_factory.Stop(stop_id=stop_id, lat=lat, lng=lng, name=name)
self.AddStopObject(stop)
return stop | [
"def",
"AddStop",
"(",
"self",
",",
"lat",
",",
"lng",
",",
"name",
",",
"stop_id",
"=",
"None",
")",
":",
"if",
"stop_id",
"is",
"None",
":",
"stop_id",
"=",
"util",
".",
"FindUniqueId",
"(",
"self",
".",
"stops",
")",
"stop",
"=",
"self",
".",
... | Add a stop to this schedule.
Args:
lat: Latitude of the stop as a float or string
lng: Longitude of the stop as a float or string
name: Name of the stop, which will appear in the feed
stop_id: stop_id of the stop or None, in which case a unique id is picked
Returns:
A new Stop object | [
"Add",
"a",
"stop",
"to",
"this",
"schedule",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L340-L356 | train | 219,888 |
google/transitfeed | transitfeed/schedule.py | Schedule.AddStopObject | def AddStopObject(self, stop, problem_reporter=None):
"""Add Stop object to this schedule if stop_id is non-blank."""
assert stop._schedule is None
if not problem_reporter:
problem_reporter = self.problem_reporter
if not stop.stop_id:
return
if stop.stop_id in self.stops:
problem_reporter.DuplicateID('stop_id', stop.stop_id)
return
stop._schedule = weakref.proxy(self)
self.AddTableColumns('stops', stop._ColumnNames())
self.stops[stop.stop_id] = stop
if hasattr(stop, 'zone_id') and stop.zone_id:
self.fare_zones[stop.zone_id] = True | python | def AddStopObject(self, stop, problem_reporter=None):
"""Add Stop object to this schedule if stop_id is non-blank."""
assert stop._schedule is None
if not problem_reporter:
problem_reporter = self.problem_reporter
if not stop.stop_id:
return
if stop.stop_id in self.stops:
problem_reporter.DuplicateID('stop_id', stop.stop_id)
return
stop._schedule = weakref.proxy(self)
self.AddTableColumns('stops', stop._ColumnNames())
self.stops[stop.stop_id] = stop
if hasattr(stop, 'zone_id') and stop.zone_id:
self.fare_zones[stop.zone_id] = True | [
"def",
"AddStopObject",
"(",
"self",
",",
"stop",
",",
"problem_reporter",
"=",
"None",
")",
":",
"assert",
"stop",
".",
"_schedule",
"is",
"None",
"if",
"not",
"problem_reporter",
":",
"problem_reporter",
"=",
"self",
".",
"problem_reporter",
"if",
"not",
"... | Add Stop object to this schedule if stop_id is non-blank. | [
"Add",
"Stop",
"object",
"to",
"this",
"schedule",
"if",
"stop_id",
"is",
"non",
"-",
"blank",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L358-L375 | train | 219,889 |
google/transitfeed | transitfeed/schedule.py | Schedule.AddRoute | def AddRoute(self, short_name, long_name, route_type, route_id=None):
"""Add a route to this schedule.
Args:
short_name: Short name of the route, such as "71L"
long_name: Full name of the route, such as "NW 21st Ave/St Helens Rd"
route_type: A type such as "Tram", "Subway" or "Bus"
route_id: id of the route or None, in which case a unique id is picked
Returns:
A new Route object
"""
if route_id is None:
route_id = util.FindUniqueId(self.routes)
route = self._gtfs_factory.Route(short_name=short_name, long_name=long_name,
route_type=route_type, route_id=route_id)
route.agency_id = self.GetDefaultAgency().agency_id
self.AddRouteObject(route)
return route | python | def AddRoute(self, short_name, long_name, route_type, route_id=None):
"""Add a route to this schedule.
Args:
short_name: Short name of the route, such as "71L"
long_name: Full name of the route, such as "NW 21st Ave/St Helens Rd"
route_type: A type such as "Tram", "Subway" or "Bus"
route_id: id of the route or None, in which case a unique id is picked
Returns:
A new Route object
"""
if route_id is None:
route_id = util.FindUniqueId(self.routes)
route = self._gtfs_factory.Route(short_name=short_name, long_name=long_name,
route_type=route_type, route_id=route_id)
route.agency_id = self.GetDefaultAgency().agency_id
self.AddRouteObject(route)
return route | [
"def",
"AddRoute",
"(",
"self",
",",
"short_name",
",",
"long_name",
",",
"route_type",
",",
"route_id",
"=",
"None",
")",
":",
"if",
"route_id",
"is",
"None",
":",
"route_id",
"=",
"util",
".",
"FindUniqueId",
"(",
"self",
".",
"routes",
")",
"route",
... | Add a route to this schedule.
Args:
short_name: Short name of the route, such as "71L"
long_name: Full name of the route, such as "NW 21st Ave/St Helens Rd"
route_type: A type such as "Tram", "Subway" or "Bus"
route_id: id of the route or None, in which case a unique id is picked
Returns:
A new Route object | [
"Add",
"a",
"route",
"to",
"this",
"schedule",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L380-L397 | train | 219,890 |
google/transitfeed | transitfeed/schedule.py | Schedule.AddFareObject | def AddFareObject(self, fare, problem_reporter=None):
"""Deprecated. Please use AddFareAttributeObject."""
warnings.warn("No longer supported. The Fare class was renamed to "
"FareAttribute, and all related functions were renamed "
"accordingly.", DeprecationWarning)
self.AddFareAttributeObject(fare, problem_reporter) | python | def AddFareObject(self, fare, problem_reporter=None):
"""Deprecated. Please use AddFareAttributeObject."""
warnings.warn("No longer supported. The Fare class was renamed to "
"FareAttribute, and all related functions were renamed "
"accordingly.", DeprecationWarning)
self.AddFareAttributeObject(fare, problem_reporter) | [
"def",
"AddFareObject",
"(",
"self",
",",
"fare",
",",
"problem_reporter",
"=",
"None",
")",
":",
"warnings",
".",
"warn",
"(",
"\"No longer supported. The Fare class was renamed to \"",
"\"FareAttribute, and all related functions were renamed \"",
"\"accordingly.\"",
",",
"D... | Deprecated. Please use AddFareAttributeObject. | [
"Deprecated",
".",
"Please",
"use",
"AddFareAttributeObject",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L475-L480 | train | 219,891 |
google/transitfeed | transitfeed/schedule.py | Schedule.GetNearestStops | def GetNearestStops(self, lat, lon, n=1):
"""Return the n nearest stops to lat,lon"""
dist_stop_list = []
for s in self.stops.values():
# TODO: Use util.ApproximateDistanceBetweenStops?
dist = (s.stop_lat - lat)**2 + (s.stop_lon - lon)**2
if len(dist_stop_list) < n:
bisect.insort(dist_stop_list, (dist, s))
elif dist < dist_stop_list[-1][0]:
bisect.insort(dist_stop_list, (dist, s))
dist_stop_list.pop() # Remove stop with greatest distance
return [stop for dist, stop in dist_stop_list] | python | def GetNearestStops(self, lat, lon, n=1):
"""Return the n nearest stops to lat,lon"""
dist_stop_list = []
for s in self.stops.values():
# TODO: Use util.ApproximateDistanceBetweenStops?
dist = (s.stop_lat - lat)**2 + (s.stop_lon - lon)**2
if len(dist_stop_list) < n:
bisect.insort(dist_stop_list, (dist, s))
elif dist < dist_stop_list[-1][0]:
bisect.insort(dist_stop_list, (dist, s))
dist_stop_list.pop() # Remove stop with greatest distance
return [stop for dist, stop in dist_stop_list] | [
"def",
"GetNearestStops",
"(",
"self",
",",
"lat",
",",
"lon",
",",
"n",
"=",
"1",
")",
":",
"dist_stop_list",
"=",
"[",
"]",
"for",
"s",
"in",
"self",
".",
"stops",
".",
"values",
"(",
")",
":",
"# TODO: Use util.ApproximateDistanceBetweenStops?",
"dist",... | Return the n nearest stops to lat,lon | [
"Return",
"the",
"n",
"nearest",
"stops",
"to",
"lat",
"lon"
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L584-L595 | train | 219,892 |
google/transitfeed | transitfeed/schedule.py | Schedule.GetStopsInBoundingBox | def GetStopsInBoundingBox(self, north, east, south, west, n):
"""Return a sample of up to n stops in a bounding box"""
stop_list = []
for s in self.stops.values():
if (s.stop_lat <= north and s.stop_lat >= south and
s.stop_lon <= east and s.stop_lon >= west):
stop_list.append(s)
if len(stop_list) == n:
break
return stop_list | python | def GetStopsInBoundingBox(self, north, east, south, west, n):
"""Return a sample of up to n stops in a bounding box"""
stop_list = []
for s in self.stops.values():
if (s.stop_lat <= north and s.stop_lat >= south and
s.stop_lon <= east and s.stop_lon >= west):
stop_list.append(s)
if len(stop_list) == n:
break
return stop_list | [
"def",
"GetStopsInBoundingBox",
"(",
"self",
",",
"north",
",",
"east",
",",
"south",
",",
"west",
",",
"n",
")",
":",
"stop_list",
"=",
"[",
"]",
"for",
"s",
"in",
"self",
".",
"stops",
".",
"values",
"(",
")",
":",
"if",
"(",
"s",
".",
"stop_la... | Return a sample of up to n stops in a bounding box | [
"Return",
"a",
"sample",
"of",
"up",
"to",
"n",
"stops",
"in",
"a",
"bounding",
"box"
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L597-L606 | train | 219,893 |
google/transitfeed | transitfeed/schedule.py | Schedule.ValidateFeedStartAndExpirationDates | def ValidateFeedStartAndExpirationDates(self, problems, first_date, last_date,
first_date_origin, last_date_origin,
today):
"""Validate the start and expiration dates of the feed.
Issue a warning if it only starts in the future, or if
it expires within 60 days.
Args:
problems: The problem reporter object
first_date: A date object representing the first day the feed is active
last_date: A date object representing the last day the feed is active
today: A date object representing the date the validation is being run on
Returns:
None
"""
warning_cutoff = today + datetime.timedelta(days=60)
if last_date < warning_cutoff:
problems.ExpirationDate(time.mktime(last_date.timetuple()),
last_date_origin)
if first_date > today:
problems.FutureService(time.mktime(first_date.timetuple()),
first_date_origin) | python | def ValidateFeedStartAndExpirationDates(self, problems, first_date, last_date,
first_date_origin, last_date_origin,
today):
"""Validate the start and expiration dates of the feed.
Issue a warning if it only starts in the future, or if
it expires within 60 days.
Args:
problems: The problem reporter object
first_date: A date object representing the first day the feed is active
last_date: A date object representing the last day the feed is active
today: A date object representing the date the validation is being run on
Returns:
None
"""
warning_cutoff = today + datetime.timedelta(days=60)
if last_date < warning_cutoff:
problems.ExpirationDate(time.mktime(last_date.timetuple()),
last_date_origin)
if first_date > today:
problems.FutureService(time.mktime(first_date.timetuple()),
first_date_origin) | [
"def",
"ValidateFeedStartAndExpirationDates",
"(",
"self",
",",
"problems",
",",
"first_date",
",",
"last_date",
",",
"first_date_origin",
",",
"last_date_origin",
",",
"today",
")",
":",
"warning_cutoff",
"=",
"today",
"+",
"datetime",
".",
"timedelta",
"(",
"day... | Validate the start and expiration dates of the feed.
Issue a warning if it only starts in the future, or if
it expires within 60 days.
Args:
problems: The problem reporter object
first_date: A date object representing the first day the feed is active
last_date: A date object representing the last day the feed is active
today: A date object representing the date the validation is being run on
Returns:
None | [
"Validate",
"the",
"start",
"and",
"expiration",
"dates",
"of",
"the",
"feed",
".",
"Issue",
"a",
"warning",
"if",
"it",
"only",
"starts",
"in",
"the",
"future",
"or",
"if",
"it",
"expires",
"within",
"60",
"days",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L829-L852 | train | 219,894 |
google/transitfeed | transitfeed/schedule.py | Schedule.ValidateServiceGaps | def ValidateServiceGaps(self,
problems,
validation_start_date,
validation_end_date,
service_gap_interval):
"""Validate consecutive dates without service in the feed.
Issue a warning if it finds service gaps of at least
"service_gap_interval" consecutive days in the date range
[validation_start_date, last_service_date)
Args:
problems: The problem reporter object
validation_start_date: A date object representing the date from which the
validation should take place
validation_end_date: A date object representing the first day the feed is
active
service_gap_interval: An integer indicating how many consecutive days the
service gaps need to have for a warning to be issued
Returns:
None
"""
if service_gap_interval is None:
return
departures = self.GenerateDateTripsDeparturesList(validation_start_date,
validation_end_date)
# The first day without service of the _current_ gap
first_day_without_service = validation_start_date
# The last day without service of the _current_ gap
last_day_without_service = validation_start_date
consecutive_days_without_service = 0
for day_date, day_trips, _ in departures:
if day_trips == 0:
if consecutive_days_without_service == 0:
first_day_without_service = day_date
consecutive_days_without_service += 1
last_day_without_service = day_date
else:
if consecutive_days_without_service >= service_gap_interval:
problems.TooManyDaysWithoutService(first_day_without_service,
last_day_without_service,
consecutive_days_without_service)
consecutive_days_without_service = 0
# We have to check if there is a gap at the end of the specified date range
if consecutive_days_without_service >= service_gap_interval:
problems.TooManyDaysWithoutService(first_day_without_service,
last_day_without_service,
consecutive_days_without_service) | python | def ValidateServiceGaps(self,
problems,
validation_start_date,
validation_end_date,
service_gap_interval):
"""Validate consecutive dates without service in the feed.
Issue a warning if it finds service gaps of at least
"service_gap_interval" consecutive days in the date range
[validation_start_date, last_service_date)
Args:
problems: The problem reporter object
validation_start_date: A date object representing the date from which the
validation should take place
validation_end_date: A date object representing the first day the feed is
active
service_gap_interval: An integer indicating how many consecutive days the
service gaps need to have for a warning to be issued
Returns:
None
"""
if service_gap_interval is None:
return
departures = self.GenerateDateTripsDeparturesList(validation_start_date,
validation_end_date)
# The first day without service of the _current_ gap
first_day_without_service = validation_start_date
# The last day without service of the _current_ gap
last_day_without_service = validation_start_date
consecutive_days_without_service = 0
for day_date, day_trips, _ in departures:
if day_trips == 0:
if consecutive_days_without_service == 0:
first_day_without_service = day_date
consecutive_days_without_service += 1
last_day_without_service = day_date
else:
if consecutive_days_without_service >= service_gap_interval:
problems.TooManyDaysWithoutService(first_day_without_service,
last_day_without_service,
consecutive_days_without_service)
consecutive_days_without_service = 0
# We have to check if there is a gap at the end of the specified date range
if consecutive_days_without_service >= service_gap_interval:
problems.TooManyDaysWithoutService(first_day_without_service,
last_day_without_service,
consecutive_days_without_service) | [
"def",
"ValidateServiceGaps",
"(",
"self",
",",
"problems",
",",
"validation_start_date",
",",
"validation_end_date",
",",
"service_gap_interval",
")",
":",
"if",
"service_gap_interval",
"is",
"None",
":",
"return",
"departures",
"=",
"self",
".",
"GenerateDateTripsDe... | Validate consecutive dates without service in the feed.
Issue a warning if it finds service gaps of at least
"service_gap_interval" consecutive days in the date range
[validation_start_date, last_service_date)
Args:
problems: The problem reporter object
validation_start_date: A date object representing the date from which the
validation should take place
validation_end_date: A date object representing the first day the feed is
active
service_gap_interval: An integer indicating how many consecutive days the
service gaps need to have for a warning to be issued
Returns:
None | [
"Validate",
"consecutive",
"dates",
"without",
"service",
"in",
"the",
"feed",
".",
"Issue",
"a",
"warning",
"if",
"it",
"finds",
"service",
"gaps",
"of",
"at",
"least",
"service_gap_interval",
"consecutive",
"days",
"in",
"the",
"date",
"range",
"[",
"validat... | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L854-L907 | train | 219,895 |
google/transitfeed | transitfeed/schedule.py | Schedule.ValidateStopTimesForTrip | def ValidateStopTimesForTrip(self, problems, trip, stop_times):
"""Checks for the stop times of a trip.
Ensure that a trip does not have too many consecutive stop times with the
same departure/arrival time."""
prev_departure_secs = -1
consecutive_stop_times_with_potentially_same_time = 0
consecutive_stop_times_with_fully_specified_same_time = 0
def CheckSameTimeCount():
# More than five consecutive stop times with the same time? Seems not
# very likely (a stop every 10 seconds?). In practice, this warning
# affects about 0.5% of current GTFS trips.
if (prev_departure_secs != -1 and
consecutive_stop_times_with_fully_specified_same_time > 5):
problems.TooManyConsecutiveStopTimesWithSameTime(trip.trip_id,
consecutive_stop_times_with_fully_specified_same_time,
prev_departure_secs)
for index, st in enumerate(stop_times):
if st.arrival_secs is None or st.departure_secs is None:
consecutive_stop_times_with_potentially_same_time += 1
continue
if (prev_departure_secs == st.arrival_secs and
st.arrival_secs == st.departure_secs):
consecutive_stop_times_with_potentially_same_time += 1
consecutive_stop_times_with_fully_specified_same_time = (
consecutive_stop_times_with_potentially_same_time)
else:
CheckSameTimeCount()
consecutive_stop_times_with_potentially_same_time = 1
consecutive_stop_times_with_fully_specified_same_time = 1
prev_departure_secs = st.departure_secs
# Make sure to check one last time at the end
CheckSameTimeCount() | python | def ValidateStopTimesForTrip(self, problems, trip, stop_times):
"""Checks for the stop times of a trip.
Ensure that a trip does not have too many consecutive stop times with the
same departure/arrival time."""
prev_departure_secs = -1
consecutive_stop_times_with_potentially_same_time = 0
consecutive_stop_times_with_fully_specified_same_time = 0
def CheckSameTimeCount():
# More than five consecutive stop times with the same time? Seems not
# very likely (a stop every 10 seconds?). In practice, this warning
# affects about 0.5% of current GTFS trips.
if (prev_departure_secs != -1 and
consecutive_stop_times_with_fully_specified_same_time > 5):
problems.TooManyConsecutiveStopTimesWithSameTime(trip.trip_id,
consecutive_stop_times_with_fully_specified_same_time,
prev_departure_secs)
for index, st in enumerate(stop_times):
if st.arrival_secs is None or st.departure_secs is None:
consecutive_stop_times_with_potentially_same_time += 1
continue
if (prev_departure_secs == st.arrival_secs and
st.arrival_secs == st.departure_secs):
consecutive_stop_times_with_potentially_same_time += 1
consecutive_stop_times_with_fully_specified_same_time = (
consecutive_stop_times_with_potentially_same_time)
else:
CheckSameTimeCount()
consecutive_stop_times_with_potentially_same_time = 1
consecutive_stop_times_with_fully_specified_same_time = 1
prev_departure_secs = st.departure_secs
# Make sure to check one last time at the end
CheckSameTimeCount() | [
"def",
"ValidateStopTimesForTrip",
"(",
"self",
",",
"problems",
",",
"trip",
",",
"stop_times",
")",
":",
"prev_departure_secs",
"=",
"-",
"1",
"consecutive_stop_times_with_potentially_same_time",
"=",
"0",
"consecutive_stop_times_with_fully_specified_same_time",
"=",
"0",... | Checks for the stop times of a trip.
Ensure that a trip does not have too many consecutive stop times with the
same departure/arrival time. | [
"Checks",
"for",
"the",
"stop",
"times",
"of",
"a",
"trip",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/transitfeed/schedule.py#L1169-L1203 | train | 219,896 |
google/transitfeed | gtfsscheduleviewer/marey_graph.py | MareyGraph.Draw | def Draw(self, stoplist=None, triplist=None, height=520):
"""Main interface for drawing the marey graph.
If called without arguments, the data generated in the previous call
will be used. New decorators can be added between calls.
Args:
# Class Stop is defined in transitfeed.py
stoplist: [Stop, Stop, ...]
# Class Trip is defined in transitfeed.py
triplist: [Trip, Trip, ...]
Returns:
# A string that contain a svg/xml web-page with a marey graph.
" <svg width="1440" height="520" version="1.1" ... "
"""
output = str()
if not triplist:
triplist = []
if not stoplist:
stoplist = []
if not self._cache or triplist or stoplist:
self._gheight = height
self._tlist=triplist
self._slist=stoplist
self._decorators = []
self._stations = self._BuildStations(stoplist)
self._cache = "%s %s %s %s" % (self._DrawBox(),
self._DrawHours(),
self._DrawStations(),
self._DrawTrips(triplist))
output = "%s %s %s %s" % (self._DrawHeader(),
self._cache,
self._DrawDecorators(),
self._DrawFooter())
return output | python | def Draw(self, stoplist=None, triplist=None, height=520):
"""Main interface for drawing the marey graph.
If called without arguments, the data generated in the previous call
will be used. New decorators can be added between calls.
Args:
# Class Stop is defined in transitfeed.py
stoplist: [Stop, Stop, ...]
# Class Trip is defined in transitfeed.py
triplist: [Trip, Trip, ...]
Returns:
# A string that contain a svg/xml web-page with a marey graph.
" <svg width="1440" height="520" version="1.1" ... "
"""
output = str()
if not triplist:
triplist = []
if not stoplist:
stoplist = []
if not self._cache or triplist or stoplist:
self._gheight = height
self._tlist=triplist
self._slist=stoplist
self._decorators = []
self._stations = self._BuildStations(stoplist)
self._cache = "%s %s %s %s" % (self._DrawBox(),
self._DrawHours(),
self._DrawStations(),
self._DrawTrips(triplist))
output = "%s %s %s %s" % (self._DrawHeader(),
self._cache,
self._DrawDecorators(),
self._DrawFooter())
return output | [
"def",
"Draw",
"(",
"self",
",",
"stoplist",
"=",
"None",
",",
"triplist",
"=",
"None",
",",
"height",
"=",
"520",
")",
":",
"output",
"=",
"str",
"(",
")",
"if",
"not",
"triplist",
":",
"triplist",
"=",
"[",
"]",
"if",
"not",
"stoplist",
":",
"s... | Main interface for drawing the marey graph.
If called without arguments, the data generated in the previous call
will be used. New decorators can be added between calls.
Args:
# Class Stop is defined in transitfeed.py
stoplist: [Stop, Stop, ...]
# Class Trip is defined in transitfeed.py
triplist: [Trip, Trip, ...]
Returns:
# A string that contain a svg/xml web-page with a marey graph.
" <svg width="1440" height="520" version="1.1" ... " | [
"Main",
"interface",
"for",
"drawing",
"the",
"marey",
"graph",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/gtfsscheduleviewer/marey_graph.py#L73-L112 | train | 219,897 |
google/transitfeed | gtfsscheduleviewer/marey_graph.py | MareyGraph._BuildStations | def _BuildStations(self, stoplist):
"""Dispatches the best algorithm for calculating station line position.
Args:
# Class Stop is defined in transitfeed.py
stoplist: [Stop, Stop, ...]
# Class Trip is defined in transitfeed.py
triplist: [Trip, Trip, ...]
Returns:
# One integer y-coordinate for each station normalized between
# 0 and X, where X is the height of the graph in pixels
[0, 33, 140, ... , X]
"""
stations = []
dists = self._EuclidianDistances(stoplist)
stations = self._CalculateYLines(dists)
return stations | python | def _BuildStations(self, stoplist):
"""Dispatches the best algorithm for calculating station line position.
Args:
# Class Stop is defined in transitfeed.py
stoplist: [Stop, Stop, ...]
# Class Trip is defined in transitfeed.py
triplist: [Trip, Trip, ...]
Returns:
# One integer y-coordinate for each station normalized between
# 0 and X, where X is the height of the graph in pixels
[0, 33, 140, ... , X]
"""
stations = []
dists = self._EuclidianDistances(stoplist)
stations = self._CalculateYLines(dists)
return stations | [
"def",
"_BuildStations",
"(",
"self",
",",
"stoplist",
")",
":",
"stations",
"=",
"[",
"]",
"dists",
"=",
"self",
".",
"_EuclidianDistances",
"(",
"stoplist",
")",
"stations",
"=",
"self",
".",
"_CalculateYLines",
"(",
"dists",
")",
"return",
"stations"
] | Dispatches the best algorithm for calculating station line position.
Args:
# Class Stop is defined in transitfeed.py
stoplist: [Stop, Stop, ...]
# Class Trip is defined in transitfeed.py
triplist: [Trip, Trip, ...]
Returns:
# One integer y-coordinate for each station normalized between
# 0 and X, where X is the height of the graph in pixels
[0, 33, 140, ... , X] | [
"Dispatches",
"the",
"best",
"algorithm",
"for",
"calculating",
"station",
"line",
"position",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/gtfsscheduleviewer/marey_graph.py#L196-L213 | train | 219,898 |
google/transitfeed | gtfsscheduleviewer/marey_graph.py | MareyGraph._EuclidianDistances | def _EuclidianDistances(self,slist):
"""Calculate euclidian distances between stops.
Uses the stoplists long/lats to approximate distances
between stations and build a list with y-coordinates for the
horizontal lines in the graph.
Args:
# Class Stop is defined in transitfeed.py
stoplist: [Stop, Stop, ...]
Returns:
# One integer for each pair of stations
# indicating the approximate distance
[0,33,140, ... ,X]
"""
e_dists2 = [transitfeed.ApproximateDistanceBetweenStops(stop, tail) for
(stop,tail) in itertools.izip(slist, slist[1:])]
return e_dists2 | python | def _EuclidianDistances(self,slist):
"""Calculate euclidian distances between stops.
Uses the stoplists long/lats to approximate distances
between stations and build a list with y-coordinates for the
horizontal lines in the graph.
Args:
# Class Stop is defined in transitfeed.py
stoplist: [Stop, Stop, ...]
Returns:
# One integer for each pair of stations
# indicating the approximate distance
[0,33,140, ... ,X]
"""
e_dists2 = [transitfeed.ApproximateDistanceBetweenStops(stop, tail) for
(stop,tail) in itertools.izip(slist, slist[1:])]
return e_dists2 | [
"def",
"_EuclidianDistances",
"(",
"self",
",",
"slist",
")",
":",
"e_dists2",
"=",
"[",
"transitfeed",
".",
"ApproximateDistanceBetweenStops",
"(",
"stop",
",",
"tail",
")",
"for",
"(",
"stop",
",",
"tail",
")",
"in",
"itertools",
".",
"izip",
"(",
"slist... | Calculate euclidian distances between stops.
Uses the stoplists long/lats to approximate distances
between stations and build a list with y-coordinates for the
horizontal lines in the graph.
Args:
# Class Stop is defined in transitfeed.py
stoplist: [Stop, Stop, ...]
Returns:
# One integer for each pair of stations
# indicating the approximate distance
[0,33,140, ... ,X] | [
"Calculate",
"euclidian",
"distances",
"between",
"stops",
"."
] | eb2991a3747ba541b2cb66502b305b6304a1f85f | https://github.com/google/transitfeed/blob/eb2991a3747ba541b2cb66502b305b6304a1f85f/gtfsscheduleviewer/marey_graph.py#L215-L234 | train | 219,899 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.