| repo (string, 7–55 chars) | path (string, 4–127 chars) | func_name (string, 1–88 chars) | original_string (string, 75–19.8k chars) | language (string, 1 class) | code (string, 75–19.8k chars) | code_tokens (list) | docstring (string, 3–17.3k chars) | docstring_tokens (list) | sha (string, 40 chars) | url (string, 87–242 chars) | partition (string, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|
pyroscope/pyrocore
|
docs/examples/rt-heatmap.py
|
HeatMap.heatmap
|
def heatmap(self, df, imagefile):
""" Create the heat map.
"""
import seaborn as sns
import matplotlib.ticker as tkr
import matplotlib.pyplot as plt
from matplotlib.colors import LinearSegmentedColormap
sns.set()
with sns.axes_style('whitegrid'):
fig, ax = plt.subplots(figsize=(5, 11)) # inches
cmax = max(df[self.args[2]].max(), self.CMAP_MIN_MAX)
csteps = {
0.0: 'darkred', 0.3/cmax: 'red', 0.6/cmax: 'orangered', 0.9/cmax: 'coral',
1.0/cmax: 'skyblue', 1.5/cmax: 'blue', 1.9/cmax: 'darkblue',
2.0/cmax: 'darkgreen', 3.0/cmax: 'green',
(self.CMAP_MIN_MAX - .1)/cmax: 'palegreen', 1.0: 'yellow'}
cmap = LinearSegmentedColormap.from_list('RdGrYl', sorted(csteps.items()), N=256)
dataset = df.pivot(*self.args)
sns.heatmap(dataset, mask=dataset.isnull(), annot=False, linewidths=.5, square=True, ax=ax, cmap=cmap,
annot_kws=dict(stretch='condensed'))
ax.tick_params(axis='y', labelrotation=30, labelsize=8)
# ax.get_yaxis().set_major_formatter(tkr.FuncFormatter(lambda x, p: x))
plt.savefig(imagefile)
|
python
|
(same as original_string) |
["def", "heatmap", "(", "self", ",", "df", ",", "imagefile", ")", ":", …] |
Create the heat map.
|
["Create", "the", "heat", "map", "."] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/docs/examples/rt-heatmap.py#L55-L81
|
train
|
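The color stops in `csteps` above are absolute values on the data scale, divided by `cmax` so they land in the 0..1 range that `LinearSegmentedColormap.from_list` expects. A minimal sketch with a hypothetical `cmax`:

```python
# Hypothetical cmax, for illustration; stops must be sorted and span 0.0..1.0.
cmax = 10.0
csteps = {0.0: 'darkred', 1.0 / cmax: 'skyblue', 1.0: 'yellow'}
print(sorted(csteps.items()))
# [(0.0, 'darkred'), (0.1, 'skyblue'), (1.0, 'yellow')]
```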
pyroscope/pyrocore
|
src/pyrocore/ui/categories.py
|
CategoryManager.mainloop
|
def mainloop(self):
""" Manage category views.
"""
# Get client state
proxy = config.engine.open()
views = [x for x in sorted(proxy.view.list()) if x.startswith(self.PREFIX)]
current_view = real_current_view = proxy.ui.current_view()
if current_view not in views:
if views:
current_view = views[0]
else:
raise error.UserError("There are no '{}*' views defined at all!".format(self.PREFIX))
# Check options
if self.options.list:
for name in sorted(views):
print("{} {:5d} {}".format(
'*' if name == real_current_view else ' ',
proxy.view.size(xmlrpc.NOHASH, name),
name[self.PREFIX_LEN:]))
elif self.options.next or self.options.prev or self.options.update:
# Determine next in line
if self.options.update:
new_view = current_view
else:
new_view = (views * 2)[views.index(current_view) + (1 if self.options.next else -1)]
self.LOG.info("{} category view '{}'.".format(
"Updating" if self.options.update else "Switching to", new_view))
# Update and switch to filtered view
proxy.pyro.category.update(xmlrpc.NOHASH, new_view[self.PREFIX_LEN:])
proxy.ui.current_view.set(new_view)
else:
self.LOG.info("Current category view is '{}'.".format(current_view[self.PREFIX_LEN:]))
self.LOG.info("Use '--help' to get usage information.")
|
python
|
(same as original_string) |
["def", "mainloop", "(", "self", ")", ":", …] |
Manage category views.
|
["Manage", "category", "views", "."] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/ui/categories.py#L53-L91
|
train
|
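The `(views * 2)[...]` expression above is a compact way to cycle through the view list without modular arithmetic: doubling the list lets the forward step wrap past the end, while Python's negative indexing handles the backward step. A small sketch with hypothetical view names:

```python
views = ["rtcontrol_a", "rtcontrol_b", "rtcontrol_c"]  # hypothetical
current = "rtcontrol_c"
print((views * 2)[views.index(current) + 1])  # 'rtcontrol_a' -- wraps forward
print((views * 2)[views.index(current) - 1])  # 'rtcontrol_b' -- previous view
```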
pyroscope/pyrocore
|
src/pyrocore/data/config/config.py
|
_custom_fields
|
def _custom_fields():
""" Yield custom field definitions.
"""
# Import some commonly needed modules
import os
from pyrocore.torrent import engine, matching
from pyrocore.util import fmt
# PUT CUSTOM FIELD CODE HERE
# Disk space check (as an example)
# see https://pyrocore.readthedocs.io/en/latest/custom.html#has-room
def has_room(obj):
"Check disk space."
pathname = obj.path
if pathname and not os.path.exists(pathname):
pathname = os.path.dirname(pathname)
if pathname and os.path.exists(pathname):
stats = os.statvfs(pathname)
return (stats.f_bavail * stats.f_frsize - int(diskspace_threshold_mb) * 1024**2
> obj.size * (1.0 - obj.done / 100.0))
else:
return None
yield engine.DynamicField(engine.untyped, "has_room",
"check whether the download will fit on its target device",
matcher=matching.BoolFilter, accessor=has_room,
formatter=lambda val: "OK" if val else "??" if val is None else "NO")
globals().setdefault("diskspace_threshold_mb", "500")
|
python
|
(same as original_string) |
["def", "_custom_fields", "(", ")", ":", …] |
Yield custom field definitions.
|
["Yield", "custom", "field", "definitions", "."] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/data/config/config.py#L7-L35
|
train
|
pyroscope/pyrocore
|
src/pyrocore/util/stats.py
|
engine_data
|
def engine_data(engine):
""" Get important performance data and metadata from rTorrent.
"""
views = ("default", "main", "started", "stopped", "complete",
"incomplete", "seeding", "leeching", "active", "messages")
methods = [
"throttle.global_up.rate", "throttle.global_up.max_rate",
"throttle.global_down.rate", "throttle.global_down.max_rate",
]
# Get data via multicall
proxy = engine.open()
calls = [dict(methodName=method, params=[]) for method in methods] \
+ [dict(methodName="view.size", params=['', view]) for view in views]
result = proxy.system.multicall(calls, flatten=True)
# Build result object
data = dict(
now = time.time(),
engine_id = engine.engine_id,
versions = engine.versions,
uptime = engine.uptime,
upload = [result[0], result[1]],
download = [result[2], result[3]],
views = dict([(name, result[4+i])
for i, name in enumerate(views)
]),
)
return data
|
python
|
(same as original_string) |
["def", "engine_data", "(", "engine", ")", ":", …] |
Get important performance data and metadata from rTorrent.
|
["Get", "important", "performance", "data", "and", "metadata", "from", "rTorrent", "."] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/stats.py#L25-L54
|
train
|
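The `system.multicall` payload built above is a list of `{methodName, params}` dicts; the four throttle calls come first, which is why view sizes start at `result[4]`. A self-contained sketch of the request shape:

```python
views = ("default", "main")  # shortened for illustration
methods = ["throttle.global_up.rate", "throttle.global_up.max_rate"]
calls = ([dict(methodName=method, params=[]) for method in methods]
         + [dict(methodName="view.size", params=['', view]) for view in views])
print(calls[-1])  # {'methodName': 'view.size', 'params': ['', 'main']}
```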
pyroscope/pyrocore
|
src/pyrocore/util/osmagic.py
|
_write_pidfile
|
def _write_pidfile(pidfile):
""" Write file with current process ID.
"""
pid = str(os.getpid())
handle = open(pidfile, 'w')
try:
handle.write("%s\n" % pid)
finally:
handle.close()
|
python
|
(same as original_string) |
["def", "_write_pidfile", "(", "pidfile", ")", ":", …] |
Write file with current process ID.
|
["Write", "file", "with", "current", "process", "ID", "."] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/osmagic.py#L30-L38
|
train
|
pyroscope/pyrocore
|
src/pyrocore/util/osmagic.py
|
guard
|
def guard(pidfile, guardfile=None):
""" Raise an EnvironmentError when the "guardfile" doesn't exist, or
the process with the ID found in "pidfile" is still active.
"""
# Check guard
if guardfile and not os.path.exists(guardfile):
raise EnvironmentError("Guard file '%s' not found, won't start!" % guardfile)
if os.path.exists(pidfile):
running, pid = check_process(pidfile)
if running:
raise EnvironmentError("Daemon process #%d still running, won't start!" % pid)
else:
logging.getLogger("daemonize").info("Process #%d disappeared, continuing..." % pid)
# Keep race condition window small, by immediately writing launcher process ID
_write_pidfile(pidfile)
|
python
|
(same as original_string) |
["def", "guard", "(", "pidfile", ",", "guardfile", "=", "None", ")", ":", …] |
Raise an EnvironmentError when the "guardfile" doesn't exist, or
the process with the ID found in "pidfile" is still active.
|
["Raise", "an", "EnvironmentError", "when", "the", "guardfile", "doesn", "t", "exist", "or", "the", "process", "with", "the", "ID", "found", "in", "pidfile", "is", "still", "active", "."] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/osmagic.py#L70-L86
|
train
|
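A usage sketch of the guard/pidfile handshake, assuming `pyrocore` is installed so `guard` and its (not shown here) `check_process` helper are importable; the second call refuses to start while the first caller's PID is still alive:

```python
import os, tempfile
from pyrocore.util.osmagic import guard  # assumes pyrocore is installed

pidfile = os.path.join(tempfile.mkdtemp(), "demo.pid")
guard(pidfile)  # writes our own PID into the fresh pidfile
try:
    guard(pidfile)  # our own PID is alive, so this refuses to start
except EnvironmentError as exc:
    print(exc)  # Daemon process #<pid> still running, won't start!
```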
pyroscope/pyrocore
|
src/pyrocore/util/osmagic.py
|
daemonize
|
def daemonize(pidfile=None, logfile=None, sync=True):
""" Fork the process into the background.
@param pidfile: Optional PID file path.
@param sync: Wait for parent process to disappear?
@param logfile: Optional name of stdin/stderr log file or stream.
"""
log = logging.getLogger("daemonize")
ppid = os.getpid()
try:
pid = os.fork()
if pid > 0:
log.debug("Parent exiting (PID %d, CHILD %d)" % (ppid, pid))
sys.exit(0)
except OSError as exc:
log.critical("fork #1 failed (PID %d): (%d) %s\n" % (os.getpid(), exc.errno, exc.strerror))
sys.exit(1)
##os.chdir("/")
##os.umask(0022)
os.setsid()
try:
pid = os.fork()
if pid > 0:
log.debug("Session leader exiting (PID %d, PPID %d, DEMON %d)" % (os.getpid(), ppid, pid))
sys.exit(0)
except OSError as exc:
log.critical("fork #2 failed (PID %d): (%d) %s\n" % (os.getpid(), exc.errno, exc.strerror))
sys.exit(1)
if pidfile:
_write_pidfile(pidfile)
def sig_term(*dummy):
"Handler for SIGTERM."
sys.exit(0)
stdin = open("/dev/null", "r")
os.dup2(stdin.fileno(), sys.stdin.fileno())
signal.signal(signal.SIGTERM, sig_term)
if logfile:
try:
logfile + ""
except TypeError:
if logfile.fileno() != sys.stdout.fileno():
os.dup2(logfile.fileno(), sys.stdout.fileno())
if logfile.fileno() != sys.stderr.fileno():
os.dup2(logfile.fileno(), sys.stderr.fileno())
else:
log.debug("Redirecting stdout / stderr to %r" % logfile)
loghandle = open(logfile, "a+")
os.dup2(loghandle.fileno(), sys.stdout.fileno())
os.dup2(loghandle.fileno(), sys.stderr.fileno())
loghandle.close()
if sync:
# Wait for 5 seconds at most, in 10ms steps
polling = 5, .01
for _ in range(int(polling[0] * 1 / polling[1])):
try:
os.kill(ppid, 0)
except OSError:
break
else:
time.sleep(polling[1])
log.debug("Process detached (PID %d)" % os.getpid())
|
python
|
(same as original_string) |
["def", "daemonize", "(", "pidfile", "=", "None", ",", "logfile", "=", "None", ",", "sync", "=", "True", ")", ":", …] |
Fork the process into the background.
@param pidfile: Optional PID file path.
@param sync: Wait for parent process to disappear?
@param logfile: Optional name of stdin/stderr log file or stream.
|
["Fork", "the", "process", "into", "the", "background", "."] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/osmagic.py#L89-L158
|
train
|
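The `logfile + ""` probe above is duck typing: concatenation with an empty string succeeds only for strings, so a `TypeError` signals an already-open stream whose file descriptor can be `dup2`'d directly. A minimal sketch of the same test:

```python
def is_stringlike(obj):
    try:
        obj + ""  # only strings support this concatenation
    except TypeError:
        return False
    return True

print(is_stringlike("daemon.log"))       # True  -> treat as a path, open it
print(is_stringlike(open("/dev/null")))  # False -> treat as an open stream
```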
pyroscope/pyrocore
|
src/pyrocore/util/algo.py
|
flatten
|
def flatten(nested, containers=(list, tuple)):
""" Flatten a nested list in-place and return it.
"""
flat = list(nested) # handle iterators / generators
i = 0
while i < len(flat):
while isinstance(flat[i], containers):
if not flat[i]:
# kill empty list
flat.pop(i)
# inspect new 'i'th element in outer loop
i -= 1
break
else:
flat[i:i + 1] = (flat[i])
# 'i'th element is scalar, proceed
i += 1
return flat
|
python
|
(same as original_string) |
["def", "flatten", "(", "nested", ",", "containers", "=", "(", "list", ",", "tuple", ")", ")", ":", …] |
Flatten a nested list in-place and return it.
|
["Flatten", "a", "nested", "list", "in", "-", "place", "and", "return", "it", "."] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/algo.py#L42-L62
|
train
|
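A usage sketch, assuming `pyrocore` is installed so the function is importable. Note that `list(nested)` makes a copy first, and that strings stay whole because only `list` and `tuple` count as containers by default:

```python
from pyrocore.util.algo import flatten  # assumes pyrocore is installed

print(flatten([1, [2, (3, [])], 4]))     # [1, 2, 3, 4] -- empty lists vanish
print(flatten(iter([["ab"], ("cd",)])))  # ['ab', 'cd'] -- strings stay whole
```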
pyroscope/pyrocore
|
pavement.py
|
gendocs
|
def gendocs():
"create some doc pages automatically"
helppage = path("docs/references-cli-usage.rst")
content = [
".. automatically generated using 'paver gendocs'.",
"",
".. contents::",
" :local:",
"",
".. note::",
"",
" The help output presented here applies to version ``%s`` of the tools."
% sh("pyroadmin --version", capture=True).split()[1],
"",
]
for tool in sorted(project.entry_points["console_scripts"]):
tool, _ = tool.split(None, 1)
content.extend([
".. _cli-usage-%s:" % tool,
"",
tool,
'^' * len(tool),
"",
"::",
"",
])
help_opt = "--help-fields --config-dir /tmp" if tool == "rtcontrol" else "--help"
help_txt = sh("%s -q %s" % (tool, help_opt), capture=True, ignore_error=True).splitlines()
content.extend(' ' + i for i in help_txt
if ' on Python ' not in i and 'Copyright (c) 200' not in i
and 'see the full documentation' not in i
and ' https://pyrocore.readthedocs.io/' not in i)
content.extend([
"",
])
content = [line.rstrip() for line in content if all(
i not in line for i in (", Copyright (c) ", "Total time: ", "Configuration file '/tmp/")
)]
content = [line for line, succ in zip(content, content[1:] + ['']) if line or succ] # filter twin empty lines
helppage.write_lines(content)
|
python
|
(same as original_string) |
["def", "gendocs", "(", ")", ":", …] |
create some doc pages automatically
|
["create", "some", "doc", "pages", "automatically"] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/pavement.py#L196-L237
|
train
|
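The last list comprehension above pairs each line with its successor via `zip(content, content[1:] + [''])` and keeps a line if either is non-empty, which collapses runs of blank lines into a single one. The idiom in isolation:

```python
content = ["a", "", "", "b", ""]
content = [line for line, succ in zip(content, content[1:] + ['']) if line or succ]
print(content)  # ['a', '', 'b'] -- twin blanks and the trailing blank are gone
```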
pyroscope/pyrocore
|
pavement.py
|
watchdog_pid
|
def watchdog_pid():
"""Get watchdog PID via ``netstat``."""
result = sh('netstat -tulpn 2>/dev/null | grep 127.0.0.1:{:d}'
.format(SPHINX_AUTOBUILD_PORT), capture=True, ignore_error=True)
pid = result.strip()
pid = pid.split()[-1] if pid else None
pid = pid.split('/', 1)[0] if pid and pid != '-' else None
return pid
|
python
|
(same as original_string) |
["def", "watchdog_pid", "(", ")", ":", …] |
Get watchdog PID via ``netstat``.
|
["Get", "watchdog", "PID", "via", "netstat", "."] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/pavement.py#L262-L270
|
train
|
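How the three parsing steps behave on a hypothetical `netstat -tulpn` output line; each step degrades to `None` when the process column is missing or shows `-`:

```python
result = 'tcp  0  0 127.0.0.1:8340  0.0.0.0:*  LISTEN  12345/python \n'  # hypothetical
pid = result.strip()
pid = pid.split()[-1] if pid else None              # '12345/python'
pid = pid.split('/', 1)[0] if pid and pid != '-' else None
print(pid)                                          # '12345'
```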
pyroscope/pyrocore
|
pavement.py
|
autodocs
|
def autodocs():
"create Sphinx docs locally, and start a watchdog"
build_dir = path('docs/_build')
index_html = build_dir / 'html/index.html'
if build_dir.exists():
build_dir.rmtree()
with pushd("docs"):
print "\n*** Generating API doc ***\n"
sh("sphinx-apidoc -o apidoc -f -T -M ../src/pyrocore")
sh("sphinx-apidoc -o apidoc -f -T -M $(dirname $(python -c 'import tempita; print(tempita.__file__)'))")
print "\n*** Generating HTML doc ***\n"
sh('nohup %s/Makefile SPHINXBUILD="sphinx-autobuild -p %d'
' -i \'.*\' -i \'*.log\' -i \'*.png\' -i \'*.txt\'" html >autobuild.log 2>&1 &'
% (os.getcwd(), SPHINX_AUTOBUILD_PORT))
for i in range(25):
time.sleep(2.5)
pid = watchdog_pid()
if pid:
sh("touch docs/index.rst")
sh('ps {}'.format(pid))
url = 'http://localhost:{port:d}/'.format(port=SPHINX_AUTOBUILD_PORT)
print("\n*** Open '{}' in your browser...".format(url))
break
|
python
|
(same as original_string) |
["def", "autodocs", "(", ")", ":", …] |
create Sphinx docs locally, and start a watchdog
|
["create", "Sphinx", "docs", "locally", "and", "start", "a", "watchdog"] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/pavement.py#L275-L299
|
train
|
pyroscope/pyrocore
|
pavement.py
|
stopdocs
|
def stopdocs():
"stop Sphinx watchdog"
for i in range(4):
pid = watchdog_pid()
if pid:
if not i:
sh('ps {}'.format(pid))
sh('kill {}'.format(pid))
time.sleep(.5)
else:
break
|
python
|
(same as original_string) |
["def", "stopdocs", "(", ")", ":", …] |
stop Sphinx watchdog
|
["stop", "Sphinx", "watchdog"] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/pavement.py#L303-L313
|
train
|
pyroscope/pyrocore
|
pavement.py
|
coverage
|
def coverage():
"generate coverage report and show in browser"
coverage_index = path("build/coverage/index.html")
coverage_index.remove()
sh("paver test")
coverage_index.exists() and webbrowser.open(coverage_index)
|
python
|
(same as original_string) |
["def", "coverage", "(", ")", ":", …] |
generate coverage report and show in browser
|
["generate", "coverage", "report", "and", "show", "in", "browser"] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/pavement.py#L327-L332
|
train
|
pyroscope/pyrocore
|
src/pyrocore/config.py
|
lookup_announce_alias
|
def lookup_announce_alias(name):
""" Get canonical alias name and announce URL list for the given alias.
"""
for alias, urls in announce.items():
if alias.lower() == name.lower():
return alias, urls
raise KeyError("Unknown alias %s" % (name,))
|
python
|
(same as original_string) |
["def", "lookup_announce_alias", "(", "name", ")", ":", …] |
Get canonical alias name and announce URL list for the given alias.
|
["Get", "canonical", "alias", "name", "and", "announce", "URL", "list", "for", "the", "given", "alias", "."] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/config.py#L27-L34
|
train
|
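A usage sketch; the module-level `announce` mapping normally comes from the loaded configuration, so the value set below is hypothetical:

```python
from pyrocore import config  # assumes pyrocore is installed

config.announce = {"SomeTracker": ["http://tracker.example.com/announce"]}  # hypothetical
print(config.lookup_announce_alias("sometracker"))
# ('SomeTracker', ['http://tracker.example.com/announce'])
```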
pyroscope/pyrocore
|
src/pyrocore/config.py
|
map_announce2alias
|
def map_announce2alias(url):
""" Get tracker alias for announce URL, and if none is defined, the 2nd level domain.
"""
import urlparse
# Try to find an exact alias URL match and return its label
for alias, urls in announce.items():
if any(i == url for i in urls):
return alias
# Try to find an alias URL prefix and return its label
parts = urlparse.urlparse(url)
server = urlparse.urlunparse((parts.scheme, parts.netloc, "/", None, None, None))
for alias, urls in announce.items():
if any(i.startswith(server) for i in urls):
return alias
# Return 2nd level domain name if no alias found
try:
return '.'.join(parts.netloc.split(':')[0].split('.')[-2:])
except IndexError:
return parts.netloc
|
python
|
(same as original_string) |
["def", "map_announce2alias", "(", "url", ")", ":", …] |
Get tracker alias for announce URL, and if none is defined, the 2nd level domain.
|
["Get", "tracker", "alias", "for", "announce", "URL", "and", "if", "none", "is", "defined", "the", "2nd", "level", "domain", "."] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/config.py#L37-L59
|
train
|
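When no alias matches, the function falls back to the second-level domain of the announce URL's host, with any port stripped. A sketch of that fallback path (Python 2 code, hence the `urlparse` import above):

```python
from pyrocore import config  # assumes pyrocore is installed
config.announce = {}  # hypothetical: no aliases configured

print(config.map_announce2alias("http://tracker.example.com:6969/announce"))
# 'example.com' -- netloc 'tracker.example.com:6969' -> last two domain labels
```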
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
validate
|
def validate(key, val):
""" Validate a configuration value.
"""
if val and val.startswith("~/"):
return os.path.expanduser(val)
if key == "output_header_frequency":
return int(val, 10)
if key.endswith("_ecma48"):
return eval("'%s'" % val.replace("'", r"\'")) # pylint: disable=eval-used
return val
|
python
|
(same as original_string) |
["def", "validate", "(", "key", ",", "val", ")", ":", …] |
Validate a configuration value.
|
["Validate", "a", "configuration", "value", "."] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L35-L45
|
train
|
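A behavior sketch of the three conversion branches; the first result depends on the caller's `$HOME`, and the `_ecma48` branch turns escape sequences written in the INI file into real control characters:

```python
from pyrocore.util.load_config import validate  # assumes pyrocore is installed

print(validate("session_dir", "~/rtorrent"))      # e.g. '/home/user/rtorrent'
print(validate("output_header_frequency", "42"))  # 42 (decimal int)
print(validate("bold_ecma48", r"\x1B[1m"))        # '\x1b[1m' (real escape byte)
```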
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader._update_config
|
def _update_config(self, namespace): # pylint: disable=no-self-use
""" Inject the items from the given dict into the configuration.
"""
for key, val in namespace.items():
setattr(config, key, val)
|
python
|
(same as original_string) |
["def", "_update_config", "(", "self", ",", "namespace", ")", ":", …] |
Inject the items from the given dict into the configuration.
|
["Inject", "the", "items", "from", "the", "given", "dict", "into", "the", "configuration", "."] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L85-L89
|
train
|
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader._interpolation_escape
|
def _interpolation_escape(self, namespace):
""" Re-escape interpolation strings.
"""
for key, val in namespace.items():
if '%' in val:
namespace[key] = self.INTERPOLATION_ESCAPE.sub(lambda match: '%' + match.group(0), val)
|
python
|
(same as original_string) |
["def", "_interpolation_escape", "(", "self", ",", "namespace", ")", ":", …] |
Re-escape interpolation strings.
|
["Re", "-", "escape", "interpolation", "strings", "."] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L92-L97
|
train
|
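`INTERPOLATION_ESCAPE` itself is not shown in this record; the sketch below uses a hypothetical stand-in pattern just to illustrate what the `sub` callback does — doubling each matched `%` so ConfigParser interpolation leaves it alone:

```python
import re
# Hypothetical stand-in for ConfigLoader.INTERPOLATION_ESCAPE: a lone '%' that
# does not open a ConfigParser interpolation like %(name)s.
INTERPOLATION_ESCAPE = re.compile(r"%(?![%(])")

val = "100% done, keep %(name)s"
print(INTERPOLATION_ESCAPE.sub(lambda match: '%' + match.group(0), val))
# '100%% done, keep %(name)s'
```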
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader._validate_namespace
|
def _validate_namespace(self, namespace):
""" Validate the given namespace. This method is idempotent!
"""
# Update config values (so other code can access them in the bootstrap phase)
self._update_config(namespace)
# Validate announce URLs
for key, val in namespace["announce"].items():
if isinstance(val, basestring):
namespace["announce"][key] = val.split()
# Re-escape output formats
self._interpolation_escape(namespace["formats"])
# Create objects from module specs
for factory in ("engine",):
if isinstance(namespace[factory], basestring):
namespace[factory] = pymagic.import_name(namespace[factory])() if namespace[factory] else None
# Do some standard type conversions
for key in namespace:
# Split lists
if key.endswith("_list") and isinstance(namespace[key], basestring):
namespace[key] = [i.strip() for i in namespace[key].replace(',', ' ').split()]
# Resolve factory and callback handler lists
elif any(key.endswith(i) for i in ("_factories", "_callbacks")) and isinstance(namespace[key], basestring):
namespace[key] = [pymagic.import_name(i.strip()) for i in namespace[key].replace(',', ' ').split()]
# Update config values again
self._update_config(namespace)
|
python
|
(same as original_string) |
["def", "_validate_namespace", "(", "self", ",", "namespace", ")", ":", …] |
Validate the given namespace. This method is idempotent!
|
["Validate", "the", "given", "namespace", ".", "This", "method", "is", "idempotent!"] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L100-L130
|
train
|
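The `_list` conversion above accepts both comma- and whitespace-separated values; replacing commas with spaces first lets a single `split()` handle both. In isolation:

```python
val = "alpha, beta  gamma"  # hypothetical *_list config value
print([i.strip() for i in val.replace(',', ' ').split()])
# ['alpha', 'beta', 'gamma']
```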
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader._set_from_ini
|
def _set_from_ini(self, namespace, ini_file):
""" Copy values from loaded INI file to namespace.
"""
# Isolate global values
global_vars = dict((key, val)
for key, val in namespace.items()
if isinstance(val, basestring)
)
# Copy all sections
for section in ini_file.sections():
# Get values set so far
if section == "GLOBAL":
raw_vars = global_vars
else:
raw_vars = namespace.setdefault(section.lower(), {})
# Override with values set in this INI file
raw_vars.update(dict(ini_file.items(section, raw=True)))
# Interpolate and validate all values
if section == "FORMATS":
self._interpolation_escape(raw_vars)
raw_vars.update(dict(
(key, validate(key, val))
for key, val in ini_file.items(section, vars=raw_vars)
))
# Update global values
namespace.update(global_vars)
|
python
|
(same as original_string) |
["def", "_set_from_ini", "(", "self", ",", "namespace", ",", "ini_file", ")", ":", …] |
Copy values from loaded INI file to namespace.
|
[
"Copy",
"values",
"from",
"loaded",
"INI",
"file",
"to",
"namespace",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L133-L162
|
train
|
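A small usage sketch of the GLOBAL-vs-section merging that `_set_from_ini` performs, written against Python 3's `configparser` instead of the Python 2 `ConfigParser` module the original targets (option names below are illustrative):

import configparser

ini = configparser.ConfigParser()
ini.optionxform = str  # keep option names case-sensitive, as in the original
ini.read_string(
    "[GLOBAL]\n"
    "config_script =\n"
    "[FORMATS]\n"
    "default = %(name)s\n")

namespace = {}
for section in ini.sections():
    if section == "GLOBAL":
        # GLOBAL options are merged into the top level of the namespace
        namespace.update(ini.items(section, raw=True))
    else:
        # other sections become nested dicts keyed by lower-cased name;
        # raw=True defers %(...)s interpolation, like the first pass above
        namespace.setdefault(section.lower(), {}).update(ini.items(section, raw=True))

print(sorted(namespace))  # ['config_script', 'formats']
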
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader._set_defaults
|
def _set_defaults(self, namespace, optional_cfg_files):
""" Set default values in the given dict.
"""
# Add current configuration directory
namespace["config_dir"] = self.config_dir
# Load defaults
for idx, cfg_file in enumerate([self.CONFIG_INI] + optional_cfg_files):
if any(i in cfg_file for i in set('/' + os.sep)):
continue # skip any non-plain filenames
try:
defaults = pymagic.resource_string("pyrocore", "data/config/" + cfg_file) #@UndefinedVariable
except IOError as exc:
if idx and exc.errno == errno.ENOENT:
continue
raise
ini_file = ConfigParser.SafeConfigParser()
ini_file.optionxform = str # case-sensitive option names
ini_file.readfp(StringIO.StringIO(defaults), "<defaults>")
self._set_from_ini(namespace, ini_file)
|
python
|
def _set_defaults(self, namespace, optional_cfg_files):
""" Set default values in the given dict.
"""
# Add current configuration directory
namespace["config_dir"] = self.config_dir
# Load defaults
for idx, cfg_file in enumerate([self.CONFIG_INI] + optional_cfg_files):
if any(i in cfg_file for i in set('/' + os.sep)):
continue # skip any non-plain filenames
try:
defaults = pymagic.resource_string("pyrocore", "data/config/" + cfg_file) #@UndefinedVariable
except IOError as exc:
if idx and exc.errno == errno.ENOENT:
continue
raise
ini_file = ConfigParser.SafeConfigParser()
ini_file.optionxform = str # case-sensitive option names
ini_file.readfp(StringIO.StringIO(defaults), "<defaults>")
self._set_from_ini(namespace, ini_file)
|
[
"def",
"_set_defaults",
"(",
"self",
",",
"namespace",
",",
"optional_cfg_files",
")",
":",
"# Add current configuration directory",
"namespace",
"[",
"\"config_dir\"",
"]",
"=",
"self",
".",
"config_dir",
"# Load defaults",
"for",
"idx",
",",
"cfg_file",
"in",
"enumerate",
"(",
"[",
"self",
".",
"CONFIG_INI",
"]",
"+",
"optional_cfg_files",
")",
":",
"if",
"any",
"(",
"i",
"in",
"cfg_file",
"for",
"i",
"in",
"set",
"(",
"'/'",
"+",
"os",
".",
"sep",
")",
")",
":",
"continue",
"# skip any non-plain filenames",
"try",
":",
"defaults",
"=",
"pymagic",
".",
"resource_string",
"(",
"\"pyrocore\"",
",",
"\"data/config/\"",
"+",
"cfg_file",
")",
"#@UndefinedVariable",
"except",
"IOError",
"as",
"exc",
":",
"if",
"idx",
"and",
"exc",
".",
"errno",
"==",
"errno",
".",
"ENOENT",
":",
"continue",
"raise",
"ini_file",
"=",
"ConfigParser",
".",
"SafeConfigParser",
"(",
")",
"ini_file",
".",
"optionxform",
"=",
"str",
"# case-sensitive option names",
"ini_file",
".",
"readfp",
"(",
"StringIO",
".",
"StringIO",
"(",
"defaults",
")",
",",
"\"<defaults>\"",
")",
"self",
".",
"_set_from_ini",
"(",
"namespace",
",",
"ini_file",
")"
] |
Set default values in the given dict.
|
[
"Set",
"default",
"values",
"in",
"the",
"given",
"dict",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L165-L186
|
train
|
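`pymagic.resource_string` reads the bundled default INI files from package data; the standard library's `pkgutil.get_data` behaves much the same way and can stand in for it in a sketch. The package and resource names below are taken from the record above, but whether they actually resolve depends on having pyrocore installed:

import pkgutil

def load_default(package, resource):
    # Returns the packaged file's bytes, or None when it cannot be found --
    # roughly what the try/except around resource_string achieves above.
    try:
        return pkgutil.get_data(package, resource)
    except (ImportError, OSError):
        return None

raw = load_default("pyrocore", "data/config/config.ini")
print(raw is None or len(raw))
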
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader._load_ini
|
def _load_ini(self, namespace, config_file):
""" Load INI style configuration.
"""
self.LOG.debug("Loading %r..." % (config_file,))
ini_file = ConfigParser.SafeConfigParser()
ini_file.optionxform = str # case-sensitive option names
if ini_file.read(config_file):
self._set_from_ini(namespace, ini_file)
else:
self.LOG.warning("Configuration file %r not found,"
" use the command 'pyroadmin --create-config' to create it!" % (config_file,))
|
python
|
def _load_ini(self, namespace, config_file):
""" Load INI style configuration.
"""
self.LOG.debug("Loading %r..." % (config_file,))
ini_file = ConfigParser.SafeConfigParser()
ini_file.optionxform = str # case-sensitive option names
if ini_file.read(config_file):
self._set_from_ini(namespace, ini_file)
else:
self.LOG.warning("Configuration file %r not found,"
" use the command 'pyroadmin --create-config' to create it!" % (config_file,))
|
[
"def",
"_load_ini",
"(",
"self",
",",
"namespace",
",",
"config_file",
")",
":",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Loading %r...\"",
"%",
"(",
"config_file",
",",
")",
")",
"ini_file",
"=",
"ConfigParser",
".",
"SafeConfigParser",
"(",
")",
"ini_file",
".",
"optionxform",
"=",
"str",
"# case-sensitive option names",
"if",
"ini_file",
".",
"read",
"(",
"config_file",
")",
":",
"self",
".",
"_set_from_ini",
"(",
"namespace",
",",
"ini_file",
")",
"else",
":",
"self",
".",
"LOG",
".",
"warning",
"(",
"\"Configuration file %r not found,\"",
"\" use the command 'pyroadmin --create-config' to create it!\"",
"%",
"(",
"config_file",
",",
")",
")"
] |
Load INI style configuration.
|
[
"Load",
"INI",
"style",
"configuration",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L189-L199
|
train
|
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader._load_py
|
def _load_py(self, namespace, config_file):
""" Load scripted configuration.
"""
if config_file and os.path.isfile(config_file):
self.LOG.debug("Loading %r..." % (config_file,))
exec(compile(open(config_file).read(), config_file, 'exec'), # pylint: disable=exec-used
vars(config), namespace)
else:
self.LOG.warning("Configuration file %r not found!" % (config_file,))
|
python
|
def _load_py(self, namespace, config_file):
""" Load scripted configuration.
"""
if config_file and os.path.isfile(config_file):
self.LOG.debug("Loading %r..." % (config_file,))
exec(compile(open(config_file).read(), config_file, 'exec'), # pylint: disable=exec-used
vars(config), namespace)
else:
self.LOG.warning("Configuration file %r not found!" % (config_file,))
|
[
"def",
"_load_py",
"(",
"self",
",",
"namespace",
",",
"config_file",
")",
":",
"if",
"config_file",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"config_file",
")",
":",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Loading %r...\"",
"%",
"(",
"config_file",
",",
")",
")",
"exec",
"(",
"compile",
"(",
"open",
"(",
"config_file",
")",
".",
"read",
"(",
")",
",",
"config_file",
",",
"'exec'",
")",
",",
"# pylint: disable=exec-used",
"vars",
"(",
"config",
")",
",",
"namespace",
")",
"else",
":",
"self",
".",
"LOG",
".",
"warning",
"(",
"\"Configuration file %r not found!\"",
"%",
"(",
"config_file",
",",
")",
")"
] |
Load scripted configuration.
|
[
"Load",
"scripted",
"configuration",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L202-L210
|
train
|
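The `exec(compile(...))` call above runs the user's Python config script with the `pyrocore.config` module as globals and the namespace dict as locals, so plain assignments in the script land in the namespace. A stripped-down sketch, with the script source inlined instead of read from a file:

namespace = {}
source = 'max_items = 50\nannounce = ["http://example.com/announce"]\n'

# compile() attaches a filename for tracebacks; "<config.py>" is illustrative
exec(compile(source, "<config.py>", "exec"), {}, namespace)

print(namespace["max_items"])   # 50
print(namespace["announce"])    # ['http://example.com/announce']
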
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader.load
|
def load(self, optional_cfg_files=None):
""" Actually load the configuation from either the default location or the given directory.
"""
optional_cfg_files = optional_cfg_files or []
# Guard against coding errors
if self._loaded:
raise RuntimeError("INTERNAL ERROR: Attempt to load configuration twice!")
try:
# Load configuration
namespace = {}
self._set_defaults(namespace, optional_cfg_files)
self._load_ini(namespace, os.path.join(self.config_dir, self.CONFIG_INI))
for cfg_file in optional_cfg_files:
if not os.path.isabs(cfg_file):
cfg_file = os.path.join(self.config_dir, cfg_file)
if os.path.exists(cfg_file):
self._load_ini(namespace, cfg_file)
self._validate_namespace(namespace)
self._load_py(namespace, namespace["config_script"])
self._validate_namespace(namespace)
for callback in namespace["config_validator_callbacks"]:
callback()
except ConfigParser.ParsingError as exc:
raise error.UserError(exc)
# Ready to go...
self._loaded = True
|
python
|
def load(self, optional_cfg_files=None):
""" Actually load the configuation from either the default location or the given directory.
"""
optional_cfg_files = optional_cfg_files or []
# Guard against coding errors
if self._loaded:
raise RuntimeError("INTERNAL ERROR: Attempt to load configuration twice!")
try:
# Load configuration
namespace = {}
self._set_defaults(namespace, optional_cfg_files)
self._load_ini(namespace, os.path.join(self.config_dir, self.CONFIG_INI))
for cfg_file in optional_cfg_files:
if not os.path.isabs(cfg_file):
cfg_file = os.path.join(self.config_dir, cfg_file)
if os.path.exists(cfg_file):
self._load_ini(namespace, cfg_file)
self._validate_namespace(namespace)
self._load_py(namespace, namespace["config_script"])
self._validate_namespace(namespace)
for callback in namespace["config_validator_callbacks"]:
callback()
except ConfigParser.ParsingError as exc:
raise error.UserError(exc)
# Ready to go...
self._loaded = True
|
[
"def",
"load",
"(",
"self",
",",
"optional_cfg_files",
"=",
"None",
")",
":",
"optional_cfg_files",
"=",
"optional_cfg_files",
"or",
"[",
"]",
"# Guard against coding errors",
"if",
"self",
".",
"_loaded",
":",
"raise",
"RuntimeError",
"(",
"\"INTERNAL ERROR: Attempt to load configuration twice!\"",
")",
"try",
":",
"# Load configuration",
"namespace",
"=",
"{",
"}",
"self",
".",
"_set_defaults",
"(",
"namespace",
",",
"optional_cfg_files",
")",
"self",
".",
"_load_ini",
"(",
"namespace",
",",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"config_dir",
",",
"self",
".",
"CONFIG_INI",
")",
")",
"for",
"cfg_file",
"in",
"optional_cfg_files",
":",
"if",
"not",
"os",
".",
"path",
".",
"isabs",
"(",
"cfg_file",
")",
":",
"cfg_file",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"config_dir",
",",
"cfg_file",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"cfg_file",
")",
":",
"self",
".",
"_load_ini",
"(",
"namespace",
",",
"cfg_file",
")",
"self",
".",
"_validate_namespace",
"(",
"namespace",
")",
"self",
".",
"_load_py",
"(",
"namespace",
",",
"namespace",
"[",
"\"config_script\"",
"]",
")",
"self",
".",
"_validate_namespace",
"(",
"namespace",
")",
"for",
"callback",
"in",
"namespace",
"[",
"\"config_validator_callbacks\"",
"]",
":",
"callback",
"(",
")",
"except",
"ConfigParser",
".",
"ParsingError",
"as",
"exc",
":",
"raise",
"error",
".",
"UserError",
"(",
"exc",
")",
"# Ready to go...",
"self",
".",
"_loaded",
"=",
"True"
] |
Actually load the configuration from either the default location or the given directory.
|
[
"Actually",
"load",
"the",
"configuation",
"from",
"either",
"the",
"default",
"location",
"or",
"the",
"given",
"directory",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L213-L246
|
train
|
pyroscope/pyrocore
|
src/pyrocore/util/load_config.py
|
ConfigLoader.create
|
def create(self, remove_all_rc_files=False):
""" Create default configuration files at either the default location or the given directory.
"""
# Check and create configuration directory
if os.path.exists(self.config_dir):
self.LOG.debug("Configuration directory %r already exists!" % (self.config_dir,))
else:
os.mkdir(self.config_dir)
if remove_all_rc_files:
for subdir in ('.', 'rtorrent.d'):
config_files = list(glob.glob(os.path.join(os.path.abspath(self.config_dir), subdir, '*.rc')))
config_files += list(glob.glob(os.path.join(os.path.abspath(self.config_dir), subdir, '*.rc.default')))
for config_file in config_files:
self.LOG.info("Removing %r!" % (config_file,))
os.remove(config_file)
# Create default configuration files
for filepath in sorted(walk_resources("pyrocore", "data/config")):
# Load from package data
text = pymagic.resource_string("pyrocore", "data/config" + filepath)
# Create missing subdirs
config_file = self.config_dir + filepath
if not os.path.exists(os.path.dirname(config_file)):
os.makedirs(os.path.dirname(config_file))
# Write configuration files
config_trail = [".default"]
if os.path.exists(config_file):
self.LOG.debug("Configuration file %r already exists!" % (config_file,))
else:
config_trail.append('')
for i in config_trail:
with open(config_file + i, "w") as handle:
handle.write(text)
self.LOG.info("Configuration file %r written!" % (config_file + i,))
|
python
|
def create(self, remove_all_rc_files=False):
""" Create default configuration files at either the default location or the given directory.
"""
# Check and create configuration directory
if os.path.exists(self.config_dir):
self.LOG.debug("Configuration directory %r already exists!" % (self.config_dir,))
else:
os.mkdir(self.config_dir)
if remove_all_rc_files:
for subdir in ('.', 'rtorrent.d'):
config_files = list(glob.glob(os.path.join(os.path.abspath(self.config_dir), subdir, '*.rc')))
config_files += list(glob.glob(os.path.join(os.path.abspath(self.config_dir), subdir, '*.rc.default')))
for config_file in config_files:
self.LOG.info("Removing %r!" % (config_file,))
os.remove(config_file)
# Create default configuration files
for filepath in sorted(walk_resources("pyrocore", "data/config")):
# Load from package data
text = pymagic.resource_string("pyrocore", "data/config" + filepath)
# Create missing subdirs
config_file = self.config_dir + filepath
if not os.path.exists(os.path.dirname(config_file)):
os.makedirs(os.path.dirname(config_file))
# Write configuration files
config_trail = [".default"]
if os.path.exists(config_file):
self.LOG.debug("Configuration file %r already exists!" % (config_file,))
else:
config_trail.append('')
for i in config_trail:
with open(config_file + i, "w") as handle:
handle.write(text)
self.LOG.info("Configuration file %r written!" % (config_file + i,))
|
[
"def",
"create",
"(",
"self",
",",
"remove_all_rc_files",
"=",
"False",
")",
":",
"# Check and create configuration directory",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"config_dir",
")",
":",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Configuration directory %r already exists!\"",
"%",
"(",
"self",
".",
"config_dir",
",",
")",
")",
"else",
":",
"os",
".",
"mkdir",
"(",
"self",
".",
"config_dir",
")",
"if",
"remove_all_rc_files",
":",
"for",
"subdir",
"in",
"(",
"'.'",
",",
"'rtorrent.d'",
")",
":",
"config_files",
"=",
"list",
"(",
"glob",
".",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"self",
".",
"config_dir",
")",
",",
"subdir",
",",
"'*.rc'",
")",
")",
")",
"config_files",
"+=",
"list",
"(",
"glob",
".",
"glob",
"(",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"self",
".",
"config_dir",
")",
",",
"subdir",
",",
"'*.rc.default'",
")",
")",
")",
"for",
"config_file",
"in",
"config_files",
":",
"self",
".",
"LOG",
".",
"info",
"(",
"\"Removing %r!\"",
"%",
"(",
"config_file",
",",
")",
")",
"os",
".",
"remove",
"(",
"config_file",
")",
"# Create default configuration files",
"for",
"filepath",
"in",
"sorted",
"(",
"walk_resources",
"(",
"\"pyrocore\"",
",",
"\"data/config\"",
")",
")",
":",
"# Load from package data",
"text",
"=",
"pymagic",
".",
"resource_string",
"(",
"\"pyrocore\"",
",",
"\"data/config\"",
"+",
"filepath",
")",
"# Create missing subdirs",
"config_file",
"=",
"self",
".",
"config_dir",
"+",
"filepath",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"config_file",
")",
")",
":",
"os",
".",
"makedirs",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"config_file",
")",
")",
"# Write configuration files",
"config_trail",
"=",
"[",
"\".default\"",
"]",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"config_file",
")",
":",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Configuration file %r already exists!\"",
"%",
"(",
"config_file",
",",
")",
")",
"else",
":",
"config_trail",
".",
"append",
"(",
"''",
")",
"for",
"i",
"in",
"config_trail",
":",
"with",
"open",
"(",
"config_file",
"+",
"i",
",",
"\"w\"",
")",
"as",
"handle",
":",
"handle",
".",
"write",
"(",
"text",
")",
"self",
".",
"LOG",
".",
"info",
"(",
"\"Configuration file %r written!\"",
"%",
"(",
"config_file",
"+",
"i",
",",
")",
")"
] |
Create default configuration files at either the default location or the given directory.
|
[
"Create",
"default",
"configuration",
"files",
"at",
"either",
"the",
"default",
"location",
"or",
"the",
"given",
"directory",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/load_config.py#L249-L285
|
train
|
pyroscope/pyrocore
|
src/pyrocore/scripts/mktor.py
|
MetafileCreator.make_magnet_meta
|
def make_magnet_meta(self, magnet_uri):
""" Create a magnet-uri torrent.
"""
import cgi
import hashlib
if magnet_uri.startswith("magnet:"):
magnet_uri = magnet_uri[7:]
meta = {"magnet-uri": "magnet:" + magnet_uri}
magnet_params = cgi.parse_qs(magnet_uri.lstrip('?'))
meta_name = magnet_params.get("xt", [hashlib.sha1(magnet_uri).hexdigest()])[0]
if "dn" in magnet_params:
meta_name = "%s-%s" % (magnet_params["dn"][0], meta_name)
meta_name = re.sub(r"[^-_,a-zA-Z0-9]+", '.', meta_name).strip('.').replace("urn.btih.", "")
if not config.magnet_watch:
self.fatal("You MUST set the 'magnet_watch' config option!")
meta_path = os.path.join(config.magnet_watch, "magnet-%s.torrent" % meta_name)
self.LOG.debug("Writing magnet-uri metafile %r..." % (meta_path,))
try:
bencode.bwrite(meta_path, meta)
except EnvironmentError as exc:
self.fatal("Error writing magnet-uri metafile %r (%s)" % (meta_path, exc,))
raise
|
python
|
def make_magnet_meta(self, magnet_uri):
""" Create a magnet-uri torrent.
"""
import cgi
import hashlib
if magnet_uri.startswith("magnet:"):
magnet_uri = magnet_uri[7:]
meta = {"magnet-uri": "magnet:" + magnet_uri}
magnet_params = cgi.parse_qs(magnet_uri.lstrip('?'))
meta_name = magnet_params.get("xt", [hashlib.sha1(magnet_uri).hexdigest()])[0]
if "dn" in magnet_params:
meta_name = "%s-%s" % (magnet_params["dn"][0], meta_name)
meta_name = re.sub(r"[^-_,a-zA-Z0-9]+", '.', meta_name).strip('.').replace("urn.btih.", "")
if not config.magnet_watch:
self.fatal("You MUST set the 'magnet_watch' config option!")
meta_path = os.path.join(config.magnet_watch, "magnet-%s.torrent" % meta_name)
self.LOG.debug("Writing magnet-uri metafile %r..." % (meta_path,))
try:
bencode.bwrite(meta_path, meta)
except EnvironmentError as exc:
self.fatal("Error writing magnet-uri metafile %r (%s)" % (meta_path, exc,))
raise
|
[
"def",
"make_magnet_meta",
"(",
"self",
",",
"magnet_uri",
")",
":",
"import",
"cgi",
"import",
"hashlib",
"if",
"magnet_uri",
".",
"startswith",
"(",
"\"magnet:\"",
")",
":",
"magnet_uri",
"=",
"magnet_uri",
"[",
"7",
":",
"]",
"meta",
"=",
"{",
"\"magnet-uri\"",
":",
"\"magnet:\"",
"+",
"magnet_uri",
"}",
"magnet_params",
"=",
"cgi",
".",
"parse_qs",
"(",
"magnet_uri",
".",
"lstrip",
"(",
"'?'",
")",
")",
"meta_name",
"=",
"magnet_params",
".",
"get",
"(",
"\"xt\"",
",",
"[",
"hashlib",
".",
"sha1",
"(",
"magnet_uri",
")",
".",
"hexdigest",
"(",
")",
"]",
")",
"[",
"0",
"]",
"if",
"\"dn\"",
"in",
"magnet_params",
":",
"meta_name",
"=",
"\"%s-%s\"",
"%",
"(",
"magnet_params",
"[",
"\"dn\"",
"]",
"[",
"0",
"]",
",",
"meta_name",
")",
"meta_name",
"=",
"re",
".",
"sub",
"(",
"r\"[^-_,a-zA-Z0-9]+\"",
",",
"'.'",
",",
"meta_name",
")",
".",
"strip",
"(",
"'.'",
")",
".",
"replace",
"(",
"\"urn.btih.\"",
",",
"\"\"",
")",
"if",
"not",
"config",
".",
"magnet_watch",
":",
"self",
".",
"fatal",
"(",
"\"You MUST set the 'magnet_watch' config option!\"",
")",
"meta_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"config",
".",
"magnet_watch",
",",
"\"magnet-%s.torrent\"",
"%",
"meta_name",
")",
"self",
".",
"LOG",
".",
"debug",
"(",
"\"Writing magnet-uri metafile %r...\"",
"%",
"(",
"meta_path",
",",
")",
")",
"try",
":",
"bencode",
".",
"bwrite",
"(",
"meta_path",
",",
"meta",
")",
"except",
"EnvironmentError",
"as",
"exc",
":",
"self",
".",
"fatal",
"(",
"\"Error writing magnet-uri metafile %r (%s)\"",
"%",
"(",
"meta_path",
",",
"exc",
",",
")",
")",
"raise"
] |
Create a magnet-uri torrent.
|
[
"Create",
"a",
"magnet",
"-",
"uri",
"torrent",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/scripts/mktor.py#L84-L109
|
train
|
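`make_magnet_meta` derives the metafile name from the magnet link's query parameters (`xt`, optionally prefixed by `dn`). A sketch of that name derivation using `urllib.parse.parse_qs`, the Python 3 successor of the deprecated `cgi.parse_qs` used above; the info-hash is made up:

import re
from urllib.parse import parse_qs

uri = "magnet:?xt=urn:btih:c12fe1c06bba254a9dc9f519b335aa7c1367a88a&dn=Example+Name"
params = parse_qs(uri[len("magnet:"):].lstrip('?'))

name = params["xt"][0]
if "dn" in params:
    name = "%s-%s" % (params["dn"][0], name)
# squash anything outside a safe character set and drop the urn prefix
name = re.sub(r"[^-_,a-zA-Z0-9]+", '.', name).strip('.').replace("urn.btih.", "")
print(name)  # Example.Name-c12fe1c06bba254a9dc9f519b335aa7c1367a88a
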
pyroscope/pyrocore
|
src/pyrocore/util/pymagic.py
|
get_class_logger
|
def get_class_logger(obj):
""" Get a logger specific for the given object's class.
"""
return logging.getLogger(obj.__class__.__module__ + '.' + obj.__class__.__name__)
|
python
|
def get_class_logger(obj):
""" Get a logger specific for the given object's class.
"""
return logging.getLogger(obj.__class__.__module__ + '.' + obj.__class__.__name__)
|
[
"def",
"get_class_logger",
"(",
"obj",
")",
":",
"return",
"logging",
".",
"getLogger",
"(",
"obj",
".",
"__class__",
".",
"__module__",
"+",
"'.'",
"+",
"obj",
".",
"__class__",
".",
"__name__",
")"
] |
Get a logger specific for the given object's class.
|
[
"Get",
"a",
"logger",
"specific",
"for",
"the",
"given",
"object",
"s",
"class",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/pymagic.py#L67-L70
|
train
|
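Usage is straightforward: every instance gets a logger named after its class's fully qualified name, so output can be filtered per class. A quick sketch:

import logging

logging.basicConfig(level=logging.DEBUG)

def get_class_logger(obj):
    return logging.getLogger(obj.__class__.__module__ + '.' + obj.__class__.__name__)

class Downloader(object):
    def __init__(self):
        self.LOG = get_class_logger(self)  # logger named "__main__.Downloader"

Downloader().LOG.debug("ready")
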
pyroscope/pyrocore
|
src/pyrocore/util/pymagic.py
|
JSONEncoder.default
|
def default(self, o): # pylint: disable=method-hidden
"""Support more object types."""
if isinstance(o, set):
return list(sorted(o))
elif hasattr(o, 'as_dict'):
return o.as_dict()
else:
return super(JSONEncoder, self).default(o)
|
python
|
def default(self, o): # pylint: disable=method-hidden
"""Support more object types."""
if isinstance(o, set):
return list(sorted(o))
elif hasattr(o, 'as_dict'):
return o.as_dict()
else:
return super(JSONEncoder, self).default(o)
|
[
"def",
"default",
"(",
"self",
",",
"o",
")",
":",
"# pylint: disable=method-hidden",
"if",
"isinstance",
"(",
"o",
",",
"set",
")",
":",
"return",
"list",
"(",
"sorted",
"(",
"o",
")",
")",
"elif",
"hasattr",
"(",
"o",
",",
"'as_dict'",
")",
":",
"return",
"o",
".",
"as_dict",
"(",
")",
"else",
":",
"return",
"super",
"(",
"JSONEncoder",
",",
"self",
")",
".",
"default",
"(",
"o",
")"
] |
Support more object types.
|
[
"Support",
"more",
"object",
"types",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/util/pymagic.py#L85-L92
|
train
|
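The encoder subclass is plugged in through `json.dumps(cls=...)`: sets become sorted lists, and any object exposing `as_dict()` serializes via that method. A self-contained sketch (the `Torrent` class is invented for illustration):

import json

class JSONEncoder(json.JSONEncoder):
    def default(self, o):  # pylint: disable=method-hidden
        if isinstance(o, set):
            return sorted(o)
        elif hasattr(o, 'as_dict'):
            return o.as_dict()
        return super(JSONEncoder, self).default(o)

class Torrent(object):
    def as_dict(self):
        return {"name": "example", "tags": {"tv", "hd"}}

print(json.dumps(Torrent(), cls=JSONEncoder, sort_keys=True))
# {"name": "example", "tags": ["hd", "tv"]}
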
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
fmt_sz
|
def fmt_sz(intval):
""" Format a byte sized value.
"""
try:
return fmt.human_size(intval)
except (ValueError, TypeError):
return "N/A".rjust(len(fmt.human_size(0)))
|
python
|
def fmt_sz(intval):
""" Format a byte sized value.
"""
try:
return fmt.human_size(intval)
except (ValueError, TypeError):
return "N/A".rjust(len(fmt.human_size(0)))
|
[
"def",
"fmt_sz",
"(",
"intval",
")",
":",
"try",
":",
"return",
"fmt",
".",
"human_size",
"(",
"intval",
")",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"return",
"\"N/A\"",
".",
"rjust",
"(",
"len",
"(",
"fmt",
".",
"human_size",
"(",
"0",
")",
")",
")"
] |
Format a byte sized value.
|
[
"Format",
"a",
"byte",
"sized",
"value",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L41-L47
|
train
|
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
fmt_iso
|
def fmt_iso(timestamp):
""" Format a UNIX timestamp to an ISO datetime string.
"""
try:
return fmt.iso_datetime(timestamp)
except (ValueError, TypeError):
return "N/A".rjust(len(fmt.iso_datetime(0)))
|
python
|
def fmt_iso(timestamp):
""" Format a UNIX timestamp to an ISO datetime string.
"""
try:
return fmt.iso_datetime(timestamp)
except (ValueError, TypeError):
return "N/A".rjust(len(fmt.iso_datetime(0)))
|
[
"def",
"fmt_iso",
"(",
"timestamp",
")",
":",
"try",
":",
"return",
"fmt",
".",
"iso_datetime",
"(",
"timestamp",
")",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"return",
"\"N/A\"",
".",
"rjust",
"(",
"len",
"(",
"fmt",
".",
"iso_datetime",
"(",
"0",
")",
")",
")"
] |
Format a UNIX timestamp to an ISO datetime string.
|
[
"Format",
"a",
"UNIX",
"timestamp",
"to",
"an",
"ISO",
"datetime",
"string",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L50-L56
|
train
|
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
fmt_duration
|
def fmt_duration(duration):
""" Format a duration value in seconds to a readable form.
"""
try:
return fmt.human_duration(float(duration), 0, 2, True)
except (ValueError, TypeError):
return "N/A".rjust(len(fmt.human_duration(0, 0, 2, True)))
|
python
|
def fmt_duration(duration):
""" Format a duration value in seconds to a readable form.
"""
try:
return fmt.human_duration(float(duration), 0, 2, True)
except (ValueError, TypeError):
return "N/A".rjust(len(fmt.human_duration(0, 0, 2, True)))
|
[
"def",
"fmt_duration",
"(",
"duration",
")",
":",
"try",
":",
"return",
"fmt",
".",
"human_duration",
"(",
"float",
"(",
"duration",
")",
",",
"0",
",",
"2",
",",
"True",
")",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"return",
"\"N/A\"",
".",
"rjust",
"(",
"len",
"(",
"fmt",
".",
"human_duration",
"(",
"0",
",",
"0",
",",
"2",
",",
"True",
")",
")",
")"
] |
Format a duration value in seconds to a readable form.
|
[
"Format",
"a",
"duration",
"value",
"in",
"seconds",
"to",
"a",
"readable",
"form",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L59-L65
|
train
|
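All three formatters above share one pattern: attempt the real conversion, and on ValueError/TypeError fall back to "N/A" right-justified to the width the formatter normally produces, so table columns stay aligned. A generic sketch of the pattern -- the `human_size` helper here is a stand-in, not pyrocore's `fmt` module:

def human_size(num_bytes):
    # stand-in for fmt.human_size: fixed-width "  12.3 MiB" style output
    value, unit = float(num_bytes), 'B'
    for unit in ('B', 'KiB', 'MiB', 'GiB', 'TiB'):
        if value < 1024 or unit == 'TiB':
            break
        value /= 1024.0
    return "%6.1f %-3s" % (value, unit)

def fmt_sz(intval):
    try:
        return human_size(intval)
    except (ValueError, TypeError):
        # pad to the same width as a real value, keeping columns aligned
        return "N/A".rjust(len(human_size(0)))

for raw in (0, 12345678, None):
    print(repr(fmt_sz(raw)))
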
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
fmt_subst
|
def fmt_subst(regex, subst):
"""Replace regex with string."""
return lambda text: re.sub(regex, subst, text) if text else text
|
python
|
def fmt_subst(regex, subst):
"""Replace regex with string."""
return lambda text: re.sub(regex, subst, text) if text else text
|
[
"def",
"fmt_subst",
"(",
"regex",
",",
"subst",
")",
":",
"return",
"lambda",
"text",
":",
"re",
".",
"sub",
"(",
"regex",
",",
"subst",
",",
"text",
")",
"if",
"text",
"else",
"text"
] |
Replace regex with string.
|
[
"Replace",
"regex",
"with",
"string",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L89-L91
|
train
|
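`fmt_subst` manufactures a one-argument formatter from a regex and a replacement, passing empty or None text through untouched, so the returned lambda slots into the same formatter chain as the `fmt_*` functions. For instance:

import re

def fmt_subst(regex, subst):
    return lambda text: re.sub(regex, subst, text) if text else text

strip_tags = fmt_subst(r"<[^>]+>", "")
print(strip_tags("<b>Ubuntu</b> 22.04"))  # Ubuntu 22.04
print(strip_tags(""))                     # empty input passes through unchanged
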
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
preparse
|
def preparse(output_format):
""" Do any special processing of a template, and return the result.
"""
try:
return templating.preparse(output_format, lambda path: os.path.join(config.config_dir, "templates", path))
except ImportError as exc:
if "tempita" in str(exc):
raise error.UserError("To be able to use Tempita templates, install the 'tempita' package (%s)\n"
" Possibly USING THE FOLLOWING COMMAND:\n"
" %s/easy_install tempita" % (exc, os.path.dirname(sys.executable)))
raise
except IOError as exc:
raise error.LoggableError("Cannot read template: {}".format(exc))
|
python
|
def preparse(output_format):
""" Do any special processing of a template, and return the result.
"""
try:
return templating.preparse(output_format, lambda path: os.path.join(config.config_dir, "templates", path))
except ImportError as exc:
if "tempita" in str(exc):
raise error.UserError("To be able to use Tempita templates, install the 'tempita' package (%s)\n"
" Possibly USING THE FOLLOWING COMMAND:\n"
" %s/easy_install tempita" % (exc, os.path.dirname(sys.executable)))
raise
except IOError as exc:
raise error.LoggableError("Cannot read template: {}".format(exc))
|
[
"def",
"preparse",
"(",
"output_format",
")",
":",
"try",
":",
"return",
"templating",
".",
"preparse",
"(",
"output_format",
",",
"lambda",
"path",
":",
"os",
".",
"path",
".",
"join",
"(",
"config",
".",
"config_dir",
",",
"\"templates\"",
",",
"path",
")",
")",
"except",
"ImportError",
"as",
"exc",
":",
"if",
"\"tempita\"",
"in",
"str",
"(",
"exc",
")",
":",
"raise",
"error",
".",
"UserError",
"(",
"\"To be able to use Tempita templates, install the 'tempita' package (%s)\\n\"",
"\" Possibly USING THE FOLLOWING COMMAND:\\n\"",
"\" %s/easy_install tempita\"",
"%",
"(",
"exc",
",",
"os",
".",
"path",
".",
"dirname",
"(",
"sys",
".",
"executable",
")",
")",
")",
"raise",
"except",
"IOError",
"as",
"exc",
":",
"raise",
"error",
".",
"LoggableError",
"(",
"\"Cannot read template: {}\"",
".",
"format",
"(",
"exc",
")",
")"
] |
Do any special processing of a template, and return the result.
|
[
"Do",
"any",
"special",
"processing",
"of",
"a",
"template",
"and",
"return",
"the",
"result",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L214-L226
|
train
|
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
validate_field_list
|
def validate_field_list(fields, allow_fmt_specs=False, name_filter=None):
""" Make sure the fields in the given list exist.
@param fields: List of fields (comma-/space-separated if a string).
@type fields: list or str
@return: validated field names.
@rtype: list
"""
formats = [i[4:] for i in globals() if i.startswith("fmt_")]
try:
fields = [i.strip() for i in fields.replace(',', ' ').split()]
except AttributeError:
# Not a string, expecting an iterable
pass
if name_filter:
fields = [name_filter(name) for name in fields]
for name in fields:
if allow_fmt_specs and '.' in name:
fullname = name
name, fmtspecs = name.split('.', 1)
for fmtspec in fmtspecs.split('.'):
if fmtspec not in formats and fmtspec != "raw":
raise error.UserError("Unknown format specification %r in %r" % (fmtspec, fullname))
if name not in engine.FieldDefinition.FIELDS and not engine.TorrentProxy.add_manifold_attribute(name):
raise error.UserError("Unknown field name %r" % (name,))
return fields
|
python
|
def validate_field_list(fields, allow_fmt_specs=False, name_filter=None):
""" Make sure the fields in the given list exist.
@param fields: List of fields (comma-/space-separated if a string).
@type fields: list or str
@return: validated field names.
@rtype: list
"""
formats = [i[4:] for i in globals() if i.startswith("fmt_")]
try:
fields = [i.strip() for i in fields.replace(',', ' ').split()]
except AttributeError:
# Not a string, expecting an iterable
pass
if name_filter:
fields = [name_filter(name) for name in fields]
for name in fields:
if allow_fmt_specs and '.' in name:
fullname = name
name, fmtspecs = name.split('.', 1)
for fmtspec in fmtspecs.split('.'):
if fmtspec not in formats and fmtspec != "raw":
raise error.UserError("Unknown format specification %r in %r" % (fmtspec, fullname))
if name not in engine.FieldDefinition.FIELDS and not engine.TorrentProxy.add_manifold_attribute(name):
raise error.UserError("Unknown field name %r" % (name,))
return fields
|
[
"def",
"validate_field_list",
"(",
"fields",
",",
"allow_fmt_specs",
"=",
"False",
",",
"name_filter",
"=",
"None",
")",
":",
"formats",
"=",
"[",
"i",
"[",
"4",
":",
"]",
"for",
"i",
"in",
"globals",
"(",
")",
"if",
"i",
".",
"startswith",
"(",
"\"fmt_\"",
")",
"]",
"try",
":",
"fields",
"=",
"[",
"i",
".",
"strip",
"(",
")",
"for",
"i",
"in",
"fields",
".",
"replace",
"(",
"','",
",",
"' '",
")",
".",
"split",
"(",
")",
"]",
"except",
"AttributeError",
":",
"# Not a string, expecting an iterable",
"pass",
"if",
"name_filter",
":",
"fields",
"=",
"[",
"name_filter",
"(",
"name",
")",
"for",
"name",
"in",
"fields",
"]",
"for",
"name",
"in",
"fields",
":",
"if",
"allow_fmt_specs",
"and",
"'.'",
"in",
"name",
":",
"fullname",
"=",
"name",
"name",
",",
"fmtspecs",
"=",
"name",
".",
"split",
"(",
"'.'",
",",
"1",
")",
"for",
"fmtspec",
"in",
"fmtspecs",
".",
"split",
"(",
"'.'",
")",
":",
"if",
"fmtspec",
"not",
"in",
"formats",
"and",
"fmtspec",
"!=",
"\"raw\"",
":",
"raise",
"error",
".",
"UserError",
"(",
"\"Unknown format specification %r in %r\"",
"%",
"(",
"fmtspec",
",",
"fullname",
")",
")",
"if",
"name",
"not",
"in",
"engine",
".",
"FieldDefinition",
".",
"FIELDS",
"and",
"not",
"engine",
".",
"TorrentProxy",
".",
"add_manifold_attribute",
"(",
"name",
")",
":",
"raise",
"error",
".",
"UserError",
"(",
"\"Unknown field name %r\"",
"%",
"(",
"name",
",",
")",
")",
"return",
"fields"
] |
Make sure the fields in the given list exist.
@param fields: List of fields (comma-/space-separated if a string).
@type fields: list or str
@return: validated field names.
@rtype: list
|
[
"Make",
"sure",
"the",
"fields",
"in",
"the",
"given",
"list",
"exist",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L319-L349
|
train
|
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
validate_sort_fields
|
def validate_sort_fields(sort_fields):
""" Make sure the fields in the given list exist, and return sorting key.
If field names are prefixed with '-', sort order is reversed for that field (descending).
"""
# Allow descending order per field by prefixing with '-'
descending = set()
def sort_order_filter(name):
"Helper to remove flag and memoize sort order"
if name.startswith('-'):
name = name[1:]
descending.add(name)
return name
# Split and validate field list
sort_fields = validate_field_list(sort_fields, name_filter=sort_order_filter)
log.debug("Sorting order is: %s" % ", ".join([('-' if i in descending else '') + i
for i in sort_fields]))
# No descending fields?
if not descending:
return operator.attrgetter(*tuple(sort_fields))
# Need to provide complex key
class Key(object):
"Complex sort order key"
def __init__(self, obj, *args):
"Remember object to be compared"
self.obj = obj
def __lt__(self, other):
"Compare to other key"
for field in sort_fields:
lhs, rhs = getattr(self.obj, field), getattr(other.obj, field)
if lhs == rhs:
continue
return rhs < lhs if field in descending else lhs < rhs
return False
return Key
|
python
|
def validate_sort_fields(sort_fields):
""" Make sure the fields in the given list exist, and return sorting key.
If field names are prefixed with '-', sort order is reversed for that field (descending).
"""
# Allow descending order per field by prefixing with '-'
descending = set()
def sort_order_filter(name):
"Helper to remove flag and memoize sort order"
if name.startswith('-'):
name = name[1:]
descending.add(name)
return name
# Split and validate field list
sort_fields = validate_field_list(sort_fields, name_filter=sort_order_filter)
log.debug("Sorting order is: %s" % ", ".join([('-' if i in descending else '') + i
for i in sort_fields]))
# No descending fields?
if not descending:
return operator.attrgetter(*tuple(sort_fields))
# Need to provide complex key
class Key(object):
"Complex sort order key"
def __init__(self, obj, *args):
"Remember object to be compared"
self.obj = obj
def __lt__(self, other):
"Compare to other key"
for field in sort_fields:
lhs, rhs = getattr(self.obj, field), getattr(other.obj, field)
if lhs == rhs:
continue
return rhs < lhs if field in descending else lhs < rhs
return False
return Key
|
[
"def",
"validate_sort_fields",
"(",
"sort_fields",
")",
":",
"# Allow descending order per field by prefixing with '-'",
"descending",
"=",
"set",
"(",
")",
"def",
"sort_order_filter",
"(",
"name",
")",
":",
"\"Helper to remove flag and memoize sort order\"",
"if",
"name",
".",
"startswith",
"(",
"'-'",
")",
":",
"name",
"=",
"name",
"[",
"1",
":",
"]",
"descending",
".",
"add",
"(",
"name",
")",
"return",
"name",
"# Split and validate field list",
"sort_fields",
"=",
"validate_field_list",
"(",
"sort_fields",
",",
"name_filter",
"=",
"sort_order_filter",
")",
"log",
".",
"debug",
"(",
"\"Sorting order is: %s\"",
"%",
"\", \"",
".",
"join",
"(",
"[",
"(",
"'-'",
"if",
"i",
"in",
"descending",
"else",
"''",
")",
"+",
"i",
"for",
"i",
"in",
"sort_fields",
"]",
")",
")",
"# No descending fields?",
"if",
"not",
"descending",
":",
"return",
"operator",
".",
"attrgetter",
"(",
"*",
"tuple",
"(",
"sort_fields",
")",
")",
"# Need to provide complex key",
"class",
"Key",
"(",
"object",
")",
":",
"\"Complex sort order key\"",
"def",
"__init__",
"(",
"self",
",",
"obj",
",",
"*",
"args",
")",
":",
"\"Remember object to be compared\"",
"self",
".",
"obj",
"=",
"obj",
"def",
"__lt__",
"(",
"self",
",",
"other",
")",
":",
"\"Compare to other key\"",
"for",
"field",
"in",
"sort_fields",
":",
"lhs",
",",
"rhs",
"=",
"getattr",
"(",
"self",
".",
"obj",
",",
"field",
")",
",",
"getattr",
"(",
"other",
".",
"obj",
",",
"field",
")",
"if",
"lhs",
"==",
"rhs",
":",
"continue",
"return",
"rhs",
"<",
"lhs",
"if",
"field",
"in",
"descending",
"else",
"lhs",
"<",
"rhs",
"return",
"False",
"return",
"Key"
] |
Make sure the fields in the given list exist, and return sorting key.
If field names are prefixed with '-', sort order is reversed for that field (descending).
|
[
"Make",
"sure",
"the",
"fields",
"in",
"the",
"given",
"list",
"exist",
"and",
"return",
"sorting",
"key",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L352-L390
|
train
|
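When any field carries a '-' prefix, `validate_sort_fields` returns a key *class* rather than an attrgetter; `sorted(..., key=Key)` then instantiates one wrapper per item, and comparisons walk the field list, flipping direction for descending fields. A compact sketch of the same mechanism on plain objects (the field names are made up):

sort_fields = ("size", "name")
descending = {"size"}  # i.e. the user asked for "-size,name"

class Key(object):
    "Complex sort order key, as returned by validate_sort_fields()"
    def __init__(self, obj, *args):
        self.obj = obj
    def __lt__(self, other):
        for field in sort_fields:
            lhs, rhs = getattr(self.obj, field), getattr(other.obj, field)
            if lhs == rhs:
                continue
            return rhs < lhs if field in descending else lhs < rhs
        return False

class Item(object):
    def __init__(self, name, size):
        self.name, self.size = name, size
    def __repr__(self):
        return "%s(%d)" % (self.name, self.size)

items = [Item("a", 10), Item("b", 30), Item("c", 30)]
print(sorted(items, key=Key))  # [b(30), c(30), a(10)] -- size desc, name asc
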
pyroscope/pyrocore
|
src/pyrocore/torrent/formatting.py
|
OutputMapping.formatter_help
|
def formatter_help(cls):
""" Return a list of format specifiers and their documentation.
"""
result = [("raw", "Switch off the default field formatter.")]
for name, method in globals().items():
if name.startswith("fmt_"):
result.append((name[4:], method.__doc__.strip()))
return result
|
python
|
def formatter_help(cls):
""" Return a list of format specifiers and their documentation.
"""
result = [("raw", "Switch off the default field formatter.")]
for name, method in globals().items():
if name.startswith("fmt_"):
result.append((name[4:], method.__doc__.strip()))
return result
|
[
"def",
"formatter_help",
"(",
"cls",
")",
":",
"result",
"=",
"[",
"(",
"\"raw\"",
",",
"\"Switch off the default field formatter.\"",
")",
"]",
"for",
"name",
",",
"method",
"in",
"globals",
"(",
")",
".",
"items",
"(",
")",
":",
"if",
"name",
".",
"startswith",
"(",
"\"fmt_\"",
")",
":",
"result",
".",
"append",
"(",
"(",
"name",
"[",
"4",
":",
"]",
",",
"method",
".",
"__doc__",
".",
"strip",
"(",
")",
")",
")",
"return",
"result"
] |
Return a list of format specifiers and their documentation.
|
[
"Return",
"a",
"list",
"of",
"format",
"specifiers",
"and",
"their",
"documentation",
"."
] |
89ad01346a570943d20311a0b488440975876612
|
https://github.com/pyroscope/pyrocore/blob/89ad01346a570943d20311a0b488440975876612/src/pyrocore/torrent/formatting.py#L138-L147
|
train
|
wroberts/pytimeparse
|
pytimeparse/timeparse.py
|
timeparse
|
def timeparse(sval, granularity='seconds'):
'''
Parse a time expression, returning it as a number of seconds. If
possible, the return value will be an `int`; if this is not
possible, the return will be a `float`. Returns `None` if a time
expression cannot be parsed from the given string.
Arguments:
- `sval`: the string value to parse
>>> timeparse('1:24')
84
>>> timeparse(':22')
22
>>> timeparse('1 minute, 24 secs')
84
>>> timeparse('1m24s')
84
>>> timeparse('1.2 minutes')
72
>>> timeparse('1.2 seconds')
1.2
Time expressions can be signed.
>>> timeparse('- 1 minute')
-60
>>> timeparse('+ 1 minute')
60
If granularity is specified as ``minutes``, then ambiguous digits following
a colon will be interpreted as minutes; otherwise they are considered seconds.
>>> timeparse('1:30')
90
>>> timeparse('1:30', granularity='minutes')
5400
'''
match = COMPILED_SIGN.match(sval)
sign = -1 if match.groupdict()['sign'] == '-' else 1
sval = match.groupdict()['unsigned']
for timefmt in COMPILED_TIMEFORMATS:
match = timefmt.match(sval)
if match and match.group(0).strip():
mdict = match.groupdict()
if granularity == 'minutes':
mdict = _interpret_as_minutes(sval, mdict)
# if all of the fields are integer numbers
if all(v.isdigit() for v in list(mdict.values()) if v):
return sign * sum([MULTIPLIERS[k] * int(v, 10) for (k, v) in
list(mdict.items()) if v is not None])
# if SECS is an integer number
elif ('secs' not in mdict or
mdict['secs'] is None or
mdict['secs'].isdigit()):
# we will return an integer
return (
sign * int(sum([MULTIPLIERS[k] * float(v) for (k, v) in
list(mdict.items()) if k != 'secs' and v is not None])) +
(int(mdict['secs'], 10) if mdict['secs'] else 0))
else:
# SECS is a float, we will return a float
return sign * sum([MULTIPLIERS[k] * float(v) for (k, v) in
list(mdict.items()) if v is not None])
|
python
|
def timeparse(sval, granularity='seconds'):
'''
Parse a time expression, returning it as a number of seconds. If
possible, the return value will be an `int`; if this is not
possible, the return will be a `float`. Returns `None` if a time
expression cannot be parsed from the given string.
Arguments:
- `sval`: the string value to parse
>>> timeparse('1:24')
84
>>> timeparse(':22')
22
>>> timeparse('1 minute, 24 secs')
84
>>> timeparse('1m24s')
84
>>> timeparse('1.2 minutes')
72
>>> timeparse('1.2 seconds')
1.2
Time expressions can be signed.
>>> timeparse('- 1 minute')
-60
>>> timeparse('+ 1 minute')
60
If granularity is specified as ``minutes``, then ambiguous digits following
a colon will be interpreted as minutes; otherwise they are considered seconds.
>>> timeparse('1:30')
90
>>> timeparse('1:30', granularity='minutes')
5400
'''
match = COMPILED_SIGN.match(sval)
sign = -1 if match.groupdict()['sign'] == '-' else 1
sval = match.groupdict()['unsigned']
for timefmt in COMPILED_TIMEFORMATS:
match = timefmt.match(sval)
if match and match.group(0).strip():
mdict = match.groupdict()
if granularity == 'minutes':
mdict = _interpret_as_minutes(sval, mdict)
# if all of the fields are integer numbers
if all(v.isdigit() for v in list(mdict.values()) if v):
return sign * sum([MULTIPLIERS[k] * int(v, 10) for (k, v) in
list(mdict.items()) if v is not None])
# if SECS is an integer number
elif ('secs' not in mdict or
mdict['secs'] is None or
mdict['secs'].isdigit()):
# we will return an integer
return (
sign * int(sum([MULTIPLIERS[k] * float(v) for (k, v) in
list(mdict.items()) if k != 'secs' and v is not None])) +
(int(mdict['secs'], 10) if mdict['secs'] else 0))
else:
# SECS is a float, we will return a float
return sign * sum([MULTIPLIERS[k] * float(v) for (k, v) in
list(mdict.items()) if v is not None])
|
[
"def",
"timeparse",
"(",
"sval",
",",
"granularity",
"=",
"'seconds'",
")",
":",
"match",
"=",
"COMPILED_SIGN",
".",
"match",
"(",
"sval",
")",
"sign",
"=",
"-",
"1",
"if",
"match",
".",
"groupdict",
"(",
")",
"[",
"'sign'",
"]",
"==",
"'-'",
"else",
"1",
"sval",
"=",
"match",
".",
"groupdict",
"(",
")",
"[",
"'unsigned'",
"]",
"for",
"timefmt",
"in",
"COMPILED_TIMEFORMATS",
":",
"match",
"=",
"timefmt",
".",
"match",
"(",
"sval",
")",
"if",
"match",
"and",
"match",
".",
"group",
"(",
"0",
")",
".",
"strip",
"(",
")",
":",
"mdict",
"=",
"match",
".",
"groupdict",
"(",
")",
"if",
"granularity",
"==",
"'minutes'",
":",
"mdict",
"=",
"_interpret_as_minutes",
"(",
"sval",
",",
"mdict",
")",
"# if all of the fields are integer numbers",
"if",
"all",
"(",
"v",
".",
"isdigit",
"(",
")",
"for",
"v",
"in",
"list",
"(",
"mdict",
".",
"values",
"(",
")",
")",
"if",
"v",
")",
":",
"return",
"sign",
"*",
"sum",
"(",
"[",
"MULTIPLIERS",
"[",
"k",
"]",
"*",
"int",
"(",
"v",
",",
"10",
")",
"for",
"(",
"k",
",",
"v",
")",
"in",
"list",
"(",
"mdict",
".",
"items",
"(",
")",
")",
"if",
"v",
"is",
"not",
"None",
"]",
")",
"# if SECS is an integer number",
"elif",
"(",
"'secs'",
"not",
"in",
"mdict",
"or",
"mdict",
"[",
"'secs'",
"]",
"is",
"None",
"or",
"mdict",
"[",
"'secs'",
"]",
".",
"isdigit",
"(",
")",
")",
":",
"# we will return an integer",
"return",
"(",
"sign",
"*",
"int",
"(",
"sum",
"(",
"[",
"MULTIPLIERS",
"[",
"k",
"]",
"*",
"float",
"(",
"v",
")",
"for",
"(",
"k",
",",
"v",
")",
"in",
"list",
"(",
"mdict",
".",
"items",
"(",
")",
")",
"if",
"k",
"!=",
"'secs'",
"and",
"v",
"is",
"not",
"None",
"]",
")",
")",
"+",
"(",
"int",
"(",
"mdict",
"[",
"'secs'",
"]",
",",
"10",
")",
"if",
"mdict",
"[",
"'secs'",
"]",
"else",
"0",
")",
")",
"else",
":",
"# SECS is a float, we will return a float",
"return",
"sign",
"*",
"sum",
"(",
"[",
"MULTIPLIERS",
"[",
"k",
"]",
"*",
"float",
"(",
"v",
")",
"for",
"(",
"k",
",",
"v",
")",
"in",
"list",
"(",
"mdict",
".",
"items",
"(",
")",
")",
"if",
"v",
"is",
"not",
"None",
"]",
")"
] |
Parse a time expression, returning it as a number of seconds. If
possible, the return value will be an `int`; if this is not
possible, the return will be a `float`. Returns `None` if a time
expression cannot be parsed from the given string.
Arguments:
- `sval`: the string value to parse
>>> timeparse('1:24')
84
>>> timeparse(':22')
22
>>> timeparse('1 minute, 24 secs')
84
>>> timeparse('1m24s')
84
>>> timeparse('1.2 minutes')
72
>>> timeparse('1.2 seconds')
1.2
Time expressions can be signed.
>>> timeparse('- 1 minute')
-60
>>> timeparse('+ 1 minute')
60
If granularity is specified as ``minutes``, then ambiguous digits following
a colon will be interpreted as minutes; otherwise they are considered seconds.
>>> timeparse('1:30')
90
>>> timeparse('1:30', granularity='minutes')
5400
|
[
"Parse",
"a",
"time",
"expression",
"returning",
"it",
"as",
"a",
"number",
"of",
"seconds",
".",
"If",
"possible",
"the",
"return",
"value",
"will",
"be",
"an",
"int",
";",
"if",
"this",
"is",
"not",
"possible",
"the",
"return",
"will",
"be",
"a",
"float",
".",
"Returns",
"None",
"if",
"a",
"time",
"expression",
"cannot",
"be",
"parsed",
"from",
"the",
"given",
"string",
"."
] |
dc7e783216b98a04d3f749bd82c863d6d7c41f6e
|
https://github.com/wroberts/pytimeparse/blob/dc7e783216b98a04d3f749bd82c863d6d7c41f6e/pytimeparse/timeparse.py#L118-L181
|
train
|
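A quick interactive check of the behaviours the docstring promises -- integer results where possible, floats otherwise, and the `granularity='minutes'` reinterpretation of a bare `1:30` -- assuming the `pytimeparse` package is installed:

from pytimeparse.timeparse import timeparse

print(timeparse('1m24s'))                        # 84 (int)
print(timeparse('1.2 seconds'))                  # 1.2 (float)
print(timeparse('- 1 minute'))                   # -60
print(timeparse('1:30'))                         # 90   -- seconds granularity
print(timeparse('1:30', granularity='minutes'))  # 5400 -- hh:mm reading
print(timeparse('nonsense'))                     # None
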
tylertreat/BigQuery-Python
|
bigquery/client.py
|
get_client
|
def get_client(project_id=None, credentials=None,
service_url=None, service_account=None,
private_key=None, private_key_file=None,
json_key=None, json_key_file=None,
readonly=True, swallow_results=True,
num_retries=0):
"""Return a singleton instance of BigQueryClient. Either
AssertionCredentials or a service account and private key combination need
to be provided in order to authenticate requests to BigQuery.
Parameters
----------
project_id : str, optional
The BigQuery project id, required unless json_key or json_key_file is
provided.
credentials : oauth2client.client.SignedJwtAssertionCredentials, optional
AssertionCredentials instance to authenticate requests to BigQuery
(optional, must provide `service_account` and (`private_key` or
`private_key_file`) or (`json_key` or `json_key_file`) if not included
service_url : str, optional
A URI string template pointing to the location of Google's API
discovery service. Requires two parameters {api} and {apiVersion} that
when filled in produce an absolute URI to the discovery document for
that service. If not set then the default googleapiclient discovery URI
is used. See `credentials`
service_account : str, optional
The Google API service account name. See `credentials`
private_key : str, optional
The private key associated with the service account in PKCS12 or PEM
format. See `credentials`
private_key_file : str, optional
The name of the file containing the private key associated with the
service account in PKCS12 or PEM format. See `credentials`
json_key : dict, optional
The JSON key associated with the service account. See `credentials`
json_key_file : str, optional
The name of the JSON key file associated with the service account. See
`credentials`.
readonly : bool
Bool indicating if BigQuery access is read-only. Has no effect if
credentials are provided. Default True.
swallow_results : bool
If set to False, then return the actual response value instead of
converting to boolean. Default True.
num_retries : int, optional
The number of times to retry the request. Default 0 (no retry).
Returns
-------
BigQueryClient
An instance of the BigQuery client.
"""
if not credentials:
assert (service_account and (private_key or private_key_file)) or (
json_key or json_key_file), \
'Must provide AssertionCredentials or service account and P12 key\
or JSON key'
if not project_id:
assert json_key or json_key_file, \
'Must provide project_id unless json_key or json_key_file is\
provided'
if service_url is None:
service_url = DISCOVERY_URI
scope = BIGQUERY_SCOPE_READ_ONLY if readonly else BIGQUERY_SCOPE
if private_key_file:
credentials = _credentials().from_p12_keyfile(service_account,
private_key_file,
scopes=scope)
if private_key:
try:
if isinstance(private_key, basestring):
private_key = private_key.decode('utf-8')
except NameError:
# python3 -- private_key is already unicode
pass
credentials = _credentials().from_p12_keyfile_buffer(
service_account,
StringIO(private_key),
scopes=scope)
if json_key_file:
with open(json_key_file, 'r') as key_file:
json_key = json.load(key_file)
if json_key:
credentials = _credentials().from_json_keyfile_dict(json_key,
scopes=scope)
if not project_id:
project_id = json_key['project_id']
bq_service = _get_bq_service(credentials=credentials,
service_url=service_url)
return BigQueryClient(bq_service, project_id, swallow_results,
num_retries)
|
python
|
def get_client(project_id=None, credentials=None,
service_url=None, service_account=None,
private_key=None, private_key_file=None,
json_key=None, json_key_file=None,
readonly=True, swallow_results=True,
num_retries=0):
"""Return a singleton instance of BigQueryClient. Either
AssertionCredentials or a service account and private key combination need
to be provided in order to authenticate requests to BigQuery.
Parameters
----------
project_id : str, optional
The BigQuery project id, required unless json_key or json_key_file is
provided.
credentials : oauth2client.client.SignedJwtAssertionCredentials, optional
AssertionCredentials instance to authenticate requests to BigQuery
(optional, must provide `service_account` and (`private_key` or
`private_key_file`) or (`json_key` or `json_key_file`) if not included)
service_url : str, optional
A URI string template pointing to the location of Google's API
discovery service. Requires two parameters {api} and {apiVersion} that
when filled in produce an absolute URI to the discovery document for
that service. If not set then the default googleapiclient discovery URI
is used. See `credentials`
service_account : str, optional
The Google API service account name. See `credentials`
private_key : str, optional
The private key associated with the service account in PKCS12 or PEM
format. See `credentials`
private_key_file : str, optional
The name of the file containing the private key associated with the
service account in PKCS12 or PEM format. See `credentials`
json_key : dict, optional
The JSON key associated with the service account. See `credentials`
json_key_file : str, optional
The name of the JSON key file associated with the service account. See
`credentials`.
readonly : bool
Bool indicating if BigQuery access is read-only. Has no effect if
credentials are provided. Default True.
swallow_results : bool
If set to False, then return the actual response value instead of
converting to boolean. Default True.
num_retries : int, optional
The number of times to retry the request. Default 0 (no retry).
Returns
-------
BigQueryClient
An instance of the BigQuery client.
"""
if not credentials:
assert (service_account and (private_key or private_key_file)) or (
json_key or json_key_file), \
'Must provide AssertionCredentials or service account and P12 key\
or JSON key'
if not project_id:
assert json_key or json_key_file, \
'Must provide project_id unless json_key or json_key_file is\
provided'
if service_url is None:
service_url = DISCOVERY_URI
scope = BIGQUERY_SCOPE_READ_ONLY if readonly else BIGQUERY_SCOPE
if private_key_file:
credentials = _credentials().from_p12_keyfile(service_account,
private_key_file,
scopes=scope)
if private_key:
try:
if isinstance(private_key, basestring):
private_key = private_key.decode('utf-8')
except NameError:
# python3 -- private_key is already unicode
pass
credentials = _credentials().from_p12_keyfile_buffer(
service_account,
StringIO(private_key),
scopes=scope)
if json_key_file:
with open(json_key_file, 'r') as key_file:
json_key = json.load(key_file)
if json_key:
credentials = _credentials().from_json_keyfile_dict(json_key,
scopes=scope)
if not project_id:
project_id = json_key['project_id']
bq_service = _get_bq_service(credentials=credentials,
service_url=service_url)
return BigQueryClient(bq_service, project_id, swallow_results,
num_retries)
|
[
"def",
"get_client",
"(",
"project_id",
"=",
"None",
",",
"credentials",
"=",
"None",
",",
"service_url",
"=",
"None",
",",
"service_account",
"=",
"None",
",",
"private_key",
"=",
"None",
",",
"private_key_file",
"=",
"None",
",",
"json_key",
"=",
"None",
",",
"json_key_file",
"=",
"None",
",",
"readonly",
"=",
"True",
",",
"swallow_results",
"=",
"True",
",",
"num_retries",
"=",
"0",
")",
":",
"if",
"not",
"credentials",
":",
"assert",
"(",
"service_account",
"and",
"(",
"private_key",
"or",
"private_key_file",
")",
")",
"or",
"(",
"json_key",
"or",
"json_key_file",
")",
",",
"'Must provide AssertionCredentials or service account and P12 key\\\n or JSON key'",
"if",
"not",
"project_id",
":",
"assert",
"json_key",
"or",
"json_key_file",
",",
"'Must provide project_id unless json_key or json_key_file is\\\n provided'",
"if",
"service_url",
"is",
"None",
":",
"service_url",
"=",
"DISCOVERY_URI",
"scope",
"=",
"BIGQUERY_SCOPE_READ_ONLY",
"if",
"readonly",
"else",
"BIGQUERY_SCOPE",
"if",
"private_key_file",
":",
"credentials",
"=",
"_credentials",
"(",
")",
".",
"from_p12_keyfile",
"(",
"service_account",
",",
"private_key_file",
",",
"scopes",
"=",
"scope",
")",
"if",
"private_key",
":",
"try",
":",
"if",
"isinstance",
"(",
"private_key",
",",
"basestring",
")",
":",
"private_key",
"=",
"private_key",
".",
"decode",
"(",
"'utf-8'",
")",
"except",
"NameError",
":",
"# python3 -- private_key is already unicode",
"pass",
"credentials",
"=",
"_credentials",
"(",
")",
".",
"from_p12_keyfile_buffer",
"(",
"service_account",
",",
"StringIO",
"(",
"private_key",
")",
",",
"scopes",
"=",
"scope",
")",
"if",
"json_key_file",
":",
"with",
"open",
"(",
"json_key_file",
",",
"'r'",
")",
"as",
"key_file",
":",
"json_key",
"=",
"json",
".",
"load",
"(",
"key_file",
")",
"if",
"json_key",
":",
"credentials",
"=",
"_credentials",
"(",
")",
".",
"from_json_keyfile_dict",
"(",
"json_key",
",",
"scopes",
"=",
"scope",
")",
"if",
"not",
"project_id",
":",
"project_id",
"=",
"json_key",
"[",
"'project_id'",
"]",
"bq_service",
"=",
"_get_bq_service",
"(",
"credentials",
"=",
"credentials",
",",
"service_url",
"=",
"service_url",
")",
"return",
"BigQueryClient",
"(",
"bq_service",
",",
"project_id",
",",
"swallow_results",
",",
"num_retries",
")"
] |
Return a singleton instance of BigQueryClient. Either
AssertionCredentials or a service account and private key combination need
to be provided in order to authenticate requests to BigQuery.
Parameters
----------
project_id : str, optional
The BigQuery project id, required unless json_key or json_key_file is
provided.
credentials : oauth2client.client.SignedJwtAssertionCredentials, optional
AssertionCredentials instance to authenticate requests to BigQuery
(optional, must provide `service_account` and (`private_key` or
`private_key_file`) or (`json_key` or `json_key_file`) if not included)
service_url : str, optional
A URI string template pointing to the location of Google's API
discovery service. Requires two parameters {api} and {apiVersion} that
when filled in produce an absolute URI to the discovery document for
that service. If not set then the default googleapiclient discovery URI
is used. See `credentials`
service_account : str, optional
The Google API service account name. See `credentials`
private_key : str, optional
The private key associated with the service account in PKCS12 or PEM
format. See `credentials`
private_key_file : str, optional
The name of the file containing the private key associated with the
service account in PKCS12 or PEM format. See `credentials`
json_key : dict, optional
The JSON key associated with the service account. See `credentials`
json_key_file : str, optional
The name of the JSON key file associated with the service account. See
`credentials`.
readonly : bool
Bool indicating if BigQuery access is read-only. Has no effect if
credentials are provided. Default True.
swallow_results : bool
If set to False, then return the actual response value instead of
converting to boolean. Default True.
num_retries : int, optional
The number of times to retry the request. Default 0 (no retry).
Returns
-------
BigQueryClient
An instance of the BigQuery client.
|
[
"Return",
"a",
"singleton",
"instance",
"of",
"BigQueryClient",
".",
"Either",
"AssertionCredentials",
"or",
"a",
"service",
"account",
"and",
"private",
"key",
"combination",
"need",
"to",
"be",
"provided",
"in",
"order",
"to",
"authenticate",
"requests",
"to",
"BigQuery",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L54-L155
|
train
|
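A minimal usage sketch for the factory above, assuming the package exposes `get_client` at the top level (as this repository's README suggests); the key file path is a placeholder:

# Hypothetical example: build a read-only client from a JSON key file.
# 'key.json' is a placeholder path, not a real credential.
from bigquery import get_client

client = get_client(json_key_file='key.json', readonly=True)
# project_id may be omitted here: as the code above shows, it is read
# from the JSON key's 'project_id' field when not supplied.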
tylertreat/BigQuery-Python
|
bigquery/client.py
|
get_projects
|
def get_projects(bq_service):
"""Given the BigQuery service, return data about all projects."""
projects_request = bq_service.projects().list().execute()
projects = []
for project in projects_request.get('projects', []):
project_data = {
'id': project['id'],
'name': project['friendlyName']
}
projects.append(project_data)
return projects
|
python
|
def get_projects(bq_service):
"""Given the BigQuery service, return data about all projects."""
projects_request = bq_service.projects().list().execute()
projects = []
for project in projects_request.get('projects', []):
project_data = {
'id': project['id'],
'name': project['friendlyName']
}
projects.append(project_data)
return projects
|
[
"def",
"get_projects",
"(",
"bq_service",
")",
":",
"projects_request",
"=",
"bq_service",
".",
"projects",
"(",
")",
".",
"list",
"(",
")",
".",
"execute",
"(",
")",
"projects",
"=",
"[",
"]",
"for",
"project",
"in",
"projects_request",
".",
"get",
"(",
"'projects'",
",",
"[",
"]",
")",
":",
"project_data",
"=",
"{",
"'id'",
":",
"project",
"[",
"'id'",
"]",
",",
"'name'",
":",
"project",
"[",
"'friendlyName'",
"]",
"}",
"projects",
".",
"append",
"(",
"project_data",
")",
"return",
"projects"
] |
Given the BigQuery service, return data about all projects.
|
[
"Given",
"the",
"BigQuery",
"service",
"return",
"data",
"about",
"all",
"projects",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L158-L169
|
train
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
_get_bq_service
|
def _get_bq_service(credentials=None, service_url=None):
"""Construct an authorized BigQuery service object."""
assert credentials, 'Must provide ServiceAccountCredentials'
http = credentials.authorize(Http())
service = build(
'bigquery',
'v2',
http=http,
discoveryServiceUrl=service_url,
cache_discovery=False
)
return service
|
python
|
def _get_bq_service(credentials=None, service_url=None):
"""Construct an authorized BigQuery service object."""
assert credentials, 'Must provide ServiceAccountCredentials'
http = credentials.authorize(Http())
service = build(
'bigquery',
'v2',
http=http,
discoveryServiceUrl=service_url,
cache_discovery=False
)
return service
|
[
"def",
"_get_bq_service",
"(",
"credentials",
"=",
"None",
",",
"service_url",
"=",
"None",
")",
":",
"assert",
"credentials",
",",
"'Must provide ServiceAccountCredentials'",
"http",
"=",
"credentials",
".",
"authorize",
"(",
"Http",
"(",
")",
")",
"service",
"=",
"build",
"(",
"'bigquery'",
",",
"'v2'",
",",
"http",
"=",
"http",
",",
"discoveryServiceUrl",
"=",
"service_url",
",",
"cache_discovery",
"=",
"False",
")",
"return",
"service"
] |
Construct an authorized BigQuery service object.
|
[
"Construct",
"an",
"authorized",
"BigQuery",
"service",
"object",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L172-L186
|
train
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient._submit_query_job
|
def _submit_query_job(self, query_data):
""" Submit a query job to BigQuery.
This is similar to BigQueryClient.query, but gives the user
direct access to the query method on the official BigQuery
python client.
For fine-grained control over a query job, see:
https://google-api-client-libraries.appspot.com/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#query
Parameters
----------
query_data
query object as per "configuration.query" in
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query
Returns
-------
tuple
job id and query results if query completed. If dry_run is True,
job id will be None and results will be empty if the query is valid
or a dict containing the response if invalid.
Raises
------
BigQueryTimeoutException
On timeout
"""
logger.debug('Submitting query job: %s' % query_data)
job_collection = self.bigquery.jobs()
try:
query_reply = job_collection.query(
projectId=self.project_id, body=query_data).execute(
num_retries=self.num_retries)
except HttpError as e:
if query_data.get("dryRun", False):
return None, json.loads(e.content.decode('utf8'))
raise
job_id = query_reply['jobReference'].get('jobId')
schema = query_reply.get('schema', {'fields': None})['fields']
rows = query_reply.get('rows', [])
job_complete = query_reply.get('jobComplete', False)
# raise exceptions if it's not an async query
# and job is not completed after timeout
if not job_complete and query_data.get("timeoutMs", False):
logger.error('BigQuery job %s timeout' % job_id)
raise BigQueryTimeoutException()
return job_id, [self._transform_row(row, schema) for row in rows]
|
python
|
def _submit_query_job(self, query_data):
""" Submit a query job to BigQuery.
This is similar to BigQueryClient.query, but gives the user
direct access to the query method on the official BigQuery
python client.
For fine-grained control over a query job, see:
https://google-api-client-libraries.appspot.com/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#query
Parameters
----------
query_data
query object as per "configuration.query" in
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query
Returns
-------
tuple
job id and query results if query completed. If dry_run is True,
job id will be None and results will be empty if the query is valid
or a dict containing the response if invalid.
Raises
------
BigQueryTimeoutException
On timeout
"""
logger.debug('Submitting query job: %s' % query_data)
job_collection = self.bigquery.jobs()
try:
query_reply = job_collection.query(
projectId=self.project_id, body=query_data).execute(
num_retries=self.num_retries)
except HttpError as e:
if query_data.get("dryRun", False):
return None, json.loads(e.content.decode('utf8'))
raise
job_id = query_reply['jobReference'].get('jobId')
schema = query_reply.get('schema', {'fields': None})['fields']
rows = query_reply.get('rows', [])
job_complete = query_reply.get('jobComplete', False)
# raise exceptions if it's not an async query
# and job is not completed after timeout
if not job_complete and query_data.get("timeoutMs", False):
logger.error('BigQuery job %s timeout' % job_id)
raise BigQueryTimeoutException()
return job_id, [self._transform_row(row, schema) for row in rows]
|
[
"def",
"_submit_query_job",
"(",
"self",
",",
"query_data",
")",
":",
"logger",
".",
"debug",
"(",
"'Submitting query job: %s'",
"%",
"query_data",
")",
"job_collection",
"=",
"self",
".",
"bigquery",
".",
"jobs",
"(",
")",
"try",
":",
"query_reply",
"=",
"job_collection",
".",
"query",
"(",
"projectId",
"=",
"self",
".",
"project_id",
",",
"body",
"=",
"query_data",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"except",
"HttpError",
"as",
"e",
":",
"if",
"query_data",
".",
"get",
"(",
"\"dryRun\"",
",",
"False",
")",
":",
"return",
"None",
",",
"json",
".",
"loads",
"(",
"e",
".",
"content",
".",
"decode",
"(",
"'utf8'",
")",
")",
"raise",
"job_id",
"=",
"query_reply",
"[",
"'jobReference'",
"]",
".",
"get",
"(",
"'jobId'",
")",
"schema",
"=",
"query_reply",
".",
"get",
"(",
"'schema'",
",",
"{",
"'fields'",
":",
"None",
"}",
")",
"[",
"'fields'",
"]",
"rows",
"=",
"query_reply",
".",
"get",
"(",
"'rows'",
",",
"[",
"]",
")",
"job_complete",
"=",
"query_reply",
".",
"get",
"(",
"'jobComplete'",
",",
"False",
")",
"# raise exceptions if it's not an async query",
"# and job is not completed after timeout",
"if",
"not",
"job_complete",
"and",
"query_data",
".",
"get",
"(",
"\"timeoutMs\"",
",",
"False",
")",
":",
"logger",
".",
"error",
"(",
"'BigQuery job %s timeout'",
"%",
"job_id",
")",
"raise",
"BigQueryTimeoutException",
"(",
")",
"return",
"job_id",
",",
"[",
"self",
".",
"_transform_row",
"(",
"row",
",",
"schema",
")",
"for",
"row",
"in",
"rows",
"]"
] |
Submit a query job to BigQuery.
This is similar to BigQueryClient.query, but gives the user
direct access to the query method on the official BigQuery
python client.
For fine-grained control over a query job, see:
https://google-api-client-libraries.appspot.com/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#query
Parameters
----------
query_data
query object as per "configuration.query" in
https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.query
Returns
-------
tuple
job id and query results if query completed. If dry_run is True,
job id will be None and results will be empty if the query is valid
or a dict containing the response if invalid.
Raises
------
BigQueryTimeoutException
On timeout
|
[
"Submit",
"a",
"query",
"job",
"to",
"BigQuery",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L226-L279
|
train
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient._insert_job
|
def _insert_job(self, body_object):
""" Submit a job to BigQuery
Direct proxy to the insert() method of the official BigQuery
python client.
Able to submit load, link, query, copy, or extract jobs.
For more details, see:
https://google-api-client-libraries.appspot.com/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#insert
Parameters
----------
body_object : body object passed to bigquery.jobs().insert()
Returns
-------
response of the bigquery.jobs().insert().execute() call
Raises
------
BigQueryTimeoutException on timeout
"""
logger.debug('Submitting job: %s' % body_object)
job_collection = self.bigquery.jobs()
return job_collection.insert(
projectId=self.project_id,
body=body_object
).execute(num_retries=self.num_retries)
|
python
|
def _insert_job(self, body_object):
""" Submit a job to BigQuery
Direct proxy to the insert() method of the official BigQuery
python client.
Able to submit load, link, query, copy, or extract jobs.
For more details, see:
https://google-api-client-libraries.appspot.com/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#insert
Parameters
----------
body_object : body object passed to bigquery.jobs().insert()
Returns
-------
response of the bigquery.jobs().insert().execute() call
Raises
------
BigQueryTimeoutException on timeout
"""
logger.debug('Submitting job: %s' % body_object)
job_collection = self.bigquery.jobs()
return job_collection.insert(
projectId=self.project_id,
body=body_object
).execute(num_retries=self.num_retries)
|
[
"def",
"_insert_job",
"(",
"self",
",",
"body_object",
")",
":",
"logger",
".",
"debug",
"(",
"'Submitting job: %s'",
"%",
"body_object",
")",
"job_collection",
"=",
"self",
".",
"bigquery",
".",
"jobs",
"(",
")",
"return",
"job_collection",
".",
"insert",
"(",
"projectId",
"=",
"self",
".",
"project_id",
",",
"body",
"=",
"body_object",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")"
] |
Submit a job to BigQuery
Direct proxy to the insert() method of the official BigQuery
python client.
Able to submit load, link, query, copy, or extract jobs.
For more details, see:
https://google-api-client-libraries.appspot.com/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#insert
Parameters
----------
body_object : body object passed to bigquery.jobs().insert()
Returns
-------
response of the bigquery.jobs().insert().execute() call
Raises
------
BigQueryTimeoutException on timeout
|
[
"Submit",
"a",
"job",
"to",
"BigQuery"
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L302-L333
|
train
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.query
|
def query(self, query, max_results=None, timeout=0, dry_run=False, use_legacy_sql=None, external_udf_uris=None):
"""Submit a query to BigQuery.
Parameters
----------
query : str
BigQuery query string
max_results : int, optional
The maximum number of rows to return per page of results.
timeout : float, optional
How long to wait for the query to complete, in seconds before
the request times out and returns.
dry_run : bool, optional
If True, the query isn't actually run. A valid query will return an
empty response, while an invalid one will return the same error
message it would if it wasn't a dry run.
use_legacy_sql : bool, optional. Default True.
If False, the query will use BigQuery's standard SQL (https://cloud.google.com/bigquery/sql-reference/)
external_udf_uris : list, optional
Contains external UDF URIs. If given, URIs must be Google Cloud
Storage and have .js extensions.
Returns
-------
tuple
(job id, query results) if the query completed. If dry_run is True,
job id will be None and results will be empty if the query is valid
or a ``dict`` containing the response if invalid.
Raises
------
BigQueryTimeoutException
on timeout
"""
logger.debug('Executing query: %s' % query)
query_data = {
'query': query,
'timeoutMs': timeout * 1000,
'dryRun': dry_run,
'maxResults': max_results
}
if use_legacy_sql is not None:
query_data['useLegacySql'] = use_legacy_sql
if external_udf_uris:
query_data['userDefinedFunctionResources'] = \
[ {'resourceUri': u} for u in external_udf_uris ]
return self._submit_query_job(query_data)
|
python
|
def query(self, query, max_results=None, timeout=0, dry_run=False, use_legacy_sql=None, external_udf_uris=None):
"""Submit a query to BigQuery.
Parameters
----------
query : str
BigQuery query string
max_results : int, optional
The maximum number of rows to return per page of results.
timeout : float, optional
How long to wait for the query to complete, in seconds before
the request times out and returns.
dry_run : bool, optional
If True, the query isn't actually run. A valid query will return an
empty response, while an invalid one will return the same error
message it would if it wasn't a dry run.
use_legacy_sql : bool, optional. Default True.
If False, the query will use BigQuery's standard SQL (https://cloud.google.com/bigquery/sql-reference/)
external_udf_uris : list, optional
Contains external UDF URIs. If given, URIs must be Google Cloud
Storage and have .js extensions.
Returns
-------
tuple
(job id, query results) if the query completed. If dry_run is True,
job id will be None and results will be empty if the query is valid
or a ``dict`` containing the response if invalid.
Raises
------
BigQueryTimeoutException
on timeout
"""
logger.debug('Executing query: %s' % query)
query_data = {
'query': query,
'timeoutMs': timeout * 1000,
'dryRun': dry_run,
'maxResults': max_results
}
if use_legacy_sql is not None:
query_data['useLegacySql'] = use_legacy_sql
if external_udf_uris:
query_data['userDefinedFunctionResources'] = \
[ {'resourceUri': u} for u in external_udf_uris ]
return self._submit_query_job(query_data)
|
[
"def",
"query",
"(",
"self",
",",
"query",
",",
"max_results",
"=",
"None",
",",
"timeout",
"=",
"0",
",",
"dry_run",
"=",
"False",
",",
"use_legacy_sql",
"=",
"None",
",",
"external_udf_uris",
"=",
"None",
")",
":",
"logger",
".",
"debug",
"(",
"'Executing query: %s'",
"%",
"query",
")",
"query_data",
"=",
"{",
"'query'",
":",
"query",
",",
"'timeoutMs'",
":",
"timeout",
"*",
"1000",
",",
"'dryRun'",
":",
"dry_run",
",",
"'maxResults'",
":",
"max_results",
"}",
"if",
"use_legacy_sql",
"is",
"not",
"None",
":",
"query_data",
"[",
"'useLegacySql'",
"]",
"=",
"use_legacy_sql",
"if",
"external_udf_uris",
":",
"query_data",
"[",
"'userDefinedFunctionResources'",
"]",
"=",
"[",
"{",
"'resourceUri'",
":",
"u",
"}",
"for",
"u",
"in",
"external_udf_uris",
"]",
"return",
"self",
".",
"_submit_query_job",
"(",
"query_data",
")"
] |
Submit a query to BigQuery.
Parameters
----------
query : str
BigQuery query string
max_results : int, optional
The maximum number of rows to return per page of results.
timeout : float, optional
How long to wait for the query to complete, in seconds before
the request times out and returns.
dry_run : bool, optional
If True, the query isn't actually run. A valid query will return an
empty response, while an invalid one will return the same error
message it would if it wasn't a dry run.
use_legacy_sql : bool, optional. Default True.
If False, the query will use BigQuery's standard SQL (https://cloud.google.com/bigquery/sql-reference/)
external_udf_uris : list, optional
Contains external UDF URIs. If given, URIs must be Google Cloud
Storage and have .js extensions.
Returns
-------
tuple
(job id, query results) if the query completed. If dry_run is True,
job id will be None and results will be empty if the query is valid
or a ``dict`` containing the response if invalid.
Raises
------
BigQueryTimeoutException
on timeout
|
[
"Submit",
"a",
"query",
"to",
"BigQuery",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L335-L387
|
train
|
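A sketch of a synchronous query against the client built earlier; the SQL and timeout values are illustrative only:

# A non-zero timeout makes query() wait for completion and raise
# BigQueryTimeoutException if the job is still running afterwards.
job_id, results = client.query(
    'SELECT word FROM [publicdata:samples.shakespeare] LIMIT 10',
    timeout=10)
for row in results:
    print(row)  # each record is a dict produced by _transform_row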
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.get_query_schema
|
def get_query_schema(self, job_id):
"""Retrieve the schema of a query by job id.
Parameters
----------
job_id : str
The job_id that references a BigQuery query
Returns
-------
list
A ``list`` of ``dict`` objects that represent the schema.
"""
query_reply = self.get_query_results(job_id, offset=0, limit=0)
if not query_reply['jobComplete']:
logger.warning('BigQuery job %s not complete' % job_id)
raise UnfinishedQueryException()
return query_reply['schema']['fields']
|
python
|
def get_query_schema(self, job_id):
"""Retrieve the schema of a query by job id.
Parameters
----------
job_id : str
The job_id that references a BigQuery query
Returns
-------
list
A ``list`` of ``dict`` objects that represent the schema.
"""
query_reply = self.get_query_results(job_id, offset=0, limit=0)
if not query_reply['jobComplete']:
logger.warning('BigQuery job %s not complete' % job_id)
raise UnfinishedQueryException()
return query_reply['schema']['fields']
|
[
"def",
"get_query_schema",
"(",
"self",
",",
"job_id",
")",
":",
"query_reply",
"=",
"self",
".",
"get_query_results",
"(",
"job_id",
",",
"offset",
"=",
"0",
",",
"limit",
"=",
"0",
")",
"if",
"not",
"query_reply",
"[",
"'jobComplete'",
"]",
":",
"logger",
".",
"warning",
"(",
"'BigQuery job %s not complete'",
"%",
"job_id",
")",
"raise",
"UnfinishedQueryException",
"(",
")",
"return",
"query_reply",
"[",
"'schema'",
"]",
"[",
"'fields'",
"]"
] |
Retrieve the schema of a query by job id.
Parameters
----------
job_id : str
The job_id that references a BigQuery query
Returns
-------
list
A ``list`` of ``dict`` objects that represent the schema.
|
[
"Retrieve",
"the",
"schema",
"of",
"a",
"query",
"by",
"job",
"id",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L389-L409
|
train
|
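Continuing the sketch, the result schema of a finished job can be inspected; `job_id` is assumed to come from a completed query() call:

# get_query_schema() raises UnfinishedQueryException while the job runs.
schema = client.get_query_schema(job_id)
for field in schema:
    print(field['name'], field['type'])  # standard BigQuery field dicts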
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.get_table_schema
|
def get_table_schema(self, dataset, table, project_id=None):
"""Return the table schema.
Parameters
----------
dataset : str
The dataset containing the `table`.
table : str
The table to get the schema for
project_id: str, optional
The project of the dataset.
Returns
-------
list
A ``list`` of ``dict`` objects that represent the table schema. If
the table doesn't exist, None is returned.
"""
project_id = self._get_project_id(project_id)
try:
result = self.bigquery.tables().get(
projectId=project_id,
tableId=table,
datasetId=dataset).execute(num_retries=self.num_retries)
except HttpError as e:
if int(e.resp['status']) == 404:
logger.warn('Table %s.%s does not exist', dataset, table)
return None
raise
return result['schema']['fields']
|
python
|
def get_table_schema(self, dataset, table, project_id=None):
"""Return the table schema.
Parameters
----------
dataset : str
The dataset containing the `table`.
table : str
The table to get the schema for
project_id: str, optional
The project of the dataset.
Returns
-------
list
A ``list`` of ``dict`` objects that represent the table schema. If
the table doesn't exist, None is returned.
"""
project_id = self._get_project_id(project_id)
try:
result = self.bigquery.tables().get(
projectId=project_id,
tableId=table,
datasetId=dataset).execute(num_retries=self.num_retries)
except HttpError as e:
if int(e.resp['status']) == 404:
logger.warn('Table %s.%s does not exist', dataset, table)
return None
raise
return result['schema']['fields']
|
[
"def",
"get_table_schema",
"(",
"self",
",",
"dataset",
",",
"table",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"try",
":",
"result",
"=",
"self",
".",
"bigquery",
".",
"tables",
"(",
")",
".",
"get",
"(",
"projectId",
"=",
"project_id",
",",
"tableId",
"=",
"table",
",",
"datasetId",
"=",
"dataset",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"except",
"HttpError",
"as",
"e",
":",
"if",
"int",
"(",
"e",
".",
"resp",
"[",
"'status'",
"]",
")",
"==",
"404",
":",
"logger",
".",
"warn",
"(",
"'Table %s.%s does not exist'",
",",
"dataset",
",",
"table",
")",
"return",
"None",
"raise",
"return",
"result",
"[",
"'schema'",
"]",
"[",
"'fields'",
"]"
] |
Return the table schema.
Parameters
----------
dataset : str
The dataset containing the `table`.
table : str
The table to get the schema for
project_id: str, optional
The project of the dataset.
Returns
-------
list
A ``list`` of ``dict`` objects that represent the table schema. If
the table doesn't exist, None is returned.
|
[
"Return",
"the",
"table",
"schema",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L411-L442
|
train
|
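A corresponding sketch for table schemas; the dataset and table names are placeholders:

# Returns None when the table is missing (the 404 is caught above).
fields = client.get_table_schema('my_dataset', 'my_table')
if fields is None:
    print('table does not exist')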
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.check_job
|
def check_job(self, job_id):
"""Return the state and number of results of a query by job id.
Parameters
----------
job_id : str
The job id of the query to check.
Returns
-------
tuple
(``bool``, ``int``) Whether or not the query has completed and the
total number of rows included in the query table if it has
completed (else 0)
"""
query_reply = self.get_query_results(job_id, offset=0, limit=0)
return (query_reply.get('jobComplete', False),
int(query_reply.get('totalRows', 0)))
|
python
|
def check_job(self, job_id):
"""Return the state and number of results of a query by job id.
Parameters
----------
job_id : str
The job id of the query to check.
Returns
-------
tuple
(``bool``, ``int``) Whether or not the query has completed and the
total number of rows included in the query table if it has
completed (else 0)
"""
query_reply = self.get_query_results(job_id, offset=0, limit=0)
return (query_reply.get('jobComplete', False),
int(query_reply.get('totalRows', 0)))
|
[
"def",
"check_job",
"(",
"self",
",",
"job_id",
")",
":",
"query_reply",
"=",
"self",
".",
"get_query_results",
"(",
"job_id",
",",
"offset",
"=",
"0",
",",
"limit",
"=",
"0",
")",
"return",
"(",
"query_reply",
".",
"get",
"(",
"'jobComplete'",
",",
"False",
")",
",",
"int",
"(",
"query_reply",
".",
"get",
"(",
"'totalRows'",
",",
"0",
")",
")",
")"
] |
Return the state and number of results of a query by job id.
Parameters
----------
job_id : str
The job id of the query to check.
Returns
-------
tuple
(``bool``, ``int``) Whether or not the query has completed and the
total number of rows included in the query table if it has
completed (else 0)
|
[
"Return",
"the",
"state",
"and",
"number",
"of",
"results",
"of",
"a",
"query",
"by",
"job",
"id",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L444-L463
|
train
|
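The asynchronous pattern implied by check_job(), as a sketch; the SQL and polling interval are arbitrary choices:

import time

# With the default timeout=0, query() returns before the job finishes,
# so poll check_job() until it reports completion.
job_id, _ = client.query('SELECT 1')
complete, total_rows = client.check_job(job_id)
while not complete:
    time.sleep(5)
    complete, total_rows = client.check_job(job_id)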
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.get_query_rows
|
def get_query_rows(self, job_id, offset=None, limit=None, timeout=0):
"""Retrieve a list of rows from a query table by job id.
This method will append results from multiple pages together. If you
want to manually page through results, you can use `get_query_results`
method directly.
Parameters
----------
job_id : str
The job id that references a BigQuery query.
offset : int, optional
The offset of the rows to pull from BigQuery
limit : int, optional
The number of rows to retrieve from a query table.
timeout : float, optional
Timeout in seconds.
Returns
-------
list
A ``list`` of ``dict`` objects that represent table rows.
"""
# Get query results
query_reply = self.get_query_results(job_id, offset=offset,
limit=limit, timeout=timeout)
if not query_reply['jobComplete']:
logger.warning('BigQuery job %s not complete' % job_id)
raise UnfinishedQueryException()
schema = query_reply["schema"]["fields"]
rows = query_reply.get('rows', [])
page_token = query_reply.get("pageToken")
records = [self._transform_row(row, schema) for row in rows]
# Append to records if there are multiple pages for query results
while page_token and (not limit or len(records) < limit):
query_reply = self.get_query_results(
job_id, offset=offset, limit=limit, page_token=page_token,
timeout=timeout)
page_token = query_reply.get("pageToken")
rows = query_reply.get('rows', [])
records += [self._transform_row(row, schema) for row in rows]
return records[:limit] if limit else records
|
python
|
def get_query_rows(self, job_id, offset=None, limit=None, timeout=0):
"""Retrieve a list of rows from a query table by job id.
This method will append results from multiple pages together. If you
want to manually page through results, you can use `get_query_results`
method directly.
Parameters
----------
job_id : str
The job id that references a BigQuery query.
offset : int, optional
The offset of the rows to pull from BigQuery
limit : int, optional
The number of rows to retrieve from a query table.
timeout : float, optional
Timeout in seconds.
Returns
-------
list
A ``list`` of ``dict`` objects that represent table rows.
"""
# Get query results
query_reply = self.get_query_results(job_id, offset=offset,
limit=limit, timeout=timeout)
if not query_reply['jobComplete']:
logger.warning('BigQuery job %s not complete' % job_id)
raise UnfinishedQueryException()
schema = query_reply["schema"]["fields"]
rows = query_reply.get('rows', [])
page_token = query_reply.get("pageToken")
records = [self._transform_row(row, schema) for row in rows]
# Append to records if there are multiple pages for query results
while page_token and (not limit or len(records) < limit):
query_reply = self.get_query_results(
job_id, offset=offset, limit=limit, page_token=page_token,
timeout=timeout)
page_token = query_reply.get("pageToken")
rows = query_reply.get('rows', [])
records += [self._transform_row(row, schema) for row in rows]
return records[:limit] if limit else records
|
[
"def",
"get_query_rows",
"(",
"self",
",",
"job_id",
",",
"offset",
"=",
"None",
",",
"limit",
"=",
"None",
",",
"timeout",
"=",
"0",
")",
":",
"# Get query results",
"query_reply",
"=",
"self",
".",
"get_query_results",
"(",
"job_id",
",",
"offset",
"=",
"offset",
",",
"limit",
"=",
"limit",
",",
"timeout",
"=",
"timeout",
")",
"if",
"not",
"query_reply",
"[",
"'jobComplete'",
"]",
":",
"logger",
".",
"warning",
"(",
"'BigQuery job %s not complete'",
"%",
"job_id",
")",
"raise",
"UnfinishedQueryException",
"(",
")",
"schema",
"=",
"query_reply",
"[",
"\"schema\"",
"]",
"[",
"\"fields\"",
"]",
"rows",
"=",
"query_reply",
".",
"get",
"(",
"'rows'",
",",
"[",
"]",
")",
"page_token",
"=",
"query_reply",
".",
"get",
"(",
"\"pageToken\"",
")",
"records",
"=",
"[",
"self",
".",
"_transform_row",
"(",
"row",
",",
"schema",
")",
"for",
"row",
"in",
"rows",
"]",
"# Append to records if there are multiple pages for query results",
"while",
"page_token",
"and",
"(",
"not",
"limit",
"or",
"len",
"(",
"records",
")",
"<",
"limit",
")",
":",
"query_reply",
"=",
"self",
".",
"get_query_results",
"(",
"job_id",
",",
"offset",
"=",
"offset",
",",
"limit",
"=",
"limit",
",",
"page_token",
"=",
"page_token",
",",
"timeout",
"=",
"timeout",
")",
"page_token",
"=",
"query_reply",
".",
"get",
"(",
"\"pageToken\"",
")",
"rows",
"=",
"query_reply",
".",
"get",
"(",
"'rows'",
",",
"[",
"]",
")",
"records",
"+=",
"[",
"self",
".",
"_transform_row",
"(",
"row",
",",
"schema",
")",
"for",
"row",
"in",
"rows",
"]",
"return",
"records",
"[",
":",
"limit",
"]",
"if",
"limit",
"else",
"records"
] |
Retrieve a list of rows from a query table by job id.
This method will append results from multiple pages together. If you
want to manually page through results, you can use `get_query_results`
method directly.
Parameters
----------
job_id : str
The job id that references a BigQuery query.
offset : int, optional
The offset of the rows to pull from BigQuery
limit : int, optional
The number of rows to retrieve from a query table.
timeout : float, optional
Timeout in seconds.
Returns
-------
list
A ``list`` of ``dict`` objects that represent table rows.
|
[
"Retrieve",
"a",
"list",
"of",
"rows",
"from",
"a",
"query",
"table",
"by",
"job",
"id",
".",
"This",
"method",
"will",
"append",
"results",
"from",
"multiple",
"pages",
"together",
".",
"If",
"you",
"want",
"to",
"manually",
"page",
"through",
"results",
"you",
"can",
"use",
"get_query_results",
"method",
"directly",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L465-L508
|
train
|
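Once check_job() reports completion, rows can be fetched with paging handled internally; the limit is illustrative:

# get_query_rows() follows pageToken across result pages and
# concatenates them; limit caps the total records returned.
rows = client.get_query_rows(job_id, limit=1000)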
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.check_dataset
|
def check_dataset(self, dataset_id, project_id=None):
"""Check to see if a dataset exists.
Parameters
----------
dataset_id : str
Dataset unique id
project_id: str, optional
The project the dataset is in
Returns
-------
bool
True if dataset at `dataset_id` exists, else False
"""
dataset = self.get_dataset(dataset_id, project_id)
return bool(dataset)
|
python
|
def check_dataset(self, dataset_id, project_id=None):
"""Check to see if a dataset exists.
Parameters
----------
dataset_id : str
Dataset unique id
project_id: str, optional
The project the dataset is in
Returns
-------
bool
True if dataset at `dataset_id` exists, else False
"""
dataset = self.get_dataset(dataset_id, project_id)
return bool(dataset)
|
[
"def",
"check_dataset",
"(",
"self",
",",
"dataset_id",
",",
"project_id",
"=",
"None",
")",
":",
"dataset",
"=",
"self",
".",
"get_dataset",
"(",
"dataset_id",
",",
"project_id",
")",
"return",
"bool",
"(",
"dataset",
")"
] |
Check to see if a dataset exists.
Parameters
----------
dataset_id : str
Dataset unique id
project_id: str, optional
The project the dataset is in
Returns
-------
bool
True if dataset at `dataset_id` exists, else False
|
[
"Check",
"to",
"see",
"if",
"a",
"dataset",
"exists",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L510-L526
|
train
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.get_dataset
|
def get_dataset(self, dataset_id, project_id=None):
"""Retrieve a dataset if it exists, otherwise return an empty dict.
Parameters
----------
dataset_id : str
Dataset unique id
project_id: str, optional
The project the dataset is in
Returns
-------
dict
Contains dataset object if it exists, else empty
"""
project_id = self._get_project_id(project_id)
try:
dataset = self.bigquery.datasets().get(
projectId=project_id, datasetId=dataset_id).execute(
num_retries=self.num_retries)
except HttpError:
dataset = {}
return dataset
|
python
|
def get_dataset(self, dataset_id, project_id=None):
"""Retrieve a dataset if it exists, otherwise return an empty dict.
Parameters
----------
dataset_id : str
Dataset unique id
project_id: str, optional
The project the dataset is in
Returns
-------
dict
Contains dataset object if it exists, else empty
"""
project_id = self._get_project_id(project_id)
try:
dataset = self.bigquery.datasets().get(
projectId=project_id, datasetId=dataset_id).execute(
num_retries=self.num_retries)
except HttpError:
dataset = {}
return dataset
|
[
"def",
"get_dataset",
"(",
"self",
",",
"dataset_id",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"try",
":",
"dataset",
"=",
"self",
".",
"bigquery",
".",
"datasets",
"(",
")",
".",
"get",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset_id",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"except",
"HttpError",
":",
"dataset",
"=",
"{",
"}",
"return",
"dataset"
] |
Retrieve a dataset if it exists, otherwise return an empty dict.
Parameters
----------
dataset_id : str
Dataset unique id
project_id: str, optional
The project the dataset is in
Returns
-------
dict
Contains dataset object if it exists, else empty
|
[
"Retrieve",
"a",
"dataset",
"if",
"it",
"exists",
"otherwise",
"return",
"an",
"empty",
"dict",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L528-L552
|
train
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.get_table
|
def get_table(self, dataset, table, project_id=None):
""" Retrieve a table if it exists, otherwise return an empty dict.
Parameters
----------
dataset : str
The dataset that the table is in
table : str
The name of the table
project_id: str, optional
The project that the table is in
Returns
-------
dict
Containing the table object if it exists, else empty
"""
project_id = self._get_project_id(project_id)
try:
table = self.bigquery.tables().get(
projectId=project_id, datasetId=dataset,
tableId=table).execute(num_retries=self.num_retries)
except HttpError:
table = {}
return table
|
python
|
def get_table(self, dataset, table, project_id=None):
""" Retrieve a table if it exists, otherwise return an empty dict.
Parameters
----------
dataset : str
The dataset that the table is in
table : str
The name of the table
project_id: str, optional
The project that the table is in
Returns
-------
dict
Containing the table object if it exists, else empty
"""
project_id = self._get_project_id(project_id)
try:
table = self.bigquery.tables().get(
projectId=project_id, datasetId=dataset,
tableId=table).execute(num_retries=self.num_retries)
except HttpError:
table = {}
return table
|
[
"def",
"get_table",
"(",
"self",
",",
"dataset",
",",
"table",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"try",
":",
"table",
"=",
"self",
".",
"bigquery",
".",
"tables",
"(",
")",
".",
"get",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset",
",",
"tableId",
"=",
"table",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"except",
"HttpError",
":",
"table",
"=",
"{",
"}",
"return",
"table"
] |
Retrieve a table if it exists, otherwise return an empty dict.
Parameters
----------
dataset : str
The dataset that the table is in
table : str
The name of the table
project_id: str, optional
The project that the table is in
Returns
-------
dict
Containing the table object if it exists, else empty
|
[
"Retrieve",
"a",
"table",
"if",
"it",
"exists",
"otherwise",
"return",
"an",
"empty",
"dict",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L574-L599
|
train
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.create_table
|
def create_table(self, dataset, table, schema,
expiration_time=None, time_partitioning=False,
project_id=None):
"""Create a new table in the dataset.
Parameters
----------
dataset : str
The dataset to create the table in
table : str
The name of the table to create
schema : dict
The table schema
expiration_time : int or float, optional
The expiry time in milliseconds since the epoch.
time_partitioning : bool, optional
If True, create the table with daily time partitioning.
project_id: str, optional
The project to create the table in
Returns
-------
Union[bool, dict]
If the table was successfully created, or response from BigQuery
if swallow_results is set to False
"""
project_id = self._get_project_id(project_id)
body = {
'schema': {'fields': schema},
'tableReference': {
'tableId': table,
'projectId': project_id,
'datasetId': dataset
}
}
if expiration_time is not None:
body['expirationTime'] = expiration_time
if time_partitioning:
body['timePartitioning'] = {'type': 'DAY'}
try:
table = self.bigquery.tables().insert(
projectId=project_id,
datasetId=dataset,
body=body
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return table
except HttpError as e:
logger.error(('Cannot create table {0}.{1}.{2}\n'
'Http Error: {3}').format(project_id, dataset, table, e.content))
if self.swallow_results:
return False
else:
return {}
|
python
|
def create_table(self, dataset, table, schema,
expiration_time=None, time_partitioning=False,
project_id=None):
"""Create a new table in the dataset.
Parameters
----------
dataset : str
The dataset to create the table in
table : str
The name of the table to create
schema : dict
The table schema
expiration_time : int or float, optional
The expiry time in milliseconds since the epoch.
time_partitioning : bool, optional
If True, create the table with daily time partitioning.
project_id: str, optional
The project to create the table in
Returns
-------
Union[bool, dict]
If the table was successfully created, or response from BigQuery
if swallow_results is set to False
"""
project_id = self._get_project_id(project_id)
body = {
'schema': {'fields': schema},
'tableReference': {
'tableId': table,
'projectId': project_id,
'datasetId': dataset
}
}
if expiration_time is not None:
body['expirationTime'] = expiration_time
if time_partitioning:
body['timePartitioning'] = {'type': 'DAY'}
try:
table = self.bigquery.tables().insert(
projectId=project_id,
datasetId=dataset,
body=body
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return table
except HttpError as e:
logger.error(('Cannot create table {0}.{1}.{2}\n'
'Http Error: {3}').format(project_id, dataset, table, e.content))
if self.swallow_results:
return False
else:
return {}
|
[
"def",
"create_table",
"(",
"self",
",",
"dataset",
",",
"table",
",",
"schema",
",",
"expiration_time",
"=",
"None",
",",
"time_partitioning",
"=",
"False",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"body",
"=",
"{",
"'schema'",
":",
"{",
"'fields'",
":",
"schema",
"}",
",",
"'tableReference'",
":",
"{",
"'tableId'",
":",
"table",
",",
"'projectId'",
":",
"project_id",
",",
"'datasetId'",
":",
"dataset",
"}",
"}",
"if",
"expiration_time",
"is",
"not",
"None",
":",
"body",
"[",
"'expirationTime'",
"]",
"=",
"expiration_time",
"if",
"time_partitioning",
":",
"body",
"[",
"'timePartitioning'",
"]",
"=",
"{",
"'type'",
":",
"'DAY'",
"}",
"try",
":",
"table",
"=",
"self",
".",
"bigquery",
".",
"tables",
"(",
")",
".",
"insert",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset",
",",
"body",
"=",
"body",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"True",
"else",
":",
"return",
"table",
"except",
"HttpError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"(",
"'Cannot create table {0}.{1}.{2}\\n'",
"'Http Error: {3}'",
")",
".",
"format",
"(",
"project_id",
",",
"dataset",
",",
"table",
",",
"e",
".",
"content",
")",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"False",
"else",
":",
"return",
"{",
"}"
] |
Create a new table in the dataset.
Parameters
----------
dataset : str
The dataset to create the table in
table : str
The name of the table to create
schema : dict
The table schema
expiration_time : int or float, optional
The expiry time in milliseconds since the epoch.
time_partitioning : bool, optional
If True, create the table with daily time partitioning.
project_id: str, optional
The project to create the table in
Returns
-------
Union[bool, dict]
If the table was successfully created, or response from BigQuery
if swallow_results is set to False
|
[
"Create",
"a",
"new",
"table",
"in",
"the",
"dataset",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L601-L661
|
train
|
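A sketch of creating a day-partitioned table; the schema follows BigQuery's 'fields' format and every name is a placeholder:

# Each schema entry mirrors the REST API's field resource.
schema = [
    {'name': 'id', 'type': 'STRING', 'mode': 'REQUIRED'},
    {'name': 'ts', 'type': 'TIMESTAMP', 'mode': 'NULLABLE'},
]
created = client.create_table('my_dataset', 'events', schema,
                              time_partitioning=True)
# With swallow_results=True (the default) this returns a bool.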
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.patch_table
|
def patch_table(self, dataset, table, schema, project_id=None):
"""Patch an existing table in the dataset.
Parameters
----------
dataset : str
The dataset to patch the table in
table : str
The name of the table to patch
schema : dict
The table schema
project_id: str, optional
The project to patch the table in
Returns
-------
Union[bool, dict]
Bool indicating if the table was successfully patched or not,
or response from BigQuery if swallow_results is set to False
"""
project_id = self._get_project_id(project_id)
body = {
'schema': {'fields': schema},
'tableReference': {
'tableId': table,
'projectId': project_id,
'datasetId': dataset
}
}
try:
result = self.bigquery.tables().patch(
projectId=project_id,
datasetId=dataset,
body=body
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return result
except HttpError as e:
logger.error(('Cannot patch table {0}.{1}.{2}\n'
'Http Error: {3}').format(project_id, dataset, table, e.content))
if self.swallow_results:
return False
else:
return {}
|
python
|
def patch_table(self, dataset, table, schema, project_id=None):
"""Patch an existing table in the dataset.
Parameters
----------
dataset : str
The dataset to patch the table in
table : str
The name of the table to patch
schema : dict
The table schema
project_id: str, optional
The project to patch the table in
Returns
-------
Union[bool, dict]
Bool indicating if the table was successfully patched or not,
or response from BigQuery if swallow_results is set to False
"""
project_id = self._get_project_id(project_id)
body = {
'schema': {'fields': schema},
'tableReference': {
'tableId': table,
'projectId': project_id,
'datasetId': dataset
}
}
try:
result = self.bigquery.tables().patch(
projectId=project_id,
datasetId=dataset,
body=body
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return result
except HttpError as e:
logger.error(('Cannot patch table {0}.{1}.{2}\n'
'Http Error: {3}').format(project_id, dataset, table, e.content))
if self.swallow_results:
return False
else:
return {}
|
[
"def",
"patch_table",
"(",
"self",
",",
"dataset",
",",
"table",
",",
"schema",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"body",
"=",
"{",
"'schema'",
":",
"{",
"'fields'",
":",
"schema",
"}",
",",
"'tableReference'",
":",
"{",
"'tableId'",
":",
"table",
",",
"'projectId'",
":",
"project_id",
",",
"'datasetId'",
":",
"dataset",
"}",
"}",
"try",
":",
"result",
"=",
"self",
".",
"bigquery",
".",
"tables",
"(",
")",
".",
"patch",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset",
",",
"body",
"=",
"body",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"True",
"else",
":",
"return",
"result",
"except",
"HttpError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"(",
"'Cannot patch table {0}.{1}.{2}\\n'",
"'Http Error: {3}'",
")",
".",
"format",
"(",
"project_id",
",",
"dataset",
",",
"table",
",",
"e",
".",
"content",
")",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"False",
"else",
":",
"return",
"{",
"}"
] |
Patch an existing table in the dataset.
Parameters
----------
dataset : str
The dataset to patch the table in
table : str
The name of the table to patch
schema : dict
The table schema
project_id: str, optional
The project to patch the table in
Returns
-------
Union[bool, dict]
Bool indicating if the table was successfully patched or not,
or response from BigQuery if swallow_results is set to False
|
[
"Patch",
"an",
"existing",
"table",
"in",
"the",
"dataset",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L714-L762
|
train
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.create_view
|
def create_view(self, dataset, view, query, use_legacy_sql=None, project_id=None):
"""Create a new view in the dataset.
Parameters
----------
dataset : str
The dataset to create the view in
view : str
The name of the view to create
query : str
A query that BigQuery executes when the view is referenced.
use_legacy_sql : bool, optional
If False, the query will use BigQuery's standard SQL
(https://cloud.google.com/bigquery/sql-reference/)
project_id: str, optional
The project to create the view in
Returns
-------
Union[bool, dict]
bool indicating if the view was successfully created or not,
or response from BigQuery if swallow_results is set to False.
"""
project_id = self._get_project_id(project_id)
body = {
'tableReference': {
'tableId': view,
'projectId': project_id,
'datasetId': dataset
},
'view': {
'query': query
}
}
if use_legacy_sql is not None:
body['view']['useLegacySql'] = use_legacy_sql
try:
view = self.bigquery.tables().insert(
projectId=project_id,
datasetId=dataset,
body=body
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return view
except HttpError as e:
logger.error(('Cannot create view {0}.{1}\n'
'Http Error: {2}').format(dataset, view, e.content))
if self.swallow_results:
return False
else:
return {}
|
python
|
def create_view(self, dataset, view, query, use_legacy_sql=None, project_id=None):
"""Create a new view in the dataset.
Parameters
----------
dataset : str
The dataset to create the view in
view : str
The name of the view to create
query : str
A query that BigQuery executes when the view is referenced.
use_legacy_sql : bool, optional
If False, the query will use BigQuery's standard SQL
(https://cloud.google.com/bigquery/sql-reference/)
project_id: str, optional
The project to create the view in
Returns
-------
Union[bool, dict]
bool indicating if the view was successfully created or not,
or response from BigQuery if swallow_results is set to False.
"""
project_id = self._get_project_id(project_id)
body = {
'tableReference': {
'tableId': view,
'projectId': project_id,
'datasetId': dataset
},
'view': {
'query': query
}
}
if use_legacy_sql is not None:
body['view']['useLegacySql'] = use_legacy_sql
try:
view = self.bigquery.tables().insert(
projectId=project_id,
datasetId=dataset,
body=body
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return view
except HttpError as e:
logger.error(('Cannot create view {0}.{1}\n'
'Http Error: {2}').format(dataset, view, e.content))
if self.swallow_results:
return False
else:
return {}
|
[
"def",
"create_view",
"(",
"self",
",",
"dataset",
",",
"view",
",",
"query",
",",
"use_legacy_sql",
"=",
"None",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"body",
"=",
"{",
"'tableReference'",
":",
"{",
"'tableId'",
":",
"view",
",",
"'projectId'",
":",
"project_id",
",",
"'datasetId'",
":",
"dataset",
"}",
",",
"'view'",
":",
"{",
"'query'",
":",
"query",
"}",
"}",
"if",
"use_legacy_sql",
"is",
"not",
"None",
":",
"body",
"[",
"'view'",
"]",
"[",
"'useLegacySql'",
"]",
"=",
"use_legacy_sql",
"try",
":",
"view",
"=",
"self",
".",
"bigquery",
".",
"tables",
"(",
")",
".",
"insert",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset",
",",
"body",
"=",
"body",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"True",
"else",
":",
"return",
"view",
"except",
"HttpError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"(",
"'Cannot create view {0}.{1}\\n'",
"'Http Error: {2}'",
")",
".",
"format",
"(",
"dataset",
",",
"view",
",",
"e",
".",
"content",
")",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"False",
"else",
":",
"return",
"{",
"}"
] |
Create a new view in the dataset.
Parameters
----------
dataset : str
The dataset to create the view in
view : str
The name of the view to create
query : str
A query that BigQuery executes when the view is referenced.
use_legacy_sql : bool, optional
If False, the query will use BigQuery's standard SQL
(https://cloud.google.com/bigquery/sql-reference/)
project_id: str, optional
The project to create the view in
Returns
-------
Union[bool, dict]
bool indicating if the view was successfully created or not,
or response from BigQuery if swallow_results is set to False.
|
[
"Create",
"a",
"new",
"view",
"in",
"the",
"dataset",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L764-L820
|
train
|
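A matching sketch for create_view; the view body is a SQL string, and all names are placeholders:

# use_legacy_sql=False selects standard SQL for the view definition.
ok = client.create_view(
    'my_dataset', 'recent_events',
    'SELECT * FROM my_dataset.events WHERE ts > "2019-01-01"',
    use_legacy_sql=False)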
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.delete_table
|
def delete_table(self, dataset, table, project_id=None):
"""Delete a table from the dataset.
Parameters
----------
dataset : str
The dataset to delete the table from.
table : str
The name of the table to delete
project_id: str, optional
String id of the project
Returns
-------
Union[bool, dict]
bool indicating if the table was successfully deleted or not,
or response from BigQuery if swallow_results is set to False.
"""
project_id = self._get_project_id(project_id)
try:
response = self.bigquery.tables().delete(
projectId=project_id,
datasetId=dataset,
tableId=table
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return response
except HttpError as e:
logger.error(('Cannot delete table {0}.{1}\n'
'Http Error: {2}').format(dataset, table, e.content))
if self.swallow_results:
return False
else:
return {}
|
python
|
def delete_table(self, dataset, table, project_id=None):
"""Delete a table from the dataset.
Parameters
----------
dataset : str
The dataset to delete the table from.
table : str
The name of the table to delete
project_id: str, optional
String id of the project
Returns
-------
Union[bool, dict]
bool indicating if the table was successfully deleted or not,
or response from BigQuery if swallow_results is set to False.
"""
project_id = self._get_project_id(project_id)
try:
response = self.bigquery.tables().delete(
projectId=project_id,
datasetId=dataset,
tableId=table
).execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return response
except HttpError as e:
logger.error(('Cannot delete table {0}.{1}\n'
'Http Error: {2}').format(dataset, table, e.content))
if self.swallow_results:
return False
else:
return {}
|
[
"def",
"delete_table",
"(",
"self",
",",
"dataset",
",",
"table",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"try",
":",
"response",
"=",
"self",
".",
"bigquery",
".",
"tables",
"(",
")",
".",
"delete",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset",
",",
"tableId",
"=",
"table",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"True",
"else",
":",
"return",
"response",
"except",
"HttpError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"(",
"'Cannot delete table {0}.{1}\\n'",
"'Http Error: {2}'",
")",
".",
"format",
"(",
"dataset",
",",
"table",
",",
"e",
".",
"content",
")",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"False",
"else",
":",
"return",
"{",
"}"
] |
Delete a table from the dataset.
Parameters
----------
dataset : str
The dataset to delete the table from.
table : str
The name of the table to delete
project_id: str, optional
String id of the project
Returns
-------
Union[bool, dict]
bool indicating if the table was successfully deleted or not,
or response from BigQuery if swallow_results is set to False.
|
[
"Delete",
"a",
"table",
"from",
"the",
"dataset",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L822-L859
|
train
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.get_tables
|
def get_tables(self, dataset_id, app_id, start_time, end_time, project_id=None):
"""Retrieve a list of tables that are related to the given app id
and are inside the range of start and end times.
Parameters
----------
dataset_id : str
The BigQuery dataset id to consider.
app_id : str
The appspot name
start_time : Union[datetime, int]
The datetime or unix time after which records will be fetched.
end_time : Union[datetime, int]
The datetime or unix time up to which records will be fetched.
project_id: str, optional
String id of the project
Returns
-------
list
A ``list`` of table names.
"""
if isinstance(start_time, datetime):
start_time = calendar.timegm(start_time.utctimetuple())
if isinstance(end_time, datetime):
end_time = calendar.timegm(end_time.utctimetuple())
every_table = self._get_all_tables(dataset_id, project_id)
app_tables = every_table.get(app_id, {})
return self._filter_tables_by_time(app_tables, start_time, end_time)
|
python
|
def get_tables(self, dataset_id, app_id, start_time, end_time, project_id=None):
"""Retrieve a list of tables that are related to the given app id
and are inside the range of start and end times.
Parameters
----------
dataset_id : str
The BigQuery dataset id to consider.
app_id : str
The appspot name
start_time : Union[datetime, int]
The datetime or unix time after which records will be fetched.
end_time : Union[datetime, int]
The datetime or unix time up to which records will be fetched.
project_id: str, optional
String id of the project
Returns
-------
list
A ``list`` of table names.
"""
if isinstance(start_time, datetime):
start_time = calendar.timegm(start_time.utctimetuple())
if isinstance(end_time, datetime):
end_time = calendar.timegm(end_time.utctimetuple())
every_table = self._get_all_tables(dataset_id, project_id)
app_tables = every_table.get(app_id, {})
return self._filter_tables_by_time(app_tables, start_time, end_time)
|
[
"def",
"get_tables",
"(",
"self",
",",
"dataset_id",
",",
"app_id",
",",
"start_time",
",",
"end_time",
",",
"project_id",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"start_time",
",",
"datetime",
")",
":",
"start_time",
"=",
"calendar",
".",
"timegm",
"(",
"start_time",
".",
"utctimetuple",
"(",
")",
")",
"if",
"isinstance",
"(",
"end_time",
",",
"datetime",
")",
":",
"end_time",
"=",
"calendar",
".",
"timegm",
"(",
"end_time",
".",
"utctimetuple",
"(",
")",
")",
"every_table",
"=",
"self",
".",
"_get_all_tables",
"(",
"dataset_id",
",",
"project_id",
")",
"app_tables",
"=",
"every_table",
".",
"get",
"(",
"app_id",
",",
"{",
"}",
")",
"return",
"self",
".",
"_filter_tables_by_time",
"(",
"app_tables",
",",
"start_time",
",",
"end_time",
")"
] |
Retrieve a list of tables that are related to the given app id
and are inside the range of start and end times.
Parameters
----------
dataset_id : str
The BigQuery dataset id to consider.
app_id : str
The appspot name
start_time : Union[datetime, int]
The datetime or unix time after which records will be fetched.
end_time : Union[datetime, int]
The datetime or unix time up to which records will be fetched.
project_id: str, optional
String id of the project
Returns
-------
list
A ``list`` of table names.
|
[
"Retrieve",
"a",
"list",
"of",
"tables",
"that",
"are",
"related",
"to",
"the",
"given",
"app",
"id",
"and",
"are",
"inside",
"the",
"range",
"of",
"start",
"and",
"end",
"times",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L861-L893
|
train
|
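A sketch of the time-windowed lookup; datetimes are converted to unix timestamps by the method itself, and the dataset and app ids are placeholders:

from datetime import datetime

tables = client.get_tables('my_dataset', 'my-appspot-app',
                           datetime(2019, 1, 1),
                           datetime(2019, 2, 1))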
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.wait_for_job
|
def wait_for_job(self, job, interval=5, timeout=60):
"""
Waits until the job indicated by job_resource is done or has failed
Parameters
----------
job : Union[dict, str]
``dict`` representing a BigQuery job resource, or a ``str``
representing the BigQuery job id
interval : float, optional
Polling interval in seconds, default = 5
timeout : float, optional
Timeout in seconds, default = 60
Returns
-------
dict
Final state of the job resource, as described here:
https://developers.google.com/resources/api-libraries/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#get
Raises
------
Union[JobExecutingException, BigQueryTimeoutException]
On http/auth failures or timeout
"""
complete = False
job_id = str(job if isinstance(job,
(six.binary_type, six.text_type, int))
else job['jobReference']['jobId'])
job_resource = None
start_time = time()
elapsed_time = 0
while not (complete or elapsed_time > timeout):
sleep(interval)
request = self.bigquery.jobs().get(projectId=self.project_id,
jobId=job_id)
job_resource = request.execute(num_retries=self.num_retries)
self._raise_executing_exception_if_error(job_resource)
complete = job_resource.get('status').get('state') == u'DONE'
elapsed_time = time() - start_time
# raise exceptions if timeout
if not complete:
logger.error('BigQuery job %s timeout' % job_id)
raise BigQueryTimeoutException()
return job_resource
|
python
|
def wait_for_job(self, job, interval=5, timeout=60):
"""
Waits until the job indicated by job_resource is done or has failed
Parameters
----------
job : Union[dict, str]
``dict`` representing a BigQuery job resource, or a ``str``
representing the BigQuery job id
interval : float, optional
Polling interval in seconds, default = 5
timeout : float, optional
Timeout in seconds, default = 60
Returns
-------
dict
Final state of the job resource, as described here:
https://developers.google.com/resources/api-libraries/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#get
Raises
------
Union[JobExecutingException, BigQueryTimeoutException]
On http/auth failures or timeout
"""
complete = False
job_id = str(job if isinstance(job,
(six.binary_type, six.text_type, int))
else job['jobReference']['jobId'])
job_resource = None
start_time = time()
elapsed_time = 0
while not (complete or elapsed_time > timeout):
sleep(interval)
request = self.bigquery.jobs().get(projectId=self.project_id,
jobId=job_id)
job_resource = request.execute(num_retries=self.num_retries)
self._raise_executing_exception_if_error(job_resource)
complete = job_resource.get('status').get('state') == u'DONE'
elapsed_time = time() - start_time
# raise exceptions if timeout
if not complete:
logger.error('BigQuery job %s timeout' % job_id)
raise BigQueryTimeoutException()
return job_resource
|
[
"def",
"wait_for_job",
"(",
"self",
",",
"job",
",",
"interval",
"=",
"5",
",",
"timeout",
"=",
"60",
")",
":",
"complete",
"=",
"False",
"job_id",
"=",
"str",
"(",
"job",
"if",
"isinstance",
"(",
"job",
",",
"(",
"six",
".",
"binary_type",
",",
"six",
".",
"text_type",
",",
"int",
")",
")",
"else",
"job",
"[",
"'jobReference'",
"]",
"[",
"'jobId'",
"]",
")",
"job_resource",
"=",
"None",
"start_time",
"=",
"time",
"(",
")",
"elapsed_time",
"=",
"0",
"while",
"not",
"(",
"complete",
"or",
"elapsed_time",
">",
"timeout",
")",
":",
"sleep",
"(",
"interval",
")",
"request",
"=",
"self",
".",
"bigquery",
".",
"jobs",
"(",
")",
".",
"get",
"(",
"projectId",
"=",
"self",
".",
"project_id",
",",
"jobId",
"=",
"job_id",
")",
"job_resource",
"=",
"request",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"self",
".",
"_raise_executing_exception_if_error",
"(",
"job_resource",
")",
"complete",
"=",
"job_resource",
".",
"get",
"(",
"'status'",
")",
".",
"get",
"(",
"'state'",
")",
"==",
"u'DONE'",
"elapsed_time",
"=",
"time",
"(",
")",
"-",
"start_time",
"# raise exceptions if timeout",
"if",
"not",
"complete",
":",
"logger",
".",
"error",
"(",
"'BigQuery job %s timeout'",
"%",
"job_id",
")",
"raise",
"BigQueryTimeoutException",
"(",
")",
"return",
"job_resource"
] |
Waits until the job indicated by job_resource is done or has failed
Parameters
----------
job : Union[dict, str]
``dict`` representing a BigQuery job resource, or a ``str``
representing the BigQuery job id
interval : float, optional
Polling interval in seconds, default = 5
timeout : float, optional
Timeout in seconds, default = 60
Returns
-------
dict
Final state of the job resource, as described here:
https://developers.google.com/resources/api-libraries/documentation/bigquery/v2/python/latest/bigquery_v2.jobs.html#get
Raises
------
Union[JobExecutingException, BigQueryTimeoutException]
On http/auth failures or timeout
|
[
"Waits",
"until",
"the",
"job",
"indicated",
"by",
"job_resource",
"is",
"done",
"or",
"has",
"failed"
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1274-L1321
|
train
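A brief usage sketch for wait_for_job; `client` and the job id are placeholders, and the exception import path is an assumption:

from bigquery.errors import BigQueryTimeoutException  # import path assumed

try:
    # Poll every 5 seconds, give up after two minutes.
    job_resource = client.wait_for_job('job_abc123', interval=5, timeout=120)
    print(job_resource['status']['state'])  # 'DONE' on success
except BigQueryTimeoutException:
    print('job did not finish within the timeout')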
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.push_rows
|
def push_rows(self, dataset, table, rows, insert_id_key=None,
skip_invalid_rows=None, ignore_unknown_values=None,
template_suffix=None, project_id=None):
"""Upload rows to BigQuery table.
Parameters
----------
dataset : str
The dataset to upload to
table : str
The name of the table to insert rows into
rows : list
A ``list`` of rows (``dict`` objects) to add to the table
insert_id_key : str, optional
Key for insertId in row.
You can use a dot-separated key for nested columns.
skip_invalid_rows : bool, optional
Insert all valid rows of a request, even if invalid rows exist.
ignore_unknown_values : bool, optional
Accept rows that contain values that do not match the schema.
template_suffix : str, optional
Inserts the rows into table {table}{template_suffix}.
If table {table}{template_suffix} doesn't exist, it is created from {table}.
project_id: str, optional
The project to upload to
Returns
-------
Union[bool, dict]
bool indicating if insert succeeded or not, or response
from BigQuery if swallow_results is set to False.
"""
project_id = self._get_project_id(project_id)
table_data = self.bigquery.tabledata()
rows_data = []
for row in rows:
each_row = {}
each_row["json"] = row
if insert_id_key is not None:
keys = insert_id_key.split('.')
val = reduce(lambda d, key: d.get(key) if d else None, keys, row)
if val is not None:
each_row["insertId"] = val
rows_data.append(each_row)
data = {
"kind": "bigquery#tableDataInsertAllRequest",
"rows": rows_data
}
if skip_invalid_rows is not None:
data['skipInvalidRows'] = skip_invalid_rows
if ignore_unknown_values is not None:
data['ignoreUnknownValues'] = ignore_unknown_values
if template_suffix is not None:
data['templateSuffix'] = template_suffix
try:
response = table_data.insertAll(
projectId=project_id,
datasetId=dataset,
tableId=table,
body=data
).execute(num_retries=self.num_retries)
if response.get('insertErrors'):
logger.error('BigQuery insert errors: %s' % response)
if self.swallow_results:
return False
else:
return response
if self.swallow_results:
return True
else:
return response
except HttpError as e:
logger.exception('Problem with BigQuery insertAll')
if self.swallow_results:
return False
else:
return {
'insertErrors': [{
'errors': [{
'reason': 'httperror',
'message': e
}]
}]
}
|
python
|
def push_rows(self, dataset, table, rows, insert_id_key=None,
skip_invalid_rows=None, ignore_unknown_values=None,
template_suffix=None, project_id=None):
"""Upload rows to BigQuery table.
Parameters
----------
dataset : str
The dataset to upload to
table : str
The name of the table to insert rows into
rows : list
A ``list`` of rows (``dict`` objects) to add to the table
insert_id_key : str, optional
Key for insertId in row.
You can use a dot-separated key for nested columns.
skip_invalid_rows : bool, optional
Insert all valid rows of a request, even if invalid rows exist.
ignore_unknown_values : bool, optional
Accept rows that contain values that do not match the schema.
template_suffix : str, optional
Inserts the rows into table {table}{template_suffix}.
If table {table}{template_suffix} doesn't exist, it is created from {table}.
project_id: str, optional
The project to upload to
Returns
-------
Union[bool, dict]
bool indicating if insert succeeded or not, or response
from BigQuery if swallow_results is set to False.
"""
project_id = self._get_project_id(project_id)
table_data = self.bigquery.tabledata()
rows_data = []
for row in rows:
each_row = {}
each_row["json"] = row
if insert_id_key is not None:
keys = insert_id_key.split('.')
val = reduce(lambda d, key: d.get(key) if d else None, keys, row)
if val is not None:
each_row["insertId"] = val
rows_data.append(each_row)
data = {
"kind": "bigquery#tableDataInsertAllRequest",
"rows": rows_data
}
if skip_invalid_rows is not None:
data['skipInvalidRows'] = skip_invalid_rows
if ignore_unknown_values is not None:
data['ignoreUnknownValues'] = ignore_unknown_values
if template_suffix is not None:
data['templateSuffix'] = template_suffix
try:
response = table_data.insertAll(
projectId=project_id,
datasetId=dataset,
tableId=table,
body=data
).execute(num_retries=self.num_retries)
if response.get('insertErrors'):
logger.error('BigQuery insert errors: %s' % response)
if self.swallow_results:
return False
else:
return response
if self.swallow_results:
return True
else:
return response
except HttpError as e:
logger.exception('Problem with BigQuery insertAll')
if self.swallow_results:
return False
else:
return {
'insertErrors': [{
'errors': [{
'reason': 'httperror',
'message': e
}]
}]
}
|
[
"def",
"push_rows",
"(",
"self",
",",
"dataset",
",",
"table",
",",
"rows",
",",
"insert_id_key",
"=",
"None",
",",
"skip_invalid_rows",
"=",
"None",
",",
"ignore_unknown_values",
"=",
"None",
",",
"template_suffix",
"=",
"None",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"table_data",
"=",
"self",
".",
"bigquery",
".",
"tabledata",
"(",
")",
"rows_data",
"=",
"[",
"]",
"for",
"row",
"in",
"rows",
":",
"each_row",
"=",
"{",
"}",
"each_row",
"[",
"\"json\"",
"]",
"=",
"row",
"if",
"insert_id_key",
"is",
"not",
"None",
":",
"keys",
"=",
"insert_id_key",
".",
"split",
"(",
"'.'",
")",
"val",
"=",
"reduce",
"(",
"lambda",
"d",
",",
"key",
":",
"d",
".",
"get",
"(",
"key",
")",
"if",
"d",
"else",
"None",
",",
"keys",
",",
"row",
")",
"if",
"val",
"is",
"not",
"None",
":",
"each_row",
"[",
"\"insertId\"",
"]",
"=",
"val",
"rows_data",
".",
"append",
"(",
"each_row",
")",
"data",
"=",
"{",
"\"kind\"",
":",
"\"bigquery#tableDataInsertAllRequest\"",
",",
"\"rows\"",
":",
"rows_data",
"}",
"if",
"skip_invalid_rows",
"is",
"not",
"None",
":",
"data",
"[",
"'skipInvalidRows'",
"]",
"=",
"skip_invalid_rows",
"if",
"ignore_unknown_values",
"is",
"not",
"None",
":",
"data",
"[",
"'ignoreUnknownValues'",
"]",
"=",
"ignore_unknown_values",
"if",
"template_suffix",
"is",
"not",
"None",
":",
"data",
"[",
"'templateSuffix'",
"]",
"=",
"template_suffix",
"try",
":",
"response",
"=",
"table_data",
".",
"insertAll",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset",
",",
"tableId",
"=",
"table",
",",
"body",
"=",
"data",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"if",
"response",
".",
"get",
"(",
"'insertErrors'",
")",
":",
"logger",
".",
"error",
"(",
"'BigQuery insert errors: %s'",
"%",
"response",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"False",
"else",
":",
"return",
"response",
"if",
"self",
".",
"swallow_results",
":",
"return",
"True",
"else",
":",
"return",
"response",
"except",
"HttpError",
"as",
"e",
":",
"logger",
".",
"exception",
"(",
"'Problem with BigQuery insertAll'",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"False",
"else",
":",
"return",
"{",
"'insertErrors'",
":",
"[",
"{",
"'errors'",
":",
"[",
"{",
"'reason'",
":",
"'httperror'",
",",
"'message'",
":",
"e",
"}",
"]",
"}",
"]",
"}"
] |
Upload rows to BigQuery table.
Parameters
----------
dataset : str
The dataset to upload to
table : str
The name of the table to insert rows into
rows : list
A ``list`` of rows (``dict`` objects) to add to the table
insert_id_key : str, optional
Key for insertId in row.
You can use a dot-separated key for nested columns.
skip_invalid_rows : bool, optional
Insert all valid rows of a request, even if invalid rows exist.
ignore_unknown_values : bool, optional
Accept rows that contain values that do not match the schema.
template_suffix : str, optional
Inserts the rows into table {table}{template_suffix}.
If table {table}{template_suffix} doesn't exist, it is created from {table}.
project_id: str, optional
The project to upload to
Returns
-------
Union[bool, dict]
bool indicating if insert succeeded or not, or response
from BigQuery if swallow_results is set to False.
|
[
"Upload",
"rows",
"to",
"BigQuery",
"table",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1323-L1415
|
train
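A usage sketch for push_rows; the dataset, table, and rows are placeholders:

rows = [
    {'id': 1, 'user': {'name': 'alice'}},
    {'id': 2, 'user': {'name': 'bob'}},
]
# insert_id_key accepts a dot-separated path into nested columns.
ok = client.push_rows('my_dataset', 'my_table', rows, insert_id_key='user.name')
# ok is True on success with the default swallow_results=True.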
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.get_all_tables
|
def get_all_tables(self, dataset_id, project_id=None):
"""Retrieve a list of tables for the dataset.
Parameters
----------
dataset_id : str
The dataset to retrieve table data for.
project_id: str
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
A ``list`` with all table names
"""
tables_data = self._get_all_tables_for_dataset(dataset_id, project_id)
tables = []
for table in tables_data.get('tables', []):
table_name = table.get('tableReference', {}).get('tableId')
if table_name:
tables.append(table_name)
return tables
|
python
|
def get_all_tables(self, dataset_id, project_id=None):
"""Retrieve a list of tables for the dataset.
Parameters
----------
dataset_id : str
The dataset to retrieve table data for.
project_id: str
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
A ``list`` with all table names
"""
tables_data = self._get_all_tables_for_dataset(dataset_id, project_id)
tables = []
for table in tables_data.get('tables', []):
table_name = table.get('tableReference', {}).get('tableId')
if table_name:
tables.append(table_name)
return tables
|
[
"def",
"get_all_tables",
"(",
"self",
",",
"dataset_id",
",",
"project_id",
"=",
"None",
")",
":",
"tables_data",
"=",
"self",
".",
"_get_all_tables_for_dataset",
"(",
"dataset_id",
",",
"project_id",
")",
"tables",
"=",
"[",
"]",
"for",
"table",
"in",
"tables_data",
".",
"get",
"(",
"'tables'",
",",
"[",
"]",
")",
":",
"table_name",
"=",
"table",
".",
"get",
"(",
"'tableReference'",
",",
"{",
"}",
")",
".",
"get",
"(",
"'tableId'",
")",
"if",
"table_name",
":",
"tables",
".",
"append",
"(",
"table_name",
")",
"return",
"tables"
] |
Retrieve a list of tables for the dataset.
Parameters
----------
dataset_id : str
The dataset to retrieve table data for.
project_id: str
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
A ``list`` with all table names
|
[
"Retrieve",
"a",
"list",
"of",
"tables",
"for",
"the",
"dataset",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1417-L1438
|
train
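A one-line usage sketch (placeholder ids):

table_names = client.get_all_tables('my_dataset')  # e.g. ['events_2019_01', ...]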
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient._get_all_tables_for_dataset
|
def _get_all_tables_for_dataset(self, dataset_id, project_id=None):
"""Retrieve a list of all tables for the dataset.
Parameters
----------
dataset_id : str
The dataset to retrieve table names for
project_id: str
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
dict
A ``dict`` containing a ``tables`` key with all tables
"""
project_id = self._get_project_id(project_id)
result = self.bigquery.tables().list(
projectId=project_id,
datasetId=dataset_id).execute(num_retries=self.num_retries)
page_token = result.get('nextPageToken')
while page_token:
res = self.bigquery.tables().list(
projectId=project_id,
datasetId=dataset_id,
pageToken=page_token
).execute(num_retries=self.num_retries)
page_token = res.get('nextPageToken')
result['tables'] += res.get('tables', [])
return result
|
python
|
def _get_all_tables_for_dataset(self, dataset_id, project_id=None):
"""Retrieve a list of all tables for the dataset.
Parameters
----------
dataset_id : str
The dataset to retrieve table names for
project_id: str
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
dict
A ``dict`` containing a ``tables`` key with all tables
"""
project_id = self._get_project_id(project_id)
result = self.bigquery.tables().list(
projectId=project_id,
datasetId=dataset_id).execute(num_retries=self.num_retries)
page_token = result.get('nextPageToken')
while page_token:
res = self.bigquery.tables().list(
projectId=project_id,
datasetId=dataset_id,
pageToken=page_token
).execute(num_retries=self.num_retries)
page_token = res.get('nextPageToken')
result['tables'] += res.get('tables', [])
return result
|
[
"def",
"_get_all_tables_for_dataset",
"(",
"self",
",",
"dataset_id",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"result",
"=",
"self",
".",
"bigquery",
".",
"tables",
"(",
")",
".",
"list",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset_id",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"page_token",
"=",
"result",
".",
"get",
"(",
"'nextPageToken'",
")",
"while",
"page_token",
":",
"res",
"=",
"self",
".",
"bigquery",
".",
"tables",
"(",
")",
".",
"list",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset_id",
",",
"pageToken",
"=",
"page_token",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"page_token",
"=",
"res",
".",
"get",
"(",
"'nextPageToken'",
")",
"result",
"[",
"'tables'",
"]",
"+=",
"res",
".",
"get",
"(",
"'tables'",
",",
"[",
"]",
")",
"return",
"result"
] |
Retrieve a list of all tables for the dataset.
Parameters
----------
dataset_id : str
The dataset to retrieve table names for
project_id: str
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
dict
A ``dict`` containing a ``tables`` key with all tables
|
[
"Retrieve",
"a",
"list",
"of",
"all",
"tables",
"for",
"the",
"dataset",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1472-L1502
|
train
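A sketch of consuming this private helper; it returns the raw tables().list() response with any extra pages already folded into its 'tables' key via nextPageToken:

resp = client._get_all_tables_for_dataset('my_dataset')  # private API, placeholder id
table_ids = [t['tableReference']['tableId'] for t in resp.get('tables', [])]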
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient._parse_table_list_response
|
def _parse_table_list_response(self, list_response):
"""Parse the response received from calling list on tables.
Parameters
----------
list_response
The response found by calling list on a BigQuery table object.
Returns
-------
dict
Dates referenced by table names
"""
tables = defaultdict(dict)
for table in list_response.get('tables', []):
table_ref = table.get('tableReference')
if not table_ref:
continue
table_id = table_ref.get('tableId', '')
year_month, app_id = self._parse_table_name(table_id)
if not year_month:
continue
table_date = datetime.strptime(year_month, '%Y-%m')
unix_seconds = calendar.timegm(table_date.timetuple())
tables[app_id].update({table_id: unix_seconds})
# Turn off defaulting
tables.default_factory = None
return tables
|
python
|
def _parse_table_list_response(self, list_response):
"""Parse the response received from calling list on tables.
Parameters
----------
list_response
The response found by calling list on a BigQuery table object.
Returns
-------
dict
Dates referenced by table names
"""
tables = defaultdict(dict)
for table in list_response.get('tables', []):
table_ref = table.get('tableReference')
if not table_ref:
continue
table_id = table_ref.get('tableId', '')
year_month, app_id = self._parse_table_name(table_id)
if not year_month:
continue
table_date = datetime.strptime(year_month, '%Y-%m')
unix_seconds = calendar.timegm(table_date.timetuple())
tables[app_id].update({table_id: unix_seconds})
# Turn off defaulting
tables.default_factory = None
return tables
|
[
"def",
"_parse_table_list_response",
"(",
"self",
",",
"list_response",
")",
":",
"tables",
"=",
"defaultdict",
"(",
"dict",
")",
"for",
"table",
"in",
"list_response",
".",
"get",
"(",
"'tables'",
",",
"[",
"]",
")",
":",
"table_ref",
"=",
"table",
".",
"get",
"(",
"'tableReference'",
")",
"if",
"not",
"table_ref",
":",
"continue",
"table_id",
"=",
"table_ref",
".",
"get",
"(",
"'tableId'",
",",
"''",
")",
"year_month",
",",
"app_id",
"=",
"self",
".",
"_parse_table_name",
"(",
"table_id",
")",
"if",
"not",
"year_month",
":",
"continue",
"table_date",
"=",
"datetime",
".",
"strptime",
"(",
"year_month",
",",
"'%Y-%m'",
")",
"unix_seconds",
"=",
"calendar",
".",
"timegm",
"(",
"table_date",
".",
"timetuple",
"(",
")",
")",
"tables",
"[",
"app_id",
"]",
".",
"update",
"(",
"{",
"table_id",
":",
"unix_seconds",
"}",
")",
"# Turn off defualting",
"tables",
".",
"default_factory",
"=",
"None",
"return",
"tables"
] |
Parse the response received from calling list on tables.
Parameters
----------
list_response
The response found by calling list on a BigQuery table object.
Returns
-------
dict
Dates referenced by table names
|
[
"Parse",
"the",
"response",
"received",
"from",
"calling",
"list",
"on",
"tables",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1504-L1540
|
train
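An illustration of the shapes involved (values are made up; 1546300800 is 2019-01-01 00:00 UTC):

list_response = {'tables': [
    {'tableReference': {'tableId': '2019_01_myapp'}},
    {'tableReference': {'tableId': 'not_a_dated_table'}},  # skipped: no parseable date
]}
client._parse_table_list_response(list_response)
# -> {'myapp': {'2019_01_myapp': 1546300800}}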
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient._parse_table_name
|
def _parse_table_name(self, table_id):
"""Parse a table name in the form of appid_YYYY_MM or
YYYY_MM_appid and return a tuple consisting of YYYY-MM and the app id.
Returns (None, None) in the event of a name like <desc>_YYYYMMDD_<int>
Parameters
----------
table_id : str
The table id as listed by BigQuery
Returns
-------
tuple
(year/month, app id), or (None, None) if the table id cannot be
parsed.
"""
# Prefix date
attributes = table_id.split('_')
year_month = "-".join(attributes[:2])
app_id = "-".join(attributes[2:])
# Check if date parsed correctly
if year_month.count("-") == 1 and all(
[num.isdigit() for num in year_month.split('-')]):
return year_month, app_id
# Postfix date
attributes = table_id.split('_')
year_month = "-".join(attributes[-2:])
app_id = "-".join(attributes[:-2])
# Check if date parsed correctly
if year_month.count("-") == 1 and all(
[num.isdigit() for num in year_month.split('-')]) and len(year_month) == 7:
return year_month, app_id
return None, None
|
python
|
def _parse_table_name(self, table_id):
"""Parse a table name in the form of appid_YYYY_MM or
YYYY_MM_appid and return a tuple consisting of YYYY-MM and the app id.
Returns (None, None) in the event of a name like <desc>_YYYYMMDD_<int>
Parameters
----------
table_id : str
The table id as listed by BigQuery
Returns
-------
tuple
(year/month, app id), or (None, None) if the table id cannot be
parsed.
"""
# Prefix date
attributes = table_id.split('_')
year_month = "-".join(attributes[:2])
app_id = "-".join(attributes[2:])
# Check if date parsed correctly
if year_month.count("-") == 1 and all(
[num.isdigit() for num in year_month.split('-')]):
return year_month, app_id
# Postfix date
attributes = table_id.split('_')
year_month = "-".join(attributes[-2:])
app_id = "-".join(attributes[:-2])
# Check if date parsed correctly
if year_month.count("-") == 1 and all(
[num.isdigit() for num in year_month.split('-')]) and len(year_month) == 7:
return year_month, app_id
return None, None
|
[
"def",
"_parse_table_name",
"(",
"self",
",",
"table_id",
")",
":",
"# Prefix date",
"attributes",
"=",
"table_id",
".",
"split",
"(",
"'_'",
")",
"year_month",
"=",
"\"-\"",
".",
"join",
"(",
"attributes",
"[",
":",
"2",
"]",
")",
"app_id",
"=",
"\"-\"",
".",
"join",
"(",
"attributes",
"[",
"2",
":",
"]",
")",
"# Check if date parsed correctly",
"if",
"year_month",
".",
"count",
"(",
"\"-\"",
")",
"==",
"1",
"and",
"all",
"(",
"[",
"num",
".",
"isdigit",
"(",
")",
"for",
"num",
"in",
"year_month",
".",
"split",
"(",
"'-'",
")",
"]",
")",
":",
"return",
"year_month",
",",
"app_id",
"# Postfix date",
"attributes",
"=",
"table_id",
".",
"split",
"(",
"'_'",
")",
"year_month",
"=",
"\"-\"",
".",
"join",
"(",
"attributes",
"[",
"-",
"2",
":",
"]",
")",
"app_id",
"=",
"\"-\"",
".",
"join",
"(",
"attributes",
"[",
":",
"-",
"2",
"]",
")",
"# Check if date parsed correctly",
"if",
"year_month",
".",
"count",
"(",
"\"-\"",
")",
"==",
"1",
"and",
"all",
"(",
"[",
"num",
".",
"isdigit",
"(",
")",
"for",
"num",
"in",
"year_month",
".",
"split",
"(",
"'-'",
")",
"]",
")",
"and",
"len",
"(",
"year_month",
")",
"==",
"7",
":",
"return",
"year_month",
",",
"app_id",
"return",
"None",
",",
"None"
] |
Parse a table name in the form of appid_YYYY_MM or
YYYY_MM_appid and return a tuple consisting of YYYY-MM and the app id.
Returns (None, None) in the event of a name like <desc>_YYYYMMDD_<int>
Parameters
----------
table_id : str
The table id as listed by BigQuery
Returns
-------
tuple
(year/month, app id), or (None, None) if the table id cannot be
parsed.
|
[
"Parse",
"a",
"table",
"name",
"in",
"the",
"form",
"of",
"appid_YYYY_MM",
"or",
"YYYY_MM_appid",
"and",
"return",
"a",
"tuple",
"consisting",
"of",
"YYYY",
"-",
"MM",
"and",
"the",
"app",
"id",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1542-L1581
|
train
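Worked examples of both accepted name forms, traced from the code above (the ids are hypothetical):

client._parse_table_name('2019_01_myapp')    # prefix date  -> ('2019-01', 'myapp')
client._parse_table_name('myapp_2019_01')    # postfix date -> ('2019-01', 'myapp')
client._parse_table_name('logs_20190101_7')  # unparseable  -> (None, None)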
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient._filter_tables_by_time
|
def _filter_tables_by_time(self, tables, start_time, end_time):
"""Filter a table dictionary and return table names based on the range
of start and end times in unix seconds.
Parameters
----------
tables : dict
Dates referenced by table names
start_time : int
The unix time after which records will be fetched
end_time : int
The unix time up to which records will be fetched
Returns
-------
list
Table names that are inside the time range
"""
return [table_name for (table_name, unix_seconds) in tables.items()
if self._in_range(start_time, end_time, unix_seconds)]
|
python
|
def _filter_tables_by_time(self, tables, start_time, end_time):
"""Filter a table dictionary and return table names based on the range
of start and end times in unix seconds.
Parameters
----------
tables : dict
Dates referenced by table names
start_time : int
The unix time after which records will be fetched
end_time : int
The unix time up to which records will be fetched
Returns
-------
list
Table names that are inside the time range
"""
return [table_name for (table_name, unix_seconds) in tables.items()
if self._in_range(start_time, end_time, unix_seconds)]
|
[
"def",
"_filter_tables_by_time",
"(",
"self",
",",
"tables",
",",
"start_time",
",",
"end_time",
")",
":",
"return",
"[",
"table_name",
"for",
"(",
"table_name",
",",
"unix_seconds",
")",
"in",
"tables",
".",
"items",
"(",
")",
"if",
"self",
".",
"_in_range",
"(",
"start_time",
",",
"end_time",
",",
"unix_seconds",
")",
"]"
] |
Filter a table dictionary and return table names based on the range
of start and end times in unix seconds.
Parameters
----------
tables : dict
Dates referenced by table names
start_time : int
The unix time after which records will be fetched
end_time : int
The unix time up to which records will be fetched
Returns
-------
list
Table names that are inside the time range
|
[
"Filter",
"a",
"table",
"dictionary",
"and",
"return",
"table",
"names",
"based",
"on",
"the",
"range",
"of",
"start",
"and",
"end",
"times",
"in",
"unix",
"seconds",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1583-L1603
|
train
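A sketch with unix-second placeholders (1546300800 = 2019-01-01, 1548979200 = 2019-02-01):

tables = {'2019_01_myapp': 1546300800, '2018_01_myapp': 1514764800}
client._filter_tables_by_time(tables, 1546300800, 1548979200)
# -> ['2019_01_myapp']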
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient._in_range
|
def _in_range(self, start_time, end_time, time):
"""Indicate if the given time falls inside of the given range.
Parameters
----------
start_time : int
The unix time for the start of the range
end_time : int
The unix time for the end of the range
time : int
The unix time to check
Returns
-------
bool
True if the time falls within the range, False otherwise.
"""
ONE_MONTH = 2764800 # 32 days
return start_time <= time <= end_time or \
time <= start_time <= time + ONE_MONTH or \
time <= end_time <= time + ONE_MONTH
|
python
|
def _in_range(self, start_time, end_time, time):
"""Indicate if the given time falls inside of the given range.
Parameters
----------
start_time : int
The unix time for the start of the range
end_time : int
The unix time for the end of the range
time : int
The unix time to check
Returns
-------
bool
True if the time falls within the range, False otherwise.
"""
ONE_MONTH = 2764800 # 32 days
return start_time <= time <= end_time or \
time <= start_time <= time + ONE_MONTH or \
time <= end_time <= time + ONE_MONTH
|
[
"def",
"_in_range",
"(",
"self",
",",
"start_time",
",",
"end_time",
",",
"time",
")",
":",
"ONE_MONTH",
"=",
"2764800",
"# 32 days",
"return",
"start_time",
"<=",
"time",
"<=",
"end_time",
"or",
"time",
"<=",
"start_time",
"<=",
"time",
"+",
"ONE_MONTH",
"or",
"time",
"<=",
"end_time",
"<=",
"time",
"+",
"ONE_MONTH"
] |
Indicate if the given time falls inside of the given range.
Parameters
----------
start_time : int
The unix time for the start of the range
end_time : int
The unix time for the end of the range
time : int
The unix time to check
Returns
-------
bool
True if the time falls within the range, False otherwise.
|
[
"Indicate",
"if",
"the",
"given",
"time",
"falls",
"inside",
"of",
"the",
"given",
"range",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1605-L1627
|
train
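The 32-day slack lets a table whose month begins just before the query window still match. A worked check (times are placeholders):

ONE_MONTH = 2764800  # 32 days * 86400 seconds, matching the constant above
start, end, table_month = 1546560000, 1547000000, 1546300800
# table_month <= start <= table_month + ONE_MONTH holds, so:
client._in_range(start, end, table_month)  # -> True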
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient._transform_row
|
def _transform_row(self, row, schema):
"""Apply the given schema to the given BigQuery data row.
Parameters
----------
row
A single BigQuery row to transform
schema : list
The BigQuery table schema to apply to the row, specifically
the list of field dicts.
Returns
-------
dict
Mapping schema to row
"""
log = {}
# Match each schema column with its associated row value
for index, col_dict in enumerate(schema):
col_name = col_dict['name']
row_value = row['f'][index]['v']
if row_value is None:
log[col_name] = None
continue
# Recurse on nested records
if col_dict['type'] == 'RECORD':
row_value = self._recurse_on_row(col_dict, row_value)
# Otherwise just cast the value
elif col_dict['type'] == 'INTEGER':
row_value = int(row_value)
elif col_dict['type'] == 'FLOAT':
row_value = float(row_value)
elif col_dict['type'] == 'BOOLEAN':
row_value = row_value in ('True', 'true', 'TRUE')
elif col_dict['type'] == 'TIMESTAMP':
row_value = float(row_value)
log[col_name] = row_value
return log
|
python
|
def _transform_row(self, row, schema):
"""Apply the given schema to the given BigQuery data row.
Parameters
----------
row
A single BigQuery row to transform
schema : list
The BigQuery table schema to apply to the row, specifically
the list of field dicts.
Returns
-------
dict
Mapping schema to row
"""
log = {}
# Match each schema column with its associated row value
for index, col_dict in enumerate(schema):
col_name = col_dict['name']
row_value = row['f'][index]['v']
if row_value is None:
log[col_name] = None
continue
# Recurse on nested records
if col_dict['type'] == 'RECORD':
row_value = self._recurse_on_row(col_dict, row_value)
# Otherwise just cast the value
elif col_dict['type'] == 'INTEGER':
row_value = int(row_value)
elif col_dict['type'] == 'FLOAT':
row_value = float(row_value)
elif col_dict['type'] == 'BOOLEAN':
row_value = row_value in ('True', 'true', 'TRUE')
elif col_dict['type'] == 'TIMESTAMP':
row_value = float(row_value)
log[col_name] = row_value
return log
|
[
"def",
"_transform_row",
"(",
"self",
",",
"row",
",",
"schema",
")",
":",
"log",
"=",
"{",
"}",
"# Match each schema column with its associated row value",
"for",
"index",
",",
"col_dict",
"in",
"enumerate",
"(",
"schema",
")",
":",
"col_name",
"=",
"col_dict",
"[",
"'name'",
"]",
"row_value",
"=",
"row",
"[",
"'f'",
"]",
"[",
"index",
"]",
"[",
"'v'",
"]",
"if",
"row_value",
"is",
"None",
":",
"log",
"[",
"col_name",
"]",
"=",
"None",
"continue",
"# Recurse on nested records",
"if",
"col_dict",
"[",
"'type'",
"]",
"==",
"'RECORD'",
":",
"row_value",
"=",
"self",
".",
"_recurse_on_row",
"(",
"col_dict",
",",
"row_value",
")",
"# Otherwise just cast the value",
"elif",
"col_dict",
"[",
"'type'",
"]",
"==",
"'INTEGER'",
":",
"row_value",
"=",
"int",
"(",
"row_value",
")",
"elif",
"col_dict",
"[",
"'type'",
"]",
"==",
"'FLOAT'",
":",
"row_value",
"=",
"float",
"(",
"row_value",
")",
"elif",
"col_dict",
"[",
"'type'",
"]",
"==",
"'BOOLEAN'",
":",
"row_value",
"=",
"row_value",
"in",
"(",
"'True'",
",",
"'true'",
",",
"'TRUE'",
")",
"elif",
"col_dict",
"[",
"'type'",
"]",
"==",
"'TIMESTAMP'",
":",
"row_value",
"=",
"float",
"(",
"row_value",
")",
"log",
"[",
"col_name",
"]",
"=",
"row_value",
"return",
"log"
] |
Apply the given schema to the given BigQuery data row.
Parameters
----------
row
A single BigQuery row to transform
schema : list
The BigQuery table schema to apply to the row, specifically
the list of field dicts.
Returns
-------
dict
Mapping schema to row
|
[
"Apply",
"the",
"given",
"schema",
"to",
"the",
"given",
"BigQuery",
"data",
"row",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1664-L1711
|
train
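An illustration of the raw BigQuery row layout this expects (the 'f'/'v' wrappers) and the typed dict it yields:

schema = [{'name': 'id', 'type': 'INTEGER'},
          {'name': 'score', 'type': 'FLOAT'},
          {'name': 'ok', 'type': 'BOOLEAN'}]
row = {'f': [{'v': '42'}, {'v': '0.5'}, {'v': 'true'}]}
client._transform_row(row, schema)
# -> {'id': 42, 'score': 0.5, 'ok': True}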
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient._recurse_on_row
|
def _recurse_on_row(self, col_dict, nested_value):
"""Apply the schema specified by the given dict to the nested value by
recursing on it.
Parameters
----------
col_dict : dict
The schema to apply to the nested value.
nested_value : A value nested in a BigQuery row.
Returns
-------
Union[dict, list]
``dict`` or ``list`` of ``dict`` objects from applied schema.
"""
row_value = None
# Multiple nested records
if col_dict['mode'] == 'REPEATED' and isinstance(nested_value, list):
row_value = [self._transform_row(record['v'], col_dict['fields'])
for record in nested_value]
# A single nested record
else:
row_value = self._transform_row(nested_value, col_dict['fields'])
return row_value
|
python
|
def _recurse_on_row(self, col_dict, nested_value):
"""Apply the schema specified by the given dict to the nested value by
recursing on it.
Parameters
----------
col_dict : dict
The schema to apply to the nested value.
nested_value : A value nested in a BigQuery row.
Returns
-------
Union[dict, list]
``dict`` or ``list`` of ``dict`` objects from applied schema.
"""
row_value = None
# Multiple nested records
if col_dict['mode'] == 'REPEATED' and isinstance(nested_value, list):
row_value = [self._transform_row(record['v'], col_dict['fields'])
for record in nested_value]
# A single nested record
else:
row_value = self._transform_row(nested_value, col_dict['fields'])
return row_value
|
[
"def",
"_recurse_on_row",
"(",
"self",
",",
"col_dict",
",",
"nested_value",
")",
":",
"row_value",
"=",
"None",
"# Multiple nested records",
"if",
"col_dict",
"[",
"'mode'",
"]",
"==",
"'REPEATED'",
"and",
"isinstance",
"(",
"nested_value",
",",
"list",
")",
":",
"row_value",
"=",
"[",
"self",
".",
"_transform_row",
"(",
"record",
"[",
"'v'",
"]",
",",
"col_dict",
"[",
"'fields'",
"]",
")",
"for",
"record",
"in",
"nested_value",
"]",
"# A single nested record",
"else",
":",
"row_value",
"=",
"self",
".",
"_transform_row",
"(",
"nested_value",
",",
"col_dict",
"[",
"'fields'",
"]",
")",
"return",
"row_value"
] |
Apply the schema specified by the given dict to the nested value by
recursing on it.
Parameters
----------
col_dict : dict
The schema to apply to the nested value.
nested_value : A value nested in a BigQuery row.
Returns
-------
Union[dict, list]
``dict`` or ``list`` of ``dict`` objects from applied schema.
|
[
"Apply",
"the",
"schema",
"specified",
"by",
"the",
"given",
"dict",
"to",
"the",
"nested",
"value",
"by",
"recursing",
"on",
"it",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1713-L1740
|
train
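A sketch of the REPEATED branch; each repeated record arrives wrapped in a 'v' key and is unpacked via _transform_row above:

col_dict = {'name': 'users', 'type': 'RECORD', 'mode': 'REPEATED',
            'fields': [{'name': 'name', 'type': 'STRING'}]}
nested = [{'v': {'f': [{'v': 'alice'}]}}]
client._recurse_on_row(col_dict, nested)
# -> [{'name': 'alice'}]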
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient._generate_hex_for_uris
|
def _generate_hex_for_uris(self, uris):
Given uris, generate and return a hex version of them
Parameters
----------
uris : list
Containing all uris
Returns
-------
str
Hexed uris
"""
return sha256((":".join(uris) + str(time())).encode()).hexdigest()
|
python
|
def _generate_hex_for_uris(self, uris):
Given uris, generate and return a hex version of them
Parameters
----------
uris : list
Containing all uris
Returns
-------
str
Hexed uris
"""
return sha256((":".join(uris) + str(time())).encode()).hexdigest()
|
[
"def",
"_generate_hex_for_uris",
"(",
"self",
",",
"uris",
")",
":",
"return",
"sha256",
"(",
"(",
"\":\"",
".",
"join",
"(",
"uris",
")",
"+",
"str",
"(",
"time",
"(",
")",
")",
")",
".",
"encode",
"(",
")",
")",
".",
"hexdigest",
"(",
")"
] |
Given uris, generate and return a hex version of them
Parameters
----------
uris : list
Containing all uris
Returns
-------
str
Hexed uris
|
[
"Given",
"uris",
"generate",
"and",
"return",
"hex",
"version",
"of",
"it"
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1742-L1755
|
train
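A sketch; because the current time is mixed into the digest, two calls with identical uris produce different values:

uris = ['gs://bucket/a.json', 'gs://bucket/b.json']  # placeholder uris
suffix = client._generate_hex_for_uris(uris)  # a 64-character sha256 hex string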
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.create_dataset
|
def create_dataset(self, dataset_id, friendly_name=None, description=None,
access=None, location=None, project_id=None):
"""Create a new BigQuery dataset.
Parameters
----------
dataset_id : str
Unique ``str`` identifying the dataset within the project (the
referenceID of the dataset, not the integer id of the dataset)
friendly_name: str, optional
A human readable name
description: str, optional
Longer string providing a description
access : list, optional
Indicating access permissions (see
https://developers.google.com/bigquery/docs/reference/v2/datasets#resource)
location : str, optional
Indicating where dataset should be stored: EU or US (see
https://developers.google.com/bigquery/docs/reference/v2/datasets#resource)
project_id: str
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
Union[bool, dict]
``bool`` indicating if dataset was created or not, or response
from BigQuery if swallow_results is set to False
"""
project_id = self._get_project_id(project_id)
try:
datasets = self.bigquery.datasets()
dataset_data = self.dataset_resource(dataset_id,
project_id=project_id,
friendly_name=friendly_name,
description=description,
access=access,
location=location
)
response = datasets.insert(projectId=project_id,
body=dataset_data).execute(
num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return response
except HttpError as e:
logger.error(
'Cannot create dataset {0}, {1}'.format(dataset_id, e))
if self.swallow_results:
return False
else:
return {}
|
python
|
def create_dataset(self, dataset_id, friendly_name=None, description=None,
access=None, location=None, project_id=None):
"""Create a new BigQuery dataset.
Parameters
----------
dataset_id : str
Unique ``str`` identifying the dataset within the project (the
referenceID of the dataset, not the integer id of the dataset)
friendly_name: str, optional
A human readable name
description: str, optional
Longer string providing a description
access : list, optional
Indicating access permissions (see
https://developers.google.com/bigquery/docs/reference/v2/datasets#resource)
location : str, optional
Indicating where dataset should be stored: EU or US (see
https://developers.google.com/bigquery/docs/reference/v2/datasets#resource)
project_id: str
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
Union[bool, dict]
``bool`` indicating if dataset was created or not, or response
from BigQuery if swallow_results is set to False
"""
project_id = self._get_project_id(project_id)
try:
datasets = self.bigquery.datasets()
dataset_data = self.dataset_resource(dataset_id,
project_id=project_id,
friendly_name=friendly_name,
description=description,
access=access,
location=location
)
response = datasets.insert(projectId=project_id,
body=dataset_data).execute(
num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return response
except HttpError as e:
logger.error(
'Cannot create dataset {0}, {1}'.format(dataset_id, e))
if self.swallow_results:
return False
else:
return {}
|
[
"def",
"create_dataset",
"(",
"self",
",",
"dataset_id",
",",
"friendly_name",
"=",
"None",
",",
"description",
"=",
"None",
",",
"access",
"=",
"None",
",",
"location",
"=",
"None",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"try",
":",
"datasets",
"=",
"self",
".",
"bigquery",
".",
"datasets",
"(",
")",
"dataset_data",
"=",
"self",
".",
"dataset_resource",
"(",
"dataset_id",
",",
"project_id",
"=",
"project_id",
",",
"friendly_name",
"=",
"friendly_name",
",",
"description",
"=",
"description",
",",
"access",
"=",
"access",
",",
"location",
"=",
"location",
")",
"response",
"=",
"datasets",
".",
"insert",
"(",
"projectId",
"=",
"project_id",
",",
"body",
"=",
"dataset_data",
")",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"True",
"else",
":",
"return",
"response",
"except",
"HttpError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"'Cannot create dataset {0}, {1}'",
".",
"format",
"(",
"dataset_id",
",",
"e",
")",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"False",
"else",
":",
"return",
"{",
"}"
] |
Create a new BigQuery dataset.
Parameters
----------
dataset_id : str
Unique ``str`` identifying the dataset within the project (the
referenceID of the dataset, not the integer id of the dataset)
friendly_name: str, optional
A human readable name
description: str, optional
Longer string providing a description
access : list, optional
Indicating access permissions (see
https://developers.google.com/bigquery/docs/reference/v2/datasets#resource)
location : str, optional
Indicating where dataset should be stored: EU or US (see
https://developers.google.com/bigquery/docs/reference/v2/datasets#resource)
project_id: str
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
Union[bool, dict]
``bool`` indicating if dataset was created or not, or response
from BigQuery if swallow_results is set to False
|
[
"Create",
"a",
"new",
"BigQuery",
"dataset",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1782-L1835
|
train
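A usage sketch (all strings are placeholders):

created = client.create_dataset(
    'my_dataset',
    friendly_name='My dataset',
    description='An example dataset',
    location='EU',
)
# created is True on success with the default swallow_results=True.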
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.delete_dataset
|
def delete_dataset(self, dataset_id, delete_contents=False, project_id=None):
"""Delete a BigQuery dataset.
Parameters
----------
dataset_id : str
Unique ``str`` identifying the dataset within the project (the
referenceId of the dataset)
delete_contents : bool, optional
If True, forces the deletion of the dataset even when the dataset
contains data (Default = False)
project_id: str, optional
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
Union[bool, dict]
``bool`` indicating if the delete was successful or not, or response
from BigQuery if swallow_results is set to False
Raises
-------
HttpError
404 when dataset with dataset_id does not exist
"""
project_id = self._get_project_id(project_id)
try:
datasets = self.bigquery.datasets()
request = datasets.delete(projectId=project_id,
datasetId=dataset_id,
deleteContents=delete_contents)
response = request.execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return response
except HttpError as e:
logger.error(
'Cannot delete dataset {0}: {1}'.format(dataset_id, e))
if self.swallow_results:
return False
else:
return {}
|
python
|
def delete_dataset(self, dataset_id, delete_contents=False, project_id=None):
"""Delete a BigQuery dataset.
Parameters
----------
dataset_id : str
Unique ``str`` identifying the dataset within the project (the
referenceId of the dataset)
delete_contents : bool, optional
If True, forces the deletion of the dataset even when the dataset
contains data (Default = False)
project_id: str, optional
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
Union[bool, dict]
``bool`` indicating if the delete was successful or not, or response
from BigQuery if swallow_results is set to False
Raises
-------
HttpError
404 when dataset with dataset_id does not exist
"""
project_id = self._get_project_id(project_id)
try:
datasets = self.bigquery.datasets()
request = datasets.delete(projectId=project_id,
datasetId=dataset_id,
deleteContents=delete_contents)
response = request.execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return response
except HttpError as e:
logger.error(
'Cannot delete dataset {0}: {1}'.format(dataset_id, e))
if self.swallow_results:
return False
else:
return {}
|
[
"def",
"delete_dataset",
"(",
"self",
",",
"dataset_id",
",",
"delete_contents",
"=",
"False",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"try",
":",
"datasets",
"=",
"self",
".",
"bigquery",
".",
"datasets",
"(",
")",
"request",
"=",
"datasets",
".",
"delete",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset_id",
",",
"deleteContents",
"=",
"delete_contents",
")",
"response",
"=",
"request",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"True",
"else",
":",
"return",
"response",
"except",
"HttpError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"'Cannot delete dataset {0}: {1}'",
".",
"format",
"(",
"dataset_id",
",",
"e",
")",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"False",
"else",
":",
"return",
"{",
"}"
] |
Delete a BigQuery dataset.
Parameters
----------
dataset_id : str
Unique ``str`` identifying the dataset within the project (the
referenceId of the dataset)
delete_contents : bool, optional
If True, forces the deletion of the dataset even when the dataset
contains data (Default = False)
project_id: str, optional
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
Union[bool, dict]
``bool`` indicating if the delete was successful or not, or response
from BigQuery if swallow_results is set to False
Raises
-------
HttpError
404 when dataset with dataset_id does not exist
|
[
"Delete",
"a",
"BigQuery",
"dataset",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1861-L1904
|
train
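A usage sketch (placeholder id); delete_contents=True forces deletion even if the dataset holds tables:

deleted = client.delete_dataset('my_dataset', delete_contents=True)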
|
tylertreat/BigQuery-Python
|
bigquery/client.py
|
BigQueryClient.update_dataset
|
def update_dataset(self, dataset_id, friendly_name=None, description=None,
access=None, project_id=None):
"""Updates information in an existing dataset. The update method
replaces the entire dataset resource, whereas the patch method only
replaces fields that are provided in the submitted dataset resource.
Parameters
----------
dataset_id : str
Unique ``str`` identifying the dataset within the project (the
referenceId of the dataset)
friendly_name : str, optional
An optional descriptive name for the dataset.
description : str, optional
An optional description of the dataset.
access : list, optional
Indicating access permissions
project_id: str, optional
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
Union[bool, dict]
``bool`` indicating if the update was successful or not, or
response from BigQuery if swallow_results is set to False.
"""
project_id = self._get_project_id(project_id)
try:
datasets = self.bigquery.datasets()
body = self.dataset_resource(dataset_id,
friendly_name=friendly_name,
description=description,
access=access,
project_id=project_id)
request = datasets.update(projectId=project_id,
datasetId=dataset_id,
body=body)
response = request.execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return response
except HttpError as e:
logger.error(
'Cannot update dataset {0}: {1}'.format(dataset_id, e))
if self.swallow_results:
return False
else:
return {}
|
python
|
def update_dataset(self, dataset_id, friendly_name=None, description=None,
access=None, project_id=None):
"""Updates information in an existing dataset. The update method
replaces the entire dataset resource, whereas the patch method only
replaces fields that are provided in the submitted dataset resource.
Parameters
----------
dataset_id : str
Unique ``str`` identifying the dataset within the project (the
referenceId of the dataset)
friendly_name : str, optional
An optional descriptive name for the dataset.
description : str, optional
An optional description of the dataset.
access : list, optional
Indicating access permissions
project_id: str, optional
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
Union[bool, dict]
``bool`` indicating if the update was successful or not, or
response from BigQuery if swallow_results is set to False.
"""
project_id = self._get_project_id(project_id)
try:
datasets = self.bigquery.datasets()
body = self.dataset_resource(dataset_id,
friendly_name=friendly_name,
description=description,
access=access,
project_id=project_id)
request = datasets.update(projectId=project_id,
datasetId=dataset_id,
body=body)
response = request.execute(num_retries=self.num_retries)
if self.swallow_results:
return True
else:
return response
except HttpError as e:
logger.error(
'Cannot update dataset {0}: {1}'.format(dataset_id, e))
if self.swallow_results:
return False
else:
return {}
|
[
"def",
"update_dataset",
"(",
"self",
",",
"dataset_id",
",",
"friendly_name",
"=",
"None",
",",
"description",
"=",
"None",
",",
"access",
"=",
"None",
",",
"project_id",
"=",
"None",
")",
":",
"project_id",
"=",
"self",
".",
"_get_project_id",
"(",
"project_id",
")",
"try",
":",
"datasets",
"=",
"self",
".",
"bigquery",
".",
"datasets",
"(",
")",
"body",
"=",
"self",
".",
"dataset_resource",
"(",
"dataset_id",
",",
"friendly_name",
"=",
"friendly_name",
",",
"description",
"=",
"description",
",",
"access",
"=",
"access",
",",
"project_id",
"=",
"project_id",
")",
"request",
"=",
"datasets",
".",
"update",
"(",
"projectId",
"=",
"project_id",
",",
"datasetId",
"=",
"dataset_id",
",",
"body",
"=",
"body",
")",
"response",
"=",
"request",
".",
"execute",
"(",
"num_retries",
"=",
"self",
".",
"num_retries",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"True",
"else",
":",
"return",
"response",
"except",
"HttpError",
"as",
"e",
":",
"logger",
".",
"error",
"(",
"'Cannot update dataset {0}: {1}'",
".",
"format",
"(",
"dataset_id",
",",
"e",
")",
")",
"if",
"self",
".",
"swallow_results",
":",
"return",
"False",
"else",
":",
"return",
"{",
"}"
] |
Updates information in an existing dataset. The update method
replaces the entire dataset resource, whereas the patch method only
replaces fields that are provided in the submitted dataset resource.
Parameters
----------
dataset_id : str
Unique ``str`` identifying the dataset within the project (the
referenceId of the dataset)
friendly_name : str, optional
An optional descriptive name for the dataset.
description : str, optional
An optional description of the dataset.
access : list, optional
Indicating access permissions
project_id: str, optional
Unique ``str`` identifying the BigQuery project that contains the dataset
Returns
-------
Union[bool, dict]
``bool`` indicating if the update was successful or not, or
response from BigQuery if swallow_results is set to False.
|
[
"Updates",
"information",
"in",
"an",
"existing",
"dataset",
".",
"The",
"update",
"method",
"replaces",
"the",
"entire",
"dataset",
"resource",
"whereas",
"the",
"patch",
"method",
"only",
"replaces",
"fields",
"that",
"are",
"provided",
"in",
"the",
"submitted",
"dataset",
"resource",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/client.py#L1906-L1956
|
train
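A usage sketch; since update replaces the entire dataset resource, pass every field you want to keep:

updated = client.update_dataset(
    'my_dataset',
    friendly_name='My dataset',
    description='A new description',
)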
|
tylertreat/BigQuery-Python
|
bigquery/schema_builder.py
|
schema_from_record
|
def schema_from_record(record, timestamp_parser=default_timestamp_parser):
"""Generate a BigQuery schema given an example of a record that is to be
inserted into BigQuery.
Parameters
----------
record : dict
Example of a record that is to be inserted into BigQuery
timestamp_parser : function, optional
Unary function taking a ``str`` and returning a ``bool`` that is
True if the string represents a date
Returns
-------
Schema: list
"""
return [describe_field(k, v, timestamp_parser=timestamp_parser)
for k, v in list(record.items())]
|
python
|
def schema_from_record(record, timestamp_parser=default_timestamp_parser):
"""Generate a BigQuery schema given an example of a record that is to be
inserted into BigQuery.
Parameters
----------
record : dict
Example of a record that is to be inserted into BigQuery
timestamp_parser : function, optional
Unary function taking a ``str`` and returning a ``bool`` that is
True if the string represents a date
Returns
-------
Schema: list
"""
return [describe_field(k, v, timestamp_parser=timestamp_parser)
for k, v in list(record.items())]
|
[
"def",
"schema_from_record",
"(",
"record",
",",
"timestamp_parser",
"=",
"default_timestamp_parser",
")",
":",
"return",
"[",
"describe_field",
"(",
"k",
",",
"v",
",",
"timestamp_parser",
"=",
"timestamp_parser",
")",
"for",
"k",
",",
"v",
"in",
"list",
"(",
"record",
".",
"items",
"(",
")",
")",
"]"
] |
Generate a BigQuery schema given an example of a record that is to be
inserted into BigQuery.
Parameters
----------
record : dict
Example of a record that is to be inserted into BigQuery
timestamp_parser : function, optional
Unary function taking a ``str`` and returning a ``bool`` that is
True if the string represents a date
Returns
-------
Schema: list
|
[
"Generate",
"a",
"BigQuery",
"schema",
"given",
"an",
"example",
"of",
"a",
"record",
"that",
"is",
"to",
"be",
"inserted",
"into",
"BigQuery",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/schema_builder.py#L22-L39
|
train
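A usage sketch; the exact type/mode strings are assumed from the describe_field examples below (lowercase 'string', 'nullable', etc.):

from bigquery.schema_builder import schema_from_record

record = {'id': 1, 'name': 'alice', 'tags': ['a', 'b']}
schema_from_record(record)
# e.g. [{'name': 'id', 'type': 'integer', 'mode': 'nullable'},
#       {'name': 'name', 'type': 'string', 'mode': 'nullable'},
#       {'name': 'tags', 'type': 'string', 'mode': 'repeated'}]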
|
tylertreat/BigQuery-Python
|
bigquery/schema_builder.py
|
describe_field
|
def describe_field(k, v, timestamp_parser=default_timestamp_parser):
"""Given a key representing a column name and value representing the value
stored in the column, return a representation of the BigQuery schema
element describing that field. Raise errors if invalid value types are
provided.
Parameters
----------
k : Union[str, unicode]
Key representing the column
v : Union[str, unicode, int, float, datetime, object]
Value mapped to by `k`
Returns
-------
object
Describing the field
Raises
------
Exception
If invalid value types are provided.
Examples
--------
>>> describe_field("username", "Bob")
{"name": "username", "type": "string", "mode": "nullable"}
>>> describe_field("users", [{"username": "Bob"}])
{"name": "users", "type": "record", "mode": "repeated",
"fields": [{"name":"username","type":"string","mode":"nullable"}]}
"""
def bq_schema_field(name, bq_type, mode):
return {"name": name, "type": bq_type, "mode": mode}
if isinstance(v, list):
if len(v) == 0:
raise Exception(
"Can't describe schema because of empty list {0}:[]".format(k))
v = v[0]
mode = "repeated"
else:
mode = "nullable"
bq_type = bigquery_type(v, timestamp_parser=timestamp_parser)
if not bq_type:
raise InvalidTypeException(k, v)
field = bq_schema_field(k, bq_type, mode)
if bq_type == "record":
try:
field['fields'] = schema_from_record(v, timestamp_parser)
except InvalidTypeException as e:
# recursively construct the key causing the error
raise InvalidTypeException("%s.%s" % (k, e.key), e.value)
return field
|
python
|
def describe_field(k, v, timestamp_parser=default_timestamp_parser):
"""Given a key representing a column name and value representing the value
stored in the column, return a representation of the BigQuery schema
element describing that field. Raise errors if invalid value types are
provided.
Parameters
----------
k : Union[str, unicode]
Key representing the column
v : Union[str, unicode, int, float, datetime, object]
Value mapped to by `k`
Returns
-------
object
Describing the field
Raises
------
Exception
If invalid value types are provided.
Examples
--------
>>> describe_field("username", "Bob")
{"name": "username", "type": "string", "mode": "nullable"}
>>> describe_field("users", [{"username": "Bob"}])
{"name": "users", "type": "record", "mode": "repeated",
"fields": [{"name":"username","type":"string","mode":"nullable"}]}
"""
def bq_schema_field(name, bq_type, mode):
return {"name": name, "type": bq_type, "mode": mode}
if isinstance(v, list):
if len(v) == 0:
raise Exception(
"Can't describe schema because of empty list {0}:[]".format(k))
v = v[0]
mode = "repeated"
else:
mode = "nullable"
bq_type = bigquery_type(v, timestamp_parser=timestamp_parser)
if not bq_type:
raise InvalidTypeException(k, v)
field = bq_schema_field(k, bq_type, mode)
if bq_type == "record":
try:
field['fields'] = schema_from_record(v, timestamp_parser)
except InvalidTypeException as e:
# recursively construct the key causing the error
raise InvalidTypeException("%s.%s" % (k, e.key), e.value)
return field
|
[
"def",
"describe_field",
"(",
"k",
",",
"v",
",",
"timestamp_parser",
"=",
"default_timestamp_parser",
")",
":",
"def",
"bq_schema_field",
"(",
"name",
",",
"bq_type",
",",
"mode",
")",
":",
"return",
"{",
"\"name\"",
":",
"name",
",",
"\"type\"",
":",
"bq_type",
",",
"\"mode\"",
":",
"mode",
"}",
"if",
"isinstance",
"(",
"v",
",",
"list",
")",
":",
"if",
"len",
"(",
"v",
")",
"==",
"0",
":",
"raise",
"Exception",
"(",
"\"Can't describe schema because of empty list {0}:[]\"",
".",
"format",
"(",
"k",
")",
")",
"v",
"=",
"v",
"[",
"0",
"]",
"mode",
"=",
"\"repeated\"",
"else",
":",
"mode",
"=",
"\"nullable\"",
"bq_type",
"=",
"bigquery_type",
"(",
"v",
",",
"timestamp_parser",
"=",
"timestamp_parser",
")",
"if",
"not",
"bq_type",
":",
"raise",
"InvalidTypeException",
"(",
"k",
",",
"v",
")",
"field",
"=",
"bq_schema_field",
"(",
"k",
",",
"bq_type",
",",
"mode",
")",
"if",
"bq_type",
"==",
"\"record\"",
":",
"try",
":",
"field",
"[",
"'fields'",
"]",
"=",
"schema_from_record",
"(",
"v",
",",
"timestamp_parser",
")",
"except",
"InvalidTypeException",
"as",
"e",
":",
"# recursively construct the key causing the error",
"raise",
"InvalidTypeException",
"(",
"\"%s.%s\"",
"%",
"(",
"k",
",",
"e",
".",
"key",
")",
",",
"e",
".",
"value",
")",
"return",
"field"
] |
Given a key representing a column name and value representing the value
stored in the column, return a representation of the BigQuery schema
element describing that field. Raise errors if invalid value types are
provided.
Parameters
----------
k : Union[str, unicode]
Key representing the column
v : Union[str, unicode, int, float, datetime, object]
Value mapped to by `k`
Returns
-------
object
Describing the field
Raises
------
Exception
If invalid value types are provided.
Examples
--------
>>> describe_field("username", "Bob")
{"name": "username", "type": "string", "mode": "nullable"}
>>> describe_field("users", [{"username": "Bob"}])
{"name": "users", "type": "record", "mode": "repeated",
"fields": [{"name":"username","type":"string","mode":"nullable"}]}
|
[
"Given",
"a",
"key",
"representing",
"a",
"column",
"name",
"and",
"value",
"representing",
"the",
"value",
"stored",
"in",
"the",
"column",
"return",
"a",
"representation",
"of",
"the",
"BigQuery",
"schema",
"element",
"describing",
"that",
"field",
".",
"Raise",
"errors",
"if",
"invalid",
"value",
"types",
"are",
"provided",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/schema_builder.py#L42-L98
|
train
|
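A hedged sketch of describe_field, including the recursive key-path behaviour when a nested value has no BigQuery type; object() is used as a deliberately unsupported value, assuming bigquery_type() returns a falsy result for it.

from bigquery.schema_builder import describe_field, InvalidTypeException

print(describe_field("users", [{"username": "Bob"}]))
# {'name': 'users', 'type': 'record', 'mode': 'repeated',
#  'fields': [{'name': 'username', 'type': 'string', 'mode': 'nullable'}]}

try:
    describe_field("outer", {"inner": object()})  # unsupported nested value
except InvalidTypeException as e:
    print(e.key)  # 'outer.inner' -- the key path is rebuilt on each re-raise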
tylertreat/BigQuery-Python
|
bigquery/query_builder.py
|
render_query
|
def render_query(dataset, tables, select=None, conditions=None,
groupings=None, having=None, order_by=None, limit=None):
"""Render a query that will run over the given tables using the specified
parameters.
Parameters
----------
dataset : str
The BigQuery dataset to query data from
tables : Union[dict, list]
The table in `dataset` to query.
select : dict, optional
The keys function as column names and the values function as options to
apply to the select field such as alias and format. For example,
select['start_time'] might have the form
{'alias': 'StartTime', 'format': 'INTEGER-FORMAT_UTC_USEC'}, which
would be represented as 'SEC_TO_TIMESTAMP(INTEGER(start_time)) as
StartTime' in a query. Pass `None` to select all.
conditions : list, optional
a ``list`` of ``dict`` objects to filter results by. Each dict should
have the keys 'field', 'type', and 'comparators'. The first two map to
strings representing the field (e.g. 'foo') and type (e.g. 'FLOAT').
'comparators' maps to another ``dict`` containing the keys 'condition',
'negate', and 'value'.
If 'comparators' = {'condition': '>=', 'negate': False, 'value': 1},
this example will be rendered as 'foo >= FLOAT('1')' in the query.
    groupings : list, optional
        ``list`` of field names to group by
order_by : dict, optional
Keys = {'field', 'direction'}. `dict` should be formatted as
{'field':'TimeStamp, 'direction':'desc'} or similar
limit : int, optional
Limit the amount of data needed to be returned.
Returns
-------
str
A rendered query
"""
if None in (dataset, tables):
return None
query = "%s %s %s %s %s %s %s" % (
_render_select(select),
_render_sources(dataset, tables),
_render_conditions(conditions),
_render_groupings(groupings),
_render_having(having),
_render_order(order_by),
_render_limit(limit)
)
return query
|
python
|
def render_query(dataset, tables, select=None, conditions=None,
groupings=None, having=None, order_by=None, limit=None):
"""Render a query that will run over the given tables using the specified
parameters.
Parameters
----------
dataset : str
The BigQuery dataset to query data from
tables : Union[dict, list]
The table in `dataset` to query.
select : dict, optional
The keys function as column names and the values function as options to
apply to the select field such as alias and format. For example,
select['start_time'] might have the form
{'alias': 'StartTime', 'format': 'INTEGER-FORMAT_UTC_USEC'}, which
would be represented as 'SEC_TO_TIMESTAMP(INTEGER(start_time)) as
StartTime' in a query. Pass `None` to select all.
conditions : list, optional
a ``list`` of ``dict`` objects to filter results by. Each dict should
have the keys 'field', 'type', and 'comparators'. The first two map to
strings representing the field (e.g. 'foo') and type (e.g. 'FLOAT').
'comparators' maps to another ``dict`` containing the keys 'condition',
'negate', and 'value'.
If 'comparators' = {'condition': '>=', 'negate': False, 'value': 1},
this example will be rendered as 'foo >= FLOAT('1')' in the query.
    groupings : list, optional
        ``list`` of field names to group by
order_by : dict, optional
Keys = {'field', 'direction'}. `dict` should be formatted as
{'field':'TimeStamp, 'direction':'desc'} or similar
limit : int, optional
Limit the amount of data needed to be returned.
Returns
-------
str
A rendered query
"""
if None in (dataset, tables):
return None
query = "%s %s %s %s %s %s %s" % (
_render_select(select),
_render_sources(dataset, tables),
_render_conditions(conditions),
_render_groupings(groupings),
_render_having(having),
_render_order(order_by),
_render_limit(limit)
)
return query
|
[
"def",
"render_query",
"(",
"dataset",
",",
"tables",
",",
"select",
"=",
"None",
",",
"conditions",
"=",
"None",
",",
"groupings",
"=",
"None",
",",
"having",
"=",
"None",
",",
"order_by",
"=",
"None",
",",
"limit",
"=",
"None",
")",
":",
"if",
"None",
"in",
"(",
"dataset",
",",
"tables",
")",
":",
"return",
"None",
"query",
"=",
"\"%s %s %s %s %s %s %s\"",
"%",
"(",
"_render_select",
"(",
"select",
")",
",",
"_render_sources",
"(",
"dataset",
",",
"tables",
")",
",",
"_render_conditions",
"(",
"conditions",
")",
",",
"_render_groupings",
"(",
"groupings",
")",
",",
"_render_having",
"(",
"having",
")",
",",
"_render_order",
"(",
"order_by",
")",
",",
"_render_limit",
"(",
"limit",
")",
")",
"return",
"query"
] |
Render a query that will run over the given tables using the specified
parameters.
Parameters
----------
dataset : str
The BigQuery dataset to query data from
tables : Union[dict, list]
The table in `dataset` to query.
select : dict, optional
The keys function as column names and the values function as options to
apply to the select field such as alias and format. For example,
select['start_time'] might have the form
{'alias': 'StartTime', 'format': 'INTEGER-FORMAT_UTC_USEC'}, which
would be represented as 'SEC_TO_TIMESTAMP(INTEGER(start_time)) as
StartTime' in a query. Pass `None` to select all.
conditions : list, optional
a ``list`` of ``dict`` objects to filter results by. Each dict should
have the keys 'field', 'type', and 'comparators'. The first two map to
strings representing the field (e.g. 'foo') and type (e.g. 'FLOAT').
'comparators' maps to another ``dict`` containing the keys 'condition',
'negate', and 'value'.
If 'comparators' = {'condition': '>=', 'negate': False, 'value': 1},
this example will be rendered as 'foo >= FLOAT('1')' in the query.
groupings : list, optional
    ``list`` of field names to group by
order_by : dict, optional
Keys = {'field', 'direction'}. `dict` should be formatted as
{'field':'TimeStamp, 'direction':'desc'} or similar
limit : int, optional
Limit the amount of data needed to be returned.
Returns
-------
str
A rendered query
|
[
"Render",
"a",
"query",
"that",
"will",
"run",
"over",
"the",
"given",
"tables",
"using",
"the",
"specified",
"parameters",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/query_builder.py#L7-L59
|
train
|
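A plausible end-to-end render_query call; the dataset and table names are invented, and the commented output is approximate because the empty groupings/having/order slots leave extra spaces in the joined string.

from bigquery.query_builder import render_query

print(render_query(
    dataset="mydataset",
    tables=["events"],
    select={"start_time": {"alias": "StartTime"}},
    conditions=[{
        "field": "foo",
        "type": "FLOAT",
        "comparators": [{"condition": ">=", "negate": False, "value": 1}],
    }],
    limit=10,
))
# Roughly: SELECT start_time as StartTime FROM [mydataset.events]
#          WHERE (foo >= FLOAT('1'))  LIMIT 10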
tylertreat/BigQuery-Python
|
bigquery/query_builder.py
|
_render_select
|
def _render_select(selections):
"""Render the selection part of a query.
Parameters
----------
selections : dict
Selections for a table
Returns
-------
str
A string for the "select" part of a query
See Also
--------
render_query : Further clarification of `selections` dict formatting
"""
if not selections:
return 'SELECT *'
rendered_selections = []
for name, options in selections.items():
if not isinstance(options, list):
options = [options]
original_name = name
for options_dict in options:
name = original_name
alias = options_dict.get('alias')
alias = "as %s" % alias if alias else ""
formatter = options_dict.get('format')
if formatter:
name = _format_select(formatter, name)
rendered_selections.append("%s %s" % (name, alias))
return "SELECT " + ", ".join(rendered_selections)
|
python
|
def _render_select(selections):
"""Render the selection part of a query.
Parameters
----------
selections : dict
Selections for a table
Returns
-------
str
A string for the "select" part of a query
See Also
--------
render_query : Further clarification of `selections` dict formatting
"""
if not selections:
return 'SELECT *'
rendered_selections = []
for name, options in selections.items():
if not isinstance(options, list):
options = [options]
original_name = name
for options_dict in options:
name = original_name
alias = options_dict.get('alias')
alias = "as %s" % alias if alias else ""
formatter = options_dict.get('format')
if formatter:
name = _format_select(formatter, name)
rendered_selections.append("%s %s" % (name, alias))
return "SELECT " + ", ".join(rendered_selections)
|
[
"def",
"_render_select",
"(",
"selections",
")",
":",
"if",
"not",
"selections",
":",
"return",
"'SELECT *'",
"rendered_selections",
"=",
"[",
"]",
"for",
"name",
",",
"options",
"in",
"selections",
".",
"items",
"(",
")",
":",
"if",
"not",
"isinstance",
"(",
"options",
",",
"list",
")",
":",
"options",
"=",
"[",
"options",
"]",
"original_name",
"=",
"name",
"for",
"options_dict",
"in",
"options",
":",
"name",
"=",
"original_name",
"alias",
"=",
"options_dict",
".",
"get",
"(",
"'alias'",
")",
"alias",
"=",
"\"as %s\"",
"%",
"alias",
"if",
"alias",
"else",
"\"\"",
"formatter",
"=",
"options_dict",
".",
"get",
"(",
"'format'",
")",
"if",
"formatter",
":",
"name",
"=",
"_format_select",
"(",
"formatter",
",",
"name",
")",
"rendered_selections",
".",
"append",
"(",
"\"%s %s\"",
"%",
"(",
"name",
",",
"alias",
")",
")",
"return",
"\"SELECT \"",
"+",
"\", \"",
".",
"join",
"(",
"rendered_selections",
")"
] |
Render the selection part of a query.
Parameters
----------
selections : dict
Selections for a table
Returns
-------
str
A string for the "select" part of a query
See Also
--------
render_query : Further clarification of `selections` dict formatting
|
[
"Render",
"the",
"selection",
"part",
"of",
"a",
"query",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/query_builder.py#L62-L100
|
train
|
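A small sketch of _render_select, covering the select-all fallback and a formatted alias (column names invented).

from bigquery.query_builder import _render_select

print(_render_select(None))  # SELECT *
print(_render_select({
    "start_time": {"alias": "StartTime", "format": "INTEGER-FORMAT_UTC_USEC"},
}))
# SELECT FORMAT_UTC_USEC(INTEGER(start_time)) as StartTime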
tylertreat/BigQuery-Python
|
bigquery/query_builder.py
|
_format_select
|
def _format_select(formatter, name):
"""Modify the query selector by applying any formatters to it.
Parameters
----------
formatter : str
Hyphen-delimited formatter string where formatters are
applied inside-out, e.g. the formatter string
SEC_TO_MICRO-INTEGER-FORMAT_UTC_USEC applied to the selector
foo would result in FORMAT_UTC_USEC(INTEGER(foo*1000000)).
name: str
The name of the selector to apply formatters to.
Returns
-------
str
The formatted selector
"""
for caster in formatter.split('-'):
if caster == 'SEC_TO_MICRO':
name = "%s*1000000" % name
elif ':' in caster:
caster, args = caster.split(':')
name = "%s(%s,%s)" % (caster, name, args)
else:
name = "%s(%s)" % (caster, name)
return name
|
python
|
def _format_select(formatter, name):
"""Modify the query selector by applying any formatters to it.
Parameters
----------
formatter : str
Hyphen-delimited formatter string where formatters are
applied inside-out, e.g. the formatter string
SEC_TO_MICRO-INTEGER-FORMAT_UTC_USEC applied to the selector
foo would result in FORMAT_UTC_USEC(INTEGER(foo*1000000)).
name: str
The name of the selector to apply formatters to.
Returns
-------
str
The formatted selector
"""
for caster in formatter.split('-'):
if caster == 'SEC_TO_MICRO':
name = "%s*1000000" % name
elif ':' in caster:
caster, args = caster.split(':')
name = "%s(%s,%s)" % (caster, name, args)
else:
name = "%s(%s)" % (caster, name)
return name
|
[
"def",
"_format_select",
"(",
"formatter",
",",
"name",
")",
":",
"for",
"caster",
"in",
"formatter",
".",
"split",
"(",
"'-'",
")",
":",
"if",
"caster",
"==",
"'SEC_TO_MICRO'",
":",
"name",
"=",
"\"%s*1000000\"",
"%",
"name",
"elif",
"':'",
"in",
"caster",
":",
"caster",
",",
"args",
"=",
"caster",
".",
"split",
"(",
"':'",
")",
"name",
"=",
"\"%s(%s,%s)\"",
"%",
"(",
"caster",
",",
"name",
",",
"args",
")",
"else",
":",
"name",
"=",
"\"%s(%s)\"",
"%",
"(",
"caster",
",",
"name",
")",
"return",
"name"
] |
Modify the query selector by applying any formatters to it.
Parameters
----------
formatter : str
Hyphen-delimited formatter string where formatters are
applied inside-out, e.g. the formatter string
SEC_TO_MICRO-INTEGER-FORMAT_UTC_USEC applied to the selector
foo would result in FORMAT_UTC_USEC(INTEGER(foo*1000000)).
name: str
The name of the selector to apply formatters to.
Returns
-------
str
The formatted selector
|
[
"Modify",
"the",
"query",
"selector",
"by",
"applying",
"any",
"formatters",
"to",
"it",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/query_builder.py#L103-L131
|
train
|
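Illustrative _format_select calls; 'LEFT:3' is a hypothetical formatter string chosen to exercise the caster:args branch.

from bigquery.query_builder import _format_select

print(_format_select("SEC_TO_MICRO-INTEGER-FORMAT_UTC_USEC", "foo"))
# FORMAT_UTC_USEC(INTEGER(foo*1000000))
print(_format_select("LEFT:3", "name"))
# LEFT(name,3)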
tylertreat/BigQuery-Python
|
bigquery/query_builder.py
|
_render_sources
|
def _render_sources(dataset, tables):
"""Render the source part of a query.
Parameters
----------
dataset : str
The data set to fetch log data from.
tables : Union[dict, list]
The tables to fetch log data from
Returns
-------
str
A string that represents the "from" part of a query.
"""
if isinstance(tables, dict):
if tables.get('date_range', False):
try:
dataset_table = '.'.join([dataset, tables['table']])
return "FROM (TABLE_DATE_RANGE([{}], TIMESTAMP('{}'),"\
" TIMESTAMP('{}'))) ".format(dataset_table,
tables['from_date'],
tables['to_date'])
except KeyError as exp:
logger.warn(
'Missing parameter %s in selecting sources' % (exp))
else:
return "FROM " + ", ".join(
["[%s.%s]" % (dataset, table) for table in tables])
|
python
|
def _render_sources(dataset, tables):
"""Render the source part of a query.
Parameters
----------
dataset : str
The data set to fetch log data from.
tables : Union[dict, list]
The tables to fetch log data from
Returns
-------
str
A string that represents the "from" part of a query.
"""
if isinstance(tables, dict):
if tables.get('date_range', False):
try:
dataset_table = '.'.join([dataset, tables['table']])
return "FROM (TABLE_DATE_RANGE([{}], TIMESTAMP('{}'),"\
" TIMESTAMP('{}'))) ".format(dataset_table,
tables['from_date'],
tables['to_date'])
except KeyError as exp:
logger.warn(
'Missing parameter %s in selecting sources' % (exp))
else:
return "FROM " + ", ".join(
["[%s.%s]" % (dataset, table) for table in tables])
|
[
"def",
"_render_sources",
"(",
"dataset",
",",
"tables",
")",
":",
"if",
"isinstance",
"(",
"tables",
",",
"dict",
")",
":",
"if",
"tables",
".",
"get",
"(",
"'date_range'",
",",
"False",
")",
":",
"try",
":",
"dataset_table",
"=",
"'.'",
".",
"join",
"(",
"[",
"dataset",
",",
"tables",
"[",
"'table'",
"]",
"]",
")",
"return",
"\"FROM (TABLE_DATE_RANGE([{}], TIMESTAMP('{}'),\"",
"\" TIMESTAMP('{}'))) \"",
".",
"format",
"(",
"dataset_table",
",",
"tables",
"[",
"'from_date'",
"]",
",",
"tables",
"[",
"'to_date'",
"]",
")",
"except",
"KeyError",
"as",
"exp",
":",
"logger",
".",
"warn",
"(",
"'Missing parameter %s in selecting sources'",
"%",
"(",
"exp",
")",
")",
"else",
":",
"return",
"\"FROM \"",
"+",
"\", \"",
".",
"join",
"(",
"[",
"\"[%s.%s]\"",
"%",
"(",
"dataset",
",",
"table",
")",
"for",
"table",
"in",
"tables",
"]",
")"
] |
Render the source part of a query.
Parameters
----------
dataset : str
The data set to fetch log data from.
tables : Union[dict, list]
The tables to fetch log data from
Returns
-------
str
A string that represents the "from" part of a query.
|
[
"Render",
"the",
"source",
"part",
"of",
"a",
"query",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/query_builder.py#L134-L164
|
train
|
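A sketch of both _render_sources branches; the dataset and table names are invented.

from bigquery.query_builder import _render_sources

print(_render_sources("ds", ["a", "b"]))
# FROM [ds.a], [ds.b]
print(_render_sources("ds", {"date_range": True, "table": "events",
                             "from_date": "2015-05-01", "to_date": "2015-05-31"}))
# FROM (TABLE_DATE_RANGE([ds.events], TIMESTAMP('2015-05-01'), TIMESTAMP('2015-05-31')))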
tylertreat/BigQuery-Python
|
bigquery/query_builder.py
|
_render_conditions
|
def _render_conditions(conditions):
"""Render the conditions part of a query.
Parameters
----------
conditions : list
A list of dictionary items to filter a table.
Returns
-------
str
A string that represents the "where" part of a query
See Also
--------
render_query : Further clarification of `conditions` formatting.
"""
if not conditions:
return ""
rendered_conditions = []
for condition in conditions:
field = condition.get('field')
field_type = condition.get('type')
comparators = condition.get('comparators')
if None in (field, field_type, comparators) or not comparators:
logger.warn('Invalid condition passed in: %s' % condition)
continue
rendered_conditions.append(
_render_condition(field, field_type, comparators))
if not rendered_conditions:
return ""
return "WHERE %s" % (" AND ".join(rendered_conditions))
|
python
|
def _render_conditions(conditions):
"""Render the conditions part of a query.
Parameters
----------
conditions : list
A list of dictionary items to filter a table.
Returns
-------
str
A string that represents the "where" part of a query
See Also
--------
render_query : Further clarification of `conditions` formatting.
"""
if not conditions:
return ""
rendered_conditions = []
for condition in conditions:
field = condition.get('field')
field_type = condition.get('type')
comparators = condition.get('comparators')
if None in (field, field_type, comparators) or not comparators:
logger.warn('Invalid condition passed in: %s' % condition)
continue
rendered_conditions.append(
_render_condition(field, field_type, comparators))
if not rendered_conditions:
return ""
return "WHERE %s" % (" AND ".join(rendered_conditions))
|
[
"def",
"_render_conditions",
"(",
"conditions",
")",
":",
"if",
"not",
"conditions",
":",
"return",
"\"\"",
"rendered_conditions",
"=",
"[",
"]",
"for",
"condition",
"in",
"conditions",
":",
"field",
"=",
"condition",
".",
"get",
"(",
"'field'",
")",
"field_type",
"=",
"condition",
".",
"get",
"(",
"'type'",
")",
"comparators",
"=",
"condition",
".",
"get",
"(",
"'comparators'",
")",
"if",
"None",
"in",
"(",
"field",
",",
"field_type",
",",
"comparators",
")",
"or",
"not",
"comparators",
":",
"logger",
".",
"warn",
"(",
"'Invalid condition passed in: %s'",
"%",
"condition",
")",
"continue",
"rendered_conditions",
".",
"append",
"(",
"_render_condition",
"(",
"field",
",",
"field_type",
",",
"comparators",
")",
")",
"if",
"not",
"rendered_conditions",
":",
"return",
"\"\"",
"return",
"\"WHERE %s\"",
"%",
"(",
"\" AND \"",
".",
"join",
"(",
"rendered_conditions",
")",
")"
] |
Render the conditions part of a query.
Parameters
----------
conditions : list
A list of dictionary items to filter a table.
Returns
-------
str
A string that represents the "where" part of a query
See Also
--------
render_query : Further clarification of `conditions` formatting.
|
[
"Render",
"the",
"conditions",
"part",
"of",
"a",
"query",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/query_builder.py#L167-L205
|
train
|
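A minimal _render_conditions example (field name and value invented).

from bigquery.query_builder import _render_conditions

print(_render_conditions([{
    "field": "status",
    "type": "STRING",
    "comparators": [{"condition": "==", "negate": False, "value": "ok"}],
}]))
# WHERE (status == STRING('ok'))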
tylertreat/BigQuery-Python
|
bigquery/query_builder.py
|
_render_condition
|
def _render_condition(field, field_type, comparators):
"""Render a single query condition.
Parameters
----------
field : str
The field the condition applies to
field_type : str
The data type of the field.
comparators : array_like
An iterable of logic operators to use.
Returns
-------
str
a condition string.
"""
field_type = field_type.upper()
negated_conditions, normal_conditions = [], []
for comparator in comparators:
condition = comparator.get("condition").upper()
negated = "NOT " if comparator.get("negate") else ""
value = comparator.get("value")
if condition == "IN":
if isinstance(value, (list, tuple, set)):
value = ', '.join(
sorted([_render_condition_value(v, field_type)
for v in value])
)
else:
value = _render_condition_value(value, field_type)
value = "(" + value + ")"
elif condition == "IS NULL" or condition == "IS NOT NULL":
return field + " " + condition
elif condition == "BETWEEN":
if isinstance(value, (tuple, list, set)) and len(value) == 2:
value = ' AND '.join(
sorted([_render_condition_value(v, field_type)
for v in value])
)
elif isinstance(value, (tuple, list, set)) and len(value) != 2:
logger.warn('Invalid condition passed in: %s' % condition)
else:
value = _render_condition_value(value, field_type)
rendered_sub_condition = "%s%s %s %s" % (
negated, field, condition, value)
if comparator.get("negate"):
negated_conditions.append(rendered_sub_condition)
else:
normal_conditions.append(rendered_sub_condition)
rendered_normal = " AND ".join(normal_conditions)
rendered_negated = " AND ".join(negated_conditions)
if rendered_normal and rendered_negated:
return "((%s) AND (%s))" % (rendered_normal, rendered_negated)
return "(%s)" % (rendered_normal or rendered_negated)
|
python
|
def _render_condition(field, field_type, comparators):
"""Render a single query condition.
Parameters
----------
field : str
The field the condition applies to
field_type : str
The data type of the field.
comparators : array_like
An iterable of logic operators to use.
Returns
-------
str
a condition string.
"""
field_type = field_type.upper()
negated_conditions, normal_conditions = [], []
for comparator in comparators:
condition = comparator.get("condition").upper()
negated = "NOT " if comparator.get("negate") else ""
value = comparator.get("value")
if condition == "IN":
if isinstance(value, (list, tuple, set)):
value = ', '.join(
sorted([_render_condition_value(v, field_type)
for v in value])
)
else:
value = _render_condition_value(value, field_type)
value = "(" + value + ")"
elif condition == "IS NULL" or condition == "IS NOT NULL":
return field + " " + condition
elif condition == "BETWEEN":
if isinstance(value, (tuple, list, set)) and len(value) == 2:
value = ' AND '.join(
sorted([_render_condition_value(v, field_type)
for v in value])
)
elif isinstance(value, (tuple, list, set)) and len(value) != 2:
logger.warn('Invalid condition passed in: %s' % condition)
else:
value = _render_condition_value(value, field_type)
rendered_sub_condition = "%s%s %s %s" % (
negated, field, condition, value)
if comparator.get("negate"):
negated_conditions.append(rendered_sub_condition)
else:
normal_conditions.append(rendered_sub_condition)
rendered_normal = " AND ".join(normal_conditions)
rendered_negated = " AND ".join(negated_conditions)
if rendered_normal and rendered_negated:
return "((%s) AND (%s))" % (rendered_normal, rendered_negated)
return "(%s)" % (rendered_normal or rendered_negated)
|
[
"def",
"_render_condition",
"(",
"field",
",",
"field_type",
",",
"comparators",
")",
":",
"field_type",
"=",
"field_type",
".",
"upper",
"(",
")",
"negated_conditions",
",",
"normal_conditions",
"=",
"[",
"]",
",",
"[",
"]",
"for",
"comparator",
"in",
"comparators",
":",
"condition",
"=",
"comparator",
".",
"get",
"(",
"\"condition\"",
")",
".",
"upper",
"(",
")",
"negated",
"=",
"\"NOT \"",
"if",
"comparator",
".",
"get",
"(",
"\"negate\"",
")",
"else",
"\"\"",
"value",
"=",
"comparator",
".",
"get",
"(",
"\"value\"",
")",
"if",
"condition",
"==",
"\"IN\"",
":",
"if",
"isinstance",
"(",
"value",
",",
"(",
"list",
",",
"tuple",
",",
"set",
")",
")",
":",
"value",
"=",
"', '",
".",
"join",
"(",
"sorted",
"(",
"[",
"_render_condition_value",
"(",
"v",
",",
"field_type",
")",
"for",
"v",
"in",
"value",
"]",
")",
")",
"else",
":",
"value",
"=",
"_render_condition_value",
"(",
"value",
",",
"field_type",
")",
"value",
"=",
"\"(\"",
"+",
"value",
"+",
"\")\"",
"elif",
"condition",
"==",
"\"IS NULL\"",
"or",
"condition",
"==",
"\"IS NOT NULL\"",
":",
"return",
"field",
"+",
"\" \"",
"+",
"condition",
"elif",
"condition",
"==",
"\"BETWEEN\"",
":",
"if",
"isinstance",
"(",
"value",
",",
"(",
"tuple",
",",
"list",
",",
"set",
")",
")",
"and",
"len",
"(",
"value",
")",
"==",
"2",
":",
"value",
"=",
"' AND '",
".",
"join",
"(",
"sorted",
"(",
"[",
"_render_condition_value",
"(",
"v",
",",
"field_type",
")",
"for",
"v",
"in",
"value",
"]",
")",
")",
"elif",
"isinstance",
"(",
"value",
",",
"(",
"tuple",
",",
"list",
",",
"set",
")",
")",
"and",
"len",
"(",
"value",
")",
"!=",
"2",
":",
"logger",
".",
"warn",
"(",
"'Invalid condition passed in: %s'",
"%",
"condition",
")",
"else",
":",
"value",
"=",
"_render_condition_value",
"(",
"value",
",",
"field_type",
")",
"rendered_sub_condition",
"=",
"\"%s%s %s %s\"",
"%",
"(",
"negated",
",",
"field",
",",
"condition",
",",
"value",
")",
"if",
"comparator",
".",
"get",
"(",
"\"negate\"",
")",
":",
"negated_conditions",
".",
"append",
"(",
"rendered_sub_condition",
")",
"else",
":",
"normal_conditions",
".",
"append",
"(",
"rendered_sub_condition",
")",
"rendered_normal",
"=",
"\" AND \"",
".",
"join",
"(",
"normal_conditions",
")",
"rendered_negated",
"=",
"\" AND \"",
".",
"join",
"(",
"negated_conditions",
")",
"if",
"rendered_normal",
"and",
"rendered_negated",
":",
"return",
"\"((%s) AND (%s))\"",
"%",
"(",
"rendered_normal",
",",
"rendered_negated",
")",
"return",
"\"(%s)\"",
"%",
"(",
"rendered_normal",
"or",
"rendered_negated",
")"
] |
Render a single query condition.
Parameters
----------
field : str
The field the condition applies to
field_type : str
The data type of the field.
comparators : array_like
An iterable of logic operators to use.
Returns
-------
str
a condition string.
|
[
"Render",
"a",
"single",
"query",
"condition",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/query_builder.py#L208-L272
|
train
|
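Two hedged _render_condition calls exercising the IN and IS NULL branches; field names are invented, and note the function upper-cases field_type itself.

from bigquery.query_builder import _render_condition

print(_render_condition("age", "integer", [
    {"condition": "IN", "negate": False, "value": [18, 21]},
]))
# (age IN (INTEGER('18'), INTEGER('21')))
print(_render_condition("deleted_at", "timestamp", [
    {"condition": "IS NULL", "negate": False, "value": None},
]))
# deleted_at IS NULL   (this branch returns early, without parentheses)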
tylertreat/BigQuery-Python
|
bigquery/query_builder.py
|
_render_condition_value
|
def _render_condition_value(value, field_type):
"""Render a query condition value.
Parameters
----------
value : Union[bool, int, float, str, datetime]
The value of the condition
field_type : str
The data type of the field
Returns
-------
str
A value string.
"""
# BigQuery cannot cast strings to booleans, convert to ints
if field_type == "BOOLEAN":
value = 1 if value else 0
elif field_type in ("STRING", "INTEGER", "FLOAT"):
value = "'%s'" % (value)
    elif field_type in ("TIMESTAMP",):  # trailing comma: a tuple, not a parenthesized string
value = "'%s'" % (str(value))
return "%s(%s)" % (field_type, value)
|
python
|
def _render_condition_value(value, field_type):
"""Render a query condition value.
Parameters
----------
value : Union[bool, int, float, str, datetime]
The value of the condition
field_type : str
The data type of the field
Returns
-------
str
A value string.
"""
# BigQuery cannot cast strings to booleans, convert to ints
if field_type == "BOOLEAN":
value = 1 if value else 0
elif field_type in ("STRING", "INTEGER", "FLOAT"):
value = "'%s'" % (value)
    elif field_type in ("TIMESTAMP",):  # trailing comma: a tuple, not a parenthesized string
value = "'%s'" % (str(value))
return "%s(%s)" % (field_type, value)
|
[
"def",
"_render_condition_value",
"(",
"value",
",",
"field_type",
")",
":",
"# BigQuery cannot cast strings to booleans, convert to ints",
"if",
"field_type",
"==",
"\"BOOLEAN\"",
":",
"value",
"=",
"1",
"if",
"value",
"else",
"0",
"elif",
"field_type",
"in",
"(",
"\"STRING\"",
",",
"\"INTEGER\"",
",",
"\"FLOAT\"",
")",
":",
"value",
"=",
"\"'%s'\"",
"%",
"(",
"value",
")",
"elif",
"field_type",
"in",
"(",
"\"TIMESTAMP\"",
")",
":",
"value",
"=",
"\"'%s'\"",
"%",
"(",
"str",
"(",
"value",
")",
")",
"return",
"\"%s(%s)\"",
"%",
"(",
"field_type",
",",
"value",
")"
] |
Render a query condition value.
Parameters
----------
value : Union[bool, int, float, str, datetime]
The value of the condition
field_type : str
The data type of the field
Returns
-------
str
A value string.
|
[
"Render",
"a",
"query",
"condition",
"value",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/query_builder.py#L275-L298
|
train
|
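Spot checks for _render_condition_value, showing the quoting and the boolean-to-int cast.

from bigquery.query_builder import _render_condition_value

print(_render_condition_value(1, "FLOAT"))       # FLOAT('1')
print(_render_condition_value(True, "BOOLEAN"))  # BOOLEAN(1)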
tylertreat/BigQuery-Python
|
bigquery/query_builder.py
|
_render_having
|
def _render_having(having_conditions):
"""Render the having part of a query.
Parameters
----------
having_conditions : list
A ``list`` of ``dict``s to filter the rows
Returns
-------
str
A string that represents the "having" part of a query.
See Also
--------
render_query : Further clarification of `conditions` formatting.
"""
if not having_conditions:
return ""
rendered_conditions = []
for condition in having_conditions:
field = condition.get('field')
field_type = condition.get('type')
comparators = condition.get('comparators')
if None in (field, field_type, comparators) or not comparators:
logger.warn('Invalid condition passed in: %s' % condition)
continue
rendered_conditions.append(
_render_condition(field, field_type, comparators))
if not rendered_conditions:
return ""
return "HAVING %s" % (" AND ".join(rendered_conditions))
|
python
|
def _render_having(having_conditions):
"""Render the having part of a query.
Parameters
----------
having_conditions : list
A ``list`` of ``dict``s to filter the rows
Returns
-------
str
A string that represents the "having" part of a query.
See Also
--------
render_query : Further clarification of `conditions` formatting.
"""
if not having_conditions:
return ""
rendered_conditions = []
for condition in having_conditions:
field = condition.get('field')
field_type = condition.get('type')
comparators = condition.get('comparators')
if None in (field, field_type, comparators) or not comparators:
logger.warn('Invalid condition passed in: %s' % condition)
continue
rendered_conditions.append(
_render_condition(field, field_type, comparators))
if not rendered_conditions:
return ""
return "HAVING %s" % (" AND ".join(rendered_conditions))
|
[
"def",
"_render_having",
"(",
"having_conditions",
")",
":",
"if",
"not",
"having_conditions",
":",
"return",
"\"\"",
"rendered_conditions",
"=",
"[",
"]",
"for",
"condition",
"in",
"having_conditions",
":",
"field",
"=",
"condition",
".",
"get",
"(",
"'field'",
")",
"field_type",
"=",
"condition",
".",
"get",
"(",
"'type'",
")",
"comparators",
"=",
"condition",
".",
"get",
"(",
"'comparators'",
")",
"if",
"None",
"in",
"(",
"field",
",",
"field_type",
",",
"comparators",
")",
"or",
"not",
"comparators",
":",
"logger",
".",
"warn",
"(",
"'Invalid condition passed in: %s'",
"%",
"condition",
")",
"continue",
"rendered_conditions",
".",
"append",
"(",
"_render_condition",
"(",
"field",
",",
"field_type",
",",
"comparators",
")",
")",
"if",
"not",
"rendered_conditions",
":",
"return",
"\"\"",
"return",
"\"HAVING %s\"",
"%",
"(",
"\" AND \"",
".",
"join",
"(",
"rendered_conditions",
")",
")"
] |
Render the having part of a query.
Parameters
----------
having_conditions : list
A ``list`` of ``dict``s to filter the rows
Returns
-------
str
A string that represents the "having" part of a query.
See Also
--------
render_query : Further clarification of `conditions` formatting.
|
[
"Render",
"the",
"having",
"part",
"of",
"a",
"query",
"."
] |
88d99de42d954d49fc281460068f0e95003da098
|
https://github.com/tylertreat/BigQuery-Python/blob/88d99de42d954d49fc281460068f0e95003da098/bigquery/query_builder.py#L321-L358
|
train
|
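A minimal _render_having example, mirroring the conditions format (names invented).

from bigquery.query_builder import _render_having

print(_render_having([{
    "field": "cnt",
    "type": "INTEGER",
    "comparators": [{"condition": ">", "negate": False, "value": 10}],
}]))
# HAVING (cnt > INTEGER('10'))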
stlehmann/Flask-MQTT
|
flask_mqtt/__init__.py
|
Mqtt.init_app
|
def init_app(self, app):
# type: (Flask) -> None
"""Init the Flask-MQTT addon."""
self.client_id = app.config.get("MQTT_CLIENT_ID", "")
if isinstance(self.client_id, unicode):
self.client._client_id = self.client_id.encode('utf-8')
else:
self.client._client_id = self.client_id
self.client._transport = app.config.get("MQTT_TRANSPORT", "tcp").lower()
self.client._protocol = app.config.get("MQTT_PROTOCOL_VERSION", MQTTv311)
self.client.on_connect = self._handle_connect
self.client.on_disconnect = self._handle_disconnect
self.username = app.config.get("MQTT_USERNAME")
self.password = app.config.get("MQTT_PASSWORD")
self.broker_url = app.config.get("MQTT_BROKER_URL", "localhost")
self.broker_port = app.config.get("MQTT_BROKER_PORT", 1883)
self.tls_enabled = app.config.get("MQTT_TLS_ENABLED", False)
self.keepalive = app.config.get("MQTT_KEEPALIVE", 60)
self.last_will_topic = app.config.get("MQTT_LAST_WILL_TOPIC")
self.last_will_message = app.config.get("MQTT_LAST_WILL_MESSAGE")
self.last_will_qos = app.config.get("MQTT_LAST_WILL_QOS", 0)
self.last_will_retain = app.config.get("MQTT_LAST_WILL_RETAIN", False)
if self.tls_enabled:
self.tls_ca_certs = app.config["MQTT_TLS_CA_CERTS"]
self.tls_certfile = app.config.get("MQTT_TLS_CERTFILE")
self.tls_keyfile = app.config.get("MQTT_TLS_KEYFILE")
self.tls_cert_reqs = app.config.get("MQTT_TLS_CERT_REQS",
ssl.CERT_REQUIRED)
self.tls_version = app.config.get("MQTT_TLS_VERSION",
ssl.PROTOCOL_TLSv1)
self.tls_ciphers = app.config.get("MQTT_TLS_CIPHERS")
self.tls_insecure = app.config.get("MQTT_TLS_INSECURE", False)
# set last will message
if self.last_will_topic is not None:
self.client.will_set(
self.last_will_topic,
self.last_will_message,
self.last_will_qos,
self.last_will_retain,
)
self._connect()
|
python
|
def init_app(self, app):
# type: (Flask) -> None
"""Init the Flask-MQTT addon."""
self.client_id = app.config.get("MQTT_CLIENT_ID", "")
if isinstance(self.client_id, unicode):
self.client._client_id = self.client_id.encode('utf-8')
else:
self.client._client_id = self.client_id
self.client._transport = app.config.get("MQTT_TRANSPORT", "tcp").lower()
self.client._protocol = app.config.get("MQTT_PROTOCOL_VERSION", MQTTv311)
self.client.on_connect = self._handle_connect
self.client.on_disconnect = self._handle_disconnect
self.username = app.config.get("MQTT_USERNAME")
self.password = app.config.get("MQTT_PASSWORD")
self.broker_url = app.config.get("MQTT_BROKER_URL", "localhost")
self.broker_port = app.config.get("MQTT_BROKER_PORT", 1883)
self.tls_enabled = app.config.get("MQTT_TLS_ENABLED", False)
self.keepalive = app.config.get("MQTT_KEEPALIVE", 60)
self.last_will_topic = app.config.get("MQTT_LAST_WILL_TOPIC")
self.last_will_message = app.config.get("MQTT_LAST_WILL_MESSAGE")
self.last_will_qos = app.config.get("MQTT_LAST_WILL_QOS", 0)
self.last_will_retain = app.config.get("MQTT_LAST_WILL_RETAIN", False)
if self.tls_enabled:
self.tls_ca_certs = app.config["MQTT_TLS_CA_CERTS"]
self.tls_certfile = app.config.get("MQTT_TLS_CERTFILE")
self.tls_keyfile = app.config.get("MQTT_TLS_KEYFILE")
self.tls_cert_reqs = app.config.get("MQTT_TLS_CERT_REQS",
ssl.CERT_REQUIRED)
self.tls_version = app.config.get("MQTT_TLS_VERSION",
ssl.PROTOCOL_TLSv1)
self.tls_ciphers = app.config.get("MQTT_TLS_CIPHERS")
self.tls_insecure = app.config.get("MQTT_TLS_INSECURE", False)
# set last will message
if self.last_will_topic is not None:
self.client.will_set(
self.last_will_topic,
self.last_will_message,
self.last_will_qos,
self.last_will_retain,
)
self._connect()
|
[
"def",
"init_app",
"(",
"self",
",",
"app",
")",
":",
"# type: (Flask) -> None",
"self",
".",
"client_id",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_CLIENT_ID\"",
",",
"\"\"",
")",
"if",
"isinstance",
"(",
"self",
".",
"client_id",
",",
"unicode",
")",
":",
"self",
".",
"client",
".",
"_client_id",
"=",
"self",
".",
"client_id",
".",
"encode",
"(",
"'utf-8'",
")",
"else",
":",
"self",
".",
"client",
".",
"_client_id",
"=",
"self",
".",
"client_id",
"self",
".",
"client",
".",
"_transport",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_TRANSPORT\"",
",",
"\"tcp\"",
")",
".",
"lower",
"(",
")",
"self",
".",
"client",
".",
"_protocol",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_PROTOCOL_VERSION\"",
",",
"MQTTv311",
")",
"self",
".",
"client",
".",
"on_connect",
"=",
"self",
".",
"_handle_connect",
"self",
".",
"client",
".",
"on_disconnect",
"=",
"self",
".",
"_handle_disconnect",
"self",
".",
"username",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_USERNAME\"",
")",
"self",
".",
"password",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_PASSWORD\"",
")",
"self",
".",
"broker_url",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_BROKER_URL\"",
",",
"\"localhost\"",
")",
"self",
".",
"broker_port",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_BROKER_PORT\"",
",",
"1883",
")",
"self",
".",
"tls_enabled",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_TLS_ENABLED\"",
",",
"False",
")",
"self",
".",
"keepalive",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_KEEPALIVE\"",
",",
"60",
")",
"self",
".",
"last_will_topic",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_LAST_WILL_TOPIC\"",
")",
"self",
".",
"last_will_message",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_LAST_WILL_MESSAGE\"",
")",
"self",
".",
"last_will_qos",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_LAST_WILL_QOS\"",
",",
"0",
")",
"self",
".",
"last_will_retain",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_LAST_WILL_RETAIN\"",
",",
"False",
")",
"if",
"self",
".",
"tls_enabled",
":",
"self",
".",
"tls_ca_certs",
"=",
"app",
".",
"config",
"[",
"\"MQTT_TLS_CA_CERTS\"",
"]",
"self",
".",
"tls_certfile",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_TLS_CERTFILE\"",
")",
"self",
".",
"tls_keyfile",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_TLS_KEYFILE\"",
")",
"self",
".",
"tls_cert_reqs",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_TLS_CERT_REQS\"",
",",
"ssl",
".",
"CERT_REQUIRED",
")",
"self",
".",
"tls_version",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_TLS_VERSION\"",
",",
"ssl",
".",
"PROTOCOL_TLSv1",
")",
"self",
".",
"tls_ciphers",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_TLS_CIPHERS\"",
")",
"self",
".",
"tls_insecure",
"=",
"app",
".",
"config",
".",
"get",
"(",
"\"MQTT_TLS_INSECURE\"",
",",
"False",
")",
"# set last will message",
"if",
"self",
".",
"last_will_topic",
"is",
"not",
"None",
":",
"self",
".",
"client",
".",
"will_set",
"(",
"self",
".",
"last_will_topic",
",",
"self",
".",
"last_will_message",
",",
"self",
".",
"last_will_qos",
",",
"self",
".",
"last_will_retain",
",",
")",
"self",
".",
"_connect",
"(",
")"
] |
Init the Flask-MQTT addon.
|
[
"Init",
"the",
"Flask",
"-",
"MQTT",
"addon",
"."
] |
77d474ab87484ae6eaef2fee3bf02406beee2e17
|
https://github.com/stlehmann/Flask-MQTT/blob/77d474ab87484ae6eaef2fee3bf02406beee2e17/flask_mqtt/__init__.py#L87-L133
|
train
|
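A plausible Flask setup around init_app; the broker host and last-will values are placeholders.

from flask import Flask
from flask_mqtt import Mqtt

app = Flask(__name__)
app.config["MQTT_BROKER_URL"] = "broker.example.com"   # placeholder broker
app.config["MQTT_BROKER_PORT"] = 1883
app.config["MQTT_LAST_WILL_TOPIC"] = "clients/flask/status"
app.config["MQTT_LAST_WILL_MESSAGE"] = "offline"

mqtt = Mqtt()
mqtt.init_app(app)  # reads the config above, sets the last will, then connects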
stlehmann/Flask-MQTT
|
flask_mqtt/__init__.py
|
Mqtt.subscribe
|
def subscribe(self, topic, qos=0):
# type: (str, int) -> Tuple[int, int]
"""
Subscribe to a certain topic.
:param topic: a string specifying the subscription topic to
subscribe to.
:param qos: the desired quality of service level for the subscription.
Defaults to 0.
:rtype: (int, int)
:result: (result, mid)
A topic is a UTF-8 string, which is used by the broker to filter
messages for each connected client. A topic consists of one or more
topic levels. Each topic level is separated by a forward slash
(topic level separator).
The function returns a tuple (result, mid), where result is
MQTT_ERR_SUCCESS to indicate success or (MQTT_ERR_NO_CONN, None) if the
client is not currently connected. mid is the message ID for the
subscribe request. The mid value can be used to track the subscribe
request by checking against the mid argument in the on_subscribe()
callback if it is defined.
**Topic example:** `myhome/groundfloor/livingroom/temperature`
"""
# TODO: add support for list of topics
# don't subscribe if already subscribed
# try to subscribe
result, mid = self.client.subscribe(topic=topic, qos=qos)
# if successful add to topics
if result == MQTT_ERR_SUCCESS:
self.topics[topic] = TopicQos(topic=topic, qos=qos)
logger.debug('Subscribed to topic: {0}, qos: {1}'
.format(topic, qos))
else:
logger.error('Error {0} subscribing to topic: {1}'
.format(result, topic))
return (result, mid)
|
python
|
def subscribe(self, topic, qos=0):
# type: (str, int) -> Tuple[int, int]
"""
Subscribe to a certain topic.
:param topic: a string specifying the subscription topic to
subscribe to.
:param qos: the desired quality of service level for the subscription.
Defaults to 0.
:rtype: (int, int)
:result: (result, mid)
A topic is a UTF-8 string, which is used by the broker to filter
messages for each connected client. A topic consists of one or more
topic levels. Each topic level is separated by a forward slash
(topic level separator).
The function returns a tuple (result, mid), where result is
MQTT_ERR_SUCCESS to indicate success or (MQTT_ERR_NO_CONN, None) if the
client is not currently connected. mid is the message ID for the
subscribe request. The mid value can be used to track the subscribe
request by checking against the mid argument in the on_subscribe()
callback if it is defined.
**Topic example:** `myhome/groundfloor/livingroom/temperature`
"""
# TODO: add support for list of topics
# don't subscribe if already subscribed
# try to subscribe
result, mid = self.client.subscribe(topic=topic, qos=qos)
# if successful add to topics
if result == MQTT_ERR_SUCCESS:
self.topics[topic] = TopicQos(topic=topic, qos=qos)
logger.debug('Subscribed to topic: {0}, qos: {1}'
.format(topic, qos))
else:
logger.error('Error {0} subscribing to topic: {1}'
.format(result, topic))
return (result, mid)
|
[
"def",
"subscribe",
"(",
"self",
",",
"topic",
",",
"qos",
"=",
"0",
")",
":",
"# type: (str, int) -> Tuple[int, int]",
"# TODO: add support for list of topics",
"# don't subscribe if already subscribed",
"# try to subscribe",
"result",
",",
"mid",
"=",
"self",
".",
"client",
".",
"subscribe",
"(",
"topic",
"=",
"topic",
",",
"qos",
"=",
"qos",
")",
"# if successful add to topics",
"if",
"result",
"==",
"MQTT_ERR_SUCCESS",
":",
"self",
".",
"topics",
"[",
"topic",
"]",
"=",
"TopicQos",
"(",
"topic",
"=",
"topic",
",",
"qos",
"=",
"qos",
")",
"logger",
".",
"debug",
"(",
"'Subscribed to topic: {0}, qos: {1}'",
".",
"format",
"(",
"topic",
",",
"qos",
")",
")",
"else",
":",
"logger",
".",
"error",
"(",
"'Error {0} subscribing to topic: {1}'",
".",
"format",
"(",
"result",
",",
"topic",
")",
")",
"return",
"(",
"result",
",",
"mid",
")"
] |
Subscribe to a certain topic.
:param topic: a string specifying the subscription topic to
subscribe to.
:param qos: the desired quality of service level for the subscription.
Defaults to 0.
:rtype: (int, int)
:result: (result, mid)
A topic is a UTF-8 string, which is used by the broker to filter
messages for each connected client. A topic consists of one or more
topic levels. Each topic level is separated by a forward slash
(topic level separator).
The function returns a tuple (result, mid), where result is
MQTT_ERR_SUCCESS to indicate success or (MQTT_ERR_NO_CONN, None) if the
client is not currently connected. mid is the message ID for the
subscribe request. The mid value can be used to track the subscribe
request by checking against the mid argument in the on_subscribe()
callback if it is defined.
**Topic example:** `myhome/groundfloor/livingroom/temperature`
|
[
"Subscribe",
"to",
"a",
"certain",
"topic",
"."
] |
77d474ab87484ae6eaef2fee3bf02406beee2e17
|
https://github.com/stlehmann/Flask-MQTT/blob/77d474ab87484ae6eaef2fee3bf02406beee2e17/flask_mqtt/__init__.py#L225-L268
|
train
|
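A subscribe sketch reusing the hypothetical mqtt instance from the init_app example above; in paho-mqtt, MQTT_ERR_SUCCESS equals 0.

result, mid = mqtt.subscribe("myhome/groundfloor/livingroom/temperature", qos=1)
if result == 0:  # MQTT_ERR_SUCCESS
    print("subscribe request sent, mid={}".format(mid))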
stlehmann/Flask-MQTT
|
flask_mqtt/__init__.py
|
Mqtt.unsubscribe
|
def unsubscribe(self, topic):
# type: (str) -> Optional[Tuple[int, int]]
"""
Unsubscribe from a single topic.
:param topic: a single string that is the subscription topic to
unsubscribe from
:rtype: (int, int)
:result: (result, mid)
Returns a tuple (result, mid), where result is MQTT_ERR_SUCCESS
to indicate success or (MQTT_ERR_NO_CONN, None) if the client is not
currently connected.
mid is the message ID for the unsubscribe request. The mid value can be
used to track the unsubscribe request by checking against the mid
argument in the on_unsubscribe() callback if it is defined.
"""
# don't unsubscribe if not in topics
if topic in self.topics:
result, mid = self.client.unsubscribe(topic)
if result == MQTT_ERR_SUCCESS:
self.topics.pop(topic)
logger.debug('Unsubscribed from topic: {0}'.format(topic))
else:
logger.debug('Error {0} unsubscribing from topic: {1}'
.format(result, topic))
# if successful remove from topics
return result, mid
return None
|
python
|
def unsubscribe(self, topic):
# type: (str) -> Optional[Tuple[int, int]]
"""
Unsubscribe from a single topic.
:param topic: a single string that is the subscription topic to
unsubscribe from
:rtype: (int, int)
:result: (result, mid)
Returns a tuple (result, mid), where result is MQTT_ERR_SUCCESS
to indicate success or (MQTT_ERR_NO_CONN, None) if the client is not
currently connected.
mid is the message ID for the unsubscribe request. The mid value can be
used to track the unsubscribe request by checking against the mid
argument in the on_unsubscribe() callback if it is defined.
"""
# don't unsubscribe if not in topics
if topic in self.topics:
result, mid = self.client.unsubscribe(topic)
if result == MQTT_ERR_SUCCESS:
self.topics.pop(topic)
logger.debug('Unsubscribed from topic: {0}'.format(topic))
else:
logger.debug('Error {0} unsubscribing from topic: {1}'
.format(result, topic))
# if successful remove from topics
return result, mid
return None
|
[
"def",
"unsubscribe",
"(",
"self",
",",
"topic",
")",
":",
"# type: (str) -> Optional[Tuple[int, int]]",
"# don't unsubscribe if not in topics",
"if",
"topic",
"in",
"self",
".",
"topics",
":",
"result",
",",
"mid",
"=",
"self",
".",
"client",
".",
"unsubscribe",
"(",
"topic",
")",
"if",
"result",
"==",
"MQTT_ERR_SUCCESS",
":",
"self",
".",
"topics",
".",
"pop",
"(",
"topic",
")",
"logger",
".",
"debug",
"(",
"'Unsubscribed from topic: {0}'",
".",
"format",
"(",
"topic",
")",
")",
"else",
":",
"logger",
".",
"debug",
"(",
"'Error {0} unsubscribing from topic: {1}'",
".",
"format",
"(",
"result",
",",
"topic",
")",
")",
"# if successful remove from topics",
"return",
"result",
",",
"mid",
"return",
"None"
] |
Unsubscribe from a single topic.
:param topic: a single string that is the subscription topic to
unsubscribe from
:rtype: (int, int)
:result: (result, mid)
Returns a tuple (result, mid), where result is MQTT_ERR_SUCCESS
to indicate success or (MQTT_ERR_NO_CONN, None) if the client is not
currently connected.
mid is the message ID for the unsubscribe request. The mid value can be
used to track the unsubscribe request by checking against the mid
argument in the on_unsubscribe() callback if it is defined.
|
[
"Unsubscribe",
"from",
"a",
"single",
"topic",
"."
] |
77d474ab87484ae6eaef2fee3bf02406beee2e17
|
https://github.com/stlehmann/Flask-MQTT/blob/77d474ab87484ae6eaef2fee3bf02406beee2e17/flask_mqtt/__init__.py#L270-L302
|
train
|
stlehmann/Flask-MQTT
|
flask_mqtt/__init__.py
|
Mqtt.unsubscribe_all
|
def unsubscribe_all(self):
# type: () -> None
"""Unsubscribe from all topics."""
topics = list(self.topics.keys())
for topic in topics:
self.unsubscribe(topic)
|
python
|
def unsubscribe_all(self):
# type: () -> None
"""Unsubscribe from all topics."""
topics = list(self.topics.keys())
for topic in topics:
self.unsubscribe(topic)
|
[
"def",
"unsubscribe_all",
"(",
"self",
")",
":",
"# type: () -> None",
"topics",
"=",
"list",
"(",
"self",
".",
"topics",
".",
"keys",
"(",
")",
")",
"for",
"topic",
"in",
"topics",
":",
"self",
".",
"unsubscribe",
"(",
"topic",
")"
] |
Unsubscribe from all topics.
|
[
"Unsubscribe",
"from",
"all",
"topics",
"."
] |
77d474ab87484ae6eaef2fee3bf02406beee2e17
|
https://github.com/stlehmann/Flask-MQTT/blob/77d474ab87484ae6eaef2fee3bf02406beee2e17/flask_mqtt/__init__.py#L304-L309
|
train
|
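Continuing the same hypothetical mqtt instance, a sketch of unsubscribe and unsubscribe_all.

ret = mqtt.unsubscribe("myhome/groundfloor/livingroom/temperature")
if ret is not None:      # None means the topic was never tracked in mqtt.topics
    result, mid = ret
mqtt.unsubscribe_all()   # drops every remaining tracked subscription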
stlehmann/Flask-MQTT
|
flask_mqtt/__init__.py
|
Mqtt.publish
|
def publish(self, topic, payload=None, qos=0, retain=False):
# type: (str, bytes, int, bool) -> Tuple[int, int]
"""
Send a message to the broker.
:param topic: the topic that the message should be published on
:param payload: the actual message to send. If not given, or set to
None a zero length message will be used. Passing an
int or float will result in the payload being
converted to a string representing that number.
If you wish to send a true int/float, use struct.pack()
to create the payload you require.
:param qos: the quality of service level to use
:param retain: if set to True, the message will be set as the
"last known good"/retained message for the topic
:returns: Returns a tuple (result, mid), where result is
MQTT_ERR_SUCCESS to indicate success or MQTT_ERR_NO_CONN
if the client is not currently connected. mid is the message
ID for the publish request.
"""
if not self.connected:
self.client.reconnect()
result, mid = self.client.publish(topic, payload, qos, retain)
if result == MQTT_ERR_SUCCESS:
logger.debug('Published topic {0}: {1}'.format(topic, payload))
else:
logger.error('Error {0} publishing topic {1}'
.format(result, topic))
return (result, mid)
|
python
|
def publish(self, topic, payload=None, qos=0, retain=False):
# type: (str, bytes, int, bool) -> Tuple[int, int]
"""
Send a message to the broker.
:param topic: the topic that the message should be published on
:param payload: the actual message to send. If not given, or set to
None a zero length message will be used. Passing an
int or float will result in the payload being
converted to a string representing that number.
If you wish to send a true int/float, use struct.pack()
to create the payload you require.
:param qos: the quality of service level to use
:param retain: if set to True, the message will be set as the
"last known good"/retained message for the topic
:returns: Returns a tuple (result, mid), where result is
MQTT_ERR_SUCCESS to indicate success or MQTT_ERR_NO_CONN
if the client is not currently connected. mid is the message
ID for the publish request.
"""
if not self.connected:
self.client.reconnect()
result, mid = self.client.publish(topic, payload, qos, retain)
if result == MQTT_ERR_SUCCESS:
logger.debug('Published topic {0}: {1}'.format(topic, payload))
else:
logger.error('Error {0} publishing topic {1}'
.format(result, topic))
return (result, mid)
|
[
"def",
"publish",
"(",
"self",
",",
"topic",
",",
"payload",
"=",
"None",
",",
"qos",
"=",
"0",
",",
"retain",
"=",
"False",
")",
":",
"# type: (str, bytes, int, bool) -> Tuple[int, int]",
"if",
"not",
"self",
".",
"connected",
":",
"self",
".",
"client",
".",
"reconnect",
"(",
")",
"result",
",",
"mid",
"=",
"self",
".",
"client",
".",
"publish",
"(",
"topic",
",",
"payload",
",",
"qos",
",",
"retain",
")",
"if",
"result",
"==",
"MQTT_ERR_SUCCESS",
":",
"logger",
".",
"debug",
"(",
"'Published topic {0}: {1}'",
".",
"format",
"(",
"topic",
",",
"payload",
")",
")",
"else",
":",
"logger",
".",
"error",
"(",
"'Error {0} publishing topic {1}'",
".",
"format",
"(",
"result",
",",
"topic",
")",
")",
"return",
"(",
"result",
",",
"mid",
")"
] |
Send a message to the broker.
:param topic: the topic that the message should be published on
:param payload: the actual message to send. If not given, or set to
None a zero length message will be used. Passing an
int or float will result in the payload being
converted to a string representing that number.
If you wish to send a true int/float, use struct.pack()
to create the payload you require.
:param qos: the quality of service level to use
:param retain: if set to True, the message will be set as the
"last known good"/retained message for the topic
:returns: Returns a tuple (result, mid), where result is
MQTT_ERR_SUCCESS to indicate success or MQTT_ERR_NO_CONN
if the client is not currently connected. mid is the message
ID for the publish request.
|
[
"Send",
"a",
"message",
"to",
"the",
"broker",
"."
] |
77d474ab87484ae6eaef2fee3bf02406beee2e17
|
https://github.com/stlehmann/Flask-MQTT/blob/77d474ab87484ae6eaef2fee3bf02406beee2e17/flask_mqtt/__init__.py#L311-L343
|
train
|
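A publish sketch (same hypothetical mqtt instance); the struct.pack() call follows the docstring's own advice for sending a true float instead of its string form.

import struct

mqtt.publish("sensors/livingroom/temperature", "21.5")  # string payload
mqtt.publish("sensors/raw", struct.pack("!f", 21.5), qos=1, retain=True)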
stlehmann/Flask-MQTT
|
flask_mqtt/__init__.py
|
Mqtt.on_subscribe
|
def on_subscribe(self):
        # type: () -> Callable
        """Decorate a callback function to handle subscriptions.
**Usage:**::
@mqtt.on_subscribe()
def handle_subscribe(client, userdata, mid, granted_qos):
print('Subscription id {} granted with qos {}.'
.format(mid, granted_qos))
"""
def decorator(handler):
# type: (Callable) -> Callable
self.client.on_subscribe = handler
return handler
return decorator
|
python
|
def on_subscribe(self):
        # type: () -> Callable
        """Decorate a callback function to handle subscriptions.
**Usage:**::
@mqtt.on_subscribe()
def handle_subscribe(client, userdata, mid, granted_qos):
print('Subscription id {} granted with qos {}.'
.format(mid, granted_qos))
"""
def decorator(handler):
# type: (Callable) -> Callable
self.client.on_subscribe = handler
return handler
return decorator
|
[
"def",
"on_subscribe",
"(",
"self",
")",
":",
"# type: () -> Callable",
"def",
"decorator",
"(",
"handler",
")",
":",
"# type: (Callable) -> Callable",
"self",
".",
"client",
".",
"on_subscribe",
"=",
"handler",
"return",
"handler",
"return",
"decorator"
] |
Decorate a callback function to handle subscriptions.
**Usage:**::
@mqtt.on_subscribe()
def handle_subscribe(client, userdata, mid, granted_qos):
print('Subscription id {} granted with qos {}.'
.format(mid, granted_qos))
|
[
"Decorate",
"a",
"callback",
"function",
"to",
"handle",
"subscriptions",
"."
] |
77d474ab87484ae6eaef2fee3bf02406beee2e17
|
https://github.com/stlehmann/Flask-MQTT/blob/77d474ab87484ae6eaef2fee3bf02406beee2e17/flask_mqtt/__init__.py#L421-L437
|
train
|
stlehmann/Flask-MQTT
|
flask_mqtt/__init__.py
|
Mqtt.on_unsubscribe
|
def on_unsubscribe(self):
        # type: () -> Callable
        """Decorate a callback function to handle unsubscriptions.
        **Usage:**::
            @mqtt.on_unsubscribe()
            def handle_unsubscribe(client, userdata, mid):
                print('Unsubscribed from topic (id: {})'
                      .format(mid))
"""
def decorator(handler):
# type: (Callable) -> Callable
self.client.on_unsubscribe = handler
return handler
return decorator
|
python
|
def on_unsubscribe(self):
        # type: () -> Callable
        """Decorate a callback function to handle unsubscriptions.
        **Usage:**::
            @mqtt.on_unsubscribe()
            def handle_unsubscribe(client, userdata, mid):
                print('Unsubscribed from topic (id: {})'
                      .format(mid))
"""
def decorator(handler):
# type: (Callable) -> Callable
self.client.on_unsubscribe = handler
return handler
return decorator
|
[
"def",
"on_unsubscribe",
"(",
"self",
")",
":",
"# type: () -> Callable",
"def",
"decorator",
"(",
"handler",
")",
":",
"# type: (Callable) -> Callable",
"self",
".",
"client",
".",
"on_unsubscribe",
"=",
"handler",
"return",
"handler",
"return",
"decorator"
] |
Decorate a callback function to handle unsubscriptions.
**Usage:**::
    @mqtt.on_unsubscribe()
    def handle_unsubscribe(client, userdata, mid):
        print('Unsubscribed from topic (id: {})'
              .format(mid))
|
[
"Decorate",
"a",
"callback",
"function",
"to",
"handle",
"unsubscriptions",
"."
] |
77d474ab87484ae6eaef2fee3bf02406beee2e17
|
https://github.com/stlehmann/Flask-MQTT/blob/77d474ab87484ae6eaef2fee3bf02406beee2e17/flask_mqtt/__init__.py#L439-L455
|
train
|
stlehmann/Flask-MQTT
|
flask_mqtt/__init__.py
|
Mqtt.on_log
|
def on_log(self):
# type: () -> Callable
"""Decorate a callback function to handle MQTT logging.
**Example Usage:**
::
@mqtt.on_log()
def handle_logging(client, userdata, level, buf):
print(client, userdata, level, buf)
"""
def decorator(handler):
# type: (Callable) -> Callable
self.client.on_log = handler
return handler
return decorator
|
python
|
def on_log(self):
# type: () -> Callable
"""Decorate a callback function to handle MQTT logging.
**Example Usage:**
::
@mqtt.on_log()
def handle_logging(client, userdata, level, buf):
print(client, userdata, level, buf)
"""
def decorator(handler):
# type: (Callable) -> Callable
self.client.on_log = handler
return handler
return decorator
|
[
"def",
"on_log",
"(",
"self",
")",
":",
"# type: () -> Callable",
"def",
"decorator",
"(",
"handler",
")",
":",
"# type: (Callable) -> Callable",
"self",
".",
"client",
".",
"on_log",
"=",
"handler",
"return",
"handler",
"return",
"decorator"
] |
Decorate a callback function to handle MQTT logging.
**Example Usage:**
::
@mqtt.on_log()
def handle_logging(client, userdata, level, buf):
print(client, userdata, level, buf)
|
[
"Decorate",
"a",
"callback",
"function",
"to",
"handle",
"MQTT",
"logging",
"."
] |
77d474ab87484ae6eaef2fee3bf02406beee2e17
|
https://github.com/stlehmann/Flask-MQTT/blob/77d474ab87484ae6eaef2fee3bf02406beee2e17/flask_mqtt/__init__.py#L457-L474
|
train
|
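A sketch routing the raw paho-mqtt log lines into the standard logging module; the logger name is an assumption.
import logging

@mqtt.on_log()  # assumes an initialized Mqtt instance named mqtt
def handle_logging(client, userdata, level, buf):
    # level is one of paho's MQTT_LOG_* integer constants; buf is the message text.
    logging.getLogger('flask_mqtt').debug('%s (paho level %s)', buf, level)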
kennethreitz/bucketstore
|
bucketstore.py
|
list
|
def list():
"""Lists buckets, by name."""
s3 = boto3.resource('s3')
return [b.name for b in s3.buckets.all()]
|
python
|
def list():
"""Lists buckets, by name."""
s3 = boto3.resource('s3')
return [b.name for b in s3.buckets.all()]
|
[
"def",
"list",
"(",
")",
":",
"s3",
"=",
"boto3",
".",
"resource",
"(",
"'s3'",
")",
"return",
"[",
"b",
".",
"name",
"for",
"b",
"in",
"s3",
".",
"buckets",
".",
"all",
"(",
")",
"]"
] |
Lists buckets, by name.
|
[
"Lists",
"buckets",
"by",
"name",
"."
] |
2d79584d44b9c422192d7fdf08a85a49addf83d5
|
https://github.com/kennethreitz/bucketstore/blob/2d79584d44b9c422192d7fdf08a85a49addf83d5/bucketstore.py#L6-L9
|
train
|
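A sketch of the module-level helper above; assumes AWS credentials are already available to boto3 via the environment.
import bucketstore

# Prints every bucket name visible to the configured credentials.
for name in bucketstore.list():
    print(name)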
kennethreitz/bucketstore
|
bucketstore.py
|
S3Bucket.delete
|
def delete(self, key=None):
"""Deletes the given key, or the whole bucket."""
# Delete the whole bucket.
if key is None:
# Delete everything in the bucket.
for key in self.all():
key.delete()
# Delete the bucket.
return self._boto_bucket.delete()
# If a key was passed, delete they key.
k = self.key(key)
return k.delete()
|
python
|
def delete(self, key=None):
"""Deletes the given key, or the whole bucket."""
# Delete the whole bucket.
if key is None:
# Delete everything in the bucket.
for key in self.all():
key.delete()
# Delete the bucket.
return self._boto_bucket.delete()
# If a key was passed, delete they key.
k = self.key(key)
return k.delete()
|
[
"def",
"delete",
"(",
"self",
",",
"key",
"=",
"None",
")",
":",
"# Delete the whole bucket.",
"if",
"key",
"is",
"None",
":",
"# Delete everything in the bucket.",
"for",
"key",
"in",
"self",
".",
"all",
"(",
")",
":",
"key",
".",
"delete",
"(",
")",
"# Delete the bucket.",
"return",
"self",
".",
"_boto_bucket",
".",
"delete",
"(",
")",
"# If a key was passed, delete they key.",
"k",
"=",
"self",
".",
"key",
"(",
"key",
")",
"return",
"k",
".",
"delete",
"(",
")"
] |
Deletes the given key, or the whole bucket.
|
[
"Deletes",
"the",
"given",
"key",
"or",
"the",
"whole",
"bucket",
"."
] |
2d79584d44b9c422192d7fdf08a85a49addf83d5
|
https://github.com/kennethreitz/bucketstore/blob/2d79584d44b9c422192d7fdf08a85a49addf83d5/bucketstore.py#L80-L94
|
train
|
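A sketch of both delete paths; bucketstore.get and the bucket name are assumptions about the surrounding API, not shown in this row.
import bucketstore

bucket = bucketstore.get('example-bucket')  # hypothetical accessor and bucket name
bucket.delete('stale/report.csv')  # removes a single key
bucket.delete()                    # empties the bucket, then deletes the bucket itself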
kennethreitz/bucketstore
|
bucketstore.py
|
S3Key.rename
|
def rename(self, new_name):
"""Renames the key to a given new name."""
# Write the new object.
self.bucket.set(new_name, self.get(), self.meta)
# Delete the current key.
self.delete()
# Set the new name.
self.name = new_name
|
python
|
def rename(self, new_name):
"""Renames the key to a given new name."""
# Write the new object.
self.bucket.set(new_name, self.get(), self.meta)
# Delete the current key.
self.delete()
# Set the new name.
self.name = new_name
|
[
"def",
"rename",
"(",
"self",
",",
"new_name",
")",
":",
"# Write the new object.",
"self",
".",
"bucket",
".",
"set",
"(",
"new_name",
",",
"self",
".",
"get",
"(",
")",
",",
"self",
".",
"meta",
")",
"# Delete the current key.",
"self",
".",
"delete",
"(",
")",
"# Set the new name.",
"self",
".",
"name",
"=",
"new_name"
] |
Renames the key to a given new name.
|
[
"Renames",
"the",
"key",
"to",
"a",
"given",
"new",
"name",
"."
] |
2d79584d44b9c422192d7fdf08a85a49addf83d5
|
https://github.com/kennethreitz/bucketstore/blob/2d79584d44b9c422192d7fdf08a85a49addf83d5/bucketstore.py#L126-L135
|
train
|
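A sketch of rename; note the method above re-uploads the object under the new name before deleting the old one, so large objects round-trip through memory. Key names and the bucket.key accessor are assumptions.
key = bucket.key('reports/2019.csv')    # assumes an S3Bucket instance named `bucket`
key.rename('archive/reports/2019.csv')  # copy via set(), then delete the original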
kennethreitz/bucketstore
|
bucketstore.py
|
S3Key.is_public
|
def is_public(self):
"""Returns True if the public-read ACL is set for the Key."""
for grant in self._boto_object.Acl().grants:
if 'AllUsers' in grant['Grantee'].get('URI', ''):
if grant['Permission'] == 'READ':
return True
return False
|
python
|
def is_public(self):
"""Returns True if the public-read ACL is set for the Key."""
for grant in self._boto_object.Acl().grants:
if 'AllUsers' in grant['Grantee'].get('URI', ''):
if grant['Permission'] == 'READ':
return True
return False
|
[
"def",
"is_public",
"(",
"self",
")",
":",
"for",
"grant",
"in",
"self",
".",
"_boto_object",
".",
"Acl",
"(",
")",
".",
"grants",
":",
"if",
"'AllUsers'",
"in",
"grant",
"[",
"'Grantee'",
"]",
".",
"get",
"(",
"'URI'",
",",
"''",
")",
":",
"if",
"grant",
"[",
"'Permission'",
"]",
"==",
"'READ'",
":",
"return",
"True",
"return",
"False"
] |
Returns True if the public-read ACL is set for the Key.
|
[
"Returns",
"True",
"if",
"the",
"public",
"-",
"read",
"ACL",
"is",
"set",
"for",
"the",
"Key",
"."
] |
2d79584d44b9c422192d7fdf08a85a49addf83d5
|
https://github.com/kennethreitz/bucketstore/blob/2d79584d44b9c422192d7fdf08a85a49addf83d5/bucketstore.py#L142-L149
|
train
|
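A sketch pairing is_public with a make_public call; treating is_public as a property is an inference from the url method below, and make_public is assumed from its mention there.
key = bucket.key('logo.png')  # placeholder key
if not key.is_public:         # the ACL check defined above
    key.make_public()         # grants the public-read ACL that is_public looks for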
kennethreitz/bucketstore
|
bucketstore.py
|
S3Key.url
|
def url(self):
"""Returns the public URL for the given key."""
if self.is_public:
return '{0}/{1}/{2}'.format(
self.bucket._boto_s3.meta.client.meta.endpoint_url,
self.bucket.name,
self.name
)
else:
raise ValueError('{0!r} does not have the public-read ACL set. '
'Use the make_public() method to allow for '
'public URL sharing.'.format(self.name))
|
python
|
def url(self):
"""Returns the public URL for the given key."""
if self.is_public:
return '{0}/{1}/{2}'.format(
self.bucket._boto_s3.meta.client.meta.endpoint_url,
self.bucket.name,
self.name
)
else:
raise ValueError('{0!r} does not have the public-read ACL set. '
'Use the make_public() method to allow for '
'public URL sharing.'.format(self.name))
|
[
"def",
"url",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_public",
":",
"return",
"'{0}/{1}/{2}'",
".",
"format",
"(",
"self",
".",
"bucket",
".",
"_boto_s3",
".",
"meta",
".",
"client",
".",
"meta",
".",
"endpoint_url",
",",
"self",
".",
"bucket",
".",
"name",
",",
"self",
".",
"name",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'{0!r} does not have the public-read ACL set. '",
"'Use the make_public() method to allow for '",
"'public URL sharing.'",
".",
"format",
"(",
"self",
".",
"name",
")",
")"
] |
Returns the public URL for the given key.
|
[
"Returns",
"the",
"public",
"URL",
"for",
"the",
"given",
"key",
"."
] |
2d79584d44b9c422192d7fdf08a85a49addf83d5
|
https://github.com/kennethreitz/bucketstore/blob/2d79584d44b9c422192d7fdf08a85a49addf83d5/bucketstore.py#L167-L178
|
train
|
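A sketch of the two URL paths; url raises ValueError for private keys, so falling back to a presigned link is one option. Assumes an S3Key instance named `key`.
try:
    link = key.url                         # public, permanent URL
except ValueError:
    link = key.temp_url(duration=300)      # presigned fallback, valid 5 minutes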
kennethreitz/bucketstore
|
bucketstore.py
|
S3Key.temp_url
|
def temp_url(self, duration=120):
"""Returns a temporary URL for the given key."""
return self.bucket._boto_s3.meta.client.generate_presigned_url(
'get_object',
Params={'Bucket': self.bucket.name, 'Key': self.name},
ExpiresIn=duration
)
|
python
|
def temp_url(self, duration=120):
"""Returns a temporary URL for the given key."""
return self.bucket._boto_s3.meta.client.generate_presigned_url(
'get_object',
Params={'Bucket': self.bucket.name, 'Key': self.name},
ExpiresIn=duration
)
|
[
"def",
"temp_url",
"(",
"self",
",",
"duration",
"=",
"120",
")",
":",
"return",
"self",
".",
"bucket",
".",
"_boto_s3",
".",
"meta",
".",
"client",
".",
"generate_presigned_url",
"(",
"'get_object'",
",",
"Params",
"=",
"{",
"'Bucket'",
":",
"self",
".",
"bucket",
".",
"name",
",",
"'Key'",
":",
"self",
".",
"name",
"}",
",",
"ExpiresIn",
"=",
"duration",
")"
] |
Returns a temporary URL for the given key.
|
[
"Returns",
"a",
"temporary",
"URL",
"for",
"the",
"given",
"key",
"."
] |
2d79584d44b9c422192d7fdf08a85a49addf83d5
|
https://github.com/kennethreitz/bucketstore/blob/2d79584d44b9c422192d7fdf08a85a49addf83d5/bucketstore.py#L180-L186
|
train
|
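A one-line sketch; duration is in seconds and defaults to 120 in the signature above. The bucket and key names are placeholders.
link = bucket.key('report.pdf').temp_url(duration=3600)  # valid for one hour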
cs50/python-cs50
|
src/cs50/cs50.py
|
eprint
|
def eprint(*args, **kwargs):
"""
Print an error message to standard error, prefixing it with the
file name and line number from which the method was called.
"""
end = kwargs.get("end", "\n")
sep = kwargs.get("sep", " ")
(filename, lineno) = inspect.stack()[1][1:3]
print("{}:{}: ".format(filename, lineno), end="")
print(*args, end=end, file=sys.stderr, sep=sep)
|
python
|
def eprint(*args, **kwargs):
"""
Print an error message to standard error, prefixing it with the
file name and line number from which the method was called.
"""
end = kwargs.get("end", "\n")
sep = kwargs.get("sep", " ")
(filename, lineno) = inspect.stack()[1][1:3]
print("{}:{}: ".format(filename, lineno), end="")
print(*args, end=end, file=sys.stderr, sep=sep)
|
[
"def",
"eprint",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"end",
"=",
"kwargs",
".",
"get",
"(",
"\"end\"",
",",
"\"\\n\"",
")",
"sep",
"=",
"kwargs",
".",
"get",
"(",
"\"sep\"",
",",
"\" \"",
")",
"(",
"filename",
",",
"lineno",
")",
"=",
"inspect",
".",
"stack",
"(",
")",
"[",
"1",
"]",
"[",
"1",
":",
"3",
"]",
"print",
"(",
"\"{}:{}: \"",
".",
"format",
"(",
"filename",
",",
"lineno",
")",
",",
"end",
"=",
"\"\"",
")",
"print",
"(",
"*",
"args",
",",
"end",
"=",
"end",
",",
"file",
"=",
"sys",
".",
"stderr",
",",
"sep",
"=",
"sep",
")"
] |
Print an error message to standard error, prefixing it with the
file name and line number from which the method was called.
|
[
"Print",
"an",
"error",
"message",
"to",
"standard",
"error",
"prefixing",
"it",
"with",
"file",
"name",
"and",
"line",
"number",
"from",
"which",
"method",
"was",
"called",
"."
] |
f987e9036bcf1bf60adf50a2827cc2cd5b9fd08a
|
https://github.com/cs50/python-cs50/blob/f987e9036bcf1bf60adf50a2827cc2cd5b9fd08a/src/cs50/cs50.py#L35-L44
|
train
|
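A usage sketch for eprint; note that in the code above the file:line prefix is written to stdout (the first print has no file= argument) while the message itself goes to stderr. The import path assumes the package re-exports the helper, as it did at this commit.
from cs50 import eprint

eprint('x must be positive, got', -3)
# emits something like "script.py:7: " on stdout and "x must be positive, got -3" on stderr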
cs50/python-cs50
|
src/cs50/cs50.py
|
formatException
|
def formatException(type, value, tb):
"""
Format traceback, darkening entries from global site-packages directories
and user-specific site-packages directory.
https://stackoverflow.com/a/46071447/5156190
"""
# Absolute paths to site-packages
packages = tuple(join(abspath(p), "") for p in sys.path[1:])
# Highlight lines not referring to files in site-packages
lines = []
for line in format_exception(type, value, tb):
matches = re.search(r"^ File \"([^\"]+)\", line \d+, in .+", line)
if matches and matches.group(1).startswith(packages):
lines += line
else:
matches = re.search(r"^(\s*)(.*?)(\s*)$", line, re.DOTALL)
lines.append(matches.group(1) + colored(matches.group(2), "yellow") + matches.group(3))
return "".join(lines).rstrip()
|
python
|
def formatException(type, value, tb):
"""
Format traceback, darkening entries from global site-packages directories
and user-specific site-packages directory.
https://stackoverflow.com/a/46071447/5156190
"""
# Absolute paths to site-packages
packages = tuple(join(abspath(p), "") for p in sys.path[1:])
# Highlight lines not referring to files in site-packages
lines = []
for line in format_exception(type, value, tb):
matches = re.search(r"^ File \"([^\"]+)\", line \d+, in .+", line)
if matches and matches.group(1).startswith(packages):
lines += line
else:
matches = re.search(r"^(\s*)(.*?)(\s*)$", line, re.DOTALL)
lines.append(matches.group(1) + colored(matches.group(2), "yellow") + matches.group(3))
return "".join(lines).rstrip()
|
[
"def",
"formatException",
"(",
"type",
",",
"value",
",",
"tb",
")",
":",
"# Absolute paths to site-packages",
"packages",
"=",
"tuple",
"(",
"join",
"(",
"abspath",
"(",
"p",
")",
",",
"\"\"",
")",
"for",
"p",
"in",
"sys",
".",
"path",
"[",
"1",
":",
"]",
")",
"# Highlight lines not referring to files in site-packages",
"lines",
"=",
"[",
"]",
"for",
"line",
"in",
"format_exception",
"(",
"type",
",",
"value",
",",
"tb",
")",
":",
"matches",
"=",
"re",
".",
"search",
"(",
"r\"^ File \\\"([^\\\"]+)\\\", line \\d+, in .+\"",
",",
"line",
")",
"if",
"matches",
"and",
"matches",
".",
"group",
"(",
"1",
")",
".",
"startswith",
"(",
"packages",
")",
":",
"lines",
"+=",
"line",
"else",
":",
"matches",
"=",
"re",
".",
"search",
"(",
"r\"^(\\s*)(.*?)(\\s*)$\"",
",",
"line",
",",
"re",
".",
"DOTALL",
")",
"lines",
".",
"append",
"(",
"matches",
".",
"group",
"(",
"1",
")",
"+",
"colored",
"(",
"matches",
".",
"group",
"(",
"2",
")",
",",
"\"yellow\"",
")",
"+",
"matches",
".",
"group",
"(",
"3",
")",
")",
"return",
"\"\"",
".",
"join",
"(",
"lines",
")",
".",
"rstrip",
"(",
")"
] |
Format traceback, darkening entries from global site-packages directories
and user-specific site-packages directory.
https://stackoverflow.com/a/46071447/5156190
|
[
"Format",
"traceback",
"darkening",
"entries",
"from",
"global",
"site",
"-",
"packages",
"directories",
"and",
"user",
"-",
"specific",
"site",
"-",
"packages",
"directory",
"."
] |
f987e9036bcf1bf60adf50a2827cc2cd5b9fd08a
|
https://github.com/cs50/python-cs50/blob/f987e9036bcf1bf60adf50a2827cc2cd5b9fd08a/src/cs50/cs50.py#L47-L67
|
train
|
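A sketch installing the formatter as the process-wide excepthook; also note that `lines += line` above extends the list character by character, which the final "".join() makes harmless.
import sys

def excepthook(type, value, tb):
    # Route uncaught exceptions through the highlighter above.
    print(formatException(type, value, tb), file=sys.stderr)

sys.excepthook = excepthook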
cs50/python-cs50
|
src/cs50/cs50.py
|
get_char
|
def get_char(prompt=None):
"""
Read a line of text from standard input and return the equivalent char;
if text is not a single char, user is prompted to retry. If line can't
be read, return None.
"""
while True:
s = get_string(prompt)
if s is None:
return None
if len(s) == 1:
return s[0]
# Temporarily here for backwards compatibility
if prompt is None:
print("Retry: ", end="")
|
python
|
def get_char(prompt=None):
"""
Read a line of text from standard input and return the equivalent char;
if text is not a single char, user is prompted to retry. If line can't
be read, return None.
"""
while True:
s = get_string(prompt)
if s is None:
return None
if len(s) == 1:
return s[0]
# Temporarily here for backwards compatibility
if prompt is None:
print("Retry: ", end="")
|
[
"def",
"get_char",
"(",
"prompt",
"=",
"None",
")",
":",
"while",
"True",
":",
"s",
"=",
"get_string",
"(",
"prompt",
")",
"if",
"s",
"is",
"None",
":",
"return",
"None",
"if",
"len",
"(",
"s",
")",
"==",
"1",
":",
"return",
"s",
"[",
"0",
"]",
"# Temporarily here for backwards compatibility",
"if",
"prompt",
"is",
"None",
":",
"print",
"(",
"\"Retry: \"",
",",
"end",
"=",
"\"\"",
")"
] |
Read a line of text from standard input and return the equivalent char;
if text is not a single char, user is prompted to retry. If line can't
be read, return None.
|
[
"Read",
"a",
"line",
"of",
"text",
"from",
"standard",
"input",
"and",
"return",
"the",
"equivalent",
"char",
";",
"if",
"text",
"is",
"not",
"a",
"single",
"char",
"user",
"is",
"prompted",
"to",
"retry",
".",
"If",
"line",
"can",
"t",
"be",
"read",
"return",
"None",
"."
] |
f987e9036bcf1bf60adf50a2827cc2cd5b9fd08a
|
https://github.com/cs50/python-cs50/blob/f987e9036bcf1bf60adf50a2827cc2cd5b9fd08a/src/cs50/cs50.py#L73-L88
|
train
|
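A usage sketch; get_char re-prompts until the input is exactly one character and returns None on EOF. The prompt text is illustrative.
from cs50 import get_char

answer = get_char('Continue (y/n): ')
if answer is None:
    print('no input (EOF)')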
cs50/python-cs50
|
src/cs50/cs50.py
|
get_float
|
def get_float(prompt=None):
"""
Read a line of text from standard input and return the equivalent float
as precisely as possible; if text does not represent a double, user is
prompted to retry. If line can't be read, return None.
"""
while True:
s = get_string(prompt)
if s is None:
return None
if len(s) > 0 and re.search(r"^[+-]?\d*(?:\.\d*)?$", s):
try:
return float(s)
except ValueError:
pass
# Temporarily here for backwards compatibility
if prompt is None:
print("Retry: ", end="")
|
python
|
def get_float(prompt=None):
"""
Read a line of text from standard input and return the equivalent float
as precisely as possible; if text does not represent a double, user is
prompted to retry. If line can't be read, return None.
"""
while True:
s = get_string(prompt)
if s is None:
return None
if len(s) > 0 and re.search(r"^[+-]?\d*(?:\.\d*)?$", s):
try:
return float(s)
except ValueError:
pass
# Temporarily here for backwards compatibility
if prompt is None:
print("Retry: ", end="")
|
[
"def",
"get_float",
"(",
"prompt",
"=",
"None",
")",
":",
"while",
"True",
":",
"s",
"=",
"get_string",
"(",
"prompt",
")",
"if",
"s",
"is",
"None",
":",
"return",
"None",
"if",
"len",
"(",
"s",
")",
">",
"0",
"and",
"re",
".",
"search",
"(",
"r\"^[+-]?\\d*(?:\\.\\d*)?$\"",
",",
"s",
")",
":",
"try",
":",
"return",
"float",
"(",
"s",
")",
"except",
"ValueError",
":",
"pass",
"# Temporarily here for backwards compatibility",
"if",
"prompt",
"is",
"None",
":",
"print",
"(",
"\"Retry: \"",
",",
"end",
"=",
"\"\"",
")"
] |
Read a line of text from standard input and return the equivalent float
as precisely as possible; if text does not represent a double, user is
prompted to retry. If line can't be read, return None.
|
[
"Read",
"a",
"line",
"of",
"text",
"from",
"standard",
"input",
"and",
"return",
"the",
"equivalent",
"float",
"as",
"precisely",
"as",
"possible",
";",
"if",
"text",
"does",
"not",
"represent",
"a",
"double",
"user",
"is",
"prompted",
"to",
"retry",
".",
"If",
"line",
"can",
"t",
"be",
"read",
"return",
"None",
"."
] |
f987e9036bcf1bf60adf50a2827cc2cd5b9fd08a
|
https://github.com/cs50/python-cs50/blob/f987e9036bcf1bf60adf50a2827cc2cd5b9fd08a/src/cs50/cs50.py#L91-L109
|
train
|
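A usage sketch; note the regex above also admits bare "+", "-", or ".", which float() then rejects, and that is what the try/except guards.
from cs50 import get_float

price = get_float('Price: ')  # loops until the line parses as a float; None on EOF
if price is not None:
    print('{:.2f}'.format(price))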
cs50/python-cs50
|
src/cs50/cs50.py
|
get_int
|
def get_int(prompt=None):
"""
Read a line of text from standard input and return the equivalent int;
if text does not represent an int, user is prompted to retry. If line
can't be read, return None.
"""
while True:
s = get_string(prompt)
if s is None:
return None
if re.search(r"^[+-]?\d+$", s):
try:
i = int(s, 10)
if type(i) is int: # Could become long in Python 2
return i
except ValueError:
pass
# Temporarily here for backwards compatibility
if prompt is None:
print("Retry: ", end="")
|
python
|
def get_int(prompt=None):
"""
Read a line of text from standard input and return the equivalent int;
if text does not represent an int, user is prompted to retry. If line
can't be read, return None.
"""
while True:
s = get_string(prompt)
if s is None:
return None
if re.search(r"^[+-]?\d+$", s):
try:
i = int(s, 10)
if type(i) is int: # Could become long in Python 2
return i
except ValueError:
pass
# Temporarily here for backwards compatibility
if prompt is None:
print("Retry: ", end="")
|
[
"def",
"get_int",
"(",
"prompt",
"=",
"None",
")",
":",
"while",
"True",
":",
"s",
"=",
"get_string",
"(",
"prompt",
")",
"if",
"s",
"is",
"None",
":",
"return",
"None",
"if",
"re",
".",
"search",
"(",
"r\"^[+-]?\\d+$\"",
",",
"s",
")",
":",
"try",
":",
"i",
"=",
"int",
"(",
"s",
",",
"10",
")",
"if",
"type",
"(",
"i",
")",
"is",
"int",
":",
"# Could become long in Python 2",
"return",
"i",
"except",
"ValueError",
":",
"pass",
"# Temporarily here for backwards compatibility",
"if",
"prompt",
"is",
"None",
":",
"print",
"(",
"\"Retry: \"",
",",
"end",
"=",
"\"\"",
")"
] |
Read a line of text from standard input and return the equivalent int;
if text does not represent an int, user is prompted to retry. If line
can't be read, return None.
|
[
"Read",
"a",
"line",
"of",
"text",
"from",
"standard",
"input",
"and",
"return",
"the",
"equivalent",
"int",
";",
"if",
"text",
"does",
"not",
"represent",
"an",
"int",
"user",
"is",
"prompted",
"to",
"retry",
".",
"If",
"line",
"can",
"t",
"be",
"read",
"return",
"None",
"."
] |
f987e9036bcf1bf60adf50a2827cc2cd5b9fd08a
|
https://github.com/cs50/python-cs50/blob/f987e9036bcf1bf60adf50a2827cc2cd5b9fd08a/src/cs50/cs50.py#L112-L132
|
train
|
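A usage sketch with simple range validation layered on top; the prompt text and bounds are illustrative.
from cs50 import get_int

n = get_int('Height: ')            # None on EOF, otherwise a validated int
while n is not None and not 1 <= n <= 8:
    n = get_int('Height: ')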
cs50/python-cs50
|
src/cs50/sql.py
|
_connect
|
def _connect(dbapi_connection, connection_record):
"""Enables foreign key support."""
# If back end is sqlite
if type(dbapi_connection) is sqlite3.Connection:
# Respect foreign key constraints by default
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
|
python
|
def _connect(dbapi_connection, connection_record):
"""Enables foreign key support."""
# If back end is sqlite
if type(dbapi_connection) is sqlite3.Connection:
# Respect foreign key constraints by default
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
|
[
"def",
"_connect",
"(",
"dbapi_connection",
",",
"connection_record",
")",
":",
"# If back end is sqlite",
"if",
"type",
"(",
"dbapi_connection",
")",
"is",
"sqlite3",
".",
"Connection",
":",
"# Respect foreign key constraints by default",
"cursor",
"=",
"dbapi_connection",
".",
"cursor",
"(",
")",
"cursor",
".",
"execute",
"(",
"\"PRAGMA foreign_keys=ON\"",
")",
"cursor",
".",
"close",
"(",
")"
] |
Enables foreign key support.
|
[
"Enables",
"foreign",
"key",
"support",
"."
] |
f987e9036bcf1bf60adf50a2827cc2cd5b9fd08a
|
https://github.com/cs50/python-cs50/blob/f987e9036bcf1bf60adf50a2827cc2cd5b9fd08a/src/cs50/sql.py#L233-L242
|
train
|
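A standalone sketch of what the listener does on every new SQLite connection; plain sqlite3 here, no SQLAlchemy involved.
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('PRAGMA foreign_keys=ON')                  # the same pragma as above
print(conn.execute('PRAGMA foreign_keys').fetchone())   # (1,) once enabled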
cs50/python-cs50
|
src/cs50/sql.py
|
SQL._parse
|
def _parse(self, e):
"""Parses an exception, returns its message."""
# MySQL
matches = re.search(r"^\(_mysql_exceptions\.OperationalError\) \(\d+, \"(.+)\"\)$", str(e))
if matches:
return matches.group(1)
# PostgreSQL
matches = re.search(r"^\(psycopg2\.OperationalError\) (.+)$", str(e))
if matches:
return matches.group(1)
# SQLite
matches = re.search(r"^\(sqlite3\.OperationalError\) (.+)$", str(e))
if matches:
return matches.group(1)
# Default
return str(e)
|
python
|
def _parse(self, e):
"""Parses an exception, returns its message."""
# MySQL
matches = re.search(r"^\(_mysql_exceptions\.OperationalError\) \(\d+, \"(.+)\"\)$", str(e))
if matches:
return matches.group(1)
# PostgreSQL
matches = re.search(r"^\(psycopg2\.OperationalError\) (.+)$", str(e))
if matches:
return matches.group(1)
# SQLite
matches = re.search(r"^\(sqlite3\.OperationalError\) (.+)$", str(e))
if matches:
return matches.group(1)
# Default
return str(e)
|
[
"def",
"_parse",
"(",
"self",
",",
"e",
")",
":",
"# MySQL",
"matches",
"=",
"re",
".",
"search",
"(",
"r\"^\\(_mysql_exceptions\\.OperationalError\\) \\(\\d+, \\\"(.+)\\\"\\)$\"",
",",
"str",
"(",
"e",
")",
")",
"if",
"matches",
":",
"return",
"matches",
".",
"group",
"(",
"1",
")",
"# PostgreSQL",
"matches",
"=",
"re",
".",
"search",
"(",
"r\"^\\(psycopg2\\.OperationalError\\) (.+)$\"",
",",
"str",
"(",
"e",
")",
")",
"if",
"matches",
":",
"return",
"matches",
".",
"group",
"(",
"1",
")",
"# SQLite",
"matches",
"=",
"re",
".",
"search",
"(",
"r\"^\\(sqlite3\\.OperationalError\\) (.+)$\"",
",",
"str",
"(",
"e",
")",
")",
"if",
"matches",
":",
"return",
"matches",
".",
"group",
"(",
"1",
")",
"# Default",
"return",
"str",
"(",
"e",
")"
] |
Parses an exception, returns its message.
|
[
"Parses",
"an",
"exception",
"returns",
"its",
"message",
"."
] |
f987e9036bcf1bf60adf50a2827cc2cd5b9fd08a
|
https://github.com/cs50/python-cs50/blob/f987e9036bcf1bf60adf50a2827cc2cd5b9fd08a/src/cs50/sql.py#L68-L87
|
train
|
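A standalone check of the SQLite branch above; the string mimics how SQLAlchemy prefixes driver exceptions.
import re

e = '(sqlite3.OperationalError) no such table: users'
m = re.search(r"^\(sqlite3\.OperationalError\) (.+)$", e)
print(m.group(1))  # -> no such table: users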