repo
stringlengths 7
55
| path
stringlengths 4
223
| func_name
stringlengths 1
134
| original_string
stringlengths 75
104k
| language
stringclasses 1
value | code
stringlengths 75
104k
| code_tokens
listlengths 19
28.4k
| docstring
stringlengths 1
46.9k
| docstring_tokens
listlengths 1
1.97k
| sha
stringlengths 40
40
| url
stringlengths 87
315
| partition
stringclasses 1
value |
|---|---|---|---|---|---|---|---|---|---|---|---|
jay-johnson/antinex-client
|
antinex_client/scripts/ai_train_dnn.py
|
train_new_deep_neural_network
|
def train_new_deep_neural_network():
"""train_new_deep_neural_network
Train a new deep neural network and store the results as a new:
``MLJob`` and ``MLJobResult`` database records.
"""
parser = argparse.ArgumentParser(
description=(
"Python client to Train a Deep Neural Network "
"with AntiNex Django Rest Framework"))
parser.add_argument(
"-u",
help="username",
required=False,
dest="user")
parser.add_argument(
"-p",
help="user password",
required=False,
dest="password")
parser.add_argument(
"-e",
help="user email",
required=False,
dest="email")
parser.add_argument(
"-a",
help="url endpoint with default http://localhost:8010",
required=False,
dest="url")
parser.add_argument(
"-f",
help="file to use default ./examples/test-keras-dnn.json",
required=False,
dest="datafile")
parser.add_argument(
"-b",
help=(
"optional - path to CA bundle directory for "
"client encryption over HTTP"),
required=False,
dest="ca_dir")
parser.add_argument(
"-c",
help=(
"optional - path to x509 certificate for "
"client encryption over HTTP"),
required=False,
dest="cert_file")
parser.add_argument(
"-k",
help=(
"optional - path to x509 key file for "
"client encryption over HTTP"),
required=False,
dest="key_file")
parser.add_argument(
"-s",
help="silent",
required=False,
dest="silent",
action="store_true")
parser.add_argument(
"-d",
help="debug",
required=False,
dest="debug",
action="store_true")
args = parser.parse_args()
user = ev(
"API_USER",
"user-not-set")
password = ev(
"API_PASSWORD",
"password-not-set")
email = ev(
"API_EMAIL",
"email-not-set")
url = ev(
"API_URL",
"http://localhost:8010")
datafile = ev(
"DATAFILE",
"datafile-not-set")
ca_dir = os.getenv(
"API_CA_BUNDLE_DIR",
None)
cert_file = os.getenv(
"API_CERT_FILE",
None)
key_file = os.getenv(
"API_KEY_FILE",
None)
verbose = bool(str(ev(
"API_VERBOSE",
"true")).lower() == "true")
debug = bool(str(ev(
"API_DEBUG",
"false")).lower() == "true")
if args.user:
user = args.user
if args.password:
password = args.password
if args.email:
email = args.email
if args.url:
url = args.url
if args.datafile:
datafile = args.datafile
if args.ca_dir:
ca_dir = args.ca_dir
if args.cert_file:
cert_file = args.cert_file
if args.key_file:
key_file = args.key_file
if args.silent:
verbose = False
if args.debug:
debug = True
usage = (
"Please run with "
"-u <username> "
"-p <password> "
"-a <AntiNex URL http://localhost:8010> "
"-f <path to json file> "
"-b <optional - path to CA bundle directory> "
"-c <optional - path to x509 ssl certificate file> "
"-k <optional - path to x509 ssl key file>")
valid = True
if not user or user == "user-not-set":
log.error("missing user")
valid = False
if not password or password == "password-not-set":
log.error("missing password")
valid = False
if not datafile or datafile == "datafile-not-set":
log.error("missing datafile")
valid = False
else:
if not os.path.exists(datafile):
log.error(("did not find datafile={} on disk")
.format(
datafile))
valid = False
if not valid:
log.error(usage)
sys.exit(1)
if verbose:
log.info((
"creating client user={} url={} file={} "
"ca_dir={} cert_file={} key_file={}").format(
user,
url,
datafile,
ca_dir,
cert_file,
key_file))
client = AIClient(
user=user,
email=email,
password=password,
url=url,
ca_dir=ca_dir,
cert_file=cert_file,
key_file=key_file,
verbose=verbose,
debug=debug)
if verbose:
log.info(("loading request in datafile={}")
.format(
datafile))
req_body = None
with open(datafile, "r") as f:
req_body = json.loads(f.read())
if verbose:
log.info("running job")
job_was_started = False
response = client.run_job(
body=req_body)
if response["status"] == SUCCESS:
log.info(("job started with response={}")
.format(
response["data"]))
job_was_started = True
elif response["status"] == FAILED:
log.error(("job failed with error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == ERROR:
log.error(("job had an error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == LOGIN_FAILED:
log.error(("job reported user was not able to log in "
"with an error='{}' with response={}")
.format(
response["error"],
response["data"]))
if not job_was_started:
sys.exit(1)
if debug:
log.info(("parsing response data={}")
.format(
response["data"]))
else:
if verbose:
log.info("parsing data")
res_data = response["data"]
job_data = res_data.get(
"job",
None)
result_data = res_data.get(
"results",
None)
if not job_data:
log.error(("missing job dictionary in response data={}")
.format(
response["data"]))
sys.exit(1)
if not result_data:
log.error(("missing results dictionary in response data={}")
.format(
response["data"]))
sys.exit(1)
job_id = job_data.get("id", None)
job_status = job_data.get("status", None)
result_id = result_data.get("id", None)
result_status = result_data.get("status", None)
log.info(("started job.id={} job.status={} with "
"result.id={} result.status={}")
.format(
job_id,
job_status,
result_id,
result_status))
job_results = client.wait_for_job_to_finish(
job_id=job_id)
if job_results["status"] != SUCCESS:
log.error(("failed waiting for job.id={} to finish error={} data={}")
.format(
job_id,
job_results["error"],
job_results["data"]))
sys.exit(1)
final_job = job_results["data"]["job"]
final_result = job_results["data"]["result"]
if verbose:
log.info(("job={}")
.format(
ppj(final_job)))
else:
log.info(("job={}")
.format(
str(final_job)[0:10]))
if verbose:
log.info(("result={}")
.format(
ppj(final_result)))
else:
log.info(("result={}")
.format(
str(final_result)[0:10]))
log.info(("job.id={} is done")
.format(
job_id))
predictions = final_result["predictions_json"].get(
"predictions",
[])
log.info(("loading predictions={} into pandas dataframe")
.format(
len(predictions)))
df = pd.DataFrame(predictions)
if verbose:
log.info(("dataframe={}")
.format(
df))
|
python
|
def train_new_deep_neural_network():
"""train_new_deep_neural_network
Train a new deep neural network and store the results as a new:
``MLJob`` and ``MLJobResult`` database records.
"""
parser = argparse.ArgumentParser(
description=(
"Python client to Train a Deep Neural Network "
"with AntiNex Django Rest Framework"))
parser.add_argument(
"-u",
help="username",
required=False,
dest="user")
parser.add_argument(
"-p",
help="user password",
required=False,
dest="password")
parser.add_argument(
"-e",
help="user email",
required=False,
dest="email")
parser.add_argument(
"-a",
help="url endpoint with default http://localhost:8010",
required=False,
dest="url")
parser.add_argument(
"-f",
help="file to use default ./examples/test-keras-dnn.json",
required=False,
dest="datafile")
parser.add_argument(
"-b",
help=(
"optional - path to CA bundle directory for "
"client encryption over HTTP"),
required=False,
dest="ca_dir")
parser.add_argument(
"-c",
help=(
"optional - path to x509 certificate for "
"client encryption over HTTP"),
required=False,
dest="cert_file")
parser.add_argument(
"-k",
help=(
"optional - path to x509 key file for "
"client encryption over HTTP"),
required=False,
dest="key_file")
parser.add_argument(
"-s",
help="silent",
required=False,
dest="silent",
action="store_true")
parser.add_argument(
"-d",
help="debug",
required=False,
dest="debug",
action="store_true")
args = parser.parse_args()
user = ev(
"API_USER",
"user-not-set")
password = ev(
"API_PASSWORD",
"password-not-set")
email = ev(
"API_EMAIL",
"email-not-set")
url = ev(
"API_URL",
"http://localhost:8010")
datafile = ev(
"DATAFILE",
"datafile-not-set")
ca_dir = os.getenv(
"API_CA_BUNDLE_DIR",
None)
cert_file = os.getenv(
"API_CERT_FILE",
None)
key_file = os.getenv(
"API_KEY_FILE",
None)
verbose = bool(str(ev(
"API_VERBOSE",
"true")).lower() == "true")
debug = bool(str(ev(
"API_DEBUG",
"false")).lower() == "true")
if args.user:
user = args.user
if args.password:
password = args.password
if args.email:
email = args.email
if args.url:
url = args.url
if args.datafile:
datafile = args.datafile
if args.ca_dir:
ca_dir = args.ca_dir
if args.cert_file:
cert_file = args.cert_file
if args.key_file:
key_file = args.key_file
if args.silent:
verbose = False
if args.debug:
debug = True
usage = (
"Please run with "
"-u <username> "
"-p <password> "
"-a <AntiNex URL http://localhost:8010> "
"-f <path to json file> "
"-b <optional - path to CA bundle directory> "
"-c <optional - path to x509 ssl certificate file> "
"-k <optional - path to x509 ssl key file>")
valid = True
if not user or user == "user-not-set":
log.error("missing user")
valid = False
if not password or password == "password-not-set":
log.error("missing password")
valid = False
if not datafile or datafile == "datafile-not-set":
log.error("missing datafile")
valid = False
else:
if not os.path.exists(datafile):
log.error(("did not find datafile={} on disk")
.format(
datafile))
valid = False
if not valid:
log.error(usage)
sys.exit(1)
if verbose:
log.info((
"creating client user={} url={} file={} "
"ca_dir={} cert_file={} key_file={}").format(
user,
url,
datafile,
ca_dir,
cert_file,
key_file))
client = AIClient(
user=user,
email=email,
password=password,
url=url,
ca_dir=ca_dir,
cert_file=cert_file,
key_file=key_file,
verbose=verbose,
debug=debug)
if verbose:
log.info(("loading request in datafile={}")
.format(
datafile))
req_body = None
with open(datafile, "r") as f:
req_body = json.loads(f.read())
if verbose:
log.info("running job")
job_was_started = False
response = client.run_job(
body=req_body)
if response["status"] == SUCCESS:
log.info(("job started with response={}")
.format(
response["data"]))
job_was_started = True
elif response["status"] == FAILED:
log.error(("job failed with error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == ERROR:
log.error(("job had an error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == LOGIN_FAILED:
log.error(("job reported user was not able to log in "
"with an error='{}' with response={}")
.format(
response["error"],
response["data"]))
if not job_was_started:
sys.exit(1)
if debug:
log.info(("parsing response data={}")
.format(
response["data"]))
else:
if verbose:
log.info("parsing data")
res_data = response["data"]
job_data = res_data.get(
"job",
None)
result_data = res_data.get(
"results",
None)
if not job_data:
log.error(("missing job dictionary in response data={}")
.format(
response["data"]))
sys.exit(1)
if not result_data:
log.error(("missing results dictionary in response data={}")
.format(
response["data"]))
sys.exit(1)
job_id = job_data.get("id", None)
job_status = job_data.get("status", None)
result_id = result_data.get("id", None)
result_status = result_data.get("status", None)
log.info(("started job.id={} job.status={} with "
"result.id={} result.status={}")
.format(
job_id,
job_status,
result_id,
result_status))
job_results = client.wait_for_job_to_finish(
job_id=job_id)
if job_results["status"] != SUCCESS:
log.error(("failed waiting for job.id={} to finish error={} data={}")
.format(
job_id,
job_results["error"],
job_results["data"]))
sys.exit(1)
final_job = job_results["data"]["job"]
final_result = job_results["data"]["result"]
if verbose:
log.info(("job={}")
.format(
ppj(final_job)))
else:
log.info(("job={}")
.format(
str(final_job)[0:10]))
if verbose:
log.info(("result={}")
.format(
ppj(final_result)))
else:
log.info(("result={}")
.format(
str(final_result)[0:10]))
log.info(("job.id={} is done")
.format(
job_id))
predictions = final_result["predictions_json"].get(
"predictions",
[])
log.info(("loading predictions={} into pandas dataframe")
.format(
len(predictions)))
df = pd.DataFrame(predictions)
if verbose:
log.info(("dataframe={}")
.format(
df))
|
[
"def",
"train_new_deep_neural_network",
"(",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"(",
"\"Python client to Train a Deep Neural Network \"",
"\"with AntiNex Django Rest Framework\"",
")",
")",
"parser",
".",
"add_argument",
"(",
"\"-u\"",
",",
"help",
"=",
"\"username\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"user\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-p\"",
",",
"help",
"=",
"\"user password\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"password\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-e\"",
",",
"help",
"=",
"\"user email\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"email\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-a\"",
",",
"help",
"=",
"\"url endpoint with default http://localhost:8010\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"url\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-f\"",
",",
"help",
"=",
"\"file to use default ./examples/test-keras-dnn.json\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"datafile\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-b\"",
",",
"help",
"=",
"(",
"\"optional - path to CA bundle directory for \"",
"\"client encryption over HTTP\"",
")",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"ca_dir\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-c\"",
",",
"help",
"=",
"(",
"\"optional - path to x509 certificate for \"",
"\"client encryption over HTTP\"",
")",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"cert_file\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-k\"",
",",
"help",
"=",
"(",
"\"optional - path to x509 key file for \"",
"\"client encryption over HTTP\"",
")",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"key_file\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-s\"",
",",
"help",
"=",
"\"silent\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"silent\"",
",",
"action",
"=",
"\"store_true\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-d\"",
",",
"help",
"=",
"\"debug\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"debug\"",
",",
"action",
"=",
"\"store_true\"",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"user",
"=",
"ev",
"(",
"\"API_USER\"",
",",
"\"user-not-set\"",
")",
"password",
"=",
"ev",
"(",
"\"API_PASSWORD\"",
",",
"\"password-not-set\"",
")",
"email",
"=",
"ev",
"(",
"\"API_EMAIL\"",
",",
"\"email-not-set\"",
")",
"url",
"=",
"ev",
"(",
"\"API_URL\"",
",",
"\"http://localhost:8010\"",
")",
"datafile",
"=",
"ev",
"(",
"\"DATAFILE\"",
",",
"\"datafile-not-set\"",
")",
"ca_dir",
"=",
"os",
".",
"getenv",
"(",
"\"API_CA_BUNDLE_DIR\"",
",",
"None",
")",
"cert_file",
"=",
"os",
".",
"getenv",
"(",
"\"API_CERT_FILE\"",
",",
"None",
")",
"key_file",
"=",
"os",
".",
"getenv",
"(",
"\"API_KEY_FILE\"",
",",
"None",
")",
"verbose",
"=",
"bool",
"(",
"str",
"(",
"ev",
"(",
"\"API_VERBOSE\"",
",",
"\"true\"",
")",
")",
".",
"lower",
"(",
")",
"==",
"\"true\"",
")",
"debug",
"=",
"bool",
"(",
"str",
"(",
"ev",
"(",
"\"API_DEBUG\"",
",",
"\"false\"",
")",
")",
".",
"lower",
"(",
")",
"==",
"\"true\"",
")",
"if",
"args",
".",
"user",
":",
"user",
"=",
"args",
".",
"user",
"if",
"args",
".",
"password",
":",
"password",
"=",
"args",
".",
"password",
"if",
"args",
".",
"email",
":",
"email",
"=",
"args",
".",
"email",
"if",
"args",
".",
"url",
":",
"url",
"=",
"args",
".",
"url",
"if",
"args",
".",
"datafile",
":",
"datafile",
"=",
"args",
".",
"datafile",
"if",
"args",
".",
"ca_dir",
":",
"ca_dir",
"=",
"args",
".",
"ca_dir",
"if",
"args",
".",
"cert_file",
":",
"cert_file",
"=",
"args",
".",
"cert_file",
"if",
"args",
".",
"key_file",
":",
"key_file",
"=",
"args",
".",
"key_file",
"if",
"args",
".",
"silent",
":",
"verbose",
"=",
"False",
"if",
"args",
".",
"debug",
":",
"debug",
"=",
"True",
"usage",
"=",
"(",
"\"Please run with \"",
"\"-u <username> \"",
"\"-p <password> \"",
"\"-a <AntiNex URL http://localhost:8010> \"",
"\"-f <path to json file> \"",
"\"-b <optional - path to CA bundle directory> \"",
"\"-c <optional - path to x509 ssl certificate file> \"",
"\"-k <optional - path to x509 ssl key file>\"",
")",
"valid",
"=",
"True",
"if",
"not",
"user",
"or",
"user",
"==",
"\"user-not-set\"",
":",
"log",
".",
"error",
"(",
"\"missing user\"",
")",
"valid",
"=",
"False",
"if",
"not",
"password",
"or",
"password",
"==",
"\"password-not-set\"",
":",
"log",
".",
"error",
"(",
"\"missing password\"",
")",
"valid",
"=",
"False",
"if",
"not",
"datafile",
"or",
"datafile",
"==",
"\"datafile-not-set\"",
":",
"log",
".",
"error",
"(",
"\"missing datafile\"",
")",
"valid",
"=",
"False",
"else",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"datafile",
")",
":",
"log",
".",
"error",
"(",
"(",
"\"did not find datafile={} on disk\"",
")",
".",
"format",
"(",
"datafile",
")",
")",
"valid",
"=",
"False",
"if",
"not",
"valid",
":",
"log",
".",
"error",
"(",
"usage",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"if",
"verbose",
":",
"log",
".",
"info",
"(",
"(",
"\"creating client user={} url={} file={} \"",
"\"ca_dir={} cert_file={} key_file={}\"",
")",
".",
"format",
"(",
"user",
",",
"url",
",",
"datafile",
",",
"ca_dir",
",",
"cert_file",
",",
"key_file",
")",
")",
"client",
"=",
"AIClient",
"(",
"user",
"=",
"user",
",",
"email",
"=",
"email",
",",
"password",
"=",
"password",
",",
"url",
"=",
"url",
",",
"ca_dir",
"=",
"ca_dir",
",",
"cert_file",
"=",
"cert_file",
",",
"key_file",
"=",
"key_file",
",",
"verbose",
"=",
"verbose",
",",
"debug",
"=",
"debug",
")",
"if",
"verbose",
":",
"log",
".",
"info",
"(",
"(",
"\"loading request in datafile={}\"",
")",
".",
"format",
"(",
"datafile",
")",
")",
"req_body",
"=",
"None",
"with",
"open",
"(",
"datafile",
",",
"\"r\"",
")",
"as",
"f",
":",
"req_body",
"=",
"json",
".",
"loads",
"(",
"f",
".",
"read",
"(",
")",
")",
"if",
"verbose",
":",
"log",
".",
"info",
"(",
"\"running job\"",
")",
"job_was_started",
"=",
"False",
"response",
"=",
"client",
".",
"run_job",
"(",
"body",
"=",
"req_body",
")",
"if",
"response",
"[",
"\"status\"",
"]",
"==",
"SUCCESS",
":",
"log",
".",
"info",
"(",
"(",
"\"job started with response={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"data\"",
"]",
")",
")",
"job_was_started",
"=",
"True",
"elif",
"response",
"[",
"\"status\"",
"]",
"==",
"FAILED",
":",
"log",
".",
"error",
"(",
"(",
"\"job failed with error='{}' with response={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"error\"",
"]",
",",
"response",
"[",
"\"data\"",
"]",
")",
")",
"elif",
"response",
"[",
"\"status\"",
"]",
"==",
"ERROR",
":",
"log",
".",
"error",
"(",
"(",
"\"job had an error='{}' with response={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"error\"",
"]",
",",
"response",
"[",
"\"data\"",
"]",
")",
")",
"elif",
"response",
"[",
"\"status\"",
"]",
"==",
"LOGIN_FAILED",
":",
"log",
".",
"error",
"(",
"(",
"\"job reported user was not able to log in \"",
"\"with an error='{}' with response={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"error\"",
"]",
",",
"response",
"[",
"\"data\"",
"]",
")",
")",
"if",
"not",
"job_was_started",
":",
"sys",
".",
"exit",
"(",
"1",
")",
"if",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"parsing response data={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"data\"",
"]",
")",
")",
"else",
":",
"if",
"verbose",
":",
"log",
".",
"info",
"(",
"\"parsing data\"",
")",
"res_data",
"=",
"response",
"[",
"\"data\"",
"]",
"job_data",
"=",
"res_data",
".",
"get",
"(",
"\"job\"",
",",
"None",
")",
"result_data",
"=",
"res_data",
".",
"get",
"(",
"\"results\"",
",",
"None",
")",
"if",
"not",
"job_data",
":",
"log",
".",
"error",
"(",
"(",
"\"missing job dictionary in response data={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"data\"",
"]",
")",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"if",
"not",
"result_data",
":",
"log",
".",
"error",
"(",
"(",
"\"missing results dictionary in response data={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"data\"",
"]",
")",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"job_id",
"=",
"job_data",
".",
"get",
"(",
"\"id\"",
",",
"None",
")",
"job_status",
"=",
"job_data",
".",
"get",
"(",
"\"status\"",
",",
"None",
")",
"result_id",
"=",
"result_data",
".",
"get",
"(",
"\"id\"",
",",
"None",
")",
"result_status",
"=",
"result_data",
".",
"get",
"(",
"\"status\"",
",",
"None",
")",
"log",
".",
"info",
"(",
"(",
"\"started job.id={} job.status={} with \"",
"\"result.id={} result.status={}\"",
")",
".",
"format",
"(",
"job_id",
",",
"job_status",
",",
"result_id",
",",
"result_status",
")",
")",
"job_results",
"=",
"client",
".",
"wait_for_job_to_finish",
"(",
"job_id",
"=",
"job_id",
")",
"if",
"job_results",
"[",
"\"status\"",
"]",
"!=",
"SUCCESS",
":",
"log",
".",
"error",
"(",
"(",
"\"failed waiting for job.id={} to finish error={} data={}\"",
")",
".",
"format",
"(",
"job_id",
",",
"job_results",
"[",
"\"error\"",
"]",
",",
"job_results",
"[",
"\"data\"",
"]",
")",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"final_job",
"=",
"job_results",
"[",
"\"data\"",
"]",
"[",
"\"job\"",
"]",
"final_result",
"=",
"job_results",
"[",
"\"data\"",
"]",
"[",
"\"result\"",
"]",
"if",
"verbose",
":",
"log",
".",
"info",
"(",
"(",
"\"job={}\"",
")",
".",
"format",
"(",
"ppj",
"(",
"final_job",
")",
")",
")",
"else",
":",
"log",
".",
"info",
"(",
"(",
"\"job={}\"",
")",
".",
"format",
"(",
"str",
"(",
"final_job",
")",
"[",
"0",
":",
"10",
"]",
")",
")",
"if",
"verbose",
":",
"log",
".",
"info",
"(",
"(",
"\"result={}\"",
")",
".",
"format",
"(",
"ppj",
"(",
"final_result",
")",
")",
")",
"else",
":",
"log",
".",
"info",
"(",
"(",
"\"result={}\"",
")",
".",
"format",
"(",
"str",
"(",
"final_result",
")",
"[",
"0",
":",
"10",
"]",
")",
")",
"log",
".",
"info",
"(",
"(",
"\"job.id={} is done\"",
")",
".",
"format",
"(",
"job_id",
")",
")",
"predictions",
"=",
"final_result",
"[",
"\"predictions_json\"",
"]",
".",
"get",
"(",
"\"predictions\"",
",",
"[",
"]",
")",
"log",
".",
"info",
"(",
"(",
"\"loading predictions={} into pandas dataframe\"",
")",
".",
"format",
"(",
"len",
"(",
"predictions",
")",
")",
")",
"df",
"=",
"pd",
".",
"DataFrame",
"(",
"predictions",
")",
"if",
"verbose",
":",
"log",
".",
"info",
"(",
"(",
"\"dataframe={}\"",
")",
".",
"format",
"(",
"df",
")",
")"
] |
train_new_deep_neural_network
Train a new deep neural network and store the results as a new:
``MLJob`` and ``MLJobResult`` database records.
|
[
"train_new_deep_neural_network"
] |
850ba2a2fe21c836e071def618dcecc9caf5d59c
|
https://github.com/jay-johnson/antinex-client/blob/850ba2a2fe21c836e071def618dcecc9caf5d59c/antinex_client/scripts/ai_train_dnn.py#L22-L329
|
train
|
jay-johnson/antinex-client
|
antinex_client/scripts/ai_prepare_dataset.py
|
prepare_new_dataset
|
def prepare_new_dataset():
"""prepare_new_dataset
Prepare a new ``MLPrepare`` record and dataset files on disk.
"""
parser = argparse.ArgumentParser(
description=(
"Python client to Prepare a dataset"))
parser.add_argument(
"-u",
help="username",
required=False,
dest="user")
parser.add_argument(
"-p",
help="user password",
required=False,
dest="password")
parser.add_argument(
"-e",
help="user email",
required=False,
dest="email")
parser.add_argument(
"-a",
help="url endpoint with default http://localhost:8010",
required=False,
dest="url")
parser.add_argument(
"-f",
help="file to use default ./examples/test-keras-dnn.json",
required=False,
dest="prepare_file")
parser.add_argument(
"-b",
help=(
"optional - path to CA bundle directory for "
"client encryption over HTTP"),
required=False,
dest="ca_dir")
parser.add_argument(
"-c",
help=(
"optional - path to x509 certificate for "
"client encryption over HTTP"),
required=False,
dest="cert_file")
parser.add_argument(
"-k",
help=(
"optional - path to x509 key file for "
"client encryption over HTTP"),
required=False,
dest="key_file")
parser.add_argument(
"-s",
help="silent",
required=False,
dest="silent",
action="store_true")
parser.add_argument(
"-d",
help="debug",
required=False,
dest="debug",
action="store_true")
args = parser.parse_args()
user = ev(
"API_USER",
"user-not-set")
password = ev(
"API_PASSWORD",
"password-not-set")
email = ev(
"API_EMAIL",
"email-not-set")
url = ev(
"API_URL",
"http://localhost:8010")
prepare_file = ev(
"DATAFILE",
"prepare_file-not-set")
ca_dir = os.getenv(
"API_CA_BUNDLE_DIR",
None)
cert_file = os.getenv(
"API_CERT_FILE",
None)
key_file = os.getenv(
"API_KEY_FILE",
None)
verbose = bool(str(ev(
"API_VERBOSE",
"true")).lower() == "true")
debug = bool(str(ev(
"API_DEBUG",
"false")).lower() == "true")
if args.user:
user = args.user
if args.password:
password = args.password
if args.email:
email = args.email
if args.url:
url = args.url
if args.prepare_file:
prepare_file = args.prepare_file
if args.ca_dir:
ca_dir = args.ca_dir
if args.cert_file:
cert_file = args.cert_file
if args.key_file:
key_file = args.key_file
if args.silent:
verbose = False
if args.debug:
debug = True
usage = (
"Please run with "
"-u <username> "
"-p <password> "
"-a <AntiNex URL http://localhost:8010> "
"-f <path to prepare file> "
"-b <optional - path to CA bundle directory> "
"-c <optional - path to x509 ssl certificate file> "
"-k <optional - path to x509 ssl key file>")
valid = True
if not user or user == "user-not-set":
log.error("missing user")
valid = False
if not password or password == "password-not-set":
log.error("missing password")
valid = False
if not prepare_file or prepare_file == "prepare_file-not-set":
log.error("missing prepare_file")
valid = False
else:
if not os.path.exists(prepare_file):
log.error(("did not find prepare_file={} on disk")
.format(
prepare_file))
valid = False
if not valid:
log.error(usage)
sys.exit(1)
if verbose:
log.info(("creating client user={} url={} file={}")
.format(
user,
url,
prepare_file))
client = AIClient(
user=user,
email=email,
password=password,
url=url,
ca_dir=ca_dir,
cert_file=cert_file,
key_file=key_file,
verbose=verbose,
debug=debug)
if verbose:
log.info(("loading request in prepare_file={}")
.format(
prepare_file))
req_body = None
with open(prepare_file, "r") as f:
req_body = json.loads(f.read())
if verbose:
log.info("running prepare")
prepare_was_started = False
response = client.run_prepare(
body=req_body)
if response["status"] == SUCCESS:
log.info(("prepare started with response={}")
.format(
response["data"]))
prepare_was_started = True
elif response["status"] == FAILED:
log.error(("prepare failed with error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == ERROR:
log.error(("prepare had an error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == LOGIN_FAILED:
log.error(("prepare reported user was not able to log in "
"with an error='{}' with response={}")
.format(
response["error"],
response["data"]))
if not prepare_was_started:
sys.exit(1)
if debug:
log.info(("parsing response data={}")
.format(
response["data"]))
else:
if verbose:
log.info("parsing data")
prepare_data = response["data"]
if not prepare_data:
log.error(("missing prepare dictionary in response data={}")
.format(
response["data"]))
sys.exit(1)
prepare_id = prepare_data.get("id", None)
prepare_status = prepare_data.get("status", None)
log.info(("started prepare.id={} prepare.status={}")
.format(
prepare_id,
prepare_status))
prepare_results = client.wait_for_prepare_to_finish(
prepare_id=prepare_id)
if prepare_results["status"] != SUCCESS:
log.error(("failed waiting for prepare.id={} to finish "
"error={} data={}")
.format(
prepare_id,
prepare_results["error"],
prepare_results["data"]))
sys.exit(1)
final_prepare = prepare_results["data"]
log.info(("prepare={}")
.format(
ppj(final_prepare)))
log.info(("prepare.id={} is done")
.format(
prepare_id))
|
python
|
def prepare_new_dataset():
    """prepare_new_dataset

    Prepare a new ``MLPrepare`` record and dataset files on disk.

    Configuration comes from environment variables first and may be
    overridden by command line flags. Exits the process with status
    ``1`` when validation fails, the prepare request is rejected, or
    waiting for the prepare job fails.
    """
    parser = argparse.ArgumentParser(
        description=(
            "Python client to Prepare a dataset"))
    parser.add_argument(
        "-u",
        help="username",
        required=False,
        dest="user")
    parser.add_argument(
        "-p",
        help="user password",
        required=False,
        dest="password")
    parser.add_argument(
        "-e",
        help="user email",
        required=False,
        dest="email")
    parser.add_argument(
        "-a",
        help="url endpoint with default http://localhost:8010",
        required=False,
        dest="url")
    parser.add_argument(
        "-f",
        help="file to use default ./examples/test-keras-dnn.json",
        required=False,
        dest="prepare_file")
    parser.add_argument(
        "-b",
        help=(
            "optional - path to CA bundle directory for "
            "client encryption over HTTP"),
        required=False,
        dest="ca_dir")
    parser.add_argument(
        "-c",
        help=(
            "optional - path to x509 certificate for "
            "client encryption over HTTP"),
        required=False,
        dest="cert_file")
    parser.add_argument(
        "-k",
        help=(
            "optional - path to x509 key file for "
            "client encryption over HTTP"),
        required=False,
        dest="key_file")
    parser.add_argument(
        "-s",
        help="silent",
        required=False,
        dest="silent",
        action="store_true")
    parser.add_argument(
        "-d",
        help="debug",
        required=False,
        dest="debug",
        action="store_true")
    args = parser.parse_args()

    # environment defaults - command line flags win below
    user = ev(
        "API_USER",
        "user-not-set")
    password = ev(
        "API_PASSWORD",
        "password-not-set")
    email = ev(
        "API_EMAIL",
        "email-not-set")
    url = ev(
        "API_URL",
        "http://localhost:8010")
    prepare_file = ev(
        "DATAFILE",
        "prepare_file-not-set")
    ca_dir = os.getenv(
        "API_CA_BUNDLE_DIR",
        None)
    cert_file = os.getenv(
        "API_CERT_FILE",
        None)
    key_file = os.getenv(
        "API_KEY_FILE",
        None)
    verbose = bool(str(ev(
        "API_VERBOSE",
        "true")).lower() == "true")
    debug = bool(str(ev(
        "API_DEBUG",
        "false")).lower() == "true")
    if args.user:
        user = args.user
    if args.password:
        password = args.password
    if args.email:
        email = args.email
    if args.url:
        url = args.url
    if args.prepare_file:
        prepare_file = args.prepare_file
    if args.ca_dir:
        ca_dir = args.ca_dir
    if args.cert_file:
        cert_file = args.cert_file
    if args.key_file:
        key_file = args.key_file
    if args.silent:
        verbose = False
    if args.debug:
        debug = True

    # fix: usage previously omitted the -e email flag parsed above
    usage = (
        "Please run with "
        "-u <username> "
        "-p <password> "
        "-e <email> "
        "-a <AntiNex URL http://localhost:8010> "
        "-f <path to prepare file> "
        "-b <optional - path to CA bundle directory> "
        "-c <optional - path to x509 ssl certificate file> "
        "-k <optional - path to x509 ssl key file>")
    valid = True
    if not user or user == "user-not-set":
        log.error("missing user")
        valid = False
    if not password or password == "password-not-set":
        log.error("missing password")
        valid = False
    if not prepare_file or prepare_file == "prepare_file-not-set":
        log.error("missing prepare_file")
        valid = False
    else:
        if not os.path.exists(prepare_file):
            log.error(("did not find prepare_file={} on disk")
                      .format(
                          prepare_file))
            valid = False
    if not valid:
        log.error(usage)
        sys.exit(1)

    if verbose:
        log.info(("creating client user={} url={} file={}")
                 .format(
                     user,
                     url,
                     prepare_file))
    client = AIClient(
        user=user,
        email=email,
        password=password,
        url=url,
        ca_dir=ca_dir,
        cert_file=cert_file,
        key_file=key_file,
        verbose=verbose,
        debug=debug)

    if verbose:
        log.info(("loading request in prepare_file={}")
                 .format(
                     prepare_file))
    req_body = None
    with open(prepare_file, "r") as f:
        req_body = json.loads(f.read())

    if verbose:
        log.info("running prepare")
    prepare_was_started = False
    response = client.run_prepare(
        body=req_body)
    if response["status"] == SUCCESS:
        log.info(("prepare started with response={}")
                 .format(
                     response["data"]))
        prepare_was_started = True
    elif response["status"] == FAILED:
        log.error(("prepare failed with error='{}' with response={}")
                  .format(
                      response["error"],
                      response["data"]))
    elif response["status"] == ERROR:
        log.error(("prepare had an error='{}' with response={}")
                  .format(
                      response["error"],
                      response["data"]))
    elif response["status"] == LOGIN_FAILED:
        log.error(("prepare reported user was not able to log in "
                   "with an error='{}' with response={}")
                  .format(
                      response["error"],
                      response["data"]))
    else:
        # fix: previously an unrecognized status exited with no message
        log.error(("prepare got an unexpected status={} with response={}")
                  .format(
                      response["status"],
                      response["data"]))
    if not prepare_was_started:
        sys.exit(1)

    if debug:
        log.info(("parsing response data={}")
                 .format(
                     response["data"]))
    else:
        if verbose:
            log.info("parsing data")
    prepare_data = response["data"]
    if not prepare_data:
        log.error(("missing prepare dictionary in response data={}")
                  .format(
                      response["data"]))
        sys.exit(1)

    prepare_id = prepare_data.get("id", None)
    prepare_status = prepare_data.get("status", None)
    log.info(("started prepare.id={} prepare.status={}")
             .format(
                 prepare_id,
                 prepare_status))

    # block until the server-side prepare job completes
    prepare_results = client.wait_for_prepare_to_finish(
        prepare_id=prepare_id)
    if prepare_results["status"] != SUCCESS:
        log.error(("failed waiting for prepare.id={} to finish "
                   "error={} data={}")
                  .format(
                      prepare_id,
                      prepare_results["error"],
                      prepare_results["data"]))
        sys.exit(1)
    final_prepare = prepare_results["data"]
    log.info(("prepare={}")
             .format(
                 ppj(final_prepare)))
    log.info(("prepare.id={} is done")
             .format(
                 prepare_id))
|
[
"def",
"prepare_new_dataset",
"(",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"(",
"\"Python client to Prepare a dataset\"",
")",
")",
"parser",
".",
"add_argument",
"(",
"\"-u\"",
",",
"help",
"=",
"\"username\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"user\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-p\"",
",",
"help",
"=",
"\"user password\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"password\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-e\"",
",",
"help",
"=",
"\"user email\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"email\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-a\"",
",",
"help",
"=",
"\"url endpoint with default http://localhost:8010\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"url\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-f\"",
",",
"help",
"=",
"\"file to use default ./examples/test-keras-dnn.json\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"prepare_file\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-b\"",
",",
"help",
"=",
"(",
"\"optional - path to CA bundle directory for \"",
"\"client encryption over HTTP\"",
")",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"ca_dir\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-c\"",
",",
"help",
"=",
"(",
"\"optional - path to x509 certificate for \"",
"\"client encryption over HTTP\"",
")",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"cert_file\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-k\"",
",",
"help",
"=",
"(",
"\"optional - path to x509 key file for \"",
"\"client encryption over HTTP\"",
")",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"key_file\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-s\"",
",",
"help",
"=",
"\"silent\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"silent\"",
",",
"action",
"=",
"\"store_true\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-d\"",
",",
"help",
"=",
"\"debug\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"debug\"",
",",
"action",
"=",
"\"store_true\"",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"user",
"=",
"ev",
"(",
"\"API_USER\"",
",",
"\"user-not-set\"",
")",
"password",
"=",
"ev",
"(",
"\"API_PASSWORD\"",
",",
"\"password-not-set\"",
")",
"email",
"=",
"ev",
"(",
"\"API_EMAIL\"",
",",
"\"email-not-set\"",
")",
"url",
"=",
"ev",
"(",
"\"API_URL\"",
",",
"\"http://localhost:8010\"",
")",
"prepare_file",
"=",
"ev",
"(",
"\"DATAFILE\"",
",",
"\"prepare_file-not-set\"",
")",
"ca_dir",
"=",
"os",
".",
"getenv",
"(",
"\"API_CA_BUNDLE_DIR\"",
",",
"None",
")",
"cert_file",
"=",
"os",
".",
"getenv",
"(",
"\"API_CERT_FILE\"",
",",
"None",
")",
"key_file",
"=",
"os",
".",
"getenv",
"(",
"\"API_KEY_FILE\"",
",",
"None",
")",
"verbose",
"=",
"bool",
"(",
"str",
"(",
"ev",
"(",
"\"API_VERBOSE\"",
",",
"\"true\"",
")",
")",
".",
"lower",
"(",
")",
"==",
"\"true\"",
")",
"debug",
"=",
"bool",
"(",
"str",
"(",
"ev",
"(",
"\"API_DEBUG\"",
",",
"\"false\"",
")",
")",
".",
"lower",
"(",
")",
"==",
"\"true\"",
")",
"if",
"args",
".",
"user",
":",
"user",
"=",
"args",
".",
"user",
"if",
"args",
".",
"password",
":",
"password",
"=",
"args",
".",
"password",
"if",
"args",
".",
"email",
":",
"email",
"=",
"args",
".",
"email",
"if",
"args",
".",
"url",
":",
"url",
"=",
"args",
".",
"url",
"if",
"args",
".",
"prepare_file",
":",
"prepare_file",
"=",
"args",
".",
"prepare_file",
"if",
"args",
".",
"ca_dir",
":",
"ca_dir",
"=",
"args",
".",
"ca_dir",
"if",
"args",
".",
"cert_file",
":",
"cert_file",
"=",
"args",
".",
"cert_file",
"if",
"args",
".",
"key_file",
":",
"key_file",
"=",
"args",
".",
"key_file",
"if",
"args",
".",
"silent",
":",
"verbose",
"=",
"False",
"if",
"args",
".",
"debug",
":",
"debug",
"=",
"True",
"usage",
"=",
"(",
"\"Please run with \"",
"\"-u <username> \"",
"\"-p <password> \"",
"\"-a <AntiNex URL http://localhost:8010> \"",
"\"-f <path to prepare file> \"",
"\"-b <optional - path to CA bundle directory> \"",
"\"-c <optional - path to x509 ssl certificate file> \"",
"\"-k <optional - path to x509 ssl key file>\"",
")",
"valid",
"=",
"True",
"if",
"not",
"user",
"or",
"user",
"==",
"\"user-not-set\"",
":",
"log",
".",
"error",
"(",
"\"missing user\"",
")",
"valid",
"=",
"False",
"if",
"not",
"password",
"or",
"password",
"==",
"\"password-not-set\"",
":",
"log",
".",
"error",
"(",
"\"missing password\"",
")",
"valid",
"=",
"False",
"if",
"not",
"prepare_file",
"or",
"prepare_file",
"==",
"\"prepare_file-not-set\"",
":",
"log",
".",
"error",
"(",
"\"missing prepare_file\"",
")",
"valid",
"=",
"False",
"else",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"prepare_file",
")",
":",
"log",
".",
"error",
"(",
"(",
"\"did not find prepare_file={} on disk\"",
")",
".",
"format",
"(",
"prepare_file",
")",
")",
"valid",
"=",
"False",
"if",
"not",
"valid",
":",
"log",
".",
"error",
"(",
"usage",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"if",
"verbose",
":",
"log",
".",
"info",
"(",
"(",
"\"creating client user={} url={} file={}\"",
")",
".",
"format",
"(",
"user",
",",
"url",
",",
"prepare_file",
")",
")",
"client",
"=",
"AIClient",
"(",
"user",
"=",
"user",
",",
"email",
"=",
"email",
",",
"password",
"=",
"password",
",",
"url",
"=",
"url",
",",
"ca_dir",
"=",
"ca_dir",
",",
"cert_file",
"=",
"cert_file",
",",
"key_file",
"=",
"key_file",
",",
"verbose",
"=",
"verbose",
",",
"debug",
"=",
"debug",
")",
"if",
"verbose",
":",
"log",
".",
"info",
"(",
"(",
"\"loading request in prepare_file={}\"",
")",
".",
"format",
"(",
"prepare_file",
")",
")",
"req_body",
"=",
"None",
"with",
"open",
"(",
"prepare_file",
",",
"\"r\"",
")",
"as",
"f",
":",
"req_body",
"=",
"json",
".",
"loads",
"(",
"f",
".",
"read",
"(",
")",
")",
"if",
"verbose",
":",
"log",
".",
"info",
"(",
"\"running prepare\"",
")",
"prepare_was_started",
"=",
"False",
"response",
"=",
"client",
".",
"run_prepare",
"(",
"body",
"=",
"req_body",
")",
"if",
"response",
"[",
"\"status\"",
"]",
"==",
"SUCCESS",
":",
"log",
".",
"info",
"(",
"(",
"\"prepare started with response={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"data\"",
"]",
")",
")",
"prepare_was_started",
"=",
"True",
"elif",
"response",
"[",
"\"status\"",
"]",
"==",
"FAILED",
":",
"log",
".",
"error",
"(",
"(",
"\"prepare failed with error='{}' with response={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"error\"",
"]",
",",
"response",
"[",
"\"data\"",
"]",
")",
")",
"elif",
"response",
"[",
"\"status\"",
"]",
"==",
"ERROR",
":",
"log",
".",
"error",
"(",
"(",
"\"prepare had an error='{}' with response={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"error\"",
"]",
",",
"response",
"[",
"\"data\"",
"]",
")",
")",
"elif",
"response",
"[",
"\"status\"",
"]",
"==",
"LOGIN_FAILED",
":",
"log",
".",
"error",
"(",
"(",
"\"prepare reported user was not able to log in \"",
"\"with an error='{}' with response={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"error\"",
"]",
",",
"response",
"[",
"\"data\"",
"]",
")",
")",
"if",
"not",
"prepare_was_started",
":",
"sys",
".",
"exit",
"(",
"1",
")",
"if",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"parsing response data={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"data\"",
"]",
")",
")",
"else",
":",
"if",
"verbose",
":",
"log",
".",
"info",
"(",
"\"parsing data\"",
")",
"prepare_data",
"=",
"response",
"[",
"\"data\"",
"]",
"if",
"not",
"prepare_data",
":",
"log",
".",
"error",
"(",
"(",
"\"missing prepare dictionary in response data={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"data\"",
"]",
")",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"prepare_id",
"=",
"prepare_data",
".",
"get",
"(",
"\"id\"",
",",
"None",
")",
"prepare_status",
"=",
"prepare_data",
".",
"get",
"(",
"\"status\"",
",",
"None",
")",
"log",
".",
"info",
"(",
"(",
"\"started prepare.id={} prepare.status={}\"",
")",
".",
"format",
"(",
"prepare_id",
",",
"prepare_status",
")",
")",
"prepare_results",
"=",
"client",
".",
"wait_for_prepare_to_finish",
"(",
"prepare_id",
"=",
"prepare_id",
")",
"if",
"prepare_results",
"[",
"\"status\"",
"]",
"!=",
"SUCCESS",
":",
"log",
".",
"error",
"(",
"(",
"\"failed waiting for prepare.id={} to finish \"",
"\"error={} data={}\"",
")",
".",
"format",
"(",
"prepare_id",
",",
"prepare_results",
"[",
"\"error\"",
"]",
",",
"prepare_results",
"[",
"\"data\"",
"]",
")",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"final_prepare",
"=",
"prepare_results",
"[",
"\"data\"",
"]",
"log",
".",
"info",
"(",
"(",
"\"prepare={}\"",
")",
".",
"format",
"(",
"ppj",
"(",
"final_prepare",
")",
")",
")",
"log",
".",
"info",
"(",
"(",
"\"prepare.id={} is done\"",
")",
".",
"format",
"(",
"prepare_id",
")",
")"
] |
prepare_new_dataset
Prepare a new ``MLPrepare`` record and dataset files on disk.
|
[
"prepare_new_dataset"
] |
850ba2a2fe21c836e071def618dcecc9caf5d59c
|
https://github.com/jay-johnson/antinex-client/blob/850ba2a2fe21c836e071def618dcecc9caf5d59c/antinex_client/scripts/ai_prepare_dataset.py#L21-L276
|
train
|
mrahnis/drapery
|
drapery/ops/sample.py
|
drape
|
def drape(raster, feature):
    """Convert a 2D feature to a 3D feature by sampling a raster

    Parameters:
        raster (rasterio): raster to provide the z coordinate
        feature (dict): fiona feature record to convert

    Returns:
        result (Point or LineString): shapely Point or LineString of xyz
        coordinate triples, or ``None`` when the geometry type is not
        supported
    """
    # fix: initialize so an unsupported geometry type no longer raises
    # UnboundLocalError at the return statement
    result = None
    coords = feature['geometry']['coordinates']
    geom_type = feature['geometry']['type']
    if geom_type == 'Point':
        xyz = sample(raster, [coords])
        result = Point(xyz[0])
    elif geom_type == 'LineString':
        xyz = sample(raster, coords)
        points = [Point(x, y, z) for x, y, z in xyz]
        result = LineString(points)
    else:
        logging.error('drape not implemented for {}'.format(geom_type))
    return result
|
python
|
def drape(raster, feature):
    """Lift a 2D fiona feature into 3D by sampling *raster* for z values.

    Parameters:
        raster (rasterio): raster to provide the z coordinate
        feature (dict): fiona feature record to convert

    Returns:
        result (Point or LineString): shapely geometry of xyz triples
    """
    geometry = feature['geometry']
    kind = geometry['type']
    vertices = geometry['coordinates']
    if kind == 'Point':
        sampled = sample(raster, [vertices])
        result = Point(sampled[0])
    elif kind == 'LineString':
        sampled = sample(raster, vertices)
        result = LineString([Point(x, y, z) for x, y, z in sampled])
    else:
        logging.error('drape not implemented for {}'.format(kind))
    return result
|
[
"def",
"drape",
"(",
"raster",
",",
"feature",
")",
":",
"coords",
"=",
"feature",
"[",
"'geometry'",
"]",
"[",
"'coordinates'",
"]",
"geom_type",
"=",
"feature",
"[",
"'geometry'",
"]",
"[",
"'type'",
"]",
"if",
"geom_type",
"==",
"'Point'",
":",
"xyz",
"=",
"sample",
"(",
"raster",
",",
"[",
"coords",
"]",
")",
"result",
"=",
"Point",
"(",
"xyz",
"[",
"0",
"]",
")",
"elif",
"geom_type",
"==",
"'LineString'",
":",
"xyz",
"=",
"sample",
"(",
"raster",
",",
"coords",
")",
"points",
"=",
"[",
"Point",
"(",
"x",
",",
"y",
",",
"z",
")",
"for",
"x",
",",
"y",
",",
"z",
"in",
"xyz",
"]",
"result",
"=",
"LineString",
"(",
"points",
")",
"else",
":",
"logging",
".",
"error",
"(",
"'drape not implemented for {}'",
".",
"format",
"(",
"geom_type",
")",
")",
"return",
"result"
] |
Convert a 2D feature to a 3D feature by sampling a raster
Parameters:
raster (rasterio): raster to provide the z coordinate
feature (dict): fiona feature record to convert
Returns:
result (Point or Linestring): shapely Point or LineString of xyz coordinate triples
|
[
"Convert",
"a",
"2D",
"feature",
"to",
"a",
"3D",
"feature",
"by",
"sampling",
"a",
"raster"
] |
c0c0906fb5ff846cf591cb9fe8a9eaee68e8820c
|
https://github.com/mrahnis/drapery/blob/c0c0906fb5ff846cf591cb9fe8a9eaee68e8820c/drapery/ops/sample.py#L6-L30
|
train
|
mrahnis/drapery
|
drapery/ops/sample.py
|
sample
|
def sample(raster, coords):
    """Sample a raster at given coordinates

    Given a list of coordinates, return a list of x,y,z triples with z
    coordinates sampled from an input raster.

    Parameters:
        raster (rasterio): raster dataset to sample
        coords: array of tuples containing coordinate pairs (x,y) or
            triples (x,y,z)

    Returns:
        result: array of tuples containing coordinate triples (x,y,z)
    """
    # fix: an empty coordinate list previously raised IndexError on coords[0]
    if not coords:
        return []
    if len(coords[0]) == 3:
        logging.info('Input is a 3D geometry, z coordinate will be updated.')
        # drop the stale z before asking the raster for new values
        z = raster.sample([(x, y) for x, y, _ in coords],
                          indexes=raster.indexes)
    else:
        z = raster.sample(coords, indexes=raster.indexes)
    result = [(vert[0], vert[1], vert_z) for vert, vert_z in zip(coords, z)]
    return result
|
python
|
def sample(raster, coords):
    """Return (x, y, z) triples for *coords* with z sampled from *raster*.

    Parameters:
        raster (rasterio): raster dataset to sample
        coords: array of tuples containing coordinate pairs (x,y) or
            triples (x,y,z)

    Returns:
        result: array of tuples containing coordinate triples (x,y,z)
    """
    is_3d = len(coords[0]) == 3
    if is_3d:
        logging.info('Input is a 3D geometry, z coordinate will be updated.')
        xy_pairs = [(x, y) for x, y, z in coords]
        sampled = raster.sample(xy_pairs, indexes=raster.indexes)
    else:
        sampled = raster.sample(coords, indexes=raster.indexes)
    return [(pt[0], pt[1], value) for pt, value in zip(coords, sampled)]
|
[
"def",
"sample",
"(",
"raster",
",",
"coords",
")",
":",
"if",
"len",
"(",
"coords",
"[",
"0",
"]",
")",
"==",
"3",
":",
"logging",
".",
"info",
"(",
"'Input is a 3D geometry, z coordinate will be updated.'",
")",
"z",
"=",
"raster",
".",
"sample",
"(",
"[",
"(",
"x",
",",
"y",
")",
"for",
"x",
",",
"y",
",",
"z",
"in",
"coords",
"]",
",",
"indexes",
"=",
"raster",
".",
"indexes",
")",
"else",
":",
"z",
"=",
"raster",
".",
"sample",
"(",
"coords",
",",
"indexes",
"=",
"raster",
".",
"indexes",
")",
"result",
"=",
"[",
"(",
"vert",
"[",
"0",
"]",
",",
"vert",
"[",
"1",
"]",
",",
"vert_z",
")",
"for",
"vert",
",",
"vert_z",
"in",
"zip",
"(",
"coords",
",",
"z",
")",
"]",
"return",
"result"
] |
Sample a raster at given coordinates
Given a list of coordinates, return a list of x,y,z triples with z coordinates sampled from an input raster
Parameters:
raster (rasterio): raster dataset to sample
coords: array of tuples containing coordinate pairs (x,y) or triples (x,y,z)
Returns:
result: array of tuples containing coordinate triples (x,y,z)
|
[
"Sample",
"a",
"raster",
"at",
"given",
"coordinates"
] |
c0c0906fb5ff846cf591cb9fe8a9eaee68e8820c
|
https://github.com/mrahnis/drapery/blob/c0c0906fb5ff846cf591cb9fe8a9eaee68e8820c/drapery/ops/sample.py#L32-L53
|
train
|
jay-johnson/antinex-client
|
antinex_client/log/setup_logging.py
|
setup_logging
|
def setup_logging(
        default_level=logging.INFO,
        default_path="{}/logging.json".format(
            os.getenv(
                "LOG_DIR",
                os.path.dirname(os.path.realpath(__file__)))),
        env_key="LOG_CFG",
        config_name=None):
    """setup_logging

    Setup logging configuration

    :param default_level: level to log
    :param default_path: path to config (optional)
    :param env_key: path to config in this env var
    :param config_name: filename for config
    """
    def _apply(config_path):
        # load a json logging config and hand it to the logging framework
        with open(config_path, "rt") as config_file:
            logging.config.dictConfig(json.load(config_file))

    config_file_name = default_path.split("/")[-1]
    path = default_path
    if config_name:
        # keep the directory from default_path but swap the file name
        config_file_name = config_name
        path = "{}/{}".format(
            "/".join(default_path.split("/")[:-1]),
            config_file_name)
    env_path = os.getenv(env_key, None)
    if env_path:
        # the env var takes priority over every computed path
        path = env_path
    if os.path.exists(path):
        _apply(path)
        return
    # fall back to well-known repo-relative locations
    fallbacks = [
        os.getcwd() + "/antinex_client/log/{}".format(config_file_name),
        os.getcwd() + "/../log/{}".format(config_file_name),
    ]
    for candidate in fallbacks:
        if os.path.exists(candidate):
            _apply(candidate)
            return
    logging.basicConfig(level=default_level)
    return
|
python
|
def setup_logging(
        default_level=logging.INFO,
        default_path="{}/logging.json".format(
            os.getenv(
                "LOG_DIR",
                os.path.dirname(os.path.realpath(__file__)))),
        env_key="LOG_CFG",
        config_name=None):
    """Configure the logging framework from a json config file.

    :param default_level: level to log
    :param default_path: path to config (optional)
    :param env_key: path to config in this env var
    :param config_name: filename for config
    """
    parts = default_path.split("/")
    log_file = config_name if config_name else parts[-1]
    if config_name:
        # rebuild the path with the caller-supplied file name
        path = "{}/{}".format("/".join(parts[:-1]), log_file)
    else:
        path = default_path
    override = os.getenv(env_key, None)
    if override:
        path = override
    # first existing location wins; otherwise use basicConfig
    search_order = [
        path,
        os.getcwd() + "/antinex_client/log/{}".format(log_file),
        os.getcwd() + "/../log/{}".format(log_file),
    ]
    for location in search_order:
        if not os.path.exists(location):
            continue
        with open(location, "rt") as config_handle:
            logging.config.dictConfig(json.load(config_handle))
        return
    logging.basicConfig(level=default_level)
    return
|
[
"def",
"setup_logging",
"(",
"default_level",
"=",
"logging",
".",
"INFO",
",",
"default_path",
"=",
"\"{}/logging.json\"",
".",
"format",
"(",
"os",
".",
"getenv",
"(",
"\"LOG_DIR\"",
",",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"realpath",
"(",
"__file__",
")",
")",
")",
")",
",",
"env_key",
"=",
"\"LOG_CFG\"",
",",
"config_name",
"=",
"None",
")",
":",
"path",
"=",
"default_path",
"file_name",
"=",
"default_path",
".",
"split",
"(",
"\"/\"",
")",
"[",
"-",
"1",
"]",
"if",
"config_name",
":",
"file_name",
"=",
"config_name",
"path",
"=",
"\"{}/{}\"",
".",
"format",
"(",
"\"/\"",
".",
"join",
"(",
"default_path",
".",
"split",
"(",
"\"/\"",
")",
"[",
":",
"-",
"1",
"]",
")",
",",
"file_name",
")",
"value",
"=",
"os",
".",
"getenv",
"(",
"env_key",
",",
"None",
")",
"if",
"value",
":",
"path",
"=",
"value",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"with",
"open",
"(",
"path",
",",
"\"rt\"",
")",
"as",
"f",
":",
"config",
"=",
"json",
".",
"load",
"(",
"f",
")",
"logging",
".",
"config",
".",
"dictConfig",
"(",
"config",
")",
"return",
"else",
":",
"cwd_path",
"=",
"os",
".",
"getcwd",
"(",
")",
"+",
"\"/antinex_client/log/{}\"",
".",
"format",
"(",
"file_name",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"cwd_path",
")",
":",
"with",
"open",
"(",
"cwd_path",
",",
"\"rt\"",
")",
"as",
"f",
":",
"config",
"=",
"json",
".",
"load",
"(",
"f",
")",
"logging",
".",
"config",
".",
"dictConfig",
"(",
"config",
")",
"return",
"rels_path",
"=",
"os",
".",
"getcwd",
"(",
")",
"+",
"\"/../log/{}\"",
".",
"format",
"(",
"file_name",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"rels_path",
")",
":",
"with",
"open",
"(",
"rels_path",
",",
"\"rt\"",
")",
"as",
"f",
":",
"config",
"=",
"json",
".",
"load",
"(",
"f",
")",
"logging",
".",
"config",
".",
"dictConfig",
"(",
"config",
")",
"return",
"else",
":",
"logging",
".",
"basicConfig",
"(",
"level",
"=",
"default_level",
")",
"return"
] |
setup_logging
Setup logging configuration
:param default_level: level to log
:param default_path: path to config (optional)
:param env_key: path to config in this env var
:param config_name: filename for config
|
[
"setup_logging"
] |
850ba2a2fe21c836e071def618dcecc9caf5d59c
|
https://github.com/jay-johnson/antinex-client/blob/850ba2a2fe21c836e071def618dcecc9caf5d59c/antinex_client/log/setup_logging.py#L6-L59
|
train
|
jay-johnson/antinex-client
|
antinex_client/log/setup_logging.py
|
build_logger
|
def build_logger(
        name=os.getenv(
            "LOG_NAME",
            "client"),
        config="logging.json",
        log_level=logging.INFO,
        log_config_path="{}/logging.json".format(
            os.getenv(
                "LOG_CFG",
                os.path.dirname(os.path.realpath(__file__))))):
    """build_logger

    :param name: name that shows in the logger
    :param config: name of the config file
    :param log_level: level to log
    :param log_config_path: path to log config file
    """
    # candidate configs, most-specific first; first one on disk wins
    chosen = None
    for candidate in ("./log/{}".format(config), log_config_path):
        if os.path.exists(candidate):
            chosen = candidate
            break
    if chosen is None:
        # last resort: the packaged default config
        chosen = "./antinex_client/log/{}".format("logging.json")
    setup_logging(
        default_level=log_level,
        default_path=chosen)
    return logging.getLogger(name)
|
python
|
def build_logger(
        name=os.getenv(
            "LOG_NAME",
            "client"),
        config="logging.json",
        log_level=logging.INFO,
        log_config_path="{}/logging.json".format(
            os.getenv(
                "LOG_CFG",
                os.path.dirname(os.path.realpath(__file__))))):
    """Create a named logger configured from the first config file found.

    :param name: name that shows in the logger
    :param config: name of the config file
    :param log_level: level to log
    :param log_config_path: path to log config file
    """
    use_config = "./log/{}".format(config)
    if not os.path.exists(use_config):
        # fall back to the explicit path, then the packaged default
        use_config = log_config_path
        if not os.path.exists(use_config):
            use_config = "./antinex_client/log/logging.json"
    setup_logging(
        default_level=log_level,
        default_path=use_config)
    return logging.getLogger(name)
|
[
"def",
"build_logger",
"(",
"name",
"=",
"os",
".",
"getenv",
"(",
"\"LOG_NAME\"",
",",
"\"client\"",
")",
",",
"config",
"=",
"\"logging.json\"",
",",
"log_level",
"=",
"logging",
".",
"INFO",
",",
"log_config_path",
"=",
"\"{}/logging.json\"",
".",
"format",
"(",
"os",
".",
"getenv",
"(",
"\"LOG_CFG\"",
",",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"realpath",
"(",
"__file__",
")",
")",
")",
")",
")",
":",
"use_config",
"=",
"(",
"\"./log/{}\"",
")",
".",
"format",
"(",
"\"{}\"",
".",
"format",
"(",
"config",
")",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"use_config",
")",
":",
"use_config",
"=",
"log_config_path",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"use_config",
")",
":",
"use_config",
"=",
"(",
"\"./antinex_client/log/{}\"",
")",
".",
"format",
"(",
"\"logging.json\"",
")",
"# find the log processing",
"setup_logging",
"(",
"default_level",
"=",
"log_level",
",",
"default_path",
"=",
"use_config",
")",
"return",
"logging",
".",
"getLogger",
"(",
"name",
")"
] |
build_logger
:param name: name that shows in the logger
:param config: name of the config file
:param log_level: level to log
:param log_config_path: path to log config file
|
[
"build_logger"
] |
850ba2a2fe21c836e071def618dcecc9caf5d59c
|
https://github.com/jay-johnson/antinex-client/blob/850ba2a2fe21c836e071def618dcecc9caf5d59c/antinex_client/log/setup_logging.py#L63-L94
|
train
|
jay-johnson/antinex-client
|
antinex_client/log/setup_logging.py
|
build_colorized_logger
|
def build_colorized_logger(
        name=os.getenv(
            "LOG_NAME",
            "client"),
        config="colors-logging.json",
        log_level=logging.INFO,
        log_config_path="{}/logging.json".format(
            os.getenv(
                "LOG_CFG",
                os.path.dirname(os.path.realpath(__file__))))):
    """build_colorized_logger

    :param name: name that shows in the logger
    :param config: name of the config file
    :param log_level: level to log
    :param log_config_path: path to log config file
    """
    # a shared log config (SHARED_LOG_CFG) overrides everything when present
    override_config = os.getenv(
        "SHARED_LOG_CFG",
        None)
    debug_log_config = bool(os.getenv(
        "DEBUG_SHARED_LOG_CFG",
        "0") == "1")
    if override_config:
        if debug_log_config:
            print((
                "creating logger config env var: "
                "SHARED_LOG_CFG={}".format(
                    override_config)))
        if os.path.exists(override_config):
            setup_logging(
                default_level=log_level,
                default_path=override_config)
            return logging.getLogger(name)
        if debug_log_config:
            print((
                "Failed to find log config using env var: "
                "SHARED_LOG_CFG={}".format(
                    override_config)))
    else:
        if debug_log_config:
            print((
                "Not using shared logging env var: "
                "SHARED_LOG_CFG={}".format(
                    override_config)))
    # otherwise walk the known config locations; first hit wins
    for use_config in (
            "{}".format(config),
            "./antinex_client/log/{}".format(config),
            log_config_path,
            "./log/{}".format(config)):
        if os.path.exists(use_config):
            break
    else:
        use_config = "./antinex_client/log/{}".format("logging.json")
    setup_logging(
        default_level=log_level,
        default_path=use_config)
    return logging.getLogger(name)
|
python
|
def build_colorized_logger(
        name=os.getenv(
            "LOG_NAME",
            "client"),
        config="colors-logging.json",
        log_level=logging.INFO,
        log_config_path="{}/logging.json".format(
            os.getenv(
                "LOG_CFG",
                os.path.dirname(os.path.realpath(__file__))))):
    """Build a colorized logger, preferring a shared config when available.

    :param name: name that shows in the logger
    :param config: name of the config file
    :param log_level: level to log
    :param log_config_path: path to log config file
    """
    shared_cfg = os.getenv(
        "SHARED_LOG_CFG",
        None)
    show_cfg_debug = os.getenv(
        "DEBUG_SHARED_LOG_CFG",
        "0") == "1"
    if shared_cfg:
        if show_cfg_debug:
            print((
                "creating logger config env var: "
                "SHARED_LOG_CFG={}".format(
                    shared_cfg)))
        if os.path.exists(shared_cfg):
            # shared config found - use it and stop searching
            setup_logging(
                default_level=log_level,
                default_path=shared_cfg)
            return logging.getLogger(name)
        if show_cfg_debug:
            print((
                "Failed to find log config using env var: "
                "SHARED_LOG_CFG={}".format(
                    shared_cfg)))
    elif show_cfg_debug:
        print((
            "Not using shared logging env var: "
            "SHARED_LOG_CFG={}".format(
                shared_cfg)))
    # cascade through the known config locations
    use_config = "{}".format(config)
    if not os.path.exists(use_config):
        use_config = "./antinex_client/log/{}".format(config)
    if not os.path.exists(use_config):
        use_config = log_config_path
    if not os.path.exists(use_config):
        use_config = "./log/{}".format(config)
    if not os.path.exists(use_config):
        use_config = "./antinex_client/log/{}".format("logging.json")
    setup_logging(
        default_level=log_level,
        default_path=use_config)
    return logging.getLogger(name)
|
[
"def",
"build_colorized_logger",
"(",
"name",
"=",
"os",
".",
"getenv",
"(",
"\"LOG_NAME\"",
",",
"\"client\"",
")",
",",
"config",
"=",
"\"colors-logging.json\"",
",",
"log_level",
"=",
"logging",
".",
"INFO",
",",
"log_config_path",
"=",
"\"{}/logging.json\"",
".",
"format",
"(",
"os",
".",
"getenv",
"(",
"\"LOG_CFG\"",
",",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"realpath",
"(",
"__file__",
")",
")",
")",
")",
")",
":",
"override_config",
"=",
"os",
".",
"getenv",
"(",
"\"SHARED_LOG_CFG\"",
",",
"None",
")",
"debug_log_config",
"=",
"bool",
"(",
"os",
".",
"getenv",
"(",
"\"DEBUG_SHARED_LOG_CFG\"",
",",
"\"0\"",
")",
"==",
"\"1\"",
")",
"if",
"override_config",
":",
"if",
"debug_log_config",
":",
"print",
"(",
"(",
"\"creating logger config env var: \"",
"\"SHARED_LOG_CFG={}\"",
".",
"format",
"(",
"override_config",
")",
")",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"override_config",
")",
":",
"setup_logging",
"(",
"default_level",
"=",
"log_level",
",",
"default_path",
"=",
"override_config",
")",
"return",
"logging",
".",
"getLogger",
"(",
"name",
")",
"if",
"debug_log_config",
":",
"print",
"(",
"(",
"\"Failed to find log config using env var: \"",
"\"SHARED_LOG_CFG={}\"",
".",
"format",
"(",
"override_config",
")",
")",
")",
"else",
":",
"if",
"debug_log_config",
":",
"print",
"(",
"(",
"\"Not using shared logging env var: \"",
"\"SHARED_LOG_CFG={}\"",
".",
"format",
"(",
"override_config",
")",
")",
")",
"# allow a shared log config across all components",
"use_config",
"=",
"(",
"\"{}\"",
")",
".",
"format",
"(",
"config",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"use_config",
")",
":",
"use_config",
"=",
"(",
"\"./antinex_client/log/{}\"",
")",
".",
"format",
"(",
"config",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"use_config",
")",
":",
"use_config",
"=",
"log_config_path",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"use_config",
")",
":",
"use_config",
"=",
"(",
"\"./log/{}\"",
")",
".",
"format",
"(",
"config",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"use_config",
")",
":",
"use_config",
"=",
"(",
"\"./antinex_client/log/{}\"",
")",
".",
"format",
"(",
"\"logging.json\"",
")",
"# find the last log config backup from the base of the repo",
"# find the log config from the defaults with the env LOG_CFG",
"# find the log config from the base of the repo",
"# find the log config by the given path",
"setup_logging",
"(",
"default_level",
"=",
"log_level",
",",
"default_path",
"=",
"use_config",
")",
"return",
"logging",
".",
"getLogger",
"(",
"name",
")"
] |
build_colorized_logger
:param name: name that shows in the logger
:param config: name of the config file
:param log_level: level to log
:param log_config_path: path to log config file
|
[
"build_colorized_logger"
] |
850ba2a2fe21c836e071def618dcecc9caf5d59c
|
https://github.com/jay-johnson/antinex-client/blob/850ba2a2fe21c836e071def618dcecc9caf5d59c/antinex_client/log/setup_logging.py#L98-L169
|
train
|
feliphebueno/Rinzler
|
rinzler/auth/base_auth_service.py
|
BaseAuthService.authenticate
|
def authenticate(self, request: HttpRequest, auth_route: str, actual_params: dict) -> bool:
"""
Your AuhtService should override this method for request authentication, otherwise means no authentication.
:param request: HttpRequest Django's HttpRequest object
:param auth_route: str User's resqueted route
:param actual_params: User's url parameters
:return: bool
"""
if auth_route and actual_params:
self.auth_data = {}
return True
|
python
|
def authenticate(self, request: HttpRequest, auth_route: str, actual_params: dict) -> bool:
"""
Your AuhtService should override this method for request authentication, otherwise means no authentication.
:param request: HttpRequest Django's HttpRequest object
:param auth_route: str User's resqueted route
:param actual_params: User's url parameters
:return: bool
"""
if auth_route and actual_params:
self.auth_data = {}
return True
|
[
"def",
"authenticate",
"(",
"self",
",",
"request",
":",
"HttpRequest",
",",
"auth_route",
":",
"str",
",",
"actual_params",
":",
"dict",
")",
"->",
"bool",
":",
"if",
"auth_route",
"and",
"actual_params",
":",
"self",
".",
"auth_data",
"=",
"{",
"}",
"return",
"True"
] |
Your AuhtService should override this method for request authentication, otherwise means no authentication.
:param request: HttpRequest Django's HttpRequest object
:param auth_route: str User's resqueted route
:param actual_params: User's url parameters
:return: bool
|
[
"Your",
"AuhtService",
"should",
"override",
"this",
"method",
"for",
"request",
"authentication",
"otherwise",
"means",
"no",
"authentication",
".",
":",
"param",
"request",
":",
"HttpRequest",
"Django",
"s",
"HttpRequest",
"object",
":",
"param",
"auth_route",
":",
"str",
"User",
"s",
"resqueted",
"route",
":",
"param",
"actual_params",
":",
"User",
"s",
"url",
"parameters",
":",
"return",
":",
"bool"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/auth/base_auth_service.py#L13-L23
|
train
|
mrahnis/drapery
|
drapery/cli/drape.py
|
cli
|
def cli(source_f, raster_f, output, verbose):
"""
Converts 2D geometries to 3D using GEOS sample through fiona.
\b
Example:
drape point.shp elevation.tif -o point_z.shp
"""
with fiona.open(source_f, 'r') as source:
source_driver = source.driver
source_crs = source.crs
sink_schema = source.schema.copy()
source_geom = source.schema['geometry']
if source_geom == 'Point':
sink_schema['geometry'] = '3D Point'
elif source_geom == 'LineString':
sink_schema['geometry'] = '3D LineString'
elif source_geom == '3D Point' or source_geom == '3D LineString':
pass
else:
click.BadParameter("Source geometry type {} not implemented".format(source_geom))
with rasterio.open(raster_f) as raster:
if source_crs != raster.crs:
click.BadParameter("Features and raster have different CRS.")
if raster.count > 1:
warnings.warn("Found {0} bands in {1}, expected a single band raster".format(raster.bands, raster_f))
supported = ['int16', 'int32', 'float32', 'float64']
if raster.dtypes[0] not in supported:
warnings.warn("Found {0} type in {1}, expected one of {2}".format(raster.dtypes[0]), raster_f, supported)
with fiona.open(
output, 'w',
driver=source_driver,
crs=source_crs,
schema=sink_schema) as sink:
for feature in source:
try:
feature_z = drapery.drape(raster, feature)
sink.write({
'geometry': mapping(feature_z),
'properties': feature['properties'],
})
except Exception:
logging.exception("Error processing feature %s:", feature['id'])
|
python
|
def cli(source_f, raster_f, output, verbose):
"""
Converts 2D geometries to 3D using GEOS sample through fiona.
\b
Example:
drape point.shp elevation.tif -o point_z.shp
"""
with fiona.open(source_f, 'r') as source:
source_driver = source.driver
source_crs = source.crs
sink_schema = source.schema.copy()
source_geom = source.schema['geometry']
if source_geom == 'Point':
sink_schema['geometry'] = '3D Point'
elif source_geom == 'LineString':
sink_schema['geometry'] = '3D LineString'
elif source_geom == '3D Point' or source_geom == '3D LineString':
pass
else:
click.BadParameter("Source geometry type {} not implemented".format(source_geom))
with rasterio.open(raster_f) as raster:
if source_crs != raster.crs:
click.BadParameter("Features and raster have different CRS.")
if raster.count > 1:
warnings.warn("Found {0} bands in {1}, expected a single band raster".format(raster.bands, raster_f))
supported = ['int16', 'int32', 'float32', 'float64']
if raster.dtypes[0] not in supported:
warnings.warn("Found {0} type in {1}, expected one of {2}".format(raster.dtypes[0]), raster_f, supported)
with fiona.open(
output, 'w',
driver=source_driver,
crs=source_crs,
schema=sink_schema) as sink:
for feature in source:
try:
feature_z = drapery.drape(raster, feature)
sink.write({
'geometry': mapping(feature_z),
'properties': feature['properties'],
})
except Exception:
logging.exception("Error processing feature %s:", feature['id'])
|
[
"def",
"cli",
"(",
"source_f",
",",
"raster_f",
",",
"output",
",",
"verbose",
")",
":",
"with",
"fiona",
".",
"open",
"(",
"source_f",
",",
"'r'",
")",
"as",
"source",
":",
"source_driver",
"=",
"source",
".",
"driver",
"source_crs",
"=",
"source",
".",
"crs",
"sink_schema",
"=",
"source",
".",
"schema",
".",
"copy",
"(",
")",
"source_geom",
"=",
"source",
".",
"schema",
"[",
"'geometry'",
"]",
"if",
"source_geom",
"==",
"'Point'",
":",
"sink_schema",
"[",
"'geometry'",
"]",
"=",
"'3D Point'",
"elif",
"source_geom",
"==",
"'LineString'",
":",
"sink_schema",
"[",
"'geometry'",
"]",
"=",
"'3D LineString'",
"elif",
"source_geom",
"==",
"'3D Point'",
"or",
"source_geom",
"==",
"'3D LineString'",
":",
"pass",
"else",
":",
"click",
".",
"BadParameter",
"(",
"\"Source geometry type {} not implemented\"",
".",
"format",
"(",
"source_geom",
")",
")",
"with",
"rasterio",
".",
"open",
"(",
"raster_f",
")",
"as",
"raster",
":",
"if",
"source_crs",
"!=",
"raster",
".",
"crs",
":",
"click",
".",
"BadParameter",
"(",
"\"Features and raster have different CRS.\"",
")",
"if",
"raster",
".",
"count",
">",
"1",
":",
"warnings",
".",
"warn",
"(",
"\"Found {0} bands in {1}, expected a single band raster\"",
".",
"format",
"(",
"raster",
".",
"bands",
",",
"raster_f",
")",
")",
"supported",
"=",
"[",
"'int16'",
",",
"'int32'",
",",
"'float32'",
",",
"'float64'",
"]",
"if",
"raster",
".",
"dtypes",
"[",
"0",
"]",
"not",
"in",
"supported",
":",
"warnings",
".",
"warn",
"(",
"\"Found {0} type in {1}, expected one of {2}\"",
".",
"format",
"(",
"raster",
".",
"dtypes",
"[",
"0",
"]",
")",
",",
"raster_f",
",",
"supported",
")",
"with",
"fiona",
".",
"open",
"(",
"output",
",",
"'w'",
",",
"driver",
"=",
"source_driver",
",",
"crs",
"=",
"source_crs",
",",
"schema",
"=",
"sink_schema",
")",
"as",
"sink",
":",
"for",
"feature",
"in",
"source",
":",
"try",
":",
"feature_z",
"=",
"drapery",
".",
"drape",
"(",
"raster",
",",
"feature",
")",
"sink",
".",
"write",
"(",
"{",
"'geometry'",
":",
"mapping",
"(",
"feature_z",
")",
",",
"'properties'",
":",
"feature",
"[",
"'properties'",
"]",
",",
"}",
")",
"except",
"Exception",
":",
"logging",
".",
"exception",
"(",
"\"Error processing feature %s:\"",
",",
"feature",
"[",
"'id'",
"]",
")"
] |
Converts 2D geometries to 3D using GEOS sample through fiona.
\b
Example:
drape point.shp elevation.tif -o point_z.shp
|
[
"Converts",
"2D",
"geometries",
"to",
"3D",
"using",
"GEOS",
"sample",
"through",
"fiona",
"."
] |
c0c0906fb5ff846cf591cb9fe8a9eaee68e8820c
|
https://github.com/mrahnis/drapery/blob/c0c0906fb5ff846cf591cb9fe8a9eaee68e8820c/drapery/cli/drape.py#L21-L67
|
train
|
praekeltfoundation/molo.commenting
|
molo/commenting/managers.py
|
MoloCommentManager.for_model
|
def for_model(self, model):
"""
QuerySet for all comments for a particular model (either an instance or
a class).
"""
ct = ContentType.objects.get_for_model(model)
qs = self.get_queryset().filter(content_type=ct)
if isinstance(model, models.Model):
qs = qs.filter(object_pk=force_text(model._get_pk_val()))
return qs
|
python
|
def for_model(self, model):
"""
QuerySet for all comments for a particular model (either an instance or
a class).
"""
ct = ContentType.objects.get_for_model(model)
qs = self.get_queryset().filter(content_type=ct)
if isinstance(model, models.Model):
qs = qs.filter(object_pk=force_text(model._get_pk_val()))
return qs
|
[
"def",
"for_model",
"(",
"self",
",",
"model",
")",
":",
"ct",
"=",
"ContentType",
".",
"objects",
".",
"get_for_model",
"(",
"model",
")",
"qs",
"=",
"self",
".",
"get_queryset",
"(",
")",
".",
"filter",
"(",
"content_type",
"=",
"ct",
")",
"if",
"isinstance",
"(",
"model",
",",
"models",
".",
"Model",
")",
":",
"qs",
"=",
"qs",
".",
"filter",
"(",
"object_pk",
"=",
"force_text",
"(",
"model",
".",
"_get_pk_val",
"(",
")",
")",
")",
"return",
"qs"
] |
QuerySet for all comments for a particular model (either an instance or
a class).
|
[
"QuerySet",
"for",
"all",
"comments",
"for",
"a",
"particular",
"model",
"(",
"either",
"an",
"instance",
"or",
"a",
"class",
")",
"."
] |
94549bd75e4a5c5b3db43149e32d636330b3969c
|
https://github.com/praekeltfoundation/molo.commenting/blob/94549bd75e4a5c5b3db43149e32d636330b3969c/molo/commenting/managers.py#L9-L18
|
train
|
oksome/Skink
|
skink/remote.py
|
RemotePage.eval
|
def eval(self, command):
'Blocking call, returns the value of the execution in JS'
event = threading.Event()
# TODO: Add event to server
#job_id = str(id(command))
import random
job_id = str(random.random())
server.EVALUATIONS[job_id] = event
message = '?' + job_id + '=' + command
logging.info(('message:', [message]))
for listener in server.LISTENERS.get(self.path, []):
logging.debug(('listener:', listener))
listener.write_message(message)
success = event.wait(timeout=30)
if success:
value_parser = server.RESULTS[job_id]
del server.EVALUATIONS[job_id]
del server.RESULTS[job_id]
return value_parser()
else:
del server.EVALUATIONS[job_id]
if job_id in server.RESULTS:
del server.RESULTS[job_id]
raise IOError('Evaluation failed.')
|
python
|
def eval(self, command):
'Blocking call, returns the value of the execution in JS'
event = threading.Event()
# TODO: Add event to server
#job_id = str(id(command))
import random
job_id = str(random.random())
server.EVALUATIONS[job_id] = event
message = '?' + job_id + '=' + command
logging.info(('message:', [message]))
for listener in server.LISTENERS.get(self.path, []):
logging.debug(('listener:', listener))
listener.write_message(message)
success = event.wait(timeout=30)
if success:
value_parser = server.RESULTS[job_id]
del server.EVALUATIONS[job_id]
del server.RESULTS[job_id]
return value_parser()
else:
del server.EVALUATIONS[job_id]
if job_id in server.RESULTS:
del server.RESULTS[job_id]
raise IOError('Evaluation failed.')
|
[
"def",
"eval",
"(",
"self",
",",
"command",
")",
":",
"event",
"=",
"threading",
".",
"Event",
"(",
")",
"# TODO: Add event to server",
"#job_id = str(id(command))",
"import",
"random",
"job_id",
"=",
"str",
"(",
"random",
".",
"random",
"(",
")",
")",
"server",
".",
"EVALUATIONS",
"[",
"job_id",
"]",
"=",
"event",
"message",
"=",
"'?'",
"+",
"job_id",
"+",
"'='",
"+",
"command",
"logging",
".",
"info",
"(",
"(",
"'message:'",
",",
"[",
"message",
"]",
")",
")",
"for",
"listener",
"in",
"server",
".",
"LISTENERS",
".",
"get",
"(",
"self",
".",
"path",
",",
"[",
"]",
")",
":",
"logging",
".",
"debug",
"(",
"(",
"'listener:'",
",",
"listener",
")",
")",
"listener",
".",
"write_message",
"(",
"message",
")",
"success",
"=",
"event",
".",
"wait",
"(",
"timeout",
"=",
"30",
")",
"if",
"success",
":",
"value_parser",
"=",
"server",
".",
"RESULTS",
"[",
"job_id",
"]",
"del",
"server",
".",
"EVALUATIONS",
"[",
"job_id",
"]",
"del",
"server",
".",
"RESULTS",
"[",
"job_id",
"]",
"return",
"value_parser",
"(",
")",
"else",
":",
"del",
"server",
".",
"EVALUATIONS",
"[",
"job_id",
"]",
"if",
"job_id",
"in",
"server",
".",
"RESULTS",
":",
"del",
"server",
".",
"RESULTS",
"[",
"job_id",
"]",
"raise",
"IOError",
"(",
"'Evaluation failed.'",
")"
] |
Blocking call, returns the value of the execution in JS
|
[
"Blocking",
"call",
"returns",
"the",
"value",
"of",
"the",
"execution",
"in",
"JS"
] |
77560fb92749c1a21924cbd0017b8481a727be3e
|
https://github.com/oksome/Skink/blob/77560fb92749c1a21924cbd0017b8481a727be3e/skink/remote.py#L132-L158
|
train
|
oksome/Skink
|
skink/remote.py
|
RemotePage.register
|
def register(self, callback, name):
'Register a callback on server and on connected clients.'
server.CALLBACKS[name] = callback
self.run('''
window.skink.%s = function(args=[]) {
window.skink.call("%s", args);
}''' % (name, name))
|
python
|
def register(self, callback, name):
'Register a callback on server and on connected clients.'
server.CALLBACKS[name] = callback
self.run('''
window.skink.%s = function(args=[]) {
window.skink.call("%s", args);
}''' % (name, name))
|
[
"def",
"register",
"(",
"self",
",",
"callback",
",",
"name",
")",
":",
"server",
".",
"CALLBACKS",
"[",
"name",
"]",
"=",
"callback",
"self",
".",
"run",
"(",
"'''\n window.skink.%s = function(args=[]) {\n window.skink.call(\"%s\", args);\n }'''",
"%",
"(",
"name",
",",
"name",
")",
")"
] |
Register a callback on server and on connected clients.
|
[
"Register",
"a",
"callback",
"on",
"server",
"and",
"on",
"connected",
"clients",
"."
] |
77560fb92749c1a21924cbd0017b8481a727be3e
|
https://github.com/oksome/Skink/blob/77560fb92749c1a21924cbd0017b8481a727be3e/skink/remote.py#L160-L166
|
train
|
oksome/Skink
|
skink/server.py
|
launch_exception
|
def launch_exception(message):
"""
Launch a Python exception from an error that took place in the browser.
messsage format:
- name: str
- description: str
"""
error_name = message['name']
error_descr = message['description']
mapping = {
'ReferenceError': NameError,
}
if message['name'] in mapping:
raise mapping[error_name](error_descr)
else:
raise Exception('{}: {}'.format(error_name, error_descr))
|
python
|
def launch_exception(message):
"""
Launch a Python exception from an error that took place in the browser.
messsage format:
- name: str
- description: str
"""
error_name = message['name']
error_descr = message['description']
mapping = {
'ReferenceError': NameError,
}
if message['name'] in mapping:
raise mapping[error_name](error_descr)
else:
raise Exception('{}: {}'.format(error_name, error_descr))
|
[
"def",
"launch_exception",
"(",
"message",
")",
":",
"error_name",
"=",
"message",
"[",
"'name'",
"]",
"error_descr",
"=",
"message",
"[",
"'description'",
"]",
"mapping",
"=",
"{",
"'ReferenceError'",
":",
"NameError",
",",
"}",
"if",
"message",
"[",
"'name'",
"]",
"in",
"mapping",
":",
"raise",
"mapping",
"[",
"error_name",
"]",
"(",
"error_descr",
")",
"else",
":",
"raise",
"Exception",
"(",
"'{}: {}'",
".",
"format",
"(",
"error_name",
",",
"error_descr",
")",
")"
] |
Launch a Python exception from an error that took place in the browser.
messsage format:
- name: str
- description: str
|
[
"Launch",
"a",
"Python",
"exception",
"from",
"an",
"error",
"that",
"took",
"place",
"in",
"the",
"browser",
"."
] |
77560fb92749c1a21924cbd0017b8481a727be3e
|
https://github.com/oksome/Skink/blob/77560fb92749c1a21924cbd0017b8481a727be3e/skink/server.py#L42-L58
|
train
|
jay-johnson/antinex-client
|
antinex_client/scripts/ai_env_predict.py
|
start_predictions
|
def start_predictions():
"""start_predictions
Using environment variables, create an AntiNex AI Client.
You can also use command line args if you want.
This can train a new deep neural network if it does not
exist or it can use an existing pre-trained deep neural
network within the AntiNex Core to make new predictions.
"""
parser = argparse.ArgumentParser(
description=(
"Python client to make Predictions "
"using a Pre-trained Deep Neural Network "
"with AntiNex Django Rest Framework"))
parser.add_argument(
"-f",
help=(
"file to use default ./examples/"
"predict-rows-scaler-full-django.json"),
required=False,
dest="datafile")
parser.add_argument(
"-m",
help="send mock data",
required=False,
dest="use_fake_rows",
action="store_true")
parser.add_argument(
"-b",
help=(
"optional - path to CA bundle directory for "
"client encryption over HTTP"),
required=False,
dest="ca_dir")
parser.add_argument(
"-c",
help=(
"optional - path to x509 certificate for "
"client encryption over HTTP"),
required=False,
dest="cert_file")
parser.add_argument(
"-k",
help=(
"optional - path to x509 key file for "
"client encryption over HTTP"),
required=False,
dest="key_file")
parser.add_argument(
"-s",
help="silent",
required=False,
dest="silent",
action="store_true")
parser.add_argument(
"-d",
help="debug",
required=False,
dest="debug",
action="store_true")
args = parser.parse_args()
datafile = ev(
"DATAFILE",
"./examples/predict-rows-scaler-full-django.json")
ca_dir = os.getenv(
"API_CA_BUNDLE_DIR",
None)
cert_file = os.getenv(
"API_CERT_FILE",
None)
key_file = os.getenv(
"API_KEY_FILE",
None)
verbose = bool(str(ev(
"API_CLIENT_VERBOSE",
"1")).lower() == "1")
debug = bool(str(ev(
"API_CLIENT_DEBUG",
"0")).lower() == "1")
use_fake_rows = False
if args.use_fake_rows:
use_fake_rows = True
if args.datafile:
datafile = args.datafile
if args.ca_dir:
ca_dir = args.ca_dir
if args.cert_file:
cert_file = args.cert_file
if args.key_file:
key_file = args.key_file
if args.silent:
verbose = False
if args.debug:
debug = True
if verbose:
log.info("creating client")
client = build_ai_client_from_env(
ca_dir=ca_dir,
cert_file=cert_file,
key_file=key_file,
verbose=verbose,
debug=debug)
if verbose:
log.info(("loading request in datafile={}")
.format(
datafile))
# pass in full or partial prediction record dictionaries
# the generate_ai_request will fill in gaps with defaults
fake_rows_for_predicting = [
{
"tcp_seq": 1
},
{
"tcp_seq": 2
},
{
"tcp_seq": 3
},
{
"tcp_seq": 4
}
]
res_gen = None
if use_fake_rows:
res_gen = generate_ai_request(
predict_rows=fake_rows_for_predicting)
else:
req_with_org_rows = None
with open(datafile, "r") as f:
req_with_org_rows = json.loads(f.read())
res_gen = generate_ai_request(
predict_rows=req_with_org_rows["predict_rows"])
# end of sending mock data from this file or a file on disk
if res_gen["status"] != SUCCESS:
log.error(("failed generate_ai_request with error={}")
.format(
res_gen["error"]))
sys.exit(1)
req_body = res_gen["data"]
if verbose:
log.info("running job")
job_was_started = False
response = client.run_job(
body=req_body)
if response["status"] == SUCCESS:
log.info(("job started with response={}")
.format(
response["data"]))
job_was_started = True
elif response["status"] == FAILED:
log.error(("job failed with error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == ERROR:
log.error(("job had an error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == LOGIN_FAILED:
log.error(("job reported user was not able to log in "
"with an error='{}' with response={}")
.format(
response["error"],
response["data"]))
if not job_was_started:
sys.exit(1)
if debug:
log.info(("parsing response data={}")
.format(
response["data"]))
else:
if verbose:
log.info("parsing data")
res_data = response["data"]
job_data = res_data.get(
"job",
None)
result_data = res_data.get(
"results",
None)
if not job_data:
log.error(("missing job dictionary in response data={}")
.format(
response["data"]))
sys.exit(1)
if not result_data:
log.error(("missing results dictionary in response data={}")
.format(
response["data"]))
sys.exit(1)
job_id = job_data.get("id", None)
job_status = job_data.get("status", None)
result_id = result_data.get("id", None)
result_status = result_data.get("status", None)
log.info(("started job.id={} job.status={} with "
"result.id={} result.status={}")
.format(
job_id,
job_status,
result_id,
result_status))
job_results = client.wait_for_job_to_finish(
job_id=job_id)
if job_results["status"] != SUCCESS:
log.error(("failed waiting for job.id={} to finish error={} data={}")
.format(
job_id,
job_results["error"],
job_results["data"]))
sys.exit(1)
final_job = job_results["data"]["job"]
final_result = job_results["data"]["result"]
log.info(("job={}")
.format(
ppj(final_job)))
log.info(("result={}")
.format(
ppj(final_result)))
log.info(("job.id={} is done")
.format(
job_id))
predictions = final_result["predictions_json"].get(
"predictions",
[])
log.info(("loading predictions={} into pandas dataframe")
.format(
len(predictions)))
df = pd.DataFrame(predictions)
log.info(("dataframe={}")
.format(
df))
|
python
|
def start_predictions():
"""start_predictions
Using environment variables, create an AntiNex AI Client.
You can also use command line args if you want.
This can train a new deep neural network if it does not
exist or it can use an existing pre-trained deep neural
network within the AntiNex Core to make new predictions.
"""
parser = argparse.ArgumentParser(
description=(
"Python client to make Predictions "
"using a Pre-trained Deep Neural Network "
"with AntiNex Django Rest Framework"))
parser.add_argument(
"-f",
help=(
"file to use default ./examples/"
"predict-rows-scaler-full-django.json"),
required=False,
dest="datafile")
parser.add_argument(
"-m",
help="send mock data",
required=False,
dest="use_fake_rows",
action="store_true")
parser.add_argument(
"-b",
help=(
"optional - path to CA bundle directory for "
"client encryption over HTTP"),
required=False,
dest="ca_dir")
parser.add_argument(
"-c",
help=(
"optional - path to x509 certificate for "
"client encryption over HTTP"),
required=False,
dest="cert_file")
parser.add_argument(
"-k",
help=(
"optional - path to x509 key file for "
"client encryption over HTTP"),
required=False,
dest="key_file")
parser.add_argument(
"-s",
help="silent",
required=False,
dest="silent",
action="store_true")
parser.add_argument(
"-d",
help="debug",
required=False,
dest="debug",
action="store_true")
args = parser.parse_args()
datafile = ev(
"DATAFILE",
"./examples/predict-rows-scaler-full-django.json")
ca_dir = os.getenv(
"API_CA_BUNDLE_DIR",
None)
cert_file = os.getenv(
"API_CERT_FILE",
None)
key_file = os.getenv(
"API_KEY_FILE",
None)
verbose = bool(str(ev(
"API_CLIENT_VERBOSE",
"1")).lower() == "1")
debug = bool(str(ev(
"API_CLIENT_DEBUG",
"0")).lower() == "1")
use_fake_rows = False
if args.use_fake_rows:
use_fake_rows = True
if args.datafile:
datafile = args.datafile
if args.ca_dir:
ca_dir = args.ca_dir
if args.cert_file:
cert_file = args.cert_file
if args.key_file:
key_file = args.key_file
if args.silent:
verbose = False
if args.debug:
debug = True
if verbose:
log.info("creating client")
client = build_ai_client_from_env(
ca_dir=ca_dir,
cert_file=cert_file,
key_file=key_file,
verbose=verbose,
debug=debug)
if verbose:
log.info(("loading request in datafile={}")
.format(
datafile))
# pass in full or partial prediction record dictionaries
# the generate_ai_request will fill in gaps with defaults
fake_rows_for_predicting = [
{
"tcp_seq": 1
},
{
"tcp_seq": 2
},
{
"tcp_seq": 3
},
{
"tcp_seq": 4
}
]
res_gen = None
if use_fake_rows:
res_gen = generate_ai_request(
predict_rows=fake_rows_for_predicting)
else:
req_with_org_rows = None
with open(datafile, "r") as f:
req_with_org_rows = json.loads(f.read())
res_gen = generate_ai_request(
predict_rows=req_with_org_rows["predict_rows"])
# end of sending mock data from this file or a file on disk
if res_gen["status"] != SUCCESS:
log.error(("failed generate_ai_request with error={}")
.format(
res_gen["error"]))
sys.exit(1)
req_body = res_gen["data"]
if verbose:
log.info("running job")
job_was_started = False
response = client.run_job(
body=req_body)
if response["status"] == SUCCESS:
log.info(("job started with response={}")
.format(
response["data"]))
job_was_started = True
elif response["status"] == FAILED:
log.error(("job failed with error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == ERROR:
log.error(("job had an error='{}' with response={}")
.format(
response["error"],
response["data"]))
elif response["status"] == LOGIN_FAILED:
log.error(("job reported user was not able to log in "
"with an error='{}' with response={}")
.format(
response["error"],
response["data"]))
if not job_was_started:
sys.exit(1)
if debug:
log.info(("parsing response data={}")
.format(
response["data"]))
else:
if verbose:
log.info("parsing data")
res_data = response["data"]
job_data = res_data.get(
"job",
None)
result_data = res_data.get(
"results",
None)
if not job_data:
log.error(("missing job dictionary in response data={}")
.format(
response["data"]))
sys.exit(1)
if not result_data:
log.error(("missing results dictionary in response data={}")
.format(
response["data"]))
sys.exit(1)
job_id = job_data.get("id", None)
job_status = job_data.get("status", None)
result_id = result_data.get("id", None)
result_status = result_data.get("status", None)
log.info(("started job.id={} job.status={} with "
"result.id={} result.status={}")
.format(
job_id,
job_status,
result_id,
result_status))
job_results = client.wait_for_job_to_finish(
job_id=job_id)
if job_results["status"] != SUCCESS:
log.error(("failed waiting for job.id={} to finish error={} data={}")
.format(
job_id,
job_results["error"],
job_results["data"]))
sys.exit(1)
final_job = job_results["data"]["job"]
final_result = job_results["data"]["result"]
log.info(("job={}")
.format(
ppj(final_job)))
log.info(("result={}")
.format(
ppj(final_result)))
log.info(("job.id={} is done")
.format(
job_id))
predictions = final_result["predictions_json"].get(
"predictions",
[])
log.info(("loading predictions={} into pandas dataframe")
.format(
len(predictions)))
df = pd.DataFrame(predictions)
log.info(("dataframe={}")
.format(
df))
|
[
"def",
"start_predictions",
"(",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"(",
"\"Python client to make Predictions \"",
"\"using a Pre-trained Deep Neural Network \"",
"\"with AntiNex Django Rest Framework\"",
")",
")",
"parser",
".",
"add_argument",
"(",
"\"-f\"",
",",
"help",
"=",
"(",
"\"file to use default ./examples/\"",
"\"predict-rows-scaler-full-django.json\"",
")",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"datafile\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-m\"",
",",
"help",
"=",
"\"send mock data\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"use_fake_rows\"",
",",
"action",
"=",
"\"store_true\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-b\"",
",",
"help",
"=",
"(",
"\"optional - path to CA bundle directory for \"",
"\"client encryption over HTTP\"",
")",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"ca_dir\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-c\"",
",",
"help",
"=",
"(",
"\"optional - path to x509 certificate for \"",
"\"client encryption over HTTP\"",
")",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"cert_file\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-k\"",
",",
"help",
"=",
"(",
"\"optional - path to x509 key file for \"",
"\"client encryption over HTTP\"",
")",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"key_file\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-s\"",
",",
"help",
"=",
"\"silent\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"silent\"",
",",
"action",
"=",
"\"store_true\"",
")",
"parser",
".",
"add_argument",
"(",
"\"-d\"",
",",
"help",
"=",
"\"debug\"",
",",
"required",
"=",
"False",
",",
"dest",
"=",
"\"debug\"",
",",
"action",
"=",
"\"store_true\"",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"datafile",
"=",
"ev",
"(",
"\"DATAFILE\"",
",",
"\"./examples/predict-rows-scaler-full-django.json\"",
")",
"ca_dir",
"=",
"os",
".",
"getenv",
"(",
"\"API_CA_BUNDLE_DIR\"",
",",
"None",
")",
"cert_file",
"=",
"os",
".",
"getenv",
"(",
"\"API_CERT_FILE\"",
",",
"None",
")",
"key_file",
"=",
"os",
".",
"getenv",
"(",
"\"API_KEY_FILE\"",
",",
"None",
")",
"verbose",
"=",
"bool",
"(",
"str",
"(",
"ev",
"(",
"\"API_CLIENT_VERBOSE\"",
",",
"\"1\"",
")",
")",
".",
"lower",
"(",
")",
"==",
"\"1\"",
")",
"debug",
"=",
"bool",
"(",
"str",
"(",
"ev",
"(",
"\"API_CLIENT_DEBUG\"",
",",
"\"0\"",
")",
")",
".",
"lower",
"(",
")",
"==",
"\"1\"",
")",
"use_fake_rows",
"=",
"False",
"if",
"args",
".",
"use_fake_rows",
":",
"use_fake_rows",
"=",
"True",
"if",
"args",
".",
"datafile",
":",
"datafile",
"=",
"args",
".",
"datafile",
"if",
"args",
".",
"ca_dir",
":",
"ca_dir",
"=",
"args",
".",
"ca_dir",
"if",
"args",
".",
"cert_file",
":",
"cert_file",
"=",
"args",
".",
"cert_file",
"if",
"args",
".",
"key_file",
":",
"key_file",
"=",
"args",
".",
"key_file",
"if",
"args",
".",
"silent",
":",
"verbose",
"=",
"False",
"if",
"args",
".",
"debug",
":",
"debug",
"=",
"True",
"if",
"verbose",
":",
"log",
".",
"info",
"(",
"\"creating client\"",
")",
"client",
"=",
"build_ai_client_from_env",
"(",
"ca_dir",
"=",
"ca_dir",
",",
"cert_file",
"=",
"cert_file",
",",
"key_file",
"=",
"key_file",
",",
"verbose",
"=",
"verbose",
",",
"debug",
"=",
"debug",
")",
"if",
"verbose",
":",
"log",
".",
"info",
"(",
"(",
"\"loading request in datafile={}\"",
")",
".",
"format",
"(",
"datafile",
")",
")",
"# pass in full or partial prediction record dictionaries",
"# the generate_ai_request will fill in gaps with defaults",
"fake_rows_for_predicting",
"=",
"[",
"{",
"\"tcp_seq\"",
":",
"1",
"}",
",",
"{",
"\"tcp_seq\"",
":",
"2",
"}",
",",
"{",
"\"tcp_seq\"",
":",
"3",
"}",
",",
"{",
"\"tcp_seq\"",
":",
"4",
"}",
"]",
"res_gen",
"=",
"None",
"if",
"use_fake_rows",
":",
"res_gen",
"=",
"generate_ai_request",
"(",
"predict_rows",
"=",
"fake_rows_for_predicting",
")",
"else",
":",
"req_with_org_rows",
"=",
"None",
"with",
"open",
"(",
"datafile",
",",
"\"r\"",
")",
"as",
"f",
":",
"req_with_org_rows",
"=",
"json",
".",
"loads",
"(",
"f",
".",
"read",
"(",
")",
")",
"res_gen",
"=",
"generate_ai_request",
"(",
"predict_rows",
"=",
"req_with_org_rows",
"[",
"\"predict_rows\"",
"]",
")",
"# end of sending mock data from this file or a file on disk",
"if",
"res_gen",
"[",
"\"status\"",
"]",
"!=",
"SUCCESS",
":",
"log",
".",
"error",
"(",
"(",
"\"failed generate_ai_request with error={}\"",
")",
".",
"format",
"(",
"res_gen",
"[",
"\"error\"",
"]",
")",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"req_body",
"=",
"res_gen",
"[",
"\"data\"",
"]",
"if",
"verbose",
":",
"log",
".",
"info",
"(",
"\"running job\"",
")",
"job_was_started",
"=",
"False",
"response",
"=",
"client",
".",
"run_job",
"(",
"body",
"=",
"req_body",
")",
"if",
"response",
"[",
"\"status\"",
"]",
"==",
"SUCCESS",
":",
"log",
".",
"info",
"(",
"(",
"\"job started with response={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"data\"",
"]",
")",
")",
"job_was_started",
"=",
"True",
"elif",
"response",
"[",
"\"status\"",
"]",
"==",
"FAILED",
":",
"log",
".",
"error",
"(",
"(",
"\"job failed with error='{}' with response={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"error\"",
"]",
",",
"response",
"[",
"\"data\"",
"]",
")",
")",
"elif",
"response",
"[",
"\"status\"",
"]",
"==",
"ERROR",
":",
"log",
".",
"error",
"(",
"(",
"\"job had an error='{}' with response={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"error\"",
"]",
",",
"response",
"[",
"\"data\"",
"]",
")",
")",
"elif",
"response",
"[",
"\"status\"",
"]",
"==",
"LOGIN_FAILED",
":",
"log",
".",
"error",
"(",
"(",
"\"job reported user was not able to log in \"",
"\"with an error='{}' with response={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"error\"",
"]",
",",
"response",
"[",
"\"data\"",
"]",
")",
")",
"if",
"not",
"job_was_started",
":",
"sys",
".",
"exit",
"(",
"1",
")",
"if",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"parsing response data={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"data\"",
"]",
")",
")",
"else",
":",
"if",
"verbose",
":",
"log",
".",
"info",
"(",
"\"parsing data\"",
")",
"res_data",
"=",
"response",
"[",
"\"data\"",
"]",
"job_data",
"=",
"res_data",
".",
"get",
"(",
"\"job\"",
",",
"None",
")",
"result_data",
"=",
"res_data",
".",
"get",
"(",
"\"results\"",
",",
"None",
")",
"if",
"not",
"job_data",
":",
"log",
".",
"error",
"(",
"(",
"\"missing job dictionary in response data={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"data\"",
"]",
")",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"if",
"not",
"result_data",
":",
"log",
".",
"error",
"(",
"(",
"\"missing results dictionary in response data={}\"",
")",
".",
"format",
"(",
"response",
"[",
"\"data\"",
"]",
")",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"job_id",
"=",
"job_data",
".",
"get",
"(",
"\"id\"",
",",
"None",
")",
"job_status",
"=",
"job_data",
".",
"get",
"(",
"\"status\"",
",",
"None",
")",
"result_id",
"=",
"result_data",
".",
"get",
"(",
"\"id\"",
",",
"None",
")",
"result_status",
"=",
"result_data",
".",
"get",
"(",
"\"status\"",
",",
"None",
")",
"log",
".",
"info",
"(",
"(",
"\"started job.id={} job.status={} with \"",
"\"result.id={} result.status={}\"",
")",
".",
"format",
"(",
"job_id",
",",
"job_status",
",",
"result_id",
",",
"result_status",
")",
")",
"job_results",
"=",
"client",
".",
"wait_for_job_to_finish",
"(",
"job_id",
"=",
"job_id",
")",
"if",
"job_results",
"[",
"\"status\"",
"]",
"!=",
"SUCCESS",
":",
"log",
".",
"error",
"(",
"(",
"\"failed waiting for job.id={} to finish error={} data={}\"",
")",
".",
"format",
"(",
"job_id",
",",
"job_results",
"[",
"\"error\"",
"]",
",",
"job_results",
"[",
"\"data\"",
"]",
")",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"final_job",
"=",
"job_results",
"[",
"\"data\"",
"]",
"[",
"\"job\"",
"]",
"final_result",
"=",
"job_results",
"[",
"\"data\"",
"]",
"[",
"\"result\"",
"]",
"log",
".",
"info",
"(",
"(",
"\"job={}\"",
")",
".",
"format",
"(",
"ppj",
"(",
"final_job",
")",
")",
")",
"log",
".",
"info",
"(",
"(",
"\"result={}\"",
")",
".",
"format",
"(",
"ppj",
"(",
"final_result",
")",
")",
")",
"log",
".",
"info",
"(",
"(",
"\"job.id={} is done\"",
")",
".",
"format",
"(",
"job_id",
")",
")",
"predictions",
"=",
"final_result",
"[",
"\"predictions_json\"",
"]",
".",
"get",
"(",
"\"predictions\"",
",",
"[",
"]",
")",
"log",
".",
"info",
"(",
"(",
"\"loading predictions={} into pandas dataframe\"",
")",
".",
"format",
"(",
"len",
"(",
"predictions",
")",
")",
")",
"df",
"=",
"pd",
".",
"DataFrame",
"(",
"predictions",
")",
"log",
".",
"info",
"(",
"(",
"\"dataframe={}\"",
")",
".",
"format",
"(",
"df",
")",
")"
] |
start_predictions
Using environment variables, create an AntiNex AI Client.
You can also use command line args if you want.
This can train a new deep neural network if it does not
exist or it can use an existing pre-trained deep neural
network within the AntiNex Core to make new predictions.
|
[
"start_predictions"
] |
850ba2a2fe21c836e071def618dcecc9caf5d59c
|
https://github.com/jay-johnson/antinex-client/blob/850ba2a2fe21c836e071def618dcecc9caf5d59c/antinex_client/scripts/ai_env_predict.py#L23-L288
|
train
|
jay-johnson/antinex-client
|
antinex_client/ai_client.py
|
AIClient.login
|
def login(
self):
"""login"""
auth_url = self.api_urls["login"]
if self.verbose:
log.info(("log in user={} url={} ca_dir={} cert={}")
.format(
self.user,
auth_url,
self.ca_dir,
self.cert))
use_headers = {
"Content-type": "application/json"
}
login_data = {
"username": self.user,
"password": self.password
}
if self.debug:
log.info((
"LOGIN with body={} headers={} url={} "
"verify={} cert={}").format(
login_data,
use_headers,
auth_url,
self.use_verify,
self.cert))
response = requests.post(
auth_url,
verify=self.use_verify,
cert=self.cert,
data=json.dumps(login_data),
headers=use_headers)
if self.debug:
log.info(("LOGIN response status_code={} text={} reason={}")
.format(
response.status_code,
response.text,
response.reason))
user_token = ""
if response.status_code == 200:
user_token = json.loads(response.text)["token"]
if user_token != "":
self.token = user_token
self.login_status = LOGIN_SUCCESS
if self.verbose:
log.debug("login success")
else:
log.error(("failed to login user={} to url={} text={}")
.format(
self.user,
auth_url,
response.text))
self.login_status = LOGIN_FAILED
# if the user token exists
return self.login_status
|
python
|
def login(
self):
"""login"""
auth_url = self.api_urls["login"]
if self.verbose:
log.info(("log in user={} url={} ca_dir={} cert={}")
.format(
self.user,
auth_url,
self.ca_dir,
self.cert))
use_headers = {
"Content-type": "application/json"
}
login_data = {
"username": self.user,
"password": self.password
}
if self.debug:
log.info((
"LOGIN with body={} headers={} url={} "
"verify={} cert={}").format(
login_data,
use_headers,
auth_url,
self.use_verify,
self.cert))
response = requests.post(
auth_url,
verify=self.use_verify,
cert=self.cert,
data=json.dumps(login_data),
headers=use_headers)
if self.debug:
log.info(("LOGIN response status_code={} text={} reason={}")
.format(
response.status_code,
response.text,
response.reason))
user_token = ""
if response.status_code == 200:
user_token = json.loads(response.text)["token"]
if user_token != "":
self.token = user_token
self.login_status = LOGIN_SUCCESS
if self.verbose:
log.debug("login success")
else:
log.error(("failed to login user={} to url={} text={}")
.format(
self.user,
auth_url,
response.text))
self.login_status = LOGIN_FAILED
# if the user token exists
return self.login_status
|
[
"def",
"login",
"(",
"self",
")",
":",
"auth_url",
"=",
"self",
".",
"api_urls",
"[",
"\"login\"",
"]",
"if",
"self",
".",
"verbose",
":",
"log",
".",
"info",
"(",
"(",
"\"log in user={} url={} ca_dir={} cert={}\"",
")",
".",
"format",
"(",
"self",
".",
"user",
",",
"auth_url",
",",
"self",
".",
"ca_dir",
",",
"self",
".",
"cert",
")",
")",
"use_headers",
"=",
"{",
"\"Content-type\"",
":",
"\"application/json\"",
"}",
"login_data",
"=",
"{",
"\"username\"",
":",
"self",
".",
"user",
",",
"\"password\"",
":",
"self",
".",
"password",
"}",
"if",
"self",
".",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"LOGIN with body={} headers={} url={} \"",
"\"verify={} cert={}\"",
")",
".",
"format",
"(",
"login_data",
",",
"use_headers",
",",
"auth_url",
",",
"self",
".",
"use_verify",
",",
"self",
".",
"cert",
")",
")",
"response",
"=",
"requests",
".",
"post",
"(",
"auth_url",
",",
"verify",
"=",
"self",
".",
"use_verify",
",",
"cert",
"=",
"self",
".",
"cert",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"login_data",
")",
",",
"headers",
"=",
"use_headers",
")",
"if",
"self",
".",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"LOGIN response status_code={} text={} reason={}\"",
")",
".",
"format",
"(",
"response",
".",
"status_code",
",",
"response",
".",
"text",
",",
"response",
".",
"reason",
")",
")",
"user_token",
"=",
"\"\"",
"if",
"response",
".",
"status_code",
"==",
"200",
":",
"user_token",
"=",
"json",
".",
"loads",
"(",
"response",
".",
"text",
")",
"[",
"\"token\"",
"]",
"if",
"user_token",
"!=",
"\"\"",
":",
"self",
".",
"token",
"=",
"user_token",
"self",
".",
"login_status",
"=",
"LOGIN_SUCCESS",
"if",
"self",
".",
"verbose",
":",
"log",
".",
"debug",
"(",
"\"login success\"",
")",
"else",
":",
"log",
".",
"error",
"(",
"(",
"\"failed to login user={} to url={} text={}\"",
")",
".",
"format",
"(",
"self",
".",
"user",
",",
"auth_url",
",",
"response",
".",
"text",
")",
")",
"self",
".",
"login_status",
"=",
"LOGIN_FAILED",
"# if the user token exists",
"return",
"self",
".",
"login_status"
] |
login
|
[
"login"
] |
850ba2a2fe21c836e071def618dcecc9caf5d59c
|
https://github.com/jay-johnson/antinex-client/blob/850ba2a2fe21c836e071def618dcecc9caf5d59c/antinex_client/ai_client.py#L121-L186
|
train
|
jay-johnson/antinex-client
|
antinex_client/ai_client.py
|
AIClient.build_response
|
def build_response(
self,
status=NOT_SET,
error="",
data=None):
"""build_response
:param status: status code
:param error: error message
:param data: dictionary to send back
"""
res_node = {
"status": status,
"error": error,
"data": data
}
return res_node
|
python
|
def build_response(
self,
status=NOT_SET,
error="",
data=None):
"""build_response
:param status: status code
:param error: error message
:param data: dictionary to send back
"""
res_node = {
"status": status,
"error": error,
"data": data
}
return res_node
|
[
"def",
"build_response",
"(",
"self",
",",
"status",
"=",
"NOT_SET",
",",
"error",
"=",
"\"\"",
",",
"data",
"=",
"None",
")",
":",
"res_node",
"=",
"{",
"\"status\"",
":",
"status",
",",
"\"error\"",
":",
"error",
",",
"\"data\"",
":",
"data",
"}",
"return",
"res_node"
] |
build_response
:param status: status code
:param error: error message
:param data: dictionary to send back
|
[
"build_response"
] |
850ba2a2fe21c836e071def618dcecc9caf5d59c
|
https://github.com/jay-johnson/antinex-client/blob/850ba2a2fe21c836e071def618dcecc9caf5d59c/antinex_client/ai_client.py#L211-L228
|
train
|
jay-johnson/antinex-client
|
antinex_client/ai_client.py
|
AIClient.retry_login
|
def retry_login(
self):
"""retry_login"""
if not self.user or not self.password:
return self.build_response(
status=ERROR,
error="please set the user and password")
retry = 0
not_done = True
while not_done:
if self.is_logged_in():
return self.build_response(
status=SUCCESS)
else:
if self.verbose:
log.debug(("login attempt={} max={}")
.format(
retry,
self.max_retries))
if self.login() == LOGIN_SUCCESS:
return self.build_response(
status=SUCCESS)
else:
time.sleep(
self.login_retry_wait_time)
# if able to login or not
retry += 1
if retry > self.max_retries:
return self.build_response(
status=ERROR,
error="failed logging in user={} retries={}".format(
self.user,
self.max_retries))
# if login worked or not
return self.build_response(
status=FAILED,
error="user={} not able to login attempts={}".format(
self.user,
retry))
|
python
|
def retry_login(
self):
"""retry_login"""
if not self.user or not self.password:
return self.build_response(
status=ERROR,
error="please set the user and password")
retry = 0
not_done = True
while not_done:
if self.is_logged_in():
return self.build_response(
status=SUCCESS)
else:
if self.verbose:
log.debug(("login attempt={} max={}")
.format(
retry,
self.max_retries))
if self.login() == LOGIN_SUCCESS:
return self.build_response(
status=SUCCESS)
else:
time.sleep(
self.login_retry_wait_time)
# if able to login or not
retry += 1
if retry > self.max_retries:
return self.build_response(
status=ERROR,
error="failed logging in user={} retries={}".format(
self.user,
self.max_retries))
# if login worked or not
return self.build_response(
status=FAILED,
error="user={} not able to login attempts={}".format(
self.user,
retry))
|
[
"def",
"retry_login",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"user",
"or",
"not",
"self",
".",
"password",
":",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"ERROR",
",",
"error",
"=",
"\"please set the user and password\"",
")",
"retry",
"=",
"0",
"not_done",
"=",
"True",
"while",
"not_done",
":",
"if",
"self",
".",
"is_logged_in",
"(",
")",
":",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"SUCCESS",
")",
"else",
":",
"if",
"self",
".",
"verbose",
":",
"log",
".",
"debug",
"(",
"(",
"\"login attempt={} max={}\"",
")",
".",
"format",
"(",
"retry",
",",
"self",
".",
"max_retries",
")",
")",
"if",
"self",
".",
"login",
"(",
")",
"==",
"LOGIN_SUCCESS",
":",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"SUCCESS",
")",
"else",
":",
"time",
".",
"sleep",
"(",
"self",
".",
"login_retry_wait_time",
")",
"# if able to login or not",
"retry",
"+=",
"1",
"if",
"retry",
">",
"self",
".",
"max_retries",
":",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"ERROR",
",",
"error",
"=",
"\"failed logging in user={} retries={}\"",
".",
"format",
"(",
"self",
".",
"user",
",",
"self",
".",
"max_retries",
")",
")",
"# if login worked or not",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"FAILED",
",",
"error",
"=",
"\"user={} not able to login attempts={}\"",
".",
"format",
"(",
"self",
".",
"user",
",",
"retry",
")",
")"
] |
retry_login
|
[
"retry_login"
] |
850ba2a2fe21c836e071def618dcecc9caf5d59c
|
https://github.com/jay-johnson/antinex-client/blob/850ba2a2fe21c836e071def618dcecc9caf5d59c/antinex_client/ai_client.py#L231-L273
|
train
|
jay-johnson/antinex-client
|
antinex_client/ai_client.py
|
AIClient.get_prepare_by_id
|
def get_prepare_by_id(
self,
prepare_id=None):
"""get_prepare_by_id
:param prepare_id: MLJob.id in the database
"""
if not prepare_id:
log.error("missing prepare_id for get_prepare_by_id")
return self.build_response(
status=ERROR,
error="missing prepare_id for get_prepare_by_id")
if self.debug:
log.info(("user={} getting prepare={}")
.format(
self.user,
prepare_id))
url = "{}{}".format(
self.api_urls["prepare"],
prepare_id)
not_done = True
while not_done:
if self.debug:
log.info((
"JOB attempting to get={} to url={} "
"verify={} cert={}").format(
prepare_id,
url,
self.use_verify,
self.cert))
response = requests.get(
url,
verify=self.use_verify,
cert=self.cert,
headers=self.get_auth_header())
if self.debug:
log.info(("JOB response status_code={} text={} reason={}")
.format(
response.status_code,
response.text,
response.reason))
if response.status_code == 401:
login_res = self.retry_login()
if login_res["status"] != SUCCESS:
if self.verbose:
log.error(
"retry login attempts failed")
return self.build_response(
status=login_res["status"],
error=login_res["error"])
# if able to log back in just retry the call
elif response.status_code == 200:
if self.verbose:
log.debug("deserializing")
prepare_data = json.loads(
response.text)
prepare_id = prepare_data.get(
"id",
None)
if not prepare_id:
return self.build_response(
status=ERROR,
error="missing prepare.id",
data="text={} reason={}".format(
response.reason,
response.text))
self.all_prepares[str(prepare_id)] = prepare_data
if self.debug:
log.info(("added prepare={} all_prepares={}")
.format(
prepare_id,
len(self.all_prepares)))
return self.build_response(
status=SUCCESS,
error="",
data=prepare_data)
else:
err_msg = ("failed with "
"status_code={} text={} reason={}").format(
response.status_code,
response.text,
response.reason)
if self.verbose:
log.error(err_msg)
return self.build_response(
status=ERROR,
error=err_msg)
|
python
|
def get_prepare_by_id(
self,
prepare_id=None):
"""get_prepare_by_id
:param prepare_id: MLJob.id in the database
"""
if not prepare_id:
log.error("missing prepare_id for get_prepare_by_id")
return self.build_response(
status=ERROR,
error="missing prepare_id for get_prepare_by_id")
if self.debug:
log.info(("user={} getting prepare={}")
.format(
self.user,
prepare_id))
url = "{}{}".format(
self.api_urls["prepare"],
prepare_id)
not_done = True
while not_done:
if self.debug:
log.info((
"JOB attempting to get={} to url={} "
"verify={} cert={}").format(
prepare_id,
url,
self.use_verify,
self.cert))
response = requests.get(
url,
verify=self.use_verify,
cert=self.cert,
headers=self.get_auth_header())
if self.debug:
log.info(("JOB response status_code={} text={} reason={}")
.format(
response.status_code,
response.text,
response.reason))
if response.status_code == 401:
login_res = self.retry_login()
if login_res["status"] != SUCCESS:
if self.verbose:
log.error(
"retry login attempts failed")
return self.build_response(
status=login_res["status"],
error=login_res["error"])
# if able to log back in just retry the call
elif response.status_code == 200:
if self.verbose:
log.debug("deserializing")
prepare_data = json.loads(
response.text)
prepare_id = prepare_data.get(
"id",
None)
if not prepare_id:
return self.build_response(
status=ERROR,
error="missing prepare.id",
data="text={} reason={}".format(
response.reason,
response.text))
self.all_prepares[str(prepare_id)] = prepare_data
if self.debug:
log.info(("added prepare={} all_prepares={}")
.format(
prepare_id,
len(self.all_prepares)))
return self.build_response(
status=SUCCESS,
error="",
data=prepare_data)
else:
err_msg = ("failed with "
"status_code={} text={} reason={}").format(
response.status_code,
response.text,
response.reason)
if self.verbose:
log.error(err_msg)
return self.build_response(
status=ERROR,
error=err_msg)
|
[
"def",
"get_prepare_by_id",
"(",
"self",
",",
"prepare_id",
"=",
"None",
")",
":",
"if",
"not",
"prepare_id",
":",
"log",
".",
"error",
"(",
"\"missing prepare_id for get_prepare_by_id\"",
")",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"ERROR",
",",
"error",
"=",
"\"missing prepare_id for get_prepare_by_id\"",
")",
"if",
"self",
".",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"user={} getting prepare={}\"",
")",
".",
"format",
"(",
"self",
".",
"user",
",",
"prepare_id",
")",
")",
"url",
"=",
"\"{}{}\"",
".",
"format",
"(",
"self",
".",
"api_urls",
"[",
"\"prepare\"",
"]",
",",
"prepare_id",
")",
"not_done",
"=",
"True",
"while",
"not_done",
":",
"if",
"self",
".",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"JOB attempting to get={} to url={} \"",
"\"verify={} cert={}\"",
")",
".",
"format",
"(",
"prepare_id",
",",
"url",
",",
"self",
".",
"use_verify",
",",
"self",
".",
"cert",
")",
")",
"response",
"=",
"requests",
".",
"get",
"(",
"url",
",",
"verify",
"=",
"self",
".",
"use_verify",
",",
"cert",
"=",
"self",
".",
"cert",
",",
"headers",
"=",
"self",
".",
"get_auth_header",
"(",
")",
")",
"if",
"self",
".",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"JOB response status_code={} text={} reason={}\"",
")",
".",
"format",
"(",
"response",
".",
"status_code",
",",
"response",
".",
"text",
",",
"response",
".",
"reason",
")",
")",
"if",
"response",
".",
"status_code",
"==",
"401",
":",
"login_res",
"=",
"self",
".",
"retry_login",
"(",
")",
"if",
"login_res",
"[",
"\"status\"",
"]",
"!=",
"SUCCESS",
":",
"if",
"self",
".",
"verbose",
":",
"log",
".",
"error",
"(",
"\"retry login attempts failed\"",
")",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"login_res",
"[",
"\"status\"",
"]",
",",
"error",
"=",
"login_res",
"[",
"\"error\"",
"]",
")",
"# if able to log back in just retry the call",
"elif",
"response",
".",
"status_code",
"==",
"200",
":",
"if",
"self",
".",
"verbose",
":",
"log",
".",
"debug",
"(",
"\"deserializing\"",
")",
"prepare_data",
"=",
"json",
".",
"loads",
"(",
"response",
".",
"text",
")",
"prepare_id",
"=",
"prepare_data",
".",
"get",
"(",
"\"id\"",
",",
"None",
")",
"if",
"not",
"prepare_id",
":",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"ERROR",
",",
"error",
"=",
"\"missing prepare.id\"",
",",
"data",
"=",
"\"text={} reason={}\"",
".",
"format",
"(",
"response",
".",
"reason",
",",
"response",
".",
"text",
")",
")",
"self",
".",
"all_prepares",
"[",
"str",
"(",
"prepare_id",
")",
"]",
"=",
"prepare_data",
"if",
"self",
".",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"added prepare={} all_prepares={}\"",
")",
".",
"format",
"(",
"prepare_id",
",",
"len",
"(",
"self",
".",
"all_prepares",
")",
")",
")",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"SUCCESS",
",",
"error",
"=",
"\"\"",
",",
"data",
"=",
"prepare_data",
")",
"else",
":",
"err_msg",
"=",
"(",
"\"failed with \"",
"\"status_code={} text={} reason={}\"",
")",
".",
"format",
"(",
"response",
".",
"status_code",
",",
"response",
".",
"text",
",",
"response",
".",
"reason",
")",
"if",
"self",
".",
"verbose",
":",
"log",
".",
"error",
"(",
"err_msg",
")",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"ERROR",
",",
"error",
"=",
"err_msg",
")"
] |
get_prepare_by_id
:param prepare_id: MLJob.id in the database
|
[
"get_prepare_by_id"
] |
850ba2a2fe21c836e071def618dcecc9caf5d59c
|
https://github.com/jay-johnson/antinex-client/blob/850ba2a2fe21c836e071def618dcecc9caf5d59c/antinex_client/ai_client.py#L276-L377
|
train
|
jay-johnson/antinex-client
|
antinex_client/ai_client.py
|
AIClient.wait_for_job_to_finish
|
def wait_for_job_to_finish(
self,
job_id,
sec_to_sleep=5.0,
max_retries=100000):
"""wait_for_job_to_finish
:param job_id: MLJob.id to wait on
:param sec_to_sleep: seconds to sleep during polling
:param max_retries: max retires until stopping
"""
not_done = True
retry_attempt = 1
while not_done:
if self.debug:
log.info(("JOBSTATUS getting job.id={} details")
.format(
job_id))
response = self.get_job_by_id(job_id)
if self.debug:
log.info(("JOBSTATUS got job.id={} response={}")
.format(
job_id,
response))
if response["status"] != SUCCESS:
log.error(("JOBSTATUS failed to get job.id={} with error={}")
.format(
job_id,
response["error"]))
return self.build_response(
status=ERROR,
error=response["error"],
data=response["data"])
# stop if this failed getting the job details
job_data = response.get(
"data",
None)
if not job_data:
return self.build_response(
status=ERROR,
error="failed to find job dictionary in response",
data=response["data"])
job_status = job_data["status"]
if job_status == "finished" \
or job_status == "completed" \
or job_status == "launched":
if self.debug:
log.info(("job.id={} is done with status={}")
.format(
job_id,
job_status))
result_id = job_data["predict_manifest"]["result_id"]
if self.debug:
log.info(("JOBRESULT getting result.id={} details")
.format(
result_id))
response = self.get_result_by_id(result_id)
if self.debug:
log.info(("JOBRESULT got result.id={} response={}")
.format(
result_id,
response))
if response["status"] != SUCCESS:
log.error(("JOBRESULT failed to get "
"result.id={} with error={}")
.format(
result_id,
response["error"]))
return self.build_response(
status=ERROR,
error=response["error"],
data=response["data"])
# stop if this failed getting the result details
result_data = response.get(
"data",
None)
if result_data["status"] == "finished":
full_response = {
"job": job_data,
"result": result_data
}
not_done = False
return self.build_response(
status=SUCCESS,
error="",
data=full_response)
else:
if retry_attempt % 100 == 0:
if self.verbose:
log.info(("result_id={} are not done retry={}")
.format(
result_id,
retry_attempt))
retry_attempt += 1
if retry_attempt > max_retries:
err_msg = ("failed waiting "
"for job.id={} result.id={} "
"to finish").format(
job_id,
result_id)
log.error(err_msg)
return self.build_response(
status=ERROR,
error=err_msg)
else:
time.sleep(sec_to_sleep)
# wait while results are written to the db
else:
retry_attempt += 1
if retry_attempt > max_retries:
err_msg = ("failed waiting "
"for job.id={} to finish").format(
job_id)
log.error(err_msg)
return self.build_response(
status=ERROR,
error=err_msg)
else:
if self.verbose:
if retry_attempt % 100 == 0:
log.info(("waiting on job.id={} retry={}")
.format(
job_id,
retry_attempt))
# if logging just to show this is running
time.sleep(sec_to_sleep)
|
python
|
def wait_for_job_to_finish(
self,
job_id,
sec_to_sleep=5.0,
max_retries=100000):
"""wait_for_job_to_finish
:param job_id: MLJob.id to wait on
:param sec_to_sleep: seconds to sleep during polling
:param max_retries: max retires until stopping
"""
not_done = True
retry_attempt = 1
while not_done:
if self.debug:
log.info(("JOBSTATUS getting job.id={} details")
.format(
job_id))
response = self.get_job_by_id(job_id)
if self.debug:
log.info(("JOBSTATUS got job.id={} response={}")
.format(
job_id,
response))
if response["status"] != SUCCESS:
log.error(("JOBSTATUS failed to get job.id={} with error={}")
.format(
job_id,
response["error"]))
return self.build_response(
status=ERROR,
error=response["error"],
data=response["data"])
# stop if this failed getting the job details
job_data = response.get(
"data",
None)
if not job_data:
return self.build_response(
status=ERROR,
error="failed to find job dictionary in response",
data=response["data"])
job_status = job_data["status"]
if job_status == "finished" \
or job_status == "completed" \
or job_status == "launched":
if self.debug:
log.info(("job.id={} is done with status={}")
.format(
job_id,
job_status))
result_id = job_data["predict_manifest"]["result_id"]
if self.debug:
log.info(("JOBRESULT getting result.id={} details")
.format(
result_id))
response = self.get_result_by_id(result_id)
if self.debug:
log.info(("JOBRESULT got result.id={} response={}")
.format(
result_id,
response))
if response["status"] != SUCCESS:
log.error(("JOBRESULT failed to get "
"result.id={} with error={}")
.format(
result_id,
response["error"]))
return self.build_response(
status=ERROR,
error=response["error"],
data=response["data"])
# stop if this failed getting the result details
result_data = response.get(
"data",
None)
if result_data["status"] == "finished":
full_response = {
"job": job_data,
"result": result_data
}
not_done = False
return self.build_response(
status=SUCCESS,
error="",
data=full_response)
else:
if retry_attempt % 100 == 0:
if self.verbose:
log.info(("result_id={} are not done retry={}")
.format(
result_id,
retry_attempt))
retry_attempt += 1
if retry_attempt > max_retries:
err_msg = ("failed waiting "
"for job.id={} result.id={} "
"to finish").format(
job_id,
result_id)
log.error(err_msg)
return self.build_response(
status=ERROR,
error=err_msg)
else:
time.sleep(sec_to_sleep)
# wait while results are written to the db
else:
retry_attempt += 1
if retry_attempt > max_retries:
err_msg = ("failed waiting "
"for job.id={} to finish").format(
job_id)
log.error(err_msg)
return self.build_response(
status=ERROR,
error=err_msg)
else:
if self.verbose:
if retry_attempt % 100 == 0:
log.info(("waiting on job.id={} retry={}")
.format(
job_id,
retry_attempt))
# if logging just to show this is running
time.sleep(sec_to_sleep)
|
[
"def",
"wait_for_job_to_finish",
"(",
"self",
",",
"job_id",
",",
"sec_to_sleep",
"=",
"5.0",
",",
"max_retries",
"=",
"100000",
")",
":",
"not_done",
"=",
"True",
"retry_attempt",
"=",
"1",
"while",
"not_done",
":",
"if",
"self",
".",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"JOBSTATUS getting job.id={} details\"",
")",
".",
"format",
"(",
"job_id",
")",
")",
"response",
"=",
"self",
".",
"get_job_by_id",
"(",
"job_id",
")",
"if",
"self",
".",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"JOBSTATUS got job.id={} response={}\"",
")",
".",
"format",
"(",
"job_id",
",",
"response",
")",
")",
"if",
"response",
"[",
"\"status\"",
"]",
"!=",
"SUCCESS",
":",
"log",
".",
"error",
"(",
"(",
"\"JOBSTATUS failed to get job.id={} with error={}\"",
")",
".",
"format",
"(",
"job_id",
",",
"response",
"[",
"\"error\"",
"]",
")",
")",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"ERROR",
",",
"error",
"=",
"response",
"[",
"\"error\"",
"]",
",",
"data",
"=",
"response",
"[",
"\"data\"",
"]",
")",
"# stop if this failed getting the job details",
"job_data",
"=",
"response",
".",
"get",
"(",
"\"data\"",
",",
"None",
")",
"if",
"not",
"job_data",
":",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"ERROR",
",",
"error",
"=",
"\"failed to find job dictionary in response\"",
",",
"data",
"=",
"response",
"[",
"\"data\"",
"]",
")",
"job_status",
"=",
"job_data",
"[",
"\"status\"",
"]",
"if",
"job_status",
"==",
"\"finished\"",
"or",
"job_status",
"==",
"\"completed\"",
"or",
"job_status",
"==",
"\"launched\"",
":",
"if",
"self",
".",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"job.id={} is done with status={}\"",
")",
".",
"format",
"(",
"job_id",
",",
"job_status",
")",
")",
"result_id",
"=",
"job_data",
"[",
"\"predict_manifest\"",
"]",
"[",
"\"result_id\"",
"]",
"if",
"self",
".",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"JOBRESULT getting result.id={} details\"",
")",
".",
"format",
"(",
"result_id",
")",
")",
"response",
"=",
"self",
".",
"get_result_by_id",
"(",
"result_id",
")",
"if",
"self",
".",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"JOBRESULT got result.id={} response={}\"",
")",
".",
"format",
"(",
"result_id",
",",
"response",
")",
")",
"if",
"response",
"[",
"\"status\"",
"]",
"!=",
"SUCCESS",
":",
"log",
".",
"error",
"(",
"(",
"\"JOBRESULT failed to get \"",
"\"result.id={} with error={}\"",
")",
".",
"format",
"(",
"result_id",
",",
"response",
"[",
"\"error\"",
"]",
")",
")",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"ERROR",
",",
"error",
"=",
"response",
"[",
"\"error\"",
"]",
",",
"data",
"=",
"response",
"[",
"\"data\"",
"]",
")",
"# stop if this failed getting the result details",
"result_data",
"=",
"response",
".",
"get",
"(",
"\"data\"",
",",
"None",
")",
"if",
"result_data",
"[",
"\"status\"",
"]",
"==",
"\"finished\"",
":",
"full_response",
"=",
"{",
"\"job\"",
":",
"job_data",
",",
"\"result\"",
":",
"result_data",
"}",
"not_done",
"=",
"False",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"SUCCESS",
",",
"error",
"=",
"\"\"",
",",
"data",
"=",
"full_response",
")",
"else",
":",
"if",
"retry_attempt",
"%",
"100",
"==",
"0",
":",
"if",
"self",
".",
"verbose",
":",
"log",
".",
"info",
"(",
"(",
"\"result_id={} are not done retry={}\"",
")",
".",
"format",
"(",
"result_id",
",",
"retry_attempt",
")",
")",
"retry_attempt",
"+=",
"1",
"if",
"retry_attempt",
">",
"max_retries",
":",
"err_msg",
"=",
"(",
"\"failed waiting \"",
"\"for job.id={} result.id={} \"",
"\"to finish\"",
")",
".",
"format",
"(",
"job_id",
",",
"result_id",
")",
"log",
".",
"error",
"(",
"err_msg",
")",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"ERROR",
",",
"error",
"=",
"err_msg",
")",
"else",
":",
"time",
".",
"sleep",
"(",
"sec_to_sleep",
")",
"# wait while results are written to the db",
"else",
":",
"retry_attempt",
"+=",
"1",
"if",
"retry_attempt",
">",
"max_retries",
":",
"err_msg",
"=",
"(",
"\"failed waiting \"",
"\"for job.id={} to finish\"",
")",
".",
"format",
"(",
"job_id",
")",
"log",
".",
"error",
"(",
"err_msg",
")",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"ERROR",
",",
"error",
"=",
"err_msg",
")",
"else",
":",
"if",
"self",
".",
"verbose",
":",
"if",
"retry_attempt",
"%",
"100",
"==",
"0",
":",
"log",
".",
"info",
"(",
"(",
"\"waiting on job.id={} retry={}\"",
")",
".",
"format",
"(",
"job_id",
",",
"retry_attempt",
")",
")",
"# if logging just to show this is running",
"time",
".",
"sleep",
"(",
"sec_to_sleep",
")"
] |
wait_for_job_to_finish
:param job_id: MLJob.id to wait on
:param sec_to_sleep: seconds to sleep during polling
:param max_retries: max retires until stopping
|
[
"wait_for_job_to_finish"
] |
850ba2a2fe21c836e071def618dcecc9caf5d59c
|
https://github.com/jay-johnson/antinex-client/blob/850ba2a2fe21c836e071def618dcecc9caf5d59c/antinex_client/ai_client.py#L726-L870
|
train
|
jay-johnson/antinex-client
|
antinex_client/ai_client.py
|
AIClient.wait_for_prepare_to_finish
|
def wait_for_prepare_to_finish(
self,
prepare_id,
sec_to_sleep=5.0,
max_retries=100000):
"""wait_for_prepare_to_finish
:param prepare_id: MLPrepare.id to wait on
:param sec_to_sleep: seconds to sleep during polling
:param max_retries: max retires until stopping
"""
not_done = True
retry_attempt = 1
while not_done:
if self.debug:
log.info(("PREPSTATUS getting prepare.id={} details")
.format(
prepare_id))
response = self.get_prepare_by_id(prepare_id)
if self.debug:
log.info(("PREPSTATUS got prepare.id={} response={}")
.format(
prepare_id,
response))
if response["status"] != SUCCESS:
log.error(("PREPSTATUS failed to get prepare.id={} "
"with error={}")
.format(
prepare_id,
response["error"]))
return self.build_response(
status=ERROR,
error=response["error"],
data=response["data"])
# stop if this failed getting the prepare details
prepare_data = response.get(
"data",
None)
if not prepare_data:
return self.build_response(
status=ERROR,
error="failed to find prepare dictionary in response",
data=response["data"])
prepare_status = prepare_data["status"]
if prepare_status == "finished" \
or prepare_status == "completed":
not_done = False
return self.build_response(
status=SUCCESS,
error="",
data=prepare_data)
else:
retry_attempt += 1
if retry_attempt > max_retries:
err_msg = ("failed waiting "
"for prepare.id={} to finish").format(
prepare_id)
log.error(err_msg)
return self.build_response(
status=ERROR,
error=err_msg)
else:
if self.verbose:
if retry_attempt % 100 == 0:
log.info(("waiting on prepare.id={} retry={}")
.format(
prepare_id,
retry_attempt))
# if logging just to show this is running
time.sleep(sec_to_sleep)
|
python
|
def wait_for_prepare_to_finish(
self,
prepare_id,
sec_to_sleep=5.0,
max_retries=100000):
"""wait_for_prepare_to_finish
:param prepare_id: MLPrepare.id to wait on
:param sec_to_sleep: seconds to sleep during polling
:param max_retries: max retires until stopping
"""
not_done = True
retry_attempt = 1
while not_done:
if self.debug:
log.info(("PREPSTATUS getting prepare.id={} details")
.format(
prepare_id))
response = self.get_prepare_by_id(prepare_id)
if self.debug:
log.info(("PREPSTATUS got prepare.id={} response={}")
.format(
prepare_id,
response))
if response["status"] != SUCCESS:
log.error(("PREPSTATUS failed to get prepare.id={} "
"with error={}")
.format(
prepare_id,
response["error"]))
return self.build_response(
status=ERROR,
error=response["error"],
data=response["data"])
# stop if this failed getting the prepare details
prepare_data = response.get(
"data",
None)
if not prepare_data:
return self.build_response(
status=ERROR,
error="failed to find prepare dictionary in response",
data=response["data"])
prepare_status = prepare_data["status"]
if prepare_status == "finished" \
or prepare_status == "completed":
not_done = False
return self.build_response(
status=SUCCESS,
error="",
data=prepare_data)
else:
retry_attempt += 1
if retry_attempt > max_retries:
err_msg = ("failed waiting "
"for prepare.id={} to finish").format(
prepare_id)
log.error(err_msg)
return self.build_response(
status=ERROR,
error=err_msg)
else:
if self.verbose:
if retry_attempt % 100 == 0:
log.info(("waiting on prepare.id={} retry={}")
.format(
prepare_id,
retry_attempt))
# if logging just to show this is running
time.sleep(sec_to_sleep)
|
[
"def",
"wait_for_prepare_to_finish",
"(",
"self",
",",
"prepare_id",
",",
"sec_to_sleep",
"=",
"5.0",
",",
"max_retries",
"=",
"100000",
")",
":",
"not_done",
"=",
"True",
"retry_attempt",
"=",
"1",
"while",
"not_done",
":",
"if",
"self",
".",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"PREPSTATUS getting prepare.id={} details\"",
")",
".",
"format",
"(",
"prepare_id",
")",
")",
"response",
"=",
"self",
".",
"get_prepare_by_id",
"(",
"prepare_id",
")",
"if",
"self",
".",
"debug",
":",
"log",
".",
"info",
"(",
"(",
"\"PREPSTATUS got prepare.id={} response={}\"",
")",
".",
"format",
"(",
"prepare_id",
",",
"response",
")",
")",
"if",
"response",
"[",
"\"status\"",
"]",
"!=",
"SUCCESS",
":",
"log",
".",
"error",
"(",
"(",
"\"PREPSTATUS failed to get prepare.id={} \"",
"\"with error={}\"",
")",
".",
"format",
"(",
"prepare_id",
",",
"response",
"[",
"\"error\"",
"]",
")",
")",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"ERROR",
",",
"error",
"=",
"response",
"[",
"\"error\"",
"]",
",",
"data",
"=",
"response",
"[",
"\"data\"",
"]",
")",
"# stop if this failed getting the prepare details",
"prepare_data",
"=",
"response",
".",
"get",
"(",
"\"data\"",
",",
"None",
")",
"if",
"not",
"prepare_data",
":",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"ERROR",
",",
"error",
"=",
"\"failed to find prepare dictionary in response\"",
",",
"data",
"=",
"response",
"[",
"\"data\"",
"]",
")",
"prepare_status",
"=",
"prepare_data",
"[",
"\"status\"",
"]",
"if",
"prepare_status",
"==",
"\"finished\"",
"or",
"prepare_status",
"==",
"\"completed\"",
":",
"not_done",
"=",
"False",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"SUCCESS",
",",
"error",
"=",
"\"\"",
",",
"data",
"=",
"prepare_data",
")",
"else",
":",
"retry_attempt",
"+=",
"1",
"if",
"retry_attempt",
">",
"max_retries",
":",
"err_msg",
"=",
"(",
"\"failed waiting \"",
"\"for prepare.id={} to finish\"",
")",
".",
"format",
"(",
"prepare_id",
")",
"log",
".",
"error",
"(",
"err_msg",
")",
"return",
"self",
".",
"build_response",
"(",
"status",
"=",
"ERROR",
",",
"error",
"=",
"err_msg",
")",
"else",
":",
"if",
"self",
".",
"verbose",
":",
"if",
"retry_attempt",
"%",
"100",
"==",
"0",
":",
"log",
".",
"info",
"(",
"(",
"\"waiting on prepare.id={} retry={}\"",
")",
".",
"format",
"(",
"prepare_id",
",",
"retry_attempt",
")",
")",
"# if logging just to show this is running",
"time",
".",
"sleep",
"(",
"sec_to_sleep",
")"
] |
wait_for_prepare_to_finish
:param prepare_id: MLPrepare.id to wait on
:param sec_to_sleep: seconds to sleep during polling
:param max_retries: max retires until stopping
|
[
"wait_for_prepare_to_finish"
] |
850ba2a2fe21c836e071def618dcecc9caf5d59c
|
https://github.com/jay-johnson/antinex-client/blob/850ba2a2fe21c836e071def618dcecc9caf5d59c/antinex_client/ai_client.py#L985-L1065
|
train
|
praekeltfoundation/molo.commenting
|
molo/commenting/forms.py
|
MoloCommentForm.get_comment_object
|
def get_comment_object(self):
"""
NB: Overridden to remove dupe comment check for admins (necessary for
canned responses)
Return a new (unsaved) comment object based on the information in this
form. Assumes that the form is already validated and will throw a
ValueError if not.
Does not set any of the fields that would come from a Request object
(i.e. ``user`` or ``ip_address``).
"""
if not self.is_valid():
raise ValueError(
"get_comment_object may only be called on valid forms")
CommentModel = self.get_comment_model()
new = CommentModel(**self.get_comment_create_data())
user_model = get_user_model()
try:
user = user_model.objects.get(username=new.user_name)
if not user.is_staff:
new = self.check_for_duplicate_comment(new)
except user_model.DoesNotExist:
# post_molo_comment may have set the username to 'Anonymous'
new = self.check_for_duplicate_comment(new)
return new
|
python
|
def get_comment_object(self):
"""
NB: Overridden to remove dupe comment check for admins (necessary for
canned responses)
Return a new (unsaved) comment object based on the information in this
form. Assumes that the form is already validated and will throw a
ValueError if not.
Does not set any of the fields that would come from a Request object
(i.e. ``user`` or ``ip_address``).
"""
if not self.is_valid():
raise ValueError(
"get_comment_object may only be called on valid forms")
CommentModel = self.get_comment_model()
new = CommentModel(**self.get_comment_create_data())
user_model = get_user_model()
try:
user = user_model.objects.get(username=new.user_name)
if not user.is_staff:
new = self.check_for_duplicate_comment(new)
except user_model.DoesNotExist:
# post_molo_comment may have set the username to 'Anonymous'
new = self.check_for_duplicate_comment(new)
return new
|
[
"def",
"get_comment_object",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"is_valid",
"(",
")",
":",
"raise",
"ValueError",
"(",
"\"get_comment_object may only be called on valid forms\"",
")",
"CommentModel",
"=",
"self",
".",
"get_comment_model",
"(",
")",
"new",
"=",
"CommentModel",
"(",
"*",
"*",
"self",
".",
"get_comment_create_data",
"(",
")",
")",
"user_model",
"=",
"get_user_model",
"(",
")",
"try",
":",
"user",
"=",
"user_model",
".",
"objects",
".",
"get",
"(",
"username",
"=",
"new",
".",
"user_name",
")",
"if",
"not",
"user",
".",
"is_staff",
":",
"new",
"=",
"self",
".",
"check_for_duplicate_comment",
"(",
"new",
")",
"except",
"user_model",
".",
"DoesNotExist",
":",
"# post_molo_comment may have set the username to 'Anonymous'",
"new",
"=",
"self",
".",
"check_for_duplicate_comment",
"(",
"new",
")",
"return",
"new"
] |
NB: Overridden to remove dupe comment check for admins (necessary for
canned responses)
Return a new (unsaved) comment object based on the information in this
form. Assumes that the form is already validated and will throw a
ValueError if not.
Does not set any of the fields that would come from a Request object
(i.e. ``user`` or ``ip_address``).
|
[
"NB",
":",
"Overridden",
"to",
"remove",
"dupe",
"comment",
"check",
"for",
"admins",
"(",
"necessary",
"for",
"canned",
"responses",
")"
] |
94549bd75e4a5c5b3db43149e32d636330b3969c
|
https://github.com/praekeltfoundation/molo.commenting/blob/94549bd75e4a5c5b3db43149e32d636330b3969c/molo/commenting/forms.py#L31-L59
|
train
|
feliphebueno/Rinzler
|
rinzler/__init__.py
|
boot
|
def boot(app_name) -> Rinzler:
"""
Start Rinzler App
:param app_name: str Application's identifier
:return: dict
"""
app = Rinzler(app_name)
app.log.info("App booted =)")
return app
|
python
|
def boot(app_name) -> Rinzler:
"""
Start Rinzler App
:param app_name: str Application's identifier
:return: dict
"""
app = Rinzler(app_name)
app.log.info("App booted =)")
return app
|
[
"def",
"boot",
"(",
"app_name",
")",
"->",
"Rinzler",
":",
"app",
"=",
"Rinzler",
"(",
"app_name",
")",
"app",
".",
"log",
".",
"info",
"(",
"\"App booted =)\"",
")",
"return",
"app"
] |
Start Rinzler App
:param app_name: str Application's identifier
:return: dict
|
[
"Start",
"Rinzler",
"App",
":",
"param",
"app_name",
":",
"str",
"Application",
"s",
"identifier",
":",
"return",
":",
"dict"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/__init__.py#L391-L400
|
train
|
feliphebueno/Rinzler
|
rinzler/__init__.py
|
Rinzler.mount
|
def mount(self, route: str, controller: callable) -> url:
"""
Maps a route namespace with the given params and point it's requests to the especified controller.
:param route: str Namespace route to be mapped
:param controller: callback Controller callable to map end-points
:rtype: url
"""
if issubclass(controller, TemplateView):
return url(
r"%s" % route,
Router(self, route, controller).handle
)
else:
raise TypeError("The controller %s must be a subclass of %s" % (
controller, TemplateView
)
)
|
python
|
def mount(self, route: str, controller: callable) -> url:
"""
Maps a route namespace with the given params and point it's requests to the especified controller.
:param route: str Namespace route to be mapped
:param controller: callback Controller callable to map end-points
:rtype: url
"""
if issubclass(controller, TemplateView):
return url(
r"%s" % route,
Router(self, route, controller).handle
)
else:
raise TypeError("The controller %s must be a subclass of %s" % (
controller, TemplateView
)
)
|
[
"def",
"mount",
"(",
"self",
",",
"route",
":",
"str",
",",
"controller",
":",
"callable",
")",
"->",
"url",
":",
"if",
"issubclass",
"(",
"controller",
",",
"TemplateView",
")",
":",
"return",
"url",
"(",
"r\"%s\"",
"%",
"route",
",",
"Router",
"(",
"self",
",",
"route",
",",
"controller",
")",
".",
"handle",
")",
"else",
":",
"raise",
"TypeError",
"(",
"\"The controller %s must be a subclass of %s\"",
"%",
"(",
"controller",
",",
"TemplateView",
")",
")"
] |
Maps a route namespace with the given params and point it's requests to the especified controller.
:param route: str Namespace route to be mapped
:param controller: callback Controller callable to map end-points
:rtype: url
|
[
"Maps",
"a",
"route",
"namespace",
"with",
"the",
"given",
"params",
"and",
"point",
"it",
"s",
"requests",
"to",
"the",
"especified",
"controller",
".",
":",
"param",
"route",
":",
"str",
"Namespace",
"route",
"to",
"be",
"mapped",
":",
"param",
"controller",
":",
"callback",
"Controller",
"callable",
"to",
"map",
"end",
"-",
"points",
":",
"rtype",
":",
"url"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/__init__.py#L52-L68
|
train
|
feliphebueno/Rinzler
|
rinzler/__init__.py
|
Rinzler.set_auth_service
|
def set_auth_service(self, auth_service: BaseAuthService):
"""
Sets the authentication service
:param auth_service: BaseAuthService Authentication service
:raises: TypeError If the auth_service object is not a subclass of rinzler.auth.BaseAuthService
:rtype: Rinzler
"""
if issubclass(auth_service.__class__, BaseAuthService):
self.auth_service = auth_service
return self
else:
raise TypeError("Your auth service object must be a subclass of rinzler.auth.BaseAuthService.")
|
python
|
def set_auth_service(self, auth_service: BaseAuthService):
"""
Sets the authentication service
:param auth_service: BaseAuthService Authentication service
:raises: TypeError If the auth_service object is not a subclass of rinzler.auth.BaseAuthService
:rtype: Rinzler
"""
if issubclass(auth_service.__class__, BaseAuthService):
self.auth_service = auth_service
return self
else:
raise TypeError("Your auth service object must be a subclass of rinzler.auth.BaseAuthService.")
|
[
"def",
"set_auth_service",
"(",
"self",
",",
"auth_service",
":",
"BaseAuthService",
")",
":",
"if",
"issubclass",
"(",
"auth_service",
".",
"__class__",
",",
"BaseAuthService",
")",
":",
"self",
".",
"auth_service",
"=",
"auth_service",
"return",
"self",
"else",
":",
"raise",
"TypeError",
"(",
"\"Your auth service object must be a subclass of rinzler.auth.BaseAuthService.\"",
")"
] |
Sets the authentication service
:param auth_service: BaseAuthService Authentication service
:raises: TypeError If the auth_service object is not a subclass of rinzler.auth.BaseAuthService
:rtype: Rinzler
|
[
"Sets",
"the",
"authentication",
"service",
":",
"param",
"auth_service",
":",
"BaseAuthService",
"Authentication",
"service",
":",
"raises",
":",
"TypeError",
"If",
"the",
"auth_service",
"object",
"is",
"not",
"a",
"subclass",
"of",
"rinzler",
".",
"auth",
".",
"BaseAuthService",
":",
"rtype",
":",
"Rinzler"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/__init__.py#L88-L99
|
train
|
feliphebueno/Rinzler
|
rinzler/__init__.py
|
Router.handle
|
def handle(self, request: HttpRequest) -> HttpResponse:
"""
Prepares for the CallBackResolver and handles the response and exceptions
:param request HttpRequest
:rtype: HttpResponse
"""
self.__request_start = datetime.now()
self.__request = request
self.__uri = request.path[1:]
self.__method = request.method
# Initializes the callable controller and call it's connect method to get the mapped end-points.
controller: RouteMapping = self.__controller().connect(self.app)
self.__end_points = controller.get_routes()
indent = self.get_json_ident(request.META)
if self.set_end_point_uri() is False:
return self.set_response_headers(self.no_route_found(self.__request).render(indent))
response = HttpResponse(None)
try:
response = self.exec_route_callback()
except RinzlerHttpException as e:
client.captureException()
self.app.log.error(f"< {e.status_code}", exc_info=True)
response = Response(None, status=e.status_code)
except RequestDataTooBig:
client.captureException()
self.app.log.error("< 413", exc_info=True)
response = Response(None, status=413)
except BaseException:
client.captureException()
self.app.log.error("< 500", exc_info=True)
response = Response(None, status=500)
finally:
if type(response) == Response:
return self.set_response_headers(response.render(indent))
else:
return self.set_response_headers(response)
|
python
|
def handle(self, request: HttpRequest) -> HttpResponse:
"""
Prepares for the CallBackResolver and handles the response and exceptions
:param request HttpRequest
:rtype: HttpResponse
"""
self.__request_start = datetime.now()
self.__request = request
self.__uri = request.path[1:]
self.__method = request.method
# Initializes the callable controller and call it's connect method to get the mapped end-points.
controller: RouteMapping = self.__controller().connect(self.app)
self.__end_points = controller.get_routes()
indent = self.get_json_ident(request.META)
if self.set_end_point_uri() is False:
return self.set_response_headers(self.no_route_found(self.__request).render(indent))
response = HttpResponse(None)
try:
response = self.exec_route_callback()
except RinzlerHttpException as e:
client.captureException()
self.app.log.error(f"< {e.status_code}", exc_info=True)
response = Response(None, status=e.status_code)
except RequestDataTooBig:
client.captureException()
self.app.log.error("< 413", exc_info=True)
response = Response(None, status=413)
except BaseException:
client.captureException()
self.app.log.error("< 500", exc_info=True)
response = Response(None, status=500)
finally:
if type(response) == Response:
return self.set_response_headers(response.render(indent))
else:
return self.set_response_headers(response)
|
[
"def",
"handle",
"(",
"self",
",",
"request",
":",
"HttpRequest",
")",
"->",
"HttpResponse",
":",
"self",
".",
"__request_start",
"=",
"datetime",
".",
"now",
"(",
")",
"self",
".",
"__request",
"=",
"request",
"self",
".",
"__uri",
"=",
"request",
".",
"path",
"[",
"1",
":",
"]",
"self",
".",
"__method",
"=",
"request",
".",
"method",
"# Initializes the callable controller and call it's connect method to get the mapped end-points.",
"controller",
":",
"RouteMapping",
"=",
"self",
".",
"__controller",
"(",
")",
".",
"connect",
"(",
"self",
".",
"app",
")",
"self",
".",
"__end_points",
"=",
"controller",
".",
"get_routes",
"(",
")",
"indent",
"=",
"self",
".",
"get_json_ident",
"(",
"request",
".",
"META",
")",
"if",
"self",
".",
"set_end_point_uri",
"(",
")",
"is",
"False",
":",
"return",
"self",
".",
"set_response_headers",
"(",
"self",
".",
"no_route_found",
"(",
"self",
".",
"__request",
")",
".",
"render",
"(",
"indent",
")",
")",
"response",
"=",
"HttpResponse",
"(",
"None",
")",
"try",
":",
"response",
"=",
"self",
".",
"exec_route_callback",
"(",
")",
"except",
"RinzlerHttpException",
"as",
"e",
":",
"client",
".",
"captureException",
"(",
")",
"self",
".",
"app",
".",
"log",
".",
"error",
"(",
"f\"< {e.status_code}\"",
",",
"exc_info",
"=",
"True",
")",
"response",
"=",
"Response",
"(",
"None",
",",
"status",
"=",
"e",
".",
"status_code",
")",
"except",
"RequestDataTooBig",
":",
"client",
".",
"captureException",
"(",
")",
"self",
".",
"app",
".",
"log",
".",
"error",
"(",
"\"< 413\"",
",",
"exc_info",
"=",
"True",
")",
"response",
"=",
"Response",
"(",
"None",
",",
"status",
"=",
"413",
")",
"except",
"BaseException",
":",
"client",
".",
"captureException",
"(",
")",
"self",
".",
"app",
".",
"log",
".",
"error",
"(",
"\"< 500\"",
",",
"exc_info",
"=",
"True",
")",
"response",
"=",
"Response",
"(",
"None",
",",
"status",
"=",
"500",
")",
"finally",
":",
"if",
"type",
"(",
"response",
")",
"==",
"Response",
":",
"return",
"self",
".",
"set_response_headers",
"(",
"response",
".",
"render",
"(",
"indent",
")",
")",
"else",
":",
"return",
"self",
".",
"set_response_headers",
"(",
"response",
")"
] |
Prepares for the CallBackResolver and handles the response and exceptions
:param request HttpRequest
:rtype: HttpResponse
|
[
"Prepares",
"for",
"the",
"CallBackResolver",
"and",
"handles",
"the",
"response",
"and",
"exceptions",
":",
"param",
"request",
"HttpRequest",
":",
"rtype",
":",
"HttpResponse"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/__init__.py#L125-L165
|
train
|
feliphebueno/Rinzler
|
rinzler/__init__.py
|
Router.exec_route_callback
|
def exec_route_callback(self) -> Response or object:
"""
Executes the resolved end-point callback, or its fallback
:rtype: Response or object
"""
if self.__method.lower() in self.__end_points:
for bound in self.__end_points[self.__method.lower()]:
route = list(bound)[0]
expected_params = self.get_url_params(route)
actual_params = self.get_url_params(self.get_end_point_uri())
if self.request_matches_route(self.get_end_point_uri(), route):
self.app.log.info("> {0} {1}".format(self.__method, self.__uri))
if self.authenticate(route, actual_params):
self.app.log.debug(
"%s(%d) %s" % ("body ", len(self.__request.body), self.__request.body.decode('utf-8'))
)
pattern_params = self.get_callback_pattern(expected_params, actual_params)
self.app.request_handle_time = (
lambda d: int((d.days * 24 * 60 * 60 * 1000) + (d.seconds * 1000) + (d.microseconds / 1000))
)(datetime.now() - self.__request_start)
return bound[route](self.__request, self.app, **pattern_params)
else:
raise AuthException("Authentication failed.")
if self.__method == "OPTIONS":
self.app.log.info("Route matched: {0} {1}".format(self.__method, self.__uri))
return self.default_route_options()
if self.__route == '' and self.__uri == '':
return self.welcome_page()
else:
return self.no_route_found(self.__request)
|
python
|
def exec_route_callback(self) -> Response or object:
"""
Executes the resolved end-point callback, or its fallback
:rtype: Response or object
"""
if self.__method.lower() in self.__end_points:
for bound in self.__end_points[self.__method.lower()]:
route = list(bound)[0]
expected_params = self.get_url_params(route)
actual_params = self.get_url_params(self.get_end_point_uri())
if self.request_matches_route(self.get_end_point_uri(), route):
self.app.log.info("> {0} {1}".format(self.__method, self.__uri))
if self.authenticate(route, actual_params):
self.app.log.debug(
"%s(%d) %s" % ("body ", len(self.__request.body), self.__request.body.decode('utf-8'))
)
pattern_params = self.get_callback_pattern(expected_params, actual_params)
self.app.request_handle_time = (
lambda d: int((d.days * 24 * 60 * 60 * 1000) + (d.seconds * 1000) + (d.microseconds / 1000))
)(datetime.now() - self.__request_start)
return bound[route](self.__request, self.app, **pattern_params)
else:
raise AuthException("Authentication failed.")
if self.__method == "OPTIONS":
self.app.log.info("Route matched: {0} {1}".format(self.__method, self.__uri))
return self.default_route_options()
if self.__route == '' and self.__uri == '':
return self.welcome_page()
else:
return self.no_route_found(self.__request)
|
[
"def",
"exec_route_callback",
"(",
"self",
")",
"->",
"Response",
"or",
"object",
":",
"if",
"self",
".",
"__method",
".",
"lower",
"(",
")",
"in",
"self",
".",
"__end_points",
":",
"for",
"bound",
"in",
"self",
".",
"__end_points",
"[",
"self",
".",
"__method",
".",
"lower",
"(",
")",
"]",
":",
"route",
"=",
"list",
"(",
"bound",
")",
"[",
"0",
"]",
"expected_params",
"=",
"self",
".",
"get_url_params",
"(",
"route",
")",
"actual_params",
"=",
"self",
".",
"get_url_params",
"(",
"self",
".",
"get_end_point_uri",
"(",
")",
")",
"if",
"self",
".",
"request_matches_route",
"(",
"self",
".",
"get_end_point_uri",
"(",
")",
",",
"route",
")",
":",
"self",
".",
"app",
".",
"log",
".",
"info",
"(",
"\"> {0} {1}\"",
".",
"format",
"(",
"self",
".",
"__method",
",",
"self",
".",
"__uri",
")",
")",
"if",
"self",
".",
"authenticate",
"(",
"route",
",",
"actual_params",
")",
":",
"self",
".",
"app",
".",
"log",
".",
"debug",
"(",
"\"%s(%d) %s\"",
"%",
"(",
"\"body \"",
",",
"len",
"(",
"self",
".",
"__request",
".",
"body",
")",
",",
"self",
".",
"__request",
".",
"body",
".",
"decode",
"(",
"'utf-8'",
")",
")",
")",
"pattern_params",
"=",
"self",
".",
"get_callback_pattern",
"(",
"expected_params",
",",
"actual_params",
")",
"self",
".",
"app",
".",
"request_handle_time",
"=",
"(",
"lambda",
"d",
":",
"int",
"(",
"(",
"d",
".",
"days",
"*",
"24",
"*",
"60",
"*",
"60",
"*",
"1000",
")",
"+",
"(",
"d",
".",
"seconds",
"*",
"1000",
")",
"+",
"(",
"d",
".",
"microseconds",
"/",
"1000",
")",
")",
")",
"(",
"datetime",
".",
"now",
"(",
")",
"-",
"self",
".",
"__request_start",
")",
"return",
"bound",
"[",
"route",
"]",
"(",
"self",
".",
"__request",
",",
"self",
".",
"app",
",",
"*",
"*",
"pattern_params",
")",
"else",
":",
"raise",
"AuthException",
"(",
"\"Authentication failed.\"",
")",
"if",
"self",
".",
"__method",
"==",
"\"OPTIONS\"",
":",
"self",
".",
"app",
".",
"log",
".",
"info",
"(",
"\"Route matched: {0} {1}\"",
".",
"format",
"(",
"self",
".",
"__method",
",",
"self",
".",
"__uri",
")",
")",
"return",
"self",
".",
"default_route_options",
"(",
")",
"if",
"self",
".",
"__route",
"==",
"''",
"and",
"self",
".",
"__uri",
"==",
"''",
":",
"return",
"self",
".",
"welcome_page",
"(",
")",
"else",
":",
"return",
"self",
".",
"no_route_found",
"(",
"self",
".",
"__request",
")"
] |
Executes the resolved end-point callback, or its fallback
:rtype: Response or object
|
[
"Executes",
"the",
"resolved",
"end",
"-",
"point",
"callback",
"or",
"its",
"fallback",
":",
"rtype",
":",
"Response",
"or",
"object"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/__init__.py#L167-L201
|
train
|
feliphebueno/Rinzler
|
rinzler/__init__.py
|
Router.request_matches_route
|
def request_matches_route(self, actual_route: str, expected_route: str):
"""
Determines whether a route matches the actual requested route or not
:param actual_route str
:param expected_route
:rtype: Boolean
"""
expected_params = self.get_url_params(expected_route)
actual_params = self.get_url_params(actual_route)
i = 0
if len(expected_params) == len(actual_params):
for param in actual_params:
if expected_params[i][0] != "{":
if param != expected_params[i]:
return False
i += 1
else:
return False
return True
|
python
|
def request_matches_route(self, actual_route: str, expected_route: str):
"""
Determines whether a route matches the actual requested route or not
:param actual_route str
:param expected_route
:rtype: Boolean
"""
expected_params = self.get_url_params(expected_route)
actual_params = self.get_url_params(actual_route)
i = 0
if len(expected_params) == len(actual_params):
for param in actual_params:
if expected_params[i][0] != "{":
if param != expected_params[i]:
return False
i += 1
else:
return False
return True
|
[
"def",
"request_matches_route",
"(",
"self",
",",
"actual_route",
":",
"str",
",",
"expected_route",
":",
"str",
")",
":",
"expected_params",
"=",
"self",
".",
"get_url_params",
"(",
"expected_route",
")",
"actual_params",
"=",
"self",
".",
"get_url_params",
"(",
"actual_route",
")",
"i",
"=",
"0",
"if",
"len",
"(",
"expected_params",
")",
"==",
"len",
"(",
"actual_params",
")",
":",
"for",
"param",
"in",
"actual_params",
":",
"if",
"expected_params",
"[",
"i",
"]",
"[",
"0",
"]",
"!=",
"\"{\"",
":",
"if",
"param",
"!=",
"expected_params",
"[",
"i",
"]",
":",
"return",
"False",
"i",
"+=",
"1",
"else",
":",
"return",
"False",
"return",
"True"
] |
Determines whether a route matches the actual requested route or not
:param actual_route str
:param expected_route
:rtype: Boolean
|
[
"Determines",
"whether",
"a",
"route",
"matches",
"the",
"actual",
"requested",
"route",
"or",
"not",
":",
"param",
"actual_route",
"str",
":",
"param",
"expected_route",
":",
"rtype",
":",
"Boolean"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/__init__.py#L203-L223
|
train
|
feliphebueno/Rinzler
|
rinzler/__init__.py
|
Router.authenticate
|
def authenticate(self, bound_route, actual_params) -> bool:
"""
Runs the pre-defined authenticaton service
:param bound_route str route matched
:param actual_params dict actual url parameters
:rtype: bool
"""
if self.__auth_service is not None:
auth_route = "{0}_{1}{2}".format(self.__method, self.__route, bound_route)
auth_data = self.__auth_service.authenticate(self.__request, auth_route, actual_params)
if auth_data is True:
self.app.auth_data = self.__auth_service.auth_data
else:
return False
return True
|
python
|
def authenticate(self, bound_route, actual_params) -> bool:
"""
Runs the pre-defined authenticaton service
:param bound_route str route matched
:param actual_params dict actual url parameters
:rtype: bool
"""
if self.__auth_service is not None:
auth_route = "{0}_{1}{2}".format(self.__method, self.__route, bound_route)
auth_data = self.__auth_service.authenticate(self.__request, auth_route, actual_params)
if auth_data is True:
self.app.auth_data = self.__auth_service.auth_data
else:
return False
return True
|
[
"def",
"authenticate",
"(",
"self",
",",
"bound_route",
",",
"actual_params",
")",
"->",
"bool",
":",
"if",
"self",
".",
"__auth_service",
"is",
"not",
"None",
":",
"auth_route",
"=",
"\"{0}_{1}{2}\"",
".",
"format",
"(",
"self",
".",
"__method",
",",
"self",
".",
"__route",
",",
"bound_route",
")",
"auth_data",
"=",
"self",
".",
"__auth_service",
".",
"authenticate",
"(",
"self",
".",
"__request",
",",
"auth_route",
",",
"actual_params",
")",
"if",
"auth_data",
"is",
"True",
":",
"self",
".",
"app",
".",
"auth_data",
"=",
"self",
".",
"__auth_service",
".",
"auth_data",
"else",
":",
"return",
"False",
"return",
"True"
] |
Runs the pre-defined authenticaton service
:param bound_route str route matched
:param actual_params dict actual url parameters
:rtype: bool
|
[
"Runs",
"the",
"pre",
"-",
"defined",
"authenticaton",
"service",
":",
"param",
"bound_route",
"str",
"route",
"matched",
":",
"param",
"actual_params",
"dict",
"actual",
"url",
"parameters",
":",
"rtype",
":",
"bool"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/__init__.py#L225-L240
|
train
|
feliphebueno/Rinzler
|
rinzler/__init__.py
|
Router.get_callback_pattern
|
def get_callback_pattern(expected_params, actual_params):
"""
Assembles a dictionary whith the parameters schema defined for this route
:param expected_params dict parameters schema defined for this route
:param actual_params dict actual url parameters
:rtype: dict
"""
pattern = dict()
key = 0
for exp_param in expected_params:
if exp_param[0] == '{' and exp_param[-1:] == '}':
pattern[exp_param[1:-1]] = actual_params[key]
key = key + 1
return pattern
|
python
|
def get_callback_pattern(expected_params, actual_params):
"""
Assembles a dictionary whith the parameters schema defined for this route
:param expected_params dict parameters schema defined for this route
:param actual_params dict actual url parameters
:rtype: dict
"""
pattern = dict()
key = 0
for exp_param in expected_params:
if exp_param[0] == '{' and exp_param[-1:] == '}':
pattern[exp_param[1:-1]] = actual_params[key]
key = key + 1
return pattern
|
[
"def",
"get_callback_pattern",
"(",
"expected_params",
",",
"actual_params",
")",
":",
"pattern",
"=",
"dict",
"(",
")",
"key",
"=",
"0",
"for",
"exp_param",
"in",
"expected_params",
":",
"if",
"exp_param",
"[",
"0",
"]",
"==",
"'{'",
"and",
"exp_param",
"[",
"-",
"1",
":",
"]",
"==",
"'}'",
":",
"pattern",
"[",
"exp_param",
"[",
"1",
":",
"-",
"1",
"]",
"]",
"=",
"actual_params",
"[",
"key",
"]",
"key",
"=",
"key",
"+",
"1",
"return",
"pattern"
] |
Assembles a dictionary whith the parameters schema defined for this route
:param expected_params dict parameters schema defined for this route
:param actual_params dict actual url parameters
:rtype: dict
|
[
"Assembles",
"a",
"dictionary",
"whith",
"the",
"parameters",
"schema",
"defined",
"for",
"this",
"route",
":",
"param",
"expected_params",
"dict",
"parameters",
"schema",
"defined",
"for",
"this",
"route",
":",
"param",
"actual_params",
"dict",
"actual",
"url",
"parameters",
":",
"rtype",
":",
"dict"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/__init__.py#L243-L256
|
train
|
feliphebueno/Rinzler
|
rinzler/__init__.py
|
Router.get_url_params
|
def get_url_params(end_point: str) -> list:
"""
Gets route parameters as dictionary
:param end_point str target route
:rtype: list
"""
var_params = end_point.split('/')
if len(var_params) == 1 and var_params[0] == '':
return []
elif len(var_params) == 1 and var_params[0] != '':
return [var_params[0]]
else:
params = list()
for param in var_params:
if len(param) > 0:
params.append(param)
return params
|
python
|
def get_url_params(end_point: str) -> list:
"""
Gets route parameters as dictionary
:param end_point str target route
:rtype: list
"""
var_params = end_point.split('/')
if len(var_params) == 1 and var_params[0] == '':
return []
elif len(var_params) == 1 and var_params[0] != '':
return [var_params[0]]
else:
params = list()
for param in var_params:
if len(param) > 0:
params.append(param)
return params
|
[
"def",
"get_url_params",
"(",
"end_point",
":",
"str",
")",
"->",
"list",
":",
"var_params",
"=",
"end_point",
".",
"split",
"(",
"'/'",
")",
"if",
"len",
"(",
"var_params",
")",
"==",
"1",
"and",
"var_params",
"[",
"0",
"]",
"==",
"''",
":",
"return",
"[",
"]",
"elif",
"len",
"(",
"var_params",
")",
"==",
"1",
"and",
"var_params",
"[",
"0",
"]",
"!=",
"''",
":",
"return",
"[",
"var_params",
"[",
"0",
"]",
"]",
"else",
":",
"params",
"=",
"list",
"(",
")",
"for",
"param",
"in",
"var_params",
":",
"if",
"len",
"(",
"param",
")",
">",
"0",
":",
"params",
".",
"append",
"(",
"param",
")",
"return",
"params"
] |
Gets route parameters as dictionary
:param end_point str target route
:rtype: list
|
[
"Gets",
"route",
"parameters",
"as",
"dictionary",
":",
"param",
"end_point",
"str",
"target",
"route",
":",
"rtype",
":",
"list"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/__init__.py#L259-L277
|
train
|
feliphebueno/Rinzler
|
rinzler/__init__.py
|
Router.set_end_point_uri
|
def set_end_point_uri(self) -> bool:
"""
Extracts the route from the accessed URL and sets it to __end_point_uri
:rtype: bool
"""
expected_parts = self.__route.split("/")
actual_parts = self.__uri.split("/")
i = 0
for part in expected_parts:
if part != actual_parts[i]:
return False
i = i + 1
uri_prefix = len(self.__route)
self.__end_point_uri = self.__uri[uri_prefix:]
return True
|
python
|
def set_end_point_uri(self) -> bool:
"""
Extracts the route from the accessed URL and sets it to __end_point_uri
:rtype: bool
"""
expected_parts = self.__route.split("/")
actual_parts = self.__uri.split("/")
i = 0
for part in expected_parts:
if part != actual_parts[i]:
return False
i = i + 1
uri_prefix = len(self.__route)
self.__end_point_uri = self.__uri[uri_prefix:]
return True
|
[
"def",
"set_end_point_uri",
"(",
"self",
")",
"->",
"bool",
":",
"expected_parts",
"=",
"self",
".",
"__route",
".",
"split",
"(",
"\"/\"",
")",
"actual_parts",
"=",
"self",
".",
"__uri",
".",
"split",
"(",
"\"/\"",
")",
"i",
"=",
"0",
"for",
"part",
"in",
"expected_parts",
":",
"if",
"part",
"!=",
"actual_parts",
"[",
"i",
"]",
":",
"return",
"False",
"i",
"=",
"i",
"+",
"1",
"uri_prefix",
"=",
"len",
"(",
"self",
".",
"__route",
")",
"self",
".",
"__end_point_uri",
"=",
"self",
".",
"__uri",
"[",
"uri_prefix",
":",
"]",
"return",
"True"
] |
Extracts the route from the accessed URL and sets it to __end_point_uri
:rtype: bool
|
[
"Extracts",
"the",
"route",
"from",
"the",
"accessed",
"URL",
"and",
"sets",
"it",
"to",
"__end_point_uri",
":",
"rtype",
":",
"bool"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/__init__.py#L286-L302
|
train
|
feliphebueno/Rinzler
|
rinzler/__init__.py
|
Router.no_route_found
|
def no_route_found(self, request):
"""
Default callback for route not found
:param request HttpRequest
:rtype: Response
"""
response_obj = OrderedDict()
response_obj["status"] = False
response_obj["exceptions"] = {
"message": "No route found for {0} {1}".format(self.__method, self.__uri),
}
response_obj["request"] = {
"method": self.__method,
"path_info": self.__uri,
"content": request.body.decode("utf-8")
}
response_obj["message"] = "We are sorry, but something went terribly wrong."
return Response(response_obj, content_type="application/json", status=404, charset="utf-8")
|
python
|
def no_route_found(self, request):
"""
Default callback for route not found
:param request HttpRequest
:rtype: Response
"""
response_obj = OrderedDict()
response_obj["status"] = False
response_obj["exceptions"] = {
"message": "No route found for {0} {1}".format(self.__method, self.__uri),
}
response_obj["request"] = {
"method": self.__method,
"path_info": self.__uri,
"content": request.body.decode("utf-8")
}
response_obj["message"] = "We are sorry, but something went terribly wrong."
return Response(response_obj, content_type="application/json", status=404, charset="utf-8")
|
[
"def",
"no_route_found",
"(",
"self",
",",
"request",
")",
":",
"response_obj",
"=",
"OrderedDict",
"(",
")",
"response_obj",
"[",
"\"status\"",
"]",
"=",
"False",
"response_obj",
"[",
"\"exceptions\"",
"]",
"=",
"{",
"\"message\"",
":",
"\"No route found for {0} {1}\"",
".",
"format",
"(",
"self",
".",
"__method",
",",
"self",
".",
"__uri",
")",
",",
"}",
"response_obj",
"[",
"\"request\"",
"]",
"=",
"{",
"\"method\"",
":",
"self",
".",
"__method",
",",
"\"path_info\"",
":",
"self",
".",
"__uri",
",",
"\"content\"",
":",
"request",
".",
"body",
".",
"decode",
"(",
"\"utf-8\"",
")",
"}",
"response_obj",
"[",
"\"message\"",
"]",
"=",
"\"We are sorry, but something went terribly wrong.\"",
"return",
"Response",
"(",
"response_obj",
",",
"content_type",
"=",
"\"application/json\"",
",",
"status",
"=",
"404",
",",
"charset",
"=",
"\"utf-8\"",
")"
] |
Default callback for route not found
:param request HttpRequest
:rtype: Response
|
[
"Default",
"callback",
"for",
"route",
"not",
"found",
":",
"param",
"request",
"HttpRequest",
":",
"rtype",
":",
"Response"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/__init__.py#L304-L322
|
train
|
feliphebueno/Rinzler
|
rinzler/__init__.py
|
Router.welcome_page
|
def welcome_page(self):
"""
Defaulf welcome page when the route / is note mapped yet
:rtype: HttpResponse
"""
message = "HTTP/1.1 200 OK RINZLER FRAMEWORK"
return HttpResponse(
"<center><h1>{0}({1})</h1></center>".format(message, self.app.app_name),
content_type="text/html", charset="utf-8"
)
|
python
|
def welcome_page(self):
"""
Defaulf welcome page when the route / is note mapped yet
:rtype: HttpResponse
"""
message = "HTTP/1.1 200 OK RINZLER FRAMEWORK"
return HttpResponse(
"<center><h1>{0}({1})</h1></center>".format(message, self.app.app_name),
content_type="text/html", charset="utf-8"
)
|
[
"def",
"welcome_page",
"(",
"self",
")",
":",
"message",
"=",
"\"HTTP/1.1 200 OK RINZLER FRAMEWORK\"",
"return",
"HttpResponse",
"(",
"\"<center><h1>{0}({1})</h1></center>\"",
".",
"format",
"(",
"message",
",",
"self",
".",
"app",
".",
"app_name",
")",
",",
"content_type",
"=",
"\"text/html\"",
",",
"charset",
"=",
"\"utf-8\"",
")"
] |
Defaulf welcome page when the route / is note mapped yet
:rtype: HttpResponse
|
[
"Defaulf",
"welcome",
"page",
"when",
"the",
"route",
"/",
"is",
"note",
"mapped",
"yet",
":",
"rtype",
":",
"HttpResponse"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/__init__.py#L324-L333
|
train
|
feliphebueno/Rinzler
|
rinzler/__init__.py
|
Router.default_route_options
|
def default_route_options():
"""
Default callback for OPTIONS request
:rtype: Response
"""
response_obj = OrderedDict()
response_obj["status"] = True
response_obj["data"] = "Ok"
return Response(response_obj, content_type="application/json", charset="utf-8")
|
python
|
def default_route_options():
"""
Default callback for OPTIONS request
:rtype: Response
"""
response_obj = OrderedDict()
response_obj["status"] = True
response_obj["data"] = "Ok"
return Response(response_obj, content_type="application/json", charset="utf-8")
|
[
"def",
"default_route_options",
"(",
")",
":",
"response_obj",
"=",
"OrderedDict",
"(",
")",
"response_obj",
"[",
"\"status\"",
"]",
"=",
"True",
"response_obj",
"[",
"\"data\"",
"]",
"=",
"\"Ok\"",
"return",
"Response",
"(",
"response_obj",
",",
"content_type",
"=",
"\"application/json\"",
",",
"charset",
"=",
"\"utf-8\"",
")"
] |
Default callback for OPTIONS request
:rtype: Response
|
[
"Default",
"callback",
"for",
"OPTIONS",
"request",
":",
"rtype",
":",
"Response"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/__init__.py#L336-L346
|
train
|
feliphebueno/Rinzler
|
rinzler/__init__.py
|
Router.set_response_headers
|
def set_response_headers(self, response: HttpResponse) -> HttpResponse:
"""
Appends default headers to every response returned by the API
:param response HttpResponse
:rtype: HttpResponse
"""
public_name = os.environ.get('SERVER_PUBLIC_NAME')
response_headers = {
'access-control-allow-headers': self.app.allowed_headers,
'access-control-allow-methods': self.app.allowed_methods,
'access-control-allow-origin': self.app.allowed_origins,
'access-control-allow-credentials': True,
'www-authenticate': "Bearer",
'server-public-name': public_name if public_name else "No one",
'user-info': "Rinzler Framework rulez!"
}
response_headers.update(self.app.default_headers)
for key in response_headers:
response[key] = response_headers[key]
status = response.status_code
if status != 404:
self.app.log.info("< {0}".format(status))
return response
|
python
|
def set_response_headers(self, response: HttpResponse) -> HttpResponse:
"""
Appends default headers to every response returned by the API
:param response HttpResponse
:rtype: HttpResponse
"""
public_name = os.environ.get('SERVER_PUBLIC_NAME')
response_headers = {
'access-control-allow-headers': self.app.allowed_headers,
'access-control-allow-methods': self.app.allowed_methods,
'access-control-allow-origin': self.app.allowed_origins,
'access-control-allow-credentials': True,
'www-authenticate': "Bearer",
'server-public-name': public_name if public_name else "No one",
'user-info': "Rinzler Framework rulez!"
}
response_headers.update(self.app.default_headers)
for key in response_headers:
response[key] = response_headers[key]
status = response.status_code
if status != 404:
self.app.log.info("< {0}".format(status))
return response
|
[
"def",
"set_response_headers",
"(",
"self",
",",
"response",
":",
"HttpResponse",
")",
"->",
"HttpResponse",
":",
"public_name",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'SERVER_PUBLIC_NAME'",
")",
"response_headers",
"=",
"{",
"'access-control-allow-headers'",
":",
"self",
".",
"app",
".",
"allowed_headers",
",",
"'access-control-allow-methods'",
":",
"self",
".",
"app",
".",
"allowed_methods",
",",
"'access-control-allow-origin'",
":",
"self",
".",
"app",
".",
"allowed_origins",
",",
"'access-control-allow-credentials'",
":",
"True",
",",
"'www-authenticate'",
":",
"\"Bearer\"",
",",
"'server-public-name'",
":",
"public_name",
"if",
"public_name",
"else",
"\"No one\"",
",",
"'user-info'",
":",
"\"Rinzler Framework rulez!\"",
"}",
"response_headers",
".",
"update",
"(",
"self",
".",
"app",
".",
"default_headers",
")",
"for",
"key",
"in",
"response_headers",
":",
"response",
"[",
"key",
"]",
"=",
"response_headers",
"[",
"key",
"]",
"status",
"=",
"response",
".",
"status_code",
"if",
"status",
"!=",
"404",
":",
"self",
".",
"app",
".",
"log",
".",
"info",
"(",
"\"< {0}\"",
".",
"format",
"(",
"status",
")",
")",
"return",
"response"
] |
Appends default headers to every response returned by the API
:param response HttpResponse
:rtype: HttpResponse
|
[
"Appends",
"default",
"headers",
"to",
"every",
"response",
"returned",
"by",
"the",
"API",
":",
"param",
"response",
"HttpResponse",
":",
"rtype",
":",
"HttpResponse"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/__init__.py#L348-L374
|
train
|
feliphebueno/Rinzler
|
rinzler/__init__.py
|
Router.get_json_ident
|
def get_json_ident(request_headers: dict) -> int:
"""
Defines whether the JSON response will be indented or not
:param request_headers: dict
:return: self
"""
if 'HTTP_USER_AGENT' in request_headers:
indent = 2 if re.match("[Mozilla]{7}", request_headers['HTTP_USER_AGENT']) else 0
else:
indent = 0
return indent
|
python
|
def get_json_ident(request_headers: dict) -> int:
"""
Defines whether the JSON response will be indented or not
:param request_headers: dict
:return: self
"""
if 'HTTP_USER_AGENT' in request_headers:
indent = 2 if re.match("[Mozilla]{7}", request_headers['HTTP_USER_AGENT']) else 0
else:
indent = 0
return indent
|
[
"def",
"get_json_ident",
"(",
"request_headers",
":",
"dict",
")",
"->",
"int",
":",
"if",
"'HTTP_USER_AGENT'",
"in",
"request_headers",
":",
"indent",
"=",
"2",
"if",
"re",
".",
"match",
"(",
"\"[Mozilla]{7}\"",
",",
"request_headers",
"[",
"'HTTP_USER_AGENT'",
"]",
")",
"else",
"0",
"else",
":",
"indent",
"=",
"0",
"return",
"indent"
] |
Defines whether the JSON response will be indented or not
:param request_headers: dict
:return: self
|
[
"Defines",
"whether",
"the",
"JSON",
"response",
"will",
"be",
"indented",
"or",
"not",
":",
"param",
"request_headers",
":",
"dict",
":",
"return",
":",
"self"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/__init__.py#L377-L388
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
prop
|
def prop(key, dct_or_obj):
"""
Implementation of prop (get_item) that also supports object attributes
:param key:
:param dct_or_obj:
:return:
"""
# Note that hasattr is a builtin and getattr is a ramda function, hence the different arg position
if isinstance(dict, dct_or_obj):
if has(key, dct_or_obj):
return dct_or_obj[key]
else:
raise Exception("No key %s found for dict %s" % (key, dct_or_obj))
elif isinstance(list, dct_or_obj):
if isint(key):
return dct_or_obj[key]
else:
raise Exception("Key %s not expected for list type: %s" % (key, dct_or_obj))
elif isinstance(object, dct_or_obj):
if hasattr(dct_or_obj, key):
return getattr(key, dct_or_obj)
else:
raise Exception("No key %s found for objects %s" % (key, dct_or_obj))
else:
raise Exception("%s is neither a dict nor objects" % dct_or_obj)
|
python
|
def prop(key, dct_or_obj):
"""
Implementation of prop (get_item) that also supports object attributes
:param key:
:param dct_or_obj:
:return:
"""
# Note that hasattr is a builtin and getattr is a ramda function, hence the different arg position
if isinstance(dict, dct_or_obj):
if has(key, dct_or_obj):
return dct_or_obj[key]
else:
raise Exception("No key %s found for dict %s" % (key, dct_or_obj))
elif isinstance(list, dct_or_obj):
if isint(key):
return dct_or_obj[key]
else:
raise Exception("Key %s not expected for list type: %s" % (key, dct_or_obj))
elif isinstance(object, dct_or_obj):
if hasattr(dct_or_obj, key):
return getattr(key, dct_or_obj)
else:
raise Exception("No key %s found for objects %s" % (key, dct_or_obj))
else:
raise Exception("%s is neither a dict nor objects" % dct_or_obj)
|
[
"def",
"prop",
"(",
"key",
",",
"dct_or_obj",
")",
":",
"# Note that hasattr is a builtin and getattr is a ramda function, hence the different arg position",
"if",
"isinstance",
"(",
"dict",
",",
"dct_or_obj",
")",
":",
"if",
"has",
"(",
"key",
",",
"dct_or_obj",
")",
":",
"return",
"dct_or_obj",
"[",
"key",
"]",
"else",
":",
"raise",
"Exception",
"(",
"\"No key %s found for dict %s\"",
"%",
"(",
"key",
",",
"dct_or_obj",
")",
")",
"elif",
"isinstance",
"(",
"list",
",",
"dct_or_obj",
")",
":",
"if",
"isint",
"(",
"key",
")",
":",
"return",
"dct_or_obj",
"[",
"key",
"]",
"else",
":",
"raise",
"Exception",
"(",
"\"Key %s not expected for list type: %s\"",
"%",
"(",
"key",
",",
"dct_or_obj",
")",
")",
"elif",
"isinstance",
"(",
"object",
",",
"dct_or_obj",
")",
":",
"if",
"hasattr",
"(",
"dct_or_obj",
",",
"key",
")",
":",
"return",
"getattr",
"(",
"key",
",",
"dct_or_obj",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"No key %s found for objects %s\"",
"%",
"(",
"key",
",",
"dct_or_obj",
")",
")",
"else",
":",
"raise",
"Exception",
"(",
"\"%s is neither a dict nor objects\"",
"%",
"dct_or_obj",
")"
] |
Implementation of prop (get_item) that also supports object attributes
:param key:
:param dct_or_obj:
:return:
|
[
"Implementation",
"of",
"prop",
"(",
"get_item",
")",
"that",
"also",
"supports",
"object",
"attributes",
":",
"param",
"key",
":",
":",
"param",
"dct_or_obj",
":",
":",
"return",
":"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L12-L36
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
all_pass_dict
|
def all_pass_dict(f, dct):
"""
Returns true if all dct values pass f
:param f: binary lambda predicate
:param dct:
:return: True or false
"""
return all(map_with_obj_to_values(
lambda key, value: f(key, value),
dct
))
|
python
|
def all_pass_dict(f, dct):
"""
Returns true if all dct values pass f
:param f: binary lambda predicate
:param dct:
:return: True or false
"""
return all(map_with_obj_to_values(
lambda key, value: f(key, value),
dct
))
|
[
"def",
"all_pass_dict",
"(",
"f",
",",
"dct",
")",
":",
"return",
"all",
"(",
"map_with_obj_to_values",
"(",
"lambda",
"key",
",",
"value",
":",
"f",
"(",
"key",
",",
"value",
")",
",",
"dct",
")",
")"
] |
Returns true if all dct values pass f
:param f: binary lambda predicate
:param dct:
:return: True or false
|
[
"Returns",
"true",
"if",
"all",
"dct",
"values",
"pass",
"f",
":",
"param",
"f",
":",
"binary",
"lambda",
"predicate",
":",
"param",
"dct",
":",
":",
"return",
":",
"True",
"or",
"false"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L50-L60
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
prop_or
|
def prop_or(default, key, dct_or_obj):
"""
Ramda propOr implementation. This also resolves object attributes, so key
can be a dict prop or an attribute of dct_or_obj
:param default: Value if dct_or_obj doesn't have key_or_prop or the resolved value is null
:param key:
:param dct_or_obj:
:return:
"""
# Note that hasattr is a builtin and getattr is a ramda function, hence the different arg position
if isinstance(dict, dct_or_obj):
value = dct_or_obj[key] if has(key, dct_or_obj) else default
elif isinstance(object, dct_or_obj):
value = getattr(key, dct_or_obj) if hasattr(dct_or_obj, key) else default
else:
value = default
# 0 and False are ok, None defaults
if value == None:
return default
return value
|
python
|
def prop_or(default, key, dct_or_obj):
"""
Ramda propOr implementation. This also resolves object attributes, so key
can be a dict prop or an attribute of dct_or_obj
:param default: Value if dct_or_obj doesn't have key_or_prop or the resolved value is null
:param key:
:param dct_or_obj:
:return:
"""
# Note that hasattr is a builtin and getattr is a ramda function, hence the different arg position
if isinstance(dict, dct_or_obj):
value = dct_or_obj[key] if has(key, dct_or_obj) else default
elif isinstance(object, dct_or_obj):
value = getattr(key, dct_or_obj) if hasattr(dct_or_obj, key) else default
else:
value = default
# 0 and False are ok, None defaults
if value == None:
return default
return value
|
[
"def",
"prop_or",
"(",
"default",
",",
"key",
",",
"dct_or_obj",
")",
":",
"# Note that hasattr is a builtin and getattr is a ramda function, hence the different arg position",
"if",
"isinstance",
"(",
"dict",
",",
"dct_or_obj",
")",
":",
"value",
"=",
"dct_or_obj",
"[",
"key",
"]",
"if",
"has",
"(",
"key",
",",
"dct_or_obj",
")",
"else",
"default",
"elif",
"isinstance",
"(",
"object",
",",
"dct_or_obj",
")",
":",
"value",
"=",
"getattr",
"(",
"key",
",",
"dct_or_obj",
")",
"if",
"hasattr",
"(",
"dct_or_obj",
",",
"key",
")",
"else",
"default",
"else",
":",
"value",
"=",
"default",
"# 0 and False are ok, None defaults",
"if",
"value",
"==",
"None",
":",
"return",
"default",
"return",
"value"
] |
Ramda propOr implementation. This also resolves object attributes, so key
can be a dict prop or an attribute of dct_or_obj
:param default: Value if dct_or_obj doesn't have key_or_prop or the resolved value is null
:param key:
:param dct_or_obj:
:return:
|
[
"Ramda",
"propOr",
"implementation",
".",
"This",
"also",
"resolves",
"object",
"attributes",
"so",
"key",
"can",
"be",
"a",
"dict",
"prop",
"or",
"an",
"attribute",
"of",
"dct_or_obj",
":",
"param",
"default",
":",
"Value",
"if",
"dct_or_obj",
"doesn",
"t",
"have",
"key_or_prop",
"or",
"the",
"resolved",
"value",
"is",
"null",
":",
"param",
"key",
":",
":",
"param",
"dct_or_obj",
":",
":",
"return",
":"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L82-L101
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
prop_eq_or
|
def prop_eq_or(default, key, value, dct):
"""
Ramda propEq plus propOr implementation
:param default:
:param key:
:param value:
:param dct:
:return:
"""
return dct[key] and dct[key] == value if key in dct else default
|
python
|
def prop_eq_or(default, key, value, dct):
"""
Ramda propEq plus propOr implementation
:param default:
:param key:
:param value:
:param dct:
:return:
"""
return dct[key] and dct[key] == value if key in dct else default
|
[
"def",
"prop_eq_or",
"(",
"default",
",",
"key",
",",
"value",
",",
"dct",
")",
":",
"return",
"dct",
"[",
"key",
"]",
"and",
"dct",
"[",
"key",
"]",
"==",
"value",
"if",
"key",
"in",
"dct",
"else",
"default"
] |
Ramda propEq plus propOr implementation
:param default:
:param key:
:param value:
:param dct:
:return:
|
[
"Ramda",
"propEq",
"plus",
"propOr",
"implementation",
":",
"param",
"default",
":",
":",
"param",
"key",
":",
":",
"param",
"value",
":",
":",
"param",
"dct",
":",
":",
"return",
":"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L117-L126
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
prop_eq_or_in_or
|
def prop_eq_or_in_or(default, key, value, dct):
"""
Ramda propEq/propIn plus propOr
:param default:
:param key:
:param value:
:param dct:
:return:
"""
return has(key, dct) and \
(dct[key] == value if key in dct else (
dct[key] in value if isinstance((list, tuple), value) and not isinstance(str, value)
else default
))
|
python
|
def prop_eq_or_in_or(default, key, value, dct):
"""
Ramda propEq/propIn plus propOr
:param default:
:param key:
:param value:
:param dct:
:return:
"""
return has(key, dct) and \
(dct[key] == value if key in dct else (
dct[key] in value if isinstance((list, tuple), value) and not isinstance(str, value)
else default
))
|
[
"def",
"prop_eq_or_in_or",
"(",
"default",
",",
"key",
",",
"value",
",",
"dct",
")",
":",
"return",
"has",
"(",
"key",
",",
"dct",
")",
"and",
"(",
"dct",
"[",
"key",
"]",
"==",
"value",
"if",
"key",
"in",
"dct",
"else",
"(",
"dct",
"[",
"key",
"]",
"in",
"value",
"if",
"isinstance",
"(",
"(",
"list",
",",
"tuple",
")",
",",
"value",
")",
"and",
"not",
"isinstance",
"(",
"str",
",",
"value",
")",
"else",
"default",
")",
")"
] |
Ramda propEq/propIn plus propOr
:param default:
:param key:
:param value:
:param dct:
:return:
|
[
"Ramda",
"propEq",
"/",
"propIn",
"plus",
"propOr",
":",
"param",
"default",
":",
":",
"param",
"key",
":",
":",
"param",
"value",
":",
":",
"param",
"dct",
":",
":",
"return",
":"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L142-L155
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
item_path_or
|
def item_path_or(default, keys, dict_or_obj):
"""
Optional version of item_path with a default value. keys can be dict keys or object attributes, or a combination
:param default:
:param keys: List of keys or dot-separated string
:param dict_or_obj: A dict or obj
:return:
"""
if not keys:
raise ValueError("Expected at least one key, got {0}".format(keys))
resolved_keys = keys.split('.') if isinstance(str, keys) else keys
current_value = dict_or_obj
for key in resolved_keys:
current_value = prop_or(default, key, default_to({}, current_value))
return current_value
|
python
|
def item_path_or(default, keys, dict_or_obj):
"""
Optional version of item_path with a default value. keys can be dict keys or object attributes, or a combination
:param default:
:param keys: List of keys or dot-separated string
:param dict_or_obj: A dict or obj
:return:
"""
if not keys:
raise ValueError("Expected at least one key, got {0}".format(keys))
resolved_keys = keys.split('.') if isinstance(str, keys) else keys
current_value = dict_or_obj
for key in resolved_keys:
current_value = prop_or(default, key, default_to({}, current_value))
return current_value
|
[
"def",
"item_path_or",
"(",
"default",
",",
"keys",
",",
"dict_or_obj",
")",
":",
"if",
"not",
"keys",
":",
"raise",
"ValueError",
"(",
"\"Expected at least one key, got {0}\"",
".",
"format",
"(",
"keys",
")",
")",
"resolved_keys",
"=",
"keys",
".",
"split",
"(",
"'.'",
")",
"if",
"isinstance",
"(",
"str",
",",
"keys",
")",
"else",
"keys",
"current_value",
"=",
"dict_or_obj",
"for",
"key",
"in",
"resolved_keys",
":",
"current_value",
"=",
"prop_or",
"(",
"default",
",",
"key",
",",
"default_to",
"(",
"{",
"}",
",",
"current_value",
")",
")",
"return",
"current_value"
] |
Optional version of item_path with a default value. keys can be dict keys or object attributes, or a combination
:param default:
:param keys: List of keys or dot-separated string
:param dict_or_obj: A dict or obj
:return:
|
[
"Optional",
"version",
"of",
"item_path",
"with",
"a",
"default",
"value",
".",
"keys",
"can",
"be",
"dict",
"keys",
"or",
"object",
"attributes",
"or",
"a",
"combination",
":",
"param",
"default",
":",
":",
"param",
"keys",
":",
"List",
"of",
"keys",
"or",
"dot",
"-",
"separated",
"string",
":",
"param",
"dict_or_obj",
":",
"A",
"dict",
"or",
"obj",
":",
"return",
":"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L170-L184
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
item_str_path
|
def item_str_path(keys, dct):
"""
Given a string of path segments separated by ., splits them into an array. Int strings are converted
to numbers to serve as an array index
:param keys: e.g. 'foo.bar.1.goo'
:param dct: e.g. dict(foo=dict(bar=[dict(goo='a'), dict(goo='b')])
:return: The resolved value or an error. E.g. for above the result would be b
"""
return item_path(map(lambda segment: int(segment) if isint(segment) else segment, keys.split('.')), dct)
|
python
|
def item_str_path(keys, dct):
"""
Given a string of path segments separated by ., splits them into an array. Int strings are converted
to numbers to serve as an array index
:param keys: e.g. 'foo.bar.1.goo'
:param dct: e.g. dict(foo=dict(bar=[dict(goo='a'), dict(goo='b')])
:return: The resolved value or an error. E.g. for above the result would be b
"""
return item_path(map(lambda segment: int(segment) if isint(segment) else segment, keys.split('.')), dct)
|
[
"def",
"item_str_path",
"(",
"keys",
",",
"dct",
")",
":",
"return",
"item_path",
"(",
"map",
"(",
"lambda",
"segment",
":",
"int",
"(",
"segment",
")",
"if",
"isint",
"(",
"segment",
")",
"else",
"segment",
",",
"keys",
".",
"split",
"(",
"'.'",
")",
")",
",",
"dct",
")"
] |
Given a string of path segments separated by ., splits them into an array. Int strings are converted
to numbers to serve as an array index
:param keys: e.g. 'foo.bar.1.goo'
:param dct: e.g. dict(foo=dict(bar=[dict(goo='a'), dict(goo='b')])
:return: The resolved value or an error. E.g. for above the result would be b
|
[
"Given",
"a",
"string",
"of",
"path",
"segments",
"separated",
"by",
".",
"splits",
"them",
"into",
"an",
"array",
".",
"Int",
"strings",
"are",
"converted",
"to",
"numbers",
"to",
"serve",
"as",
"an",
"array",
"index",
":",
"param",
"keys",
":",
"e",
".",
"g",
".",
"foo",
".",
"bar",
".",
"1",
".",
"goo",
":",
"param",
"dct",
":",
"e",
".",
"g",
".",
"dict",
"(",
"foo",
"=",
"dict",
"(",
"bar",
"=",
"[",
"dict",
"(",
"goo",
"=",
"a",
")",
"dict",
"(",
"goo",
"=",
"b",
")",
"]",
")",
":",
"return",
":",
"The",
"resolved",
"value",
"or",
"an",
"error",
".",
"E",
".",
"g",
".",
"for",
"above",
"the",
"result",
"would",
"be",
"b"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L212-L220
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
item_str_path_or
|
def item_str_path_or(default, keys, dct):
"""
Given a string of path segments separated by ., splits them into an array. Int strings are converted
to numbers to serve as an array index
:param default: Value if any part yields None or undefined
:param keys: e.g. 'foo.bar.1.goo'
:param dct: e.g. dict(foo=dict(bar=[dict(goo='a'), dict(goo='b')])
:return: The resolved value or an error. E.g. for above the result would be b
"""
return item_path_or(default, map(lambda segment: int(segment) if isint(segment) else segment, keys.split('.')), dct)
|
python
|
def item_str_path_or(default, keys, dct):
"""
Given a string of path segments separated by ., splits them into an array. Int strings are converted
to numbers to serve as an array index
:param default: Value if any part yields None or undefined
:param keys: e.g. 'foo.bar.1.goo'
:param dct: e.g. dict(foo=dict(bar=[dict(goo='a'), dict(goo='b')])
:return: The resolved value or an error. E.g. for above the result would be b
"""
return item_path_or(default, map(lambda segment: int(segment) if isint(segment) else segment, keys.split('.')), dct)
|
[
"def",
"item_str_path_or",
"(",
"default",
",",
"keys",
",",
"dct",
")",
":",
"return",
"item_path_or",
"(",
"default",
",",
"map",
"(",
"lambda",
"segment",
":",
"int",
"(",
"segment",
")",
"if",
"isint",
"(",
"segment",
")",
"else",
"segment",
",",
"keys",
".",
"split",
"(",
"'.'",
")",
")",
",",
"dct",
")"
] |
Given a string of path segments separated by ., splits them into an array. Int strings are converted
to numbers to serve as an array index
:param default: Value if any part yields None or undefined
:param keys: e.g. 'foo.bar.1.goo'
:param dct: e.g. dict(foo=dict(bar=[dict(goo='a'), dict(goo='b')])
:return: The resolved value or an error. E.g. for above the result would be b
|
[
"Given",
"a",
"string",
"of",
"path",
"segments",
"separated",
"by",
".",
"splits",
"them",
"into",
"an",
"array",
".",
"Int",
"strings",
"are",
"converted",
"to",
"numbers",
"to",
"serve",
"as",
"an",
"array",
"index",
":",
"param",
"default",
":",
"Value",
"if",
"any",
"part",
"yields",
"None",
"or",
"undefined",
":",
"param",
"keys",
":",
"e",
".",
"g",
".",
"foo",
".",
"bar",
".",
"1",
".",
"goo",
":",
"param",
"dct",
":",
"e",
".",
"g",
".",
"dict",
"(",
"foo",
"=",
"dict",
"(",
"bar",
"=",
"[",
"dict",
"(",
"goo",
"=",
"a",
")",
"dict",
"(",
"goo",
"=",
"b",
")",
"]",
")",
":",
"return",
":",
"The",
"resolved",
"value",
"or",
"an",
"error",
".",
"E",
".",
"g",
".",
"for",
"above",
"the",
"result",
"would",
"be",
"b"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L224-L233
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
has
|
def has(prop, object_or_dct):
"""
Implementation of ramda has
:param prop:
:param object_or_dct:
:return:
"""
return prop in object_or_dct if isinstance(dict, object_or_dct) else hasattr(object_or_dct, prop)
|
python
|
def has(prop, object_or_dct):
"""
Implementation of ramda has
:param prop:
:param object_or_dct:
:return:
"""
return prop in object_or_dct if isinstance(dict, object_or_dct) else hasattr(object_or_dct, prop)
|
[
"def",
"has",
"(",
"prop",
",",
"object_or_dct",
")",
":",
"return",
"prop",
"in",
"object_or_dct",
"if",
"isinstance",
"(",
"dict",
",",
"object_or_dct",
")",
"else",
"hasattr",
"(",
"object_or_dct",
",",
"prop",
")"
] |
Implementation of ramda has
:param prop:
:param object_or_dct:
:return:
|
[
"Implementation",
"of",
"ramda",
"has",
":",
"param",
"prop",
":",
":",
"param",
"object_or_dct",
":",
":",
"return",
":"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L237-L244
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
omit_deep
|
def omit_deep(omit_props, dct):
"""
Implementation of omit that recurses. This tests the same keys at every level of dict and in lists
:param omit_props:
:param dct:
:return:
"""
omit_partial = omit_deep(omit_props)
if isinstance(dict, dct):
# Filter out keys and then recurse on each value that wasn't filtered out
return map_dict(omit_partial, compact_dict(omit(omit_props, dct)))
if isinstance((list, tuple), dct):
# run omit_deep on each value
return map(omit_partial, dct)
# scalar
return dct
|
python
|
def omit_deep(omit_props, dct):
"""
Implementation of omit that recurses. This tests the same keys at every level of dict and in lists
:param omit_props:
:param dct:
:return:
"""
omit_partial = omit_deep(omit_props)
if isinstance(dict, dct):
# Filter out keys and then recurse on each value that wasn't filtered out
return map_dict(omit_partial, compact_dict(omit(omit_props, dct)))
if isinstance((list, tuple), dct):
# run omit_deep on each value
return map(omit_partial, dct)
# scalar
return dct
|
[
"def",
"omit_deep",
"(",
"omit_props",
",",
"dct",
")",
":",
"omit_partial",
"=",
"omit_deep",
"(",
"omit_props",
")",
"if",
"isinstance",
"(",
"dict",
",",
"dct",
")",
":",
"# Filter out keys and then recurse on each value that wasn't filtered out",
"return",
"map_dict",
"(",
"omit_partial",
",",
"compact_dict",
"(",
"omit",
"(",
"omit_props",
",",
"dct",
")",
")",
")",
"if",
"isinstance",
"(",
"(",
"list",
",",
"tuple",
")",
",",
"dct",
")",
":",
"# run omit_deep on each value",
"return",
"map",
"(",
"omit_partial",
",",
"dct",
")",
"# scalar",
"return",
"dct"
] |
Implementation of omit that recurses. This tests the same keys at every level of dict and in lists
:param omit_props:
:param dct:
:return:
|
[
"Implementation",
"of",
"omit",
"that",
"recurses",
".",
"This",
"tests",
"the",
"same",
"keys",
"at",
"every",
"level",
"of",
"dict",
"and",
"in",
"lists",
":",
"param",
"omit_props",
":",
":",
"param",
"dct",
":",
":",
"return",
":"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L259-L276
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
pick_deep
|
def pick_deep(pick_dct, dct):
"""
Implementation of pick that recurses. This tests the same keys at every level of dict and in lists
:param pick_dct: Deep dict matching some portion of dct.
:param dct: Dct to filter. Any key matching pick_dct pass through. It doesn't matter what the pick_dct value
is as long as the key exists. Arrays also pass through if the have matching values in pick_dct
:return:
"""
if isinstance(dict, dct):
# Filter out keys and then recurse on each value that wasn't filtered out
return map_with_obj(
lambda k, v: pick_deep(prop(k, pick_dct), v),
pick(keys(pick_dct), dct)
)
if isinstance((list, tuple), dct):
# run pick_deep on each value
return map(
lambda tup: pick_deep(*tup),
list(zip(pick_dct or [], dct))
)
# scalar
return dct
|
python
|
def pick_deep(pick_dct, dct):
"""
Implementation of pick that recurses. This tests the same keys at every level of dict and in lists
:param pick_dct: Deep dict matching some portion of dct.
:param dct: Dct to filter. Any key matching pick_dct pass through. It doesn't matter what the pick_dct value
is as long as the key exists. Arrays also pass through if the have matching values in pick_dct
:return:
"""
if isinstance(dict, dct):
# Filter out keys and then recurse on each value that wasn't filtered out
return map_with_obj(
lambda k, v: pick_deep(prop(k, pick_dct), v),
pick(keys(pick_dct), dct)
)
if isinstance((list, tuple), dct):
# run pick_deep on each value
return map(
lambda tup: pick_deep(*tup),
list(zip(pick_dct or [], dct))
)
# scalar
return dct
|
[
"def",
"pick_deep",
"(",
"pick_dct",
",",
"dct",
")",
":",
"if",
"isinstance",
"(",
"dict",
",",
"dct",
")",
":",
"# Filter out keys and then recurse on each value that wasn't filtered out",
"return",
"map_with_obj",
"(",
"lambda",
"k",
",",
"v",
":",
"pick_deep",
"(",
"prop",
"(",
"k",
",",
"pick_dct",
")",
",",
"v",
")",
",",
"pick",
"(",
"keys",
"(",
"pick_dct",
")",
",",
"dct",
")",
")",
"if",
"isinstance",
"(",
"(",
"list",
",",
"tuple",
")",
",",
"dct",
")",
":",
"# run pick_deep on each value",
"return",
"map",
"(",
"lambda",
"tup",
":",
"pick_deep",
"(",
"*",
"tup",
")",
",",
"list",
"(",
"zip",
"(",
"pick_dct",
"or",
"[",
"]",
",",
"dct",
")",
")",
")",
"# scalar",
"return",
"dct"
] |
Implementation of pick that recurses. This tests the same keys at every level of dict and in lists
:param pick_dct: Deep dict matching some portion of dct.
:param dct: Dct to filter. Any key matching pick_dct pass through. It doesn't matter what the pick_dct value
is as long as the key exists. Arrays also pass through if the have matching values in pick_dct
:return:
|
[
"Implementation",
"of",
"pick",
"that",
"recurses",
".",
"This",
"tests",
"the",
"same",
"keys",
"at",
"every",
"level",
"of",
"dict",
"and",
"in",
"lists",
":",
"param",
"pick_dct",
":",
"Deep",
"dict",
"matching",
"some",
"portion",
"of",
"dct",
".",
":",
"param",
"dct",
":",
"Dct",
"to",
"filter",
".",
"Any",
"key",
"matching",
"pick_dct",
"pass",
"through",
".",
"It",
"doesn",
"t",
"matter",
"what",
"the",
"pick_dct",
"value",
"is",
"as",
"long",
"as",
"the",
"key",
"exists",
".",
"Arrays",
"also",
"pass",
"through",
"if",
"the",
"have",
"matching",
"values",
"in",
"pick_dct",
":",
"return",
":"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L280-L302
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
map_with_obj_deep
|
def map_with_obj_deep(f, dct):
"""
Implementation of map that recurses. This tests the same keys at every level of dict and in lists
:param f: 2-ary function expecting a key and value and returns a modified value
:param dct: Dict for deep processing
:return: Modified dct with matching props mapped
"""
return _map_deep(lambda k, v: [k, f(k, v)], dct)
|
python
|
def map_with_obj_deep(f, dct):
"""
Implementation of map that recurses. This tests the same keys at every level of dict and in lists
:param f: 2-ary function expecting a key and value and returns a modified value
:param dct: Dict for deep processing
:return: Modified dct with matching props mapped
"""
return _map_deep(lambda k, v: [k, f(k, v)], dct)
|
[
"def",
"map_with_obj_deep",
"(",
"f",
",",
"dct",
")",
":",
"return",
"_map_deep",
"(",
"lambda",
"k",
",",
"v",
":",
"[",
"k",
",",
"f",
"(",
"k",
",",
"v",
")",
"]",
",",
"dct",
")"
] |
Implementation of map that recurses. This tests the same keys at every level of dict and in lists
:param f: 2-ary function expecting a key and value and returns a modified value
:param dct: Dict for deep processing
:return: Modified dct with matching props mapped
|
[
"Implementation",
"of",
"map",
"that",
"recurses",
".",
"This",
"tests",
"the",
"same",
"keys",
"at",
"every",
"level",
"of",
"dict",
"and",
"in",
"lists",
":",
"param",
"f",
":",
"2",
"-",
"ary",
"function",
"expecting",
"a",
"key",
"and",
"value",
"and",
"returns",
"a",
"modified",
"value",
":",
"param",
"dct",
":",
"Dict",
"for",
"deep",
"processing",
":",
"return",
":",
"Modified",
"dct",
"with",
"matching",
"props",
"mapped"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L306-L313
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
map_keys_deep
|
def map_keys_deep(f, dct):
"""
Implementation of map that recurses. This tests the same keys at every level of dict and in lists
:param f: 2-ary function expecting a key and value and returns a modified key
:param dct: Dict for deep processing
:return: Modified dct with matching props mapped
"""
return _map_deep(lambda k, v: [f(k, v), v], dct)
|
python
|
def map_keys_deep(f, dct):
"""
Implementation of map that recurses. This tests the same keys at every level of dict and in lists
:param f: 2-ary function expecting a key and value and returns a modified key
:param dct: Dict for deep processing
:return: Modified dct with matching props mapped
"""
return _map_deep(lambda k, v: [f(k, v), v], dct)
|
[
"def",
"map_keys_deep",
"(",
"f",
",",
"dct",
")",
":",
"return",
"_map_deep",
"(",
"lambda",
"k",
",",
"v",
":",
"[",
"f",
"(",
"k",
",",
"v",
")",
",",
"v",
"]",
",",
"dct",
")"
] |
Implementation of map that recurses. This tests the same keys at every level of dict and in lists
:param f: 2-ary function expecting a key and value and returns a modified key
:param dct: Dict for deep processing
:return: Modified dct with matching props mapped
|
[
"Implementation",
"of",
"map",
"that",
"recurses",
".",
"This",
"tests",
"the",
"same",
"keys",
"at",
"every",
"level",
"of",
"dict",
"and",
"in",
"lists",
":",
"param",
"f",
":",
"2",
"-",
"ary",
"function",
"expecting",
"a",
"key",
"and",
"value",
"and",
"returns",
"a",
"modified",
"key",
":",
"param",
"dct",
":",
"Dict",
"for",
"deep",
"processing",
":",
"return",
":",
"Modified",
"dct",
"with",
"matching",
"props",
"mapped"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L317-L324
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
_map_deep
|
def _map_deep(f, dct):
"""
Used by map_deep and map_keys_deep
:param map_props:
:param f: Expects a key and value and returns a pair
:param dct:
:return:
"""
if isinstance(dict, dct):
return map_key_values(lambda k, v: f(k, _map_deep(f, v)), dct)
elif isinstance((list, tuple), dct):
# Call each value with the index as the key. Since f returns a key value discard the key that it returns
# Even if this is called with map_keys_deep we can't manipulate index values here
return map(lambda iv: f(iv[0], _map_deep(f, iv[1]))[1], enumerate(dct))
# scalar
return dct
|
python
|
def _map_deep(f, dct):
"""
Used by map_deep and map_keys_deep
:param map_props:
:param f: Expects a key and value and returns a pair
:param dct:
:return:
"""
if isinstance(dict, dct):
return map_key_values(lambda k, v: f(k, _map_deep(f, v)), dct)
elif isinstance((list, tuple), dct):
# Call each value with the index as the key. Since f returns a key value discard the key that it returns
# Even if this is called with map_keys_deep we can't manipulate index values here
return map(lambda iv: f(iv[0], _map_deep(f, iv[1]))[1], enumerate(dct))
# scalar
return dct
|
[
"def",
"_map_deep",
"(",
"f",
",",
"dct",
")",
":",
"if",
"isinstance",
"(",
"dict",
",",
"dct",
")",
":",
"return",
"map_key_values",
"(",
"lambda",
"k",
",",
"v",
":",
"f",
"(",
"k",
",",
"_map_deep",
"(",
"f",
",",
"v",
")",
")",
",",
"dct",
")",
"elif",
"isinstance",
"(",
"(",
"list",
",",
"tuple",
")",
",",
"dct",
")",
":",
"# Call each value with the index as the key. Since f returns a key value discard the key that it returns",
"# Even if this is called with map_keys_deep we can't manipulate index values here",
"return",
"map",
"(",
"lambda",
"iv",
":",
"f",
"(",
"iv",
"[",
"0",
"]",
",",
"_map_deep",
"(",
"f",
",",
"iv",
"[",
"1",
"]",
")",
")",
"[",
"1",
"]",
",",
"enumerate",
"(",
"dct",
")",
")",
"# scalar",
"return",
"dct"
] |
Used by map_deep and map_keys_deep
:param map_props:
:param f: Expects a key and value and returns a pair
:param dct:
:return:
|
[
"Used",
"by",
"map_deep",
"and",
"map_keys_deep",
":",
"param",
"map_props",
":",
":",
"param",
"f",
":",
"Expects",
"a",
"key",
"and",
"value",
"and",
"returns",
"a",
"pair",
":",
"param",
"dct",
":",
":",
"return",
":"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L327-L343
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
dict_matches_params_deep
|
def dict_matches_params_deep(params_dct, dct):
"""
Filters deeply by comparing dct to filter_dct's value at each depth. Whenever a mismatch occurs the whole
thing returns false
:param params_dct: dict matching any portion of dct. E.g. filter_dct = {foo: {bar: 1}} would allow
{foo: {bar: 1, car: 2}} to pass, {foo: {bar: 2}} would fail, {goo: ...} would fail
:param dct: Dict for deep processing
:return: True if all pass else false
"""
def recurse_if_param_exists(params, key, value):
"""
If a param[key] exists, recurse. Otherwise return True since there is no param to contest value
:param params:
:param key:
:param value:
:return:
"""
return dict_matches_params_deep(
prop(key, params),
value
) if has(key, params) else True
def recurse_if_array_param_exists(params, index, value):
"""
If a param[key] exists, recurse. Otherwise return True since there is no param to contest value
:param params:
:param index:
:param value:
:return:
"""
return dict_matches_params_deep(
params[index],
value
) if isinstance((list, tuple), params_dct) and index < length(params_dct) else True
if isinstance(dict, dct):
# Filter out keys and then recurse on each value
return all_pass_dict(
# Recurse on each value if there is a corresponding filter_dct[key]. If not we pass
lambda key, value: recurse_if_param_exists(params_dct, key, value),
# We shallow merge, giving dct priority with (hopefully) unmatchable values
merge(map_with_obj(lambda k, v: 1 / (-e * pi), params_dct), dct)
)
if isinstance((list, tuple), dct):
if isinstance((list, tuple), params_dct) and length(dct) < length(params_dct):
# if there are more param items then dct items fail
return False
# run map_deep on each value
return all(map(
lambda ivalue: recurse_if_array_param_exists(params_dct, *ivalue),
enumerate(dct)
))
# scalar. Not that anything not truthy, False, None, 0, are considered equal
return params_dct == dct
|
python
|
def dict_matches_params_deep(params_dct, dct):
"""
Filters deeply by comparing dct to filter_dct's value at each depth. Whenever a mismatch occurs the whole
thing returns false
:param params_dct: dict matching any portion of dct. E.g. filter_dct = {foo: {bar: 1}} would allow
{foo: {bar: 1, car: 2}} to pass, {foo: {bar: 2}} would fail, {goo: ...} would fail
:param dct: Dict for deep processing
:return: True if all pass else false
"""
def recurse_if_param_exists(params, key, value):
"""
If a param[key] exists, recurse. Otherwise return True since there is no param to contest value
:param params:
:param key:
:param value:
:return:
"""
return dict_matches_params_deep(
prop(key, params),
value
) if has(key, params) else True
def recurse_if_array_param_exists(params, index, value):
"""
If a param[key] exists, recurse. Otherwise return True since there is no param to contest value
:param params:
:param index:
:param value:
:return:
"""
return dict_matches_params_deep(
params[index],
value
) if isinstance((list, tuple), params_dct) and index < length(params_dct) else True
if isinstance(dict, dct):
# Filter out keys and then recurse on each value
return all_pass_dict(
# Recurse on each value if there is a corresponding filter_dct[key]. If not we pass
lambda key, value: recurse_if_param_exists(params_dct, key, value),
# We shallow merge, giving dct priority with (hopefully) unmatchable values
merge(map_with_obj(lambda k, v: 1 / (-e * pi), params_dct), dct)
)
if isinstance((list, tuple), dct):
if isinstance((list, tuple), params_dct) and length(dct) < length(params_dct):
# if there are more param items then dct items fail
return False
# run map_deep on each value
return all(map(
lambda ivalue: recurse_if_array_param_exists(params_dct, *ivalue),
enumerate(dct)
))
# scalar. Not that anything not truthy, False, None, 0, are considered equal
return params_dct == dct
|
[
"def",
"dict_matches_params_deep",
"(",
"params_dct",
",",
"dct",
")",
":",
"def",
"recurse_if_param_exists",
"(",
"params",
",",
"key",
",",
"value",
")",
":",
"\"\"\"\n If a param[key] exists, recurse. Otherwise return True since there is no param to contest value\n :param params:\n :param key:\n :param value:\n :return:\n \"\"\"",
"return",
"dict_matches_params_deep",
"(",
"prop",
"(",
"key",
",",
"params",
")",
",",
"value",
")",
"if",
"has",
"(",
"key",
",",
"params",
")",
"else",
"True",
"def",
"recurse_if_array_param_exists",
"(",
"params",
",",
"index",
",",
"value",
")",
":",
"\"\"\"\n If a param[key] exists, recurse. Otherwise return True since there is no param to contest value\n :param params:\n :param index:\n :param value:\n :return:\n \"\"\"",
"return",
"dict_matches_params_deep",
"(",
"params",
"[",
"index",
"]",
",",
"value",
")",
"if",
"isinstance",
"(",
"(",
"list",
",",
"tuple",
")",
",",
"params_dct",
")",
"and",
"index",
"<",
"length",
"(",
"params_dct",
")",
"else",
"True",
"if",
"isinstance",
"(",
"dict",
",",
"dct",
")",
":",
"# Filter out keys and then recurse on each value",
"return",
"all_pass_dict",
"(",
"# Recurse on each value if there is a corresponding filter_dct[key]. If not we pass",
"lambda",
"key",
",",
"value",
":",
"recurse_if_param_exists",
"(",
"params_dct",
",",
"key",
",",
"value",
")",
",",
"# We shallow merge, giving dct priority with (hopefully) unmatchable values",
"merge",
"(",
"map_with_obj",
"(",
"lambda",
"k",
",",
"v",
":",
"1",
"/",
"(",
"-",
"e",
"*",
"pi",
")",
",",
"params_dct",
")",
",",
"dct",
")",
")",
"if",
"isinstance",
"(",
"(",
"list",
",",
"tuple",
")",
",",
"dct",
")",
":",
"if",
"isinstance",
"(",
"(",
"list",
",",
"tuple",
")",
",",
"params_dct",
")",
"and",
"length",
"(",
"dct",
")",
"<",
"length",
"(",
"params_dct",
")",
":",
"# if there are more param items then dct items fail",
"return",
"False",
"# run map_deep on each value",
"return",
"all",
"(",
"map",
"(",
"lambda",
"ivalue",
":",
"recurse_if_array_param_exists",
"(",
"params_dct",
",",
"*",
"ivalue",
")",
",",
"enumerate",
"(",
"dct",
")",
")",
")",
"# scalar. Not that anything not truthy, False, None, 0, are considered equal",
"return",
"params_dct",
"==",
"dct"
] |
Filters deeply by comparing dct to filter_dct's value at each depth. Whenever a mismatch occurs the whole
thing returns false
:param params_dct: dict matching any portion of dct. E.g. filter_dct = {foo: {bar: 1}} would allow
{foo: {bar: 1, car: 2}} to pass, {foo: {bar: 2}} would fail, {goo: ...} would fail
:param dct: Dict for deep processing
:return: True if all pass else false
|
[
"Filters",
"deeply",
"by",
"comparing",
"dct",
"to",
"filter_dct",
"s",
"value",
"at",
"each",
"depth",
".",
"Whenever",
"a",
"mismatch",
"occurs",
"the",
"whole",
"thing",
"returns",
"false",
":",
"param",
"params_dct",
":",
"dict",
"matching",
"any",
"portion",
"of",
"dct",
".",
"E",
".",
"g",
".",
"filter_dct",
"=",
"{",
"foo",
":",
"{",
"bar",
":",
"1",
"}}",
"would",
"allow",
"{",
"foo",
":",
"{",
"bar",
":",
"1",
"car",
":",
"2",
"}}",
"to",
"pass",
"{",
"foo",
":",
"{",
"bar",
":",
"2",
"}}",
"would",
"fail",
"{",
"goo",
":",
"...",
"}",
"would",
"fail",
":",
"param",
"dct",
":",
"Dict",
"for",
"deep",
"processing",
":",
"return",
":",
"True",
"if",
"all",
"pass",
"else",
"false"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L347-L402
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
join
|
def join(strin, items):
"""
Ramda implementation of join
:param strin:
:param items:
:return:
"""
return strin.join(map(lambda item: str(item), items))
|
python
|
def join(strin, items):
"""
Ramda implementation of join
:param strin:
:param items:
:return:
"""
return strin.join(map(lambda item: str(item), items))
|
[
"def",
"join",
"(",
"strin",
",",
"items",
")",
":",
"return",
"strin",
".",
"join",
"(",
"map",
"(",
"lambda",
"item",
":",
"str",
"(",
"item",
")",
",",
"items",
")",
")"
] |
Ramda implementation of join
:param strin:
:param items:
:return:
|
[
"Ramda",
"implementation",
"of",
"join",
":",
"param",
"strin",
":",
":",
"param",
"items",
":",
":",
"return",
":"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L406-L413
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
map_with_obj
|
def map_with_obj(f, dct):
"""
Implementation of Ramda's mapObjIndexed without the final argument.
This returns the original key with the mapped value. Use map_key_values to modify the keys too
:param f: Called with a key and value
:param dct:
:return {dict}: Keyed by the original key, valued by the mapped value
"""
f_dict = {}
for k, v in dct.items():
f_dict[k] = f(k, v)
return f_dict
|
python
|
def map_with_obj(f, dct):
"""
Implementation of Ramda's mapObjIndexed without the final argument.
This returns the original key with the mapped value. Use map_key_values to modify the keys too
:param f: Called with a key and value
:param dct:
:return {dict}: Keyed by the original key, valued by the mapped value
"""
f_dict = {}
for k, v in dct.items():
f_dict[k] = f(k, v)
return f_dict
|
[
"def",
"map_with_obj",
"(",
"f",
",",
"dct",
")",
":",
"f_dict",
"=",
"{",
"}",
"for",
"k",
",",
"v",
"in",
"dct",
".",
"items",
"(",
")",
":",
"f_dict",
"[",
"k",
"]",
"=",
"f",
"(",
"k",
",",
"v",
")",
"return",
"f_dict"
] |
Implementation of Ramda's mapObjIndexed without the final argument.
This returns the original key with the mapped value. Use map_key_values to modify the keys too
:param f: Called with a key and value
:param dct:
:return {dict}: Keyed by the original key, valued by the mapped value
|
[
"Implementation",
"of",
"Ramda",
"s",
"mapObjIndexed",
"without",
"the",
"final",
"argument",
".",
"This",
"returns",
"the",
"original",
"key",
"with",
"the",
"mapped",
"value",
".",
"Use",
"map_key_values",
"to",
"modify",
"the",
"keys",
"too",
":",
"param",
"f",
":",
"Called",
"with",
"a",
"key",
"and",
"value",
":",
"param",
"dct",
":",
":",
"return",
"{",
"dict",
"}",
":",
"Keyed",
"by",
"the",
"original",
"key",
"valued",
"by",
"the",
"mapped",
"value"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L462-L473
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
map_keys
|
def map_keys(f, dct):
"""
Calls f with each key of dct, possibly returning a modified key. Values are unchanged
:param f: Called with each key and returns the same key or a modified key
:param dct:
:return: A dct with keys possibly modifed but values unchanged
"""
f_dict = {}
for k, v in dct.items():
f_dict[f(k)] = v
return f_dict
|
python
|
def map_keys(f, dct):
"""
Calls f with each key of dct, possibly returning a modified key. Values are unchanged
:param f: Called with each key and returns the same key or a modified key
:param dct:
:return: A dct with keys possibly modifed but values unchanged
"""
f_dict = {}
for k, v in dct.items():
f_dict[f(k)] = v
return f_dict
|
[
"def",
"map_keys",
"(",
"f",
",",
"dct",
")",
":",
"f_dict",
"=",
"{",
"}",
"for",
"k",
",",
"v",
"in",
"dct",
".",
"items",
"(",
")",
":",
"f_dict",
"[",
"f",
"(",
"k",
")",
"]",
"=",
"v",
"return",
"f_dict"
] |
Calls f with each key of dct, possibly returning a modified key. Values are unchanged
:param f: Called with each key and returns the same key or a modified key
:param dct:
:return: A dct with keys possibly modifed but values unchanged
|
[
"Calls",
"f",
"with",
"each",
"key",
"of",
"dct",
"possibly",
"returning",
"a",
"modified",
"key",
".",
"Values",
"are",
"unchanged",
":",
"param",
"f",
":",
"Called",
"with",
"each",
"key",
"and",
"returns",
"the",
"same",
"key",
"or",
"a",
"modified",
"key",
":",
"param",
"dct",
":",
":",
"return",
":",
"A",
"dct",
"with",
"keys",
"possibly",
"modifed",
"but",
"values",
"unchanged"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L499-L509
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
map_keys_with_obj
|
def map_keys_with_obj(f, dct):
"""
Calls f with each key and value of dct, possibly returning a modified key. Values are unchanged
:param f: Called with each key and value and returns the same key or a modified key
:param dct:
:return: A dct with keys possibly modifed but values unchanged
"""
f_dict = {}
for k, v in dct.items():
f_dict[f(k, v)] = v
return f_dict
|
python
|
def map_keys_with_obj(f, dct):
"""
Calls f with each key and value of dct, possibly returning a modified key. Values are unchanged
:param f: Called with each key and value and returns the same key or a modified key
:param dct:
:return: A dct with keys possibly modifed but values unchanged
"""
f_dict = {}
for k, v in dct.items():
f_dict[f(k, v)] = v
return f_dict
|
[
"def",
"map_keys_with_obj",
"(",
"f",
",",
"dct",
")",
":",
"f_dict",
"=",
"{",
"}",
"for",
"k",
",",
"v",
"in",
"dct",
".",
"items",
"(",
")",
":",
"f_dict",
"[",
"f",
"(",
"k",
",",
"v",
")",
"]",
"=",
"v",
"return",
"f_dict"
] |
Calls f with each key and value of dct, possibly returning a modified key. Values are unchanged
:param f: Called with each key and value and returns the same key or a modified key
:param dct:
:return: A dct with keys possibly modifed but values unchanged
|
[
"Calls",
"f",
"with",
"each",
"key",
"and",
"value",
"of",
"dct",
"possibly",
"returning",
"a",
"modified",
"key",
".",
"Values",
"are",
"unchanged",
":",
"param",
"f",
":",
"Called",
"with",
"each",
"key",
"and",
"value",
"and",
"returns",
"the",
"same",
"key",
"or",
"a",
"modified",
"key",
":",
"param",
"dct",
":",
":",
"return",
":",
"A",
"dct",
"with",
"keys",
"possibly",
"modifed",
"but",
"values",
"unchanged"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L513-L523
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
merge_deep
|
def merge_deep(dct1, dct2, merger=None):
"""
Deep merge by this spec below
:param dct1:
:param dct2:
:param merger Optional merger
:return:
"""
my_merger = merger or Merger(
# pass in a list of tuples,with the
# strategies you are looking to apply
# to each type.
[
(list, ["append"]),
(dict, ["merge"])
],
# next, choose the fallback strategies,
# applied to all other types:
["override"],
# finally, choose the strategies in
# the case where the types conflict:
["override"]
)
return my_merger.merge(dct1, dct2)
|
python
|
def merge_deep(dct1, dct2, merger=None):
"""
Deep merge by this spec below
:param dct1:
:param dct2:
:param merger Optional merger
:return:
"""
my_merger = merger or Merger(
# pass in a list of tuples,with the
# strategies you are looking to apply
# to each type.
[
(list, ["append"]),
(dict, ["merge"])
],
# next, choose the fallback strategies,
# applied to all other types:
["override"],
# finally, choose the strategies in
# the case where the types conflict:
["override"]
)
return my_merger.merge(dct1, dct2)
|
[
"def",
"merge_deep",
"(",
"dct1",
",",
"dct2",
",",
"merger",
"=",
"None",
")",
":",
"my_merger",
"=",
"merger",
"or",
"Merger",
"(",
"# pass in a list of tuples,with the",
"# strategies you are looking to apply",
"# to each type.",
"[",
"(",
"list",
",",
"[",
"\"append\"",
"]",
")",
",",
"(",
"dict",
",",
"[",
"\"merge\"",
"]",
")",
"]",
",",
"# next, choose the fallback strategies,",
"# applied to all other types:",
"[",
"\"override\"",
"]",
",",
"# finally, choose the strategies in",
"# the case where the types conflict:",
"[",
"\"override\"",
"]",
")",
"return",
"my_merger",
".",
"merge",
"(",
"dct1",
",",
"dct2",
")"
] |
Deep merge by this spec below
:param dct1:
:param dct2:
:param merger Optional merger
:return:
|
[
"Deep",
"merge",
"by",
"this",
"spec",
"below",
":",
"param",
"dct1",
":",
":",
"param",
"dct2",
":",
":",
"param",
"merger",
"Optional",
"merger",
":",
"return",
":"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L538-L561
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
merge_all
|
def merge_all(dcts):
"""
Shallow merge all the dcts
:param dcts:
:return:
"""
return reduce(
lambda accum, dct: merge(accum, dct),
dict(),
dcts
)
|
python
|
def merge_all(dcts):
"""
Shallow merge all the dcts
:param dcts:
:return:
"""
return reduce(
lambda accum, dct: merge(accum, dct),
dict(),
dcts
)
|
[
"def",
"merge_all",
"(",
"dcts",
")",
":",
"return",
"reduce",
"(",
"lambda",
"accum",
",",
"dct",
":",
"merge",
"(",
"accum",
",",
"dct",
")",
",",
"dict",
"(",
")",
",",
"dcts",
")"
] |
Shallow merge all the dcts
:param dcts:
:return:
|
[
"Shallow",
"merge",
"all",
"the",
"dcts",
":",
"param",
"dcts",
":",
":",
"return",
":"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L564-L574
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
from_pairs_to_array_values
|
def from_pairs_to_array_values(pairs):
"""
Like from pairs but combines duplicate key values into arrays
:param pairs:
:return:
"""
result = {}
for pair in pairs:
result[pair[0]] = concat(prop_or([], pair[0], result), [pair[1]])
return result
|
python
|
def from_pairs_to_array_values(pairs):
"""
Like from pairs but combines duplicate key values into arrays
:param pairs:
:return:
"""
result = {}
for pair in pairs:
result[pair[0]] = concat(prop_or([], pair[0], result), [pair[1]])
return result
|
[
"def",
"from_pairs_to_array_values",
"(",
"pairs",
")",
":",
"result",
"=",
"{",
"}",
"for",
"pair",
"in",
"pairs",
":",
"result",
"[",
"pair",
"[",
"0",
"]",
"]",
"=",
"concat",
"(",
"prop_or",
"(",
"[",
"]",
",",
"pair",
"[",
"0",
"]",
",",
"result",
")",
",",
"[",
"pair",
"[",
"1",
"]",
"]",
")",
"return",
"result"
] |
Like from pairs but combines duplicate key values into arrays
:param pairs:
:return:
|
[
"Like",
"from",
"pairs",
"but",
"combines",
"duplicate",
"key",
"values",
"into",
"arrays",
":",
"param",
"pairs",
":",
":",
"return",
":"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L658-L667
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
map_prop_value_as_index
|
def map_prop_value_as_index(prp, lst):
"""
Returns the given prop of each item in the list
:param prp:
:param lst:
:return:
"""
return from_pairs(map(lambda item: (prop(prp, item), item), lst))
|
python
|
def map_prop_value_as_index(prp, lst):
"""
Returns the given prop of each item in the list
:param prp:
:param lst:
:return:
"""
return from_pairs(map(lambda item: (prop(prp, item), item), lst))
|
[
"def",
"map_prop_value_as_index",
"(",
"prp",
",",
"lst",
")",
":",
"return",
"from_pairs",
"(",
"map",
"(",
"lambda",
"item",
":",
"(",
"prop",
"(",
"prp",
",",
"item",
")",
",",
"item",
")",
",",
"lst",
")",
")"
] |
Returns the given prop of each item in the list
:param prp:
:param lst:
:return:
|
[
"Returns",
"the",
"given",
"prop",
"of",
"each",
"item",
"in",
"the",
"list",
":",
"param",
"prp",
":",
":",
"param",
"lst",
":",
":",
"return",
":"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L698-L705
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
key_string_to_lens_path
|
def key_string_to_lens_path(key_string):
"""
Converts a key string like 'foo.bar.0.wopper' to ['foo', 'bar', 0, 'wopper']
:param {String} keyString The dot-separated key string
:return {[String]} The lens array containing string or integers
"""
return map(
if_else(
isinstance(int),
# convert to int
lambda s: int(s),
# Leave the string alone
identity
),
key_string.split('.')
)
|
python
|
def key_string_to_lens_path(key_string):
"""
Converts a key string like 'foo.bar.0.wopper' to ['foo', 'bar', 0, 'wopper']
:param {String} keyString The dot-separated key string
:return {[String]} The lens array containing string or integers
"""
return map(
if_else(
isinstance(int),
# convert to int
lambda s: int(s),
# Leave the string alone
identity
),
key_string.split('.')
)
|
[
"def",
"key_string_to_lens_path",
"(",
"key_string",
")",
":",
"return",
"map",
"(",
"if_else",
"(",
"isinstance",
"(",
"int",
")",
",",
"# convert to int",
"lambda",
"s",
":",
"int",
"(",
"s",
")",
",",
"# Leave the string alone",
"identity",
")",
",",
"key_string",
".",
"split",
"(",
"'.'",
")",
")"
] |
Converts a key string like 'foo.bar.0.wopper' to ['foo', 'bar', 0, 'wopper']
:param {String} keyString The dot-separated key string
:return {[String]} The lens array containing string or integers
|
[
"Converts",
"a",
"key",
"string",
"like",
"foo",
".",
"bar",
".",
"0",
".",
"wopper",
"to",
"[",
"foo",
"bar",
"0",
"wopper",
"]",
":",
"param",
"{",
"String",
"}",
"keyString",
"The",
"dot",
"-",
"separated",
"key",
"string",
":",
"return",
"{",
"[",
"String",
"]",
"}",
"The",
"lens",
"array",
"containing",
"string",
"or",
"integers"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L791-L806
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
fake_lens_path_view
|
def fake_lens_path_view(lens_path, obj):
"""
Simulates R.view with a lens_path since we don't have lens functions
:param lens_path: Array of string paths
:param obj: Object containing the given path
:return: The value at the path or None
"""
segment = head(lens_path)
return if_else(
both(lambda _: identity(segment), has(segment)),
# Recurse on the rest of the path
compose(fake_lens_path_view(tail(lens_path)), getitem(segment)),
# Give up
lambda _: None
)(obj)
|
python
|
def fake_lens_path_view(lens_path, obj):
"""
Simulates R.view with a lens_path since we don't have lens functions
:param lens_path: Array of string paths
:param obj: Object containing the given path
:return: The value at the path or None
"""
segment = head(lens_path)
return if_else(
both(lambda _: identity(segment), has(segment)),
# Recurse on the rest of the path
compose(fake_lens_path_view(tail(lens_path)), getitem(segment)),
# Give up
lambda _: None
)(obj)
|
[
"def",
"fake_lens_path_view",
"(",
"lens_path",
",",
"obj",
")",
":",
"segment",
"=",
"head",
"(",
"lens_path",
")",
"return",
"if_else",
"(",
"both",
"(",
"lambda",
"_",
":",
"identity",
"(",
"segment",
")",
",",
"has",
"(",
"segment",
")",
")",
",",
"# Recurse on the rest of the path",
"compose",
"(",
"fake_lens_path_view",
"(",
"tail",
"(",
"lens_path",
")",
")",
",",
"getitem",
"(",
"segment",
")",
")",
",",
"# Give up",
"lambda",
"_",
":",
"None",
")",
"(",
"obj",
")"
] |
Simulates R.view with a lens_path since we don't have lens functions
:param lens_path: Array of string paths
:param obj: Object containing the given path
:return: The value at the path or None
|
[
"Simulates",
"R",
".",
"view",
"with",
"a",
"lens_path",
"since",
"we",
"don",
"t",
"have",
"lens",
"functions",
":",
"param",
"lens_path",
":",
"Array",
"of",
"string",
"paths",
":",
"param",
"obj",
":",
"Object",
"containing",
"the",
"given",
"path",
":",
"return",
":",
"The",
"value",
"at",
"the",
"path",
"or",
"None"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L810-L824
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
fake_lens_path_set
|
def fake_lens_path_set(lens_path, value, obj):
"""
Simulates R.set with a lens_path since we don't have lens functions
:param lens_path: Array of string paths
:param value: The value to set at the lens path
:param obj: Object containing the given path
:return: The value at the path or None
"""
segment = head(lens_path)
obj_copy = copy.copy(obj)
def set_array_index(i, v, l):
# Fill the array with None up to the given index and set the index to v
try:
l[i] = v
except IndexError:
for _ in range(i - len(l) + 1):
l.append(None)
l[i] = v
if not (length(lens_path) - 1):
# Done
new_value = value
else:
# Find the value at the path or create a {} or [] at obj[segment]
found_or_created = item_path_or(
if_else(
lambda segment: segment.isnumeric(),
always([]),
always({})
)(head(tail(lens_path))),
segment,
obj
)
# Recurse on the rest of the path
new_value = fake_lens_path_set(tail(lens_path), value, found_or_created)
# Set or replace
if segment.isnumeric():
set_array_index(int(segment), new_value, obj_copy)
else:
obj_copy[segment] = new_value
return obj_copy
|
python
|
def fake_lens_path_set(lens_path, value, obj):
"""
Simulates R.set with a lens_path since we don't have lens functions
:param lens_path: Array of string paths
:param value: The value to set at the lens path
:param obj: Object containing the given path
:return: The value at the path or None
"""
segment = head(lens_path)
obj_copy = copy.copy(obj)
def set_array_index(i, v, l):
# Fill the array with None up to the given index and set the index to v
try:
l[i] = v
except IndexError:
for _ in range(i - len(l) + 1):
l.append(None)
l[i] = v
if not (length(lens_path) - 1):
# Done
new_value = value
else:
# Find the value at the path or create a {} or [] at obj[segment]
found_or_created = item_path_or(
if_else(
lambda segment: segment.isnumeric(),
always([]),
always({})
)(head(tail(lens_path))),
segment,
obj
)
# Recurse on the rest of the path
new_value = fake_lens_path_set(tail(lens_path), value, found_or_created)
# Set or replace
if segment.isnumeric():
set_array_index(int(segment), new_value, obj_copy)
else:
obj_copy[segment] = new_value
return obj_copy
|
[
"def",
"fake_lens_path_set",
"(",
"lens_path",
",",
"value",
",",
"obj",
")",
":",
"segment",
"=",
"head",
"(",
"lens_path",
")",
"obj_copy",
"=",
"copy",
".",
"copy",
"(",
"obj",
")",
"def",
"set_array_index",
"(",
"i",
",",
"v",
",",
"l",
")",
":",
"# Fill the array with None up to the given index and set the index to v",
"try",
":",
"l",
"[",
"i",
"]",
"=",
"v",
"except",
"IndexError",
":",
"for",
"_",
"in",
"range",
"(",
"i",
"-",
"len",
"(",
"l",
")",
"+",
"1",
")",
":",
"l",
".",
"append",
"(",
"None",
")",
"l",
"[",
"i",
"]",
"=",
"v",
"if",
"not",
"(",
"length",
"(",
"lens_path",
")",
"-",
"1",
")",
":",
"# Done",
"new_value",
"=",
"value",
"else",
":",
"# Find the value at the path or create a {} or [] at obj[segment]",
"found_or_created",
"=",
"item_path_or",
"(",
"if_else",
"(",
"lambda",
"segment",
":",
"segment",
".",
"isnumeric",
"(",
")",
",",
"always",
"(",
"[",
"]",
")",
",",
"always",
"(",
"{",
"}",
")",
")",
"(",
"head",
"(",
"tail",
"(",
"lens_path",
")",
")",
")",
",",
"segment",
",",
"obj",
")",
"# Recurse on the rest of the path",
"new_value",
"=",
"fake_lens_path_set",
"(",
"tail",
"(",
"lens_path",
")",
",",
"value",
",",
"found_or_created",
")",
"# Set or replace",
"if",
"segment",
".",
"isnumeric",
"(",
")",
":",
"set_array_index",
"(",
"int",
"(",
"segment",
")",
",",
"new_value",
",",
"obj_copy",
")",
"else",
":",
"obj_copy",
"[",
"segment",
"]",
"=",
"new_value",
"return",
"obj_copy"
] |
Simulates R.set with a lens_path since we don't have lens functions
:param lens_path: Array of string paths
:param value: The value to set at the lens path
:param obj: Object containing the given path
:return: The value at the path or None
|
[
"Simulates",
"R",
".",
"set",
"with",
"a",
"lens_path",
"since",
"we",
"don",
"t",
"have",
"lens",
"functions",
":",
"param",
"lens_path",
":",
"Array",
"of",
"string",
"paths",
":",
"param",
"value",
":",
"The",
"value",
"to",
"set",
"at",
"the",
"lens",
"path",
":",
"param",
"obj",
":",
"Object",
"containing",
"the",
"given",
"path",
":",
"return",
":",
"The",
"value",
"at",
"the",
"path",
"or",
"None"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L828-L870
|
train
|
calocan/rescape-python-helpers
|
rescape_python_helpers/functional/ramda.py
|
unflatten_dct
|
def unflatten_dct(obj):
"""
Undoes the work of flatten_dict
@param {Object} obj 1-D object in the form returned by flattenObj
@returns {Object} The original
:param obj:
:return:
"""
def reduce_func(accum, key_string_and_value):
key_string = key_string_and_value[0]
value = key_string_and_value[1]
item_key_path = key_string_to_lens_path(key_string)
# All but the last segment gives us the item container len
container_key_path = init(item_key_path)
container = unless(
# If the path has any length (not []) and the value is set, don't do anything
both(always(length(container_key_path)), fake_lens_path_view(container_key_path)),
# Else we are at the top level, so use the existing accum or create a [] or {}
# depending on if our item key is a number or not
lambda x: default_to(
if_else(
lambda segment: segment.isnumeric(),
always([]),
always({})
)(head(item_key_path))
)(x)
)(accum)
# Finally set the container at the itemLensPath
return fake_lens_path_set(
item_key_path,
value,
container
)
return compose(
reduce(
reduce_func,
# null initial value
None
),
to_pairs
)(obj)
|
python
|
def unflatten_dct(obj):
"""
Undoes the work of flatten_dict
@param {Object} obj 1-D object in the form returned by flattenObj
@returns {Object} The original
:param obj:
:return:
"""
def reduce_func(accum, key_string_and_value):
key_string = key_string_and_value[0]
value = key_string_and_value[1]
item_key_path = key_string_to_lens_path(key_string)
# All but the last segment gives us the item container len
container_key_path = init(item_key_path)
container = unless(
# If the path has any length (not []) and the value is set, don't do anything
both(always(length(container_key_path)), fake_lens_path_view(container_key_path)),
# Else we are at the top level, so use the existing accum or create a [] or {}
# depending on if our item key is a number or not
lambda x: default_to(
if_else(
lambda segment: segment.isnumeric(),
always([]),
always({})
)(head(item_key_path))
)(x)
)(accum)
# Finally set the container at the itemLensPath
return fake_lens_path_set(
item_key_path,
value,
container
)
return compose(
reduce(
reduce_func,
# null initial value
None
),
to_pairs
)(obj)
|
[
"def",
"unflatten_dct",
"(",
"obj",
")",
":",
"def",
"reduce_func",
"(",
"accum",
",",
"key_string_and_value",
")",
":",
"key_string",
"=",
"key_string_and_value",
"[",
"0",
"]",
"value",
"=",
"key_string_and_value",
"[",
"1",
"]",
"item_key_path",
"=",
"key_string_to_lens_path",
"(",
"key_string",
")",
"# All but the last segment gives us the item container len",
"container_key_path",
"=",
"init",
"(",
"item_key_path",
")",
"container",
"=",
"unless",
"(",
"# If the path has any length (not []) and the value is set, don't do anything",
"both",
"(",
"always",
"(",
"length",
"(",
"container_key_path",
")",
")",
",",
"fake_lens_path_view",
"(",
"container_key_path",
")",
")",
",",
"# Else we are at the top level, so use the existing accum or create a [] or {}",
"# depending on if our item key is a number or not",
"lambda",
"x",
":",
"default_to",
"(",
"if_else",
"(",
"lambda",
"segment",
":",
"segment",
".",
"isnumeric",
"(",
")",
",",
"always",
"(",
"[",
"]",
")",
",",
"always",
"(",
"{",
"}",
")",
")",
"(",
"head",
"(",
"item_key_path",
")",
")",
")",
"(",
"x",
")",
")",
"(",
"accum",
")",
"# Finally set the container at the itemLensPath",
"return",
"fake_lens_path_set",
"(",
"item_key_path",
",",
"value",
",",
"container",
")",
"return",
"compose",
"(",
"reduce",
"(",
"reduce_func",
",",
"# null initial value",
"None",
")",
",",
"to_pairs",
")",
"(",
"obj",
")"
] |
Undoes the work of flatten_dict
@param {Object} obj 1-D object in the form returned by flattenObj
@returns {Object} The original
:param obj:
:return:
|
[
"Undoes",
"the",
"work",
"of",
"flatten_dict"
] |
91a1724f062ee40a25aa60fc96b2d7acadd99618
|
https://github.com/calocan/rescape-python-helpers/blob/91a1724f062ee40a25aa60fc96b2d7acadd99618/rescape_python_helpers/functional/ramda.py#L873-L915
|
train
|
jay-johnson/antinex-client
|
antinex_client/utils.py
|
ppj
|
def ppj(json_data):
"""ppj
:param json_data: dictionary to print
"""
return str(json.dumps(
json_data,
sort_keys=True,
indent=4,
separators=(',', ': ')))
|
python
|
def ppj(json_data):
"""ppj
:param json_data: dictionary to print
"""
return str(json.dumps(
json_data,
sort_keys=True,
indent=4,
separators=(',', ': ')))
|
[
"def",
"ppj",
"(",
"json_data",
")",
":",
"return",
"str",
"(",
"json",
".",
"dumps",
"(",
"json_data",
",",
"sort_keys",
"=",
"True",
",",
"indent",
"=",
"4",
",",
"separators",
"=",
"(",
"','",
",",
"': '",
")",
")",
")"
] |
ppj
:param json_data: dictionary to print
|
[
"ppj"
] |
850ba2a2fe21c836e071def618dcecc9caf5d59c
|
https://github.com/jay-johnson/antinex-client/blob/850ba2a2fe21c836e071def618dcecc9caf5d59c/antinex_client/utils.py#L41-L50
|
train
|
praekeltfoundation/molo.commenting
|
molo/commenting/admin.py
|
AdminModeratorMixin.change_view
|
def change_view(self, request, object_id, form_url='', extra_context=None):
"""
Override change view to add extra context enabling moderate tool.
"""
context = {
'has_moderate_tool': True
}
if extra_context:
context.update(extra_context)
return super(AdminModeratorMixin, self).change_view(
request=request,
object_id=object_id,
form_url=form_url,
extra_context=context
)
|
python
|
def change_view(self, request, object_id, form_url='', extra_context=None):
"""
Override change view to add extra context enabling moderate tool.
"""
context = {
'has_moderate_tool': True
}
if extra_context:
context.update(extra_context)
return super(AdminModeratorMixin, self).change_view(
request=request,
object_id=object_id,
form_url=form_url,
extra_context=context
)
|
[
"def",
"change_view",
"(",
"self",
",",
"request",
",",
"object_id",
",",
"form_url",
"=",
"''",
",",
"extra_context",
"=",
"None",
")",
":",
"context",
"=",
"{",
"'has_moderate_tool'",
":",
"True",
"}",
"if",
"extra_context",
":",
"context",
".",
"update",
"(",
"extra_context",
")",
"return",
"super",
"(",
"AdminModeratorMixin",
",",
"self",
")",
".",
"change_view",
"(",
"request",
"=",
"request",
",",
"object_id",
"=",
"object_id",
",",
"form_url",
"=",
"form_url",
",",
"extra_context",
"=",
"context",
")"
] |
Override change view to add extra context enabling moderate tool.
|
[
"Override",
"change",
"view",
"to",
"add",
"extra",
"context",
"enabling",
"moderate",
"tool",
"."
] |
94549bd75e4a5c5b3db43149e32d636330b3969c
|
https://github.com/praekeltfoundation/molo.commenting/blob/94549bd75e4a5c5b3db43149e32d636330b3969c/molo/commenting/admin.py#L156-L170
|
train
|
praekeltfoundation/molo.commenting
|
molo/commenting/admin.py
|
AdminModeratorMixin.get_urls
|
def get_urls(self):
"""
Add aditional moderate url.
"""
from django.conf.urls import url
urls = super(AdminModeratorMixin, self).get_urls()
info = self.model._meta.app_label, self.model._meta.model_name
return [
url(r'^(.+)/moderate/$',
self.admin_site.admin_view(self.moderate_view),
name='%s_%s_moderate' % info),
] + urls
|
python
|
def get_urls(self):
"""
Add aditional moderate url.
"""
from django.conf.urls import url
urls = super(AdminModeratorMixin, self).get_urls()
info = self.model._meta.app_label, self.model._meta.model_name
return [
url(r'^(.+)/moderate/$',
self.admin_site.admin_view(self.moderate_view),
name='%s_%s_moderate' % info),
] + urls
|
[
"def",
"get_urls",
"(",
"self",
")",
":",
"from",
"django",
".",
"conf",
".",
"urls",
"import",
"url",
"urls",
"=",
"super",
"(",
"AdminModeratorMixin",
",",
"self",
")",
".",
"get_urls",
"(",
")",
"info",
"=",
"self",
".",
"model",
".",
"_meta",
".",
"app_label",
",",
"self",
".",
"model",
".",
"_meta",
".",
"model_name",
"return",
"[",
"url",
"(",
"r'^(.+)/moderate/$'",
",",
"self",
".",
"admin_site",
".",
"admin_view",
"(",
"self",
".",
"moderate_view",
")",
",",
"name",
"=",
"'%s_%s_moderate'",
"%",
"info",
")",
",",
"]",
"+",
"urls"
] |
Add aditional moderate url.
|
[
"Add",
"aditional",
"moderate",
"url",
"."
] |
94549bd75e4a5c5b3db43149e32d636330b3969c
|
https://github.com/praekeltfoundation/molo.commenting/blob/94549bd75e4a5c5b3db43149e32d636330b3969c/molo/commenting/admin.py#L172-L183
|
train
|
feliphebueno/Rinzler
|
rinzler/core/response.py
|
Response.render
|
def render(self, indent=0):
"""
Renders a HttpResponse for the ongoing request
:param indent int
:rtype: HttpResponse
"""
self.__indent = indent
return HttpResponse(
str(self), content_type=self.__content_type, charset=self.__charset, **self.__kwargs
)
|
python
|
def render(self, indent=0):
"""
Renders a HttpResponse for the ongoing request
:param indent int
:rtype: HttpResponse
"""
self.__indent = indent
return HttpResponse(
str(self), content_type=self.__content_type, charset=self.__charset, **self.__kwargs
)
|
[
"def",
"render",
"(",
"self",
",",
"indent",
"=",
"0",
")",
":",
"self",
".",
"__indent",
"=",
"indent",
"return",
"HttpResponse",
"(",
"str",
"(",
"self",
")",
",",
"content_type",
"=",
"self",
".",
"__content_type",
",",
"charset",
"=",
"self",
".",
"__charset",
",",
"*",
"*",
"self",
".",
"__kwargs",
")"
] |
Renders a HttpResponse for the ongoing request
:param indent int
:rtype: HttpResponse
|
[
"Renders",
"a",
"HttpResponse",
"for",
"the",
"ongoing",
"request",
":",
"param",
"indent",
"int",
":",
"rtype",
":",
"HttpResponse"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/core/response.py#L34-L43
|
train
|
feliphebueno/Rinzler
|
rinzler/logger/log.py
|
setup_logging
|
def setup_logging(default_path='logging.yaml', env_key='LOG_CFG'):
"""
Setup logging configuration
"""
path = default_path
value = os.getenv(env_key, None)
if value:
path = value
if os.path.exists(path):
with open(path, 'rt') as f:
configs = yaml.safe_load(f.read())
logging.config.dictConfig(configs)
else:
logging.config.dictConfig(config)
|
python
|
def setup_logging(default_path='logging.yaml', env_key='LOG_CFG'):
"""
Setup logging configuration
"""
path = default_path
value = os.getenv(env_key, None)
if value:
path = value
if os.path.exists(path):
with open(path, 'rt') as f:
configs = yaml.safe_load(f.read())
logging.config.dictConfig(configs)
else:
logging.config.dictConfig(config)
|
[
"def",
"setup_logging",
"(",
"default_path",
"=",
"'logging.yaml'",
",",
"env_key",
"=",
"'LOG_CFG'",
")",
":",
"path",
"=",
"default_path",
"value",
"=",
"os",
".",
"getenv",
"(",
"env_key",
",",
"None",
")",
"if",
"value",
":",
"path",
"=",
"value",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"with",
"open",
"(",
"path",
",",
"'rt'",
")",
"as",
"f",
":",
"configs",
"=",
"yaml",
".",
"safe_load",
"(",
"f",
".",
"read",
"(",
")",
")",
"logging",
".",
"config",
".",
"dictConfig",
"(",
"configs",
")",
"else",
":",
"logging",
".",
"config",
".",
"dictConfig",
"(",
"config",
")"
] |
Setup logging configuration
|
[
"Setup",
"logging",
"configuration"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/logger/log.py#L9-L22
|
train
|
feliphebueno/Rinzler
|
rinzler/core/route_mapping.py
|
RouteMapping.get
|
def get(self, route: str(), callback: object()):
"""
Binds a GET route with the given callback
:rtype: object
"""
self.__set_route('get', {route: callback})
return RouteMapping
|
python
|
def get(self, route: str(), callback: object()):
"""
Binds a GET route with the given callback
:rtype: object
"""
self.__set_route('get', {route: callback})
return RouteMapping
|
[
"def",
"get",
"(",
"self",
",",
"route",
":",
"str",
"(",
")",
",",
"callback",
":",
"object",
"(",
")",
")",
":",
"self",
".",
"__set_route",
"(",
"'get'",
",",
"{",
"route",
":",
"callback",
"}",
")",
"return",
"RouteMapping"
] |
Binds a GET route with the given callback
:rtype: object
|
[
"Binds",
"a",
"GET",
"route",
"with",
"the",
"given",
"callback",
":",
"rtype",
":",
"object"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/core/route_mapping.py#L16-L22
|
train
|
feliphebueno/Rinzler
|
rinzler/core/route_mapping.py
|
RouteMapping.post
|
def post(self, route: str(), callback: object()):
"""
Binds a POST route with the given callback
:rtype: object
"""
self.__set_route('post', {route: callback})
return RouteMapping
|
python
|
def post(self, route: str(), callback: object()):
"""
Binds a POST route with the given callback
:rtype: object
"""
self.__set_route('post', {route: callback})
return RouteMapping
|
[
"def",
"post",
"(",
"self",
",",
"route",
":",
"str",
"(",
")",
",",
"callback",
":",
"object",
"(",
")",
")",
":",
"self",
".",
"__set_route",
"(",
"'post'",
",",
"{",
"route",
":",
"callback",
"}",
")",
"return",
"RouteMapping"
] |
Binds a POST route with the given callback
:rtype: object
|
[
"Binds",
"a",
"POST",
"route",
"with",
"the",
"given",
"callback",
":",
"rtype",
":",
"object"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/core/route_mapping.py#L24-L30
|
train
|
feliphebueno/Rinzler
|
rinzler/core/route_mapping.py
|
RouteMapping.put
|
def put(self, route: str(), callback: object()):
"""
Binds a PUT route with the given callback
:rtype: object
"""
self.__set_route('put', {route: callback})
return RouteMapping
|
python
|
def put(self, route: str(), callback: object()):
"""
Binds a PUT route with the given callback
:rtype: object
"""
self.__set_route('put', {route: callback})
return RouteMapping
|
[
"def",
"put",
"(",
"self",
",",
"route",
":",
"str",
"(",
")",
",",
"callback",
":",
"object",
"(",
")",
")",
":",
"self",
".",
"__set_route",
"(",
"'put'",
",",
"{",
"route",
":",
"callback",
"}",
")",
"return",
"RouteMapping"
] |
Binds a PUT route with the given callback
:rtype: object
|
[
"Binds",
"a",
"PUT",
"route",
"with",
"the",
"given",
"callback",
":",
"rtype",
":",
"object"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/core/route_mapping.py#L32-L38
|
train
|
feliphebueno/Rinzler
|
rinzler/core/route_mapping.py
|
RouteMapping.patch
|
def patch(self, route: str(), callback: object()):
"""
Binds a PATCH route with the given callback
:rtype: object
"""
self.__set_route('patch', {route: callback})
return RouteMapping
|
python
|
def patch(self, route: str(), callback: object()):
"""
Binds a PATCH route with the given callback
:rtype: object
"""
self.__set_route('patch', {route: callback})
return RouteMapping
|
[
"def",
"patch",
"(",
"self",
",",
"route",
":",
"str",
"(",
")",
",",
"callback",
":",
"object",
"(",
")",
")",
":",
"self",
".",
"__set_route",
"(",
"'patch'",
",",
"{",
"route",
":",
"callback",
"}",
")",
"return",
"RouteMapping"
] |
Binds a PATCH route with the given callback
:rtype: object
|
[
"Binds",
"a",
"PATCH",
"route",
"with",
"the",
"given",
"callback",
":",
"rtype",
":",
"object"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/core/route_mapping.py#L40-L46
|
train
|
feliphebueno/Rinzler
|
rinzler/core/route_mapping.py
|
RouteMapping.delete
|
def delete(self, route: str(), callback: object()):
"""
Binds a PUT route with the given callback
:rtype: object
"""
self.__set_route('delete', {route: callback})
return RouteMapping
|
python
|
def delete(self, route: str(), callback: object()):
"""
Binds a PUT route with the given callback
:rtype: object
"""
self.__set_route('delete', {route: callback})
return RouteMapping
|
[
"def",
"delete",
"(",
"self",
",",
"route",
":",
"str",
"(",
")",
",",
"callback",
":",
"object",
"(",
")",
")",
":",
"self",
".",
"__set_route",
"(",
"'delete'",
",",
"{",
"route",
":",
"callback",
"}",
")",
"return",
"RouteMapping"
] |
Binds a PUT route with the given callback
:rtype: object
|
[
"Binds",
"a",
"PUT",
"route",
"with",
"the",
"given",
"callback",
":",
"rtype",
":",
"object"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/core/route_mapping.py#L48-L54
|
train
|
feliphebueno/Rinzler
|
rinzler/core/route_mapping.py
|
RouteMapping.head
|
def head(self, route: str(), callback: object()):
"""
Binds a HEAD route with the given callback
:rtype: object
"""
self.__set_route('head', {route: callback})
return RouteMapping
|
python
|
def head(self, route: str(), callback: object()):
"""
Binds a HEAD route with the given callback
:rtype: object
"""
self.__set_route('head', {route: callback})
return RouteMapping
|
[
"def",
"head",
"(",
"self",
",",
"route",
":",
"str",
"(",
")",
",",
"callback",
":",
"object",
"(",
")",
")",
":",
"self",
".",
"__set_route",
"(",
"'head'",
",",
"{",
"route",
":",
"callback",
"}",
")",
"return",
"RouteMapping"
] |
Binds a HEAD route with the given callback
:rtype: object
|
[
"Binds",
"a",
"HEAD",
"route",
"with",
"the",
"given",
"callback",
":",
"rtype",
":",
"object"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/core/route_mapping.py#L56-L62
|
train
|
feliphebueno/Rinzler
|
rinzler/core/route_mapping.py
|
RouteMapping.options
|
def options(self, route: str(), callback: object()):
"""
Binds a OPTIONS route with the given callback
:rtype: object
"""
self.__set_route('options', {route: callback})
return RouteMapping
|
python
|
def options(self, route: str(), callback: object()):
"""
Binds a OPTIONS route with the given callback
:rtype: object
"""
self.__set_route('options', {route: callback})
return RouteMapping
|
[
"def",
"options",
"(",
"self",
",",
"route",
":",
"str",
"(",
")",
",",
"callback",
":",
"object",
"(",
")",
")",
":",
"self",
".",
"__set_route",
"(",
"'options'",
",",
"{",
"route",
":",
"callback",
"}",
")",
"return",
"RouteMapping"
] |
Binds a OPTIONS route with the given callback
:rtype: object
|
[
"Binds",
"a",
"OPTIONS",
"route",
"with",
"the",
"given",
"callback",
":",
"rtype",
":",
"object"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/core/route_mapping.py#L64-L70
|
train
|
feliphebueno/Rinzler
|
rinzler/core/route_mapping.py
|
RouteMapping.__set_route
|
def __set_route(self, type_route, route):
"""
Sets the given type_route and route to the route mapping
:rtype: object
"""
if type_route in self.__routes:
if not self.verify_route_already_bound(type_route, route):
self.__routes[type_route].append(route)
else:
self.__routes[type_route] = [route]
return RouteMapping
|
python
|
def __set_route(self, type_route, route):
"""
Sets the given type_route and route to the route mapping
:rtype: object
"""
if type_route in self.__routes:
if not self.verify_route_already_bound(type_route, route):
self.__routes[type_route].append(route)
else:
self.__routes[type_route] = [route]
return RouteMapping
|
[
"def",
"__set_route",
"(",
"self",
",",
"type_route",
",",
"route",
")",
":",
"if",
"type_route",
"in",
"self",
".",
"__routes",
":",
"if",
"not",
"self",
".",
"verify_route_already_bound",
"(",
"type_route",
",",
"route",
")",
":",
"self",
".",
"__routes",
"[",
"type_route",
"]",
".",
"append",
"(",
"route",
")",
"else",
":",
"self",
".",
"__routes",
"[",
"type_route",
"]",
"=",
"[",
"route",
"]",
"return",
"RouteMapping"
] |
Sets the given type_route and route to the route mapping
:rtype: object
|
[
"Sets",
"the",
"given",
"type_route",
"and",
"route",
"to",
"the",
"route",
"mapping",
":",
"rtype",
":",
"object"
] |
7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8
|
https://github.com/feliphebueno/Rinzler/blob/7f6d5445b5662cba2e8938bb82c7f3ef94e5ded8/rinzler/core/route_mapping.py#L72-L82
|
train
|
gmr/helper
|
helper/unix.py
|
operating_system
|
def operating_system():
"""Return a string identifying the operating system the application
is running on.
:rtype: str
"""
if platform.system() == 'Darwin':
return 'OS X Version %s' % platform.mac_ver()[0]
distribution = ' '.join(platform.linux_distribution()).strip()
os_platform = platform.platform(True, True)
if distribution:
os_platform += ' (%s)' % distribution
return os_platform
|
python
|
def operating_system():
"""Return a string identifying the operating system the application
is running on.
:rtype: str
"""
if platform.system() == 'Darwin':
return 'OS X Version %s' % platform.mac_ver()[0]
distribution = ' '.join(platform.linux_distribution()).strip()
os_platform = platform.platform(True, True)
if distribution:
os_platform += ' (%s)' % distribution
return os_platform
|
[
"def",
"operating_system",
"(",
")",
":",
"if",
"platform",
".",
"system",
"(",
")",
"==",
"'Darwin'",
":",
"return",
"'OS X Version %s'",
"%",
"platform",
".",
"mac_ver",
"(",
")",
"[",
"0",
"]",
"distribution",
"=",
"' '",
".",
"join",
"(",
"platform",
".",
"linux_distribution",
"(",
")",
")",
".",
"strip",
"(",
")",
"os_platform",
"=",
"platform",
".",
"platform",
"(",
"True",
",",
"True",
")",
"if",
"distribution",
":",
"os_platform",
"+=",
"' (%s)'",
"%",
"distribution",
"return",
"os_platform"
] |
Return a string identifying the operating system the application
is running on.
:rtype: str
|
[
"Return",
"a",
"string",
"identifying",
"the",
"operating",
"system",
"the",
"application",
"is",
"running",
"on",
"."
] |
fe8e45fc8eabf619429b2940c682c252ee33c082
|
https://github.com/gmr/helper/blob/fe8e45fc8eabf619429b2940c682c252ee33c082/helper/unix.py#L27-L40
|
train
|
gmr/helper
|
helper/unix.py
|
Daemon.start
|
def start(self):
"""Daemonize if the process is not already running."""
if self._is_already_running():
LOGGER.error('Is already running')
sys.exit(1)
try:
self._daemonize()
self.controller.start()
except Exception as error:
sys.stderr.write('\nERROR: Startup of %s Failed\n.' %
sys.argv[0].split('/')[-1])
exception_log = self._get_exception_log_path()
if exception_log:
with open(exception_log, 'a') as handle:
timestamp = datetime.datetime.now().isoformat()
handle.write('{:->80}\n'.format(' [START]'))
handle.write('%s Exception [%s]\n' % (sys.argv[0],
timestamp))
handle.write('{:->80}\n'.format(' [INFO]'))
handle.write('Interpreter: %s\n' % sys.executable)
handle.write('CLI arguments: %s\n' % ' '.join(sys.argv))
handle.write('Exception: %s\n' % error)
handle.write('Traceback:\n')
output = traceback.format_exception(*sys.exc_info())
_dev_null = [(handle.write(line),
sys.stdout.write(line)) for line in output]
handle.write('{:->80}\n'.format(' [END]'))
handle.flush()
sys.stderr.write('\nException log: %s\n\n' % exception_log)
sys.exit(1)
|
python
|
def start(self):
"""Daemonize if the process is not already running."""
if self._is_already_running():
LOGGER.error('Is already running')
sys.exit(1)
try:
self._daemonize()
self.controller.start()
except Exception as error:
sys.stderr.write('\nERROR: Startup of %s Failed\n.' %
sys.argv[0].split('/')[-1])
exception_log = self._get_exception_log_path()
if exception_log:
with open(exception_log, 'a') as handle:
timestamp = datetime.datetime.now().isoformat()
handle.write('{:->80}\n'.format(' [START]'))
handle.write('%s Exception [%s]\n' % (sys.argv[0],
timestamp))
handle.write('{:->80}\n'.format(' [INFO]'))
handle.write('Interpreter: %s\n' % sys.executable)
handle.write('CLI arguments: %s\n' % ' '.join(sys.argv))
handle.write('Exception: %s\n' % error)
handle.write('Traceback:\n')
output = traceback.format_exception(*sys.exc_info())
_dev_null = [(handle.write(line),
sys.stdout.write(line)) for line in output]
handle.write('{:->80}\n'.format(' [END]'))
handle.flush()
sys.stderr.write('\nException log: %s\n\n' % exception_log)
sys.exit(1)
|
[
"def",
"start",
"(",
"self",
")",
":",
"if",
"self",
".",
"_is_already_running",
"(",
")",
":",
"LOGGER",
".",
"error",
"(",
"'Is already running'",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"try",
":",
"self",
".",
"_daemonize",
"(",
")",
"self",
".",
"controller",
".",
"start",
"(",
")",
"except",
"Exception",
"as",
"error",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'\\nERROR: Startup of %s Failed\\n.'",
"%",
"sys",
".",
"argv",
"[",
"0",
"]",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
")",
"exception_log",
"=",
"self",
".",
"_get_exception_log_path",
"(",
")",
"if",
"exception_log",
":",
"with",
"open",
"(",
"exception_log",
",",
"'a'",
")",
"as",
"handle",
":",
"timestamp",
"=",
"datetime",
".",
"datetime",
".",
"now",
"(",
")",
".",
"isoformat",
"(",
")",
"handle",
".",
"write",
"(",
"'{:->80}\\n'",
".",
"format",
"(",
"' [START]'",
")",
")",
"handle",
".",
"write",
"(",
"'%s Exception [%s]\\n'",
"%",
"(",
"sys",
".",
"argv",
"[",
"0",
"]",
",",
"timestamp",
")",
")",
"handle",
".",
"write",
"(",
"'{:->80}\\n'",
".",
"format",
"(",
"' [INFO]'",
")",
")",
"handle",
".",
"write",
"(",
"'Interpreter: %s\\n'",
"%",
"sys",
".",
"executable",
")",
"handle",
".",
"write",
"(",
"'CLI arguments: %s\\n'",
"%",
"' '",
".",
"join",
"(",
"sys",
".",
"argv",
")",
")",
"handle",
".",
"write",
"(",
"'Exception: %s\\n'",
"%",
"error",
")",
"handle",
".",
"write",
"(",
"'Traceback:\\n'",
")",
"output",
"=",
"traceback",
".",
"format_exception",
"(",
"*",
"sys",
".",
"exc_info",
"(",
")",
")",
"_dev_null",
"=",
"[",
"(",
"handle",
".",
"write",
"(",
"line",
")",
",",
"sys",
".",
"stdout",
".",
"write",
"(",
"line",
")",
")",
"for",
"line",
"in",
"output",
"]",
"handle",
".",
"write",
"(",
"'{:->80}\\n'",
".",
"format",
"(",
"' [END]'",
")",
")",
"handle",
".",
"flush",
"(",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"'\\nException log: %s\\n\\n'",
"%",
"exception_log",
")",
"sys",
".",
"exit",
"(",
"1",
")"
] |
Daemonize if the process is not already running.
|
[
"Daemonize",
"if",
"the",
"process",
"is",
"not",
"already",
"running",
"."
] |
fe8e45fc8eabf619429b2940c682c252ee33c082
|
https://github.com/gmr/helper/blob/fe8e45fc8eabf619429b2940c682c252ee33c082/helper/unix.py#L85-L114
|
train
|
gmr/helper
|
helper/unix.py
|
Daemon.gid
|
def gid(self):
"""Return the group id that the daemon will run with
:rtype: int
"""
if not self._gid:
if self.controller.config.daemon.group:
self._gid = grp.getgrnam(self.config.daemon.group).gr_gid
else:
self._gid = os.getgid()
return self._gid
|
python
|
def gid(self):
"""Return the group id that the daemon will run with
:rtype: int
"""
if not self._gid:
if self.controller.config.daemon.group:
self._gid = grp.getgrnam(self.config.daemon.group).gr_gid
else:
self._gid = os.getgid()
return self._gid
|
[
"def",
"gid",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_gid",
":",
"if",
"self",
".",
"controller",
".",
"config",
".",
"daemon",
".",
"group",
":",
"self",
".",
"_gid",
"=",
"grp",
".",
"getgrnam",
"(",
"self",
".",
"config",
".",
"daemon",
".",
"group",
")",
".",
"gr_gid",
"else",
":",
"self",
".",
"_gid",
"=",
"os",
".",
"getgid",
"(",
")",
"return",
"self",
".",
"_gid"
] |
Return the group id that the daemon will run with
:rtype: int
|
[
"Return",
"the",
"group",
"id",
"that",
"the",
"daemon",
"will",
"run",
"with"
] |
fe8e45fc8eabf619429b2940c682c252ee33c082
|
https://github.com/gmr/helper/blob/fe8e45fc8eabf619429b2940c682c252ee33c082/helper/unix.py#L117-L128
|
train
|
gmr/helper
|
helper/unix.py
|
Daemon.uid
|
def uid(self):
"""Return the user id that the process will run as
:rtype: int
"""
if not self._uid:
if self.config.daemon.user:
self._uid = pwd.getpwnam(self.config.daemon.user).pw_uid
else:
self._uid = os.getuid()
return self._uid
|
python
|
def uid(self):
"""Return the user id that the process will run as
:rtype: int
"""
if not self._uid:
if self.config.daemon.user:
self._uid = pwd.getpwnam(self.config.daemon.user).pw_uid
else:
self._uid = os.getuid()
return self._uid
|
[
"def",
"uid",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_uid",
":",
"if",
"self",
".",
"config",
".",
"daemon",
".",
"user",
":",
"self",
".",
"_uid",
"=",
"pwd",
".",
"getpwnam",
"(",
"self",
".",
"config",
".",
"daemon",
".",
"user",
")",
".",
"pw_uid",
"else",
":",
"self",
".",
"_uid",
"=",
"os",
".",
"getuid",
"(",
")",
"return",
"self",
".",
"_uid"
] |
Return the user id that the process will run as
:rtype: int
|
[
"Return",
"the",
"user",
"id",
"that",
"the",
"process",
"will",
"run",
"as"
] |
fe8e45fc8eabf619429b2940c682c252ee33c082
|
https://github.com/gmr/helper/blob/fe8e45fc8eabf619429b2940c682c252ee33c082/helper/unix.py#L131-L142
|
train
|
gmr/helper
|
helper/unix.py
|
Daemon._daemonize
|
def _daemonize(self):
"""Fork into a background process and setup the process, copied in part
from http://www.jejik.com/files/examples/daemon3x.py
"""
LOGGER.info('Forking %s into the background', sys.argv[0])
# Write the pidfile if current uid != final uid
if os.getuid() != self.uid:
fd = open(self.pidfile_path, 'w')
os.fchmod(fd.fileno(), 0o644)
os.fchown(fd.fileno(), self.uid, self.gid)
fd.close()
try:
pid = os.fork()
if pid > 0:
sys.exit(0)
except OSError as error:
raise OSError('Could not fork off parent: %s', error)
# Set the user id
if self.uid != os.getuid():
os.setuid(self.uid)
# Set the group id
if self.gid != os.getgid():
try:
os.setgid(self.gid)
except OSError as error:
LOGGER.error('Could not set group: %s', error)
# Decouple from parent environment
os.chdir('/')
os.setsid()
os.umask(0o022)
# Fork again
try:
pid = os.fork()
if pid > 0:
sys.exit(0)
except OSError as error:
raise OSError('Could not fork child: %s', error)
# redirect standard file descriptors
sys.stdout.flush()
sys.stderr.flush()
si = open(os.devnull, 'r')
so = open(os.devnull, 'a+')
se = open(os.devnull, 'a+')
os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
# Automatically call self._remove_pidfile when the app exits
atexit.register(self._remove_pidfile)
self._write_pidfile()
|
python
|
def _daemonize(self):
"""Fork into a background process and setup the process, copied in part
from http://www.jejik.com/files/examples/daemon3x.py
"""
LOGGER.info('Forking %s into the background', sys.argv[0])
# Write the pidfile if current uid != final uid
if os.getuid() != self.uid:
fd = open(self.pidfile_path, 'w')
os.fchmod(fd.fileno(), 0o644)
os.fchown(fd.fileno(), self.uid, self.gid)
fd.close()
try:
pid = os.fork()
if pid > 0:
sys.exit(0)
except OSError as error:
raise OSError('Could not fork off parent: %s', error)
# Set the user id
if self.uid != os.getuid():
os.setuid(self.uid)
# Set the group id
if self.gid != os.getgid():
try:
os.setgid(self.gid)
except OSError as error:
LOGGER.error('Could not set group: %s', error)
# Decouple from parent environment
os.chdir('/')
os.setsid()
os.umask(0o022)
# Fork again
try:
pid = os.fork()
if pid > 0:
sys.exit(0)
except OSError as error:
raise OSError('Could not fork child: %s', error)
# redirect standard file descriptors
sys.stdout.flush()
sys.stderr.flush()
si = open(os.devnull, 'r')
so = open(os.devnull, 'a+')
se = open(os.devnull, 'a+')
os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
# Automatically call self._remove_pidfile when the app exits
atexit.register(self._remove_pidfile)
self._write_pidfile()
|
[
"def",
"_daemonize",
"(",
"self",
")",
":",
"LOGGER",
".",
"info",
"(",
"'Forking %s into the background'",
",",
"sys",
".",
"argv",
"[",
"0",
"]",
")",
"# Write the pidfile if current uid != final uid",
"if",
"os",
".",
"getuid",
"(",
")",
"!=",
"self",
".",
"uid",
":",
"fd",
"=",
"open",
"(",
"self",
".",
"pidfile_path",
",",
"'w'",
")",
"os",
".",
"fchmod",
"(",
"fd",
".",
"fileno",
"(",
")",
",",
"0o644",
")",
"os",
".",
"fchown",
"(",
"fd",
".",
"fileno",
"(",
")",
",",
"self",
".",
"uid",
",",
"self",
".",
"gid",
")",
"fd",
".",
"close",
"(",
")",
"try",
":",
"pid",
"=",
"os",
".",
"fork",
"(",
")",
"if",
"pid",
">",
"0",
":",
"sys",
".",
"exit",
"(",
"0",
")",
"except",
"OSError",
"as",
"error",
":",
"raise",
"OSError",
"(",
"'Could not fork off parent: %s'",
",",
"error",
")",
"# Set the user id",
"if",
"self",
".",
"uid",
"!=",
"os",
".",
"getuid",
"(",
")",
":",
"os",
".",
"setuid",
"(",
"self",
".",
"uid",
")",
"# Set the group id",
"if",
"self",
".",
"gid",
"!=",
"os",
".",
"getgid",
"(",
")",
":",
"try",
":",
"os",
".",
"setgid",
"(",
"self",
".",
"gid",
")",
"except",
"OSError",
"as",
"error",
":",
"LOGGER",
".",
"error",
"(",
"'Could not set group: %s'",
",",
"error",
")",
"# Decouple from parent environment",
"os",
".",
"chdir",
"(",
"'/'",
")",
"os",
".",
"setsid",
"(",
")",
"os",
".",
"umask",
"(",
"0o022",
")",
"# Fork again",
"try",
":",
"pid",
"=",
"os",
".",
"fork",
"(",
")",
"if",
"pid",
">",
"0",
":",
"sys",
".",
"exit",
"(",
"0",
")",
"except",
"OSError",
"as",
"error",
":",
"raise",
"OSError",
"(",
"'Could not fork child: %s'",
",",
"error",
")",
"# redirect standard file descriptors",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"sys",
".",
"stderr",
".",
"flush",
"(",
")",
"si",
"=",
"open",
"(",
"os",
".",
"devnull",
",",
"'r'",
")",
"so",
"=",
"open",
"(",
"os",
".",
"devnull",
",",
"'a+'",
")",
"se",
"=",
"open",
"(",
"os",
".",
"devnull",
",",
"'a+'",
")",
"os",
".",
"dup2",
"(",
"si",
".",
"fileno",
"(",
")",
",",
"sys",
".",
"stdin",
".",
"fileno",
"(",
")",
")",
"os",
".",
"dup2",
"(",
"so",
".",
"fileno",
"(",
")",
",",
"sys",
".",
"stdout",
".",
"fileno",
"(",
")",
")",
"os",
".",
"dup2",
"(",
"se",
".",
"fileno",
"(",
")",
",",
"sys",
".",
"stderr",
".",
"fileno",
"(",
")",
")",
"# Automatically call self._remove_pidfile when the app exits",
"atexit",
".",
"register",
"(",
"self",
".",
"_remove_pidfile",
")",
"self",
".",
"_write_pidfile",
"(",
")"
] |
Fork into a background process and setup the process, copied in part
from http://www.jejik.com/files/examples/daemon3x.py
|
[
"Fork",
"into",
"a",
"background",
"process",
"and",
"setup",
"the",
"process",
"copied",
"in",
"part",
"from",
"http",
":",
"//",
"www",
".",
"jejik",
".",
"com",
"/",
"files",
"/",
"examples",
"/",
"daemon3x",
".",
"py"
] |
fe8e45fc8eabf619429b2940c682c252ee33c082
|
https://github.com/gmr/helper/blob/fe8e45fc8eabf619429b2940c682c252ee33c082/helper/unix.py#L144-L201
|
train
|
gmr/helper
|
helper/unix.py
|
Daemon._get_exception_log_path
|
def _get_exception_log_path():
"""Return the normalized path for the connection log, raising an
exception if it can not written to.
:return: str
"""
app = sys.argv[0].split('/')[-1]
for exception_log in ['/var/log/%s.errors' % app,
'/var/tmp/%s.errors' % app,
'/tmp/%s.errors' % app]:
if os.access(path.dirname(exception_log), os.W_OK):
return exception_log
return None
|
python
|
def _get_exception_log_path():
"""Return the normalized path for the connection log, raising an
exception if it can not written to.
:return: str
"""
app = sys.argv[0].split('/')[-1]
for exception_log in ['/var/log/%s.errors' % app,
'/var/tmp/%s.errors' % app,
'/tmp/%s.errors' % app]:
if os.access(path.dirname(exception_log), os.W_OK):
return exception_log
return None
|
[
"def",
"_get_exception_log_path",
"(",
")",
":",
"app",
"=",
"sys",
".",
"argv",
"[",
"0",
"]",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
"for",
"exception_log",
"in",
"[",
"'/var/log/%s.errors'",
"%",
"app",
",",
"'/var/tmp/%s.errors'",
"%",
"app",
",",
"'/tmp/%s.errors'",
"%",
"app",
"]",
":",
"if",
"os",
".",
"access",
"(",
"path",
".",
"dirname",
"(",
"exception_log",
")",
",",
"os",
".",
"W_OK",
")",
":",
"return",
"exception_log",
"return",
"None"
] |
Return the normalized path for the connection log, raising an
exception if it can not written to.
:return: str
|
[
"Return",
"the",
"normalized",
"path",
"for",
"the",
"connection",
"log",
"raising",
"an",
"exception",
"if",
"it",
"can",
"not",
"written",
"to",
"."
] |
fe8e45fc8eabf619429b2940c682c252ee33c082
|
https://github.com/gmr/helper/blob/fe8e45fc8eabf619429b2940c682c252ee33c082/helper/unix.py#L204-L217
|
train
|
gmr/helper
|
helper/unix.py
|
Daemon._get_pidfile_path
|
def _get_pidfile_path(self):
"""Return the normalized path for the pidfile, raising an
exception if it can not written to.
:return: str
:raises: ValueError
:raises: OSError
"""
if self.config.daemon.pidfile:
pidfile = path.abspath(self.config.daemon.pidfile)
if not os.access(path.dirname(pidfile), os.W_OK):
raise ValueError('Cannot write to specified pid file path'
' %s' % pidfile)
return pidfile
app = sys.argv[0].split('/')[-1]
for pidfile in ['%s/pids/%s.pid' % (os.getcwd(), app),
'/var/run/%s.pid' % app,
'/var/run/%s/%s.pid' % (app, app),
'/var/tmp/%s.pid' % app,
'/tmp/%s.pid' % app,
'%s.pid' % app]:
if os.access(path.dirname(pidfile), os.W_OK):
return pidfile
raise OSError('Could not find an appropriate place for a pid file')
|
python
|
def _get_pidfile_path(self):
"""Return the normalized path for the pidfile, raising an
exception if it can not written to.
:return: str
:raises: ValueError
:raises: OSError
"""
if self.config.daemon.pidfile:
pidfile = path.abspath(self.config.daemon.pidfile)
if not os.access(path.dirname(pidfile), os.W_OK):
raise ValueError('Cannot write to specified pid file path'
' %s' % pidfile)
return pidfile
app = sys.argv[0].split('/')[-1]
for pidfile in ['%s/pids/%s.pid' % (os.getcwd(), app),
'/var/run/%s.pid' % app,
'/var/run/%s/%s.pid' % (app, app),
'/var/tmp/%s.pid' % app,
'/tmp/%s.pid' % app,
'%s.pid' % app]:
if os.access(path.dirname(pidfile), os.W_OK):
return pidfile
raise OSError('Could not find an appropriate place for a pid file')
|
[
"def",
"_get_pidfile_path",
"(",
"self",
")",
":",
"if",
"self",
".",
"config",
".",
"daemon",
".",
"pidfile",
":",
"pidfile",
"=",
"path",
".",
"abspath",
"(",
"self",
".",
"config",
".",
"daemon",
".",
"pidfile",
")",
"if",
"not",
"os",
".",
"access",
"(",
"path",
".",
"dirname",
"(",
"pidfile",
")",
",",
"os",
".",
"W_OK",
")",
":",
"raise",
"ValueError",
"(",
"'Cannot write to specified pid file path'",
"' %s'",
"%",
"pidfile",
")",
"return",
"pidfile",
"app",
"=",
"sys",
".",
"argv",
"[",
"0",
"]",
".",
"split",
"(",
"'/'",
")",
"[",
"-",
"1",
"]",
"for",
"pidfile",
"in",
"[",
"'%s/pids/%s.pid'",
"%",
"(",
"os",
".",
"getcwd",
"(",
")",
",",
"app",
")",
",",
"'/var/run/%s.pid'",
"%",
"app",
",",
"'/var/run/%s/%s.pid'",
"%",
"(",
"app",
",",
"app",
")",
",",
"'/var/tmp/%s.pid'",
"%",
"app",
",",
"'/tmp/%s.pid'",
"%",
"app",
",",
"'%s.pid'",
"%",
"app",
"]",
":",
"if",
"os",
".",
"access",
"(",
"path",
".",
"dirname",
"(",
"pidfile",
")",
",",
"os",
".",
"W_OK",
")",
":",
"return",
"pidfile",
"raise",
"OSError",
"(",
"'Could not find an appropriate place for a pid file'",
")"
] |
Return the normalized path for the pidfile, raising an
exception if it can not written to.
:return: str
:raises: ValueError
:raises: OSError
|
[
"Return",
"the",
"normalized",
"path",
"for",
"the",
"pidfile",
"raising",
"an",
"exception",
"if",
"it",
"can",
"not",
"written",
"to",
"."
] |
fe8e45fc8eabf619429b2940c682c252ee33c082
|
https://github.com/gmr/helper/blob/fe8e45fc8eabf619429b2940c682c252ee33c082/helper/unix.py#L219-L243
|
train
|
gmr/helper
|
helper/unix.py
|
Daemon._is_already_running
|
def _is_already_running(self):
"""Check to see if the process is running, first looking for a pidfile,
then shelling out in either case, removing a pidfile if it exists but
the process is not running.
"""
# Look for the pidfile, if exists determine if the process is alive
pidfile = self._get_pidfile_path()
if os.path.exists(pidfile):
pid = open(pidfile).read().strip()
try:
os.kill(int(pid), 0)
sys.stderr.write('Process already running as pid # %s\n' % pid)
return True
except OSError as error:
LOGGER.debug('Found pidfile, no process # %s', error)
os.unlink(pidfile)
# Check the os for a process that is not this one that looks the same
pattern = ' '.join(sys.argv)
pattern = '[%s]%s' % (pattern[0], pattern[1:])
try:
output = subprocess.check_output('ps a | grep "%s"' % pattern,
shell=True)
except AttributeError:
# Python 2.6
stdin, stdout, stderr = os.popen3('ps a | grep "%s"' % pattern)
output = stdout.read()
except subprocess.CalledProcessError:
return False
pids = [int(pid) for pid in (re.findall(r'^([0-9]+)\s',
output.decode('latin-1')))]
if os.getpid() in pids:
pids.remove(os.getpid())
if not pids:
return False
if len(pids) == 1:
pids = pids[0]
sys.stderr.write('Process already running as pid # %s\n' % pids)
return True
|
python
|
def _is_already_running(self):
"""Check to see if the process is running, first looking for a pidfile,
then shelling out in either case, removing a pidfile if it exists but
the process is not running.
"""
# Look for the pidfile, if exists determine if the process is alive
pidfile = self._get_pidfile_path()
if os.path.exists(pidfile):
pid = open(pidfile).read().strip()
try:
os.kill(int(pid), 0)
sys.stderr.write('Process already running as pid # %s\n' % pid)
return True
except OSError as error:
LOGGER.debug('Found pidfile, no process # %s', error)
os.unlink(pidfile)
# Check the os for a process that is not this one that looks the same
pattern = ' '.join(sys.argv)
pattern = '[%s]%s' % (pattern[0], pattern[1:])
try:
output = subprocess.check_output('ps a | grep "%s"' % pattern,
shell=True)
except AttributeError:
# Python 2.6
stdin, stdout, stderr = os.popen3('ps a | grep "%s"' % pattern)
output = stdout.read()
except subprocess.CalledProcessError:
return False
pids = [int(pid) for pid in (re.findall(r'^([0-9]+)\s',
output.decode('latin-1')))]
if os.getpid() in pids:
pids.remove(os.getpid())
if not pids:
return False
if len(pids) == 1:
pids = pids[0]
sys.stderr.write('Process already running as pid # %s\n' % pids)
return True
|
[
"def",
"_is_already_running",
"(",
"self",
")",
":",
"# Look for the pidfile, if exists determine if the process is alive",
"pidfile",
"=",
"self",
".",
"_get_pidfile_path",
"(",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"pidfile",
")",
":",
"pid",
"=",
"open",
"(",
"pidfile",
")",
".",
"read",
"(",
")",
".",
"strip",
"(",
")",
"try",
":",
"os",
".",
"kill",
"(",
"int",
"(",
"pid",
")",
",",
"0",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"'Process already running as pid # %s\\n'",
"%",
"pid",
")",
"return",
"True",
"except",
"OSError",
"as",
"error",
":",
"LOGGER",
".",
"debug",
"(",
"'Found pidfile, no process # %s'",
",",
"error",
")",
"os",
".",
"unlink",
"(",
"pidfile",
")",
"# Check the os for a process that is not this one that looks the same",
"pattern",
"=",
"' '",
".",
"join",
"(",
"sys",
".",
"argv",
")",
"pattern",
"=",
"'[%s]%s'",
"%",
"(",
"pattern",
"[",
"0",
"]",
",",
"pattern",
"[",
"1",
":",
"]",
")",
"try",
":",
"output",
"=",
"subprocess",
".",
"check_output",
"(",
"'ps a | grep \"%s\"'",
"%",
"pattern",
",",
"shell",
"=",
"True",
")",
"except",
"AttributeError",
":",
"# Python 2.6",
"stdin",
",",
"stdout",
",",
"stderr",
"=",
"os",
".",
"popen3",
"(",
"'ps a | grep \"%s\"'",
"%",
"pattern",
")",
"output",
"=",
"stdout",
".",
"read",
"(",
")",
"except",
"subprocess",
".",
"CalledProcessError",
":",
"return",
"False",
"pids",
"=",
"[",
"int",
"(",
"pid",
")",
"for",
"pid",
"in",
"(",
"re",
".",
"findall",
"(",
"r'^([0-9]+)\\s'",
",",
"output",
".",
"decode",
"(",
"'latin-1'",
")",
")",
")",
"]",
"if",
"os",
".",
"getpid",
"(",
")",
"in",
"pids",
":",
"pids",
".",
"remove",
"(",
"os",
".",
"getpid",
"(",
")",
")",
"if",
"not",
"pids",
":",
"return",
"False",
"if",
"len",
"(",
"pids",
")",
"==",
"1",
":",
"pids",
"=",
"pids",
"[",
"0",
"]",
"sys",
".",
"stderr",
".",
"write",
"(",
"'Process already running as pid # %s\\n'",
"%",
"pids",
")",
"return",
"True"
] |
Check to see if the process is running, first looking for a pidfile,
then shelling out in either case, removing a pidfile if it exists but
the process is not running.
|
[
"Check",
"to",
"see",
"if",
"the",
"process",
"is",
"running",
"first",
"looking",
"for",
"a",
"pidfile",
"then",
"shelling",
"out",
"in",
"either",
"case",
"removing",
"a",
"pidfile",
"if",
"it",
"exists",
"but",
"the",
"process",
"is",
"not",
"running",
"."
] |
fe8e45fc8eabf619429b2940c682c252ee33c082
|
https://github.com/gmr/helper/blob/fe8e45fc8eabf619429b2940c682c252ee33c082/helper/unix.py#L245-L284
|
train
|
gmr/helper
|
helper/unix.py
|
Daemon._remove_pidfile
|
def _remove_pidfile(self):
"""Remove the pid file from the filesystem"""
LOGGER.debug('Removing pidfile: %s', self.pidfile_path)
try:
os.unlink(self.pidfile_path)
except OSError:
pass
|
python
|
def _remove_pidfile(self):
"""Remove the pid file from the filesystem"""
LOGGER.debug('Removing pidfile: %s', self.pidfile_path)
try:
os.unlink(self.pidfile_path)
except OSError:
pass
|
[
"def",
"_remove_pidfile",
"(",
"self",
")",
":",
"LOGGER",
".",
"debug",
"(",
"'Removing pidfile: %s'",
",",
"self",
".",
"pidfile_path",
")",
"try",
":",
"os",
".",
"unlink",
"(",
"self",
".",
"pidfile_path",
")",
"except",
"OSError",
":",
"pass"
] |
Remove the pid file from the filesystem
|
[
"Remove",
"the",
"pid",
"file",
"from",
"the",
"filesystem"
] |
fe8e45fc8eabf619429b2940c682c252ee33c082
|
https://github.com/gmr/helper/blob/fe8e45fc8eabf619429b2940c682c252ee33c082/helper/unix.py#L286-L292
|
train
|
gmr/helper
|
helper/unix.py
|
Daemon._write_pidfile
|
def _write_pidfile(self):
"""Write the pid file out with the process number in the pid file"""
LOGGER.debug('Writing pidfile: %s', self.pidfile_path)
with open(self.pidfile_path, "w") as handle:
handle.write(str(os.getpid()))
|
python
|
def _write_pidfile(self):
"""Write the pid file out with the process number in the pid file"""
LOGGER.debug('Writing pidfile: %s', self.pidfile_path)
with open(self.pidfile_path, "w") as handle:
handle.write(str(os.getpid()))
|
[
"def",
"_write_pidfile",
"(",
"self",
")",
":",
"LOGGER",
".",
"debug",
"(",
"'Writing pidfile: %s'",
",",
"self",
".",
"pidfile_path",
")",
"with",
"open",
"(",
"self",
".",
"pidfile_path",
",",
"\"w\"",
")",
"as",
"handle",
":",
"handle",
".",
"write",
"(",
"str",
"(",
"os",
".",
"getpid",
"(",
")",
")",
")"
] |
Write the pid file out with the process number in the pid file
|
[
"Write",
"the",
"pid",
"file",
"out",
"with",
"the",
"process",
"number",
"in",
"the",
"pid",
"file"
] |
fe8e45fc8eabf619429b2940c682c252ee33c082
|
https://github.com/gmr/helper/blob/fe8e45fc8eabf619429b2940c682c252ee33c082/helper/unix.py#L294-L298
|
train
|
CitrineInformatics/pypif
|
pypif/util/case.py
|
to_camel_case
|
def to_camel_case(snake_case_string):
"""
Convert a string from snake case to camel case. For example, "some_var" would become "someVar".
:param snake_case_string: Snake-cased string to convert to camel case.
:returns: Camel-cased version of snake_case_string.
"""
parts = snake_case_string.lstrip('_').split('_')
return parts[0] + ''.join([i.title() for i in parts[1:]])
|
python
|
def to_camel_case(snake_case_string):
"""
Convert a string from snake case to camel case. For example, "some_var" would become "someVar".
:param snake_case_string: Snake-cased string to convert to camel case.
:returns: Camel-cased version of snake_case_string.
"""
parts = snake_case_string.lstrip('_').split('_')
return parts[0] + ''.join([i.title() for i in parts[1:]])
|
[
"def",
"to_camel_case",
"(",
"snake_case_string",
")",
":",
"parts",
"=",
"snake_case_string",
".",
"lstrip",
"(",
"'_'",
")",
".",
"split",
"(",
"'_'",
")",
"return",
"parts",
"[",
"0",
"]",
"+",
"''",
".",
"join",
"(",
"[",
"i",
".",
"title",
"(",
")",
"for",
"i",
"in",
"parts",
"[",
"1",
":",
"]",
"]",
")"
] |
Convert a string from snake case to camel case. For example, "some_var" would become "someVar".
:param snake_case_string: Snake-cased string to convert to camel case.
:returns: Camel-cased version of snake_case_string.
|
[
"Convert",
"a",
"string",
"from",
"snake",
"case",
"to",
"camel",
"case",
".",
"For",
"example",
"some_var",
"would",
"become",
"someVar",
"."
] |
938348a8ff7b10b330770cccaaeb2109922f681b
|
https://github.com/CitrineInformatics/pypif/blob/938348a8ff7b10b330770cccaaeb2109922f681b/pypif/util/case.py#L8-L16
|
train
|
CitrineInformatics/pypif
|
pypif/util/case.py
|
to_capitalized_camel_case
|
def to_capitalized_camel_case(snake_case_string):
"""
Convert a string from snake case to camel case with the first letter capitalized. For example, "some_var"
would become "SomeVar".
:param snake_case_string: Snake-cased string to convert to camel case.
:returns: Camel-cased version of snake_case_string.
"""
parts = snake_case_string.split('_')
return ''.join([i.title() for i in parts])
|
python
|
def to_capitalized_camel_case(snake_case_string):
"""
Convert a string from snake case to camel case with the first letter capitalized. For example, "some_var"
would become "SomeVar".
:param snake_case_string: Snake-cased string to convert to camel case.
:returns: Camel-cased version of snake_case_string.
"""
parts = snake_case_string.split('_')
return ''.join([i.title() for i in parts])
|
[
"def",
"to_capitalized_camel_case",
"(",
"snake_case_string",
")",
":",
"parts",
"=",
"snake_case_string",
".",
"split",
"(",
"'_'",
")",
"return",
"''",
".",
"join",
"(",
"[",
"i",
".",
"title",
"(",
")",
"for",
"i",
"in",
"parts",
"]",
")"
] |
Convert a string from snake case to camel case with the first letter capitalized. For example, "some_var"
would become "SomeVar".
:param snake_case_string: Snake-cased string to convert to camel case.
:returns: Camel-cased version of snake_case_string.
|
[
"Convert",
"a",
"string",
"from",
"snake",
"case",
"to",
"camel",
"case",
"with",
"the",
"first",
"letter",
"capitalized",
".",
"For",
"example",
"some_var",
"would",
"become",
"SomeVar",
"."
] |
938348a8ff7b10b330770cccaaeb2109922f681b
|
https://github.com/CitrineInformatics/pypif/blob/938348a8ff7b10b330770cccaaeb2109922f681b/pypif/util/case.py#L19-L28
|
train
|
CitrineInformatics/pypif
|
pypif/util/case.py
|
to_snake_case
|
def to_snake_case(camel_case_string):
"""
Convert a string from camel case to snake case. From example, "someVar" would become "some_var".
:param camel_case_string: Camel-cased string to convert to snake case.
:return: Snake-cased version of camel_case_string.
"""
first_pass = _first_camel_case_regex.sub(r'\1_\2', camel_case_string)
return _second_camel_case_regex.sub(r'\1_\2', first_pass).lower()
|
python
|
def to_snake_case(camel_case_string):
"""
Convert a string from camel case to snake case. From example, "someVar" would become "some_var".
:param camel_case_string: Camel-cased string to convert to snake case.
:return: Snake-cased version of camel_case_string.
"""
first_pass = _first_camel_case_regex.sub(r'\1_\2', camel_case_string)
return _second_camel_case_regex.sub(r'\1_\2', first_pass).lower()
|
[
"def",
"to_snake_case",
"(",
"camel_case_string",
")",
":",
"first_pass",
"=",
"_first_camel_case_regex",
".",
"sub",
"(",
"r'\\1_\\2'",
",",
"camel_case_string",
")",
"return",
"_second_camel_case_regex",
".",
"sub",
"(",
"r'\\1_\\2'",
",",
"first_pass",
")",
".",
"lower",
"(",
")"
] |
Convert a string from camel case to snake case. From example, "someVar" would become "some_var".
:param camel_case_string: Camel-cased string to convert to snake case.
:return: Snake-cased version of camel_case_string.
|
[
"Convert",
"a",
"string",
"from",
"camel",
"case",
"to",
"snake",
"case",
".",
"From",
"example",
"someVar",
"would",
"become",
"some_var",
"."
] |
938348a8ff7b10b330770cccaaeb2109922f681b
|
https://github.com/CitrineInformatics/pypif/blob/938348a8ff7b10b330770cccaaeb2109922f681b/pypif/util/case.py#L31-L39
|
train
|
CitrineInformatics/pypif
|
pypif/util/case.py
|
keys_to_snake_case
|
def keys_to_snake_case(camel_case_dict):
"""
Make a copy of a dictionary with all keys converted to snake case. This is just calls to_snake_case on
each of the keys in the dictionary and returns a new dictionary.
:param camel_case_dict: Dictionary with the keys to convert.
:type camel_case_dict: Dictionary.
:return: Dictionary with the keys converted to snake case.
"""
return dict((to_snake_case(key), value) for (key, value) in camel_case_dict.items())
|
python
|
def keys_to_snake_case(camel_case_dict):
"""
Make a copy of a dictionary with all keys converted to snake case. This is just calls to_snake_case on
each of the keys in the dictionary and returns a new dictionary.
:param camel_case_dict: Dictionary with the keys to convert.
:type camel_case_dict: Dictionary.
:return: Dictionary with the keys converted to snake case.
"""
return dict((to_snake_case(key), value) for (key, value) in camel_case_dict.items())
|
[
"def",
"keys_to_snake_case",
"(",
"camel_case_dict",
")",
":",
"return",
"dict",
"(",
"(",
"to_snake_case",
"(",
"key",
")",
",",
"value",
")",
"for",
"(",
"key",
",",
"value",
")",
"in",
"camel_case_dict",
".",
"items",
"(",
")",
")"
] |
Make a copy of a dictionary with all keys converted to snake case. This is just calls to_snake_case on
each of the keys in the dictionary and returns a new dictionary.
:param camel_case_dict: Dictionary with the keys to convert.
:type camel_case_dict: Dictionary.
:return: Dictionary with the keys converted to snake case.
|
[
"Make",
"a",
"copy",
"of",
"a",
"dictionary",
"with",
"all",
"keys",
"converted",
"to",
"snake",
"case",
".",
"This",
"is",
"just",
"calls",
"to_snake_case",
"on",
"each",
"of",
"the",
"keys",
"in",
"the",
"dictionary",
"and",
"returns",
"a",
"new",
"dictionary",
"."
] |
938348a8ff7b10b330770cccaaeb2109922f681b
|
https://github.com/CitrineInformatics/pypif/blob/938348a8ff7b10b330770cccaaeb2109922f681b/pypif/util/case.py#L42-L52
|
train
|
glomex/gcdt
|
gcdt/ramuda_core.py
|
list_functions
|
def list_functions(awsclient):
"""List the deployed lambda functions and print configuration.
:return: exit_code
"""
client_lambda = awsclient.get_client('lambda')
response = client_lambda.list_functions()
for function in response['Functions']:
log.info(function['FunctionName'])
log.info('\t' 'Memory: ' + str(function['MemorySize']))
log.info('\t' 'Timeout: ' + str(function['Timeout']))
log.info('\t' 'Role: ' + str(function['Role']))
log.info('\t' 'Current Version: ' + str(function['Version']))
log.info('\t' 'Last Modified: ' + str(function['LastModified']))
log.info('\t' 'CodeSha256: ' + str(function['CodeSha256']))
log.info('\n')
return 0
|
python
|
def list_functions(awsclient):
"""List the deployed lambda functions and print configuration.
:return: exit_code
"""
client_lambda = awsclient.get_client('lambda')
response = client_lambda.list_functions()
for function in response['Functions']:
log.info(function['FunctionName'])
log.info('\t' 'Memory: ' + str(function['MemorySize']))
log.info('\t' 'Timeout: ' + str(function['Timeout']))
log.info('\t' 'Role: ' + str(function['Role']))
log.info('\t' 'Current Version: ' + str(function['Version']))
log.info('\t' 'Last Modified: ' + str(function['LastModified']))
log.info('\t' 'CodeSha256: ' + str(function['CodeSha256']))
log.info('\n')
return 0
|
[
"def",
"list_functions",
"(",
"awsclient",
")",
":",
"client_lambda",
"=",
"awsclient",
".",
"get_client",
"(",
"'lambda'",
")",
"response",
"=",
"client_lambda",
".",
"list_functions",
"(",
")",
"for",
"function",
"in",
"response",
"[",
"'Functions'",
"]",
":",
"log",
".",
"info",
"(",
"function",
"[",
"'FunctionName'",
"]",
")",
"log",
".",
"info",
"(",
"'\\t'",
"'Memory: '",
"+",
"str",
"(",
"function",
"[",
"'MemorySize'",
"]",
")",
")",
"log",
".",
"info",
"(",
"'\\t'",
"'Timeout: '",
"+",
"str",
"(",
"function",
"[",
"'Timeout'",
"]",
")",
")",
"log",
".",
"info",
"(",
"'\\t'",
"'Role: '",
"+",
"str",
"(",
"function",
"[",
"'Role'",
"]",
")",
")",
"log",
".",
"info",
"(",
"'\\t'",
"'Current Version: '",
"+",
"str",
"(",
"function",
"[",
"'Version'",
"]",
")",
")",
"log",
".",
"info",
"(",
"'\\t'",
"'Last Modified: '",
"+",
"str",
"(",
"function",
"[",
"'LastModified'",
"]",
")",
")",
"log",
".",
"info",
"(",
"'\\t'",
"'CodeSha256: '",
"+",
"str",
"(",
"function",
"[",
"'CodeSha256'",
"]",
")",
")",
"log",
".",
"info",
"(",
"'\\n'",
")",
"return",
"0"
] |
List the deployed lambda functions and print configuration.
:return: exit_code
|
[
"List",
"the",
"deployed",
"lambda",
"functions",
"and",
"print",
"configuration",
"."
] |
cd67cf416371337b83cb9ca3f696277125703339
|
https://github.com/glomex/gcdt/blob/cd67cf416371337b83cb9ca3f696277125703339/gcdt/ramuda_core.py#L125-L142
|
train
|
glomex/gcdt
|
gcdt/ramuda_core.py
|
deploy_lambda
|
def deploy_lambda(awsclient, function_name, role, handler_filename,
handler_function,
folders, description, timeout, memory, subnet_ids=None,
security_groups=None, artifact_bucket=None,
zipfile=None,
fail_deployment_on_unsuccessful_ping=False,
runtime='python2.7', settings=None, environment=None,
retention_in_days=None
):
"""Create or update a lambda function.
:param awsclient:
:param function_name:
:param role:
:param handler_filename:
:param handler_function:
:param folders:
:param description:
:param timeout:
:param memory:
:param subnet_ids:
:param security_groups:
:param artifact_bucket:
:param zipfile:
:param environment: environment variables
:param retention_in_days: retention time of the cloudwatch logs
:return: exit_code
"""
# TODO: the signature of this function is too big, clean this up
# also consolidate create, update, config and add waiters!
if lambda_exists(awsclient, function_name):
function_version = _update_lambda(awsclient, function_name,
handler_filename,
handler_function, folders, role,
description, timeout, memory,
subnet_ids, security_groups,
artifact_bucket=artifact_bucket,
zipfile=zipfile,
environment=environment
)
else:
if not zipfile:
return 1
log.info('buffer size: %0.2f MB' % float(len(zipfile) / 1000000.0))
function_version = _create_lambda(awsclient, function_name, role,
handler_filename, handler_function,
folders, description, timeout,
memory, subnet_ids, security_groups,
artifact_bucket, zipfile,
runtime=runtime,
environment=environment)
# configure cloudwatch logs
if retention_in_days:
log_group_name = '/aws/lambda/%s' % function_name
put_retention_policy(awsclient, log_group_name, retention_in_days)
pong = ping(awsclient, function_name, version=function_version)
if 'alive' in str(pong):
log.info(colored.green('Great you\'re already accepting a ping ' +
'in your Lambda function'))
elif fail_deployment_on_unsuccessful_ping and not 'alive' in pong:
log.info(colored.red('Pinging your lambda function failed'))
# we do not deploy alias and fail command
return 1
else:
log.info(colored.red('Please consider adding a reaction to a ' +
'ping event to your lambda function'))
_deploy_alias(awsclient, function_name, function_version)
return 0
|
python
|
def deploy_lambda(awsclient, function_name, role, handler_filename,
handler_function,
folders, description, timeout, memory, subnet_ids=None,
security_groups=None, artifact_bucket=None,
zipfile=None,
fail_deployment_on_unsuccessful_ping=False,
runtime='python2.7', settings=None, environment=None,
retention_in_days=None
):
"""Create or update a lambda function.
:param awsclient:
:param function_name:
:param role:
:param handler_filename:
:param handler_function:
:param folders:
:param description:
:param timeout:
:param memory:
:param subnet_ids:
:param security_groups:
:param artifact_bucket:
:param zipfile:
:param environment: environment variables
:param retention_in_days: retention time of the cloudwatch logs
:return: exit_code
"""
# TODO: the signature of this function is too big, clean this up
# also consolidate create, update, config and add waiters!
if lambda_exists(awsclient, function_name):
function_version = _update_lambda(awsclient, function_name,
handler_filename,
handler_function, folders, role,
description, timeout, memory,
subnet_ids, security_groups,
artifact_bucket=artifact_bucket,
zipfile=zipfile,
environment=environment
)
else:
if not zipfile:
return 1
log.info('buffer size: %0.2f MB' % float(len(zipfile) / 1000000.0))
function_version = _create_lambda(awsclient, function_name, role,
handler_filename, handler_function,
folders, description, timeout,
memory, subnet_ids, security_groups,
artifact_bucket, zipfile,
runtime=runtime,
environment=environment)
# configure cloudwatch logs
if retention_in_days:
log_group_name = '/aws/lambda/%s' % function_name
put_retention_policy(awsclient, log_group_name, retention_in_days)
pong = ping(awsclient, function_name, version=function_version)
if 'alive' in str(pong):
log.info(colored.green('Great you\'re already accepting a ping ' +
'in your Lambda function'))
elif fail_deployment_on_unsuccessful_ping and not 'alive' in pong:
log.info(colored.red('Pinging your lambda function failed'))
# we do not deploy alias and fail command
return 1
else:
log.info(colored.red('Please consider adding a reaction to a ' +
'ping event to your lambda function'))
_deploy_alias(awsclient, function_name, function_version)
return 0
|
[
"def",
"deploy_lambda",
"(",
"awsclient",
",",
"function_name",
",",
"role",
",",
"handler_filename",
",",
"handler_function",
",",
"folders",
",",
"description",
",",
"timeout",
",",
"memory",
",",
"subnet_ids",
"=",
"None",
",",
"security_groups",
"=",
"None",
",",
"artifact_bucket",
"=",
"None",
",",
"zipfile",
"=",
"None",
",",
"fail_deployment_on_unsuccessful_ping",
"=",
"False",
",",
"runtime",
"=",
"'python2.7'",
",",
"settings",
"=",
"None",
",",
"environment",
"=",
"None",
",",
"retention_in_days",
"=",
"None",
")",
":",
"# TODO: the signature of this function is too big, clean this up",
"# also consolidate create, update, config and add waiters!",
"if",
"lambda_exists",
"(",
"awsclient",
",",
"function_name",
")",
":",
"function_version",
"=",
"_update_lambda",
"(",
"awsclient",
",",
"function_name",
",",
"handler_filename",
",",
"handler_function",
",",
"folders",
",",
"role",
",",
"description",
",",
"timeout",
",",
"memory",
",",
"subnet_ids",
",",
"security_groups",
",",
"artifact_bucket",
"=",
"artifact_bucket",
",",
"zipfile",
"=",
"zipfile",
",",
"environment",
"=",
"environment",
")",
"else",
":",
"if",
"not",
"zipfile",
":",
"return",
"1",
"log",
".",
"info",
"(",
"'buffer size: %0.2f MB'",
"%",
"float",
"(",
"len",
"(",
"zipfile",
")",
"/",
"1000000.0",
")",
")",
"function_version",
"=",
"_create_lambda",
"(",
"awsclient",
",",
"function_name",
",",
"role",
",",
"handler_filename",
",",
"handler_function",
",",
"folders",
",",
"description",
",",
"timeout",
",",
"memory",
",",
"subnet_ids",
",",
"security_groups",
",",
"artifact_bucket",
",",
"zipfile",
",",
"runtime",
"=",
"runtime",
",",
"environment",
"=",
"environment",
")",
"# configure cloudwatch logs",
"if",
"retention_in_days",
":",
"log_group_name",
"=",
"'/aws/lambda/%s'",
"%",
"function_name",
"put_retention_policy",
"(",
"awsclient",
",",
"log_group_name",
",",
"retention_in_days",
")",
"pong",
"=",
"ping",
"(",
"awsclient",
",",
"function_name",
",",
"version",
"=",
"function_version",
")",
"if",
"'alive'",
"in",
"str",
"(",
"pong",
")",
":",
"log",
".",
"info",
"(",
"colored",
".",
"green",
"(",
"'Great you\\'re already accepting a ping '",
"+",
"'in your Lambda function'",
")",
")",
"elif",
"fail_deployment_on_unsuccessful_ping",
"and",
"not",
"'alive'",
"in",
"pong",
":",
"log",
".",
"info",
"(",
"colored",
".",
"red",
"(",
"'Pinging your lambda function failed'",
")",
")",
"# we do not deploy alias and fail command",
"return",
"1",
"else",
":",
"log",
".",
"info",
"(",
"colored",
".",
"red",
"(",
"'Please consider adding a reaction to a '",
"+",
"'ping event to your lambda function'",
")",
")",
"_deploy_alias",
"(",
"awsclient",
",",
"function_name",
",",
"function_version",
")",
"return",
"0"
] |
Create or update a lambda function.
:param awsclient:
:param function_name:
:param role:
:param handler_filename:
:param handler_function:
:param folders:
:param description:
:param timeout:
:param memory:
:param subnet_ids:
:param security_groups:
:param artifact_bucket:
:param zipfile:
:param environment: environment variables
:param retention_in_days: retention time of the cloudwatch logs
:return: exit_code
|
[
"Create",
"or",
"update",
"a",
"lambda",
"function",
"."
] |
cd67cf416371337b83cb9ca3f696277125703339
|
https://github.com/glomex/gcdt/blob/cd67cf416371337b83cb9ca3f696277125703339/gcdt/ramuda_core.py#L145-L213
|
train
|
glomex/gcdt
|
gcdt/ramuda_core.py
|
bundle_lambda
|
def bundle_lambda(zipfile):
"""Write zipfile contents to file.
:param zipfile:
:return: exit_code
"""
# TODO have 'bundle.zip' as default config
if not zipfile:
return 1
with open('bundle.zip', 'wb') as zfile:
zfile.write(zipfile)
log.info('Finished - a bundle.zip is waiting for you...')
return 0
|
python
|
def bundle_lambda(zipfile):
"""Write zipfile contents to file.
:param zipfile:
:return: exit_code
"""
# TODO have 'bundle.zip' as default config
if not zipfile:
return 1
with open('bundle.zip', 'wb') as zfile:
zfile.write(zipfile)
log.info('Finished - a bundle.zip is waiting for you...')
return 0
|
[
"def",
"bundle_lambda",
"(",
"zipfile",
")",
":",
"# TODO have 'bundle.zip' as default config",
"if",
"not",
"zipfile",
":",
"return",
"1",
"with",
"open",
"(",
"'bundle.zip'",
",",
"'wb'",
")",
"as",
"zfile",
":",
"zfile",
".",
"write",
"(",
"zipfile",
")",
"log",
".",
"info",
"(",
"'Finished - a bundle.zip is waiting for you...'",
")",
"return",
"0"
] |
Write zipfile contents to file.
:param zipfile:
:return: exit_code
|
[
"Write",
"zipfile",
"contents",
"to",
"file",
"."
] |
cd67cf416371337b83cb9ca3f696277125703339
|
https://github.com/glomex/gcdt/blob/cd67cf416371337b83cb9ca3f696277125703339/gcdt/ramuda_core.py#L319-L331
|
train
|
glomex/gcdt
|
gcdt/ramuda_core.py
|
get_metrics
|
def get_metrics(awsclient, name):
"""Print out cloudformation metrics for a lambda function.
:param awsclient
:param name: name of the lambda function
:return: exit_code
"""
metrics = ['Duration', 'Errors', 'Invocations', 'Throttles']
client_cw = awsclient.get_client('cloudwatch')
for metric in metrics:
response = client_cw.get_metric_statistics(
Namespace='AWS/Lambda',
MetricName=metric,
Dimensions=[
{
'Name': 'FunctionName',
'Value': name
},
],
# StartTime=datetime.now() + timedelta(days=-1),
# EndTime=datetime.now(),
StartTime=maya.now().subtract(days=1).datetime(),
EndTime=maya.now().datetime(),
Period=3600,
Statistics=[
'Sum',
],
Unit=unit(metric)
)
log.info('\t%s %s' % (metric,
repr(aggregate_datapoints(response['Datapoints']))))
return 0
|
python
|
def get_metrics(awsclient, name):
"""Print out cloudformation metrics for a lambda function.
:param awsclient
:param name: name of the lambda function
:return: exit_code
"""
metrics = ['Duration', 'Errors', 'Invocations', 'Throttles']
client_cw = awsclient.get_client('cloudwatch')
for metric in metrics:
response = client_cw.get_metric_statistics(
Namespace='AWS/Lambda',
MetricName=metric,
Dimensions=[
{
'Name': 'FunctionName',
'Value': name
},
],
# StartTime=datetime.now() + timedelta(days=-1),
# EndTime=datetime.now(),
StartTime=maya.now().subtract(days=1).datetime(),
EndTime=maya.now().datetime(),
Period=3600,
Statistics=[
'Sum',
],
Unit=unit(metric)
)
log.info('\t%s %s' % (metric,
repr(aggregate_datapoints(response['Datapoints']))))
return 0
|
[
"def",
"get_metrics",
"(",
"awsclient",
",",
"name",
")",
":",
"metrics",
"=",
"[",
"'Duration'",
",",
"'Errors'",
",",
"'Invocations'",
",",
"'Throttles'",
"]",
"client_cw",
"=",
"awsclient",
".",
"get_client",
"(",
"'cloudwatch'",
")",
"for",
"metric",
"in",
"metrics",
":",
"response",
"=",
"client_cw",
".",
"get_metric_statistics",
"(",
"Namespace",
"=",
"'AWS/Lambda'",
",",
"MetricName",
"=",
"metric",
",",
"Dimensions",
"=",
"[",
"{",
"'Name'",
":",
"'FunctionName'",
",",
"'Value'",
":",
"name",
"}",
",",
"]",
",",
"# StartTime=datetime.now() + timedelta(days=-1),",
"# EndTime=datetime.now(),",
"StartTime",
"=",
"maya",
".",
"now",
"(",
")",
".",
"subtract",
"(",
"days",
"=",
"1",
")",
".",
"datetime",
"(",
")",
",",
"EndTime",
"=",
"maya",
".",
"now",
"(",
")",
".",
"datetime",
"(",
")",
",",
"Period",
"=",
"3600",
",",
"Statistics",
"=",
"[",
"'Sum'",
",",
"]",
",",
"Unit",
"=",
"unit",
"(",
"metric",
")",
")",
"log",
".",
"info",
"(",
"'\\t%s %s'",
"%",
"(",
"metric",
",",
"repr",
"(",
"aggregate_datapoints",
"(",
"response",
"[",
"'Datapoints'",
"]",
")",
")",
")",
")",
"return",
"0"
] |
Print out cloudformation metrics for a lambda function.
:param awsclient
:param name: name of the lambda function
:return: exit_code
|
[
"Print",
"out",
"cloudformation",
"metrics",
"for",
"a",
"lambda",
"function",
"."
] |
cd67cf416371337b83cb9ca3f696277125703339
|
https://github.com/glomex/gcdt/blob/cd67cf416371337b83cb9ca3f696277125703339/gcdt/ramuda_core.py#L419-L450
|
train
|
glomex/gcdt
|
gcdt/ramuda_core.py
|
rollback
|
def rollback(awsclient, function_name, alias_name=ALIAS_NAME, version=None):
"""Rollback a lambda function to a given version.
:param awsclient:
:param function_name:
:param alias_name:
:param version:
:return: exit_code
"""
if version:
log.info('rolling back to version {}'.format(version))
else:
log.info('rolling back to previous version')
version = _get_previous_version(awsclient, function_name, alias_name)
if version == '0':
log.error('unable to find previous version of lambda function')
return 1
log.info('new version is %s' % str(version))
_update_alias(awsclient, function_name, version, alias_name)
return 0
|
python
|
def rollback(awsclient, function_name, alias_name=ALIAS_NAME, version=None):
"""Rollback a lambda function to a given version.
:param awsclient:
:param function_name:
:param alias_name:
:param version:
:return: exit_code
"""
if version:
log.info('rolling back to version {}'.format(version))
else:
log.info('rolling back to previous version')
version = _get_previous_version(awsclient, function_name, alias_name)
if version == '0':
log.error('unable to find previous version of lambda function')
return 1
log.info('new version is %s' % str(version))
_update_alias(awsclient, function_name, version, alias_name)
return 0
|
[
"def",
"rollback",
"(",
"awsclient",
",",
"function_name",
",",
"alias_name",
"=",
"ALIAS_NAME",
",",
"version",
"=",
"None",
")",
":",
"if",
"version",
":",
"log",
".",
"info",
"(",
"'rolling back to version {}'",
".",
"format",
"(",
"version",
")",
")",
"else",
":",
"log",
".",
"info",
"(",
"'rolling back to previous version'",
")",
"version",
"=",
"_get_previous_version",
"(",
"awsclient",
",",
"function_name",
",",
"alias_name",
")",
"if",
"version",
"==",
"'0'",
":",
"log",
".",
"error",
"(",
"'unable to find previous version of lambda function'",
")",
"return",
"1",
"log",
".",
"info",
"(",
"'new version is %s'",
"%",
"str",
"(",
"version",
")",
")",
"_update_alias",
"(",
"awsclient",
",",
"function_name",
",",
"version",
",",
"alias_name",
")",
"return",
"0"
] |
Rollback a lambda function to a given version.
:param awsclient:
:param function_name:
:param alias_name:
:param version:
:return: exit_code
|
[
"Rollback",
"a",
"lambda",
"function",
"to",
"a",
"given",
"version",
"."
] |
cd67cf416371337b83cb9ca3f696277125703339
|
https://github.com/glomex/gcdt/blob/cd67cf416371337b83cb9ca3f696277125703339/gcdt/ramuda_core.py#L453-L474
|
train
|
glomex/gcdt
|
gcdt/ramuda_core.py
|
delete_lambda
|
def delete_lambda(awsclient, function_name, events=None, delete_logs=False):
"""Delete a lambda function.
:param awsclient:
:param function_name:
:param events: list of events
:param delete_logs:
:return: exit_code
"""
if events is not None:
unwire(awsclient, events, function_name, alias_name=ALIAS_NAME)
client_lambda = awsclient.get_client('lambda')
response = client_lambda.delete_function(FunctionName=function_name)
if delete_logs:
log_group_name = '/aws/lambda/%s' % function_name
delete_log_group(awsclient, log_group_name)
# TODO remove event source first and maybe also needed for permissions
log.info(json2table(response))
return 0
|
python
|
def delete_lambda(awsclient, function_name, events=None, delete_logs=False):
"""Delete a lambda function.
:param awsclient:
:param function_name:
:param events: list of events
:param delete_logs:
:return: exit_code
"""
if events is not None:
unwire(awsclient, events, function_name, alias_name=ALIAS_NAME)
client_lambda = awsclient.get_client('lambda')
response = client_lambda.delete_function(FunctionName=function_name)
if delete_logs:
log_group_name = '/aws/lambda/%s' % function_name
delete_log_group(awsclient, log_group_name)
# TODO remove event source first and maybe also needed for permissions
log.info(json2table(response))
return 0
|
[
"def",
"delete_lambda",
"(",
"awsclient",
",",
"function_name",
",",
"events",
"=",
"None",
",",
"delete_logs",
"=",
"False",
")",
":",
"if",
"events",
"is",
"not",
"None",
":",
"unwire",
"(",
"awsclient",
",",
"events",
",",
"function_name",
",",
"alias_name",
"=",
"ALIAS_NAME",
")",
"client_lambda",
"=",
"awsclient",
".",
"get_client",
"(",
"'lambda'",
")",
"response",
"=",
"client_lambda",
".",
"delete_function",
"(",
"FunctionName",
"=",
"function_name",
")",
"if",
"delete_logs",
":",
"log_group_name",
"=",
"'/aws/lambda/%s'",
"%",
"function_name",
"delete_log_group",
"(",
"awsclient",
",",
"log_group_name",
")",
"# TODO remove event source first and maybe also needed for permissions",
"log",
".",
"info",
"(",
"json2table",
"(",
"response",
")",
")",
"return",
"0"
] |
Delete a lambda function.
:param awsclient:
:param function_name:
:param events: list of events
:param delete_logs:
:return: exit_code
|
[
"Delete",
"a",
"lambda",
"function",
"."
] |
cd67cf416371337b83cb9ca3f696277125703339
|
https://github.com/glomex/gcdt/blob/cd67cf416371337b83cb9ca3f696277125703339/gcdt/ramuda_core.py#L477-L496
|
train
|
glomex/gcdt
|
gcdt/ramuda_core.py
|
delete_lambda_deprecated
|
def delete_lambda_deprecated(awsclient, function_name, s3_event_sources=[],
time_event_sources=[], delete_logs=False):
# FIXME: mutable default arguments!
"""Deprecated: please use delete_lambda!
:param awsclient:
:param function_name:
:param s3_event_sources:
:param time_event_sources:
:param delete_logs:
:return: exit_code
"""
unwire_deprecated(awsclient, function_name, s3_event_sources=s3_event_sources,
time_event_sources=time_event_sources,
alias_name=ALIAS_NAME)
client_lambda = awsclient.get_client('lambda')
response = client_lambda.delete_function(FunctionName=function_name)
if delete_logs:
log_group_name = '/aws/lambda/%s' % function_name
delete_log_group(awsclient, log_group_name)
# TODO remove event source first and maybe also needed for permissions
log.info(json2table(response))
return 0
|
python
|
def delete_lambda_deprecated(awsclient, function_name, s3_event_sources=[],
time_event_sources=[], delete_logs=False):
# FIXME: mutable default arguments!
"""Deprecated: please use delete_lambda!
:param awsclient:
:param function_name:
:param s3_event_sources:
:param time_event_sources:
:param delete_logs:
:return: exit_code
"""
unwire_deprecated(awsclient, function_name, s3_event_sources=s3_event_sources,
time_event_sources=time_event_sources,
alias_name=ALIAS_NAME)
client_lambda = awsclient.get_client('lambda')
response = client_lambda.delete_function(FunctionName=function_name)
if delete_logs:
log_group_name = '/aws/lambda/%s' % function_name
delete_log_group(awsclient, log_group_name)
# TODO remove event source first and maybe also needed for permissions
log.info(json2table(response))
return 0
|
[
"def",
"delete_lambda_deprecated",
"(",
"awsclient",
",",
"function_name",
",",
"s3_event_sources",
"=",
"[",
"]",
",",
"time_event_sources",
"=",
"[",
"]",
",",
"delete_logs",
"=",
"False",
")",
":",
"# FIXME: mutable default arguments!",
"unwire_deprecated",
"(",
"awsclient",
",",
"function_name",
",",
"s3_event_sources",
"=",
"s3_event_sources",
",",
"time_event_sources",
"=",
"time_event_sources",
",",
"alias_name",
"=",
"ALIAS_NAME",
")",
"client_lambda",
"=",
"awsclient",
".",
"get_client",
"(",
"'lambda'",
")",
"response",
"=",
"client_lambda",
".",
"delete_function",
"(",
"FunctionName",
"=",
"function_name",
")",
"if",
"delete_logs",
":",
"log_group_name",
"=",
"'/aws/lambda/%s'",
"%",
"function_name",
"delete_log_group",
"(",
"awsclient",
",",
"log_group_name",
")",
"# TODO remove event source first and maybe also needed for permissions",
"log",
".",
"info",
"(",
"json2table",
"(",
"response",
")",
")",
"return",
"0"
] |
Deprecated: please use delete_lambda!
:param awsclient:
:param function_name:
:param s3_event_sources:
:param time_event_sources:
:param delete_logs:
:return: exit_code
|
[
"Deprecated",
":",
"please",
"use",
"delete_lambda!"
] |
cd67cf416371337b83cb9ca3f696277125703339
|
https://github.com/glomex/gcdt/blob/cd67cf416371337b83cb9ca3f696277125703339/gcdt/ramuda_core.py#L499-L522
|
train
|
glomex/gcdt
|
gcdt/ramuda_core.py
|
cleanup_bundle
|
def cleanup_bundle():
"""Deletes files used for creating bundle.
* vendored/*
* bundle.zip
"""
paths = ['./vendored', './bundle.zip']
for path in paths:
if os.path.exists(path):
log.debug("Deleting %s..." % path)
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
|
python
|
def cleanup_bundle():
"""Deletes files used for creating bundle.
* vendored/*
* bundle.zip
"""
paths = ['./vendored', './bundle.zip']
for path in paths:
if os.path.exists(path):
log.debug("Deleting %s..." % path)
if os.path.isdir(path):
shutil.rmtree(path)
else:
os.remove(path)
|
[
"def",
"cleanup_bundle",
"(",
")",
":",
"paths",
"=",
"[",
"'./vendored'",
",",
"'./bundle.zip'",
"]",
"for",
"path",
"in",
"paths",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"path",
")",
":",
"log",
".",
"debug",
"(",
"\"Deleting %s...\"",
"%",
"path",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"shutil",
".",
"rmtree",
"(",
"path",
")",
"else",
":",
"os",
".",
"remove",
"(",
"path",
")"
] |
Deletes files used for creating bundle.
* vendored/*
* bundle.zip
|
[
"Deletes",
"files",
"used",
"for",
"creating",
"bundle",
".",
"*",
"vendored",
"/",
"*",
"*",
"bundle",
".",
"zip"
] |
cd67cf416371337b83cb9ca3f696277125703339
|
https://github.com/glomex/gcdt/blob/cd67cf416371337b83cb9ca3f696277125703339/gcdt/ramuda_core.py#L623-L635
|
train
|
glomex/gcdt
|
gcdt/ramuda_core.py
|
ping
|
def ping(awsclient, function_name, alias_name=ALIAS_NAME, version=None):
"""Send a ping request to a lambda function.
:param awsclient:
:param function_name:
:param alias_name:
:param version:
:return: ping response payload
"""
log.debug('sending ping to lambda function: %s', function_name)
payload = '{"ramuda_action": "ping"}' # default to ping event
# reuse invoke
return invoke(awsclient, function_name, payload, invocation_type=None,
alias_name=alias_name, version=version)
|
python
|
def ping(awsclient, function_name, alias_name=ALIAS_NAME, version=None):
"""Send a ping request to a lambda function.
:param awsclient:
:param function_name:
:param alias_name:
:param version:
:return: ping response payload
"""
log.debug('sending ping to lambda function: %s', function_name)
payload = '{"ramuda_action": "ping"}' # default to ping event
# reuse invoke
return invoke(awsclient, function_name, payload, invocation_type=None,
alias_name=alias_name, version=version)
|
[
"def",
"ping",
"(",
"awsclient",
",",
"function_name",
",",
"alias_name",
"=",
"ALIAS_NAME",
",",
"version",
"=",
"None",
")",
":",
"log",
".",
"debug",
"(",
"'sending ping to lambda function: %s'",
",",
"function_name",
")",
"payload",
"=",
"'{\"ramuda_action\": \"ping\"}'",
"# default to ping event",
"# reuse invoke",
"return",
"invoke",
"(",
"awsclient",
",",
"function_name",
",",
"payload",
",",
"invocation_type",
"=",
"None",
",",
"alias_name",
"=",
"alias_name",
",",
"version",
"=",
"version",
")"
] |
Send a ping request to a lambda function.
:param awsclient:
:param function_name:
:param alias_name:
:param version:
:return: ping response payload
|
[
"Send",
"a",
"ping",
"request",
"to",
"a",
"lambda",
"function",
"."
] |
cd67cf416371337b83cb9ca3f696277125703339
|
https://github.com/glomex/gcdt/blob/cd67cf416371337b83cb9ca3f696277125703339/gcdt/ramuda_core.py#L638-L651
|
train
|
glomex/gcdt
|
gcdt/ramuda_core.py
|
invoke
|
def invoke(awsclient, function_name, payload, invocation_type=None,
alias_name=ALIAS_NAME, version=None, outfile=None):
"""Send a ping request to a lambda function.
:param awsclient:
:param function_name:
:param payload:
:param invocation_type:
:param alias_name:
:param version:
:param outfile: write response to file
:return: ping response payload
"""
log.debug('invoking lambda function: %s', function_name)
client_lambda = awsclient.get_client('lambda')
if invocation_type is None:
invocation_type = 'RequestResponse'
if payload.startswith('file://'):
log.debug('reading payload from file: %s' % payload)
with open(payload[7:], 'r') as pfile:
payload = pfile.read()
if version:
response = client_lambda.invoke(
FunctionName=function_name,
InvocationType=invocation_type,
Payload=payload,
Qualifier=version
)
else:
response = client_lambda.invoke(
FunctionName=function_name,
InvocationType=invocation_type,
Payload=payload,
Qualifier=alias_name
)
results = response['Payload'].read() # payload is a 'StreamingBody'
log.debug('invoke completed')
# write to file
if outfile:
with open(outfile, 'w') as ofile:
ofile.write(str(results))
ofile.flush()
return
else:
return results
|
python
|
def invoke(awsclient, function_name, payload, invocation_type=None,
alias_name=ALIAS_NAME, version=None, outfile=None):
"""Send a ping request to a lambda function.
:param awsclient:
:param function_name:
:param payload:
:param invocation_type:
:param alias_name:
:param version:
:param outfile: write response to file
:return: ping response payload
"""
log.debug('invoking lambda function: %s', function_name)
client_lambda = awsclient.get_client('lambda')
if invocation_type is None:
invocation_type = 'RequestResponse'
if payload.startswith('file://'):
log.debug('reading payload from file: %s' % payload)
with open(payload[7:], 'r') as pfile:
payload = pfile.read()
if version:
response = client_lambda.invoke(
FunctionName=function_name,
InvocationType=invocation_type,
Payload=payload,
Qualifier=version
)
else:
response = client_lambda.invoke(
FunctionName=function_name,
InvocationType=invocation_type,
Payload=payload,
Qualifier=alias_name
)
results = response['Payload'].read() # payload is a 'StreamingBody'
log.debug('invoke completed')
# write to file
if outfile:
with open(outfile, 'w') as ofile:
ofile.write(str(results))
ofile.flush()
return
else:
return results
|
[
"def",
"invoke",
"(",
"awsclient",
",",
"function_name",
",",
"payload",
",",
"invocation_type",
"=",
"None",
",",
"alias_name",
"=",
"ALIAS_NAME",
",",
"version",
"=",
"None",
",",
"outfile",
"=",
"None",
")",
":",
"log",
".",
"debug",
"(",
"'invoking lambda function: %s'",
",",
"function_name",
")",
"client_lambda",
"=",
"awsclient",
".",
"get_client",
"(",
"'lambda'",
")",
"if",
"invocation_type",
"is",
"None",
":",
"invocation_type",
"=",
"'RequestResponse'",
"if",
"payload",
".",
"startswith",
"(",
"'file://'",
")",
":",
"log",
".",
"debug",
"(",
"'reading payload from file: %s'",
"%",
"payload",
")",
"with",
"open",
"(",
"payload",
"[",
"7",
":",
"]",
",",
"'r'",
")",
"as",
"pfile",
":",
"payload",
"=",
"pfile",
".",
"read",
"(",
")",
"if",
"version",
":",
"response",
"=",
"client_lambda",
".",
"invoke",
"(",
"FunctionName",
"=",
"function_name",
",",
"InvocationType",
"=",
"invocation_type",
",",
"Payload",
"=",
"payload",
",",
"Qualifier",
"=",
"version",
")",
"else",
":",
"response",
"=",
"client_lambda",
".",
"invoke",
"(",
"FunctionName",
"=",
"function_name",
",",
"InvocationType",
"=",
"invocation_type",
",",
"Payload",
"=",
"payload",
",",
"Qualifier",
"=",
"alias_name",
")",
"results",
"=",
"response",
"[",
"'Payload'",
"]",
".",
"read",
"(",
")",
"# payload is a 'StreamingBody'",
"log",
".",
"debug",
"(",
"'invoke completed'",
")",
"# write to file",
"if",
"outfile",
":",
"with",
"open",
"(",
"outfile",
",",
"'w'",
")",
"as",
"ofile",
":",
"ofile",
".",
"write",
"(",
"str",
"(",
"results",
")",
")",
"ofile",
".",
"flush",
"(",
")",
"return",
"else",
":",
"return",
"results"
] |
Send a ping request to a lambda function.
:param awsclient:
:param function_name:
:param payload:
:param invocation_type:
:param alias_name:
:param version:
:param outfile: write response to file
:return: ping response payload
|
[
"Send",
"a",
"ping",
"request",
"to",
"a",
"lambda",
"function",
"."
] |
cd67cf416371337b83cb9ca3f696277125703339
|
https://github.com/glomex/gcdt/blob/cd67cf416371337b83cb9ca3f696277125703339/gcdt/ramuda_core.py#L654-L700
|
train
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.