repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
#!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: nab@kernel.org
#
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
    """Print a fatal error message and abort the generator.

    Exits the whole script with status 1; callers never return from this.
    Fixed to use the print() function so the script also runs on Python 3
    (the original used a Python 2 print statement).
    """
    print(msg)
    sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
    """Create the output directory for the generated fabric module.

    Returns 1 if the directory already exists, None after creating it.
    A creation failure is fatal (exits via tcm_mod_err()).
    """
    if os.path.isdir(fabric_mod_dir_var):
        return 1

    print("Creating fabric_mod_dir: " + fabric_mod_dir_var)
    try:
        # Bug fix: os.mkdir() returns None and signals failure by raising
        # OSError, so the original "ret = os.mkdir(...); if ret:" check
        # could never fire.  Catch the exception instead.
        os.mkdir(fabric_mod_dir_var)
    except OSError:
        tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
    return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
    """Generate <mod>_base.h with FC-specific nacl/tpg/lport structs.

    Writes the header file into fabric_mod_dir_var and sets the module
    globals fabric_mod_port = "lport" / fabric_mod_init_port = "nport"
    that the later generation stages interpolate into their templates.
    The emitted C text is unchanged from the original generator.
    """
    global fabric_mod_port
    global fabric_mod_init_port

    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print("Writing file: " + f)

    buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
    buf += " u64 nport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
    buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += " struct se_node_acl se_node_acl;\n"
    buf += "};\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += " /* FC lport target portal group tag for TCM */\n"
    buf += " u16 lport_tpgt;\n"
    buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
    buf += " struct " + fabric_mod_name + "_lport *lport;\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += " struct se_portal_group se_tpg;\n"
    buf += "};\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_lport {\n"
    buf += " /* SCSI protocol the lport is providing */\n"
    buf += " u8 lport_proto_id;\n"
    buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
    buf += " u64 lport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
    buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
    buf += " struct se_wwn lport_wwn;\n"
    buf += "};\n"

    # Bug fix: the original did "ret = p.write(buf); if ret: tcm_mod_err(...)".
    # On Python 3 write() returns the number of characters written, so a
    # SUCCESSFUL write was treated as a failure.  Write failures raise
    # IOError/OSError, and the with-statement guarantees the file is closed.
    with open(f, 'w') as p:
        p.write(buf)

    fabric_mod_port = "lport"
    fabric_mod_init_port = "nport"
    return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
    """Generate <mod>_base.h with SAS-specific nacl/tpg/tport structs.

    Writes the header file into fabric_mod_dir_var and sets the module
    globals fabric_mod_port = "tport" / fabric_mod_init_port = "iport"
    used by the later generation stages.  The emitted C text is unchanged
    from the original generator.
    """
    global fabric_mod_port
    global fabric_mod_init_port

    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print("Writing file: " + f)

    buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
    buf += " u64 iport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
    buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += " struct se_node_acl se_node_acl;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += " /* SAS port target portal group tag for TCM */\n"
    buf += " u16 tport_tpgt;\n"
    buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
    buf += " struct " + fabric_mod_name + "_tport *tport;\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += " struct se_portal_group se_tpg;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tport {\n"
    buf += " /* SCSI protocol the tport is providing */\n"
    buf += " u8 tport_proto_id;\n"
    buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
    buf += " u64 tport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for SAS Target port */\n"
    buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
    buf += " struct se_wwn tport_wwn;\n"
    buf += "};\n"

    # Bug fix: write() returns a truthy character count on Python 3, so the
    # original "if ret: tcm_mod_err(...)" aborted on success.  Failures
    # raise; the with-statement guarantees the file is closed.
    with open(f, 'w') as p:
        p.write(buf)

    fabric_mod_port = "tport"
    fabric_mod_init_port = "iport"
    return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
    """Generate <mod>_base.h with iSCSI-specific nacl/tpg/tport structs.

    Unlike the FC/SAS variants the nacl/tport carry no binary WWPN field,
    only ASCII names.  Sets the module globals fabric_mod_port = "tport" /
    fabric_mod_init_port = "iport".  The emitted C text is unchanged from
    the original generator.
    """
    global fabric_mod_port
    global fabric_mod_init_port

    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print("Writing file: " + f)

    buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += " /* ASCII formatted InitiatorName */\n"
    buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += " struct se_node_acl se_node_acl;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += " /* iSCSI target portal group tag for TCM */\n"
    buf += " u16 tport_tpgt;\n"
    buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
    buf += " struct " + fabric_mod_name + "_tport *tport;\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += " struct se_portal_group se_tpg;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tport {\n"
    buf += " /* SCSI protocol the tport is providing */\n"
    buf += " u8 tport_proto_id;\n"
    buf += " /* ASCII formatted TargetName for IQN */\n"
    buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
    buf += " struct se_wwn tport_wwn;\n"
    buf += "};\n"

    # Bug fix: write() returns a truthy character count on Python 3, so the
    # original "if ret: tcm_mod_err(...)" aborted on success.  Failures
    # raise; the with-statement guarantees the file is closed.
    with open(f, 'w') as p:
        p.write(buf)

    fabric_mod_port = "tport"
    fabric_mod_init_port = "iport"
    return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
    """Dispatch to the protocol-specific <mod>_base.h generator.

    proto_ident must be one of "FC", "SAS" or "iSCSI"; any other value is
    a fatal usage error and terminates the script with exit status 1.
    """
    if proto_ident == "FC":
        tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
    elif proto_ident == "SAS":
        tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
    elif proto_ident == "iSCSI":
        tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
    else:
        # Fix: Python 3 compatible print() (original was a py2 print
        # statement).
        print("Unsupported proto_ident: " + proto_ident)
        sys.exit(1)
    return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
    """Generate <mod>_configfs.c: the configfs glue for the new fabric.

    Emits skeleton make/drop callbacks for node ACLs, TPGs and WWN ports,
    a version attribute, the target_core_fabric_ops table wiring them up,
    and module init/exit that (de)registers with TCM configfs.  For "FC"
    and "SAS" the nodeacl/wwn templates also carry a u64 wwpn; "iSCSI"
    variants omit it.  Reads the module globals fabric_mod_port /
    fabric_mod_init_port set by the *_include generators.
    """
    buf = ""

    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
    print "Writing file: " + f

    # NOTE(review): open() never returns a falsy handle (it raises on
    # failure), so this guard is dead code; kept as-is.
    p = open(f, 'w');
    if not p:
        tcm_mod_err("Unable to open file: " + f)

    # Header includes of the generated translation unit.
    buf = "#include <linux/module.h>\n"
    buf += "#include <linux/moduleparam.h>\n"
    buf += "#include <linux/version.h>\n"
    buf += "#include <generated/utsrelease.h>\n"
    buf += "#include <linux/utsname.h>\n"
    buf += "#include <linux/init.h>\n"
    buf += "#include <linux/slab.h>\n"
    buf += "#include <linux/kthread.h>\n"
    buf += "#include <linux/types.h>\n"
    buf += "#include <linux/string.h>\n"
    buf += "#include <linux/configfs.h>\n"
    buf += "#include <linux/ctype.h>\n"
    buf += "#include <asm/unaligned.h>\n\n"
    buf += "#include <target/target_core_base.h>\n"
    buf += "#include <target/target_core_fabric.h>\n"
    buf += "#include <target/target_core_fabric_configfs.h>\n"
    buf += "#include <target/target_core_configfs.h>\n"
    buf += "#include <target/configfs_macros.h>\n\n"
    buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
    buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"

    buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
    buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"

    # --- <mod>_make_nodeacl() / <mod>_drop_nodeacl() ---
    buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
    buf += " struct se_portal_group *se_tpg,\n"
    buf += " struct config_group *group,\n"
    buf += " const char *name)\n"
    buf += "{\n"
    buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
    buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"

    # FC/SAS node ACLs parse a binary WWPN from the configfs name.
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " u64 wwpn = 0;\n"

    buf += " u32 nexus_depth;\n\n"
    buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
    buf += " return ERR_PTR(-EINVAL); */\n"
    buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
    buf += " if (!se_nacl_new)\n"
    buf += " return ERR_PTR(-ENOMEM);\n"
    buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
    buf += " nexus_depth = 1;\n"
    buf += " /*\n"
    buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
    buf += " * when converting a NodeACL from demo mode -> explict\n"
    buf += " */\n"
    buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
    buf += " name, nexus_depth);\n"
    buf += " if (IS_ERR(se_nacl)) {\n"
    buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
    buf += " return se_nacl;\n"
    buf += " }\n"
    buf += " /*\n"
    buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
    buf += " */\n"
    buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"

    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"

    buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
    buf += " return se_nacl;\n"
    buf += "}\n\n"
    buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
    buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
    buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
    buf += " kfree(nacl);\n"
    buf += "}\n\n"

    # --- <mod>_make_tpg() / <mod>_drop_tpg() ---
    buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
    buf += " struct se_wwn *wwn,\n"
    buf += " struct config_group *group,\n"
    buf += " const char *name)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
    buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
    buf += " unsigned long tpgt;\n"
    buf += " int ret;\n\n"
    buf += " if (strstr(name, \"tpgt_\") != name)\n"
    buf += " return ERR_PTR(-EINVAL);\n"
    buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
    buf += " return ERR_PTR(-EINVAL);\n\n"
    buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
    buf += " if (!tpg) {\n"
    buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
    buf += " return ERR_PTR(-ENOMEM);\n"
    buf += " }\n"
    buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
    buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
    buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
    buf += " &tpg->se_tpg, (void *)tpg,\n"
    buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
    buf += " if (ret < 0) {\n"
    buf += " kfree(tpg);\n"
    buf += " return NULL;\n"
    buf += " }\n"
    buf += " return &tpg->se_tpg;\n"
    buf += "}\n\n"
    buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
    buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
    buf += " core_tpg_deregister(se_tpg);\n"
    buf += " kfree(tpg);\n"
    buf += "}\n\n"

    # --- <mod>_make_<port>() / <mod>_drop_<port>() (WWN endpoint) ---
    buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
    buf += " struct target_fabric_configfs *tf,\n"
    buf += " struct config_group *group,\n"
    buf += " const char *name)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"

    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " u64 wwpn = 0;\n\n"

    buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
    buf += " return ERR_PTR(-EINVAL); */\n\n"
    buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
    buf += " if (!" + fabric_mod_port + ") {\n"
    buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
    buf += " return ERR_PTR(-ENOMEM);\n"
    buf += " }\n"

    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
        buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"

    buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
    buf += "}\n\n"
    buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
    buf += " kfree(" + fabric_mod_port + ");\n"
    buf += "}\n\n"

    # --- version attribute exposed under the fabric's configfs wwn dir ---
    buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
    buf += " struct target_fabric_configfs *tf,\n"
    buf += " char *page)\n"
    buf += "{\n"
    buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
    buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
    buf += " utsname()->machine);\n"
    buf += "}\n\n"
    buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
    buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
    buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
    buf += " NULL,\n"
    buf += "};\n\n"

    # --- target_core_fabric_ops table: wires the skeleton callbacks
    #     (generated by tcm_mod_dump_fabric_ops()) into TCM core. ---
    buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
    buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
    buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
    buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
    buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
    buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
    buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
    buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
    buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
    buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
    buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
    buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
    buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
    buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
    buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
    buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
    buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
    buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
    buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
    buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
    buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
    buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
    buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
    buf += " .sess_get_initiator_sid = NULL,\n"
    buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
    buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
    buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
    buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
    buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
    buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
    buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
    buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
    buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
    buf += " /*\n"
    buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
    buf += " */\n"
    buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
    buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
    buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
    buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
    buf += " .fabric_post_link = NULL,\n"
    buf += " .fabric_pre_unlink = NULL,\n"
    buf += " .fabric_make_np = NULL,\n"
    buf += " .fabric_drop_np = NULL,\n"
    buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
    buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
    buf += "};\n\n"

    # --- configfs (de)registration; note fabric_mod_name[4:] strips the
    #     "tcm_" prefix for the configfs fabric name. ---
    buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
    buf += "{\n"
    buf += " struct target_fabric_configfs *fabric;\n"
    buf += " int ret;\n\n"
    buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
    buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
    buf += " utsname()->machine);\n"
    buf += " /*\n"
    buf += " * Register the top level struct config_item_type with TCM core\n"
    buf += " */\n"
    buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
    buf += " if (IS_ERR(fabric)) {\n"
    buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
    buf += " return PTR_ERR(fabric);\n"
    buf += " }\n"
    buf += " /*\n"
    buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
    buf += " */\n"
    buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
    buf += " /*\n"
    buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
    buf += " */\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
    buf += " /*\n"
    buf += " * Register the fabric for use within TCM\n"
    buf += " */\n"
    buf += " ret = target_fabric_configfs_register(fabric);\n"
    buf += " if (ret < 0) {\n"
    buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
    buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
    buf += " return ret;\n"
    buf += " }\n"
    buf += " /*\n"
    buf += " * Setup our local pointer to *fabric\n"
    buf += " */\n"
    buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
    buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
    buf += " return 0;\n"
    buf += "};\n\n"
    buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
    buf += "{\n"
    buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
    buf += " return;\n\n"
    buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
    buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
    buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
    buf += "};\n\n"

    # --- module init/exit boilerplate ---
    buf += "static int __init " + fabric_mod_name + "_init(void)\n"
    buf += "{\n"
    buf += " int ret;\n\n"
    buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
    buf += " if (ret < 0)\n"
    buf += " return ret;\n\n"
    buf += " return 0;\n"
    buf += "};\n\n"
    buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
    buf += "{\n"
    buf += " " + fabric_mod_name + "_deregister_configfs();\n"
    buf += "};\n\n"
    buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
    buf += "MODULE_LICENSE(\"GPL\");\n"
    buf += "module_init(" + fabric_mod_name + "_init);\n"
    buf += "module_exit(" + fabric_mod_name + "_exit);\n"

    # NOTE(review): on Python 3 write() returns the character count, which
    # would make this error check fire on SUCCESS; harmless under the
    # Python 2 interpreter this script targets, where write() returns None.
    ret = p.write(buf)
    if ret:
        tcm_mod_err("Unable to write f: " + f)

    p.close()
    return
def tcm_mod_scan_fabric_ops(tcm_dir):
    """Populate the global fabric_ops list from target_core_fabric.h.

    Scans the kernel header for function-pointer member declarations
    (lines containing "(*") and appends each matching line, stripped of
    trailing whitespace, to the module-level fabric_ops list that
    tcm_mod_dump_fabric_ops() later pattern-matches against.
    """
    fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"

    print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
    # One-shot state flag: 0 until the first line past the struct opener
    # has been consumed, then 1 for the rest of the file.
    process_fo = 0;

    p = open(fabric_ops_api, 'r')

    line = p.readline()
    while line:
        # Skip the "struct target_core_fabric_ops {" opener itself.
        if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
            line = p.readline()
            continue

        # First line after the opener: flip the flag, then fall into the
        # same match-and-collect logic as the steady-state branch below.
        if process_fo == 0:
            process_fo = 1;
            line = p.readline()
            # Search for function pointer
            if not re.search('\(\*', line):
                continue

            fabric_ops.append(line.rstrip())
            continue

        line = p.readline()
        # Search for function pointer
        if not re.search('\(\*', line):
            continue

        fabric_ops.append(line.rstrip())

    # NOTE(review): scanning does not stop at the struct's closing "};" —
    # every "(*" line to EOF is collected; presumably the struct is the
    # last such content in the header.  Confirm against the kernel tree.
    p.close()
    return
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
    """Generate <mod>_fabric.c and <mod>_fabric.h skeleton callbacks.

    For each entry collected by tcm_mod_scan_fabric_ops() into the global
    fabric_ops list, emits a stub implementation into buf (the .c file)
    and a matching prototype into bufi (the .h file).  The three
    protocol-sensitive stubs (proto ident / PR transport-id helpers)
    switch on proto_ident ("FC", "SAS" or "iSCSI") to call the matching
    kernel helper.  Uses the module globals fabric_mod_port /
    fabric_mod_init_port set by the *_include generators.
    """
    buf = ""
    bufi = ""

    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
    print "Writing file: " + f

    # NOTE(review): open() raises on failure rather than returning a falsy
    # handle, so these guards are dead code; kept as-is.
    p = open(f, 'w')
    if not p:
        tcm_mod_err("Unable to open file: " + f)

    fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
    print "Writing file: " + fi

    pi = open(fi, 'w')
    if not pi:
        tcm_mod_err("Unable to open file: " + fi)

    # Fixed preamble of the generated .c file.
    buf = "#include <linux/slab.h>\n"
    buf += "#include <linux/kthread.h>\n"
    buf += "#include <linux/types.h>\n"
    buf += "#include <linux/list.h>\n"
    buf += "#include <linux/types.h>\n"
    buf += "#include <linux/string.h>\n"
    buf += "#include <linux/ctype.h>\n"
    buf += "#include <asm/unaligned.h>\n"
    buf += "#include <scsi/scsi.h>\n"
    buf += "#include <scsi/scsi_host.h>\n"
    buf += "#include <scsi/scsi_device.h>\n"
    buf += "#include <scsi/scsi_cmnd.h>\n"
    buf += "#include <scsi/libfc.h>\n\n"
    buf += "#include <target/target_core_base.h>\n"
    buf += "#include <target/target_core_fabric.h>\n"
    buf += "#include <target/target_core_configfs.h>\n\n"
    buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
    buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"

    # check_true/check_false are always emitted; the configfs ops table
    # references them for the demo-mode predicates.
    buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
    buf += "{\n"
    buf += " return 1;\n"
    buf += "}\n\n"
    bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
    buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
    buf += "{\n"
    buf += " return 0;\n"
    buf += "}\n\n"
    bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"

    total_fabric_ops = len(fabric_ops)
    i = 0
    while i < total_fabric_ops:
        fo = fabric_ops[i]
        i += 1
        # print "fabric_ops: " + fo

        if re.search('get_fabric_name', fo):
            buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
            buf += "{\n"
            # fabric_mod_name[4:] strips the "tcm_" prefix.
            buf += " return \"" + fabric_mod_name[4:] + "\";\n"
            buf += "}\n\n"
            bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
            continue

        if re.search('get_fabric_proto_ident', fo):
            buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " u8 proto_id;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
                buf += " break;\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
                buf += " break;\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
                buf += " break;\n"

            buf += " }\n\n"
            buf += " return proto_id;\n"
            buf += "}\n\n"
            bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"

        if re.search('get_wwn', fo):
            buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
            buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
            buf += "}\n\n"
            bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"

        if re.search('get_tag', fo):
            buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
            buf += "}\n\n"
            bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"

        if re.search('get_default_depth', fo):
            buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " return 1;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"

        if re.search('get_pr_transport_id\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " struct se_node_acl *se_nacl,\n"
            buf += " struct t10_pr_registration *pr_reg,\n"
            buf += " int *format_code,\n"
            buf += " unsigned char *buf)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " int ret = 0;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code, buf);\n"
                buf += " break;\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code, buf);\n"
                buf += " break;\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code, buf);\n"
                buf += " break;\n"

            buf += " }\n\n"
            buf += " return ret;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
            bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
            bufi += " int *, unsigned char *);\n"

        if re.search('get_pr_transport_id_len\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " struct se_node_acl *se_nacl,\n"
            buf += " struct t10_pr_registration *pr_reg,\n"
            buf += " int *format_code)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " int ret = 0;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code);\n"
                buf += " break;\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code);\n"
                buf += " break;\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
                buf += " format_code);\n"
                buf += " break;\n"

            buf += " }\n\n"
            buf += " return ret;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
            bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
            bufi += " int *);\n"

        if re.search('parse_pr_out_transport_id\)\(', fo):
            buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " const char *buf,\n"
            buf += " u32 *out_tid_len,\n"
            buf += " char **port_nexus_ptr)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
            buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
            buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
            buf += " char *tid = NULL;\n\n"
            buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
            if proto_ident == "FC":
                buf += " case SCSI_PROTOCOL_FCP:\n"
                buf += " default:\n"
                buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
                buf += " port_nexus_ptr);\n"
            elif proto_ident == "SAS":
                buf += " case SCSI_PROTOCOL_SAS:\n"
                buf += " default:\n"
                buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
                buf += " port_nexus_ptr);\n"
            elif proto_ident == "iSCSI":
                buf += " case SCSI_PROTOCOL_ISCSI:\n"
                buf += " default:\n"
                buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
                buf += " port_nexus_ptr);\n"

            buf += " }\n\n"
            buf += " return tid;\n"
            buf += "}\n\n"
            bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
            bufi += " const char *, u32 *, char **);\n"

        if re.search('alloc_fabric_acl\)\(', fo):
            buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
            buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
            buf += " if (!nacl) {\n"
            buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
            buf += " return NULL;\n"
            buf += " }\n\n"
            buf += " return &nacl->se_node_acl;\n"
            buf += "}\n\n"
            bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"

        if re.search('release_fabric_acl\)\(', fo):
            buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
            buf += " struct se_portal_group *se_tpg,\n"
            buf += " struct se_node_acl *se_nacl)\n"
            buf += "{\n"
            buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
            buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
            buf += " kfree(nacl);\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
            bufi += " struct se_node_acl *);\n"

        if re.search('tpg_get_inst_index\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
            buf += "{\n"
            buf += " return 1;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"

        if re.search('\*release_cmd\)\(', fo):
            buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"

        if re.search('shutdown_session\)\(', fo):
            buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"

        if re.search('close_session\)\(', fo):
            buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"

        if re.search('stop_session\)\(', fo):
            buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"

        if re.search('fall_back_to_erl0\)\(', fo):
            buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"

        if re.search('sess_logged_in\)\(', fo):
            buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"

        if re.search('sess_get_index\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"

        if re.search('write_pending\)\(', fo):
            buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"

        if re.search('write_pending_status\)\(', fo):
            buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"

        if re.search('set_default_node_attributes\)\(', fo):
            buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
            buf += "{\n"
            buf += " return;\n"
            buf += "}\n\n"
            bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"

        if re.search('get_task_tag\)\(', fo):
            buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"

        if re.search('get_cmd_state\)\(', fo):
            buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"

        if re.search('queue_data_in\)\(', fo):
            buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"

        if re.search('queue_status\)\(', fo):
            buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"

        if re.search('queue_tm_rsp\)\(', fo):
            buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"

        if re.search('is_state_remove\)\(', fo):
            buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
            buf += "{\n"
            buf += " return 0;\n"
            buf += "}\n\n"
            bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"

    # NOTE(review): as elsewhere in this script, "if ret:" after write()
    # misfires on Python 3 (write() returns the character count there);
    # under the targeted Python 2 interpreter write() returns None.
    ret = p.write(buf)
    if ret:
        tcm_mod_err("Unable to write f: " + f)

    p.close()

    ret = pi.write(bufi)
    if ret:
        tcm_mod_err("Unable to write fi: " + fi)

    pi.close()
    return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
    """Append the new module's subdirectory rule to drivers/target/Makefile.

    tcm_dir: kernel tree root. The appended rule is
    obj-$(CONFIG_<NAME>) += <name>/
    """
    entry = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
    makefile = tcm_dir + "/drivers/target/Makefile"
    fh = open(makefile, 'a')
    fh.write(entry)
    fh.close()
    return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
    """Append a `source` line for the new module to drivers/target/Kconfig.

    tcm_dir: kernel tree root. The appended line references
    drivers/target/<name>/Kconfig.
    """
    entry = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
    kconfig = tcm_dir + "/drivers/target/Kconfig"
    fh = open(kconfig, 'a')
    fh.write(entry)
    fh.close()
    return
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
tcm_dir = os.getcwd();
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
# Command-line interface: both options are required strings.
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
                  action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
                  action='store', nargs=1, type='string')

(opts, args) = parser.parse_args()

# Bail out with usage help if either mandatory option is missing.
# NOTE(review): this argument parsing runs at import time, not only
# under the __main__ guard below — confirm that is intentional.
mandatories = ['modname', 'protoident']
for m in mandatories:
    if not opts.__dict__[m]:
        print "mandatory option is missing\n"
        parser.print_help()
        exit(-1)

if __name__ == "__main__":
    main(str(opts.modname), opts.protoident)
| gpl-2.0 |
TRex22/Sick-Beard | cherrypy/wsgiserver/ssl_pyopenssl.py | 74 | 9379 | """A library for integrating pyOpenSSL with CherryPy.
The OpenSSL module must be importable for SSL functionality.
You can obtain it from http://pyopenssl.sourceforge.net/
To use this module, set CherryPyWSGIServer.ssl_adapter to an instance of
SSLAdapter. There are two ways to use SSL:
Method One:
ssl_adapter.context: an instance of SSL.Context.
If this is not None, it is assumed to be an SSL.Context instance,
and will be passed to SSL.Connection on bind(). The developer is
responsible for forming a valid Context object. This approach is
to be preferred for more flexibility, e.g. if the cert and key are
streams instead of files, or need decryption, or SSL.SSLv3_METHOD
is desired instead of the default SSL.SSLv23_METHOD, etc. Consult
the pyOpenSSL documentation for complete options.
Method Two (shortcut):
ssl_adapter.certificate: the filename of the server SSL certificate.
ssl_adapter.private_key: the filename of the server's private key file.
Both are None by default. If ssl_adapter.context is None, but .private_key
and .certificate are both given and valid, they will be read, and the
context will be automatically created from them.
ssl_adapter.certificate_chain: (optional) the filename of CA's intermediate
certificate bundle. This is needed for cheaper "chained root" SSL
certificates, and should be left as None if not required.
"""
import socket
import threading
import time
from cherrypy import wsgiserver
try:
from OpenSSL import SSL
from OpenSSL import crypto
except ImportError:
SSL = None
class SSL_fileobject(wsgiserver.CP_fileobject):
    """SSL file object attached to a socket object."""

    # seconds before _safe_call gives up retrying a blocked SSL operation
    ssl_timeout = 3
    # seconds slept between WantRead/WantWrite retries
    ssl_retry = .01

    def _safe_call(self, is_reader, call, *args, **kwargs):
        """Wrap the given call with SSL error-trapping.

        is_reader: if False EOF errors will be raised. If True, EOF errors
        will return "" (to emulate normal sockets).
        """
        start = time.time()
        while True:
            try:
                return call(*args, **kwargs)
            except SSL.WantReadError:
                # Sleep and try again. This is dangerous, because it means
                # the rest of the stack has no way of differentiating
                # between a "new handshake" error and "client dropped".
                # Note this isn't an endless loop: there's a timeout below.
                time.sleep(self.ssl_retry)
            except SSL.WantWriteError:
                time.sleep(self.ssl_retry)
            except SSL.SysCallError, e:
                # OS-level failure underneath the SSL layer.
                if is_reader and e.args == (-1, 'Unexpected EOF'):
                    return ""
                errnum = e.args[0]
                if is_reader and errnum in wsgiserver.socket_errors_to_ignore:
                    return ""
                raise socket.error(errnum)
            except SSL.Error, e:
                if is_reader and e.args == (-1, 'Unexpected EOF'):
                    return ""
                # Dig the alert description out of pyOpenSSL's nested
                # error tuple; absent entries simply leave thirdarg None.
                thirdarg = None
                try:
                    thirdarg = e.args[0][0][2]
                except IndexError:
                    pass
                if thirdarg == 'http request':
                    # The client is talking HTTP to an HTTPS server.
                    raise wsgiserver.NoSSLError()
                raise wsgiserver.FatalSSLAlert(*e.args)
            except:
                raise
            # Only the retry paths reach here; enforce the overall timeout.
            if time.time() - start > self.ssl_timeout:
                raise socket.timeout("timed out")

    def recv(self, *args, **kwargs):
        # Keep reading until OpenSSL reports no more buffered record
        # bytes pending, so a single recv() drains a full SSL record.
        buf = []
        r = super(SSL_fileobject, self).recv
        while True:
            data = self._safe_call(True, r, *args, **kwargs)
            buf.append(data)
            p = self._sock.pending()
            if not p:
                return "".join(buf)

    def sendall(self, *args, **kwargs):
        return self._safe_call(False, super(SSL_fileobject, self).sendall,
                               *args, **kwargs)

    def send(self, *args, **kwargs):
        return self._safe_call(False, super(SSL_fileobject, self).send,
                               *args, **kwargs)
class SSLConnection:
    """A thread-safe wrapper for an SSL.Connection.

    *args: the arguments to create the wrapped SSL.Connection(*args).
    """

    def __init__(self, *args):
        self._ssl_conn = SSL.Connection(*args)
        # RLock so a delegated method may re-enter (e.g. via makefile).
        self._lock = threading.RLock()

    # Generate one locked delegating method per SSL.Connection method:
    # each acquires the lock, forwards to the wrapped connection, and
    # releases the lock in a finally clause. exec is used so the methods
    # carry their real names.
    for f in ('get_context', 'pending', 'send', 'write', 'recv', 'read',
              'renegotiate', 'bind', 'listen', 'connect', 'accept',
              'setblocking', 'fileno', 'close', 'get_cipher_list',
              'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
              'makefile', 'get_app_data', 'set_app_data', 'state_string',
              'sock_shutdown', 'get_peer_certificate', 'want_read',
              'want_write', 'set_connect_state', 'set_accept_state',
              'connect_ex', 'sendall', 'settimeout', 'gettimeout'):
        exec("""def %s(self, *args):
    self._lock.acquire()
    try:
        return self._ssl_conn.%s(*args)
    finally:
        self._lock.release()
""" % (f, f))

    # shutdown is written out by hand because its signature differs:
    def shutdown(self, *args):
        self._lock.acquire()
        try:
            # pyOpenSSL.socket.shutdown takes no args
            return self._ssl_conn.shutdown()
        finally:
            self._lock.release()
class pyOpenSSLAdapter(wsgiserver.SSLAdapter):
    """A wrapper for integrating pyOpenSSL with CherryPy."""

    def __init__(self, certificate, private_key, certificate_chain=None):
        if SSL is None:
            raise ImportError("You must install pyOpenSSL to use HTTPS.")

        # See the module docstring: either set .context directly, or give
        # certificate/private_key (+ optional chain) and a context will be
        # built lazily in bind().
        self.context = None
        self.certificate = certificate
        self.private_key = private_key
        self.certificate_chain = certificate_chain
        self._environ = None

    def bind(self, sock):
        """Wrap and return the given socket."""
        if self.context is None:
            self.context = self.get_context()
        conn = SSLConnection(self.context, sock)
        # Server-certificate WSGI entries are constant; compute them once.
        self._environ = self.get_environ()
        return conn

    def wrap(self, sock):
        """Wrap and return the given socket, plus WSGI environ entries."""
        return sock, self._environ.copy()

    def get_context(self):
        """Return an SSL.Context from self attributes."""
        # See http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/442473
        c = SSL.Context(SSL.SSLv23_METHOD)
        c.use_privatekey_file(self.private_key)
        if self.certificate_chain:
            # NOTE(review): load_verify_locations() installs the chain as
            # CA verification material; use_certificate_chain_file() is the
            # usual call for serving an intermediate chain — confirm which
            # behavior is intended here.
            c.load_verify_locations(self.certificate_chain)
        c.use_certificate_file(self.certificate)
        return c

    def get_environ(self):
        """Return WSGI environ entries to be merged into each request."""
        ssl_environ = {
            "HTTPS": "on",
            # pyOpenSSL doesn't provide access to any of these AFAICT
            ## 'SSL_PROTOCOL': 'SSLv2',
            ## SSL_CIPHER string The cipher specification name
            ## SSL_VERSION_INTERFACE string The mod_ssl program version
            ## SSL_VERSION_LIBRARY string The OpenSSL program version
        }

        if self.certificate:
            # Server certificate attributes
            cert = open(self.certificate, 'rb').read()
            cert = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
            ssl_environ.update({
                'SSL_SERVER_M_VERSION': cert.get_version(),
                'SSL_SERVER_M_SERIAL': cert.get_serial_number(),
                ## 'SSL_SERVER_V_START': Validity of server's certificate (start time),
                ## 'SSL_SERVER_V_END': Validity of server's certificate (end time),
            })

            for prefix, dn in [("I", cert.get_issuer()),
                               ("S", cert.get_subject())]:
                # X509Name objects don't seem to have a way to get the
                # complete DN string. Use str() and slice it instead,
                # because str(dn) == "<X509Name object '/C=US/ST=...'>"
                dnstr = str(dn)[18:-2]

                wsgikey = 'SSL_SERVER_%s_DN' % prefix
                ssl_environ[wsgikey] = dnstr

                # The DN should be of the form: /k1=v1/k2=v2, but we must allow
                # for any value to contain slashes itself (in a URL).
                # Parse right-to-left so slashes inside values survive.
                while dnstr:
                    pos = dnstr.rfind("=")
                    dnstr, value = dnstr[:pos], dnstr[pos + 1:]
                    pos = dnstr.rfind("/")
                    dnstr, key = dnstr[:pos], dnstr[pos + 1:]
                    if key and value:
                        wsgikey = 'SSL_SERVER_%s_DN_%s' % (prefix, key)
                        ssl_environ[wsgikey] = value

        return ssl_environ

    def makefile(self, sock, mode='r', bufsize= -1):
        # SSL sockets get the retry-aware SSL_fileobject (preserving the
        # socket's timeout); plain sockets fall back to CP_fileobject.
        if SSL and isinstance(sock, SSL.ConnectionType):
            timeout = sock.gettimeout()
            f = SSL_fileobject(sock, mode, bufsize)
            f.ssl_timeout = timeout
            return f
        else:
            return wsgiserver.CP_fileobject(sock, mode, bufsize)
| gpl-3.0 |
rahuldhote/odoo | addons/account_analytic_analysis/__init__.py | 425 | 1107 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import account_analytic_analysis
import res_config
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
LarryPham/android-quill | jni/libhpdf-2.3.0RC2/if/python/demo/image_demo.py | 32 | 6106 | ###
## * << Haru Free PDF Library 2.0.0 >> -- image_demo.c
## *
## * Copyright (c) 1999-2006 Takeshi Kanno <takeshi_kanno@est.hi-ho.ne.jp>
## *
## * Permission to use, copy, modify, distribute and sell this software
## * and its documentation for any purpose is hereby granted without fee,
## * provided that the above copyright notice appear in all copies and
## * that both that copyright notice and this permission notice appear
## * in supporting documentation.
## * It is provided "as is" without express or implied warranty.
## *
##
## port to python by Li Jun
## http://groups.google.com/group/pythoncia
import os, sys
from ctypes import *
up=2
def setlibpath(up):
    """Add the directory `up` levels above this file to sys.path.

    Used so the demo can import the `haru` bindings from the library
    checkout without installation. Idempotent: the path is only appended
    when it is not already present.
    """
    import sys
    here = os.path.dirname(os.path.realpath(__file__))
    # Build the ancestor path with os.path.join/os.pardir instead of the
    # original hard-coded '\..' string, which only produced a valid path
    # on Windows (on POSIX it appended literal backslash components).
    path = os.path.normpath(os.path.join(here, *([os.pardir] * up)))
    if path not in sys.path:
        sys.path.append(path)
setlibpath(up)
from haru import *
from haru.c_func import *
from haru.hpdf_errorcode import *
from math import *
# ctypes callback installed into libhpdf: on any library error, print the
# symbolic error name, free the (module-global) document and abort.
@HPDF_Error_Handler(None, HPDF_UINT, HPDF_UINT, c_void_p)
def error_handler (error_no, detail_no, user_data):
    global pdf
    printf ("ERROR: %s, detail_no=%u\n", error_detail[error_no],
            detail_no)
    HPDF_Free (pdf)
    sys.exit(1)
def show_description (page, x, y, text):
    """Mark (x, y) with a cross, print its coordinates and a caption.

    Used to label each image variant drawn on the demo page.
    """
    # crosshair centered on the anchor point
    HPDF_Page_MoveTo (page, x, y - 10)
    HPDF_Page_LineTo (page, x, y + 10)
    HPDF_Page_MoveTo (page, x - 10, y)
    HPDF_Page_LineTo (page, x + 10, y)
    HPDF_Page_Stroke (page)

    HPDF_Page_SetFontAndSize (page, HPDF_Page_GetCurrentFont (page), 8)
    HPDF_Page_SetRGBFill (page, 0, 0, 0)

    # coordinate label, right-aligned so it ends just left of the cross
    HPDF_Page_BeginText (page)
    buf="(x=%d,y=%d)" % (int(x), int(y))
    HPDF_Page_MoveTextPos (page, x - HPDF_Page_TextWidth (page, buf) - 5,
                           y - 10)
    HPDF_Page_ShowText (page, buf)
    HPDF_Page_EndText (page)

    # caption below the coordinates
    HPDF_Page_BeginText (page)
    HPDF_Page_MoveTextPos (page, x - 20, y - 25)
    HPDF_Page_ShowText (page, text)
    HPDF_Page_EndText (page)
def main ():
    """Render the libharu image demo and save it as <scriptname>.pdf.

    Draws a test PNG at actual size, scaled, skewed, rotated, masked and
    color-masked, each variant labelled via show_description().
    Returns 0 on success, 1 when the PDF document cannot be created.
    """
    # Derive the output name from the script path. os.path.splitext is
    # safe even when the name has no extension (the previous
    # fname[:fname.rfind('.')] silently dropped a character then).
    fname = os.path.splitext(os.path.realpath(sys.argv[0]))[0] + '.pdf'

    pdf = HPDF_New (error_handler, NULL)
    if (not pdf):
        printf ("error: cannot create PdfDoc object\n")
        return 1

    HPDF_SetCompressionMode (pdf, HPDF_COMP_ALL)

    # create default-font
    font = HPDF_GetFont (pdf, "Helvetica", NULL)

    # add a new page object.
    page = HPDF_AddPage (pdf)
    HPDF_Page_SetWidth (page, 550)
    HPDF_Page_SetHeight (page, 500)

    dst = HPDF_Page_CreateDestination (page)
    HPDF_Destination_SetXYZ (dst, 0, HPDF_Page_GetHeight (page), 1)
    HPDF_SetOpenAction(pdf, dst)

    HPDF_Page_BeginText (page)
    HPDF_Page_SetFontAndSize (page, font, 20)
    HPDF_Page_MoveTextPos (page, 220, HPDF_Page_GetHeight (page) - 70)
    HPDF_Page_ShowText (page, "ImageDemo")
    HPDF_Page_EndText (page)

    # load image file.
    image = HPDF_LoadPngImageFromFile (pdf, "pngsuite/basn3p02.png")
    # image1 is masked by image2.
    image1 = HPDF_LoadPngImageFromFile (pdf, "pngsuite/basn3p02.png")
    # image2 is a mask image.
    image2 = HPDF_LoadPngImageFromFile (pdf, "pngsuite/basn0g01.png")
    # image3 is a RGB-color image. we use this image for color-mask demo.
    image3 = HPDF_LoadPngImageFromFile (pdf, "pngsuite/maskimage.png")

    iw = HPDF_Image_GetWidth (image)
    ih = HPDF_Image_GetHeight (image)

    HPDF_Page_SetLineWidth (page, 0.5)

    x = 100
    y = HPDF_Page_GetHeight (page) - 150

    # Draw image to the canvas. (normal-mode with actual size.)
    HPDF_Page_DrawImage (page, image, x, y, iw, ih)
    show_description (page, x, y, "Actual Size")
    x += 150

    # Scalling image (X direction)
    HPDF_Page_DrawImage (page, image, x, y, iw * 1.5, ih)
    show_description (page, x, y, "Scalling image (X direction)")
    x += 150

    # Scalling image (Y direction).
    HPDF_Page_DrawImage (page, image, x, y, iw, ih * 1.5)
    show_description (page, x, y, "Scalling image (Y direction)")
    x = 100
    y -= 120

    # Skewing image. Use a float divisor: under Python 2, angle1 / 180 is
    # integer division and evaluated to 0, so the skew was a no-op.
    angle1 = 10
    angle2 = 20
    rad1 = angle1 / 180.0 * 3.141592
    rad2 = angle2 / 180.0 * 3.141592
    HPDF_Page_GSave (page)
    HPDF_Page_Concat (page, iw, tan(rad1) * iw, tan(rad2) * ih, ih, x, y)
    HPDF_Page_ExecuteXObject (page, image)
    HPDF_Page_GRestore (page)
    show_description (page, x, y, "Skewing image")
    x += 150

    # Rotating image
    angle = 30; # rotation of 30 degrees.
    # Float divisor for the same Python 2 integer-division reason as above.
    rad = angle / 180.0 * 3.141592; # Calcurate the radian value.
    HPDF_Page_GSave (page)
    HPDF_Page_Concat (page, iw * cos(rad),
                      iw * sin(rad),
                      ih * -sin(rad),
                      ih * cos(rad),
                      x, y)
    HPDF_Page_ExecuteXObject (page, image)
    HPDF_Page_GRestore (page)
    show_description (page, x, y, "Rotating image")
    x += 150

    # draw masked image.
    # Set image2 to the mask image of image1
    HPDF_Image_SetMaskImage (image1, image2)
    HPDF_Page_SetRGBFill (page, 0, 0, 0)
    HPDF_Page_BeginText (page)
    HPDF_Page_MoveTextPos (page, x - 6, y + 14)
    HPDF_Page_ShowText (page, "MASKMASK")
    HPDF_Page_EndText (page)
    HPDF_Page_DrawImage (page, image1, x - 3, y - 3, iw + 6, ih + 6)
    show_description (page, x, y, "masked image")
    x = 100
    y -= 120

    # color mask.
    HPDF_Page_SetRGBFill (page, 0, 0, 0)
    HPDF_Page_BeginText (page)
    HPDF_Page_MoveTextPos (page, x - 6, y + 14)
    HPDF_Page_ShowText (page, "MASKMASK")
    HPDF_Page_EndText (page)
    HPDF_Image_SetColorMask (image3, 0, 255, 0, 0, 0, 255)
    HPDF_Page_DrawImage (page, image3, x, y, iw, ih)
    show_description (page, x, y, "Color Mask")

    # save the document to a file
    HPDF_SaveToFile (pdf, fname)

    # clean up
    HPDF_Free (pdf)
    return 0
# PNG support can be compiled out of libhpdf; this demo needs it to load
# its test images, so bail out with a hint instead of failing mid-render.
if HPDF_NOPNGLIB:
    printf("WARNING: if you want to run this demo, \n"
           "make libhpdf with HPDF_USE_PNGLIB option.\n")
    sys.exit(1)
else:
    main()
Tejal011089/medsyn2_lib | webnotes/build.py | 24 | 5506 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
from webnotes.utils.minify import JavascriptMinify
"""
Build the `public` folders and setup languages
"""
import os, sys, webnotes
from cssmin import cssmin
def bundle(no_compress, cms_make=True):
    """Concatenate (and optionally minify) all js/css bundles.

    no_compress: when true, files are stitched without minification.
    cms_make: when true, also run the app's on_build hook if it exists.
    """
    # build js files
    webnotes.validate_versions()
    check_public()
    check_lang()
    bundle = Bundle()
    bundle.no_compress = no_compress
    bundle.make()

    if cms_make:
        # The app may define a post-build hook; its absence is fine.
        try:
            from startup.event_handlers import on_build
            on_build()
        except ImportError, e:
            pass

    clear_pyc_files()
def watch(no_compress):
    """Poll every 3 seconds and rebuild whenever a source file changes.

    Runs forever; interrupt (Ctrl-C) to stop.
    """
    import time
    bundle = Bundle()
    bundle.no_compress = no_compress
    while True:
        if bundle.dirty():
            bundle.make()
        time.sleep(3)
def check_public():
    """Ensure the `public` folder structure exists (idempotent setup)."""
    from webnotes.install_lib.setup_public_folder import make
    make()
def check_lang():
    """Refresh the translation files before building."""
    from webnotes.translate import update_translations
    update_translations()
def clear_pyc_files():
    """Delete every .pyc file under the application's base path.

    The `locale` directory is pruned from the walk (no Python sources).
    """
    from webnotes.utils import get_base_path
    for path, folders, files in os.walk(get_base_path()):
        # removing from `folders` in place stops os.walk from descending
        if 'locale' in folders: folders.remove('locale')
        for f in files:
            # filenames are byte strings under Python 2; decode for endswith
            if f.decode("utf-8").endswith(".pyc"):
                os.remove(os.path.join(path, f))
class Bundle:
    """
    Concatenate, compress and mix (if required) js+css files from build.json
    """
    # class-level state shared across instances:
    no_compress = False      # skip minification when True
    timestamps = {}          # source file -> mtime seen at last build
    path = '.'               # base path that outfile names are relative to

    def concat(self, filelist, outfile=None):
        """
        Concat css and js files into a bundle

        Each input is stamped with a /* filename */ banner; js inputs are
        minified (unless no_compress, already .min., or marked ':concat'),
        css output is run through cssmin as a whole.
        """
        from cStringIO import StringIO
        out_type = outfile and outfile.split('.')[-1] or 'js'
        outtxt = ''
        for f in filelist:
            suffix = None
            # an entry like "path/to/file.js:concat" opts out of minification
            if ':' in f:
                f, suffix = f.split(':')
            if not os.path.exists(f) or os.path.isdir(f):
                continue

            # record mtime so dirty() can detect later edits
            self.timestamps[f] = os.path.getmtime(f)

            # get datas
            try:
                with open(f, 'r') as infile:
                    # get file type
                    ftype = f.split('.')[-1]
                    data = unicode(infile.read(), 'utf-8', errors='ignore')

                outtxt += ('\n/*\n *\t%s\n */' % f)

                # append
                if suffix=='concat' or out_type != 'js' or self.no_compress or ('.min.' in f):
                    outtxt += '\n' + data + '\n'
                else:
                    jsm = JavascriptMinify()
                    tmpin = StringIO(data.encode('utf-8'))
                    tmpout = StringIO()
                    jsm.minify(tmpin, tmpout)
                    tmpmin = unicode(tmpout.getvalue() or '', 'utf-8')
                    # NOTE(review): str.strip() returns a new string; this
                    # result is discarded, so leading/trailing newlines are
                    # NOT actually removed — confirm whether that matters.
                    tmpmin.strip('\n')
                    outtxt += tmpmin
            except Exception, e:
                print "--Error in:" + f + "--"
                print webnotes.getTraceback()

        if not self.no_compress and out_type == 'css':
            outtxt = cssmin(outtxt)

        with open(outfile, 'w') as f:
            f.write(outtxt.encode("utf-8"))

        print "Wrote %s - %sk" % (outfile, str(int(os.path.getsize(outfile)/1024)))

    def dirty(self):
        """check if build files are dirty (any source mtime changed)"""
        self.make_build_data()
        for builddict in self.bdata:
            for f in self.get_infiles(builddict):
                if ':' in f:
                    f, suffix = f.split(':')
                if not os.path.exists(f) or os.path.isdir(f):
                    continue
                if os.path.getmtime(f) != self.timestamps.get(f):
                    print f + ' dirty'
                    return True
        else:
            # for-else: no loop break occurred, nothing changed
            return False

    def make(self):
        """Build (stitch + compress) the file defined in build.json"""
        print "Building js and css files..."
        self.make_build_data()

        # each builddict maps one output file to its list of inputs
        for builddict in self.bdata:
            outfile = builddict.keys()[0]
            infiles = self.get_infiles(builddict)
            self.concat(infiles, os.path.relpath(os.path.join(self.path, outfile), os.curdir))

        self.reset_app_html()

    def reset_app_html(self):
        """Regenerate public/app.html from the template, embedding a fresh
        cache-busting version hash and the splash SVG (if present)."""
        import webnotes
        if os.path.exists("public/app.html"):
            os.remove("public/app.html")

        splash = ""
        if os.path.exists("public/app/images/splash.svg"):
            with open("public/app/images/splash.svg") as splash_file:
                splash = splash_file.read()

        with open('lib/public/html/app.html', 'r') as app_html:
            data = app_html.read()
            data = data % {
                "_version_number": webnotes.generate_hash(),
                "splash": splash
            }
            with open('public/app.html', 'w') as new_app_html:
                new_app_html.write(data)

    def get_infiles(self, builddict):
        """make list of files to merge (framework + app additions)"""
        outfile = builddict.keys()[0]
        infiles = builddict[outfile]

        # add app js and css to the list
        if outfile in self.appfiles:
            for f in self.appfiles[outfile]:
                if f not in infiles:
                    infiles.append(f)

        fl = []
        for f in infiles:
            ## load files from directory
            if f.endswith('/'):
                # add init js first
                fl += [os.path.relpath(os.path.join(f, 'init.js'), os.curdir)]
                # files other than init.js and beginning with "_"
                fl += [os.path.relpath(os.path.join(f, tmp), os.curdir) \
                    for tmp in os.listdir(f) if (tmp != 'init.js' and not tmp.startswith('_'))]
            else:
                fl.append(os.path.relpath(os.path.join(self.path, f), os.curdir))

        return fl

    def make_build_data(self):
        """merge build.json and lib/build.json into self.bdata/appfiles"""
        # framework js and css files
        # NOTE(review): build.json is eval()'d, not json-parsed — this
        # executes arbitrary code from the file; acceptable only because
        # the file is part of the codebase itself.
        with open('lib/public/build.json', 'r') as bfile:
            bdata = eval(bfile.read())

        # app js and css files
        if os.path.exists('app/public/build.json'):
            with open('app/public/build.json', 'r') as bfile:
                appfiles = eval(bfile.read())
        else:
            appfiles = {}

        # add additional app files in bdata
        buildfile_list = [builddict.keys()[0] for builddict in bdata]
        for f in appfiles:
            if f not in buildfile_list:
                bdata.append({f: appfiles[f]})

        self.appfiles = appfiles
        self.bdata = bdata
| mit |
bow/bioconda-recipes | recipes/peptide-shaker/1.16.15/peptide-shaker.py | 46 | 3272 | #!/usr/bin/env python
#
# Wrapper script for Java Conda packages that ensures that the java runtime
# is invoked with the right options. Adapted from the bash script (http://stackoverflow.com/questions/59895/can-a-bash-script-tell-what-directory-its-stored-in/246128#246128).
#
# Program Parameters
#
import os
import subprocess
import sys
import shutil
from os import access
from os import getenv
from os import X_OK
jar_file = 'PeptideShaker-1.16.15.jar'
default_jvm_mem_opts = ['-Xms512m', '-Xmx1g']
# !!! End of parameter section. No user-serviceable code below this line !!!
def real_dirname(path):
    """Return the symlink-resolved, canonicalized directory-portion of path."""
    resolved = os.path.realpath(path)
    return os.path.dirname(resolved)
def java_executable():
    """Return the path of the Java interpreter to invoke.

    Prefers $JAVA_HOME/bin/java when it exists and is executable;
    otherwise falls back to plain 'java' resolved via PATH.
    """
    home = getenv('JAVA_HOME')
    if home:
        candidate = os.path.join(home, os.path.join('bin', 'java'))
        if access(candidate, X_OK):
            return candidate
    return 'java'
def jvm_opts(argv):
    """Construct list of Java arguments based on our argument list.

    The argument list passed in argv must not include the script name.
    The return value is a 4-tuple of the form:
        (memory_options, prop_options, passthrough_options, exec_dir)
    """
    mem_opts, prop_opts, pass_args = [], [], []
    exec_dir = None

    for arg in argv:
        # -D... and -XX... are JVM property/flag options; the -XX test
        # must come before the -Xm test since both start with '-X'.
        if arg.startswith('-D') or arg.startswith('-XX'):
            prop_opts.append(arg)
        elif arg.startswith('-Xm'):
            mem_opts.append(arg)
        elif arg.startswith('--exec_dir='):
            exec_dir = arg.split('=')[1].strip('"').strip("'")
            if not os.path.exists(exec_dir):
                # replicate the distribution into the requested location
                shutil.copytree(real_dirname(sys.argv[0]), exec_dir, symlinks=False, ignore=None)
        else:
            pass_args.append(arg)

    # In the original shell script the test coded below read:
    #   if [ "$jvm_mem_opts" == "" ] && [ -z ${_JAVA_OPTIONS+x} ]
    # To reproduce the behaviour of the above shell code fragment
    # it is important to explictly check for equality with None
    # in the second condition, so a null envar value counts as True!
    if mem_opts == [] and getenv('_JAVA_OPTIONS') is None:
        mem_opts = default_jvm_mem_opts

    return (mem_opts, prop_opts, pass_args, exec_dir)
def main():
    """Launch the PeptideShaker jar with translated JVM arguments.

    PeptideShaker updates files relative to the path of the jar file.
    In a multiuser setting, the option --exec_dir="exec_dir"
    can be used as the location for the peptide-shaker distribution.
    If the exec_dir does not exist,
    we copy the jar file, lib, and resources to the exec_dir directory.

    Exits with the JVM process's return code.
    """
    java = java_executable()
    (mem_opts, prop_opts, pass_args, exec_dir) = jvm_opts(sys.argv[1:])
    jar_dir = exec_dir if exec_dir else real_dirname(sys.argv[0])

    # A first passthrough argument that looks like a Java class name
    # ("eu...") selects classpath invocation; otherwise run the jar.
    if pass_args != [] and pass_args[0].startswith('eu'):
        jar_arg = '-cp'
    else:
        jar_arg = '-jar'
    jar_path = os.path.join(jar_dir, jar_file)

    java_args = [java] + mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args

    sys.exit(subprocess.call(java_args))
if __name__ == '__main__':
main()
| mit |
TeamJB/linaro_external_skia | tools/tests/bench_pictures_cfg_test.py | 155 | 1340 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verify that the bench_pictures.cfg file is sane.
"""
import os
import sys
def ThrowIfNotAString(obj):
    """Raise a TypeError if obj is not a string."""
    # A string round-trips through str() unchanged; anything else differs.
    converted = str(obj)
    if converted != obj:
        raise TypeError('%s is not a string!' % converted)
def Main(argv):
    """ Verify that the bench_pictures.cfg file is sane.

    - Exec the file to ensure that it uses correct Python syntax.
    - Make sure that every element is a string, because the buildbot scripts
      will fail to execute if this is not the case.

    This test does not verify that the well-formed configs are actually valid.
    """
    # 'import_path' is made available to the config file being exec'd.
    vars = {'import_path': 'tools'}
    execfile(os.path.join('tools', 'bench_pictures.cfg'), vars)
    bench_pictures_cfg = vars['bench_pictures_cfg']
    for config_name, config_list in bench_pictures_cfg.iteritems():
        ThrowIfNotAString(config_name)
        for config in config_list:
            for key, value in config.iteritems():
                ThrowIfNotAString(key)
                # NOTE(review): isinstance(value, list) would be the
                # idiomatic check; the name comparison is kept as-is.
                if type(value).__name__ == 'list':
                    for item in value:
                        ThrowIfNotAString(item)
                elif not value is True:
                    # values may be strings or the literal True flag
                    ThrowIfNotAString(value)
if __name__ == '__main__':
sys.exit(Main(sys.argv)) | bsd-3-clause |
shawnwanderson/cmput404-project | venv/lib/python2.7/site-packages/django/utils/log.py | 84 | 5261 | from __future__ import unicode_literals
import logging
import logging.config # needed when logging_config doesn't start with logging.config
import sys
import warnings
from copy import copy
from django.conf import settings
from django.core import mail
from django.core.mail import get_connection
from django.utils.deprecation import RemovedInNextVersionWarning
from django.utils.module_loading import import_string
from django.views.debug import ExceptionReporter
# Default logging for Django. This sends an email to the site admins on every
# HTTP 500 error. Depending on DEBUG, all other log records are either sent to
# the console (DEBUG=True) or discarded (DEBUG=False) by means of the
# require_debug_true filter.
DEFAULT_LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse',
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'handlers': {
'console': {
'level': 'INFO',
'filters': ['require_debug_true'],
'class': 'logging.StreamHandler',
},
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django': {
'handlers': ['console', 'mail_admins'],
'level': 'INFO',
},
'py.warnings': {
'handlers': ['console'],
},
}
}
def configure_logging(logging_config, logging_settings):
    """Install Django's default logging config, then apply the user's.

    logging_config: dotted path of the config callable (LOGGING_CONFIG).
    logging_settings: the project's LOGGING dict, passed to that callable.
    """
    if not sys.warnoptions:
        # Route warnings through python logging
        logging.captureWarnings(True)
        # RemovedInNextVersionWarning is a subclass of DeprecationWarning which
        # is hidden by default, hence we force the "default" behavior
        warnings.simplefilter("default", RemovedInNextVersionWarning)

    if logging_config:
        # First find the logging configuration function ...
        logging_config_func = import_string(logging_config)

        logging.config.dictConfig(DEFAULT_LOGGING)

        # ... then invoke it with the logging settings
        if logging_settings:
            logging_config_func(logging_settings)
class AdminEmailHandler(logging.Handler):
    """An exception log handler that emails log entries to site admins.

    If the request is passed as the first argument to the log record,
    request data will be provided in the email report.
    """
    def __init__(self, include_html=False, email_backend=None):
        # include_html: attach the full HTML debug page to the email.
        # email_backend: dotted path of an alternative email backend.
        logging.Handler.__init__(self)
        self.include_html = include_html
        self.email_backend = email_backend

    def emit(self, record):
        """Build a subject + traceback report and mail it to the admins."""
        try:
            request = record.request
            subject = '%s (%s IP): %s' % (
                record.levelname,
                ('internal' if request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS
                 else 'EXTERNAL'),
                record.getMessage()
            )
        except Exception:
            # No request attached to the record (or it is unusable):
            # fall back to a request-less subject.
            subject = '%s: %s' % (
                record.levelname,
                record.getMessage()
            )
            request = None
        subject = self.format_subject(subject)

        # Since we add a nicely formatted traceback on our own, create a copy
        # of the log record without the exception data.
        no_exc_record = copy(record)
        no_exc_record.exc_info = None
        no_exc_record.exc_text = None

        if record.exc_info:
            exc_info = record.exc_info
        else:
            exc_info = (None, record.getMessage(), None)

        reporter = ExceptionReporter(request, is_email=True, *exc_info)
        message = "%s\n\n%s" % (self.format(no_exc_record), reporter.get_traceback_text())
        html_message = reporter.get_traceback_html() if self.include_html else None
        self.send_mail(subject, message, fail_silently=True, html_message=html_message)

    def send_mail(self, subject, message, *args, **kwargs):
        mail.mail_admins(subject, message, *args, connection=self.connection(), **kwargs)

    def connection(self):
        # fail_silently so a broken mail backend never crashes logging
        return get_connection(backend=self.email_backend, fail_silently=True)

    def format_subject(self, subject):
        """
        Escape CR and LF characters, and limit length.

        RFC 2822's hard limit is 998 characters per line. So, minus "Subject: "
        the actual subject must be no longer than 989 characters.
        """
        formatted_subject = subject.replace('\n', '\\n').replace('\r', '\\r')
        return formatted_subject[:989]
class CallbackFilter(logging.Filter):
    """Filter log records through a user-supplied predicate.

    The callable receives the record-to-be-logged as its only parameter;
    its truthiness decides whether the record is logged (1) or dropped (0).
    """
    def __init__(self, callback):
        # Only the predicate is stored; logging.Filter's name-based
        # filtering is intentionally unused here.
        self.callback = callback

    def filter(self, record):
        # Normalise the predicate's truthiness to the historical 1/0
        # contract expected by the logging framework.
        return 1 if self.callback(record) else 0
class RequireDebugFalse(logging.Filter):
    """Pass records only when settings.DEBUG is False (production)."""
    def filter(self, record):
        return not settings.DEBUG
class RequireDebugTrue(logging.Filter):
    """Pass records only when settings.DEBUG is True (development)."""
    def filter(self, record):
        return settings.DEBUG
| gpl-3.0 |
CM-zenfone2/android_kernel_asus_moorefield | arch/ia64/scripts/unwcheck.py | 13143 | 1714 | #!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
if len(sys.argv) != 2:
print "Usage: %s FILE" % sys.argv[0]
sys.exit(2)
readelf = os.getenv("READELF", "readelf")
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
    """Report a mismatch between a function's slot count and the sum of
    its unwind-region lengths.

    func: function name; a false value means no function has been seen
    yet, in which case the global [start, end) address range labels it.
    Increments the global error counter on mismatch.
    """
    if slots != rlen_sum:
        global num_errors
        num_errors += 1
        if not func: func = "[%#x-%#x]" % (start, end)
        print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
    return
# Parse `readelf -u` output: a start_pattern line opens a new function
# (first flushing the previous one through check_func); each rlen= line
# accumulates into the running region-length sum.
num_funcs = 0
num_errors = 0
func = False
slots = 0
rlen_sum = 0
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
    m = start_pattern.match(line)
    if m:
        check_func(func, slots, rlen_sum)
        func = m.group(1)
        start = long(m.group(2), 16)
        end = long(m.group(3), 16)
        # IA-64 instruction bundles: 3 slots per 16 bytes of code.
        slots = 3 * (end - start) / 16
        rlen_sum = 0L
        num_funcs += 1
    else:
        m = rlen_pattern.match(line)
        if m:
            rlen_sum += long(m.group(1))
# flush the final function
check_func(func, slots, rlen_sum)

if num_errors == 0:
    print "No errors detected in %u functions." % num_funcs
else:
    if num_errors > 1:
        err="errors"
    else:
        err="error"
    print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
    sys.exit(1)
| gpl-2.0 |
adamend/django-oscar | src/oscar/apps/wishlists/abstract_models.py | 27 | 4905 | import hashlib
import random
from django.utils import six
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _, pgettext_lazy
from django.core.urlresolvers import reverse
from oscar.core.compat import AUTH_USER_MODEL
@python_2_unicode_compatible
class AbstractWishList(models.Model):
    """
    Represents a user's wish lists of products.

    A user can have multiple wish lists, move products between them, etc.
    """

    # Only authenticated users can have wishlists
    owner = models.ForeignKey(AUTH_USER_MODEL, related_name='wishlists',
                              verbose_name=_('Owner'))
    name = models.CharField(verbose_name=_('Name'), default=_('Default'),
                            max_length=255)

    #: This key acts as primary key and is used instead of an int to make it
    #: harder to guess
    key = models.CharField(_('Key'), max_length=6, db_index=True, unique=True,
                           editable=False)

    # Oscar core does not support public or shared wishlists at the moment, but
    # all the right hooks should be there
    PUBLIC, PRIVATE, SHARED = ('Public', 'Private', 'Shared')
    VISIBILITY_CHOICES = (
        (PRIVATE, _('Private - Only the owner can see the wish list')),
        (SHARED, _('Shared - Only the owner and people with access to the'
                   ' obfuscated link can see the wish list')),
        (PUBLIC, _('Public - Everybody can see the wish list')),
    )
    visibility = models.CharField(_('Visibility'), max_length=20,
                                  default=PRIVATE, choices=VISIBILITY_CHOICES)

    # Convention: A user can have multiple wish lists. The last created wish
    # list for a user shall be their "default" wish list.
    # If an UI element only allows adding to wish list without
    # specifying which one , one shall use the default one.
    # That is a rare enough case to handle it by convention instead of a
    # BooleanField.
    date_created = models.DateTimeField(
        _('Date created'), auto_now_add=True, editable=False)

    def __str__(self):
        return u"%s's Wish List '%s'" % (self.owner, self.name)

    def save(self, *args, **kwargs):
        # Assign the obfuscated key on first save (or on forced inserts).
        if not self.pk or kwargs.get('force_insert', False):
            self.key = self.__class__.random_key()
        super(AbstractWishList, self).save(*args, **kwargs)

    @classmethod
    def random_key(cls, length=6):
        """
        Return a unique, hard-to-guess key: the first `length` hex digits of
        a SHA-1 over a random number.  Loops until an unused key is found.
        (Note: despite earlier docs, the owner is not part of the hash.)
        """
        while True:
            rand = six.text_type(random.random()).encode('utf8')
            key = hashlib.sha1(rand).hexdigest()[:length]
            if not cls._default_manager.filter(key=key).exists():
                return key

    def is_allowed_to_see(self, user):
        # Shared and public lists are visible to anyone who has the link;
        # private lists only to their owner.
        if self.visibility in (self.PUBLIC, self.SHARED):
            return True
        else:
            return user == self.owner

    def is_allowed_to_edit(self, user):
        # currently only the owner can edit their wish list
        return user == self.owner

    class Meta:
        abstract = True
        app_label = 'wishlists'
        ordering = ('owner', 'date_created', )
        verbose_name = _('Wish List')

    def get_absolute_url(self):
        return reverse('customer:wishlists-detail', kwargs={
            'key': self.key})

    def add(self, product):
        """
        Add a product to this wishlist
        """
        lines = self.lines.filter(product=product)
        if len(lines) == 0:
            self.lines.create(
                product=product, title=product.get_title())
        else:
            # Product is already on the list: bump the quantity instead.
            line = lines[0]
            line.quantity += 1
            line.save()
@python_2_unicode_compatible
class AbstractLine(models.Model):
    """
    One entry in a wish list. Similar to order lines or basket lines.
    """
    wishlist = models.ForeignKey('wishlists.WishList', related_name='lines',
                                 verbose_name=_('Wish List'))
    # SET_NULL keeps the line alive when the product is deleted; the stored
    # `title` below is then used for display.
    product = models.ForeignKey(
        'catalogue.Product', verbose_name=_('Product'),
        related_name='wishlists_lines', on_delete=models.SET_NULL,
        blank=True, null=True)
    quantity = models.PositiveIntegerField(_('Quantity'), default=1)
    #: Store the title in case product gets deleted
    title = models.CharField(
        pgettext_lazy(u"Product title", u"Title"), max_length=255)

    def __str__(self):
        return u'%sx %s on %s' % (self.quantity, self.title,
                                  self.wishlist.name)

    def get_title(self):
        # Prefer the live product's title; fall back to the stored copy.
        if self.product:
            return self.product.get_title()
        else:
            return self.title

    class Meta:
        abstract = True
        app_label = 'wishlists'
        # A product may appear at most once per wish list.
        unique_together = (('wishlist', 'product'), )
        verbose_name = _('Wish list line')
| bsd-3-clause |
opennode/nodeconductor-assembly-waldur | src/waldur_slurm/apps.py | 1 | 2931 | from django.apps import AppConfig
from django.db.models import signals
class SlurmConfig(AppConfig):
    """Django application config for the SLURM integration plugin."""

    name = 'waldur_slurm'
    verbose_name = 'SLURM'
    service_name = 'SLURM'

    def ready(self):
        """Register the backend, signal handlers and quota fields.

        Imports are deferred into ready() because the app registry is not
        populated at module import time.
        """
        from waldur_core.quotas.fields import QuotaField, CounterQuotaField
        from waldur_core.structure import SupportedServices
        from waldur_core.structure import models as structure_models
        from waldur_core.structure import signals as structure_signals
        from waldur_freeipa import models as freeipa_models

        from .backend import SlurmBackend
        from . import handlers, models, utils

        SupportedServices.register_backend(SlurmBackend)

        # Keep SLURM accounts in sync with the FreeIPA profile lifecycle.
        signals.post_save.connect(
            handlers.process_user_creation,
            sender=freeipa_models.Profile,
            dispatch_uid='waldur_slurm.handlers.process_user_creation',
        )

        signals.pre_delete.connect(
            handlers.process_user_deletion,
            sender=freeipa_models.Profile,
            dispatch_uid='waldur_slurm.handlers.process_user_deletion',
        )

        structure_models_with_roles = (
            structure_models.Customer,
            structure_models.Project,
        )
        for model in structure_models_with_roles:
            # BUG FIX: this used `model.__class__`, which is the *metaclass*
            # (the same object for Customer and Project), so both models
            # produced an identical dispatch_uid string.  Use the model name
            # so each (model, handler) pair gets a distinct, stable uid.
            structure_signals.structure_role_granted.connect(
                handlers.process_role_granted,
                sender=model,
                dispatch_uid='waldur_slurm.handlers.process_role_granted.%s'
                % model.__name__,
            )

            structure_signals.structure_role_revoked.connect(
                handlers.process_role_revoked,
                sender=model,
                dispatch_uid='waldur_slurm.handlers.process_role_revoked.%s'
                % model.__name__,
            )

        # Mirror every backend quota on both customers and projects.
        for quota in utils.QUOTA_NAMES:
            structure_models.Customer.add_quota_field(
                name=quota, quota_field=QuotaField(is_backend=True)
            )

            structure_models.Project.add_quota_field(
                name=quota, quota_field=QuotaField(is_backend=True)
            )

        # Count allocations per project and per customer.
        structure_models.Project.add_quota_field(
            name='nc_allocation_count',
            quota_field=CounterQuotaField(
                target_models=lambda: [models.Allocation],
                path_to_scope='service_project_link.project',
            ),
        )

        structure_models.Customer.add_quota_field(
            name='nc_allocation_count',
            quota_field=CounterQuotaField(
                target_models=lambda: [models.Allocation],
                path_to_scope='service_project_link.project.customer',
            ),
        )

        signals.post_save.connect(
            handlers.update_quotas_on_allocation_usage_update,
            sender=models.Allocation,
            dispatch_uid='waldur_slurm.handlers.update_quotas_on_allocation_usage_update',
        )
| mit |
addisonElliott/SmartShopTouchScreen | Windows/ExpirationBox_ui.py | 1 | 8236 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ExpirationBox.ui'
#
# Created by: PyQt5 UI code generator 5.7.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_ExpirationBox(object):
    # Generated from 'ExpirationBox.ui' by pyuic5 (PyQt5 5.7.1).
    # Do not edit by hand: changes are lost on regeneration from the .ui file.

    def setupUi(self, ExpirationBox):
        """Instantiate and lay out every widget of the expiration dialog."""
        ExpirationBox.setObjectName("ExpirationBox")
        ExpirationBox.resize(506, 364)
        font = QtGui.QFont()
        font.setPointSize(19)
        ExpirationBox.setFont(font)
        ExpirationBox.setStyleSheet("QDialog\n"
"{\n"
" border: 1px solid #76797C;\n"
"}")
        self.gridLayout = QtWidgets.QGridLayout(ExpirationBox)
        self.gridLayout.setContentsMargins(5, 5, 5, 5)
        self.gridLayout.setObjectName("gridLayout")
        # --- day column (label row 3, combo row 4) ---
        self.day_label = QtWidgets.QLabel(ExpirationBox)
        font = QtGui.QFont()
        font.setFamily("Segoe UI")
        font.setPointSize(15)
        self.day_label.setFont(font)
        self.day_label.setAlignment(QtCore.Qt.AlignCenter)
        self.day_label.setObjectName("day_label")
        self.gridLayout.addWidget(self.day_label, 3, 2, 1, 1)
        self.day_combo = QtWidgets.QComboBox(ExpirationBox)
        font = QtGui.QFont()
        font.setFamily("Segoe UI")
        font.setPointSize(16)
        font.setBold(False)
        font.setWeight(50)
        self.day_combo.setFont(font)
        self.day_combo.setObjectName("day_combo")
        self.day_combo.addItem("")
        self.day_combo.setItemText(0, "")
        self.gridLayout.addWidget(self.day_combo, 4, 2, 1, 1)
        # --- month column ---
        self.month_label = QtWidgets.QLabel(ExpirationBox)
        font = QtGui.QFont()
        font.setFamily("Segoe UI")
        font.setPointSize(15)
        self.month_label.setFont(font)
        self.month_label.setAlignment(QtCore.Qt.AlignCenter)
        self.month_label.setObjectName("month_label")
        self.gridLayout.addWidget(self.month_label, 3, 1, 1, 1)
        self.month_combo = QtWidgets.QComboBox(ExpirationBox)
        font = QtGui.QFont()
        font.setFamily("Segoe UI")
        font.setPointSize(16)
        font.setBold(False)
        font.setItalic(False)
        font.setWeight(50)
        self.month_combo.setFont(font)
        self.month_combo.setStyleSheet("QDialog\n"
"{\n"
" border: 1px solid #76797C;\n"
"}")
        self.month_combo.setObjectName("month_combo")
        self.month_combo.addItem("")
        self.month_combo.setItemText(0, "")
        self.gridLayout.addWidget(self.month_combo, 4, 1, 1, 1)
        # --- year column ---
        self.year_combo = QtWidgets.QComboBox(ExpirationBox)
        font = QtGui.QFont()
        font.setFamily("Segoe UI")
        font.setPointSize(16)
        font.setBold(False)
        font.setItalic(False)
        font.setWeight(50)
        self.year_combo.setFont(font)
        self.year_combo.setObjectName("year_combo")
        self.year_combo.addItem("")
        self.year_combo.setItemText(0, "")
        self.gridLayout.addWidget(self.year_combo, 4, 3, 1, 1)
        spacerItem = QtWidgets.QSpacerItem(20, 20, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
        self.gridLayout.addItem(spacerItem, 13, 1, 1, 1)
        self.year_label = QtWidgets.QLabel(ExpirationBox)
        font = QtGui.QFont()
        font.setFamily("Segoe UI")
        font.setPointSize(15)
        self.year_label.setFont(font)
        self.year_label.setAlignment(QtCore.Qt.AlignCenter)
        self.year_label.setObjectName("year_label")
        self.gridLayout.addWidget(self.year_label, 3, 3, 1, 1)
        # --- quantity row ---
        self.qty_label = QtWidgets.QLabel(ExpirationBox)
        font = QtGui.QFont()
        font.setFamily("Segoe UI")
        font.setPointSize(15)
        self.qty_label.setFont(font)
        self.qty_label.setObjectName("qty_label")
        self.gridLayout.addWidget(self.qty_label, 6, 1, 1, 2)
        # --- bottom row: hint label plus accept/cancel touch buttons ---
        self.horizontalLayout_1 = QtWidgets.QHBoxLayout()
        self.horizontalLayout_1.setContentsMargins(-1, 0, -1, -1)
        self.horizontalLayout_1.setSpacing(15)
        self.horizontalLayout_1.setObjectName("horizontalLayout_1")
        self.cancel_label = QtWidgets.QLabel(ExpirationBox)
        font = QtGui.QFont()
        font.setFamily("Segoe UI")
        font.setPointSize(15)
        self.cancel_label.setFont(font)
        self.cancel_label.setObjectName("cancel_label")
        self.horizontalLayout_1.addWidget(self.cancel_label)
        spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout_1.addItem(spacerItem1)
        self.accept_button = TouchButton(ExpirationBox)
        self.accept_button.setMinimumSize(QtCore.QSize(48, 48))
        self.accept_button.setMaximumSize(QtCore.QSize(48, 48))
        self.accept_button.setStyleSheet("background-color: transparent;\n"
"border: 0;")
        self.accept_button.setText("")
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(":/Icons/Icons/GreenCheckIcon_Finished.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.accept_button.setIcon(icon)
        self.accept_button.setIconSize(QtCore.QSize(48, 48))
        self.accept_button.setObjectName("accept_button")
        self.horizontalLayout_1.addWidget(self.accept_button)
        self.cancel_button = TouchButton(ExpirationBox)
        self.cancel_button.setMinimumSize(QtCore.QSize(48, 48))
        self.cancel_button.setMaximumSize(QtCore.QSize(48, 48))
        self.cancel_button.setStyleSheet("background-color: transparent;\n"
"border: 0;")
        self.cancel_button.setText("")
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap(":/Icons/Icons/RedCancelIcon_Finished.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.cancel_button.setIcon(icon1)
        self.cancel_button.setIconSize(QtCore.QSize(48, 48))
        self.cancel_button.setObjectName("cancel_button")
        self.horizontalLayout_1.addWidget(self.cancel_button)
        self.gridLayout.addLayout(self.horizontalLayout_1, 14, 1, 1, 3)
        self.qty_combo = QtWidgets.QComboBox(ExpirationBox)
        font = QtGui.QFont()
        font.setFamily("Segoe UI")
        font.setPointSize(16)
        font.setBold(False)
        font.setItalic(False)
        font.setWeight(50)
        self.qty_combo.setFont(font)
        self.qty_combo.setObjectName("qty_combo")
        self.gridLayout.addWidget(self.qty_combo, 7, 1, 1, 3)
        # --- header: "Item Name:" caption, item title and dialog title ---
        self.label = QtWidgets.QLabel(ExpirationBox)
        font = QtGui.QFont()
        font.setFamily("Segoe UI")
        font.setPointSize(15)
        self.label.setFont(font)
        self.label.setObjectName("label")
        self.gridLayout.addWidget(self.label, 2, 1, 1, 1)
        self.itemNameLabel = QtWidgets.QLabel(ExpirationBox)
        font = QtGui.QFont()
        font.setFamily("Segoe UI")
        font.setPointSize(15)
        self.itemNameLabel.setFont(font)
        self.itemNameLabel.setObjectName("itemNameLabel")
        self.gridLayout.addWidget(self.itemNameLabel, 2, 2, 1, 2)
        self.exp_label = QtWidgets.QLabel(ExpirationBox)
        font = QtGui.QFont()
        font.setFamily("Cronus Round")
        font.setPointSize(21)
        self.exp_label.setFont(font)
        self.exp_label.setObjectName("exp_label")
        self.gridLayout.addWidget(self.exp_label, 1, 1, 1, 3, QtCore.Qt.AlignHCenter)

        self.retranslateUi(ExpirationBox)
        QtCore.QMetaObject.connectSlotsByName(ExpirationBox)

    def retranslateUi(self, ExpirationBox):
        """Set every user-visible string (hook for Qt's translation system)."""
        _translate = QtCore.QCoreApplication.translate
        ExpirationBox.setWindowTitle(_translate("ExpirationBox", "Dialog"))
        self.day_label.setText(_translate("ExpirationBox", "Day"))
        self.month_label.setText(_translate("ExpirationBox", "Month"))
        self.year_label.setText(_translate("ExpirationBox", "Year"))
        self.qty_label.setText(_translate("ExpirationBox", "Quantity"))
        self.cancel_label.setText(_translate("ExpirationBox", "Scan to continue"))
        self.label.setText(_translate("ExpirationBox", "Item Name:"))
        self.itemNameLabel.setText(_translate("ExpirationBox", "Label"))
        self.exp_label.setText(_translate("ExpirationBox", "Expiration Date"))
from Widgets.touchButton import TouchButton
import Resource_BY_rc
import style_rc
| agpl-3.0 |
dims/nova | nova/tests/unit/scheduler/test_rpcapi.py | 16 | 4830 | # Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Unit Tests for nova.scheduler.rpcapi
"""
import mock
from oslo_config import cfg
from nova import context
from nova import objects
from nova.scheduler import rpcapi as scheduler_rpcapi
from nova import test
CONF = cfg.CONF
class SchedulerRpcAPITestCase(test.NoDBTestCase):
    """Verify that SchedulerAPI methods issue the expected RPC calls."""

    def _test_scheduler_api(self, method, rpc_method, expected_args=None,
                            **kwargs):
        # Generic driver: invoke `method` on a SchedulerAPI instance and
        # assert that the underlying oslo.messaging `rpc_method` ('call' or
        # 'cast') is issued with the expected payload, version and fanout.
        # `expected_args` overrides the expected payload when the api method
        # transforms its kwargs before sending (e.g. legacy downgrades).
        ctxt = context.RequestContext('fake_user', 'fake_project')
        rpcapi = scheduler_rpcapi.SchedulerAPI()
        self.assertIsNotNone(rpcapi.client)
        self.assertEqual(rpcapi.client.target.topic, CONF.scheduler_topic)

        # 'call' returns a value; 'cast' is fire-and-forget.
        expected_retval = 'foo' if rpc_method == 'call' else None
        expected_version = kwargs.pop('version', None)
        expected_fanout = kwargs.pop('fanout', None)
        expected_kwargs = kwargs.copy()
        if expected_args:
            expected_kwargs = expected_args

        prepare_kwargs = {}
        if expected_fanout:
            prepare_kwargs['fanout'] = True
        if expected_version:
            prepare_kwargs['version'] = expected_version

        # NOTE(sbauza): We need to persist the method before mocking it
        orig_prepare = rpcapi.client.prepare

        def fake_can_send_version(version=None):
            # Delegate to the real version-negotiation logic so the api
            # method picks the same code path it would in production.
            return orig_prepare(version=version).can_send_version()

        @mock.patch.object(rpcapi.client, rpc_method,
                           return_value=expected_retval)
        @mock.patch.object(rpcapi.client, 'prepare',
                           return_value=rpcapi.client)
        @mock.patch.object(rpcapi.client, 'can_send_version',
                           side_effect=fake_can_send_version)
        def do_test(mock_csv, mock_prepare, mock_rpc_method):
            retval = getattr(rpcapi, method)(ctxt, **kwargs)
            self.assertEqual(retval, expected_retval)
            mock_prepare.assert_called_once_with(**prepare_kwargs)
            mock_rpc_method.assert_called_once_with(ctxt, method,
                                                    **expected_kwargs)

        do_test()

    def test_select_destinations(self):
        fake_spec = objects.RequestSpec()
        self._test_scheduler_api('select_destinations', rpc_method='call',
                spec_obj=fake_spec,
                version='4.3')

    @mock.patch.object(objects.RequestSpec, 'to_legacy_filter_properties_dict')
    @mock.patch.object(objects.RequestSpec, 'to_legacy_request_spec_dict')
    def test_select_destinations_with_old_manager(self, to_spec, to_props):
        # Cap the RPC version at 4.0 and check that the RequestSpec object
        # is downgraded to the legacy request_spec/filter_properties dicts.
        self.flags(scheduler='4.0', group='upgrade_levels')
        to_spec.return_value = 'fake_request_spec'
        to_props.return_value = 'fake_prop'
        fake_spec = objects.RequestSpec()
        self._test_scheduler_api('select_destinations', rpc_method='call',
                expected_args={'request_spec': 'fake_request_spec',
                               'filter_properties': 'fake_prop'},
                spec_obj=fake_spec,
                version='4.0')

    def test_update_aggregates(self):
        self._test_scheduler_api('update_aggregates', rpc_method='cast',
                aggregates='aggregates',
                version='4.1',
                fanout=True)

    def test_delete_aggregate(self):
        self._test_scheduler_api('delete_aggregate', rpc_method='cast',
                aggregate='aggregate',
                version='4.1',
                fanout=True)

    def test_update_instance_info(self):
        self._test_scheduler_api('update_instance_info', rpc_method='cast',
                host_name='fake_host',
                instance_info='fake_instance',
                fanout=True,
                version='4.2')

    def test_delete_instance_info(self):
        self._test_scheduler_api('delete_instance_info', rpc_method='cast',
                host_name='fake_host',
                instance_uuid='fake_uuid',
                fanout=True,
                version='4.2')

    def test_sync_instance_info(self):
        self._test_scheduler_api('sync_instance_info', rpc_method='cast',
                host_name='fake_host',
                instance_uuids=['fake1', 'fake2'],
                fanout=True,
                version='4.2')
| apache-2.0 |
nubark/odoo | addons/survey/wizard/survey_email_compose_message.py | 29 | 10120 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp.osv import osv
from openerp.osv import fields
from openerp.tools.translate import _
from datetime import datetime
from openerp.exceptions import UserError
import re
import uuid
import urlparse
emails_split = re.compile(r"[;,\n\r]+")
class survey_mail_compose_message(osv.TransientModel):
    """Transient wizard that shares a survey with an audience, either as a
    public link or as per-recipient private email invitations."""
    _name = 'survey.mail.compose.message'
    _inherit = 'mail.compose.message'
    _description = 'Email composition wizard for Survey'
    _log_access = True

    def _get_public_url(self, cr, uid, ids, name, arg, context=None):
        """Functional-field getter: public URL of each wizard's survey."""
        res = dict((id, 0) for id in ids)
        survey_obj = self.pool.get('survey.survey')
        for wizard in self.browse(cr, uid, ids, context=context):
            res[wizard.id] = wizard.survey_id.public_url
        return res

    def _get_public_url_html(self, cr, uid, ids, name, arg, context=None):
        """Functional-field getter: the public URL wrapped in an HTML anchor."""
        urls = self._get_public_url(cr, uid, ids, name, arg, context=context)
        for key, url in urls.items():
            urls[key] = '<a href="%s">%s</a>' % (url, _("Click here to start survey"))
        return urls

    _columns = {
        'survey_id': fields.many2one('survey.survey', 'Survey', required=True),
        'public': fields.selection([('public_link', 'Share the public web link to your audience.'),
                ('email_public_link', 'Send by email the public web link to your audience.'),
                ('email_private', 'Send private invitation to your audience (only one response per recipient and per invitation).')],
            string='Share options', required=True),
        'public_url': fields.function(_get_public_url, string="Public url", type="char"),
        'public_url_html': fields.function(_get_public_url_html, string="Public HTML web link", type="char"),
        'partner_ids': fields.many2many('res.partner',
            'survey_mail_compose_message_res_partner_rel',
            'wizard_id', 'partner_id', 'Existing contacts'),
        'attachment_ids': fields.many2many('ir.attachment',
            'survey_mail_compose_message_ir_attachments_rel',
            'wizard_id', 'attachment_id', 'Attachments'),
        'multi_email': fields.text(string='List of emails', help="This list of emails of recipients will not converted in contacts. Emails separated by commas, semicolons or newline."),
        'date_deadline': fields.date(string="Deadline to which the invitation to respond is valid", help="Deadline to which the invitation to respond for this survey is valid. If the field is empty, the invitation is still valid."),
    }

    _defaults = {
        'public': 'public_link',
        'survey_id': lambda self, cr, uid, ctx={}: ctx.get('model') == 'survey.survey' and ctx.get('res_id') or None
    }

    def default_get(self, cr, uid, fields, context=None):
        # When launched from partner records, preselect them as recipients.
        res = super(survey_mail_compose_message, self).default_get(cr, uid, fields, context=context)
        if context.get('active_model') == 'res.partner' and context.get('active_ids'):
            res.update({'partner_ids': context.get('active_ids')})
        return res

    def onchange_multi_email(self, cr, uid, ids, multi_email, context=None):
        """Validate and normalise the free-form email list: deduplicate,
        reject malformed addresses, and return them sorted one per line."""
        emails = list(set(emails_split.split(multi_email or "")))
        emails_checked = []
        error_message = ""
        for email in emails:
            email = email.strip()
            if email:
                # Minimal sanity check: exactly one '@' with text around it.
                if not re.search(r"^[^@]+@[^@]+$", email):
                    error_message += "\n'%s'" % email
                else:
                    emails_checked.append(email)
        if error_message:
            raise UserError(_("One email at least is incorrect: %s") % error_message)

        emails_checked.sort()
        values = {'multi_email': '\n'.join(emails_checked)}
        return {'value': values}

    def onchange_survey_id(self, cr, uid, ids, survey_id, context=None):
        """Refresh the subject and public-link fields for the chosen survey."""
        if survey_id:
            survey = self.pool.get('survey.survey').browse(cr, uid, survey_id, context=context)
            return {
                'value': {
                    'subject': survey.title,
                    'public_url': survey.public_url,
                    'public_url_html': '<a href="%s">%s</a>' % (survey.public_url, _("Click here to take survey")),
                }}
        else:
            txt = _("Please select a survey")
            return {
                'value': {
                    'public_url': txt,
                    'public_url_html': txt,
                }}

    #------------------------------------------------------
    # Wizard validation and send
    #------------------------------------------------------

    def send_mail(self, cr, uid, ids, auto_commit=False, context=None):
        """ Process the wizard content and proceed with sending the related
            email(s), rendering any template patterns on the fly if needed """
        if context is None:
            context = {}
        survey_response_obj = self.pool.get('survey.user_input')
        partner_obj = self.pool.get('res.partner')
        mail_mail_obj = self.pool.get('mail.mail')
        try:
            model, anonymous_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'portal', 'group_anonymous')
        except ValueError:
            # Portal module absent: no anonymous group to filter out.
            anonymous_id = None

        def create_response_and_send_mail(wizard, token, partner_id, email):
            """ Create one mail by recipients and replace __URL__ by link with identification token """
            #set url
            url = wizard.survey_id.public_url

            url = urlparse.urlparse(url).path[1:]  # dirty hack to avoid incorrect urls

            if token:
                url = url + '/' + token

            # post the message
            values = {
                'model': None,
                'res_id': None,
                'subject': wizard.subject,
                'body': wizard.body.replace("__URL__", url),
                'body_html': wizard.body.replace("__URL__", url),
                'parent_id': None,
                'attachment_ids': wizard.attachment_ids and [(6, 0, wizard.attachment_ids.ids)] or None,
                'email_from': wizard.email_from or None,
                'auto_delete': True,
            }
            if partner_id:
                values['recipient_ids'] = [(4, partner_id)]
            else:
                values['email_to'] = email
            mail_id = mail_mail_obj.create(cr, uid, values, context=context)
            mail_mail_obj.send(cr, uid, [mail_id], context=context)

        def create_token(wizard, partner_id, email):
            # Re-sending: reuse the token of an unfinished existing response.
            if context.get("survey_resent_token"):
                response_ids = survey_response_obj.search(cr, uid, [('survey_id', '=', wizard.survey_id.id), ('state', 'in', ['new', 'skip']), '|', ('partner_id', '=', partner_id), ('email', '=', email)], context=context)
                if response_ids:
                    return survey_response_obj.read(cr, uid, response_ids, ['token'], context=context)[0]['token']
            # Only private invitations get an identification token.
            if wizard.public != 'email_private':
                return None
            else:
                token = uuid.uuid4().__str__()
                # create response with token
                survey_response_obj.create(cr, uid, {
                    'survey_id': wizard.survey_id.id,
                    'deadline': wizard.date_deadline,
                    'date_create': datetime.now(),
                    'type': 'link',
                    'state': 'new',
                    'token': token,
                    'partner_id': partner_id,
                    'email': email},
                    context=context)
                return token

        for wizard in self.browse(cr, uid, ids, context=context):
            # check if __URL__ is in the text
            if wizard.body.find("__URL__") < 0:
                raise UserError(_("The content of the text don't contain '__URL__'. \
                    __URL__ is automaticaly converted into the special url of the survey."))

            if not wizard.multi_email and not wizard.partner_ids and (context.get('default_partner_ids') or context.get('default_multi_email')):
                wizard.multi_email = context.get('default_multi_email')
                wizard.partner_ids = context.get('default_partner_ids')

            # quick check of email list
            emails_list = []
            if wizard.multi_email:
                # Skip addresses that already belong to selected partners.
                emails = list(set(emails_split.split(wizard.multi_email)) - set([partner.email for partner in wizard.partner_ids]))
                for email in emails:
                    email = email.strip()
                    if re.search(r"^[^@]+@[^@]+$", email):
                        emails_list.append(email)

            # remove public anonymous access
            partner_list = []
            for partner in wizard.partner_ids:
                if not anonymous_id or not partner.user_ids or anonymous_id not in [x.id for x in partner.user_ids[0].groups_id]:
                    partner_list.append({'id': partner.id, 'email': partner.email})

            if not len(emails_list) and not len(partner_list):
                if wizard.model == 'res.partner' and wizard.res_id:
                    return False
                raise UserError(_("Please enter at least one valid recipient."))

            # Send one invitation per raw email and per selected partner.
            for email in emails_list:
                partner_id = partner_obj.search(cr, uid, [('email', '=', email)], context=context)
                partner_id = partner_id and partner_id[0] or None
                token = create_token(wizard, partner_id, email)
                create_response_and_send_mail(wizard, token, partner_id, email)

            for partner in partner_list:
                token = create_token(wizard, partner['id'], partner['email'])
                create_response_and_send_mail(wizard, token, partner['id'], partner['email'])

        return {'type': 'ir.actions.act_window_close'}
| gpl-3.0 |
h3biomed/ansible | lib/ansible/modules/cloud/ovirt/ovirt_api_facts.py | 32 | 2429 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: ovirt_api_facts
short_description: Retrieve facts about the oVirt/RHV API
author: "Ondra Machacek (@machacekondra)"
version_added: "2.5"
description:
- "Retrieve facts about the oVirt/RHV API."
notes:
- "This module creates a new top-level C(ovirt_api) fact,
which contains a information about oVirt/RHV API."
extends_documentation_fragment: ovirt_facts
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Gather facts oVirt API:
- ovirt_api_facts:
- debug:
var: ovirt_api
'''
RETURN = '''
ovirt_api:
description: "Dictionary describing the oVirt API information.
Api attributes are mapped to dictionary keys,
all API attributes can be found at following
url: https://ovirt.example.com/ovirt-engine/api/model#types/api."
returned: On success.
type: dict
'''
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
check_sdk,
create_connection,
get_dict_of_struct,
ovirt_facts_full_argument_spec,
)
def main():
    """Entry point: gather oVirt API facts and report them to Ansible."""
    argument_spec = ovirt_facts_full_argument_spec()
    module = AnsibleModule(argument_spec)
    check_sdk(module)

    # BUG FIX: `auth` and `connection` were only assigned inside the try
    # block, so a failure in params.pop()/create_connection() made the
    # finally clause raise NameError and mask the original error.
    auth = None
    connection = None
    try:
        auth = module.params.pop('auth')
        connection = create_connection(auth)
        api = connection.system_service().get()
        module.exit_json(
            changed=False,
            ansible_facts=dict(
                ovirt_api=get_dict_of_struct(
                    struct=api,
                    connection=connection,
                    fetch_nested=module.params.get('fetch_nested'),
                    attributes=module.params.get('nested_attributes'),
                )
            ),
        )
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        # Only close when a connection was established; log out only when
        # the credentials were not token-based (a token outlives the call).
        if connection is not None:
            connection.close(logout=auth.get('token') is None)


if __name__ == '__main__':
    main()
| gpl-3.0 |
dexterx17/nodoSocket | clients/Python-2.7.6/Lib/encodings/iso8859_11.py | 593 | 12591 | """ Python Character Mapping Codec iso8859_11 generated from 'MAPPINGS/ISO8859/8859-11.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless codec: both directions delegate to the C-level charmap
    # helpers using the module-level encoding_table/decoding_table.

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # Charmap encoding is stateless, so `final` needs no special handling;
        # [0] drops the consumed-length part of the (output, length) tuple.
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # Single-byte charset: every byte decodes independently, so no state
        # is carried between calls and `final` is irrelevant.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream wrapper; all encoding work is inherited from Codec.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream wrapper; all decoding work is inherited from Codec.
    pass
### encodings module API
def getregentry():
    # Standard ``encodings`` package hook: bundle this module's codec
    # classes into the CodecInfo object consumed by the codec registry.
    return codecs.CodecInfo(
        name='iso8859-11',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u0e01' # 0xA1 -> THAI CHARACTER KO KAI
u'\u0e02' # 0xA2 -> THAI CHARACTER KHO KHAI
u'\u0e03' # 0xA3 -> THAI CHARACTER KHO KHUAT
u'\u0e04' # 0xA4 -> THAI CHARACTER KHO KHWAI
u'\u0e05' # 0xA5 -> THAI CHARACTER KHO KHON
u'\u0e06' # 0xA6 -> THAI CHARACTER KHO RAKHANG
u'\u0e07' # 0xA7 -> THAI CHARACTER NGO NGU
u'\u0e08' # 0xA8 -> THAI CHARACTER CHO CHAN
u'\u0e09' # 0xA9 -> THAI CHARACTER CHO CHING
u'\u0e0a' # 0xAA -> THAI CHARACTER CHO CHANG
u'\u0e0b' # 0xAB -> THAI CHARACTER SO SO
u'\u0e0c' # 0xAC -> THAI CHARACTER CHO CHOE
u'\u0e0d' # 0xAD -> THAI CHARACTER YO YING
u'\u0e0e' # 0xAE -> THAI CHARACTER DO CHADA
u'\u0e0f' # 0xAF -> THAI CHARACTER TO PATAK
u'\u0e10' # 0xB0 -> THAI CHARACTER THO THAN
u'\u0e11' # 0xB1 -> THAI CHARACTER THO NANGMONTHO
u'\u0e12' # 0xB2 -> THAI CHARACTER THO PHUTHAO
u'\u0e13' # 0xB3 -> THAI CHARACTER NO NEN
u'\u0e14' # 0xB4 -> THAI CHARACTER DO DEK
u'\u0e15' # 0xB5 -> THAI CHARACTER TO TAO
u'\u0e16' # 0xB6 -> THAI CHARACTER THO THUNG
u'\u0e17' # 0xB7 -> THAI CHARACTER THO THAHAN
u'\u0e18' # 0xB8 -> THAI CHARACTER THO THONG
u'\u0e19' # 0xB9 -> THAI CHARACTER NO NU
u'\u0e1a' # 0xBA -> THAI CHARACTER BO BAIMAI
u'\u0e1b' # 0xBB -> THAI CHARACTER PO PLA
u'\u0e1c' # 0xBC -> THAI CHARACTER PHO PHUNG
u'\u0e1d' # 0xBD -> THAI CHARACTER FO FA
u'\u0e1e' # 0xBE -> THAI CHARACTER PHO PHAN
u'\u0e1f' # 0xBF -> THAI CHARACTER FO FAN
u'\u0e20' # 0xC0 -> THAI CHARACTER PHO SAMPHAO
u'\u0e21' # 0xC1 -> THAI CHARACTER MO MA
u'\u0e22' # 0xC2 -> THAI CHARACTER YO YAK
u'\u0e23' # 0xC3 -> THAI CHARACTER RO RUA
u'\u0e24' # 0xC4 -> THAI CHARACTER RU
u'\u0e25' # 0xC5 -> THAI CHARACTER LO LING
u'\u0e26' # 0xC6 -> THAI CHARACTER LU
u'\u0e27' # 0xC7 -> THAI CHARACTER WO WAEN
u'\u0e28' # 0xC8 -> THAI CHARACTER SO SALA
u'\u0e29' # 0xC9 -> THAI CHARACTER SO RUSI
u'\u0e2a' # 0xCA -> THAI CHARACTER SO SUA
u'\u0e2b' # 0xCB -> THAI CHARACTER HO HIP
u'\u0e2c' # 0xCC -> THAI CHARACTER LO CHULA
u'\u0e2d' # 0xCD -> THAI CHARACTER O ANG
u'\u0e2e' # 0xCE -> THAI CHARACTER HO NOKHUK
u'\u0e2f' # 0xCF -> THAI CHARACTER PAIYANNOI
u'\u0e30' # 0xD0 -> THAI CHARACTER SARA A
u'\u0e31' # 0xD1 -> THAI CHARACTER MAI HAN-AKAT
u'\u0e32' # 0xD2 -> THAI CHARACTER SARA AA
u'\u0e33' # 0xD3 -> THAI CHARACTER SARA AM
u'\u0e34' # 0xD4 -> THAI CHARACTER SARA I
u'\u0e35' # 0xD5 -> THAI CHARACTER SARA II
u'\u0e36' # 0xD6 -> THAI CHARACTER SARA UE
u'\u0e37' # 0xD7 -> THAI CHARACTER SARA UEE
u'\u0e38' # 0xD8 -> THAI CHARACTER SARA U
u'\u0e39' # 0xD9 -> THAI CHARACTER SARA UU
u'\u0e3a' # 0xDA -> THAI CHARACTER PHINTHU
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\u0e3f' # 0xDF -> THAI CURRENCY SYMBOL BAHT
u'\u0e40' # 0xE0 -> THAI CHARACTER SARA E
u'\u0e41' # 0xE1 -> THAI CHARACTER SARA AE
u'\u0e42' # 0xE2 -> THAI CHARACTER SARA O
u'\u0e43' # 0xE3 -> THAI CHARACTER SARA AI MAIMUAN
u'\u0e44' # 0xE4 -> THAI CHARACTER SARA AI MAIMALAI
u'\u0e45' # 0xE5 -> THAI CHARACTER LAKKHANGYAO
u'\u0e46' # 0xE6 -> THAI CHARACTER MAIYAMOK
u'\u0e47' # 0xE7 -> THAI CHARACTER MAITAIKHU
u'\u0e48' # 0xE8 -> THAI CHARACTER MAI EK
u'\u0e49' # 0xE9 -> THAI CHARACTER MAI THO
u'\u0e4a' # 0xEA -> THAI CHARACTER MAI TRI
u'\u0e4b' # 0xEB -> THAI CHARACTER MAI CHATTAWA
u'\u0e4c' # 0xEC -> THAI CHARACTER THANTHAKHAT
u'\u0e4d' # 0xED -> THAI CHARACTER NIKHAHIT
u'\u0e4e' # 0xEE -> THAI CHARACTER YAMAKKAN
u'\u0e4f' # 0xEF -> THAI CHARACTER FONGMAN
u'\u0e50' # 0xF0 -> THAI DIGIT ZERO
u'\u0e51' # 0xF1 -> THAI DIGIT ONE
u'\u0e52' # 0xF2 -> THAI DIGIT TWO
u'\u0e53' # 0xF3 -> THAI DIGIT THREE
u'\u0e54' # 0xF4 -> THAI DIGIT FOUR
u'\u0e55' # 0xF5 -> THAI DIGIT FIVE
u'\u0e56' # 0xF6 -> THAI DIGIT SIX
u'\u0e57' # 0xF7 -> THAI DIGIT SEVEN
u'\u0e58' # 0xF8 -> THAI DIGIT EIGHT
u'\u0e59' # 0xF9 -> THAI DIGIT NINE
u'\u0e5a' # 0xFA -> THAI CHARACTER ANGKHANKHU
u'\u0e5b' # 0xFB -> THAI CHARACTER KHOMUT
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
)
### Encoding table
# Inverse mapping (unicode -> byte) derived from decoding_table; positions
# holding u'\ufffe' are unmapped and raise on encode under 'strict' errors.
encoding_table=codecs.charmap_build(decoding_table)
| mit |
wimnat/ansible-modules-core | network/nxos/nxos_evpn_global.py | 21 | 9097 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: nxos_evpn_global
version_added: "2.2"
short_description: Handles the EVPN control plane for VXLAN.
description:
- Handles the EVPN control plane for VXLAN.
author: Gabriele Gerbino (@GGabriele)
extends_documentation_fragment: nxos
options:
nv_overlay_evpn:
description:
- EVPN control plane.
required: true
choices: ['true', 'false']
'''
EXAMPLES = '''
- nxos_evpn_global:
nv_overlay_evpn: true
username: "{{ un }}"
password: "{{ pwd }}"
host: "{{ inventory_hostname }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: verbose mode
type: dict
sample: {"nv_overlay_evpn": true}
existing:
description: k/v pairs of existing configuration
returned: verbose mode
type: dict
sample: {"nv_overlay_evpn": false}
end_state:
description: k/v pairs of configuration after module execution
returned: verbose mode
type: dict
sample: {"nv_overlay_evpn": true}
updates:
description: commands sent to the device
returned: always
type: list
sample: ["nv overlay evpn"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
# COMMON CODE FOR MIGRATION
import re
import ansible.module_utils.nxos
from ansible.module_utils.basic import get_exception
from ansible.module_utils.netcfg import NetworkConfig, ConfigLine
from ansible.module_utils.network import NetworkModule
from ansible.module_utils.shell import ShellError
def to_list(val):
    """Coerce *val* into a list.

    Lists/tuples are copied into a fresh list, ``None`` becomes an empty
    list, and any other value is wrapped as a single-element list.
    """
    if val is None:
        return []
    if isinstance(val, (list, tuple)):
        return list(val)
    return [val]
class CustomNetworkConfig(NetworkConfig):
    """NetworkConfig subclass adding section expansion/lookup helpers and a
    hierarchy-aware add() used by the migrated NX-OS modules."""

    def expand_section(self, configobj, S=None):
        # Depth-first collection of configobj plus all of its descendants;
        # already-seen nodes are skipped to avoid duplicates.
        if S is None:
            S = list()
        S.append(configobj)
        for child in configobj.children:
            if child in S:
                continue
            self.expand_section(child, S)
        return S

    def get_object(self, path):
        # Return the ConfigLine whose text AND full ancestry match *path*;
        # falls through (returning None) when nothing matches.
        for item in self.items:
            if item.text == path[-1]:
                parents = [p.text for p in item.parents]
                if parents == path[:-1]:
                    return item

    def to_block(self, section):
        # Render a list of ConfigLine objects back into raw config text.
        return '\n'.join([item.raw for item in section])

    def get_section(self, path):
        # Text form of get_section_objects(); an unknown path yields an
        # empty list instead of raising.
        try:
            section = self.get_section_objects(path)
            return self.to_block(section)
        except ValueError:
            return list()

    def get_section_objects(self, path):
        # Accept a bare string as a one-element path for convenience.
        if not isinstance(path, list):
            path = [path]
        obj = self.get_object(path)
        if not obj:
            raise ValueError('path does not exist in config')
        return self.expand_section(obj)

    def add(self, lines, parents=None):
        """Add one or more configuration lines, optionally nested under the
        section identified by *parents* (missing parents are created)."""
        ancestors = list()
        offset = 0
        obj = None

        ## global config command
        if not parents:
            for line in to_list(lines):
                item = ConfigLine(line)
                item.raw = line
                if item not in self.items:
                    self.items.append(item)

        else:
            # Walk/create the parent chain, indenting each level.
            for index, p in enumerate(parents):
                try:
                    i = index + 1
                    obj = self.get_section_objects(parents[:i])[0]
                    ancestors.append(obj)
                except ValueError:
                    # add parent to config
                    offset = index * self.indent
                    obj = ConfigLine(p)
                    obj.raw = p.rjust(len(p) + offset)
                    if ancestors:
                        obj.parents = list(ancestors)
                        ancestors[-1].children.append(obj)
                    self.items.append(obj)
                    ancestors.append(obj)

            # add child objects
            for line in to_list(lines):
                # check if child already exists
                for child in ancestors[-1].children:
                    if child.text == line:
                        break
                else:
                    offset = len(parents) * self.indent
                    item = ConfigLine(line)
                    item.raw = line.rjust(len(line) + offset)
                    item.parents = ancestors
                    ancestors[-1].children.append(item)
                    self.items.append(item)
def get_network_module(**kwargs):
    # Prefer the legacy get_module() factory when the surrounding Ansible
    # runtime injects it into globals; otherwise fall back to NetworkModule.
    try:
        return get_module(**kwargs)
    except NameError:
        return NetworkModule(**kwargs)
def get_config(module, include_defaults=False):
    """Return the device configuration wrapped in a CustomNetworkConfig.

    Uses the user-supplied 'config' parameter when present; otherwise
    fetches the running config from the device.
    """
    # NOTE(review): the include_defaults *argument* is never read; the
    # module parameter of the same name is used instead — confirm intent.
    config = module.params['config']
    if not config:
        try:
            config = module.get_config()
        except AttributeError:
            # Newer module API exposes a config object instead of a method.
            defaults = module.params['include_defaults']
            config = module.config.get_config(include_defaults=defaults)
    return CustomNetworkConfig(indent=2, contents=config)
def load_config(module, candidate):
    """Push the *candidate* config to the device and return a result dict.

    Computes the diff against the current running config, sends only the
    missing commands, optionally copies to startup-config, and reports the
    commands under 'updates'.
    """
    config = get_config(module)
    commands = candidate.difference(config)
    commands = [str(c).strip() for c in commands]

    save_config = module.params['save']

    result = dict(changed=False)

    if commands:
        # In check mode report what would change without touching the device.
        if not module.check_mode:
            try:
                module.configure(commands)
            except AttributeError:
                # Newer module API exposes config() instead of configure().
                module.config(commands)

            if save_config:
                try:
                    module.config.save_config()
                except AttributeError:
                    module.execute(['copy running-config startup-config'])

        result['changed'] = True
        result['updates'] = commands

    return result
# END OF COMMON CODE
# Maps module argument names to the NX-OS CLI commands they control.
PARAM_TO_COMMAND_KEYMAP = {
    'nv_overlay_evpn': 'nv overlay evpn',
}
def invoke(name, *args, **kwargs):
    """Call the module-level function named *name* with the given arguments.

    Returns the function's result, or ``None`` when no such global exists.
    """
    target = globals().get(name)
    if target is None:
        return None
    return target(*args, **kwargs)
def get_value(arg, config, module):
    """Return True when the CLI command mapped to *arg* appears in *config*."""
    command = PARAM_TO_COMMAND_KEYMAP[arg]
    pattern = re.compile(r'(?:{0}\s)(?P<value>.*)$'.format(command), re.M)
    return pattern.search(config) is not None
def get_existing(module):
    """Read the device config and report whether 'nv overlay evpn' is set."""
    existing = {}
    config = str(get_config(module))

    existing['nv_overlay_evpn'] = get_value('nv_overlay_evpn', config, module)
    return existing
def apply_key_map(key_map, table):
    """Translate the keys of *table* through *key_map*.

    Entries whose key maps to a falsy/missing value in *key_map* are
    dropped; values are carried over unchanged.
    """
    new_dict = {}
    for key, value in table.items():
        new_key = key_map.get(key)
        if new_key:
            # The original code re-fetched table.get(key) and then had two
            # byte-identical if/else branches; both collapsed to this.
            new_dict[new_key] = value
    return new_dict
def get_commands(module, existing, proposed, candidate):
    """Append to *candidate* the CLI commands implementing *proposed*.

    True values emit the bare command, False values emit its 'no' form.
    """
    commands = list()
    proposed_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, proposed)
    # Consistency fix: use .items() like apply_key_map() does (iteritems()
    # is Python-2-only).  The existing_commands mapping that was computed
    # here was never read, so it has been dropped.
    for key, value in proposed_commands.items():
        if value is True:
            commands.append(key)
        elif value is False:
            commands.append('no {0}'.format(key))
    if commands:
        candidate.add(commands, parents=[])
def main():
    """Module entry point: converge the 'nv overlay evpn' global setting."""
    argument_spec = dict(
        nv_overlay_evpn=dict(required=True, type='bool'),
        include_defaults=dict(default=True),
        config=dict(),
        save=dict(type='bool', default=False)
    )
    module = get_network_module(argument_spec=argument_spec,
                                supports_check_mode=True)

    existing = invoke('get_existing', module)
    end_state = existing
    proposed = dict(nv_overlay_evpn=module.params['nv_overlay_evpn'])

    result = {}
    candidate = CustomNetworkConfig(indent=3)
    invoke('get_commands', module, existing, proposed, candidate)

    # Only touch the device when the desired state differs from reality.
    if proposed != existing:
        try:
            response = load_config(module, candidate)
            result.update(response)
        except ShellError:
            exc = get_exception()
            module.fail_json(msg=str(exc))
    else:
        result['updates'] = []

    result['connected'] = module.connected
    if module._verbosity > 0:
        # Verbose runs include before/after state for debugging.
        end_state = invoke('get_existing', module)
        result['end_state'] = end_state
        result['existing'] = existing
        result['proposed'] = proposed

    module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
wilsonrivera/scalider-v2 | tools/UnidecodeDataCompiler/data/x074.py | 252 | 4696 | data = (
'Han ', # 0x00
'Xuan ', # 0x01
'Yan ', # 0x02
'Qiu ', # 0x03
'Quan ', # 0x04
'Lang ', # 0x05
'Li ', # 0x06
'Xiu ', # 0x07
'Fu ', # 0x08
'Liu ', # 0x09
'Ye ', # 0x0a
'Xi ', # 0x0b
'Ling ', # 0x0c
'Li ', # 0x0d
'Jin ', # 0x0e
'Lian ', # 0x0f
'Suo ', # 0x10
'Chiisai ', # 0x11
'[?] ', # 0x12
'Wan ', # 0x13
'Dian ', # 0x14
'Pin ', # 0x15
'Zhan ', # 0x16
'Cui ', # 0x17
'Min ', # 0x18
'Yu ', # 0x19
'Ju ', # 0x1a
'Chen ', # 0x1b
'Lai ', # 0x1c
'Wen ', # 0x1d
'Sheng ', # 0x1e
'Wei ', # 0x1f
'Dian ', # 0x20
'Chu ', # 0x21
'Zhuo ', # 0x22
'Pei ', # 0x23
'Cheng ', # 0x24
'Hu ', # 0x25
'Qi ', # 0x26
'E ', # 0x27
'Kun ', # 0x28
'Chang ', # 0x29
'Qi ', # 0x2a
'Beng ', # 0x2b
'Wan ', # 0x2c
'Lu ', # 0x2d
'Cong ', # 0x2e
'Guan ', # 0x2f
'Yan ', # 0x30
'Diao ', # 0x31
'Bei ', # 0x32
'Lin ', # 0x33
'Qin ', # 0x34
'Pi ', # 0x35
'Pa ', # 0x36
'Que ', # 0x37
'Zhuo ', # 0x38
'Qin ', # 0x39
'Fa ', # 0x3a
'[?] ', # 0x3b
'Qiong ', # 0x3c
'Du ', # 0x3d
'Jie ', # 0x3e
'Hun ', # 0x3f
'Yu ', # 0x40
'Mao ', # 0x41
'Mei ', # 0x42
'Chun ', # 0x43
'Xuan ', # 0x44
'Ti ', # 0x45
'Xing ', # 0x46
'Dai ', # 0x47
'Rou ', # 0x48
'Min ', # 0x49
'Zhen ', # 0x4a
'Wei ', # 0x4b
'Ruan ', # 0x4c
'Huan ', # 0x4d
'Jie ', # 0x4e
'Chuan ', # 0x4f
'Jian ', # 0x50
'Zhuan ', # 0x51
'Yang ', # 0x52
'Lian ', # 0x53
'Quan ', # 0x54
'Xia ', # 0x55
'Duan ', # 0x56
'Yuan ', # 0x57
'Ye ', # 0x58
'Nao ', # 0x59
'Hu ', # 0x5a
'Ying ', # 0x5b
'Yu ', # 0x5c
'Huang ', # 0x5d
'Rui ', # 0x5e
'Se ', # 0x5f
'Liu ', # 0x60
'Shi ', # 0x61
'Rong ', # 0x62
'Suo ', # 0x63
'Yao ', # 0x64
'Wen ', # 0x65
'Wu ', # 0x66
'Jin ', # 0x67
'Jin ', # 0x68
'Ying ', # 0x69
'Ma ', # 0x6a
'Tao ', # 0x6b
'Liu ', # 0x6c
'Tang ', # 0x6d
'Li ', # 0x6e
'Lang ', # 0x6f
'Gui ', # 0x70
'Zhen ', # 0x71
'Qiang ', # 0x72
'Cuo ', # 0x73
'Jue ', # 0x74
'Zhao ', # 0x75
'Yao ', # 0x76
'Ai ', # 0x77
'Bin ', # 0x78
'Tu ', # 0x79
'Chang ', # 0x7a
'Kun ', # 0x7b
'Zhuan ', # 0x7c
'Cong ', # 0x7d
'Jin ', # 0x7e
'Yi ', # 0x7f
'Cui ', # 0x80
'Cong ', # 0x81
'Qi ', # 0x82
'Li ', # 0x83
'Ying ', # 0x84
'Suo ', # 0x85
'Qiu ', # 0x86
'Xuan ', # 0x87
'Ao ', # 0x88
'Lian ', # 0x89
'Man ', # 0x8a
'Zhang ', # 0x8b
'Yin ', # 0x8c
'[?] ', # 0x8d
'Ying ', # 0x8e
'Zhi ', # 0x8f
'Lu ', # 0x90
'Wu ', # 0x91
'Deng ', # 0x92
'Xiou ', # 0x93
'Zeng ', # 0x94
'Xun ', # 0x95
'Qu ', # 0x96
'Dang ', # 0x97
'Lin ', # 0x98
'Liao ', # 0x99
'Qiong ', # 0x9a
'Su ', # 0x9b
'Huang ', # 0x9c
'Gui ', # 0x9d
'Pu ', # 0x9e
'Jing ', # 0x9f
'Fan ', # 0xa0
'Jin ', # 0xa1
'Liu ', # 0xa2
'Ji ', # 0xa3
'[?] ', # 0xa4
'Jing ', # 0xa5
'Ai ', # 0xa6
'Bi ', # 0xa7
'Can ', # 0xa8
'Qu ', # 0xa9
'Zao ', # 0xaa
'Dang ', # 0xab
'Jiao ', # 0xac
'Gun ', # 0xad
'Tan ', # 0xae
'Hui ', # 0xaf
'Huan ', # 0xb0
'Se ', # 0xb1
'Sui ', # 0xb2
'Tian ', # 0xb3
'[?] ', # 0xb4
'Yu ', # 0xb5
'Jin ', # 0xb6
'Lu ', # 0xb7
'Bin ', # 0xb8
'Shou ', # 0xb9
'Wen ', # 0xba
'Zui ', # 0xbb
'Lan ', # 0xbc
'Xi ', # 0xbd
'Ji ', # 0xbe
'Xuan ', # 0xbf
'Ruan ', # 0xc0
'Huo ', # 0xc1
'Gai ', # 0xc2
'Lei ', # 0xc3
'Du ', # 0xc4
'Li ', # 0xc5
'Zhi ', # 0xc6
'Rou ', # 0xc7
'Li ', # 0xc8
'Zan ', # 0xc9
'Qiong ', # 0xca
'Zhe ', # 0xcb
'Gui ', # 0xcc
'Sui ', # 0xcd
'La ', # 0xce
'Long ', # 0xcf
'Lu ', # 0xd0
'Li ', # 0xd1
'Zan ', # 0xd2
'Lan ', # 0xd3
'Ying ', # 0xd4
'Mi ', # 0xd5
'Xiang ', # 0xd6
'Xi ', # 0xd7
'Guan ', # 0xd8
'Dao ', # 0xd9
'Zan ', # 0xda
'Huan ', # 0xdb
'Gua ', # 0xdc
'Bo ', # 0xdd
'Die ', # 0xde
'Bao ', # 0xdf
'Hu ', # 0xe0
'Zhi ', # 0xe1
'Piao ', # 0xe2
'Ban ', # 0xe3
'Rang ', # 0xe4
'Li ', # 0xe5
'Wa ', # 0xe6
'Dekaguramu ', # 0xe7
'Jiang ', # 0xe8
'Qian ', # 0xe9
'Fan ', # 0xea
'Pen ', # 0xeb
'Fang ', # 0xec
'Dan ', # 0xed
'Weng ', # 0xee
'Ou ', # 0xef
'Deshiguramu ', # 0xf0
'Miriguramu ', # 0xf1
'Thon ', # 0xf2
'Hu ', # 0xf3
'Ling ', # 0xf4
'Yi ', # 0xf5
'Ping ', # 0xf6
'Ci ', # 0xf7
'Hekutogura ', # 0xf8
'Juan ', # 0xf9
'Chang ', # 0xfa
'Chi ', # 0xfb
'Sarake ', # 0xfc
'Dang ', # 0xfd
'Meng ', # 0xfe
'Pou ', # 0xff
)
| apache-2.0 |
BeATz-UnKNoWN/python-for-android | python-build/python-libs/gdata/samples/base/dryRunInsert.py | 94 | 2170 | #!/usr/bin/python
#
# Copyright (C) 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import gdata.base.service
import gdata.service
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import atom
import gdata.base
import getpass
# Demonstrates item insertion with a dry run insert operation. The item will
# NOT be added to Google Base.

# Authenticate interactively with username/password (ClientLogin).
gb_client = gdata.base.service.GBaseService()
gb_client.email = raw_input('Please enter your username: ')
gb_client.password = getpass.getpass()

print 'Logging in'
gb_client.ProgrammaticLogin()

# Create a test item which will be used in a dry run insert
item = gdata.base.GBaseItem()
item.author.append(atom.Author(name=atom.Name(text='Mr. Smith')))
item.title = atom.Title(text='He Jingxian\'s chicken')
item.link.append(atom.Link(rel='alternate', link_type='text/html',
    href='http://www.host.com/123456jsh9'))
item.label.append(gdata.base.Label(text='kung pao chicken'))
item.label.append(gdata.base.Label(text='chinese cuisine'))
item.label.append(gdata.base.Label(text='testrecipes'))
item.item_type = gdata.base.ItemType(text='recipes')
item.AddItemAttribute(name='cooking_time', value_type='intUnit', value='30 minutes')
item.AddItemAttribute(name='main_ingredient', value='chicken')
item.AddItemAttribute(name='main_ingredient', value='chili')

# Make an insert request with the dry run flag set so that the item will not
# actually be created.
result = gb_client.InsertItem(item, url_params={'dry-run': 'true'})

# Send the XML from the server to standard out.
print 'Here\'s the XML from the server\'s simulated insert'
print str(result)
print 'Done'
| apache-2.0 |
tilacog/rows | to-do/plugin_mysql.py | 6 | 6134 | # coding: utf-8
# Copyright 2014 Álvaro Justen <https://github.com/turicas/rows/>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import datetime
import MySQLdb
from .rows import Table
from .utils import ipartition, slug
__all__ = [ 'import_from_mysql', 'export_to_mysql']
# TODO: replace 'None' with '' on export_to_*
# TODO: need converters in and out
# TODO: lazy=True|False
# TODO: datetime.time on MYSQL_TYPE
# TODO: import from mysql
# TODO: logging?
# TODO: _mysql_exceptions.OperationalError: (2006, 'MySQL server has gone #
# away')
# Python type -> MySQL column type, used when creating tables on export.
MYSQL_TYPE = {str: 'TEXT', int: 'INT', float: 'FLOAT', datetime.date: 'DATE',
              datetime.datetime: 'DATETIME', bool: 'BOOL'}
# 'BOOL' on MySQL is a shortcut to TINYINT(1)

# MySQLdb numeric field-type code -> its symbolic name (e.g. 'VAR_STRING').
MYSQLDB_TYPE = {getattr(MySQLdb.FIELD_TYPE, x): x \
                for x in dir(MySQLdb.FIELD_TYPE) if not x.startswith('_')}

# MySQLdb symbolic type name -> Python type assigned to imported columns.
MYSQLDB_TO_PYTHON = {'ENUM': str,
                     'STRING': str,
                     'VAR_STRING': str,
                     'BLOB': bytes,
                     'LONG_BLOB': bytes,
                     'MEDIUM_BLOB': bytes,
                     'TINY_BLOB': bytes,
                     'DECIMAL': float,
                     'DOUBLE': float,
                     'FLOAT': float,
                     'INT24': int,
                     'LONG': int,
                     'LONGLONG': int,
                     'TINY': int,
                     'YEAR': int,
                     'DATE': datetime.date,
                     'NEWDATE': datetime.date,
                     'TIME': int,
                     'TIMESTAMP': int,
                     'DATETIME': datetime.datetime}
def _get_mysql_config(connection_str):
colon_index = connection_str.index(':')
at_index = connection_str.index('@')
slash_index = connection_str.index('/')
config = {}
config['user'] = connection_str[:colon_index]
config['passwd'] = connection_str[colon_index + 1:at_index]
config['host'] = connection_str[at_index + 1:slash_index]
config['port'] = 3306
if ':' in config['host']:
data = config['host'].split(':')
config['host'] = data[0]
config['port'] = int(data[1])
if connection_str.count('/') == 1:
table_name = None
config['db'] = connection_str[slash_index + 1:]
else:
second_slash_index = connection_str.index('/', slash_index + 1)
config['db'] = connection_str[slash_index + 1:second_slash_index]
table_name = connection_str[second_slash_index + 1:]
return config, table_name
def _connect_to_mysql(config):
    # Thin wrapper: *config* is the keyword dict built by _get_mysql_config.
    return MySQLdb.connect(**config)
def import_from_mysql(connection_string, limit=None, order_by=None, query=''):
    """Import a MySQL table (or the result of *query*) into a rows Table.

    :param connection_string: ``user:passwd@host[:port]/database[/table]``
    :param limit: optional ``(offset, count)`` pair appended as LIMIT
    :param order_by: optional column expression appended as ORDER BY
    :param query: full SQL statement overriding the generated SELECT
    """
    #TODO: add 'lazy' option
    config, table_name = _get_mysql_config(connection_string)
    connection = _connect_to_mysql(config)
    cursor = connection.cursor()

    if query:
        sql = query
    else:
        # NOTE(review): table_name/order_by are interpolated directly into
        # the SQL text; callers must not pass untrusted values here.
        sql = 'SELECT * FROM ' + table_name
    # Bug fix: ORDER BY must precede LIMIT in MySQL syntax; the previous
    # code appended LIMIT first, producing invalid SQL whenever both
    # options were supplied.
    if order_by is not None:
        sql += ' ORDER BY ' + order_by
    if limit is not None:
        sql += ' LIMIT {0[0]}, {0[1]}'.format(limit)

    cursor.execute(sql)
    column_info = [(x[0], x[1]) for x in cursor.description]
    table = Table(fields=[x[0] for x in cursor.description])
    # Map MySQLdb's numeric column-type codes back to Python types.
    table.types = {name: MYSQLDB_TO_PYTHON[MYSQLDB_TYPE[type_]] \
            for name, type_ in column_info}
    table_rows = [list(row) for row in cursor.fetchall()]
    # Decode byte strings using the connection's character set.
    encoding = connection.character_set_name()
    for row in table_rows:
        for column_index, value in enumerate(row):
            if type(value) is str:
                row[column_index] = value.decode(encoding)
    table._rows = table_rows
    cursor.close()
    connection.close()
    return table
def export_to_mysql(table, connection_string, encoding=None, batch_size=1000,
                    commit_every=10000, callback=None, callback_every=10000):
    """Export a rows Table into MySQL, creating the table if needed.

    Rows are inserted in batches of *batch_size*; the transaction is
    committed every *commit_every* rows and *callback(total)* is invoked
    every *callback_every* rows.
    """
    # NOTE(review): the *encoding* argument is never used — confirm intent.
    config, table_name = _get_mysql_config(connection_string)
    connection = _connect_to_mysql(config)
    cursor = connection.cursor()

    # Create table
    # NOTE(review): table/column names are interpolated into the SQL text;
    # callers must not pass untrusted values.
    fields, types = table.fields, table.types
    field_slugs = [slug(field) for field in fields]
    field_types = [MYSQL_TYPE[types[field]] for field in fields]
    columns_definition = ['{} {}'.format(field, type_)
                          for field, type_ in zip(field_slugs, field_types)]
    sql = 'CREATE TABLE IF NOT EXISTS {} ({})'\
            .format(table_name, ', '.join(columns_definition))
    cursor.execute(sql)

    # Insert items
    columns = ', '.join(field_slugs)
    #placeholders = ['%s' if types[field] in (int, float, bool) else '"%s"'
    #                for field in fields]
    # TODO: fix this string/formatting problem
    placeholders = ['%s' for field in fields]
    sql = 'INSERT INTO {} ({}) VALUES ({})'.format(table_name, columns,
                                                   ', '.join(placeholders))
    # Running counters for total rows, rows since last commit, and rows
    # since last callback.
    total = last_commit = last_callback = 0
    for rows in ipartition(iter(table), batch_size):
        values = [[row[field] for field in fields] for row in rows]
        added = len(values)
        total += added
        last_commit += added
        last_callback += added
        cursor.executemany(sql, values)
        if last_commit >= commit_every:
            connection.commit()
            last_commit = 0
        if callback is not None and last_callback >= callback_every:
            callback(total)
            last_callback = 0

    # Flush any trailing partial batch notifications/commits.
    if callback is not None and last_callback > 0:
        callback(total)
    if last_commit > 0:
        connection.commit()
    connection.close()
| gpl-3.0 |
jideobs/twilioAngular | venv/lib/python2.7/site-packages/setuptools/msvc9_support.py | 429 | 2187 | try:
import distutils.msvc9compiler
except ImportError:
pass
# Saved originals of the distutils.msvc9compiler functions we patch, used
# as fall-backs; stays empty until patch_for_specialized_compiler() runs.
unpatched = dict()
def patch_for_specialized_compiler():
    """
    Patch functions in distutils.msvc9compiler to use the standalone compiler
    build for Python (Windows only). Fall back to original behavior when the
    standalone compiler is not available.
    """
    if 'distutils' not in globals():
        # The module isn't available to be patched
        return

    if unpatched:
        # Already patched
        return

    # Remember the originals so the patched functions can delegate to them.
    unpatched.update(vars(distutils.msvc9compiler))

    distutils.msvc9compiler.find_vcvarsall = find_vcvarsall
    distutils.msvc9compiler.query_vcvarsall = query_vcvarsall
def find_vcvarsall(version):
    """Locate vcvarsall.bat, preferring the standalone "Microsoft Visual
    C++ Compiler for Python" registry entries over distutils' lookup."""
    Reg = distutils.msvc9compiler.Reg
    VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f'
    key = VC_BASE % ('', version)
    try:
        # Per-user installs register the compiler path here
        productdir = Reg.get_value(key, "installdir")
    except KeyError:
        try:
            # All-user installs on a 64-bit system register here
            key = VC_BASE % ('Wow6432Node\\', version)
            productdir = Reg.get_value(key, "installdir")
        except KeyError:
            productdir = None

    if productdir:
        import os
        vcvarsall = os.path.join(productdir, "vcvarsall.bat")
        if os.path.isfile(vcvarsall):
            return vcvarsall

    # Fall back to distutils' stock lookup (Visual Studio installations).
    return unpatched['find_vcvarsall'](version)
def query_vcvarsall(version, *args, **kwargs):
    """Delegate to distutils' query_vcvarsall, improving the error message
    (with a download hint for VC 9) when the compiler is missing."""
    try:
        return unpatched['query_vcvarsall'](version, *args, **kwargs)
    except distutils.errors.DistutilsPlatformError as exc:
        if exc and "vcvarsall.bat" in exc.args[0]:
            message = 'Microsoft Visual C++ %0.1f is required (%s).' % (version, exc.args[0])
            if int(version) == 9:
                # This redirection link is maintained by Microsoft.
                # Contact vspython@microsoft.com if it needs updating.
                raise distutils.errors.DistutilsPlatformError(
                    message + ' Get it from http://aka.ms/vcpython27'
                )
            raise distutils.errors.DistutilsPlatformError(message)
        raise
| mit |
CSC-ORG/Dynamic-Dashboard-2015 | engine/lib/python2.7/site-packages/pip/_vendor/requests/cookies.py | 821 | 16686 | # -*- coding: utf-8 -*-
"""
Compatibility code to be able to use `cookielib.CookieJar` with requests.
requests.utils imports from here, so be careful with imports.
"""
import time
import collections
from .compat import cookielib, urlparse, urlunparse, Morsel
try:
import threading
# grr, pyflakes: this fixes "redefinition of unused 'threading'"
threading
except ImportError:
import dummy_threading as threading
class MockRequest(object):
    """Wraps a `requests.Request` to mimic a `urllib2.Request`.

    The code in `cookielib.CookieJar` expects this interface in order to correctly
    manage cookie policies, i.e., determine whether a cookie can be set, given the
    domains of the request and the cookie.

    The original request object is read-only. The client is responsible for collecting
    the new headers via `get_new_headers()` and interpreting them appropriately. You
    probably want `get_cookie_header`, defined below.
    """

    def __init__(self, request):
        self._r = request
        # Headers cookielib adds (e.g. Cookie) are staged here, never
        # written back onto the wrapped request.
        self._new_headers = {}
        self.type = urlparse(self._r.url).scheme

    def get_type(self):
        return self.type

    def get_host(self):
        return urlparse(self._r.url).netloc

    def get_origin_req_host(self):
        return self.get_host()

    def get_full_url(self):
        # Only return the response's URL if the user hadn't set the Host
        # header
        if not self._r.headers.get('Host'):
            return self._r.url
        # If they did set it, retrieve it and reconstruct the expected domain
        host = self._r.headers['Host']
        parsed = urlparse(self._r.url)
        # Reconstruct the URL as we expect it
        return urlunparse([
            parsed.scheme, host, parsed.path, parsed.params, parsed.query,
            parsed.fragment
        ])

    def is_unverifiable(self):
        # Always report the request as unverifiable; cookielib consults
        # this flag when applying its cookie-acceptance policy.
        return True

    def has_header(self, name):
        return name in self._r.headers or name in self._new_headers

    def get_header(self, name, default=None):
        return self._r.headers.get(name, self._new_headers.get(name, default))

    def add_header(self, key, val):
        """cookielib has no legitimate use for this method; add it back if you find one."""
        raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")

    def add_unredirected_header(self, name, value):
        self._new_headers[name] = value

    def get_new_headers(self):
        return self._new_headers

    # Property aliases so code that reads these as attributes (rather than
    # calling the get_* methods) also works.

    @property
    def unverifiable(self):
        return self.is_unverifiable()

    @property
    def origin_req_host(self):
        return self.get_origin_req_host()

    @property
    def host(self):
        return self.get_host()
class MockResponse(object):
    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    ...what? Basically, expose the parsed HTTP headers from the server response
    the way `cookielib` expects to see them.
    """

    def __init__(self, headers):
        """Make a MockResponse for `cookielib` to read.

        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        """
        self._headers = headers

    def info(self):
        """Return the wrapped headers, as `urllib.addinfourl.info()` would."""
        return self._headers

    def getheaders(self, name):
        """Return all values of the header `name`.

        Bug fix: the previous implementation called the underlying
        `getheaders` but discarded its result, so this always returned None.
        """
        return self._headers.getheaders(name)
def extract_cookies_to_jar(jar, request, response):
    """Extract the cookies from the response into a CookieJar.

    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    # The _original_response field is the wrapped httplib.HTTPResponse;
    # without it there are no parsed headers to harvest cookies from.
    original = getattr(response, '_original_response', None)
    if not original:
        return
    # Adapt both sides to the interfaces cookielib.CookieJar expects:
    # the HTTPMessage with the headers goes into the mock response.
    jar.extract_cookies(MockResponse(original.msg), MockRequest(request))
def get_cookie_header(jar, request):
    """Produce an appropriate Cookie header string to be sent with `request`, or None."""
    mock_request = MockRequest(request)
    # add_cookie_header writes into the mock's private header dict rather
    # than touching the read-only real request.
    jar.add_cookie_header(mock_request)
    return mock_request.get_new_headers().get('Cookie')
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
    """Unsets a cookie by name, by default over all domains and paths.

    Wraps CookieJar.clear(), is O(n).
    """
    # Collect matches first: CookieJar.clear() mutates the jar, so entries
    # must not be removed while iterating it.
    matches = [
        (cookie.domain, cookie.path, cookie.name)
        for cookie in cookiejar
        if cookie.name == name
        and (domain is None or domain == cookie.domain)
        and (path is None or path == cookie.path)
    ]
    for cookie_domain, cookie_path, cookie_name in matches:
        cookiejar.clear(cookie_domain, cookie_path, cookie_name)
class CookieConflictError(RuntimeError):
    """There are two cookies that meet the criteria specified in the cookie jar.

    Use .get and .set and include domain and path args in order to be more
    specific."""
class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
    """Compatibility class; is a cookielib.CookieJar, but exposes a dict interface.

    This is the CookieJar we create by default for requests and sessions that
    don't specify one, since some clients may expect response.cookies and
    session.cookies to support dict operations.

    Don't use the dict interface internally; it's just for compatibility with
    with external client code. All `requests` code should work out of the box
    with externally provided instances of CookieJar, e.g., LWPCookieJar and
    FileCookieJar.

    Caution: dictionary operations that are normally O(1) may be O(n).

    Unlike a regular CookieJar, this class is pickleable.
    """

    def get(self, name, default=None, domain=None, path=None):
        """Dict-like get() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains. Caution: operation is O(n), not O(1)."""
        try:
            return self._find_no_duplicates(name, domain, path)
        except KeyError:
            return default

    def set(self, name, value, **kwargs):
        """Dict-like set() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains."""
        # support client code that unsets cookies by assignment of a None value:
        if value is None:
            remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
            return
        # A Morsel (stdlib Cookie module) carries its own metadata; anything
        # else is treated as a bare value and given jar-wide defaults.
        if isinstance(value, Morsel):
            c = morsel_to_cookie(value)
        else:
            c = create_cookie(name, value, **kwargs)
        self.set_cookie(c)
        return c

    def iterkeys(self):
        """Dict-like iterkeys() that returns an iterator of names of cookies from the jar.
        See itervalues() and iteritems()."""
        for cookie in iter(self):
            yield cookie.name

    def keys(self):
        """Dict-like keys() that returns a list of names of cookies from the jar.
        See values() and items()."""
        return list(self.iterkeys())

    def itervalues(self):
        """Dict-like itervalues() that returns an iterator of values of cookies from the jar.
        See iterkeys() and iteritems()."""
        for cookie in iter(self):
            yield cookie.value

    def values(self):
        """Dict-like values() that returns a list of values of cookies from the jar.
        See keys() and items()."""
        return list(self.itervalues())

    def iteritems(self):
        """Dict-like iteritems() that returns an iterator of name-value tuples from the jar.
        See iterkeys() and itervalues()."""
        for cookie in iter(self):
            yield cookie.name, cookie.value

    def items(self):
        """Dict-like items() that returns a list of name-value tuples from the jar.
        See keys() and values(). Allows client-code to call "dict(RequestsCookieJar)
        and get a vanilla python dict of key value pairs."""
        return list(self.iteritems())

    def list_domains(self):
        """Utility method to list all the domains in the jar."""
        domains = []
        for cookie in iter(self):
            if cookie.domain not in domains:
                domains.append(cookie.domain)
        return domains

    def list_paths(self):
        """Utility method to list all the paths in the jar."""
        paths = []
        for cookie in iter(self):
            if cookie.path not in paths:
                paths.append(cookie.path)
        return paths

    def multiple_domains(self):
        """Returns True if there are multiple domains in the jar.
        Returns False otherwise."""
        domains = []
        for cookie in iter(self):
            if cookie.domain is not None and cookie.domain in domains:
                return True
            domains.append(cookie.domain)
        return False  # there is only one domain in jar

    def get_dict(self, domain=None, path=None):
        """Takes as an argument an optional domain and path and returns a plain old
        Python dict of name-value pairs of cookies that meet the requirements."""
        dictionary = {}
        for cookie in iter(self):
            if (domain is None or cookie.domain == domain) and (path is None
                                                or cookie.path == path):
                dictionary[cookie.name] = cookie.value
        return dictionary

    def __getitem__(self, name):
        """Dict-like __getitem__() for compatibility with client code. Throws exception
        if there are more than one cookie with name. In that case, use the more
        explicit get() method instead. Caution: operation is O(n), not O(1)."""
        return self._find_no_duplicates(name)

    def __setitem__(self, name, value):
        """Dict-like __setitem__ for compatibility with client code. Throws exception
        if there is already a cookie of that name in the jar. In that case, use the more
        explicit set() method instead."""
        self.set(name, value)

    def __delitem__(self, name):
        """Deletes a cookie given a name. Wraps cookielib.CookieJar's remove_cookie_by_name()."""
        remove_cookie_by_name(self, name)

    def set_cookie(self, cookie, *args, **kwargs):
        # Strip backslash-escaped quotes out of quoted cookie values before
        # delegating to the base cookielib implementation.
        if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
            cookie.value = cookie.value.replace('\\"', '')
        return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)

    def update(self, other):
        """Updates this jar with cookies from another CookieJar or dict-like"""
        if isinstance(other, cookielib.CookieJar):
            for cookie in other:
                self.set_cookie(cookie)
        else:
            super(RequestsCookieJar, self).update(other)

    def _find(self, name, domain=None, path=None):
        """Requests uses this method internally to get cookie values. Takes as args name
        and optional domain and path. Returns a cookie.value. If there are conflicting cookies,
        _find arbitrarily chooses one. See _find_no_duplicates if you want an exception thrown
        if there are conflicting cookies."""
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        return cookie.value
        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))

    def _find_no_duplicates(self, name, domain=None, path=None):
        """__get_item__ and get call _find_no_duplicates -- never used in Requests internally.
        Takes as args name and optional domain and path. Returns a cookie.value.
        Throws KeyError if cookie is not found and CookieConflictError if there are
        multiple cookies that match name and optionally domain and path."""
        toReturn = None
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        if toReturn is not None:  # if there are multiple cookies that meet passed in criteria
                            raise CookieConflictError('There are multiple cookies with name, %r' % (name))
                        toReturn = cookie.value  # we will eventually return this as long as no cookie conflict
        if toReturn:
            return toReturn
        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))

    def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        return state

    def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if '_cookies_lock' not in self.__dict__:
            # Recreate the lock dropped by __getstate__.
            self._cookies_lock = threading.RLock()

    def copy(self):
        """Return a copy of this RequestsCookieJar."""
        new_cj = RequestsCookieJar()
        new_cj.update(self)
        return new_cj
def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    """
    # Defaults for every constructor argument cookielib.Cookie requires,
    # overridable through keyword arguments.
    spec = {
        'version': 0,
        'name': name,
        'value': value,
        'port': None,
        'domain': '',
        'path': '/',
        'secure': False,
        'expires': None,
        'discard': True,
        'comment': None,
        'comment_url': None,
        'rest': {'HttpOnly': None},
        'rfc2109': False,
    }

    unknown = set(kwargs) - set(spec)
    if unknown:
        raise TypeError(
            'create_cookie() got unexpected keyword arguments: %s'
            % list(unknown))

    spec.update(kwargs)
    # cookielib.Cookie also wants the derived *_specified flags.
    spec['port_specified'] = bool(spec['port'])
    spec['domain_specified'] = bool(spec['domain'])
    spec['domain_initial_dot'] = spec['domain'].startswith('.')
    spec['path_specified'] = bool(spec['path'])
    return cookielib.Cookie(**spec)
def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair.

    :param morsel: an ``http.cookies``/``Cookie`` Morsel instance.
    :return: the equivalent ``cookielib.Cookie``.
    :raises TypeError: if the morsel's max-age is not an integer string.
    """
    expires = None
    if morsel['max-age']:
        # Morsel attribute values are strings, so max-age must be converted
        # before adding it to the current time; the old code did
        # `time.time() + morsel['max-age']`, which raised TypeError for every
        # well-formed "Max-Age=<digits>" attribute.
        try:
            expires = time.time() + int(morsel['max-age'])
        except ValueError:
            raise TypeError('max-age: %s must be integer' % morsel['max-age'])
    elif morsel['expires']:
        # Parse the RFC-style Expires date, converting local mktime output
        # back to UTC via the timezone offset.
        time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
        expires = time.mktime(
            time.strptime(morsel['expires'], time_template)) - time.timezone
    return create_cookie(
        comment=morsel['comment'],
        comment_url=bool(morsel['comment']),
        discard=False,
        domain=morsel['domain'],
        expires=expires,
        name=morsel.key,
        path=morsel['path'],
        port=None,
        rest={'HttpOnly': morsel['httponly']},
        rfc2109=False,
        secure=bool(morsel['secure']),
        value=morsel.value,
        version=morsel['version'] or 0,
    )
def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
    """Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    """
    if cookiejar is None:
        cookiejar = RequestsCookieJar()

    if cookie_dict is not None:
        # Snapshot existing names so overwrite=False can skip duplicates.
        existing = set(cookie.name for cookie in cookiejar)
        for name, value in cookie_dict.items():
            if overwrite or name not in existing:
                cookiejar.set_cookie(create_cookie(name, value))

    return cookiejar
def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError('You can only merge into CookieJar')

    if isinstance(cookies, dict):
        cookiejar = cookiejar_from_dict(cookies, cookiejar=cookiejar,
                                        overwrite=False)
    elif isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            # Plain cookielib jars have no update(); copy cookie by cookie.
            for incoming_cookie in cookies:
                cookiejar.set_cookie(incoming_cookie)

    return cookiejar
| mit |
lgrahl/threema-msgapi-sdk-python | threema/gateway/key.py | 2 | 4302 | """
Contains functions to decode, encode and generate keys.
"""
import enum
import hashlib
import hmac
import libnacl.encode
import libnacl.public
import libnacl.secret
from .exception import GatewayKeyError
__all__ = (
'HMAC',
'Key',
)
class HMAC:
    """
    A collection of HMAC functions used for the gateway service.
    """
    # Per-lookup-type HMAC keys published by the gateway service.
    keys = {
        'email': b'\x30\xa5\x50\x0f\xed\x97\x01\xfa\x6d\xef\xdb\x61\x08\x41\x90\x0f'
                 b'\xeb\xb8\xe4\x30\x88\x1f\x7a\xd8\x16\x82\x62\x64\xec\x09\xba\xd7',
        'phone': b'\x85\xad\xf8\x22\x69\x53\xf3\xd9\x6c\xfd\x5d\x09\xbf\x29\x55\x5e'
                 b'\xb9\x55\xfc\xd8\xaa\x5e\xc4\xf9\xfc\xd8\x69\xe2\x58\x37\x07\x23'
    }

    @staticmethod
    def hash(message, hash_type):
        """
        Generate the hash for a message type.

        Arguments:
            - `message`: A message.
            - `hash_type`: `email` or `phone`.

        Return a :class:`hmac.HMAC` instance.
        """
        digest_key = HMAC.keys[hash_type]
        return hmac.new(digest_key, msg=message.encode('ascii'),
                        digestmod=hashlib.sha256)
class Key:
    """
    Encode or decode a key.
    """
    # Encoded keys have the form '<type>:<hex>'.
    separator = ':'

    @enum.unique
    class Type(enum.Enum):
        """
        The type of a key.
        """
        private = 'private'
        public = 'public'

    @staticmethod
    def decode(encoded_key, expected_type):
        """
        Decode a key and check its type if required.

        Arguments:
            - `encoded_key`: The encoded key.
            - `expected_type`: One of the types of :class:`Key.Type`.

        Return the key as an :class:`libnacl.public.SecretKey` or
        :class:`libnacl.public.PublicKey` instance.
        """
        # Split into the type prefix and the hex-encoded key material.
        try:
            type_str, hex_key = encoded_key.split(Key.separator)
        except ValueError as exc:
            raise GatewayKeyError('Invalid key format') from exc
        type_ = Key.Type(type_str)

        # Reject keys of the wrong type.
        if type_ != expected_type:
            raise GatewayKeyError('Invalid key type: {}, expected: {}'.format(
                type_, expected_type
            ))

        # De-hexlify and wrap in the matching libnacl key class.
        raw = libnacl.encode.hex_decode(hex_key)
        if type_ == Key.Type.private:
            return libnacl.public.SecretKey(raw)
        return libnacl.public.PublicKey(raw)

    @staticmethod
    def encode(libnacl_key):
        """
        Encode a key.

        Arguments:
            - `libnacl_key`: An instance of either a
              :class:`libnacl.public.SecretKey` or a
              :class:`libnacl.public.PublicKey`.

        Return the encoded key.
        """
        # Detect the key type and hexlify accordingly; SecretKey is checked
        # first because of the libnacl class hierarchy.
        if isinstance(libnacl_key, libnacl.public.SecretKey):
            type_ = Key.Type.private
            hex_key = libnacl_key.hex_sk()
        elif isinstance(libnacl_key, libnacl.public.PublicKey):
            type_ = Key.Type.public
            hex_key = libnacl.encode.hex_encode(libnacl_key.pk)
        else:
            raise GatewayKeyError('Unknown key type: {}'.format(libnacl_key))
        return Key.separator.join((type_.value, hex_key.decode('utf-8')))

    @staticmethod
    def generate_pair():
        """
        Generate a new key pair.

        Return the key pair as a tuple of a
        :class:`libnacl.public.SecretKey` instance and a
        :class:`libnacl.public.PublicKey` instance.
        """
        private_key = libnacl.public.SecretKey()
        return private_key, libnacl.public.PublicKey(private_key.pk)

    @staticmethod
    def generate_secret_key():
        """
        Generate a new secret key box.

        Return a tuple of the key's :class:`bytes` and hex-encoded
        representation.
        """
        box = libnacl.secret.SecretBox()
        return box.sk, box.hex_sk()

    @staticmethod
    def derive_public(private_key):
        """
        Derive a public key from a class:`libnacl.public.SecretKey`
        instance.

        Arguments:
            - `private_key`: A class:`libnacl.public.SecretKey`
              instance.

        Return the :class:`libnacl.public.PublicKey` instance.
        """
        return libnacl.public.PublicKey(private_key.pk)
| mit |
kenshay/ImageScripter | ProgramData/SystemFiles/Python/Lib/idlelib/TreeWidget.py | 10 | 15037 | # XXX TO DO:
# - popup menu
# - support partial or total redisplay
# - key bindings (instead of quick-n-dirty bindings on Canvas):
# - up/down arrow keys to move focus around
# - ditto for page up/down, home/end
# - left/right arrows to expand/collapse & move out/in
# - more doc strings
# - add icons for "file", "module", "class", "method"; better "python" icon
# - callback for selection???
# - multiple-item selection
# - tooltips
# - redo geometry without magic numbers
# - keep track of object ids to allow more careful cleaning
# - optimize tree redraw after expand of subnode
import os
from Tkinter import *
import imp
from idlelib import ZoomHeight
from idlelib.configHandler import idleConf
ICONDIR = "Icons"

# Look for Icons subdirectory in the same directory as this module
try:
    _icondir = os.path.join(os.path.dirname(__file__), ICONDIR)
except NameError:
    # __file__ is undefined (e.g. interactive use); fall back to the bare name.
    _icondir = ICONDIR
if os.path.isdir(_icondir):
    ICONDIR = _icondir
elif not os.path.isdir(ICONDIR):
    raise RuntimeError, "can't find icon directory (%r)" % (ICONDIR,)
def listicons(icondir=ICONDIR):
    """Utility to display the available icons.

    Opens a Tk window showing every *.gif in `icondir`, ten per row, each
    icon labeled underneath with its base name.
    """
    root = Tk()
    import glob
    # Renamed from 'list'/'file', which shadowed the builtins.
    files = glob.glob(os.path.join(icondir, "*.gif"))
    files.sort()
    images = []
    row = column = 0
    for fname in files:
        name = os.path.splitext(os.path.basename(fname))[0]
        image = PhotoImage(file=fname, master=root)
        images.append(image)
        label = Label(root, image=image, bd=1, relief="raised")
        label.grid(row=row, column=column)
        label = Label(root, text=name)
        label.grid(row=row+1, column=column)
        column = column + 1
        if column >= 10:
            row = row+2
            column = 0
    # Keep references on root so Tk doesn't garbage-collect the images.
    root.images = images
class TreeNode:
    """One visible node of the tree, drawn on a Tk Canvas.

    Wraps a TreeItem (the data side) and handles drawing, selection,
    expand/collapse state and in-place label editing.
    """

    def __init__(self, canvas, parent, item):
        self.canvas = canvas
        self.parent = parent          # parent TreeNode, or None for the root
        self.item = item              # the TreeItem this node displays
        self.state = 'collapsed'      # 'collapsed' or 'expanded'
        self.selected = False
        self.children = []
        self.x = self.y = None        # canvas position, set by draw()
        self.iconimages = {} # cache of PhotoImage instances for icons

    def destroy(self):
        # Tear down the subtree depth-first and drop the parent link.
        for c in self.children[:]:
            self.children.remove(c)
            c.destroy()
        self.parent = None

    def geticonimage(self, name):
        """Return (and cache) the PhotoImage for icon `name` from ICONDIR."""
        try:
            return self.iconimages[name]
        except KeyError:
            pass
        file, ext = os.path.splitext(name)
        ext = ext or ".gif"
        fullname = os.path.join(ICONDIR, file + ext)
        image = PhotoImage(master=self.canvas, file=fullname)
        self.iconimages[name] = image
        return image

    def select(self, event=None):
        # Deselect everything else, then redraw this node highlighted.
        if self.selected:
            return
        self.deselectall()
        self.selected = True
        self.canvas.delete(self.image_id)
        self.drawicon()
        self.drawtext()

    def deselect(self, event=None):
        if not self.selected:
            return
        self.selected = False
        self.canvas.delete(self.image_id)
        self.drawicon()
        self.drawtext()

    def deselectall(self):
        # Walk up to the root, then deselect the whole tree from there.
        if self.parent:
            self.parent.deselectall()
        else:
            self.deselecttree()

    def deselecttree(self):
        if self.selected:
            self.deselect()
        for child in self.children:
            child.deselecttree()

    def flip(self, event=None):
        """Toggle expanded/collapsed and notify the item of the double-click."""
        if self.state == 'expanded':
            self.collapse()
        else:
            self.expand()
        self.item.OnDoubleClick()
        return "break"

    def expand(self, event=None):
        if not self.item._IsExpandable():
            return
        if self.state != 'expanded':
            self.state = 'expanded'
            self.update()
            self.view()

    def collapse(self, event=None):
        if self.state != 'collapsed':
            self.state = 'collapsed'
            self.update()

    def view(self):
        """Scroll the canvas so this node (and its subtree) is visible."""
        top = self.y - 2
        bottom = self.lastvisiblechild().y + 17
        height = bottom - top
        visible_top = self.canvas.canvasy(0)
        visible_height = self.canvas.winfo_height()
        visible_bottom = self.canvas.canvasy(visible_height)
        if visible_top <= top and bottom <= visible_bottom:
            return
        x0, y0, x1, y1 = self.canvas._getints(self.canvas['scrollregion'])
        if top >= visible_top and height <= visible_height:
            # Subtree fits: scroll just enough to bring its bottom into view.
            fraction = top + height - visible_height
        else:
            fraction = top
        fraction = float(fraction) / y1
        self.canvas.yview_moveto(fraction)

    def lastvisiblechild(self):
        if self.children and self.state == 'expanded':
            return self.children[-1].lastvisiblechild()
        else:
            return self

    def update(self):
        # Redraws always start from the root node of the tree.
        if self.parent:
            self.parent.update()
        else:
            oldcursor = self.canvas['cursor']
            self.canvas['cursor'] = "watch"
            self.canvas.update()
            self.canvas.delete(ALL)     # XXX could be more subtle
            self.draw(7, 2)
            x0, y0, x1, y1 = self.canvas.bbox(ALL)
            self.canvas.configure(scrollregion=(0, 0, x1, y1))
            self.canvas['cursor'] = oldcursor

    def draw(self, x, y):
        """Draw this node at (x, y); return the y below the drawn subtree."""
        # XXX This hard-codes too many geometry constants!
        dy = 20
        self.x, self.y = x, y
        self.drawicon()
        self.drawtext()
        if self.state != 'expanded':
            return y + dy
        # draw children
        if not self.children:
            # Lazily instantiate child nodes from the item's sublist.
            sublist = self.item._GetSubList()
            if not sublist:
                # _IsExpandable() was mistaken; that's allowed
                return y+17
            for item in sublist:
                child = self.__class__(self.canvas, self, item)
                self.children.append(child)
        cx = x+20
        cy = y + dy
        cylast = 0
        for child in self.children:
            cylast = cy
            self.canvas.create_line(x+9, cy+7, cx, cy+7, fill="gray50")
            cy = child.draw(cx, cy)
            if child.item._IsExpandable():
                if child.state == 'expanded':
                    iconname = "minusnode"
                    callback = child.collapse
                else:
                    iconname = "plusnode"
                    callback = child.expand
                image = self.geticonimage(iconname)
                id = self.canvas.create_image(x+9, cylast+7, image=image)
                # XXX This leaks bindings until canvas is deleted:
                self.canvas.tag_bind(id, "<1>", callback)
                self.canvas.tag_bind(id, "<Double-1>", lambda x: None)
        id = self.canvas.create_line(x+9, y+10, x+9, cylast+7,
            ##stipple="gray50", # XXX Seems broken in Tk 8.0.x
            fill="gray50")
        self.canvas.tag_lower(id) # XXX .lower(id) before Python 1.5.2
        return cy

    def drawicon(self):
        if self.selected:
            imagename = (self.item.GetSelectedIconName() or
                         self.item.GetIconName() or
                         "openfolder")
        else:
            imagename = self.item.GetIconName() or "folder"
        image = self.geticonimage(imagename)
        id = self.canvas.create_image(self.x, self.y, anchor="nw", image=image)
        self.image_id = id
        self.canvas.tag_bind(id, "<1>", self.select)
        self.canvas.tag_bind(id, "<Double-1>", self.flip)

    def drawtext(self):
        textx = self.x+20-1
        texty = self.y-4
        labeltext = self.item.GetLabelText()
        if labeltext:
            id = self.canvas.create_text(textx, texty, anchor="nw",
                                         text=labeltext)
            self.canvas.tag_bind(id, "<1>", self.select)
            self.canvas.tag_bind(id, "<Double-1>", self.flip)
            x0, y0, x1, y1 = self.canvas.bbox(id)
            textx = max(x1, 200) + 10
        text = self.item.GetText() or "<no text>"
        # A pending inline edit must be committed before redrawing.
        try:
            self.entry
        except AttributeError:
            pass
        else:
            self.edit_finish()
        try:
            self.label
        except AttributeError:
            # padding carefully selected (on Windows) to match Entry widget:
            self.label = Label(self.canvas, text=text, bd=0, padx=2, pady=2)
        theme = idleConf.CurrentTheme()
        if self.selected:
            self.label.configure(idleConf.GetHighlight(theme, 'hilite'))
        else:
            self.label.configure(idleConf.GetHighlight(theme, 'normal'))
        id = self.canvas.create_window(textx, texty,
                                       anchor="nw", window=self.label)
        self.label.bind("<1>", self.select_or_edit)
        self.label.bind("<Double-1>", self.flip)
        self.text_id = id

    def select_or_edit(self, event=None):
        # A click on an already-selected, editable item starts inline editing.
        if self.selected and self.item.IsEditable():
            self.edit(event)
        else:
            self.select(event)

    def edit(self, event=None):
        # Overlay an Entry widget on the label for in-place renaming.
        self.entry = Entry(self.label, bd=0, highlightthickness=1, width=0)
        self.entry.insert(0, self.label['text'])
        self.entry.selection_range(0, END)
        self.entry.pack(ipadx=5)
        self.entry.focus_set()
        self.entry.bind("<Return>", self.edit_finish)
        self.entry.bind("<Escape>", self.edit_cancel)

    def edit_finish(self, event=None):
        # Commit the edit; the item may veto or normalize the new text.
        try:
            entry = self.entry
            del self.entry
        except AttributeError:
            return
        text = entry.get()
        entry.destroy()
        if text and text != self.item.GetText():
            self.item.SetText(text)
        text = self.item.GetText()
        self.label['text'] = text
        self.drawtext()
        self.canvas.focus_set()

    def edit_cancel(self, event=None):
        try:
            entry = self.entry
            del self.entry
        except AttributeError:
            return
        entry.destroy()
        self.drawtext()
        self.canvas.focus_set()
class TreeItem:
    """Abstract class representing tree items.

    Methods should typically be overridden, otherwise a default action
    is used.
    """

    def __init__(self):
        """Constructor.  Do whatever you need to do."""

    def GetText(self):
        """Return text string to display."""

    def GetLabelText(self):
        """Return label text string to display in front of text (if any)."""

    # Cached result of IsExpandable(); None means "not yet computed".
    expandable = None

    def _IsExpandable(self):
        """Do not override!  Called by TreeNode."""
        if self.expandable is None:
            self.expandable = self.IsExpandable()
        return self.expandable

    def IsExpandable(self):
        """Return whether there are subitems."""
        return 1

    def _GetSubList(self):
        """Do not override!  Called by TreeNode."""
        if not self.IsExpandable():
            return []
        sublist = self.GetSubList()
        if not sublist:
            # An empty sublist means the cached expandable flag was wrong.
            self.expandable = 0
        return sublist

    def IsEditable(self):
        """Return whether the item's text may be edited."""

    def SetText(self, text):
        """Change the item's text (if it is editable)."""

    def GetIconName(self):
        """Return name of icon to be displayed normally."""

    def GetSelectedIconName(self):
        """Return name of icon to be displayed when selected."""

    def GetSubList(self):
        """Return list of items forming sublist."""

    def OnDoubleClick(self):
        """Called on a double-click on the item."""
# Example application
class FileTreeItem(TreeItem):
    """Example TreeItem subclass -- browse the file system."""

    def __init__(self, path):
        self.path = path

    def GetText(self):
        # Show the basename; for filesystem roots that's empty, so fall back
        # to the full path.
        return os.path.basename(self.path) or self.path

    def IsEditable(self):
        # Roots (empty basename) cannot be renamed.
        return os.path.basename(self.path) != ""

    def SetText(self, text):
        parent_dir = os.path.dirname(self.path)
        newpath = os.path.join(parent_dir, text)
        # Refuse names that would move the file into another directory.
        if os.path.dirname(newpath) != os.path.dirname(self.path):
            return
        try:
            os.rename(self.path, newpath)
        except os.error:
            pass
        else:
            self.path = newpath

    def GetIconName(self):
        if not self.IsExpandable():
            return "python"  # XXX wish there was a "file" icon

    def IsExpandable(self):
        return os.path.isdir(self.path)

    def GetSubList(self):
        try:
            names = os.listdir(self.path)
        except os.error:
            return []
        names.sort(key=os.path.normcase)
        return [FileTreeItem(os.path.join(self.path, name)) for name in names]
# A canvas widget with scroll bars and some useful bindings
class ScrolledCanvas:
    """A Canvas inside a Frame with vertical/horizontal scroll bars and
    keyboard scrolling bindings (PageUp/Down, arrow keys, Alt-2 zoom)."""

    def __init__(self, master, **opts):
        if 'yscrollincrement' not in opts:
            # 17px per scroll unit matches the tree's per-row geometry.
            opts['yscrollincrement'] = 17
        self.master = master
        self.frame = Frame(master)
        self.frame.rowconfigure(0, weight=1)
        self.frame.columnconfigure(0, weight=1)
        self.canvas = Canvas(self.frame, **opts)
        self.canvas.grid(row=0, column=0, sticky="nsew")
        self.vbar = Scrollbar(self.frame, name="vbar")
        self.vbar.grid(row=0, column=1, sticky="nse")
        self.hbar = Scrollbar(self.frame, name="hbar", orient="horizontal")
        self.hbar.grid(row=1, column=0, sticky="ews")
        # Wire scrollbars and canvas to each other in both directions.
        self.canvas['yscrollcommand'] = self.vbar.set
        self.vbar['command'] = self.canvas.yview
        self.canvas['xscrollcommand'] = self.hbar.set
        self.hbar['command'] = self.canvas.xview
        self.canvas.bind("<Key-Prior>", self.page_up)
        self.canvas.bind("<Key-Next>", self.page_down)
        self.canvas.bind("<Key-Up>", self.unit_up)
        self.canvas.bind("<Key-Down>", self.unit_down)
        #if isinstance(master, Toplevel) or isinstance(master, Tk):
        self.canvas.bind("<Alt-Key-2>", self.zoom_height)
        self.canvas.focus_set()

    def page_up(self, event):
        self.canvas.yview_scroll(-1, "page")
        return "break"

    def page_down(self, event):
        self.canvas.yview_scroll(1, "page")
        return "break"

    def unit_up(self, event):
        self.canvas.yview_scroll(-1, "unit")
        return "break"

    def unit_down(self, event):
        self.canvas.yview_scroll(1, "unit")
        return "break"

    def zoom_height(self, event):
        # Delegate to IDLE's window-height zoom helper.
        ZoomHeight.zoom_height(self.master)
        return "break"
def _tree_widget(parent):
    """htest helper: open a file-browser tree window placed below `parent`."""
    import re  # not imported at module level; required to parse the geometry string
    root = Tk()
    root.title("Test TreeWidget")
    # parent.geometry() returns 'WxH+X+Y'; split on 'x' and '+' separators.
    width, height, x, y = list(map(int, re.split('[x+]', parent.geometry())))
    root.geometry("+%d+%d"%(x, y + 150))
    sc = ScrolledCanvas(root, bg="white", highlightthickness=0, takefocus=1)
    sc.frame.pack(expand=1, fill="both", side=LEFT)
    item = FileTreeItem(os.getcwd())
    node = TreeNode(sc.canvas, None, item)
    node.expand()
    root.mainloop()
if __name__ == '__main__':
    # Run the human-verified test (htest) demo when executed directly.
    from idlelib.idle_test.htest import run
    run(_tree_widget)
| gpl-3.0 |
dsbrown/FreeCAD | src/Mod/Start/StartPage/DefaultWorkbench.py | 32 | 1910 | #***************************************************************************
#* *
#* Copyright (c) 2012 *
#* Yorik van Havre <yorik@uncreated.net> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
import FreeCAD, FreeCADGui

# Activate the workbench configured as default, falling back to the
# complete workbench when no preference is set.
workbench = FreeCAD.ConfigGet("DefaultWorkbench")
if not workbench: workbench = "CompleteWorkbench"
FreeCADGui.activateWorkbench(workbench)
# Use the explicitly imported FreeCAD module rather than the bare 'App'
# alias, which is only injected into FreeCAD's embedded console namespace.
FreeCAD.newDocument()
| lgpl-2.1 |
wmbutler/courtlistener | cleaning_scripts/correct_state_cases.py | 2 | 4231 | import os
import sys

# /etc/courtlistener defines INSTALL_ROOT and other deployment settings
# that must exist before Django and the project packages can be imported.
execfile('/etc/courtlistener')
sys.path.append(INSTALL_ROOT)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")

from alert.search.models import Document, Court
from alert.lib.db_tools import queryset_generator
from optparse import OptionParser
def fixer(simulate=False, verbose=False):
    """Fix a few issues discovered.

    Runs three cleanup passes over scraped ("source C") documents:
    expanding 'P.' to 'People' in California case names, expanding
    'People of Mi' in Michigan case names, and marking certain West
    Virginia opinion types as Published.  With simulate=True, changes
    are printed but not saved.
    """
    # Earlier query attempts, kept for reference:
    #docs = queryset_generator(Document.objects.filter(source='C', plain_text=''))
    #docs = Document.objects.raw('''select "pk" from "Document" where "source" = 'C' and "plain_text" ~ '^[[:space:]]*$' ''')
    #docs = Document.objects.raw('''select "pk" from "Document" where "source" = 'C' and "plain_text" = 'Unable to extract document content.' ''')
    def fix_plaintiffs(docs, left, simulate, verbose):
        # Expand the abbreviated 'P.' party on the left or right of 'v.'.
        for doc in docs:
            if verbose:
                print "Fixing document number %s: %s" % (doc.pk, doc)
            old_case_name = doc.citation.case_name
            if left:
                new_case_name = old_case_name.replace('P. v.', 'People v.')
            else:
                new_case_name = old_case_name.replace('v. P.', 'v. People')
            print " Replacing %s" % old_case_name
            print " with %s" % new_case_name
            if not simulate:
                if left:
                    doc.citation.case_name = doc.citation.case_name.replace('P. v.', 'People v.')
                else:
                    doc.citation.case_name = doc.citation.case_name.replace('v. P.', 'v. People')
                doc.citation.save()
    def fix_michigan(docs, left, simulate, verbose):
        # Expand truncated 'People of Mi' in Michigan case names.
        for doc in docs:
            if verbose:
                print "Fixing document number %s: %s" % (doc.pk, doc)
            old_case_name = doc.citation.case_name
            if left:
                new_case_name = old_case_name.replace('People of Mi', 'People of Michigan')
                print " Replacing %s" % old_case_name
                print " with %s" % new_case_name
            if not simulate:
                if left:
                    doc.citation.case_name = doc.citation.case_name.replace('People of Mi', 'People of Michigan')
                doc.citation.save()
    def fix_wva(docs, simulate, verbose):
        # Normalize several W.Va. opinion types to the Published status.
        for doc in docs:
            if verbose:
                print "Fixing document number %s: %s" % (doc.pk, doc)
            if not simulate:
                doc.precedential_status = "Published"
                doc.save()
    # Round one! Fix plaintiffs.
    print "!!! ROUND ONE !!!"
    court = Court.objects.get(pk='cal')
    docs = queryset_generator(Document.objects.filter(source="C", court=court, citation__case_name__contains='P. v.'))
    fix_plaintiffs(docs, True, simulate, verbose)
    # Round three! Fix the Mi cases.
    print "!!! ROUND THREE !!!"
    court = Court.objects.get(pk='mich')
    docs = queryset_generator(Document.objects.filter(source="C", court=court, citation__case_name__startswith='People of Mi '))
    fix_michigan(docs, True, simulate, verbose)
    # Round four! Fix the statuses.
    print "!!! ROUND FOUR !!!"
    court = Court.objects.get(pk='wva')
    docs = queryset_generator(Document.objects.filter(precedential_status__in=['Memorandum Decision', 'Per Curiam Opinion', 'Signed Opinion'],
                                                      court=court))
    fix_wva(docs, simulate, verbose)
def main():
usage = "usage: %prog [--verbose] [---simulate]"
parser = OptionParser(usage)
parser.add_option('-v', '--verbose', action="store_true", dest='verbose',
default=False, help="Display log during execution")
parser.add_option('-s', '--simulate', action="store_true",
dest='simulate', default=False, help=("Simulate the corrections without "
"actually making them."))
(options, args) = parser.parse_args()
verbose = options.verbose
simulate = options.simulate
if simulate:
print "*******************************************"
print "* SIMULATE MODE - NO CHANGES WILL BE MADE *"
print "*******************************************"
return fixer(simulate, verbose)
if __name__ == '__main__':
    # Allow running this cleanup script directly from the command line.
    main()
| agpl-3.0 |
18padx08/PPTex | PPTexEnv_x86_64/lib/python2.7/site-packages/sympy/plotting/pygletplot/tests/test_plotting.py | 109 | 2653 | from sympy.external.importtools import import_module
disabled = False

# if pyglet.gl fails to import, e.g. opengl is missing, we disable the tests
pyglet_gl = import_module("pyglet.gl", catch=(OSError,))
pyglet_window = import_module("pyglet.window", catch=(OSError,))
if not pyglet_gl or not pyglet_window:
    disabled = True

from sympy import symbols, sin, cos

# Shared plot symbols used by every test below.
x, y, z = symbols('x, y, z')
def test_import():
    """Smoke test: PygletPlot is importable."""
    from sympy.plotting.pygletplot import PygletPlot
def test_plot_2d():
    """Plot x over [-5, 5] off-screen and wait for calculations to finish."""
    from sympy.plotting.pygletplot import PygletPlot
    p = PygletPlot(x, [x, -5, 5, 4], visible=False)
    p.wait_for_calculations()
def test_plot_2d_discontinuous():
    """Plot 1/x across its singularity at x=0 without crashing."""
    from sympy.plotting.pygletplot import PygletPlot
    p = PygletPlot(1/x, [x, -1, 1, 2], visible=False)
    p.wait_for_calculations()
def test_plot_3d():
    """Plot the surface x*y over a 3-D cartesian grid."""
    from sympy.plotting.pygletplot import PygletPlot
    p = PygletPlot(x*y, [x, -5, 5, 5], [y, -5, 5, 5], visible=False)
    p.wait_for_calculations()
def test_plot_3d_discontinuous():
    """Plot a 3-D surface with a singularity along x=0."""
    from sympy.plotting.pygletplot import PygletPlot
    p = PygletPlot(1/x, [x, -3, 3, 6], [y, -1, 1, 1], visible=False)
    p.wait_for_calculations()
def test_plot_2d_polar():
    """Plot 1/x in 2-D polar mode."""
    from sympy.plotting.pygletplot import PygletPlot
    p = PygletPlot(1/x, [x, -1, 1, 4], 'mode=polar', visible=False)
    p.wait_for_calculations()
def test_plot_3d_cylinder():
    """Plot 1/y in cylindrical (polar) coordinates as a solid surface."""
    from sympy.plotting.pygletplot import PygletPlot
    p = PygletPlot(
        1/y, [x, 0, 6.282, 4], [y, -1, 1, 4], 'mode=polar;style=solid',
        visible=False)
    p.wait_for_calculations()
def test_plot_3d_spherical():
    """Plot the unit sphere (r=1) in spherical mode as a wireframe."""
    from sympy.plotting.pygletplot import PygletPlot
    p = PygletPlot(
        1, [x, 0, 6.282, 4], [y, 0, 3.141, 4],
        'mode=spherical;style=wireframe',
        visible=False)
    p.wait_for_calculations()
def test_plot_2d_parametric():
    """Plot the parametric circle (sin(x), cos(x)) for x in [0, 2*pi]."""
    from sympy.plotting.pygletplot import PygletPlot
    p = PygletPlot(sin(x), cos(x), [x, 0, 6.282, 4], visible=False)
    p.wait_for_calculations()
def test_plot_3d_parametric():
    """Plot a 3-D parametric helix (sin(x), cos(x), x/5)."""
    from sympy.plotting.pygletplot import PygletPlot
    p = PygletPlot(sin(x), cos(x), x/5.0, [x, 0, 6.282, 4], visible=False)
    p.wait_for_calculations()
def _test_plot_log():
    """Plot log(x) in polar mode (disabled test; note leading underscore).

    Bug fix: ``log`` was referenced but never imported -- the module-level
    import only brings in ``symbols``, ``sin`` and ``cos`` -- so this
    raised NameError whenever re-enabled.  Import it locally instead.
    """
    from sympy import log
    from sympy.plotting.pygletplot import PygletPlot
    p = PygletPlot(log(x), [x, 0, 6.282, 4], 'mode=polar', visible=False)
    p.wait_for_calculations()
def test_plot_integral():
    # Make sure it doesn't treat x as an independent variable
    """Plot an unevaluated Integral; x/z are bound integration variables."""
    from sympy.plotting.pygletplot import PygletPlot
    from sympy import Integral
    p = PygletPlot(Integral(z*x, (x, 1, z), (z, 1, y)), visible=False)
    p.wait_for_calculations()
| mit |
les69/calvin-base | calvin/runtime/south/plugins/async/twistedimpl/client_connection.py | 2 | 6237 | # -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.protocols import basic
from twisted.internet import reactor
from twisted.internet.protocol import Protocol, ClientFactory
from twisted.internet.protocol import DatagramProtocol
from calvin.utilities.calvinlogger import get_logger
from calvin.utilities.calvin_callback import CalvinCBClass
_log = get_logger(__name__)
class DummyError(object):
    """Minimal stand-in for a twisted ``Failure``: wraps a plain message
    string and exposes it through the same ``getErrorMessage()`` accessor
    expected by the factory callbacks."""

    def __init__(self, str_):
        # Parameter name kept for compatibility; stored under a clearer
        # private attribute name.
        self._message = str_

    def getErrorMessage(self):
        """Return the wrapped error message string."""
        return self._message
class UDPRawProtocol(CalvinCBClass, DatagramProtocol):
    """Connected-UDP protocol that forwards every received datagram to the
    registered 'data_received' callbacks.

    Python 2 only: datagramReceived uses tuple parameter unpacking.
    """

    def __init__(self, callbacks=None, **kwargs):
        super(UDPRawProtocol, self).__init__(callbacks)
        # Remote peer this socket is "connected" to.
        self.host = kwargs.pop('host', '')
        self.port = kwargs.pop('port', 0)
        # Owning factory; notified when the transport is torn down.
        self.factory = kwargs.pop('factory', None)

    def startProtocol(self):
        # Restrict the UDP socket to a single peer so send()/receive only
        # exchange traffic with (host, port).
        self.transport.connect(self.host, self.port)

    def stopProtocol(self):
        "Called after all transport is teared down"
        self.factory.clientConnectionLost(None, DummyError("disconnected"))

    def datagramReceived(self, data, (host, port)):
        # Fan the payload out to the registered 'data_received' callbacks.
        self._callback_execute('data_received', data)

    def send(self, data):
        """Send one datagram to the configured peer."""
        self.transport.write(data, (self.host, self.port))
class RawProtocol(CalvinCBClass, Protocol):
    """Unframed TCP protocol: forwards each received chunk verbatim to the
    registered 'data_received' callbacks (no message boundaries)."""

    def __init__(self, callbacks=None, **kwargs):
        super(RawProtocol, self).__init__(callbacks)
        self.host = kwargs.pop('host', '')
        self.port = kwargs.pop('port', 0)

    def dataReceived(self, data):
        self._callback_execute('data_received', data)

    def send(self, data):
        """Write raw bytes to the transport."""
        self.transport.write(data)

    def close(self):
        """Cleanly close the TCP connection."""
        self.transport.loseConnection()
class StringRecieverProtocol(CalvinCBClass, basic.Int16StringReceiver):
    """16-bit length-prefixed string framing.

    NOTE: the misspelled class name ("Reciever") is referenced elsewhere
    and is therefore part of the public API; do not rename.
    """

    def __init__(self, callbacks=None, **kwargs):
        super(StringRecieverProtocol, self).__init__(callbacks)
        self.host = kwargs.pop('host', '')
        self.port = kwargs.pop('port', 0)

    def stringReceived(self, data):
        # One complete length-prefixed string per callback invocation.
        self._callback_execute('data_received', data)
class DelimiterProtocol(CalvinCBClass, basic.LineReceiver):
    """Line-based framing with a configurable delimiter."""

    def __init__(self, callbacks=None, **kwargs):
        # Pop our kwargs before the super() call; assigning 'delimiter'
        # overrides LineReceiver's class-level default.
        self.delimiter = kwargs.pop('delimiter', '\r\n')
        self.host = kwargs.pop('host', '')
        self.port = kwargs.pop('port', 0)
        super(DelimiterProtocol, self).__init__(callbacks)

    def lineReceived(self, data):
        # One delimiter-terminated line (delimiter stripped) per callback.
        self._callback_execute('data_received', data)
class BaseClientProtocolFactory(CalvinCBClass, ClientFactory):
    """Common base for the TCP/UDP client factories below.

    Subclasses set ``_protocol_factory`` and implement ``connect``; this
    base wires protocol instances to the registered callbacks and relays
    connection lifecycle events.
    """

    def __init__(self, callbacks=None):
        super(BaseClientProtocolFactory, self).__init__(callbacks)
        self._callbacks = callbacks
        self._addr = ""
        self._port = 0
        self._delimiter = None

    def startedConnecting(self, connector):
        pass

    def buildProtocol(self, addr):
        # Instantiate the subclass-selected protocol, forwarding only the
        # 'data_received' callback to it.
        self.protocol = self._protocol_factory({'data_received': self._callbacks['data_received']},
                                               delimiter=self._delimiter, host=self._addr, port=self._port,
                                               factory=self)
        # Defer the 'connected' notification so it fires from the reactor
        # loop rather than from inside buildProtocol.
        reactor.callLater(0, self._callback_execute, 'connected', addr)
        return self.protocol

    def disconnect(self):
        """Drop the current connection, if any."""
        if self._connector:
            # TODO: returns defered ?!?
            self._connector.disconnect()
        self.protocol = None

    def send(self, data):
        """Forward *data* to the active protocol instance."""
        self.protocol.send(data)

    def clientConnectionLost(self, connector, reason):
        # NOTE(review): 'connection_lost' is fired twice with different
        # argument shapes; the first call looks like a leftover -- confirm
        # which signature the registered callbacks actually expect.
        self._callback_execute('connection_lost', connector, reason)
        self._callback_execute('connection_lost', (self._addr, self._port), reason.getErrorMessage())
        # TODO: returns defered ?!?

    def clientConnectionFailed(self, connector, reason):
        self._callback_execute('connection_failed', (self._addr, self._port), reason.getErrorMessage())
class UDPClientProtocolFactory(BaseClientProtocolFactory):
    """UDP "client" factory: binds an ephemeral local port and connects a
    UDPRawProtocol to the remote peer."""

    def __init__(self, callbacks=None):
        super(UDPClientProtocolFactory, self).__init__(callbacks)
        self._addr = ""
        self._port = 0
        self._protocol_factory = UDPRawProtocol

    def connect(self, addr, port):
        """Start a connected-UDP session aimed at (addr, port).

        Returns the twisted listening-port object (also stored so that
        disconnect() can tear it down).
        """
        self._addr = addr
        self._port = port
        # Port 0 = let the OS pick an ephemeral local port.
        self._connector = reactor.listenUDP(0, self.buildProtocol((addr, port)))
        return self._connector
class TCPClientProtocolFactory(BaseClientProtocolFactory):
    """TCP client factory; *mode* selects the wire framing:

    - "raw":       no framing (RawProtocol)
    - "string":    16-bit length prefix (StringRecieverProtocol)
    - "delimiter": line-based using *delimiter* (DelimiterProtocol)

    Raises Exception for any other mode.
    """

    def __init__(self, mode, delimiter="\r\n", callbacks=None):
        super(TCPClientProtocolFactory, self).__init__(callbacks)
        self._protocol_factory = None
        self._protocol_type = mode
        self.protocol = None
        # NOTE(review): connect() below never assigns self._connector, so
        # the inherited disconnect() appears to be a no-op for TCP --
        # confirm whether that is intentional.
        self._connector = None
        self._delimiter = delimiter
        self._addr = ""
        self._port = 0

        if mode == "raw":
            self._protocol_factory = RawProtocol
        elif mode == "string":
            self._protocol_factory = StringRecieverProtocol
        elif mode == "delimiter":
            self._protocol_factory = DelimiterProtocol
        else:
            _log.error("Trying use non existing protocol %s !" % (mode, ))
            raise Exception("Trying use non existing protocol %s !" % (mode, ))

    def connect(self, addr, port):
        """Initiate a TCP connection; returns the twisted Connector."""
        self._addr = addr
        self._port = port
        return reactor.connectTCP(addr, port, self)

    def send(self, data):
        """Send *data* using the write method that matches the framing mode."""
        if self._protocol_type == "raw":
            self.protocol.send(data)
        elif self._protocol_type == "string":
            self.protocol.sendString(data)
        elif self._protocol_type == "delimiter":
            self.protocol.sendLine(data)
        else:
            _log.error("Trying use non existing protocol %s !" % self._protocol_type)
| apache-2.0 |
RydrDojo/Ridr | pylotVenv/lib/python2.7/site-packages/sqlalchemy/testing/suite/test_insert.py | 115 | 8132 | from .. import fixtures, config
from ..config import requirements
from .. import exclusions
from ..assertions import eq_
from .. import engines
from sqlalchemy import Integer, String, select, literal_column, literal
from ..schema import Table, Column
class LastrowidTest(fixtures.TablesTest):
    """Round-trip tests for DBAPI ``lastrowid`` / inserted primary keys on
    backends that implement lastrowid (implicit RETURNING disabled)."""

    run_deletes = 'each'
    __backend__ = True
    __requires__ = 'implements_get_lastrowid', 'autoincrement_insert'
    __engine_options__ = {"implicit_returning": False}

    @classmethod
    def define_tables(cls, metadata):
        # One autoincrementing-PK table and one with a manually assigned PK.
        Table('autoinc_pk', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('data', String(50))
              )
        Table('manual_pk', metadata,
              Column('id', Integer, primary_key=True, autoincrement=False),
              Column('data', String(50))
              )

    def _assert_round_trip(self, table, conn):
        # The single inserted row should carry the backend's first
        # autoincrement value.
        row = conn.execute(table.select()).first()
        eq_(
            row,
            (config.db.dialect.default_sequence_base, "some data")
        )

    def test_autoincrement_on_insert(self):
        """INSERT without an explicit PK generates the autoincrement value."""
        config.db.execute(
            self.tables.autoinc_pk.insert(),
            data="some data"
        )
        self._assert_round_trip(self.tables.autoinc_pk, config.db)

    def test_last_inserted_id(self):
        """ResultProxy.inserted_primary_key matches the stored PK."""
        r = config.db.execute(
            self.tables.autoinc_pk.insert(),
            data="some data"
        )
        pk = config.db.scalar(select([self.tables.autoinc_pk.c.id]))
        eq_(
            r.inserted_primary_key,
            [pk]
        )

    # failed on pypy1.9 but seems to be OK on pypy 2.1
    # @exclusions.fails_if(lambda: util.pypy,
    #                      "lastrowid not maintained after "
    #                      "connection close")
    @requirements.dbapi_lastrowid
    def test_native_lastrowid_autoinc(self):
        """The raw DBAPI cursor.lastrowid matches the stored PK."""
        r = config.db.execute(
            self.tables.autoinc_pk.insert(),
            data="some data"
        )
        lastrowid = r.lastrowid
        pk = config.db.scalar(select([self.tables.autoinc_pk.c.id]))
        eq_(
            lastrowid, pk
        )
class InsertBehaviorTest(fixtures.TablesTest):
    """Behavioral tests around INSERT: result auto-close semantics, empty
    inserts, and INSERT ... FROM SELECT (with and without column defaults)."""

    run_deletes = 'each'
    __backend__ = True

    @classmethod
    def define_tables(cls, metadata):
        Table('autoinc_pk', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('data', String(50))
              )
        Table('manual_pk', metadata,
              Column('id', Integer, primary_key=True, autoincrement=False),
              Column('data', String(50))
              )
        # 'x' has a plain Python-side default, 'y' an SQL-expression
        # default (2 + 2), so defaults must fire during INSERT FROM SELECT.
        Table('includes_defaults', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('data', String(50)),
              Column('x', Integer, default=5),
              Column('y', Integer,
                     default=literal_column("2", type_=Integer) + literal(2)))

    def test_autoclose_on_insert(self):
        """An INSERT result is soft-closed and returns no rows
        (implicit RETURNING explicitly disabled when available)."""
        if requirements.returning.enabled:
            engine = engines.testing_engine(
                options={'implicit_returning': False})
        else:
            engine = config.db

        r = engine.execute(
            self.tables.autoinc_pk.insert(),
            data="some data"
        )
        assert r._soft_closed
        assert not r.closed
        assert r.is_insert
        assert not r.returns_rows

    @requirements.returning
    def test_autoclose_on_insert_implicit_returning(self):
        """Same auto-close behavior when implicit RETURNING is in use."""
        r = config.db.execute(
            self.tables.autoinc_pk.insert(),
            data="some data"
        )
        assert r._soft_closed
        assert not r.closed
        assert r.is_insert
        assert not r.returns_rows

    @requirements.empty_inserts
    def test_empty_insert(self):
        """INSERT with no values still produces a row with a generated PK."""
        r = config.db.execute(
            self.tables.autoinc_pk.insert(),
        )
        assert r._soft_closed
        assert not r.closed

        r = config.db.execute(
            self.tables.autoinc_pk.select().
            where(self.tables.autoinc_pk.c.id != None)
        )
        assert len(r.fetchall())

    @requirements.insert_from_select
    def test_insert_from_select(self):
        """INSERT ... FROM SELECT copies matching rows with shifted ids."""
        table = self.tables.manual_pk
        config.db.execute(
            table.insert(),
            [
                dict(id=1, data="data1"),
                dict(id=2, data="data2"),
                dict(id=3, data="data3"),
            ]
        )

        config.db.execute(
            table.insert(inline=True).
            from_select(("id", "data",),
                        select([table.c.id + 5, table.c.data]).
                        where(table.c.data.in_(["data2", "data3"]))
                        ),
        )

        eq_(
            config.db.execute(
                select([table.c.data]).order_by(table.c.data)
            ).fetchall(),
            [("data1", ), ("data2", ), ("data2", ),
             ("data3", ), ("data3", )]
        )

    @requirements.insert_from_select
    def test_insert_from_select_with_defaults(self):
        """INSERT ... FROM SELECT also evaluates column defaults (x=5, y=4)."""
        table = self.tables.includes_defaults
        config.db.execute(
            table.insert(),
            [
                dict(id=1, data="data1"),
                dict(id=2, data="data2"),
                dict(id=3, data="data3"),
            ]
        )

        config.db.execute(
            table.insert(inline=True).
            from_select(("id", "data",),
                        select([table.c.id + 5, table.c.data]).
                        where(table.c.data.in_(["data2", "data3"]))
                        ),
        )

        eq_(
            config.db.execute(
                select([table]).order_by(table.c.data, table.c.id)
            ).fetchall(),
            [(1, 'data1', 5, 4), (2, 'data2', 5, 4),
             (7, 'data2', 5, 4), (3, 'data3', 5, 4), (8, 'data3', 5, 4)]
        )
class ReturningTest(fixtures.TablesTest):
    """Tests for INSERT ... RETURNING, both explicit and implicit, on
    backends that support RETURNING."""

    run_create_tables = 'each'
    __requires__ = 'returning', 'autoincrement_insert'
    __backend__ = True
    __engine_options__ = {"implicit_returning": True}

    def _assert_round_trip(self, table, conn):
        # The single inserted row should carry the backend's first
        # autoincrement value.
        row = conn.execute(table.select()).first()
        eq_(
            row,
            (config.db.dialect.default_sequence_base, "some data")
        )

    @classmethod
    def define_tables(cls, metadata):
        Table('autoinc_pk', metadata,
              Column('id', Integer, primary_key=True,
                     test_needs_autoincrement=True),
              Column('data', String(50))
              )

    @requirements.fetch_rows_post_commit
    def test_explicit_returning_pk_autocommit(self):
        """Explicit RETURNING under autocommit yields the stored PK."""
        engine = config.db
        table = self.tables.autoinc_pk
        r = engine.execute(
            table.insert().returning(
                table.c.id),
            data="some data"
        )
        pk = r.first()[0]
        fetched_pk = config.db.scalar(select([table.c.id]))
        eq_(fetched_pk, pk)

    def test_explicit_returning_pk_no_autocommit(self):
        """Explicit RETURNING inside an explicit transaction."""
        engine = config.db
        table = self.tables.autoinc_pk
        with engine.begin() as conn:
            r = conn.execute(
                table.insert().returning(
                    table.c.id),
                data="some data"
            )
            pk = r.first()[0]
        fetched_pk = config.db.scalar(select([table.c.id]))
        eq_(fetched_pk, pk)

    # NOTE(review): "implcit" typo is part of the discoverable test name;
    # renaming would change which tests downstream suites select by name.
    def test_autoincrement_on_insert_implcit_returning(self):
        config.db.execute(
            self.tables.autoinc_pk.insert(),
            data="some data"
        )
        self._assert_round_trip(self.tables.autoinc_pk, config.db)

    def test_last_inserted_id_implicit_returning(self):
        """inserted_primary_key is populated via implicit RETURNING."""
        r = config.db.execute(
            self.tables.autoinc_pk.insert(),
            data="some data"
        )
        pk = config.db.scalar(select([self.tables.autoinc_pk.c.id]))
        eq_(
            r.inserted_primary_key,
            [pk]
        )
__all__ = ('LastrowidTest', 'InsertBehaviorTest', 'ReturningTest')
| mit |
darktears/chromium-crosswalk | build/android/pylib/utils/xvfb.py | 143 | 1551 | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0702
import os
import signal
import subprocess
import sys
import time
def _IsLinux():
  """Return True when running on a Linux platform, else False."""
  is_linux = sys.platform.startswith('linux')
  return is_linux
class Xvfb(object):
  """Class to start and stop Xvfb if relevant. Nop if not Linux."""

  def __init__(self):
    # PID of the spawned Xvfb process; 0 means "not running".
    self._pid = 0

  def Start(self):
    """Start Xvfb and set an appropriate DISPLAY environment. Linux only.

    Copied from tools/code_coverage/coverage_posix.py

    Raises Exception if Xvfb cannot be started or confirmed healthy.
    """
    if not _IsLinux():
      return
    proc = subprocess.Popen(['Xvfb', ':9', '-screen', '0', '1024x768x24',
                             '-ac'],
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    self._pid = proc.pid
    if not self._pid:
      raise Exception('Could not start Xvfb')
    os.environ['DISPLAY'] = ':9'

    # Now confirm, giving a chance for it to start if needed.
    for _ in range(10):
      proc = subprocess.Popen('xdpyinfo >/dev/null', shell=True)
      _, retcode = os.waitpid(proc.pid, 0)
      if retcode == 0:
        break
      time.sleep(0.25)
    if retcode != 0:
      raise Exception('Could not confirm Xvfb happiness')

  def Stop(self):
    """Stop Xvfb if needed. Linux only."""
    if self._pid:
      try:
        os.kill(self._pid, signal.SIGKILL)
      except OSError:
        # Bug fix: previously a bare 'except:' which also swallowed
        # KeyboardInterrupt/SystemExit.  os.kill on a dead or foreign
        # process raises OSError; that is the only expected failure here.
        pass
      # Bug fix: 'del os.environ["DISPLAY"]' raised KeyError when DISPLAY
      # was already unset (e.g. Stop() called twice); pop is idempotent.
      os.environ.pop('DISPLAY', None)
      self._pid = 0
| bsd-3-clause |
artnavsegda/avrnavsegda | src/ASF/common/services/gfx_mono/tools/dump_display_over_serial.py | 73 | 4558 | ##
# \file
#
# \brief Convert display data on a serial line to a graphical representation
#
# Copyright (C) 2011-2014 Atmel Corporation. All rights reserved.
#
# \page License
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. The name of Atmel may not be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# 4. This software may only be redistributed and used in connection with an
# Atmel microcontroller product.
#
# THIS SOFTWARE IS PROVIDED BY ATMEL "AS IS" AND ANY EXPRESS OR IMPLIED
# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT ARE
# EXPRESSLY AND SPECIFICALLY DISCLAIMED. IN NO EVENT SHALL ATMEL BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import serial
import os.path
import argparse
def scan_for_ports():
    """Probe serial port indexes 0-63, print each one that opens, and
    return them as a list of (index, port name) tuples (Python 2)."""
    available_ports = []
    for index in range(64):
        try:
            serial_port = serial.Serial(index)
            available_ports.append((index, serial_port.portstr))
            serial_port.close()
        except serial.SerialException:
            # Port does not exist or is busy; skip it.
            pass
        except IndexError as Error:
            # Some pyserial backends raise IndexError for out-of-range
            # indexes; treat the same as "no such port".
            pass
    for port_number, port_name in available_ports:
        print "%02d - %s" % (port_number, port_name)
    return available_ports
def dump_display_data(serial_port, baud_rate, output_file_name):
    """Ask the target (command "D") to dump its display contents as XPM
    over the serial link and write the received text to *output_file_name*.

    Returns -1 on setup failure, None on success (Python 2).
    """
    try:
        output_file = open(output_file_name, 'w')
        port = serial.Serial(port = serial_port,
                             baudrate = baud_rate, timeout = 1)
        # Close/re-open so reading starts from a clean port state.
        port.close()
        port.open()
    except ValueError as e:
        print "error: invalid serial port parameters. %s" % (str(e))
        output_file.close()
        return -1
    except serial.SerialException as e:
        print "error: could not open serial port. %s" % (str(e))
        output_file.close()
        return -1
    except IOError as e:
        # NOTE(review): if open(output_file_name) itself raised, the
        # 'output_file' name is unbound in the handlers above -- this
        # handler assumes the IOError came from the file open.
        print "error: could not open output file. %s" % (str(e))
        return -1
    print "Display on %s: %u,8,N,1" % (port.name, port.baudrate)
    port.write("D")
    line = port.readline()
    display_data = ""
    # Accumulate lines until the XPM terminator "};" is received.
    while(line[:2] != "};") :
        display_data = display_data + line[:-1]
        line = port.readline()
    display_data = display_data + line
    port.close()
    print "Writing data to file %s" % (output_file_name)
    output_file.write(display_data)
    output_file.close()
def main():
    """Parse command-line arguments, optionally scan for ports, confirm
    overwrite of an existing output file, then dump the display (Python 2)."""
    parser = argparse.ArgumentParser(description="This script will try to "
            "open the given serial port, send a string to "
            "instruct the target device to dump the contents of "
            "the display to a serial link in XPM format. The "
            "received file is then written to 'display.xpm', "
            "unless a file is specified by the -o option.")
    parser.add_argument("-p", "--port", dest="serial_port",
            help="which serial port to open")
    parser.add_argument("-b", "--baud", dest="baudrate", type=int,
            help="baud rate to use for serial communication",
            default=19200)
    parser.add_argument("-o", "--output", dest="output_file",
            help="write XPM image to FILE. Default is display.xpm.",
            metavar="FILE", default="display.xpm")
    parser.add_argument("-s", "--scan", action="store_true",
            dest="scan_ports",
            help="scan for available serial ports and exit",
            default=False)
    arguments = parser.parse_args()

    # --scan short-circuits everything else.
    if arguments.scan_ports:
        scan_for_ports()
        sys.exit()

    # A serial port is mandatory for the actual dump.
    if arguments.serial_port is None:
        parser.print_usage()
        sys.exit()

    # Interactive confirmation before clobbering an existing file.
    if os.path.exists(arguments.output_file):
        print "Warning: output file '%s' already exists" % (arguments.output_file)
        print "Do you want to write over file '%s'?" % (arguments.output_file)
        answer = raw_input("[yes/NO] ")
        if answer not in ("yes", "Yes", "YES"):
            sys.exit()

    dump_display_data(arguments.serial_port, arguments.baudrate, arguments.output_file)
main()
| lgpl-3.0 |
sarakha63/persomov | couchpotato/core/media/_base/providers/torrent/hd4free.py | 22 | 6068 | import re
import json
import traceback
from couchpotato.core.helpers.variable import tryInt, getIdentifier
from couchpotato.core.logger import CPLog
from couchpotato.core.media._base.providers.torrent.base import TorrentProvider
log = CPLog(__name__)
class Base(TorrentProvider):
    """HD4Free torrent provider: queries the site's JSON search API by
    IMDB id and appends one CouchPotato result dict per hit (Python 2)."""

    urls = {
        'test': 'https://hd4free.xyz/',
        'detail': 'https://hd4free.xyz/details.php?id=%s',
        'search': 'https://hd4free.xyz/searchapi.php?apikey=%s&username=%s&imdbid=%s&internal=%s',
        'download': 'https://hd4free.xyz/download.php?torrent=%s&torrent_pass=%s',
    }

    # Minimum delay between consecutive HTTP requests to the site.
    http_time_between_calls = 1  # Seconds

    def _search(self, movie, quality, results):
        """Search the API for *movie* and append hits to *results* in place."""
        data = self.getJsonData(self.urls['search'] % (self.conf('apikey'), self.conf('username'), getIdentifier(movie), self.conf('internal_only')))

        if data:
            try:
                # API responds with a mapping of result entries.
                for key, result in data.iteritems():
                    if tryInt(result['total_results']) == 0:
                        # API reported no hits at all; nothing to collect.
                        return
                    torrentscore = self.conf('extra_score')
                    releasegroup = result['releasegroup']
                    resolution = result['resolution']
                    encoding = result['encoding']
                    freeleech = tryInt(result['freeleech'])
                    seeders = tryInt(result['seeders'])
                    torrent_desc = '/ %s / %s / %s / %s seeders' % (releasegroup, resolution, encoding, seeders)
                    if freeleech > 0 and self.conf('prefer_internal'):
                        # Boost internal releases when the user prefers them.
                        torrent_desc += '/ Internal'
                        torrentscore += 200
                    if seeders == 0:
                        # Dead torrent: zero the score so it ranks last.
                        torrentscore = 0
                    name = result['release_name']
                    year = tryInt(result['year'])
                    results.append({
                        'id': tryInt(result['torrentid']),
                        # Strip characters the downstream name matcher chokes on.
                        'name': re.sub('[^A-Za-z0-9\-_ \(\).]+', '', '%s (%s) %s' % (name, year, torrent_desc)),
                        'url': self.urls['download'] % (result['torrentid'], result['torrentpass']),
                        'detail_url': self.urls['detail'] % result['torrentid'],
                        'size': tryInt(result['size']),
                        'seeders': tryInt(result['seeders']),
                        'leechers': tryInt(result['leechers']),
                        'age': tryInt(result['age']),
                        'score': torrentscore
                    })

            # NOTE(review): bare except matches the codebase's house style,
            # but it also swallows KeyboardInterrupt -- consider Exception.
            except:
                log.error('Failed getting results from %s: %s', (self.getName(), traceback.format_exc()))
# CouchPotato settings descriptor for this provider: rendered in the
# Searcher settings tab; option values are read back via self.conf(name).
config = [{
    'name': 'hd4free',
    'groups': [
        {
            'tab': 'searcher',
            'list': 'torrent_providers',
            'name': 'HD4Free',
            'wizard': True,
            'description': '<a href="https://hd4free.xyz">HD4Free</a>',
            'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAMAAAAoLQ9TAAABX1BMVEUF6nsH33cJ03EJ1XIJ1nMKzXIKz28Lym4MxGsMxWsMx2wNvmgNv2kNwGkNwWwOuGgOuWYOuWcOumcOu2cOvmgPtWQPtmUPt2UPt2YQr2IQsGIQsGMQsmMQs2QRqmARq2ARrmERrmISpV4SpmASp14SqF8ToFsToFwToVwTo10TpV0UnFoUn1sVllcVmFgWkFUWklYXjVQXjlMXkFUYh1EYilIYi1MZhlEafk0af04agE4agU4beEobeUsbe0wcdUkeaUQebUYfZEMfZ0QgX0AgYEAgYUEhWj4iVz0iWD0jTzkkSzcmQTMmQzQnPTInPjInPzIoNy8oOC8oODAoOTAoOjApMi0pNC4pNS4qLCoqLSsqLisqMCwrJygrKCgrKCkrKSkrKikrKiorKyosIyYsIycsJCcsJScsJigtHyUuGCIuGiMuGyMuHCMuHCQvEyAvFSEvFiEvFyE0ABU0ABY5lYz4AAAA3ElEQVR4AWNIQAMMiYmJCYkIkMCQnpKWkZ4KBGlARlpaLEOor194kI+Pj6+PT0CET0AYg46Alr22NDeHkBinnq6SkitDrolDgYtaapajdpGppoFfGkMhv2GxE0uuPwNfsk6mhHMOQ54isxmbUJKCtWx+tIZQcDpDtqSol7qIMqsRu3dIhJxxFkOBoF2JG5O7lSqjh5S/tkkWQ5SBTbqnfkymv2WGLa95YCSDhZiMvKIwj4GJCpesuDivK0N6VFRUYlRyfHJUchQQJDMkxsfHJcTHAxEIxMVj+BZDAACjwkqhYgsTAAAAAABJRU5ErkJggg==',
            'options': [
                {
                    'name': 'enabled',
                    'type': 'enabler',
                    'default': False,
                },
                {
                    'name': 'username',
                    'default': '',
                    'description': 'Enter your site username.',
                },
                {
                    'name': 'apikey',
                    'default': '',
                    'label': 'API Key',
                    'description': 'Enter your site api key. This can be find on <a href="https://hd4free.xyz/usercp.php?action=security">Profile Security</a>',
                },
                {
                    'name': 'seed_ratio',
                    'label': 'Seed ratio',
                    'type': 'float',
                    'default': 0,
                    'description': 'Will not be (re)moved until this seed ratio is met. HD4Free minimum is 1:1.',
                },
                {
                    'name': 'seed_time',
                    'label': 'Seed time',
                    'type': 'int',
                    'default': 0,
                    'description': 'Will not be (re)moved until this seed time (in hours) is met. HD4Free minimum is 72 hours.',
                },
                {
                    'name': 'prefer_internal',
                    'advanced': True,
                    'type': 'bool',
                    'default': 1,
                    'description': 'Favors internal releases over non-internal releases.',
                },
                {
                    'name': 'internal_only',
                    'advanced': True,
                    'label': 'Internal Only',
                    'type': 'bool',
                    'default': False,
                    'description': 'Only download releases marked as HD4Free internal',
                },
                {
                    'name': 'extra_score',
                    'advanced': True,
                    'label': 'Extra Score',
                    'type': 'int',
                    'default': 0,
                    'description': 'Starting score for each release found via this provider.',
                }
            ],
        },
    ],
}]
| gpl-3.0 |
goddardl/gaffer | apps/gui/gui-1.py | 1 | 5810 | ##########################################################################
#
# Copyright (c) 2011-2012, John Haddon. All rights reserved.
# Copyright (c) 2011-2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import gc
import IECore
import Gaffer
import GafferUI
class gui( Gaffer.Application ) :
	"""Graphical Gaffer application: opens a ScriptWindow per requested
	script and keeps the ApplicationRoot clipboard synced with Qt's."""

	def __init__( self ) :

		Gaffer.Application.__init__(
			self,
			"This application provides a graphical user interface for editing node graphs."
		)

		self.parameters().addParameters(

			[
				IECore.StringVectorParameter(
					name = "scripts",
					description = "A list of scripts to edit.",
					defaultValue = IECore.StringVectorData(),
				),

				IECore.BoolParameter(
					name = "fullScreen",
					description = "Opens the UI in full screen mode.",
					defaultValue = False,
				),
			]

		)

		# Allow script filenames to be passed positionally, without the
		# "-scripts" flag.
		self.parameters().userData()["parser"] = IECore.CompoundObject(
			{
				"flagless" : IECore.StringVectorData( [ "scripts" ] )
			}
		)

		self.__setupClipboardSync()

	def _run( self, args ) :
		"""Load the requested scripts (or an empty one), show their windows
		and enter the main event loop. Returns the process exit status."""

		GafferUI.ScriptWindow.connect( self.root() )

		if len( args["scripts"] ) :
			for fileName in args["scripts"] :
				scriptNode = Gaffer.ScriptNode()
				scriptNode["fileName"].setValue( os.path.abspath( fileName ) )
				# \todo: Display load errors in a dialog, like in python/GafferUI/FileMenu.py
				scriptNode.load( continueOnError = True )
				self.root()["scripts"].addChild( scriptNode )
				GafferUI.FileMenu.addRecentFile( self, fileName )
				del scriptNode
		else :
			self.root()["scripts"].addChild( Gaffer.ScriptNode() )

		if args["fullScreen"].value :
			primaryScript = self.root()["scripts"][-1]
			primaryWindow = GafferUI.ScriptWindow.acquire( primaryScript )
			primaryWindow.setFullScreen( True )

		GafferUI.EventLoop.mainEventLoop().start()

		return 0

	def __setupClipboardSync( self ) :

		## This function sets up two way syncing between the clipboard held in the Gaffer::ApplicationRoot
		# and the global QtGui.QClipboard which is shared with external applications, and used by the cut and paste
		# operations in GafferUI's underlying QWidgets. This is very useful, as it allows nodes to be copied from
		# the graph and pasted into emails/chats etc, and then copied out of emails/chats and pasted into the node graph.
		#
		## \todo I don't think this is the ideal place for this functionality. Firstly, we need it in all apps
		# rather than just the gui app. Secondly, we want a way of using the global clipboard using GafferUI
		# public functions without needing an ApplicationRoot. Thirdly, it's questionable that ApplicationRoot should
		# have a clipboard anyway - it seems like a violation of separation between the gui and non-gui libraries.
		# Perhaps we should abolish the ApplicationRoot clipboard and the ScriptNode cut/copy/paste routines, relegating
		# them all to GafferUI functionality?

		QtGui = GafferUI._qtImport( "QtGui" )

		self.__clipboardContentsChangedConnection = self.root().clipboardContentsChangedSignal().connect( Gaffer.WeakMethod( self.__clipboardContentsChanged ) )
		QtGui.QApplication.clipboard().dataChanged.connect( Gaffer.WeakMethod( self.__qtClipboardContentsChanged ) )
		# Guard flag: suppresses the Qt->Gaffer echo while we are the ones
		# writing to the Qt clipboard.
		self.__ignoreQtClipboardContentsChanged = False

	def __clipboardContentsChanged( self, applicationRoot ) :
		"""Gaffer -> Qt direction: mirror ApplicationRoot clipboard data out."""

		assert( applicationRoot.isSame( self.root() ) )

		data = applicationRoot.getClipboardContents()

		QtGui = GafferUI._qtImport( "QtGui" )
		clipboard = QtGui.QApplication.clipboard()

		try :
			self.__ignoreQtClipboardContentsChanged = True # avoid triggering an unecessary copy back in __qtClipboardContentsChanged
			clipboard.setText( str( data ) )
		finally :
			self.__ignoreQtClipboardContentsChanged = False

	def __qtClipboardContentsChanged( self ) :
		"""Qt -> Gaffer direction: copy external clipboard text back in."""

		if self.__ignoreQtClipboardContentsChanged :
			return

		QtGui = GafferUI._qtImport( "QtGui" )

		text = str( QtGui.QApplication.clipboard().text() )
		if text :
			# Block our own signal handler so the write doesn't echo back.
			with Gaffer.BlockedConnection( self.__clipboardContentsChangedConnection ) :
				self.root().setClipboardContents( IECore.StringData( text ) )
IECore.registerRunTimeTyped( gui )
| bsd-3-clause |
lhilt/scipy | scipy/stats/tests/common_tests.py | 4 | 10728 | from __future__ import division, print_function, absolute_import
import pickle
import numpy as np
import numpy.testing as npt
from numpy.testing import assert_allclose, assert_equal
from scipy._lib._numpy_compat import suppress_warnings
from pytest import raises as assert_raises
import numpy.ma.testutils as ma_npt
from scipy._lib._util import getargspec_no_self as _getargspec
from scipy import stats
def check_named_results(res, attributes, ma=False):
    """Check that a namedtuple-like result exposes each positional entry
    under the corresponding attribute name.

    Parameters
    ----------
    res : tuple-like
        Result object whose entries are also reachable as attributes.
    attributes : sequence of str
        Expected attribute names, in positional order.
    ma : bool, optional
        If True, use the masked-array-aware equality assertion.
    """
    assert_fn = ma_npt.assert_equal if ma else npt.assert_equal
    for position, name in enumerate(attributes):
        assert_fn(res[position], getattr(res, name))
def check_normalization(distfn, args, distname):
    """Check that the zeroth moment, expect(1) and the cdf at the upper
    support bound all equal 1 for ``distfn`` with shape params ``args``."""
    norm_moment = distfn.moment(0, *args)
    npt.assert_allclose(norm_moment, 1.0)

    # this is a temporary plug: either ncf or expect is problematic;
    # best be marked as a knownfail, but I've no clue how to do it.
    if distname == "ncf":
        atol, rtol = 1e-5, 0
    else:
        atol, rtol = 1e-7, 1e-7

    normalization_expect = distfn.expect(lambda x: 1, args=args)
    npt.assert_allclose(normalization_expect, 1.0, atol=atol, rtol=rtol,
                        err_msg=distname, verbose=True)

    _a, _b = distfn.support(*args)
    normalization_cdf = distfn.cdf(_b, *args)
    npt.assert_allclose(normalization_cdf, 1.0)
def check_moment(distfn, arg, m, v, msg):
    """Check the first two non-central moments of ``distfn`` against the
    known mean ``m`` and variance ``v``; infinite reference moments must
    be reported as infinite by ``moment`` as well."""
    m1 = distfn.moment(1, *arg)
    m2 = distfn.moment(2, *arg)
    if not np.isinf(m):
        npt.assert_almost_equal(m1, m, decimal=10, err_msg=msg +
                                ' - 1st moment')
    else:  # or np.isnan(m1),
        npt.assert_(np.isinf(m1),
                    msg + ' - 1st moment -infinite, m1=%s' % str(m1))

    if not np.isinf(v):
        # variance = E[X^2] - E[X]^2
        npt.assert_almost_equal(m2 - m1 * m1, v, decimal=10, err_msg=msg +
                                ' - 2ndt moment')
    else:  # or np.isnan(m2),
        npt.assert_(np.isinf(m2),
                    msg + ' - 2nd moment -infinite, m2=%s' % str(m2))
def check_mean_expect(distfn, arg, m, msg):
    """Check ``distfn.expect`` of the identity against the known mean ``m``.

    The check is skipped when ``m`` is not finite, since expect() cannot
    be compared against a non-finite moment.
    """
    if not np.isfinite(m):
        return
    expected_mean = distfn.expect(lambda x: x, arg)
    npt.assert_almost_equal(expected_mean, m, decimal=5,
                            err_msg=msg + ' - 1st moment (expect)')
def check_var_expect(distfn, arg, m, v, msg):
    """Check E[X^2] computed via ``expect`` against v + m**2 (skipped when
    the variance ``v`` is not finite)."""
    if np.isfinite(v):
        m2 = distfn.expect(lambda x: x*x, arg)
        npt.assert_almost_equal(m2, v + m*m, decimal=5, err_msg=msg +
                                ' - 2st moment (expect)')
def check_skew_expect(distfn, arg, m, v, s, msg):
    """Check the third central moment via ``expect`` against s * v**1.5;
    a non-finite reported skewness must be NaN."""
    if np.isfinite(s):
        m3e = distfn.expect(lambda x: np.power(x-m, 3), arg)
        npt.assert_almost_equal(m3e, s * np.power(v, 1.5),
                                decimal=5, err_msg=msg + ' - skew')
    else:
        npt.assert_(np.isnan(s))
def check_kurt_expect(distfn, arg, m, v, k, msg):
    """Check the fourth central moment via ``expect`` against the excess
    kurtosis ``k`` as (k + 3) * v**2; a non-finite ``k`` must be either
    +inf or NaN."""
    if np.isfinite(k):
        m4e = distfn.expect(lambda x: np.power(x-m, 4), arg)
        npt.assert_allclose(m4e, (k + 3.) * np.power(v, 2), atol=1e-5, rtol=1e-5,
                            err_msg=msg + ' - kurtosis')
    elif not np.isposinf(k):
        npt.assert_(np.isnan(k))
def check_entropy(distfn, arg, msg):
    """Check that the distribution's entropy evaluates to a non-NaN value."""
    is_nan = np.isnan(distfn.entropy(*arg))
    npt.assert_(not is_nan, msg + 'test Entropy is nan')
def check_private_entropy(distfn, args, superclass):
    """The distribution-specific ``_entropy`` must agree with the generic
    (numerical-integration) implementation on ``superclass``."""
    specific = distfn._entropy(*args)
    generic = superclass._entropy(distfn, *args)
    npt.assert_allclose(specific, generic)
def check_entropy_vect_scale(distfn, arg):
    """Vectorized entropy over an array of scales must match per-scalar
    evaluation, including casting from lists and nan for invalid scales."""
    # 2-d array of scales
    scales_2d = np.asarray([[1, 2], [3, 4]])
    vectorized = distfn.entropy(*arg, scale=scales_2d)
    elementwise = np.asarray(
        [distfn.entropy(*arg, scale=s) for s in scales_2d.ravel()]
    ).reshape(vectorized.shape)
    assert_allclose(vectorized, elementwise, atol=1e-14)
    # list input containing an invalid (negative) scale; also checks the cast
    scales_list = [1, 2, -3]
    vectorized = distfn.entropy(*arg, scale=scales_list)
    elementwise = np.asarray(
        [distfn.entropy(*arg, scale=s) for s in scales_list]
    ).reshape(vectorized.shape)
    assert_allclose(vectorized, elementwise, atol=1e-14)
def check_edge_support(distfn, args):
    """Boundary behaviour: x = a and x = b of the support must map exactly to
    the cdf/sf/ppf/isf limit values, and out-of-range probabilities to NaN."""
    lower, upper = distfn.support(*args)
    if isinstance(distfn, stats.rv_discrete):
        # evaluate just below the discrete support so that cdf == 0 there
        lower = lower - 1
    edges = lower, upper
    npt.assert_equal(distfn.cdf(edges, *args), [0.0, 1.0])
    npt.assert_equal(distfn.sf(edges, *args), [1.0, 0.0])
    if distfn.name not in ('skellam', 'dlaplace'):
        # with a = -inf, log(0) generates warnings
        npt.assert_equal(distfn.logcdf(edges, *args), [-np.inf, 0.0])
        npt.assert_equal(distfn.logsf(edges, *args), [0.0, -np.inf])
    npt.assert_equal(distfn.ppf([0.0, 1.0], *args), edges)
    npt.assert_equal(distfn.isf([0.0, 1.0], *args), edges[::-1])
    # probabilities outside [0, 1] must yield NaN from both isf and ppf
    for out_of_bounds in (distfn.isf([-1, 2], *args),
                          distfn.ppf([-1, 2], *args)):
        npt.assert_(np.isnan(out_of_bounds).all())
def check_named_args(distfn, x, shape_args, defaults, meths):
    """Check that every method in `meths` can be called with its shape
    parameters passed by keyword, and that the distribution's declared
    `shapes` string is consistent with the signature of `_parse_args`.

    Relies on the file-level helpers `_getargspec` and `assert_raises`.
    """
    ## Check calling w/ named arguments.
    # check consistency of shapes, numargs and _parse signature
    signature = _getargspec(distfn._parse_args)
    npt.assert_(signature.varargs is None)
    npt.assert_(signature.keywords is None)
    npt.assert_(list(signature.defaults) == list(defaults))
    shape_argnames = signature.args[:-len(defaults)]  # a, b, loc=0, scale=1
    if distfn.shapes:
        shapes_ = distfn.shapes.replace(',', ' ').split()
    else:
        # no shape parameters; '' has len 0, matching numargs == 0 below
        shapes_ = ''
    npt.assert_(len(shapes_) == distfn.numargs)
    npt.assert_(len(shapes_) == len(shape_argnames))
    # check calling w/ named arguments
    shape_args = list(shape_args)
    vals = [meth(x, *shape_args) for meth in meths]
    npt.assert_(np.all(np.isfinite(vals)))
    # Progressively move trailing positional shape args into keywords and
    # verify the results never change.
    names, a, k = shape_argnames[:], shape_args[:], {}
    while names:
        k.update({names.pop(): a.pop()})
        v = [meth(x, *a, **k) for meth in meths]
        npt.assert_array_equal(vals, v)
        if 'n' not in k.keys():
            # `n` is first parameter of moment(), so can't be used as named arg
            npt.assert_equal(distfn.moment(1, *a, **k),
                             distfn.moment(1, *shape_args))
    # unknown arguments should not go through:
    k.update({'kaboom': 42})
    assert_raises(TypeError, distfn.cdf, x, **k)
def check_random_state_property(distfn, args):
    """``random_state`` on a distribution *instance* must control the rvs
    stream, and a per-call ``random_state`` must override it without
    clobbering the instance-level one.

    This test fiddles with distfn.random_state, which would break other
    tests; it is therefore saved up front and restored at the end.
    """
    saved_state = distfn.random_state
    # Baseline: random_state=None falls back to the global numpy state.
    np.random.seed(1234)
    distfn.random_state = None
    baseline = distfn.rvs(*args, size=8)
    # An integer seed on the instance must reproduce the same stream.
    distfn.random_state = 1234
    npt.assert_equal(baseline, distfn.rvs(*args, size=8))
    # So must an explicit RandomState object.
    distfn.random_state = np.random.RandomState(1234)
    npt.assert_equal(baseline, distfn.rvs(*args, size=8))
    # A random_state passed to .rvs() overrides the instance-level one ...
    distfn.random_state = 2
    orig_state = distfn.random_state.get_state()
    override = distfn.rvs(*args, size=8,
                          random_state=np.random.RandomState(1234))
    npt.assert_equal(baseline, override)
    # ... and must leave the instance-level state untouched.
    npt.assert_equal(distfn.random_state.get_state(), orig_state)
    # finally, restore the random_state
    distfn.random_state = saved_state
def check_meth_dtype(distfn, arg, meths):
    """Methods evaluated at int/float16/float32/float64 inputs must always
    return float64 results.

    Fix: ``np.float_`` was removed in NumPy 2.0; it was an alias for
    ``np.float64``, which is used here instead (identical behaviour on
    NumPy 1.x).
    """
    q0 = [0.25, 0.5, 0.75]
    x0 = distfn.ppf(q0, *arg)
    x_cast = [x0.astype(tp) for tp in
              (np.int_, np.float16, np.float32, np.float64)]
    for x in x_cast:
        # casting may have clipped the values, exclude those
        distfn._argcheck(*arg)
        _a, _b = distfn.support(*arg)
        x = x[(_a < x) & (x < _b)]
        for meth in meths:
            val = meth(x, *arg)
            npt.assert_(val.dtype == np.float64)
def check_ppf_dtype(distfn, arg):
    """ppf/isf evaluated at float16/float32/float64 quantiles must always
    return float64 results.

    Fix: ``np.float_`` was removed in NumPy 2.0; it was an alias for
    ``np.float64``, which is used here instead (identical behaviour on
    NumPy 1.x).
    """
    q0 = np.asarray([0.25, 0.5, 0.75])
    q_cast = [q0.astype(tp) for tp in (np.float16, np.float32, np.float64)]
    for q in q_cast:
        for meth in [distfn.ppf, distfn.isf]:
            val = meth(q, *arg)
            npt.assert_(val.dtype == np.float64)
def check_cmplx_deriv(distfn, arg):
    # Distributions allow complex arguments.
    """Use complex-step differentiation to verify that the analytic
    relationships between pdf/cdf/sf and their logs hold:
    d/dx cdf = pdf, d/dx logcdf = pdf/cdf, d/dx sf = -pdf, etc.

    Only works for distributions whose methods accept complex input.
    """
    def deriv(f, x, *arg):
        # complex-step derivative: imag(f(x + ih)) / h, h tiny
        x = np.asarray(x)
        h = 1e-10
        return (f(x + h*1j, *arg)/h).imag
    x0 = distfn.ppf([0.25, 0.51, 0.75], *arg)
    x_cast = [x0.astype(tp) for tp in
              (np.int_, np.float16, np.float32, np.float64)]
    for x in x_cast:
        # casting may have clipped the values, exclude those
        distfn._argcheck(*arg)
        _a, _b = distfn.support(*arg)
        x = x[(_a < x) & (x < _b)]
        pdf, cdf, sf = distfn.pdf(x, *arg), distfn.cdf(x, *arg), distfn.sf(x, *arg)
        assert_allclose(deriv(distfn.cdf, x, *arg), pdf, rtol=1e-5)
        assert_allclose(deriv(distfn.logcdf, x, *arg), pdf/cdf, rtol=1e-5)
        assert_allclose(deriv(distfn.sf, x, *arg), -pdf, rtol=1e-5)
        assert_allclose(deriv(distfn.logsf, x, *arg), -pdf/sf, rtol=1e-5)
        # d/dx log(pdf) must equal (d/dx pdf) / pdf
        assert_allclose(deriv(distfn.logpdf, x, *arg),
                        deriv(distfn.pdf, x, *arg) / distfn.pdf(x, *arg),
                        rtol=1e-5)
def check_pickling(distfn, args):
    """A distribution instance must survive a pickle round-trip, with
    special attention paid to the random_state property.

    The instance's random_state is saved up front and restored at the end.
    """
    saved_state = distfn.random_state
    distfn.random_state = 1234
    distfn.rvs(*args, size=8)  # advance the stream before pickling
    pickled = pickle.dumps(distfn)
    r0 = distfn.rvs(*args, size=8)
    clone = pickle.loads(pickled)
    # the unpickled copy must continue the stream identically
    npt.assert_equal(r0, clone.rvs(*args, size=8))
    # also smoke test some methods
    median_orig = distfn.ppf(0.5, *args)
    median_clone = clone.ppf(0.5, *args)
    npt.assert_equal(median_orig, median_clone)
    npt.assert_equal(distfn.cdf(median_orig, *args),
                     clone.cdf(median_clone, *args))
    # restore the random_state
    distfn.random_state = saved_state
def check_rvs_broadcast(distfunc, distname, allargs, shape, shape_only, otype):
    """rvs() must broadcast its arguments to `shape`; unless `shape_only`,
    the broadcast sample must equal an elementwise (vectorized) draw made
    from the same seed."""
    np.random.seed(123)
    with suppress_warnings() as sup:
        # frechet_l and frechet_r are deprecated, so all their
        # methods generate DeprecationWarnings.
        sup.filter(category=DeprecationWarning, message=".*frechet_")
        sample = distfunc.rvs(*allargs)
        assert_equal(sample.shape, shape,
                     "%s: rvs failed to broadcast" % distname)
        if shape_only:
            return
        vectorized_rvs = np.vectorize(lambda *a: distfunc.rvs(*a),
                                      otypes=otype)
        np.random.seed(123)
        assert_allclose(sample, vectorized_rvs(*allargs), rtol=1e-15)
| bsd-3-clause |
mbrubeck/servo | tests/wpt/web-platform-tests/css/vendor-imports/mozilla/mozilla-central-reftests/text-decor-3/support/generate-text-emphasis-ruby-tests.py | 829 | 3042 | #!/usr/bin/env python
# - * - coding: UTF-8 - * -
"""
This script generates tests text-emphasis-ruby-001 ~ 004 which tests
emphasis marks with ruby in four directions. It outputs a list of all
tests it generated in the format of Mozilla reftest.list to the stdout.
"""
from __future__ import unicode_literals
# Filename patterns: test files take a 3-digit index plus a variant suffix,
# reference files take the index only.
TEST_FILE = 'text-emphasis-ruby-{:03}{}.html'
# HTML template for a generated test page; placeholders: wm (writing-mode),
# pos (human-readable position), index, ruby_pos, posval.
TEST_TEMPLATE = '''<!DOCTYPE html>
<meta charset="utf-8">
<title>CSS Test: text-emphasis and ruby, {wm}, {pos}</title>
<link rel="author" title="Xidorn Quan" href="https://www.upsuper.org">
<link rel="author" title="Mozilla" href="https://www.mozilla.org">
<link rel="help" href="https://drafts.csswg.org/css-text-decor-3/#text-emphasis-position-property">
<meta name="assert" content="emphasis marks are drawn outside the ruby">
<link rel="match" href="text-emphasis-ruby-{index:03}-ref.html">
<p>Pass if the emphasis marks are outside the ruby:</p>
<div style="line-height: 5; writing-mode: {wm}; ruby-position: {ruby_pos}; text-emphasis-position: {posval}">ルビ<span style="text-emphasis: circle">と<ruby>圏<rt>けん</rt>点<rt>てん</rt></ruby>を</span>同時</div>
'''
REF_FILE = 'text-emphasis-ruby-{:03}-ref.html'
# Reference page: emphasis marks are simulated with extra <rtc>●</rtc> ruby
# annotations instead of text-emphasis.  Doubled braces {{ }} are literal
# braces for str.format.
REF_TEMPLATE = '''<!DOCTYPE html>
<meta charset="utf-8">
<title>CSS Reference: text-emphasis and ruby, {wm}, {pos}</title>
<link rel="author" title="Xidorn Quan" href="https://www.upsuper.org">
<link rel="author" title="Mozilla" href="https://www.mozilla.org">
<style> rtc {{ font-variant-east-asian: inherit; }} </style>
<p>Pass if the emphasis marks are outside the ruby:</p>
<div style="line-height: 5; writing-mode: {wm}; ruby-position: {posval}">ルビ<ruby>と<rtc>●</rtc>圏<rt>けん</rt><rtc>●</rtc>点<rt>てん</rt><rtc>●</rtc>を<rtc>●</rtc></ruby>同時</div>
'''
# One entry per emphasis position:
# (position label, reference writing-mode, ruby-position,
#  [(test writing-mode, text-emphasis-position value), ...])
TEST_CASES = [
    ('top', 'horizontal-tb', 'over', [
        ('horizontal-tb', 'over right')]),
    ('bottom', 'horizontal-tb', 'under', [
        ('horizontal-tb', 'under right')]),
    ('right', 'vertical-rl', 'over', [
        ('vertical-rl', 'over right'),
        ('vertical-lr', 'over right')]),
    ('left', 'vertical-rl', 'under', [
        ('vertical-rl', 'over left'),
        ('vertical-lr', 'over left')]),
]
# Variant suffixes appended to the test index, consumed in order per case.
SUFFIXES = ['', 'a']
def write_file(filename, content):
    """Write *content* to *filename*, encoded as UTF-8."""
    encoded = content.encode('UTF-8')
    with open(filename, 'wb') as out:
        out.write(encoded)
# Script body: for every test case write one reference file plus one test
# file per writing-mode subtest, and echo a Mozilla reftest.list manifest
# to stdout (the stdout ordering is part of the generated product).
print("# START tests from {}".format(__file__))
idx = 0
for pos, ref_wm, ruby_pos, subtests in TEST_CASES:
    idx += 1
    ref_file = REF_FILE.format(idx)
    ref_content = REF_TEMPLATE.format(pos=pos, wm=ref_wm, posval=ruby_pos)
    write_file(ref_file, ref_content)
    # variant suffixes ('' then 'a') are consumed in order per test case
    suffix = iter(SUFFIXES)
    for wm, posval in subtests:
        test_file = TEST_FILE.format(idx, next(suffix))
        test_content = TEST_TEMPLATE.format(
            wm=wm, pos=pos, index=idx, ruby_pos=ruby_pos, posval=posval)
        write_file(test_file, test_content)
        # one manifest line per generated test/reference pair
        print("== {} {}".format(test_file, ref_file))
print("# END tests from {}".format(__file__))
| mpl-2.0 |
HLFH/CouchPotatoServer | libs/caper/constraint.py | 81 | 4048 | # Copyright 2013 Dean Gardiner <gardiner91@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class CaptureConstraint(object):
    """A single matching constraint attached to a CaptureGroup.

    The constraint type selects a ``constraint_<type>`` method at execute()
    time; keyword arguments of the form ``name__method`` are compiled into
    (name, comparator, value) triples used by ``constraint_match``.
    """
    def __init__(self, capture_group, constraint_type, comparisons=None, target=None, **kwargs):
        """Capture constraint object

        :type capture_group: CaptureGroup
        :param constraint_type: suffix selecting a constraint_* method
        :param comparisons: pre-built (name, comparator, value) triples
        :param kwargs: ``name__method=value`` pairs are turned into
            comparisons via ``constraint_match_<method>``; anything else is
            stored verbatim in self.kwargs
        """
        self.capture_group = capture_group
        self.constraint_type = constraint_type
        self.target = target
        self.comparisons = comparisons if comparisons else []
        self.kwargs = {}
        for orig_key, value in kwargs.items():
            # split "name__method"; keys without exactly one '__' separator
            # are plain options, not comparisons
            key = orig_key.split('__')
            if len(key) != 2:
                self.kwargs[orig_key] = value
                continue
            name, method = key
            method = 'constraint_match_' + method
            if not hasattr(self, method):
                # unknown comparator suffix: treat as a plain option too
                self.kwargs[orig_key] = value
                continue
            self.comparisons.append((name, getattr(self, method), value))
    def execute(self, parent_node, node, **kwargs):
        """Dispatch to constraint_<constraint_type>; raises ValueError for
        an unknown constraint type."""
        func_name = 'constraint_%s' % self.constraint_type
        if hasattr(self, func_name):
            return getattr(self, func_name)(parent_node, node, **kwargs)
        raise ValueError('Unknown constraint type "%s"' % self.constraint_type)
    #
    # Node Matching
    #
    def constraint_match(self, parent_node, node):
        """Run every comparison against *node*.

        Returns (average weight, all comparisons succeeded); an empty
        comparison list yields (0.0, False) — the `or 1` guards the
        division by zero.
        """
        results = []
        total_weight = 0
        for name, method, argument in self.comparisons:
            weight, success = method(node, name, argument)
            total_weight += weight
            results.append(success)
        return total_weight / (float(len(results)) or 1), all(results) if len(results) > 0 else False
    def constraint_match_eq(self, node, name, expected):
        """Comparator: node attribute equals *expected* (weight always 1.0;
        a missing attribute counts as failure)."""
        if not hasattr(node, name):
            return 1.0, False
        return 1.0, getattr(node, name) == expected
    def constraint_match_re(self, node, name, arg):
        """Comparator: fragment/regex/value match, selected by *name* and the
        type of *arg*; returns (weight, success)."""
        # Node match
        if name == 'node':
            # arg may be (group, minimum_weight) or just the group
            group, minimum_weight = arg if type(arg) is tuple and len(arg) > 1 else (arg, 0)
            weight, match, num_fragments = self.capture_group.parser.matcher.fragment_match(node, group)
            return weight, weight > minimum_weight
        # Regex match
        # NOTE(review): compiled patterns are named 'SRE_Pattern' only on
        # Python 2; on Python 3.7+ they are `re.Pattern`, so this branch
        # would never trigger there — confirm the target runtime.
        if type(arg).__name__ == 'SRE_Pattern':
            return 1.0, arg.match(getattr(node, name)) is not None
        # Value match
        if hasattr(node, name):
            match = self.capture_group.parser.matcher.value_match(getattr(node, name), arg, single=True)
            return 1.0, match is not None
        raise ValueError("Unknown constraint match type '%s'" % name)
    #
    # Result
    #
    def constraint_result(self, parent_node, fragment):
        """Succeed iff the parent node already captured tag self.kwargs['tag']
        (and, when given, key self.kwargs['key'] is present in that result)."""
        ctag = self.kwargs.get('tag')
        if not ctag:
            return 0, False
        ckey = self.kwargs.get('key')
        for tag, result in parent_node.captured():
            if tag != ctag:
                continue
            if not ckey or ckey in result.keys():
                return 1.0, True
        return 0.0, False
    #
    # Failure
    #
    def constraint_failure(self, parent_node, fragment, match):
        """Succeed when the supplied match is absent or unsuccessful."""
        if not match or not match.success:
            return 1.0, True
        return 0, False
    #
    # Success
    #
    def constraint_success(self, parent_node, fragment, match):
        """Succeed when the supplied match exists and succeeded."""
        if match and match.success:
            return 1.0, True
        return 0, False
    def __repr__(self):
        return "CaptureConstraint(comparisons=%s)" % repr(self.comparisons)
| gpl-3.0 |
mmmavis/lightbeam-bedrock-website | vendor-local/lib/python/south/db/generic.py | 10 | 45531 | from __future__ import print_function
import re
import sys
from django.core.management.color import no_style
from django.db import transaction, models
from django.db.utils import DatabaseError
from django.db.backends.util import truncate_name
from django.db.backends.creation import BaseDatabaseCreation
from django.db.models.fields import NOT_PROVIDED
from django.dispatch import dispatcher
from django.conf import settings
from django.utils.datastructures import SortedDict
try:
    from django.utils.functional import cached_property
except ImportError:
    # Fallback for Django versions that do not ship cached_property:
    # a minimal descriptor-based re-implementation.
    class cached_property(object):
        """
        Decorator that converts a method with a single
        self argument into a property cached on the instance.
        """
        def __init__(self, func):
            self.func = func
        def __get__(self, instance, type):
            # Store the computed value in the instance __dict__ under the
            # function's name; later lookups find it there and bypass this
            # descriptor entirely.
            res = instance.__dict__[self.func.__name__] = self.func(instance)
            return res
from south.logger import get_logger
from south.utils.py3 import string_types, text_type
def alias(attrname):
    """
    Build a method that forwards every call to the method named *attrname*
    on the same instance.  Used instead of plain assignment (``foo = bar``)
    so that subclass overrides of *attrname* are honoured.
    """
    def func(self, *args, **kwds):
        bound = getattr(self, attrname)
        return bound(*args, **kwds)
    return func
def invalidate_table_constraints(func):
    """Decorator for DDL methods whose first argument is a table name:
    marks that table's constraint-cache entry stale before running the
    wrapped operation (which may change the table's constraints)."""
    def _cache_clear(self, table, *args, **opts):
        # flag the cache entry as stale, then perform the operation
        self._set_cache(table, value=INVALID)
        result = func(self, table, *args, **opts)
        return result
    return _cache_clear
def delete_column_constraints(func):
    """Decorator for column-dropping methods: empties the cached constraint
    list of the removed column before running the wrapped operation."""
    def _column_rm(self, table, column, *args, **opts):
        # a dropped column can no longer carry constraints
        self._set_cache(table, column, value=[])
        outcome = func(self, table, column, *args, **opts)
        return outcome
    return _column_rm
def copy_column_constraints(func):
    """Decorator for column-renaming methods: copies the cached constraints
    from the old column name to the new one before running the wrapped
    operation."""
    def _column_cp(self, table, column_old, column_new, *args, **opts):
        db_name = self._get_setting('NAME')
        existing = self.lookup_constraint(db_name, table, column_old)
        self._set_cache(table, column_new, value=existing)
        return func(self, table, column_old, column_new, *args, **opts)
    return _column_cp
class INVALID(Exception):
    # Sentinel for the constraint cache: a cache entry equal to the INVALID
    # class means "stale, must be refreshed".  It subclasses Exception so
    # that cache lookups can also `raise INVALID` to signal that state
    # (see lookup_constraint below).
    def __repr__(self):
        return 'INVALID'
class DryRunError(ValueError):
    # Raised when an operation needs real database introspection but the
    # migration is running in dry-run mode (no queries may be issued).
    pass
class DatabaseOperations(object):
    """
    Generic SQL implementation of the DatabaseOperations.
    Some of this code comes from Django Evolution.

    Backend-specific subclasses override the SQL templates and feature
    flags below where their dialect differs from these (PostgreSQL-
    flavoured) defaults.
    """
    # DDL statement templates (%(...)s placeholders filled via dict
    # interpolation, bare %s positionally)
    alter_string_set_type = 'ALTER COLUMN %(column)s TYPE %(type)s'
    alter_string_set_null = 'ALTER COLUMN %(column)s DROP NOT NULL'
    alter_string_drop_null = 'ALTER COLUMN %(column)s SET NOT NULL'
    delete_check_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
    add_column_string = 'ALTER TABLE %s ADD COLUMN %s;'
    delete_unique_sql = "ALTER TABLE %s DROP CONSTRAINT %s"
    delete_foreign_key_sql = 'ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s'
    create_table_sql = 'CREATE TABLE %(table)s (%(columns)s)'
    # identifier length cap applied when generating index names
    max_index_name_length = 63
    drop_index_string = 'DROP INDEX %(index_name)s'
    delete_column_string = 'ALTER TABLE %s DROP COLUMN %s CASCADE;'
    create_primary_key_string = "ALTER TABLE %(table)s ADD CONSTRAINT %(constraint)s PRIMARY KEY (%(columns)s)"
    delete_primary_key_sql = "ALTER TABLE %(table)s DROP CONSTRAINT %(constraint)s"
    add_check_constraint_fragment = "ADD CONSTRAINT %(constraint)s CHECK (%(check)s)"
    rename_table_sql = "ALTER TABLE %s RENAME TO %s;"
    backend_name = None
    default_schema_name = "public"
    # Features
    allows_combined_alters = True
    supports_foreign_keys = True
    has_check_constraints = True
    has_booleans = True
    raises_default_errors = True
    @cached_property
    def has_ddl_transactions(self):
        """
        Tests the database using feature detection to see if it has
        transactional DDL support.

        Strategy: CREATE a scratch table inside a transaction, roll the
        transaction back, then try to CREATE it again.  If the second
        CREATE fails ("already exists"), the rollback did not undo the
        DDL, i.e. DDL is not transactional.  Cached per instance.
        """
        self._possibly_initialise()
        connection = self._get_connection()
        if hasattr(connection.features, "confirm") and not connection.features._confirmed:
            connection.features.confirm()
        # Django 1.3's MySQLdb backend doesn't raise DatabaseError
        exceptions = (DatabaseError, )
        try:
            from MySQLdb import OperationalError
            exceptions += (OperationalError, )
        except ImportError:
            pass
        # Now do the test
        if getattr(connection.features, 'supports_transactions', True):
            cursor = connection.cursor()
            self.start_transaction()
            cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)')
            self.rollback_transaction()
            try:
                try:
                    cursor.execute('CREATE TABLE DDL_TRANSACTION_TEST (X INT)')
                except exceptions:
                    return False
                else:
                    return True
            finally:
                # always clean up the scratch table, whichever way it got created
                cursor.execute('DROP TABLE DDL_TRANSACTION_TEST')
        else:
            return False
    def __init__(self, db_alias):
        """Create operations bound to the Django database alias *db_alias*."""
        self.debug = False                  # echo SQL to stdout when True
        self.deferred_sql = []              # statements queued for execute_deferred_sql()
        self.dry_run = False                # when True, execute() is a no-op
        self.pending_transactions = 0
        self.pending_create_signals = []    # models awaiting post-create signals
        self.db_alias = db_alias
        self._constraint_cache = {}         # {db_name: {table: {column: set((kind, name))}}}
        self._initialised = False           # connection_init() not run yet
    def lookup_constraint(self, db_name, table_name, column_name=None):
        """ return a set() of constraints for db_name.table_name.column_name

        With column_name=None, returns [(column, constraints), ...] for the
        whole table.  Stale (INVALID) or missing cache entries trigger a
        refill from the database, then a single recursive retry.
        """
        def _lookup():
            table = self._constraint_cache[db_name][table_name]
            if table is INVALID:
                # entry exists but is stale; raised and handled below
                raise INVALID
            elif column_name is None:
                return list(table.items())
            else:
                return table[column_name]
        try:
            ret = _lookup()
            return ret
        except INVALID:
            # drop the stale entry and re-introspect the table
            del self._constraint_cache[db_name][table_name]
            self._fill_constraint_cache(db_name, table_name)
        except KeyError:
            if self._is_valid_cache(db_name, table_name):
                # table cached and valid, column simply has no constraints
                return []
            self._fill_constraint_cache(db_name, table_name)
        # cache was refilled above; retry once via recursion
        return self.lookup_constraint(db_name, table_name, column_name)
def _set_cache(self, table_name, column_name=None, value=INVALID):
db_name = self._get_setting('NAME')
try:
if column_name is not None:
self._constraint_cache[db_name][table_name][column_name] = value
else:
self._constraint_cache[db_name][table_name] = value
except (LookupError, TypeError):
pass
def _is_valid_cache(self, db_name, table_name):
# we cache per-table so if the table is there it is valid
try:
return self._constraint_cache[db_name][table_name] is not INVALID
except KeyError:
return False
def _is_multidb(self):
try:
from django.db import connections
connections # Prevents "unused import" warning
except ImportError:
return False
else:
return True
def _get_connection(self):
"""
Returns a django connection for a given DB Alias
"""
if self._is_multidb():
from django.db import connections
return connections[self.db_alias]
else:
from django.db import connection
return connection
def _get_setting(self, setting_name):
"""
Allows code to get a setting (like, for example, STORAGE_ENGINE)
"""
setting_name = setting_name.upper()
connection = self._get_connection()
if self._is_multidb():
# Django 1.2 and above
return connection.settings_dict[setting_name]
else:
# Django 1.1 and below
return getattr(settings, "DATABASE_%s" % setting_name)
def _has_setting(self, setting_name):
"""
Existence-checking version of _get_setting.
"""
try:
self._get_setting(setting_name)
except (KeyError, AttributeError):
return False
else:
return True
def _get_schema_name(self):
try:
return self._get_setting('schema')
except (KeyError, AttributeError):
return self.default_schema_name
def _possibly_initialise(self):
if not self._initialised:
self.connection_init()
self._initialised = True
    def connection_init(self):
        """
        Run before any SQL to let database-specific config be sent as a command,
        e.g. which storage engine (MySQL) or transaction serialisability level.

        No-op here; backend subclasses override as needed.
        """
        pass
def quote_name(self, name):
"""
Uses the database backend to quote the given table/column name.
"""
return self._get_connection().ops.quote_name(name)
def _print_sql_error(self, e, sql, params=[]):
print('FATAL ERROR - The following SQL query failed: %s' % sql, file=sys.stderr)
print('The error was: %s' % e, file=sys.stderr)
    def execute(self, sql, params=[], print_all_errors=True):
        """
        Executes the given SQL statement, with optional parameters.
        If the instance's debug attribute is True, prints out what it executes.

        Returns the fetched rows, or [] for statements that produce none
        (and always [] under dry_run, where nothing is executed).

        NOTE(review): the mutable default `params=[]` is shared across
        calls, and the bare `except:` around fetchall() swallows everything
        — both are long-standing quirks kept for compatibility.
        """
        self._possibly_initialise()
        cursor = self._get_connection().cursor()
        if self.debug:
            print(" = %s" % sql, params)
        if self.dry_run:
            return []
        get_logger().debug(text_type('execute "%s" with params "%s"' % (sql, params)))
        try:
            cursor.execute(sql, params)
        except DatabaseError as e:
            if print_all_errors:
                self._print_sql_error(e, sql, params)
            raise
        try:
            return cursor.fetchall()
        except:
            # some backends raise when fetching from a non-SELECT cursor
            return []
    def execute_many(self, sql, regex=r"(?mx) ([^';]* (?:'[^']*'[^';]*)*)", comment_regex=r"(?mx) (?:^\s*$)|(?:--.*$)"):
        """
        Takes a SQL file and executes it as many separate statements.
        (Some backends, such as Postgres, don't work otherwise.)

        `regex` splits on semicolons while respecting single-quoted strings;
        `comment_regex` strips blank lines and `--` comments first.
        """
        # Be warned: This function is full of dark magic. Make sure you really
        # know regexes before trying to edit it.
        # First, strip comments
        # (the %% escaping protects literal % signs from later interpolation)
        sql = "\n".join([x.strip().replace("%", "%%") for x in re.split(comment_regex, sql) if x.strip()])
        # Now execute each statement
        # (re.split with a capturing group interleaves separators; [1:][::2]
        # picks out the captured statement chunks)
        for st in re.split(regex, sql)[1:][::2]:
            self.execute(st)
    def add_deferred_sql(self, sql):
        """
        Add a SQL statement to the deferred list, that won't be executed until
        this instance's execute_deferred_sql method is run.
        """
        self.deferred_sql.append(sql)
def execute_deferred_sql(self):
"""
Executes all deferred SQL, resetting the deferred_sql list
"""
for sql in self.deferred_sql:
self.execute(sql)
self.deferred_sql = []
def clear_deferred_sql(self):
"""
Resets the deferred_sql list to empty.
"""
self.deferred_sql = []
def clear_run_data(self, pending_creates = None):
"""
Resets variables to how they should be before a run. Used for dry runs.
If you want, pass in an old panding_creates to reset to.
"""
self.clear_deferred_sql()
self.pending_create_signals = pending_creates or []
    def get_pending_creates(self):
        # Snapshot of models whose post-create signals have not fired yet;
        # pass it back into clear_run_data() to restore this state.
        return self.pending_create_signals
    @invalidate_table_constraints
    def create_table(self, table_name, fields):
        """
        Creates the table 'table_name'. 'fields' is a tuple of fields,
        each repsented by a 2-part tuple of field name and a
        django.db.models.fields.Field object
        """
        if len(table_name) > 63:
            print("   ! WARNING: You have a table name longer than 63 characters; this will not fully work on PostgreSQL or MySQL.")
        # avoid default values in CREATE TABLE statements (#925)
        for field_name, field in fields:
            field._suppress_default = True
        columns = [
            self.column_sql(table_name, field_name, field)
            for field_name, field in fields
        ]
        self.execute(self.create_table_sql % {
            "table": self.quote_name(table_name),
            # column_sql may return a falsy value for typeless fields; drop those
            "columns": ', '.join([col for col in columns if col]),
        })
    add_table = alias('create_table')  # Alias for consistency's sake
    @invalidate_table_constraints
    def rename_table(self, old_table_name, table_name):
        """
        Renames the table 'old_table_name' to 'table_name'.
        """
        if old_table_name == table_name:
            # Short-circuit out.
            return
        params = (self.quote_name(old_table_name), self.quote_name(table_name))
        self.execute(self.rename_table_sql % params)
        # Invalidate the not-yet-indexed table
        # (the decorator only invalidated the *old* name)
        self._set_cache(table_name, value=INVALID)
    @invalidate_table_constraints
    def delete_table(self, table_name, cascade=True):
        """
        Deletes the table 'table_name'.  With cascade=True (the default),
        dependent objects are dropped along with it.
        """
        params = (self.quote_name(table_name), )
        if cascade:
            self.execute('DROP TABLE %s CASCADE;' % params)
        else:
            self.execute('DROP TABLE %s;' % params)
    drop_table = alias('delete_table')
    @invalidate_table_constraints
    def clear_table(self, table_name):
        """
        Deletes all rows from 'table_name' (the table itself is kept).
        """
        params = (self.quote_name(table_name), )
        self.execute('DELETE FROM %s;' % params)
    @invalidate_table_constraints
    def add_column(self, table_name, name, field, keep_default=True):
        """
        Adds the column 'name' to the table 'table_name'.
        Uses the 'field' parameter, a django.db.models.fields.Field instance,
        to generate the necessary sql

        @param table_name: The name of the table to add the column to
        @param name: The name of the column to add
        @param field: The field to use

        NOTE(review): `keep_default` is accepted but never consulted — the
        column default is always dropped again after creation; confirm
        callers don't rely on it.
        """
        sql = self.column_sql(table_name, name, field)
        if sql:
            params = (
                self.quote_name(table_name),
                sql,
            )
            sql = self.add_column_string % params
            self.execute(sql)
            # Now, drop the default if we need to
            # (the default was only needed to populate existing rows)
            if field.default is not None:
                field.default = NOT_PROVIDED
                self.alter_column(table_name, name, field, explicit_name=False, ignore_constraints=True)
    def _db_type_for_alter_column(self, field):
        """
        Returns a field's type suitable for ALTER COLUMN.
        By default it just returns field.db_type().
        To be overriden by backend specific subclasses
        @param field: The field to generate type for
        """
        try:
            # Django >= 1.2 requires the connection argument
            return field.db_type(connection=self._get_connection())
        except TypeError:
            # older Django: db_type() takes no arguments
            return field.db_type()
    def _alter_add_column_mods(self, field, name, params, sqls):
        """
        Subcommand of alter_column that modifies column definitions beyond
        the type string -- e.g. adding constraints where they cannot be specified
        as part of the type (overrideable)

        No-op in the generic backend; subclasses append to `sqls` in place.
        """
        pass
    def _alter_set_defaults(self, field, name, params, sqls):
        "Subcommand of alter_column that sets default values (overrideable)"
        # Next, set any default
        if not field.null and field.has_default():
            default = field.get_db_prep_save(field.get_default(), connection=self._get_connection())
            sqls.append(('ALTER COLUMN %s SET DEFAULT %%s ' % (self.quote_name(name),), [default]))
        else:
            # nullable or defaultless columns get any existing default removed
            sqls.append(('ALTER COLUMN %s DROP DEFAULT' % (self.quote_name(name),), []))
    def _update_nulls_to_default(self, params, field):
        "Subcommand of alter_column that updates nulls to default value (overrideable)"
        default = field.get_db_prep_save(field.get_default(), connection=self._get_connection())
        # backfill existing NULL rows before a NOT NULL constraint is applied
        self.execute('UPDATE %(table_name)s SET %(column)s=%%s WHERE %(column)s IS NULL' % params, [default])
    @invalidate_table_constraints
    def alter_column(self, table_name, name, field, explicit_name=True, ignore_constraints=False):
        """
        Alters the given column name so it will match the given field.
        Note that conversion between the two by the database must be possible.
        Will not automatically add _id by default; to have this behaviour, pass
        explicit_name=False.

        @param table_name: The name of the table to add the column to
        @param name: The name of the column to alter
        @param field: The new field definition to use
        """
        if self.dry_run:
            if self.debug:
                print(' - no dry run output for alter_column() due to dynamic DDL, sorry')
            return
        # hook for the field to do any resolution prior to it's attributes being queried
        if hasattr(field, 'south_init'):
            field.south_init()
        # Add _id or whatever if we need to
        field.set_attributes_from_name(name)
        if not explicit_name:
            name = field.column
        else:
            field.column = name
        if not ignore_constraints:
            # Drop all check constraints. Note that constraints will be added back
            # with self.alter_string_set_type and self.alter_string_drop_null.
            if self.has_check_constraints:
                check_constraints = self._constraints_affecting_columns(table_name, [name], "CHECK")
                for constraint in check_constraints:
                    self.execute(self.delete_check_sql % {
                        'table': self.quote_name(table_name),
                        'constraint': self.quote_name(constraint),
                    })
            # Drop all foreign key constraints
            try:
                self.delete_foreign_key(table_name, name)
            except ValueError:
                # There weren't any
                pass
        # First, change the type
        params = {
            "column": self.quote_name(name),
            "type": self._db_type_for_alter_column(field),
            "table_name": self.quote_name(table_name)
        }
        # SQLs is a list of (SQL, values) pairs.
        sqls = []
        # Only alter the column if it has a type (Geometry ones sometimes don't)
        if params["type"] is not None:
            sqls.append((self.alter_string_set_type % params, []))
        # Add any field- and backend- specific modifications
        self._alter_add_column_mods(field, name, params, sqls)
        # Next, nullity
        if field.null or field.has_default():
            sqls.append((self.alter_string_set_null % params, []))
        else:
            sqls.append((self.alter_string_drop_null % params, []))
        # Do defaults
        self._alter_set_defaults(field, name, params, sqls)
        # Actually change the column (step 1 -- Nullity may need to be fixed)
        if self.allows_combined_alters:
            # one combined ALTER TABLE with every sub-clause joined by commas
            sqls, values = zip(*sqls)
            self.execute(
                "ALTER TABLE %s %s;" % (self.quote_name(table_name), ", ".join(sqls)),
                flatten(values),
            )
        else:
            # Databases like e.g. MySQL don't like more than one alter at once.
            for sql, values in sqls:
                self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), sql), values)
        if not field.null and field.has_default():
            # Final fixes: backfill NULLs with the default, then enforce NOT NULL
            self._update_nulls_to_default(params, field)
            self.execute("ALTER TABLE %s %s;" % (self.quote_name(table_name), self.alter_string_drop_null % params), [])
        if not ignore_constraints:
            # Add back FK constraints if needed
            if field.rel and self.supports_foreign_keys:
                self.execute(
                    self.foreign_key_sql(
                        table_name,
                        field.column,
                        field.rel.to._meta.db_table,
                        field.rel.to._meta.get_field(field.rel.field_name).column
                    )
                )
    def _fill_constraint_cache(self, db_name, table_name):
        """Populate the constraint cache for one table by querying the
        information_schema views (both constraint_column_usage and
        key_column_usage, so CHECK/UNIQUE/PK/FK all get picked up)."""
        schema = self._get_schema_name()
        ifsc_tables = ["constraint_column_usage", "key_column_usage"]
        self._constraint_cache.setdefault(db_name, {})
        self._constraint_cache[db_name][table_name] = {}
        for ifsc_table in ifsc_tables:
            # %s is the view name interpolated directly; %%s are real query
            # parameters (schema, table) bound by the driver
            rows = self.execute("""
                SELECT kc.constraint_name, kc.column_name, c.constraint_type
                FROM information_schema.%s AS kc
                JOIN information_schema.table_constraints AS c ON
                    kc.table_schema = c.table_schema AND
                    kc.table_name = c.table_name AND
                    kc.constraint_name = c.constraint_name
                WHERE
                    kc.table_schema = %%s AND
                    kc.table_name = %%s
            """ % ifsc_table, [schema, table_name])
            for constraint, column, kind in rows:
                self._constraint_cache[db_name][table_name].setdefault(column, set())
                self._constraint_cache[db_name][table_name][column].add((kind, constraint))
        return
    def _constraints_affecting_columns(self, table_name, columns, type="UNIQUE"):
        """
        Gets the names of the constraints affecting the given columns.
        If columns is None, returns all constraints of the type on the table.

        Generator; raises DryRunError under dry_run since the constraint
        cache cannot be consulted without touching the database.
        """
        if self.dry_run:
            raise DryRunError("Cannot get constraints for columns.")
        if columns is not None:
            # compare case-insensitively against the cached column names
            columns = set(map(lambda s: s.lower(), columns))
        db_name = self._get_setting('NAME')
        cnames = {}
        # invert the cache: constraint name -> set of columns it covers
        for col, constraints in self.lookup_constraint(db_name, table_name):
            for kind, cname in constraints:
                if kind == type:
                    cnames.setdefault(cname, set())
                    cnames[cname].add(col.lower())
        for cname, cols in cnames.items():
            # only constraints covering exactly the requested column set
            if cols == columns or columns is None:
                yield cname
    @invalidate_table_constraints
    def create_unique(self, table_name, columns):
        """
        Creates a UNIQUE constraint on the columns on the given table.
        Accepts a single column name or a list/tuple of them; returns the
        generated constraint name.
        """
        if not isinstance(columns, (list, tuple)):
            columns = [columns]
        name = self.create_index_name(table_name, columns, suffix="_uniq")
        cols = ", ".join(map(self.quote_name, columns))
        self.execute("ALTER TABLE %s ADD CONSTRAINT %s UNIQUE (%s)" % (
            self.quote_name(table_name),
            self.quote_name(name),
            cols,
        ))
        return name
    @invalidate_table_constraints
    def delete_unique(self, table_name, columns):
        """
        Deletes a UNIQUE constraint on precisely the columns on the given table.
        Raises ValueError when no such constraint exists.
        """
        if not isinstance(columns, (list, tuple)):
            columns = [columns]
        # Dry runs mean we can't do anything.
        if self.dry_run:
            if self.debug:
                print(' - no dry run output for delete_unique_column() due to dynamic DDL, sorry')
            return
        constraints = list(self._constraints_affecting_columns(table_name, columns))
        if not constraints:
            raise ValueError("Cannot find a UNIQUE constraint on table %s, columns %r" % (table_name, columns))
        for constraint in constraints:
            self.execute(self.delete_unique_sql % (
                self.quote_name(table_name),
                self.quote_name(constraint),
            ))
def column_sql(self, table_name, field_name, field, tablespace='', with_name=True, field_prepared=False):
    """
    Creates the SQL snippet for a column. Used by add_column and add_table.

    Returns the column definition string (optionally prefixed with the
    quoted column name when ``with_name`` is True), or None when the field
    produces no db_type (e.g. virtual fields). Foreign-key and index
    statements for the column are queued via add_deferred_sql as a side
    effect.
    """
    # If the field hasn't already been told its attribute name, do so.
    if not field_prepared:
        field.set_attributes_from_name(field_name)
    # hook for the field to do any resolution prior to it's attributes being queried
    if hasattr(field, 'south_init'):
        field.south_init()
    # Possible hook to fiddle with the fields (e.g. defaults & TEXT on MySQL)
    field = self._field_sanity(field)
    # Older Django versions have a db_type() that takes no connection.
    try:
        sql = field.db_type(connection=self._get_connection())
    except TypeError:
        sql = field.db_type()
    if sql:
        # Some callers, like the sqlite stuff, just want the extended type.
        if with_name:
            field_output = [self.quote_name(field.column), sql]
        else:
            field_output = [sql]
        field_output.append('%sNULL' % (not field.null and 'NOT ' or ''))
        if field.primary_key:
            field_output.append('PRIMARY KEY')
        elif field.unique:
            # Just use UNIQUE (no indexes any more, we have delete_unique)
            field_output.append('UNIQUE')
        tablespace = field.db_tablespace or tablespace
        if tablespace and getattr(self._get_connection().features, "supports_tablespaces", False) and field.unique:
            # We must specify the index tablespace inline, because we
            # won't be generating a CREATE INDEX statement for this field.
            field_output.append(self._get_connection().ops.tablespace_sql(tablespace, inline=True))
        sql = ' '.join(field_output)
        sqlparams = ()
        # if the field is "NOT NULL" and a default value is provided, create the column with it
        # this allows the addition of a NOT NULL field to a table with existing rows
        if not getattr(field, '_suppress_default', False):
            if field.has_default():
                default = field.get_default()
                # If the default is actually None, don't add a default term
                if default is not None:
                    # If the default is a callable, then call it!
                    if callable(default):
                        default = default()
                    default = field.get_db_prep_save(default, connection=self._get_connection())
                    default = self._default_value_workaround(default)
                    # Now do some very cheap quoting. TODO: Redesign return values to avoid this.
                    if isinstance(default, string_types):
                        default = "'%s'" % default.replace("'", "''")
                    # Escape any % signs in the output (bug #317)
                    if isinstance(default, string_types):
                        default = default.replace("%", "%%")
                    # Add it in
                    sql += " DEFAULT %s"
                    # NOTE(review): (default) is NOT a tuple — "sql % sqlparams"
                    # below relies on default being a single non-tuple value;
                    # consider (default,) — TODO confirm before changing.
                    sqlparams = (default)
            elif (not field.null and field.blank) or (field.get_default() == ''):
                if field.empty_strings_allowed and self._get_connection().features.interprets_empty_strings_as_nulls:
                    sql += " DEFAULT ''"
                # Error here would be nice, but doesn't seem to play fair.
                #else:
                #    raise ValueError("Attempting to add a non null column that isn't character based without an explicit default value.")
        if field.rel and self.supports_foreign_keys:
            # Queue the FK constraint; it must run after the table exists.
            self.add_deferred_sql(
                self.foreign_key_sql(
                    table_name,
                    field.column,
                    field.rel.to._meta.db_table,
                    field.rel.to._meta.get_field(field.rel.field_name).column
                )
            )
    # Things like the contrib.gis module fields have this in 1.1 and below
    if hasattr(field, 'post_create_sql'):
        for stmt in field.post_create_sql(no_style(), table_name):
            self.add_deferred_sql(stmt)
    # In 1.2 and above, you have to ask the DatabaseCreation stuff for it.
    # This also creates normal indexes in 1.1.
    if hasattr(self._get_connection().creation, "sql_indexes_for_field"):
        # Make a fake model to pass in, with only db_table
        model = self.mock_model("FakeModelForGISCreation", table_name)
        for stmt in self._get_connection().creation.sql_indexes_for_field(model, field, no_style()):
            self.add_deferred_sql(stmt)
    if sql:
        return sql % sqlparams
    else:
        return None
def _field_sanity(self, field):
    """
    Hook for backends to adjust a field before SQL generation (some
    combinations, e.g. DEFAULT plus TEXT on MySQL, are invalid).

    The generic implementation returns the field untouched.
    """
    return field
def _default_value_workaround(self, value):
    """
    Adapt a default value for missing backend functionality, e.g. map
    booleans to integers when the DBMS has no boolean type.
    """
    if isinstance(value, bool) and not self.has_booleans:
        return int(value)
    return value
def foreign_key_sql(self, from_table_name, from_column_name, to_table_name, to_column_name):
    """
    Build the full ALTER TABLE statement that adds a FOREIGN KEY
    constraint from one column to another.
    """
    raw_name = '%s_refs_%s_%s' % (
        from_column_name,
        to_column_name,
        self._digest(from_table_name, to_table_name),
    )
    parts = (
        self.quote_name(from_table_name),
        self.quote_name(self.shorten_name(raw_name)),
        self.quote_name(from_column_name),
        self.quote_name(to_table_name),
        self.quote_name(to_column_name),
        self._get_connection().ops.deferrable_sql()  # Django knows this
    )
    return 'ALTER TABLE %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % parts
@invalidate_table_constraints
def delete_foreign_key(self, table_name, column):
    """
    Drop every FOREIGN KEY constraint found on ``table_name.column``.

    Raises ValueError when none exists.
    """
    if self.dry_run:
        if self.debug:
            print(' - no dry run output for delete_foreign_key() due to dynamic DDL, sorry')
        return  # We can't look at the DB to get the constraints
    found = self._find_foreign_constraints(table_name, column)
    if not found:
        raise ValueError("Cannot find a FOREIGN KEY constraint on table %s, column %s" % (table_name, column))
    for name in found:
        self.execute(self.delete_foreign_key_sql % {
            "table": self.quote_name(table_name),
            "constraint": self.quote_name(name),
        })
drop_foreign_key = alias('delete_foreign_key')
def _find_foreign_constraints(self, table_name, column_name=None):
    """
    Return the FOREIGN KEY constraint names touching ``column_name``.

    For single-column primary keys the PK column is searched as well,
    since foreign keys may reference it.
    """
    direct = set(self._constraints_affecting_columns(
        table_name, [column_name], "FOREIGN KEY"))
    pk_columns = self._find_primary_key_columns(table_name)
    if len(pk_columns) > 1:
        # Composite primary keys cannot be referenced by a foreign key
        return list(direct)
    pk_columns.add(column_name)
    via_pk = set(self._constraints_affecting_columns(
        table_name, pk_columns, "FOREIGN KEY"))
    return list(via_pk | direct)
def _digest(self, *args):
    """
    Delegate to django.db.backends.creation.BaseDatabaseCreation._digest
    so index names match Django's own naming. An evil hack :(
    """
    creation = getattr(self, '_django_db_creation', None)
    if creation is None:
        # Lazily build and cache the creation helper on first use.
        creation = BaseDatabaseCreation(self._get_connection())
        self._django_db_creation = creation
    return creation._digest(*args)
def shorten_name(self, name):
    """Truncate ``name`` to the backend's maximum identifier length."""
    max_length = self._get_connection().ops.max_name_length()
    return truncate_name(name, max_length)
def create_index_name(self, table_name, column_names, suffix=""):
    """
    Generate a deterministic, length-limited name for an index over the
    given columns.
    """
    # Single-column, suffix-less indexes use Django's default algorithm so
    # the generated names line up with those Django itself would create.
    if len(column_names) == 1 and not suffix:
        return self.shorten_name(
            '%s_%s' % (table_name, self._digest(column_names[0]))
        )
    # Otherwise build a South-style name from the table, the first column
    # and a hash over the full column list.
    clean_table = table_name.replace('"', '').replace('.', '_')
    unique_part = '_%x' % abs(hash((clean_table, ','.join(column_names))))
    candidate = ('%s_%s%s%s' % (clean_table, column_names[0], unique_part, suffix)).replace('"', '').replace('.', '_')
    if len(candidate) <= self.max_index_name_length:
        return candidate
    # Too long: keep the distinctive tail and truncate the table part.
    tail = '_%s%s%s' % (column_names[0], unique_part, suffix)
    return '%s%s' % (clean_table[:(self.max_index_name_length - len(tail))], tail)
def create_index_sql(self, table_name, column_names, unique=False, db_tablespace=''):
    """
    Build a CREATE [UNIQUE] INDEX statement for ``column_names`` on the
    table. Returns '' when no columns are supplied.
    """
    if not column_names:
        print("No column names supplied on which to create an index")
        return ''
    connection = self._get_connection()
    tablespace_sql = ''
    if db_tablespace and connection.features.supports_tablespaces:
        tablespace_sql = ' ' + connection.ops.tablespace_sql(db_tablespace)
    index_name = self.create_index_name(table_name, column_names)
    quoted_cols = ','.join(self.quote_name(col) for col in column_names)
    return 'CREATE %sINDEX %s ON %s (%s)%s;' % (
        'UNIQUE ' if unique else '',
        self.quote_name(index_name),
        self.quote_name(table_name),
        quoted_cols,
        tablespace_sql,
    )
@invalidate_table_constraints
def create_index(self, table_name, column_names, unique=False, db_tablespace=''):
    """Build and immediately execute a CREATE INDEX statement."""
    statement = self.create_index_sql(table_name, column_names, unique, db_tablespace)
    self.execute(statement)
@invalidate_table_constraints
def delete_index(self, table_name, column_names, db_tablespace=''):
    """
    Drop an index previously created with create_index.

    Works from the column names alone because create_index_name is
    deterministic for a given table/column combination.
    """
    if isinstance(column_names, string_types):
        column_names = [column_names]
    index_name = self.create_index_name(table_name, column_names)
    self.execute(self.drop_index_string % {
        "index_name": self.quote_name(index_name),
        "table_name": self.quote_name(table_name),
    })
drop_index = alias('delete_index')
@delete_column_constraints
def delete_column(self, table_name, name):
    """Drop the column ``name`` from ``table_name``."""
    self.execute(
        self.delete_column_string % (self.quote_name(table_name), self.quote_name(name)),
        [],
    )
drop_column = alias('delete_column')
def rename_column(self, table_name, old, new):
    """
    Rename column ``old`` to ``new`` on ``table_name``.

    There is no portable SQL for this, so each backend must override it.
    """
    raise NotImplementedError("rename_column has no generic SQL syntax")
@invalidate_table_constraints
def delete_primary_key(self, table_name):
    """
    Drop the table's existing PRIMARY KEY constraint(s).

    Raises ValueError when the table has none.
    """
    # Constraint names cannot be introspected during a dry run.
    if self.dry_run:
        if self.debug:
            print(' - no dry run output for delete_primary_key() due to dynamic DDL, sorry')
        return
    found = list(self._constraints_affecting_columns(table_name, None, type="PRIMARY KEY"))
    if not found:
        raise ValueError("Cannot find a PRIMARY KEY constraint on table %s" % (table_name,))
    for name in found:
        self.execute(self.delete_primary_key_sql % {
            "table": self.quote_name(table_name),
            "constraint": self.quote_name(name),
        })
drop_primary_key = alias('delete_primary_key')
@invalidate_table_constraints
def create_primary_key(self, table_name, columns):
    """Add a PRIMARY KEY constraint over ``columns`` (a name or a list)."""
    if not isinstance(columns, (list, tuple)):
        columns = [columns]
    self.execute(self.create_primary_key_string % {
        "table": self.quote_name(table_name),
        # Follow the common "<table>_pkey" naming convention.
        "constraint": self.quote_name(table_name + "_pkey"),
        "columns": ", ".join(self.quote_name(col) for col in columns),
    })
def _find_primary_key_columns(self, table_name):
    """Return the lower-cased set of columns in the table's primary key."""
    database = self._get_setting('NAME')
    return set(
        column.lower()
        for column, column_constraints in self.lookup_constraint(database, table_name)
        for kind, _name in column_constraints
        if kind == 'PRIMARY KEY'
    )
def start_transaction(self):
    """
    Begin a managed transaction; must be paired with a later
    commit_transaction or rollback_transaction call.
    """
    if self.dry_run:
        # Track depth so rollback_transactions_dry_run can unwind.
        self.pending_transactions += 1
    transaction.commit_unless_managed(using=self.db_alias)
    transaction.enter_transaction_management(using=self.db_alias)
    transaction.managed(True, using=self.db_alias)
def commit_transaction(self):
    """Commit the transaction opened by start_transaction (no-op on dry run)."""
    if self.dry_run:
        return
    transaction.commit(using=self.db_alias)
    transaction.leave_transaction_management(using=self.db_alias)
def rollback_transaction(self):
    """Roll back the transaction opened by start_transaction."""
    if self.dry_run:
        # Balance the counter incremented by start_transaction.
        self.pending_transactions -= 1
    transaction.rollback(using=self.db_alias)
    transaction.leave_transaction_management(using=self.db_alias)
def rollback_transactions_dry_run(self):
    """Unwind every transaction still pending from this dry run."""
    if not self.dry_run:
        return
    while self.pending_transactions > 0:
        self.rollback_transaction()
    if transaction.is_dirty(using=self.db_alias):
        # Still dirty means a COMMIT/ROLLBACK is missing; force an
        # exception by leaving transaction management anyway.
        transaction.leave_transaction_management(using=self.db_alias)
def send_create_signal(self, app_label, model_names):
    """Queue a post_syncdb signal to be sent later for these models."""
    self.pending_create_signals.append((app_label, model_names))
def send_pending_create_signals(self, verbosity=0, interactive=False):
    """Flush the queued create signals, sending at most one per app."""
    # Group the queued model names by app label, preserving app order.
    grouped = SortedDict()
    for app_label, model_names in self.pending_create_signals:
        if app_label in grouped:
            grouped[app_label].extend(model_names)
        else:
            grouped[app_label] = list(model_names)
    # Send only one signal per app, with duplicates removed.
    for app_label, model_names in grouped.items():
        self.really_send_create_signal(app_label, list(set(model_names)),
                                       verbosity=verbosity,
                                       interactive=interactive)
    self.pending_create_signals = []
def really_send_create_signal(self, app_label, model_names,
                              verbosity=0, interactive=False):
    """
    Sends a post_syncdb signal for the model specified.
    If the model is not found (perhaps it's been deleted?),
    no signal is sent.
    TODO: The behavior of django.contrib.* apps seems flawed in that
    they don't respect created_models. Rather, they blindly execute
    over all models within the app sending the signal. This is a
    patch we should push Django to make For now, this should work.
    """
    if self.debug:
        print(" - Sending post_syncdb signal for %s: %s" % (app_label, model_names))
    app = models.get_app(app_label)
    if not app:
        return
    # Resolve the model classes that still exist; deleted ones are skipped.
    created_models = []
    for model_name in model_names:
        model = models.get_model(app_label, model_name)
        if model:
            created_models.append(model)
    if created_models:
        # Dispatch mechanism differs by Django version.
        if hasattr(dispatcher, "send"):
            # Older djangos
            dispatcher.send(signal=models.signals.post_syncdb, sender=app,
                            app=app, created_models=created_models,
                            verbosity=verbosity, interactive=interactive)
        else:
            if self._is_multidb():
                # Django 1.2+
                models.signals.post_syncdb.send(
                    sender=app,
                    app=app,
                    created_models=created_models,
                    verbosity=verbosity,
                    interactive=interactive,
                    db=self.db_alias,
                )
            else:
                # Django 1.1 - 1.0
                models.signals.post_syncdb.send(
                    sender=app,
                    app=app,
                    created_models=created_models,
                    verbosity=verbosity,
                    interactive=interactive,
                )
def mock_model(self, model_name, db_table, db_tablespace='',
               pk_field_name='id', pk_field_type=models.AutoField,
               pk_field_args=[], pk_field_kwargs={}):
    """
    Generates a MockModel class that provides enough information
    to be used by a foreign key/many-to-many relationship.
    Migrations should prefer to use these rather than actual models
    as models could get deleted over time, but these can remain in
    migration files forever.
    Depreciated.
    """
    # NOTE(review): pk_field_args/pk_field_kwargs are mutable defaults and
    # pk_field_kwargs is mutated below ('primary_key'), so the default dict
    # is shared across calls — TODO confirm this is intended.
    class MockOptions(object):
        # Mimics a Django Options (_meta) object via closure over the args.
        def __init__(self):
            self.db_table = db_table
            self.db_tablespace = db_tablespace or settings.DEFAULT_TABLESPACE
            self.object_name = model_name
            self.module_name = model_name.lower()
            if pk_field_type == models.AutoField:
                pk_field_kwargs['primary_key'] = True
            self.pk = pk_field_type(*pk_field_args, **pk_field_kwargs)
            self.pk.set_attributes_from_name(pk_field_name)
            self.abstract = False
        def get_field_by_name(self, field_name):
            # we only care about the pk field
            return (self.pk, self.model, True, False)
        def get_field(self, name):
            # we only care about the pk field
            return self.pk
    class MockModel(object):
        _meta = None
    # We need to return an actual class object here, not an instance
    MockModel._meta = MockOptions()
    MockModel._meta.model = MockModel
    return MockModel
def _db_positive_type_for_alter_column(self, klass, field):
    """
    A helper for subclasses overriding _db_type_for_alter_column:
    Remove the check constraint from the type string for PositiveInteger
    and PositiveSmallInteger fields.
    @param klass: The type of the child (required to allow this to be used when it is subclassed)
    @param field: The field to generate type for
    """
    super_result = super(klass, self)._db_type_for_alter_column(field)
    if isinstance(field, (models.PositiveSmallIntegerField, models.PositiveIntegerField)):
        # Keep only the first token (the base type), dropping the
        # trailing CHECK clause Django appends for positive fields.
        return super_result.split(" ", 1)[0]
    return super_result
def _alter_add_positive_check(self, klass, field, name, params, sqls):
    """
    A helper for subclasses overriding _alter_add_column_mods:
    Add a check constraint verifying positivity to PositiveInteger and
    PositiveSmallInteger fields.
    """
    super(klass, self)._alter_add_column_mods(field, name, params, sqls)
    if isinstance(field, (models.PositiveSmallIntegerField, models.PositiveIntegerField)):
        # Hash the params so the constraint name is unique per column/type.
        uniq_hash = abs(hash(tuple(params.values())))
        d = dict(
            constraint = "CK_%s_PSTV_%s" % (name, hex(uniq_hash)[2:]),
            check = "%s >= 0" % self.quote_name(name))
        sqls.append((self.add_check_constraint_fragment % d, []))
def flatten(ls):
    """Flatten ``ls`` by exactly one level (a list of iterables -> one list)."""
    result = []
    for sublist in ls:
        result.extend(sublist)
    return result
| mpl-2.0 |
eonpatapon/contrail-controller | src/container/kube-manager/kube_manager/common/args.py | 1 | 7928 | #
# Copyright (c) 2016 Juniper Networks, Inc. All rights reserved.
#
import sys
import argparse
from vnc_api.vnc_api import *
from pysandesh.sandesh_base import Sandesh, SandeshSystem, SandeshConfig
from pysandesh.gen_py.sandesh.ttypes import SandeshLevel
from sandesh_common.vns.constants import (HttpPortKubeManager,ApiServerPort,\
DiscoveryServerPort)
from enum import Enum
class MandatoryArgs(Enum):
    """
    Mandatory arguments to kube-manager.

    Each member's value is a dict describing one required option:
      * ``arg_str`` -- the option name as it appears in the config file.
      * ``validatefn`` (optional) -- callable applied to the configured
        value; it must return something truthy for the value to be
        considered valid.

    validate_mandatory_args() checks every member against the parsed
    options, so adding a member here makes that argument required (and,
    with a validatefn, validated).
    """
    POD_SUBNET = {"arg_str": "pod_subnets", "validatefn": lambda value: value}
    SERVICE_SUBNET = {"arg_str": "service_subnets", "validatefn": lambda value: value}
    IP_FABRIC_SUBNET = {"arg_str": "ip_fabric_subnets", "validatefn": lambda value: value}
def parse_args(args_str=None):
    """
    Parse kube-manager command line and config file options.

    Options are layered: hard-coded defaults, then per-section values from
    any --config-file given, then command line flags. The update order of
    the ``defaults`` dict below defines that precedence — do not reorder.
    Returns the argparse namespace after post-processing a few values into
    lists/booleans and validating the mandatory arguments.
    """
    if not args_str:
        args_str = sys.argv[1:]
    # First pass: only extract --config-file so its contents can seed the
    # defaults of the real parser below.
    conf_parser = argparse.ArgumentParser(add_help=False)
    conf_parser.add_argument("-c", "--config-file", action='append',
                             help="Specify config file", metavar="FILE")
    args, remaining_argv = conf_parser.parse_known_args(args_str)
    # Built-in defaults for the DEFAULTS config section.
    defaults = {
        'http_server_port': HttpPortKubeManager,
        'worker_id': '0',
        'collectors': '',
        'logger_class': None,
        'logging_conf': '',
        'log_local': False,
        'log_category': '',
        'use_syslog': False,
        'syslog_facility': Sandesh._DEFAULT_SYSLOG_FACILITY,
        'kube_object_cache': 'True',
        'disc_server_ip': 'localhost',
        'disc_server_port': DiscoveryServerPort,
        'log_level': SandeshLevel.SYS_DEBUG,
        'log_file': '/var/log/contrail/contrail-kube-manager.log',
        'api_service_link_local' : 'True',
        'orchestrator' : 'kubernetes',
        'token' : '',
        'nested_mode': '0',
        'global_tags': '1',
        'aps_name': '',
        'kube_timer_interval': '60',
        'secure_project': 'True'
    }
    defaults.update(SandeshConfig.get_default_options(['DEFAULTS']))
    # Defaults for the VNC (Contrail API / rabbit / cassandra) section.
    vnc_opts = {
        'rabbit_server': 'localhost',
        'rabbit_port': '5672',
        'rabbit_user': 'guest',
        'rabbit_password': 'guest',
        'rabbit_vhost': None,
        'rabbit_ha_mode': False,
        'rabbit_use_ssl': False,
        'kombu_ssl_version': '',
        'kombu_ssl_keyfile': '',
        'kombu_ssl_certfile': '',
        'kombu_ssl_ca_certs': '',
        'cassandra_user': None,
        'cassandra_password': None,
        'cassandra_server_list': '',
        'cluster_id': '',
        'vnc_endpoint_ip': '[127.0.0.1]',
        'vnc_endpoint_port': ApiServerPort,
        'admin_user' : '',
        'admin_password' : '',
        'admin_tenant' : '',
        'public_fip_pool': '{}',
        'zk_server_ip': '127.0.0.1:2181',
    }
    # Defaults for the KUBERNETES section; mandatory args default to None
    # and are enforced by validate_mandatory_args() at the end.
    k8s_opts = {
        'kubernetes_api_server': 'localhost',
        'kubernetes_api_port': '8080',
        'kubernetes_api_secure_port': 8443,
        'kubernetes_service_name': 'kubernetes',
        MandatoryArgs.SERVICE_SUBNET.value['arg_str']: None,
        MandatoryArgs.POD_SUBNET.value['arg_str']: None,
        MandatoryArgs.IP_FABRIC_SUBNET.value['arg_str']: None,
        'kubernetes_cluster_owner': 'k8s',
        'kubernetes_cluster_domain' : 'default-domain',
        'cluster_name': None,
        'cluster_project' : "{}",
        'cluster_network' : "{}",
        'cluster_pod_network' : None,
        'cluster_service_network' : None,
        'ip_fabric_forwarding': False,
        'ip_fabric_snat': False,
    }
    sandesh_opts = SandeshConfig.get_default_options()
    # Defaults for the AUTH (keystone) section.
    auth_opts = {
        'auth_token_url': None,
        'auth_user': 'admin',
        'auth_password': 'admin',
        'auth_tenant': 'admin',
    }
    # NOTE(review): ConfigParser.SafeConfigParser is the Python 2 spelling;
    # confirm this module is py2-only before porting.
    config = ConfigParser.SafeConfigParser()
    if args.config_file:
        config.read(args.config_file)
        if 'VNC' in config.sections():
            vnc_opts.update(dict(config.items("VNC")))
        if 'KUBERNETES' in config.sections():
            k8s_opts.update(dict(config.items("KUBERNETES")))
        SandeshConfig.update_options(sandesh_opts, config)
        if 'AUTH' in config.sections():
            auth_opts.update(dict(config.items("AUTH")))
        if 'DEFAULTS' in config.sections():
            defaults.update(dict(config.items("DEFAULTS")))
    # Second pass: the real parser, seeded with the merged defaults so the
    # command line has the final say.
    parser = argparse.ArgumentParser(
        parents=[conf_parser],
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter,
    )
    defaults.update(vnc_opts)
    defaults.update(k8s_opts)
    defaults.update(sandesh_opts)
    defaults.update(auth_opts)
    parser.set_defaults(**defaults)
    args = parser.parse_args(args_str)
    # Config-file values arrive as strings; split list-valued options and
    # coerce boolean-valued ones.
    if type(args.cassandra_server_list) is str:
        args.cassandra_server_list = args.cassandra_server_list.split()
    if type(args.collectors) is str:
        args.collectors = args.collectors.split()
    if type(args.pod_subnets) is str:
        args.pod_subnets = args.pod_subnets.split()
    if type(args.service_subnets) is str:
        args.service_subnets = args.service_subnets.split()
    if type(args.ip_fabric_subnets) is str:
        args.ip_fabric_subnets = args.ip_fabric_subnets.split()
    if type(args.ip_fabric_forwarding) is str:
        if args.ip_fabric_forwarding.lower() == 'true':
            args.ip_fabric_forwarding = True
        else:
            args.ip_fabric_forwarding = False
    if type(args.ip_fabric_snat) is str:
        if args.ip_fabric_snat.lower() == 'true':
            args.ip_fabric_snat = True
        else:
            args.ip_fabric_snat = False
    args.sandesh_config = SandeshConfig.from_parser_arguments(args)
    # Validate input argumnents.
    validate_mandatory_args(args)
    return args
def rabbitmq_args(args):
    """
    Extract the RabbitMQ connection settings from the parsed options into
    the keyword mapping expected by the VNC kombu client.
    """
    return dict(
        servers=args.rabbit_server,
        port=args.rabbit_port,
        user=args.rabbit_user,
        password=args.rabbit_password,
        vhost=args.rabbit_vhost,
        ha_mode=args.rabbit_ha_mode,
        use_ssl=args.rabbit_use_ssl,
        ssl_version=args.kombu_ssl_version,
        ssl_keyfile=args.kombu_ssl_keyfile,
        ssl_certfile=args.kombu_ssl_certfile,
        ssl_ca_certs=args.kombu_ssl_ca_certs,
    )
def validate_mandatory_args(args):
    """
    Ensure every MandatoryArgs entry is present on ``args`` and passes its
    validate function; exits the process with a message otherwise.
    """
    for required in MandatoryArgs:
        option = required.value['arg_str']
        if not hasattr(args, option):
            print("Mandatory Argument %s not found in config"
                  % option)
            sys.exit("Mandatory argument [%s] not found in config" % option)
        check = required.value.get('validatefn', None)
        configured = getattr(args, option)
        if check and not check(configured):
            sys.exit("Validation of mandatory argument [%s] configured with"\
                " value [%s] failed." % (option, configured))
| apache-2.0 |
yousseb/django_pytds | tests/runtests.py | 1 | 14761 | #!/usr/bin/env python
import logging
import os
import shutil
import subprocess
import sys
import tempfile
import warnings
import django
from django import contrib
from django.utils._os import upath
from django.utils import six
# Dotted path under which Django's contrib apps live.
CONTRIB_MODULE_PATH = 'django.contrib'
# Name of the directory holding test templates.
TEST_TEMPLATE_DIR = 'templates'
# Django's own bundled test suite, located relative to the installed package.
DJANGO_RUNTESTS_DIR = os.path.abspath(os.path.join(os.path.dirname(upath(django.__file__)), '..', 'tests'))
# Directory containing this runner and the local test apps.
RUNTESTS_DIR = os.path.abspath(os.path.dirname(upath(__file__)))
CONTRIB_DIR = os.path.dirname(upath(contrib.__file__))
# Scratch directory for test artifacts; exported so tests can find it.
TEMP_DIR = tempfile.mkdtemp(prefix='django_mssql_')
os.environ['DJANGO_TEST_TEMP_DIR'] = TEMP_DIR
# Make the backend checkout and Django's test suite importable.
MSSQL_DIR = os.path.abspath(os.path.join(RUNTESTS_DIR, '..'))
if MSSQL_DIR not in sys.path:
    sys.path.append(MSSQL_DIR)
if DJANGO_RUNTESTS_DIR not in sys.path:
    sys.path.append(DJANGO_RUNTESTS_DIR)
# Directories that never contain runnable test apps.
SUBDIRS_TO_SKIP = [
    TEST_TEMPLATE_DIR,
    CONTRIB_DIR,
    'test_main',
]
# Subset of Django's own test suite that is run against this backend.
DJANGO_TESTS_TO_INCLUDE = [
    'aggregation',
    'aggregation_regress',
    'backends',
    'basic',
    'bulk_create',
    'cache',
    'commands_sql',
    'custom_columns',
    'custom_columns_regress',
    'custom_managers',
    'custom_managers_regress',
    'custom_methods',
    'custom_pk',
    'datatypes',
    'dates',
    'datetimes',
    'db_typecasts',
    'defer',
    'defer_regress',
    'delete',
    'delete_regress',
    'expressions',
    'expressions_regress',
    'generic_relations',
    'generic_relations_regress',
    'get_object_or_404',
    'get_or_create',
    'get_or_create_regress',
    'initial_sql_regress',
    'inspectdb',
    'introspection',
    'known_related_objects',
    'lookup',
    'max_lengths',
    'model_inheritance',
    'model_inheritance_regress',
    'model_inheritance_same_model_name',
    'model_inheritance_select_related',
    'model_regress',
    'multiple_databases',
    'mutually_referential',
    'nested_foreign_keys',
    'null_fk',
    'null_fk_ordering',
    'null_queries',
    'ordering',
    'pagination',
    'prefetch_related',
    'queries',
    'raw_query',
    'reserved_names',
    'reverse_lookup',
    'reverse_single_related',
    'schema',
    'select_for_update',
    'select_related',
    'select_related_onetoone',
    'select_related_regress',
    'string_lookup',
    'tablespaces',
    'timezones',
    'transactions',
    'transactions_regress',
    'update_only_fields',
]
# Apps installed for every test run, regardless of the labels given.
ALWAYS_INSTALLED_APPS = [
    'django.contrib.contenttypes',
    'django.contrib.auth',
    'django.contrib.sessions',
    'sqlserver_ado',
    'sqlserver_ado.sql_app',
]
def get_test_modules():
    """
    Discover candidate test app directories in both this repo's test dir
    and Django's own test suite (filtered to DJANGO_TESTS_TO_INCLUDE).

    Returns a list of (modpath, name) pairs; modpath is always None here.
    """
    search_dirs = [
        (None, RUNTESTS_DIR),
        (None, DJANGO_RUNTESTS_DIR),
    ]
    found = []
    for modpath, dirpath in search_dirs:
        for entry in os.listdir(dirpath):
            # Skip dotted names, Python 3 byte code dirs (PEP 3147),
            # sql* apps, explicitly excluded dirs, and plain files.
            skip = (
                '.' in entry or
                entry == '__pycache__' or
                entry.startswith('sql') or
                os.path.basename(entry) in SUBDIRS_TO_SKIP or
                os.path.isfile(entry)
            )
            if skip:
                continue
            # Only whitelisted apps are taken from Django's own suite.
            if dirpath.startswith(DJANGO_RUNTESTS_DIR) and os.path.basename(entry) not in DJANGO_TESTS_TO_INCLUDE:
                continue
            found.append((modpath, entry))
    return found
def get_installed():
    """Return installed app module paths, excluding django.contrib apps."""
    from django.db.models.loading import get_apps
    return [
        app.__name__.rsplit('.', 1)[0]
        for app in get_apps()
        if not app.__name__.startswith('django.contrib')
    ]
def setup(verbosity, test_labels):
    """
    Prepare Django settings and INSTALLED_APPS for a test run.

    Redirects a handful of settings to test values, imports every test app
    matching ``test_labels`` (or all apps when no labels are given), and
    returns a dict of the original settings so teardown() can restore them.
    """
    from django.conf import settings
    from django.db.models.loading import get_apps, load_app
    from django.test.testcases import TransactionTestCase, TestCase
    # Force declaring available_apps in TransactionTestCase for faster tests.
    def no_available_apps(self):
        raise Exception("Please define available_apps in TransactionTestCase "
                        "and its subclasses.")
    TransactionTestCase.available_apps = property(no_available_apps)
    TestCase.available_apps = None
    # Snapshot of the settings we are about to override (returned to caller).
    state = {
        'INSTALLED_APPS': settings.INSTALLED_APPS,
        'ROOT_URLCONF': getattr(settings, "ROOT_URLCONF", ""),
        'TEMPLATE_DIRS': settings.TEMPLATE_DIRS,
        'LANGUAGE_CODE': settings.LANGUAGE_CODE,
        'STATIC_URL': settings.STATIC_URL,
        'STATIC_ROOT': settings.STATIC_ROOT,
    }
    # Redirect some settings for the duration of these tests.
    settings.INSTALLED_APPS = ALWAYS_INSTALLED_APPS
    settings.ROOT_URLCONF = 'urls'
    settings.STATIC_URL = '/static/'
    settings.STATIC_ROOT = os.path.join(TEMP_DIR, 'static')
    settings.TEMPLATE_DIRS = (os.path.join(RUNTESTS_DIR, TEST_TEMPLATE_DIR),)
    settings.LANGUAGE_CODE = 'en'
    settings.SITE_ID = 1
    if verbosity > 0:
        # Ensure any warnings captured to logging are piped through a verbose
        # logging handler. If any -W options were passed explicitly on command
        # line, warnings are not captured, and this has no effect.
        logger = logging.getLogger('py.warnings')
        handler = logging.StreamHandler()
        logger.addHandler(handler)
    # Load all the ALWAYS_INSTALLED_APPS.
    with warnings.catch_warnings():
        warnings.filterwarnings('ignore', 'django.contrib.comments is deprecated and will be removed before Django 1.8.', PendingDeprecationWarning)
        get_apps()
    # Load all the test model apps.
    test_modules = get_test_modules()
    # Reduce given test labels to just the app module path
    test_labels_set = set()
    for label in test_labels:
        bits = label.split('.')
        if bits[:2] == ['django', 'contrib']:
            bits = bits[:3]
        else:
            bits = bits[:1]
        test_labels_set.add('.'.join(bits))
    # If GeoDjango, then we'll want to add in the test applications
    # that are a part of its test suite.
    from django.contrib.gis.tests.utils import HAS_SPATIAL_DB
    if HAS_SPATIAL_DB:
        from django.contrib.gis.tests import geo_apps
        test_modules.extend(geo_apps())
        settings.INSTALLED_APPS.extend(['django.contrib.gis', 'django.contrib.sitemaps'])
    for modpath, module_name in test_modules:
        if modpath:
            module_label = '.'.join([modpath, module_name])
        else:
            module_label = module_name
        # if the module (or an ancestor) was named on the command line, or
        # no modules were named (i.e., run all), import
        # this module and add it to INSTALLED_APPS.
        if not test_labels:
            module_found_in_labels = True
        else:
            match = lambda label: (
                module_label == label or  # exact match
                module_label.startswith(label + '.')  # ancestor match
            )
            module_found_in_labels = any(match(l) for l in test_labels_set)
        if module_found_in_labels:
            if verbosity >= 2:
                print("Importing application %s" % module_name)
            mod = load_app(module_label)
            if mod:
                if module_label not in settings.INSTALLED_APPS:
                    settings.INSTALLED_APPS.append(module_label)
    return state
def teardown(state):
    """Delete the temp directory and restore the settings saved by setup()."""
    from django.conf import settings
    try:
        # Pass unicode so temp trees containing non-ASCII filenames are
        # removed successfully on Windows (we assume the temp dir name
        # itself is ASCII).
        shutil.rmtree(six.text_type(TEMP_DIR))
    except OSError:
        print('Failed to remove temp directory: %s' % TEMP_DIR)
    # Put every captured setting back the way setup() found it.
    for key in state:
        setattr(settings, key, state[key])
def django_tests(verbosity, interactive, failfast, test_labels):
    """
    Run the suite via the configured (or default Discover) Django test
    runner and return the number of failures.
    """
    from django.conf import settings
    state = setup(verbosity, test_labels)
    extra_tests = []
    # Run the test suite, including the extra validation tests.
    from django.test.utils import get_runner
    if not hasattr(settings, 'TEST_RUNNER'):
        settings.TEST_RUNNER = 'django.test.runner.DiscoverRunner'
    runner_cls = get_runner(settings)
    runner = runner_cls(
        verbosity=verbosity,
        interactive=interactive,
        failfast=failfast,
    )
    failures = runner.run_tests(test_labels or get_installed(),
                                extra_tests=extra_tests)
    teardown(state)
    return failures
def bisect_tests(bisection_label, options, test_labels):
    """
    Binary-search the test suite for a test that interferes with
    ``bisection_label`` by repeatedly running each half of the suite
    (plus the bisection target) in a subprocess.
    """
    state = setup(int(options.verbosity), test_labels)
    test_labels = test_labels or get_installed()
    print('***** Bisecting test suite: %s' % ' '.join(test_labels))
    # Make sure the bisection point isn't in the test list
    # Also remove tests that need to be run in specific combinations
    for label in [bisection_label, 'model_inheritance_same_model_name']:
        try:
            test_labels.remove(label)
        except ValueError:
            pass
    subprocess_args = [
        sys.executable, upath(__file__), '--settings=%s' % options.settings]
    if options.failfast:
        subprocess_args.append('--failfast')
    if options.verbosity:
        subprocess_args.append('--verbosity=%s' % options.verbosity)
    if not options.interactive:
        subprocess_args.append('--noinput')
    iteration = 1
    while len(test_labels) > 1:
        # Use floor division: under Python 3 a bare "/" yields a float and
        # list slicing with a float raises TypeError.
        midpoint = len(test_labels) // 2
        test_labels_a = test_labels[:midpoint] + [bisection_label]
        test_labels_b = test_labels[midpoint:] + [bisection_label]
        print('***** Pass %da: Running the first half of the test suite' % iteration)
        print('***** Test labels: %s' % ' '.join(test_labels_a))
        failures_a = subprocess.call(subprocess_args + test_labels_a)
        print('***** Pass %db: Running the second half of the test suite' % iteration)
        print('***** Test labels: %s' % ' '.join(test_labels_b))
        print('')
        failures_b = subprocess.call(subprocess_args + test_labels_b)
        if failures_a and not failures_b:
            print("***** Problem found in first half. Bisecting again...")
            iteration = iteration + 1
            test_labels = test_labels_a[:-1]
        elif failures_b and not failures_a:
            print("***** Problem found in second half. Bisecting again...")
            iteration = iteration + 1
            test_labels = test_labels_b[:-1]
        elif failures_a and failures_b:
            print("***** Multiple sources of failure found")
            break
        else:
            print("***** No source of failure found... try pair execution (--pair)")
            break
    if len(test_labels) == 1:
        print("***** Source of error: %s" % test_labels[0])
    teardown(state)
def paired_tests(paired_test, options, test_labels):
    """Run every test label together with ``paired_test`` in a subprocess,
    reporting the first pairing that produces a failure."""
    state = setup(int(options.verbosity), test_labels)
    test_labels = test_labels or get_installed()

    print('***** Trying paired execution')

    # The constant member of the pair must not also be a candidate; tests
    # that only work in specific combinations are excluded as well.
    for excluded in (paired_test, 'model_inheritance_same_model_name'):
        try:
            test_labels.remove(excluded)
        except ValueError:
            pass

    subprocess_args = [
        sys.executable, upath(__file__), '--settings=%s' % options.settings]
    if options.failfast:
        subprocess_args.append('--failfast')
    if options.verbosity:
        subprocess_args.append('--verbosity=%s' % options.verbosity)
    if not options.interactive:
        subprocess_args.append('--noinput')

    total = len(test_labels)
    for position, label in enumerate(test_labels):
        print('***** %d of %d: Check test pairing with %s' % (
            position + 1, total, label))
        if subprocess.call(subprocess_args + [label, paired_test]):
            print('***** Found problem pair with %s' % label)
            return
    print('***** No problem pair found')
    teardown(state)
if __name__ == "__main__":
    # Command-line front end for the Django test runner: parse options,
    # resolve the settings module, then dispatch to a normal run,
    # --bisect or --pair mode.
    from optparse import OptionParser
    usage = "%prog [options] [module module module ...]"
    parser = OptionParser(usage=usage)
    parser.add_option(
        '-v', '--verbosity', action='store', dest='verbosity', default='1',
        type='choice', choices=['0', '1', '2', '3'],
        help='Verbosity level; 0=minimal output, 1=normal output, 2=all '
        'output')
    parser.add_option(
        '--noinput', action='store_false', dest='interactive', default=True,
        help='Tells Django to NOT prompt the user for input of any kind.')
    parser.add_option(
        '--failfast', action='store_true', dest='failfast', default=False,
        help='Tells Django to stop running the test suite after first failed '
        'test.')
    parser.add_option(
        '--settings',
        help='Python path to settings module, e.g. "myproject.settings". If '
        'this isn\'t provided, the DJANGO_SETTINGS_MODULE environment '
        'variable will be used.')
    parser.add_option(
        '--bisect', action='store', dest='bisect', default=None,
        help='Bisect the test suite to discover a test that causes a test '
        'failure when combined with the named test.')
    parser.add_option(
        '--pair', action='store', dest='pair', default=None,
        help='Run the test suite in pairs with the named test to find problem '
        'pairs.')
    parser.add_option(
        '--liveserver', action='store', dest='liveserver', default=None,
        help='Overrides the default address where the live server (used with '
        'LiveServerTestCase) is expected to run from. The default value '
        'is localhost:8081.')
    parser.add_option(
        '--selenium', action='store_true', dest='selenium',
        default=False,
        help='Run the Selenium tests as well (if Selenium is installed)')
    options, args = parser.parse_args()
    # --settings wins over the environment; without either we cannot run.
    if options.settings:
        os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
    elif "DJANGO_SETTINGS_MODULE" not in os.environ:
        parser.error("DJANGO_SETTINGS_MODULE is not set in the environment. "
                     "Set it or use --settings.")
    else:
        options.settings = os.environ['DJANGO_SETTINGS_MODULE']
    # Optional environment toggles read by the live-server / selenium tests.
    if options.liveserver is not None:
        os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = options.liveserver
    if options.selenium:
        os.environ['DJANGO_SELENIUM_TESTS'] = '1'
    # Dispatch: bisection, paired execution, or a normal test run.
    if options.bisect:
        bisect_tests(options.bisect, options, args)
    elif options.pair:
        paired_tests(options.pair, options, args)
    else:
        failures = django_tests(int(options.verbosity), options.interactive,
                                options.failfast, args)
        # Non-zero exit status signals failure to CI / calling shells.
        if failures:
            sys.exit(bool(failures))
| mit |
lzw120/django | django/shortcuts/__init__.py | 254 | 4642 | """
This module collects helper functions and classes that "span" multiple levels
of MVC. In other words, these functions/classes introduce controlled coupling
for convenience's sake.
"""
from django.template import loader, RequestContext
from django.http import HttpResponse, Http404
from django.http import HttpResponseRedirect, HttpResponsePermanentRedirect
from django.db.models.manager import Manager
from django.db.models.query import QuerySet
from django.core import urlresolvers
def render_to_response(*args, **kwargs):
    """
    Returns a HttpResponse whose content is filled with the result of calling
    django.template.loader.render_to_string() with the passed arguments.
    """
    # 'mimetype' is meant for the response, not the template renderer.
    mimetype = kwargs.pop('mimetype', None)
    body = loader.render_to_string(*args, **kwargs)
    return HttpResponse(body, mimetype=mimetype)
def render(request, *args, **kwargs):
    """
    Returns a HttpResponse whose content is filled with the result of calling
    django.template.loader.render_to_string() with the passed arguments.
    Uses a RequestContext by default.
    """
    # Response-level keyword arguments are stripped before rendering.
    response_kwargs = {
        'content_type': kwargs.pop('content_type', None),
        'status': kwargs.pop('status', None),
    }
    try:
        context_instance = kwargs.pop('context_instance')
    except KeyError:
        # No explicit context: build a RequestContext, honouring current_app.
        current_app = kwargs.pop('current_app', None)
        context_instance = RequestContext(request, current_app=current_app)
    else:
        # An explicit context must already carry its current_app.
        if kwargs.get('current_app', None):
            raise ValueError('If you provide a context_instance you must '
                             'set its current_app before calling render()')
    kwargs['context_instance'] = context_instance

    body = loader.render_to_string(*args, **kwargs)
    return HttpResponse(body, **response_kwargs)
def redirect(to, *args, **kwargs):
    """
    Returns an HttpResponseRedirect to the apropriate URL for the arguments
    passed.

    The arguments could be:

        * A model: the model's `get_absolute_url()` function will be called.

        * A view name, possibly with arguments: `urlresolvers.reverse()` will
          be used to reverse-resolve the name.

        * A URL, which will be used as-is for the redirect location.

    By default issues a temporary redirect; pass permanent=True to issue a
    permanent redirect
    """
    permanent = kwargs.pop('permanent', False)
    redirect_class = (HttpResponsePermanentRedirect if permanent
                      else HttpResponseRedirect)

    # Models (and anything else exposing get_absolute_url) win first.
    if hasattr(to, 'get_absolute_url'):
        return redirect_class(to.get_absolute_url())

    # Next try a reverse URL resolution.
    try:
        return redirect_class(urlresolvers.reverse(to, args=args, kwargs=kwargs))
    except urlresolvers.NoReverseMatch:
        # If this is a callable, re-raise.
        if callable(to):
            raise
        # If this doesn't "feel" like a URL, re-raise.
        if '/' not in to and '.' not in to:
            raise

    # Finally, fall back and assume it's a URL
    return redirect_class(to)
def _get_queryset(klass):
    """
    Returns a QuerySet from a Model, Manager, or QuerySet. Created to make
    get_object_or_404 and get_list_or_404 more DRY.
    """
    if isinstance(klass, QuerySet):
        return klass
    # A Manager is used directly; a Model falls back to its default manager.
    manager = klass if isinstance(klass, Manager) else klass._default_manager
    return manager.all()
def get_object_or_404(klass, *args, **kwargs):
    """
    Uses get() to return an object, or raises a Http404 exception if the
    object does not exist.

    klass may be a Model, Manager, or QuerySet object. All other passed
    arguments and keyword arguments are used in the get() query.

    Note: Like with get(), a MultipleObjectsReturned will be raised if more
    than one object is found.
    """
    queryset = _get_queryset(klass)
    try:
        return queryset.get(*args, **kwargs)
    except queryset.model.DoesNotExist:
        raise Http404('No %s matches the given query.' % queryset.model._meta.object_name)
def get_list_or_404(klass, *args, **kwargs):
    """
    Uses filter() to return a list of objects, or raise a Http404 exception if
    the list is empty.

    klass may be a Model, Manager, or QuerySet object. All other passed
    arguments and keyword arguments are used in the filter() query.
    """
    queryset = _get_queryset(klass)
    matches = list(queryset.filter(*args, **kwargs))
    if matches:
        return matches
    raise Http404('No %s matches the given query.' % queryset.model._meta.object_name)
| bsd-3-clause |
mozilla/MozDef | tests/alerts/test_write_audit.py | 3 | 4301 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
from .positive_alert_test_case import PositiveAlertTestCase
from .negative_alert_test_case import NegativeAlertTestCase
from .alert_test_suite import AlertTestSuite
class TestWriteAudit(AlertTestSuite):
    """Test suite for the 'write_audit' alert (WriteAudit class).

    Defines one default positive event and its expected alert, then builds
    positive variations (downgraded severity, slightly older timestamps)
    and negative variations (excluded auditkey/processname, missing
    aggregation key, stale timestamps).
    """
    alert_filename = "write_audit"
    alert_classname = 'WriteAudit'

    # This event is the default positive event that will cause the
    # alert to trigger
    default_event = {
        "_source": {
            "category": "write",
            "summary": "Write: /etc/audit/plugins.d/temp-file.conf",
            "hostname": "exhostname",
            "tags": [
                "audisp-json",
                "2.1.0",
                "audit"
            ],
            "details": {
                "processname": "vi",
                "originaluser": "randomjoe",
                "user": "root",
                "auditkey": "audit",
            }
        }
    }

    # This alert is the expected result from running this task
    default_alert = {
        "category": "write",
        "severity": "WARNING",
        "summary": "5 Filesystem write(s) to an auditd path (/etc/audit/plugins.d/temp-file.conf) by root (randomjoe)",
        "tags": ['audit'],
    }

    test_cases = []

    # Baseline: five default events should produce the default alert.
    test_cases.append(
        PositiveAlertTestCase(
            description="Positive test with default event and default alert expected",
            events=AlertTestSuite.create_events(default_event, 5),
            expected_alert=default_alert
        )
    )

    # A different originaluser is expected to downgrade severity to NOTICE
    # (per the expected alert below).
    events = AlertTestSuite.create_events(default_event, 5)
    for event in events:
        event['_source']['details']['originaluser'] = 'user1'
    expected_alert = AlertTestSuite.create_alert(default_alert)
    expected_alert['severity'] = 'NOTICE'
    expected_alert['summary'] = "5 Filesystem write(s) to an auditd path (/etc/audit/plugins.d/temp-file.conf) by root (user1)"
    test_cases.append(
        PositiveAlertTestCase(
            description="Positive test with expected downgraded severity",
            events=events,
            expected_alert=expected_alert
        )
    )

    # Events shifted one minute into the past still trigger the alert.
    events = AlertTestSuite.create_events(default_event, 5)
    for event in events:
        event['_source']['utctimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda(date_timedelta={'minutes': 1})
        event['_source']['receivedtimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda(date_timedelta={'minutes': 1})
    test_cases.append(
        PositiveAlertTestCase(
            description="Positive test with events a minute earlier",
            events=events,
            expected_alert=default_alert
        )
    )

    # An auditkey other than 'audit' must not trigger.
    events = AlertTestSuite.create_events(default_event, 5)
    for event in events:
        event['_source']['details']['auditkey'] = 'exec'
    test_cases.append(
        NegativeAlertTestCase(
            description="Negative test case with events with auditkey without 'audit'",
            events=events,
        )
    )

    # 'process1' is on the alert's process-name exclusion list.
    events = AlertTestSuite.create_events(default_event, 5)
    for event in events:
        event['_source']['details']['processname'] = 'process1'
    test_cases.append(
        NegativeAlertTestCase(
            description="Negative test case with events with processname that matches exclusion of 'process1'",
            events=events,
        )
    )

    # originaluser is the aggregation key; a None value suppresses the alert.
    events = AlertTestSuite.create_events(default_event, 5)
    for event in events:
        event['_source']['details']['originaluser'] = None
    test_cases.append(
        NegativeAlertTestCase(
            description="Negative test case aggregation key excluded",
            events=events,
        )
    )

    # Events 20 minutes old are treated as stale (negative case) —
    # presumably outside the alert's search window; verify against the alert.
    events = AlertTestSuite.create_events(default_event, 5)
    for event in events:
        event['_source']['utctimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({'minutes': 20})
        event['_source']['receivedtimestamp'] = AlertTestSuite.subtract_from_timestamp_lambda({'minutes': 20})
    test_cases.append(
        NegativeAlertTestCase(
            description="Negative test case with old timestamp",
            events=events,
        )
    )
| mpl-2.0 |
andresgz/django | tests/template_tests/syntax_tests/test_list_index.py | 521 | 2694 | from django.test import SimpleTestCase
from ..utils import setup
class ListIndexTests(SimpleTestCase):
    """Template-engine tests for the ``{{ var.N }}`` list-index syntax:
    valid indexing, silent failure on out-of-range/None/missing keys, and
    dict-vs-list lookup precedence for numeric keys."""

    @setup({'list-index01': '{{ var.1 }}'})
    def test_list_index01(self):
        """
        List-index syntax allows a template to access a certain item of a
        subscriptable object.
        """
        output = self.engine.render_to_string('list-index01', {'var': ['first item', 'second item']})
        self.assertEqual(output, 'second item')

    @setup({'list-index02': '{{ var.5 }}'})
    def test_list_index02(self):
        """
        Fail silently when the list index is out of range.
        """
        output = self.engine.render_to_string('list-index02', {'var': ['first item', 'second item']})
        # With string_if_invalid configured, failed lookups render as 'INVALID'.
        if self.engine.string_if_invalid:
            self.assertEqual(output, 'INVALID')
        else:
            self.assertEqual(output, '')

    @setup({'list-index03': '{{ var.1 }}'})
    def test_list_index03(self):
        """
        Fail silently when the list index is out of range.
        """
        output = self.engine.render_to_string('list-index03', {'var': None})
        if self.engine.string_if_invalid:
            self.assertEqual(output, 'INVALID')
        else:
            self.assertEqual(output, '')

    @setup({'list-index04': '{{ var.1 }}'})
    def test_list_index04(self):
        """
        Fail silently when variable is a dict without the specified key.
        """
        output = self.engine.render_to_string('list-index04', {'var': {}})
        if self.engine.string_if_invalid:
            self.assertEqual(output, 'INVALID')
        else:
            self.assertEqual(output, '')

    @setup({'list-index05': '{{ var.1 }}'})
    def test_list_index05(self):
        """
        Dictionary lookup wins out when dict's key is a string.
        """
        output = self.engine.render_to_string('list-index05', {'var': {'1': "hello"}})
        self.assertEqual(output, 'hello')

    @setup({'list-index06': '{{ var.1 }}'})
    def test_list_index06(self):
        """
        But list-index lookup wins out when dict's key is an int, which
        behind the scenes is really a dictionary lookup (for a dict)
        after converting the key to an int.
        """
        output = self.engine.render_to_string('list-index06', {"var": {1: "hello"}})
        self.assertEqual(output, 'hello')

    @setup({'list-index07': '{{ var.1 }}'})
    def test_list_index07(self):
        """
        Dictionary lookup wins out when there is a string and int version
        of the key.
        """
        output = self.engine.render_to_string('list-index07', {"var": {'1': "hello", 1: "world"}})
        self.assertEqual(output, 'hello')
| bsd-3-clause |
statsbiblioteket/newspaper-jpylizer-deprecated | src/main/extras/jpylyzer-1.10.1/jpylyzer.py | 2 | 18851 | #! /usr/bin/env python
#
#
#
# jpylyzer
#
# Requires: Python 2.7 (older versions won't work) OR Python 3.2 or more recent
# (Python 3.0 and 3.1 won't work either!)
#
# Copyright (C) 2011, 2012 Johan van der Knijff, Koninklijke Bibliotheek -
# National Library of the Netherlands
#
# Contributors:
# Rene van der Ark (refactoring of original code)
# Lars Buitinck
# Adam Retter, The National Archives, UK. <adam.retter@googlemail.com>
# Jaishree Davey, The National Archives, UK.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import os
import time
import imp
import glob
import struct
import argparse
import config
import platform
import codecs
import etpatch as ET
import fnmatch
import xml.etree.ElementTree as ETree
from boxvalidator import BoxValidator
from byteconv import bytesToText
from shared import printWarning
# Directory and basename of the running script; used in the tool metadata
# that checkOneFile() embeds in the XML output.
scriptPath, scriptName = os.path.split(sys.argv[0])

__version__= "1.10.1"

# Create parser
parser = argparse.ArgumentParser(description="JP2 image validator and properties extractor")

# list of existing files to be analysed
existingFiles = []
def main_is_frozen():
    """Return True when running from a frozen executable (py2exe / freeze)."""
    if hasattr(sys, "frozen"):       # new py2exe
        return True
    if hasattr(sys, "importers"):    # old py2exe
        return True
    return imp.is_frozen("__main__")  # tools/freeze
def get_main_dir():
    """Return the directory holding the frozen executable or the script."""
    base = sys.executable if main_is_frozen() else sys.argv[0]
    return os.path.dirname(base)
def readFileBytes(file):
    """Read *file* and return its entire contents as a bytes object.

    Uses a context manager so the handle is closed even when the read
    raises (the original open/close pair leaked on failure).
    """
    with open(file, "rb") as f:
        return f.read()
def generatePropertiesRemapTable():
    """Build the nested lookup table used to map 'raw' property values
    (mostly integers) to human-readable text descriptions.

    Outer key: parameter tag name; value: dict mapping raw value -> text.
    """
    # Shared mappings that several tags reuse.
    yesNo = {0: "no", 1: "yes"}
    sign = {0: "unsigned", 1: "signed"}

    # Profile Class (ICC)
    profileClass = {
        b'scnr': "Input Device Profile",
        b'mntr': "Display Device Profile",
        b'prtr': "Output Device Profile",
        b'link': "DeviceLink Profile",
        b'spac': "ColorSpace Conversion Profile",
        b'abst': "Abstract Profile",
        b'nmcl': "Named Colour Profile",
    }
    # Primary Platform (ICC)
    primaryPlatform = {
        b'APPL': "Apple Computer, Inc.",
        b'MSFT': "Microsoft Corporation",
        b'SGI': "Silicon Graphics, Inc.",
        b'SUNW': "Sun Microsystems, Inc.",
    }

    return {
        'unkC': yesNo,
        'iPR': yesNo,
        'profileClass': profileClass,
        'primaryPlatform': primaryPlatform,
        'embeddedProfile': yesNo,
        'profileCannotBeUsedIndependently': yesNo,
        # ICC media characteristics
        'transparency': {0: "Reflective", 1: "Transparent"},
        'glossiness': {0: "Glossy", 1: "Matte"},
        'polarity': {0: "Positive", 1: "Negative"},
        'colour': {0: "Colour", 1: "Black and white"},
        'renderingIntent': {
            0: "Perceptual",
            1: "Media-Relative Colorimetric",
            2: "Saturation",
            3: "ICC-Absolute Colorimetric",
        },
        # Bits per component sign flags (Image Header Box, bPC, SIZ)
        'bSign': sign,
        'bPCSign': sign,
        'ssizSign': sign,
        # mTyp (Component Mapping box)
        'mTyp': {0: "direct use", 1: "palette mapping"},
        # Codestream boolean flags
        'precincts': yesNo,
        'sop': yesNo,
        'eph': yesNo,
        'multipleComponentTransformation': yesNo,
        'codingBypass': yesNo,
        'resetOnBoundaries': yesNo,
        'termOnEachPass': yesNo,
        'vertCausalContext': yesNo,
        'predTermination': yesNo,
        'segmentationSymbols': yesNo,
        # Compression type (Image Header Box)
        'c': {7: "jpeg2000"},
        # meth (Colour Specification Box)
        'meth': {
            1: "Enumerated",
            2: "Restricted ICC",
            3: "Any ICC",        # JPX only
            4: "Vendor Colour",  # JPX only
        },
        # enumCS (Colour Specification Box)
        'enumCS': {16: "sRGB", 17: "greyscale", 18: "sYCC"},
        # Channel type / association (Channel Definition Box)
        'cTyp': {
            0: "colour",
            1: "opacity",
            2: "premultiplied opacity",
            65535: "not specified",
        },
        'cAssoc': {0: "all colours", 65535: "no colours"},
        # Progression order (Codestream, COD)
        'order': {0: "LRCP", 1: "RLCP", 2: "RPCL", 3: "PCRL", 4: "CPRL"},
        # Transformation type (Codestream, COD)
        'transformation': {0: "9-7 irreversible", 1: "5-3 reversible"},
        # Decoder capabilities, rsiz (Codestream, SIZ)
        'rsiz': {0: "ISO/IEC 15444-1", 1: "Profile 0", 2: "Profile 1"},
        # Quantization style (Codestream, QCD)
        'qStyle': {0: "no quantization", 1: "scalar derived", 2: "scalar expounded"},
        # Registration value (Codestream, COM)
        'rcom': {0: "binary", 1: "ISO/IEC 8859-15 (Latin)"},
    }
def checkOneFile(file):
    """Validate one JP2 file and return the analysis result as an
    elementtree element containing tool info, file info, the validation
    outcome, test results and image characteristics."""
    # Process one file and return analysis result as element object
    fileData = readFileBytes(file)
    isValidJP2, tests, characteristics = BoxValidator("JP2", fileData).validate() #validateJP2(fileData)

    # Generate property values remap table
    remapTable = generatePropertiesRemapTable()

    # Create printable version of tests and characteristics tree
    tests.makeHumanReadable()
    characteristics.makeHumanReadable(remapTable)

    # Create output elementtree object
    root=ET.Element('jpylyzer')

    # Create elements for storing tool and file meta info
    toolInfo=ET.Element('toolInfo')
    fileInfo=ET.Element('fileInfo')

    # File name and path may contain non-ASCII characters, decoding to Latin should
    # (hopefully) prevent any Unicode decode errors. Elementtree will then deal with any non-ASCII
    # characters by replacing them with numeric entity references
    try:
        # This works in Python 2.7, but raises error in 3.x (no decode attribute for str type!)
        fileName=os.path.basename(file).decode("iso-8859-15","strict")
        filePath=os.path.abspath(file).decode("iso-8859-15","strict")
    except AttributeError:
        # This works in Python 3.x, but goes wrong withh non-ASCII chars in 2.7
        fileName=os.path.basename(file)
        filePath=os.path.abspath(file)

    # Produce some general tool and file meta info
    toolInfo.appendChildTagWithText("toolName", scriptName)
    toolInfo.appendChildTagWithText("toolVersion", __version__)
    fileInfo.appendChildTagWithText("fileName", fileName)
    fileInfo.appendChildTagWithText("filePath", filePath)
    fileInfo.appendChildTagWithText("fileSizeInBytes", str(os.path.getsize(file)))
    fileInfo.appendChildTagWithText("fileLastModified", time.ctime(os.path.getmtime(file)))

    # Append to root
    root.append(toolInfo)
    root.append(fileInfo)

    # Add validation outcome
    root.appendChildTagWithText("isValidJP2", str(isValidJP2))

    # Append test results and characteristics to root
    root.append(tests)
    root.append(characteristics)

    return(root)
def checkNullArgs(args):
    """Print usage and exit with an error code when no input argument
    was supplied."""
    if args:
        return
    print('')
    parser.print_help()
    sys.exit(config.ERR_CODE_NO_IMAGES)
def checkNoInput(files):
    """Exit with a warning when the input expressions matched no
    existing files at all."""
    if files:
        return
    printWarning("no images to check!")
    sys.exit(config.ERR_CODE_NO_IMAGES)
def printHelpAndExit():
    """Show the argparse usage message and terminate normally."""
    print('')
    parser.print_help()
    sys.exit()
def getFilesFromDir(dirpath):
    """Append every regular file directly inside *dirpath* to the
    module-level existingFiles list (no recursion)."""
    for entry in os.listdir(dirpath):
        candidate = os.path.join(dirpath, entry)
        if os.path.isfile(candidate):
            existingFiles.append(candidate)
def getFiles(searchpattern):
    """Expand a glob pattern and collect the matching regular files into
    the module-level existingFiles list."""
    for match in glob.glob(searchpattern):
        if os.path.isfile(match):
            existingFiles.append(match)
def getFilesWithPatternFromTree(rootDir, pattern):
    """Walk the directory tree below *rootDir* and collect all files
    matching *pattern*.

    NOTE: as in the original, files directly inside rootDir itself are
    not matched — only its subdirectories are searched.
    """
    for dirname, dirnames, filenames in os.walk(rootDir):
        for subdirname in dirnames:
            getFiles(os.path.join(dirname, subdirname, pattern))
def getFilesFromTree(rootDir):
    """Walk the directory tree below *rootDir* and append every file found
    to the module-level existingFiles list.

    NOTE: directory names themselves are deliberately not recorded.
    The original implementation also looped over the subdirectory names
    building a path it never used; that dead work is removed.
    """
    for dirname, dirnames, filenames in os.walk(rootDir):
        for filename in filenames:
            existingFiles.append(os.path.join(dirname, filename))
def findFiles(recurse, paths):
    """Resolve the input path expressions into existing files.

    Handles literal file paths, wildcard expressions (expanded here with
    glob for shells that do not expand them) and, when ``recurse`` is
    true, whole directory trees. All hits are appended to the
    module-level ``existingFiles`` list; nothing is returned.
    """
    WILDCARD = "*"
    #process the list of input paths
    for root in paths:

        #WILDCARD IN PATH OR FILENAME
        #In Linux wilcard expansion done by bash so, add file to list
        if os.path.isfile(root):
            existingFiles.append(root)
        #Windows (& Linux with backslash prefix) does not expand wildcard '*'
        #Find files in the input path and add to list
        elif WILDCARD in root:
            #get the absolute path if not given
            if not(os.path.isabs(root)):
                root = os.path.abspath(root)

            #Expand wildcard in the input path. Returns a list of files, folders
            filesList = glob.glob(root)

            #If the input path is a directory, then glob expands it to full name
            if len(filesList) == 1:
                #set root to the expanded directory path
                root = filesList[0]

            #get files from directory
            # (the following block is deliberately disabled — kept verbatim)
            """ Disabled JvdK: if enabled all files in direct child directories are analysed - do we really want that?
            if os.path.isdir(root) and not recurse:
                getFilesFromDir(root)
            """
            #If the input path returned files list, add files to List
            if len(filesList) > 1:
                for f in filesList:
                    if os.path.isfile(f):
                        existingFiles.append(f)

        elif os.path.isdir(root) == False and os.path.isfile(root) == False:
            # One or more (but not all) paths do no exist - print a warning
            msg = root + " does not exist"
            printWarning(msg)
        # (the following block is deliberately disabled — kept verbatim)
        """ Disabled JvdK:
        elif os.path.isdir(root) and not recurse:
            #input path is a directory and is not recursive
            getFilesFromDir(root)
        """

        #RECURSION and WILDCARD IN RECURSION
        #Check if recurse in the input path
        if recurse:
            #get absolute input path if not given
            if not(os.path.isabs(root)):
                root = os.path.abspath(root)

            if WILDCARD in root:
                pathAndFilePattern = os.path.split(root)
                path = pathAndFilePattern[0]
                filePattern = pathAndFilePattern[1]
                filenameAndExtension = os.path.splitext(filePattern)
                #input path contains wildcard
                if WILDCARD in path:
                    filepath = glob.glob(path)
                    #if filepath is a folder, get files in current directory
                    if len(filepath) == 1:
                        getFilesWithPatternFromTree(filepath[0], filePattern)
                    #if filepath is a list of files/folder
                    #get all files in the tree matching the file pattern
                    if len(filepath) > 1:
                        for f in filepath:
                            if os.path.isdir(f):
                                getFilesWithPatternFromTree(f, filePattern)
                #file name or extension contains wildcard
                elif WILDCARD in filePattern:
                    getFilesWithPatternFromTree(path, filePattern)
                elif WILDCARD in filenameAndExtension:
                    filenameAndExtension = os.path.splitext(filePattern)
                    extension = WILDCARD + filenameAndExtension[1]
                    getFilesWithPatternFromTree(path, extension)
            #get files in the current folder and sub dirs w/o wildcard in path
            elif os.path.isdir(root):
                getFilesFromTree(root)
def checkFiles(recurse, wrap, paths):
    """Resolve *paths* to existing files, analyse each one, and stream the
    XML results to stdout (UTF-8), optionally wrapped in a <results>
    element when *wrap* is true."""
    # This method checks the input argument path(s) for existing files and analyses them

    #Find existing files in the given input path(s)
    findFiles(recurse, paths)

    # If there are no valid input files then exit program
    # JvdK:
    checkNoInput(existingFiles)

    # Set encoding of the terminal to UTF-8
    # (Python 3 needs the underlying binary buffer; Python 2 wraps stdout directly)
    if config.PYTHON_VERSION.startswith(config.PYTHON_2):
        out = codecs.getwriter(config.UTF8_ENCODING) (sys.stdout)
    elif config.PYTHON_VERSION.startswith(config.PYTHON_3):
        out = codecs.getwriter(config.UTF8_ENCODING) (sys.stdout.buffer)

    # Wrap the xml output in <results> element, if wrapper flag is true
    # (the matching closing tag is printed by main() after all files are done)
    if wrap:
        out.write("<?xml version='1.0' encoding='UTF-8'?><results>")
    else:
        out.write("<?xml version='1.0' encoding='UTF-8'?>")

    # Process the input files
    for path in existingFiles:

        # Analyse file
        xmlElement=checkOneFile(path)

        #Output the xml
        #Python2.x does automatic conversion between byte and string types,
        #hence, binary data can be output using sys.stdout
        if config.PYTHON_VERSION.startswith(config.PYTHON_2):
            ETree.ElementTree(xmlElement).write(out, xml_declaration=False)

        #Python3.x recognizes bytes and str as different types and encoded
        #Unicode is represented as binary data. The underlying sys.stdout.buffer
        #is used to write binary data
        if config.PYTHON_VERSION.startswith(config.PYTHON_3):
            output = ETree.tostring(xmlElement,encoding="unicode",method="xml")
            out.write(output)
def parseCommandLine():
    """Define jpylyzer's command line interface on the module-level
    ``parser`` and return the parsed arguments.

    The --recursive option is currently disabled (see the string-literal
    block below, kept verbatim).
    """
    # Add arguments
    parser.add_argument('--verbose',
        action = "store_true",
        dest = "outputVerboseFlag",
        default = False,
        help = "report test results in verbose format")
    """
    parser.add_argument('--recursive', '-r',
        action = "store_true",
        dest = "inputRecursiveFlag",
        default = False,
        help = "when encountering a folder, every file in every subfolder will be analysed")
    """
    parser.add_argument('--wrapper',
        '-w', action = "store_true",
        dest = "inputWrapperFlag",
        default = False,
        help = "wrap output for individual image(s) in 'results' XML element")
    parser.add_argument('jp2In',
        action = "store",
        type = str,
        nargs = argparse.REMAINDER,
        help = "input JP2 image(s), may be one or more (whitespace-separated) path expressions; prefix wildcard (*) with backslash (\\) in Linux")
    parser.add_argument('--version', '-v',
        action = 'version',
        version = __version__)

    # Parse arguments
    args=parser.parse_args()

    return(args)
def main():
    """Entry point: parse the command line, configure verbosity, and
    validate the input images."""
    # Get input from command line
    args=parseCommandLine()

    # Input images
    jp2In=args.jp2In

    # Print help message and exit if jp2In is empty
    if len(jp2In) == 0:
        printHelpAndExit()

    # Storing this to 'config.outputVerboseFlag' makes this value available to any module
    # that imports 'config.py' (here: 'boxvalidator.py')
    config.outputVerboseFlag=args.outputVerboseFlag

    # Check files
    #checkFiles(args.inputRecursiveFlag, args.inputWrapperFlag, jp2In)
    # Recursion is disabled: 'recurse' is hard-wired to False here.
    checkFiles(False, args.inputWrapperFlag, jp2In)

    # Add the end </results> element, if wrapper flag is true
    if args.inputWrapperFlag: print("</results>")

if __name__ == "__main__":
    main()
| apache-2.0 |
dyoung418/tensorflow | tensorflow/python/client/session.py | 18 | 63174 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A client interface for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import re
import threading
import numpy as np
from tensorflow.core.protobuf import config_pb2
from tensorflow.python import pywrap_tensorflow as tf_session
from tensorflow.python.framework import c_api_util
from tensorflow.python.framework import device
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import session_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import compat
from tensorflow.python.util import nest
class SessionInterface(object):
  """Base class for implementations of TensorFlow client sessions.

  Every member below is abstract: each stub raises `NotImplementedError`
  carrying the member's name, and concrete sessions (e.g. `BaseSession`
  in this module) are expected to override all of them.
  """

  @property
  def graph(self):
    """The underlying TensorFlow graph, to be used in building Operations.

    Raises:
      NotImplementedError: always, in this base class.
    """
    raise NotImplementedError('graph')

  @property
  def sess_str(self):
    """The TensorFlow process to which this session will connect.

    Raises:
      NotImplementedError: always, in this base class.
    """
    raise NotImplementedError('sess_str')

  def run(self, fetches, feed_dict=None, options=None, run_metadata=None):
    """Runs operations in the session. See `BaseSession.run()` for details."""
    raise NotImplementedError('run')

  def partial_run_setup(self, fetches, feeds=None):
    """Sets up the feeds and fetches for partial runs in the session."""
    raise NotImplementedError('partial_run_setup')

  def partial_run(self, handle, fetches, feed_dict=None):
    """Continues the execution with additional feeds and fetches."""
    raise NotImplementedError('partial_run')
def _get_indexed_slices_value_from_fetches(fetched_vals):
  """Packs fetched component arrays back into an `ops.IndexedSlicesValue`.

  `fetched_vals` holds (values, indices) or (values, indices, dense_shape);
  the dense shape is optional and defaults to None when absent.
  """
  dense_shape = fetched_vals[2] if len(fetched_vals) == 3 else None
  return ops.IndexedSlicesValue(fetched_vals[0], fetched_vals[1], dense_shape)
def _get_feeds_for_indexed_slices(feed, feed_val):
return list(zip([feed.values, feed.indices] if feed.dense_shape is None else
[feed.values, feed.indices, feed.dense_shape], feed_val))
# List of extensions supported to convert run arguments into actual fetches and
# feeds.
#
# Each element in the list is a tuple of (Type, fetch_fn, feed_fn1, feed_fn2),
# where the function signatures are:
# fetch_fn : Type -> (list of Tensors,
# lambda: list of fetched np.ndarray -> TypeVal)
# feed_fn1 : Type, TypeVal -> list of (Tensor, value)
# feed_fn2 : Type -> list of Tensors
#
# `fetch_fn` describes how to expand fetch into its
# component Tensors and how to contract the fetched results back into
# a single return value.
#
# Each feed function describes how to unpack a single fed value and map it to
# feeds of one or more tensors and their corresponding values: `feed_fn1` is
# used to feed a run, `feed_fn2` to set up a partial run.
#
# TODO(touts): We could reimplement these as specialized _FeedMapper
# implementations after we refactor the feed handling code to use them.
#
# Eventually, this registration could be opened up to support custom Tensor
# expansions.
# pylint: disable=g-long-lambda
_REGISTERED_EXPANSIONS = [
    # Each entry is (Type, fetch_fn, feed_fn1, feed_fn2); entries are
    # scanned in order with isinstance checks (see _FetchMapper.for_fetch),
    # so the catch-all `object` entry must stay last.  User registrations
    # made via register_session_run_conversion_functions() are inserted at
    # index 0 and therefore take priority.
    # SparseTensors are fetched as SparseTensorValues. They can be fed
    # SparseTensorValues or normal tuples.
    (sparse_tensor.SparseTensor,
     lambda fetch: (
         [fetch.indices, fetch.values, fetch.dense_shape],
         lambda fetched_vals: sparse_tensor.SparseTensorValue(*fetched_vals)),
     lambda feed, feed_val: list(zip(
         [feed.indices, feed.values, feed.dense_shape], feed_val)),
     lambda feed: [feed.indices, feed.values, feed.dense_shape]),
    # IndexedSlices are fetched as IndexedSlicesValues. They can be fed
    # IndexedSlicesValues or normal tuples.  The dense_shape component is
    # optional, hence the conditional two- vs three-tensor expansion.
    (ops.IndexedSlices,
     lambda fetch: (
         [fetch.values, fetch.indices] if fetch.dense_shape is None
         else [fetch.values, fetch.indices, fetch.dense_shape],
         _get_indexed_slices_value_from_fetches),
     _get_feeds_for_indexed_slices,
     lambda feed: [feed.values, feed.indices] if feed.dense_shape is None
     else [feed.values, feed.indices, feed.dense_shape]),
    # The default catches all other types and performs no expansions.
    (object,
     lambda fetch: ([fetch], lambda fetched_vals: fetched_vals[0]),
     lambda feed, feed_val: [(feed, feed_val)],
     lambda feed: [feed])]
# pylint: enable=g-long-lambda
def register_session_run_conversion_functions(tensor_type, fetch_function,
    feed_function=None, feed_function_for_partial_run=None):
  """Register fetch and feed conversion functions for `tf.Session.run()`.

  This function registers a triple of conversion functions for fetching and/or
  feeding values of user-defined types in a call to tf.Session.run().

  An example

  ```python
  class SquaredTensor(object):
    def __init__(self, tensor):
      self.sq = tf.square(tensor)

  # You can define conversion functions as follows:
  fetch_function = lambda squared_tensor: ([squared_tensor.sq],
                                           lambda val: val[0])
  feed_function = lambda feed, feed_val: [(feed.sq, feed_val)]
  feed_function_for_partial_run = lambda feed: [feed.sq]

  # Then after invoking this register function, you can use as follows:
  session.run(squared_tensor1,
              feed_dict={squared_tensor2: some_numpy_array})
  ```

  Args:
    tensor_type: The type for which you want to register a conversion function.
    fetch_function: A callable that takes an object of type `tensor_type` and
      returns a tuple, where the first element is a list of `tf.Tensor` objects,
      and the second element is a callable that takes a list of ndarrays and
      returns an object of some value type that corresponds to `tensor_type`.
      fetch_function describes how to expand fetch into its component Tensors
      and how to contract the fetched results back into a single return value.
    feed_function: A callable that takes feed_key and feed_value as input, and
      returns a list of tuples (feed_tensor, feed_val), feed_key must have type
      `tensor_type`, and feed_tensor must have type `tf.Tensor`. Each feed
      function describes how to unpack a single fed value and map it to feeds
      of one or more tensors and their corresponding values.
    feed_function_for_partial_run: A callable for specifying tensor values to
      feed when setting up a partial run, which takes a `tensor_type` type
      object as input, and returns a list of Tensors.

  Raises:
    ValueError: If `tensor_type` conflicts with an already-registered type.
  """
  for conversion_function in _REGISTERED_EXPANSIONS:
    if issubclass(conversion_function[0], tensor_type):
      # Fix: the message was previously passed logging-style as
      # ValueError('%s ...', tensor_type), leaving it unformatted; an
      # unreachable `return` also followed the raise.
      raise ValueError('%s has already been registered so ignore it.' %
                       tensor_type)
  # New registrations are consulted before the built-in (more general)
  # expansions, so insert at the front of the list.
  _REGISTERED_EXPANSIONS.insert(0, (tensor_type, fetch_function, feed_function,
                                    feed_function_for_partial_run))
class _FetchMapper(object):
"""Definition of the interface provided by fetch mappers.
Fetch mappers are utility classes used by the _FetchHandler to handle
arbitrary structures for the `fetch` argument to `Session.run()`.
The `fetch` argument can be of various shapes: single tensor or op, list of
fetches, tuple of fetches, namedtuple of fetches, or dict of fetches. The
structures can be arbitrarily nested.
The low level run() API only wants a list of tensor or op names. The various
`_FetchMapper` subclasses below take care of handling the different shapes:
uniquifying the fetches, and constructing results with the original shape.
"""
def unique_fetches(self):
"""Return the list of unique tensors or ops needed by this fetch mapper.
Returns:
A list of tensors or ops.
"""
raise NotImplementedError('Must be implemented by subclasses')
def build_results(self, values):
"""Build results that match the original shape of the fetch.
Args:
values: List of values returned by run(). The values correspond
exactly to the list tensors or ops returned by unique_fetches().
Returns:
A struct of the same shape as the original fetch object handled by
this fetch mapper. In the returned struct, the original fetches are
replaced by their fetched values.
"""
raise NotImplementedError('Must be implemented by subclasses')
@staticmethod
def for_fetch(fetch):
"""Creates fetch mapper that handles the structure of `fetch`.
The default graph must be the one from which we want to fetch values when
this function is called.
Args:
fetch: An arbitrary fetch structure: singleton, list, tuple,
namedtuple, or dict.
Returns:
An instance of a subclass of `_FetchMapper` that handles the shape.
"""
if fetch is None:
raise TypeError('Fetch argument %r has invalid type %r' %
(fetch, type(fetch)))
elif isinstance(fetch, (list, tuple)):
# NOTE(touts): This is also the code path for namedtuples.
return _ListFetchMapper(fetch)
elif isinstance(fetch, dict):
return _DictFetchMapper(fetch)
else:
# Look for a handler in the registered expansions.
for tensor_type, fetch_fn, _, _ in _REGISTERED_EXPANSIONS:
if isinstance(fetch, tensor_type):
fetches, contraction_fn = fetch_fn(fetch)
return _ElementFetchMapper(fetches, contraction_fn)
# Did not find anything.
raise TypeError('Fetch argument %r has invalid type %r' %
(fetch, type(fetch)))
class _ElementFetchMapper(_FetchMapper):
  """Fetch mapper for a single leaf of the fetch structure."""

  def __init__(self, fetches, contraction_fn):
    """Creates an _ElementFetchMapper.

    A leaf can expand to several tensors through the registered expansions.
    Each element of `fetches` may be a string name, a Tensor, an Operation,
    or anything else the graph can convert (e.g. a Variable), so every one
    is resolved through `as_graph_element()` to the concrete tensor or op.

    Args:
      fetches: List of objects, as returned by a fetch_fn defined
        in _REGISTERED_EXPANSIONS.
      contraction_fn: Callable as returned by a fetch_fn.
    """
    self._unique_fetches = []
    for fetch in fetches:
      try:
        self._unique_fetches.append(ops.get_default_graph().as_graph_element(
            fetch, allow_tensor=True, allow_operation=True))
      except TypeError as e:
        raise TypeError('Fetch argument %r has invalid type %r, '
                        'must be a string or Tensor. (%s)'
                        % (fetch, type(fetch), str(e)))
      except (ValueError, KeyError) as e:
        # as_graph_element signals unknown names with either exception;
        # both cases surface the same caller-facing error.
        raise ValueError('Fetch argument %r cannot be interpreted as a '
                         'Tensor. (%s)' % (fetch, str(e)))
    self._contraction_fn = contraction_fn

  def unique_fetches(self):
    return self._unique_fetches

  def build_results(self, values):
    # An empty value list means the leaf was an Operation, which has no
    # fetched value.
    if not values:
      return None
    return self._contraction_fn(values)
def _uniquify_fetches(fetch_mappers):
"""Uniquifies fetches from a list of fetch_mappers.
This is a utility function used by _ListFetchMapper and _DictFetchMapper. It
gathers all the unique fetches from a list of mappers and builds a list
containing all of them but without duplicates (unique_fetches).
It also returns a 2-D list of integers (values_indices) indicating at which
index in unique_fetches the fetches of the mappers are located.
This list is as follows:
values_indices[mapper_index][mapper_fetch_index] = unique_fetches_index
Args:
fetch_mappers: list of fetch mappers.
Returns:
A list of fetches.
A 2-D list of integers.
"""
unique_fetches = []
value_indices = []
seen_fetches = {}
for m in fetch_mappers:
m_value_indices = []
for f in m.unique_fetches():
j = seen_fetches.get(f)
if j is None:
j = len(seen_fetches)
seen_fetches[f] = j
unique_fetches.append(f)
m_value_indices.append(j)
value_indices.append(m_value_indices)
return unique_fetches, value_indices
class _ListFetchMapper(_FetchMapper):
  """Fetch mapper for lists, tuples, and namedtuples."""

  def __init__(self, fetches):
    """Creates a _ListFetchMapper.

    Args:
      fetches: List, tuple, or namedtuple of fetches.
    """
    self._fetch_type = type(fetches)
    self._mappers = [_FetchMapper.for_fetch(fetch) for fetch in fetches]
    self._unique_fetches, self._value_indices = _uniquify_fetches(self._mappers)

  def unique_fetches(self):
    return self._unique_fetches

  def build_results(self, values):
    # Let each child mapper rebuild its own slice of the fetched values.
    results = [m.build_results([values[j] for j in vi])
               for m, vi in zip(self._mappers, self._value_indices)]
    # Return a value of the original type of the fetches.
    if self._fetch_type == list:
      return results
    if self._fetch_type == tuple:
      return tuple(results)
    # Namedtuples are rebuilt from positional arguments.
    return self._fetch_type(*results)
class _DictFetchMapper(_FetchMapper):
  """Fetch mapper for dicts."""

  def __init__(self, fetches):
    """Creates a _DictFetchMapper.

    Args:
      fetches: Dict of fetches.
    """
    self._fetch_type = type(fetches)
    self._keys = fetches.keys()
    self._mappers = [
        _FetchMapper.for_fetch(fetch) for fetch in fetches.values()]
    self._unique_fetches, self._value_indices = _uniquify_fetches(self._mappers)

  def unique_fetches(self):
    return self._unique_fetches

  def build_results(self, values):
    # Instantiate the caller's dict type so e.g. OrderedDict is preserved.
    results = self._fetch_type()
    for key, mapper, indices in zip(self._keys, self._mappers,
                                    self._value_indices):
      results[key] = mapper.build_results([values[j] for j in indices])
    return results
class _FetchHandler(object):
  """Handler for structured fetches.

  Given a graph, a user-provided structure for fetches, and a feed dict, this
  class takes care of generating a list of tensor names to fetch and op names
  to run for a low level `run()` call.

  Given the results of the low level run call, this class can also rebuild a
  result structure matching the user-provided structure for fetches, but
  containing the corresponding results.
  """

  # TODO(touts): Make this class also take care of destructuring the feed
  # dict instead of doing it in the callers.

  def __init__(self, graph, fetches, feeds, feed_handles=None):
    """Creates a fetch handler.

    Args:
      graph: Graph of the fetches.  Used to check for fetchability
        and to convert all fetches to tensors or ops as needed.
      fetches: An arbitrary fetch structure: singleton, list, tuple,
        namedtuple, or dict.
      feeds: A feed dict where keys are Tensors.
      feed_handles: A dict from feed Tensors to TensorHandle objects used as
        direct feeds.
    """
    with graph.as_default():
      self._fetch_mapper = _FetchMapper.for_fetch(fetches)
      self._fetches = []
      self._targets = []
      self._feeds = feeds
      self._feed_handles = feed_handles or {}
      # _ops is parallel to the mapper's unique_fetches(): True marks an
      # Operation (goes to _targets, yields no value), False marks a tensor
      # (goes to _fetches).  build_results() relies on this ordering.
      self._ops = []
      self._fetch_handles = {}
      for fetch in self._fetch_mapper.unique_fetches():
        if isinstance(fetch, ops.Operation):
          self._assert_fetchable(graph, fetch)
          self._targets.append(fetch)
          self._ops.append(True)
        else:
          self._assert_fetchable(graph, fetch.op)
          self._fetches.append(fetch)
          self._ops.append(False)
          # Remember the fetch if it is for a tensor handle.
          if (isinstance(fetch, ops.Tensor) and
              (fetch.op.type == 'GetSessionHandle' or
               fetch.op.type == 'GetSessionHandleV2')):
            self._fetch_handles[fetch] = fetch.op.inputs[0].dtype
      # Fetches that are directly fed need not be requested from the
      # runtime; their values come straight from the feed dict.
      self._final_fetches = [x for x in self._fetches if x not in feeds]

  def _assert_fetchable(self, graph, op):
    # Raise early for ops the graph has explicitly marked unfetchable.
    if not graph.is_fetchable(op):
      raise ValueError(
          'Operation %r has been marked as not fetchable.' % op.name)

  def fetches(self):
    """Return the unique fetches to request from the low-level run.

    Returns:
      A list of the fetched tensors that are not directly fed (fed tensors
      are resolved from the feed dict instead).
    """
    return self._final_fetches

  def targets(self):
    """Return the unique ops to run.

    Returns:
      A list of `Operation` objects.
    """
    return self._targets

  def build_results(self, session, tensor_values):
    """Build results matching the original fetch shape.

    `tensor_values` must be a list of the same length as
    the one returned by `fetches()`, and holding the requested
    fetch values.

    This method builds a struct with the same shape as the original `fetches`
    passed to the constructor, in which the fetches are replaced by their
    fetched value.

    Args:
      session: The enclosing session.  Used for tensor handles.
      tensor_values: List of values matching the list returned
        by fetches().

    Returns:
      A structure of the same shape as the original `fetches` argument but
      containing tensors or None (for fetched ops).
    """
    full_values = []
    assert len(self._final_fetches) == len(tensor_values)
    # Walk _ops (the original unique-fetch order): `i` indexes _fetches
    # (every tensor fetch), `j` indexes tensor_values (only the fetches
    # actually returned by the runtime, i.e. those not satisfied by feeds).
    i = 0
    j = 0
    for is_op in self._ops:
      if is_op:
        full_values.append(None)
      else:
        # If the fetch was in the feeds, use the fed value, otherwise
        # use the returned value.
        if self._fetches[i] in self._feed_handles:
          # A fetch had a corresponding direct TensorHandle feed. Call eval()
          # to obtain the Tensor value from the TensorHandle.
          value = self._feed_handles[self._fetches[i]].eval()
        else:
          value = self._feeds.get(self._fetches[i])
        if value is None:
          value = tensor_values[j]
          j += 1
        dtype = self._fetch_handles.get(self._fetches[i])
        if dtype:
          # Wrap handle fetches back into TensorHandle objects.
          full_values.append(session_ops.TensorHandle(value, dtype, session))
        else:
          full_values.append(value)
        i += 1
    assert j == len(tensor_values)
    return self._fetch_mapper.build_results(full_values)
def _name_list(tensor_list):
  """Utility function for transitioning to the new session API.

  Args:
    tensor_list: a list of `Tensor`s.

  Returns:
    A list of each `Tensor`s name (as byte arrays).
  """
  return [compat.as_bytes(tensor.name) for tensor in tensor_list]
class _DeviceAttributes(object):
"""Struct-like object describing a device's attributes.
Each device has 3 key properties:
- name: the fully-qualified TensorFlow path to the device. For
example: /job:worker/replica:0/task:3/device:CPU:0
- device_type: the type of the device (e.g. CPU, GPU, TPU, etc.)
- memory_limit_bytes: the maximum amount of memory available on the device
(in bytes).
"""
def __init__(self, name, device_type, memory_limit_bytes):
self._name = device.canonical_name(name)
self._device_type = device_type
self._memory_limit_bytes = memory_limit_bytes
@property
def name(self):
return self._name
@property
def device_type(self):
return self._device_type
@property
def memory_limit_bytes(self):
return self._memory_limit_bytes
def __repr__(self):
return '_DeviceAttributes(%s, %s, %d)' % (self.name, self.device_type,
self.memory_limit_bytes,)
class BaseSession(SessionInterface):
"""A class for interacting with a TensorFlow computation.
The BaseSession enables incremental graph building with inline
execution of Operations and evaluation of Tensors.
"""
  def __init__(self, target='', graph=None, config=None):
    """Constructs a new TensorFlow session.

    Args:
      target: (Optional) The TensorFlow execution engine to connect to.
      graph: (Optional) The graph to be used. If this argument is None,
        the default graph will be used.
      config: (Optional) ConfigProto proto used to configure the session.

    Raises:
      tf.errors.OpError: Or one of its subclasses if an error occurs while
        creating the TensorFlow session.
      TypeError: If one of the arguments has the wrong type.
    """
    if graph is None:
      self._graph = ops.get_default_graph()
    else:
      if not isinstance(graph, ops.Graph):
        raise TypeError('graph must be a tf.Graph, but got %s' % type(graph))
      self._graph = graph
    self._opened = False
    self._closed = False
    self._current_version = 0
    self._extend_lock = threading.Lock()
    if target is not None:
      try:
        # Accepts str/bytes/unicode; anything else raises below.
        self._target = compat.as_bytes(target)
      except TypeError:
        raise TypeError('target must be a string, but got %s' % type(target))
    else:
      self._target = None
    self._delete_lock = threading.Lock()
    self._dead_handles = []
    if config is not None:
      if not isinstance(config, config_pb2.ConfigProto):
        raise TypeError('config must be a tf.ConfigProto, but got %s'
                        % type(config))
      self._config = config
      self._add_shapes = config.graph_options.infer_shapes
    else:
      self._config = None
      self._add_shapes = False
    # pylint: disable=protected-access
    # We cache _USE_C_API's value because some test cases will create a session
    # with _USE_C_API = False but set it back to True before calling close().
    self._created_with_new_api = ops._USE_C_API
    # pylint: enable=protected-access
    # Set _session before the C call so __del__ always finds the attribute,
    # even if session creation fails below.
    self._session = None
    opts = tf_session.TF_NewSessionOptions(target=self._target, config=config)
    try:
      with errors.raise_exception_on_not_ok_status() as status:
        if self._created_with_new_api:
          # pylint: disable=protected-access
          self._session = tf_session.TF_NewSession(self._graph._c_graph, opts,
                                                   status)
          # pylint: enable=protected-access
        else:
          self._session = tf_session.TF_NewDeprecatedSession(opts, status)
    finally:
      # The options struct is owned by this frame; free it unconditionally.
      tf_session.TF_DeleteSessionOptions(opts)
  def list_devices(self):
    """Lists available devices in this session.

    ```python
    devices = sess.list_devices()
    for d in devices:
      print(d.name)
    ```

    Each element in the list has the following properties:
     - `name`: A string with the full name of the device. ex:
          `/job:worker/replica:0/task:3/device:CPU:0`
     - `device_type`: The type of the device (e.g. `CPU`, `GPU`, `TPU`.)
     - `memory_limit`: The maximum amount of memory available on the device.
          Note: depending on the device, it is possible the usable memory could
          be substantially less.

    Raises:
      tf.errors.OpError: If it encounters an error (e.g. session is in an
      invalid state, or network errors occur).

    Returns:
      A list of devices in the session.
    """
    # The entire C-API interaction stays inside the status context so any
    # per-call error is surfaced as an OpError when the context exits.
    with errors.raise_exception_on_not_ok_status() as status:
      if self._created_with_new_api:
        raw_device_list = tf_session.TF_SessionListDevices(
            self._session, status)
      else:
        raw_device_list = tf_session.TF_DeprecatedSessionListDevices(
            self._session, status)
      device_list = []
      size = tf_session.TF_DeviceListCount(raw_device_list)
      for i in range(size):
        name = tf_session.TF_DeviceListName(raw_device_list, i, status)
        device_type = tf_session.TF_DeviceListType(raw_device_list, i, status)
        memory = tf_session.TF_DeviceListMemoryBytes(raw_device_list, i, status)
        device_list.append(_DeviceAttributes(name, device_type, memory))
      # Copy complete; release the C-owned list before returning.
      tf_session.TF_DeleteDeviceList(raw_device_list)
      return device_list
def close(self):
"""Closes this session.
Calling this method frees all resources associated with the session.
Raises:
tf.errors.OpError: Or one of its subclasses if an error occurs while
closing the TensorFlow session.
"""
if self._created_with_new_api:
if self._session and not self._closed:
self._closed = True
with errors.raise_exception_on_not_ok_status() as status:
tf_session.TF_CloseSession(self._session, status)
else:
with self._extend_lock:
if self._opened and not self._closed:
self._closed = True
with errors.raise_exception_on_not_ok_status() as status:
tf_session.TF_CloseDeprecatedSession(self._session, status)
  def __del__(self):
    # Close first; exceptions must never escape a finalizer, so
    # cleanly ignore all exceptions.
    try:
      self.close()
    except Exception:  # pylint: disable=broad-except
      pass
    if self._session is not None:
      try:
        status = c_api_util.ScopedTFStatus()
        if self._created_with_new_api:
          tf_session.TF_DeleteSession(self._session, status)
        else:
          tf_session.TF_DeleteDeprecatedSession(self._session, status)
      except AttributeError:
        # At shutdown, `c_api_util` or `tf_session` may have been garbage
        # collected, causing the above method calls to fail. In this case,
        # silently leak since the program is about to terminate anyway.
        pass
      # Drop the pointer so a second finalization is a no-op.
      self._session = None
  @property
  def graph(self):
    """The graph that was launched in this session.

    Returns:
      The `Graph` passed to the constructor, or the default graph captured
      at construction time.
    """
    return self._graph
  @property
  def graph_def(self):
    """A serializable version of the underlying TensorFlow graph.

    Returns:
      A graph_pb2.GraphDef proto containing nodes for all of the Operations in
      the underlying TensorFlow graph.
    """
    # Shape annotations are included only when the session was configured
    # with graph_options.infer_shapes (captured in __init__).
    return self._graph.as_graph_def(add_shapes=self._add_shapes)
  @property
  def sess_str(self):
    """The target string (as bytes) this session connects to."""
    return self._target
  def as_default(self):
    """Returns a context manager that makes this object the default session.

    Use with the `with` keyword to specify that calls to
    @{tf.Operation.run} or @{tf.Tensor.eval} should be executed in
    this session.

    ```python
    c = tf.constant(..)
    sess = tf.Session()

    with sess.as_default():
      assert tf.get_default_session() is sess
      print(c.eval())
    ```

    To get the current default session, use @{tf.get_default_session}.

    *N.B.* The `as_default` context manager *does not* close the
    session when you exit the context, and you must close the session
    explicitly.

    ```python
    c = tf.constant(...)
    sess = tf.Session()

    with sess.as_default():
      print(c.eval())
    # ...
    with sess.as_default():
      print(c.eval())

    sess.close()
    ```

    Alternatively, you can use `with tf.Session():` to create a
    session that is automatically closed on exiting the context,
    including when an uncaught exception is raised.

    *N.B.* The default session is a property of the current thread. If you
    create a new thread, and wish to use the default session in that
    thread, you must explicitly add a `with sess.as_default():` in that
    thread's function.

    *N.B.* Entering a `with sess.as_default():` block does not affect
    the current default graph. If you are using multiple graphs, and
    `sess.graph` is different from the value of @{tf.get_default_graph},
    you must explicitly enter a `with sess.graph.as_default():` block
    to make `sess.graph` the default graph.

    Returns:
      A context manager using this session as the default session.
    """
    # Delegates to the framework-level session stack; entering the returned
    # context pushes `self` as the current thread's default session.
    return ops.default_session(self)
  def run(self, fetches, feed_dict=None, options=None, run_metadata=None):
    """Runs operations and evaluates tensors in `fetches`.

    This method runs one "step" of TensorFlow computation, by
    running the necessary graph fragment to execute every `Operation`
    and evaluate every `Tensor` in `fetches`, substituting the values in
    `feed_dict` for the corresponding input values.

    The `fetches` argument may be a single graph element, or an arbitrarily
    nested list, tuple, namedtuple, dict, or OrderedDict containing graph
    elements at its leaves. A graph element can be one of the following types:

    * An @{tf.Operation}.
      The corresponding fetched value will be `None`.
    * A @{tf.Tensor}.
      The corresponding fetched value will be a numpy ndarray containing the
      value of that tensor.
    * A @{tf.SparseTensor}.
      The corresponding fetched value will be a
      @{tf.SparseTensorValue}
      containing the value of that sparse tensor.
    * A `get_tensor_handle` op. The corresponding fetched value will be a
      numpy ndarray containing the handle of that tensor.
    * A `string` which is the name of a tensor or operation in the graph.

    The value returned by `run()` has the same shape as the `fetches` argument,
    where the leaves are replaced by the corresponding values returned by
    TensorFlow.

    Example:

    ```python
    a = tf.constant([10, 20])
    b = tf.constant([1.0, 2.0])
    # 'fetches' can be a singleton
    v = session.run(a)
    # v is the numpy array [10, 20]
    # 'fetches' can be a list.
    v = session.run([a, b])
    # v is a Python list with 2 numpy arrays: the 1-D array [10, 20] and the
    # 1-D array [1.0, 2.0]
    # 'fetches' can be arbitrary lists, tuples, namedtuple, dicts:
    MyData = collections.namedtuple('MyData', ['a', 'b'])
    v = session.run({'k1': MyData(a, b), 'k2': [b, a]})
    # v is a dict with
    # v['k1'] is a MyData namedtuple with 'a' (the numpy array [10, 20]) and
    # 'b' (the numpy array [1.0, 2.0])
    # v['k2'] is a list with the numpy array [1.0, 2.0] and the numpy array
    # [10, 20].
    ```

    The optional `feed_dict` argument allows the caller to override
    the value of tensors in the graph. Each key in `feed_dict` can be
    one of the following types:

    * If the key is a @{tf.Tensor}, the
      value may be a Python scalar, string, list, or numpy ndarray
      that can be converted to the same `dtype` as that
      tensor. Additionally, if the key is a
      @{tf.placeholder}, the shape of
      the value will be checked for compatibility with the placeholder.
    * If the key is a
      @{tf.SparseTensor},
      the value should be a
      @{tf.SparseTensorValue}.
    * If the key is a nested tuple of `Tensor`s or `SparseTensor`s, the value
      should be a nested tuple with the same structure that maps to their
      corresponding values as above.

    Each value in `feed_dict` must be convertible to a numpy array of the dtype
    of the corresponding key.

    The optional `options` argument expects a [`RunOptions`] proto. The options
    allow controlling the behavior of this particular step (e.g. turning tracing
    on).

    The optional `run_metadata` argument expects a [`RunMetadata`] proto. When
    appropriate, the non-Tensor output of this step will be collected there. For
    example, when users turn on tracing in `options`, the profiled info will be
    collected into this argument and passed back.

    Args:
      fetches: A single graph element, a list of graph elements,
        or a dictionary whose values are graph elements or lists of graph
        elements (described above).
      feed_dict: A dictionary that maps graph elements to values
        (described above).
      options: A [`RunOptions`] protocol buffer
      run_metadata: A [`RunMetadata`] protocol buffer

    Returns:
      Either a single value if `fetches` is a single graph element, or
      a list of values if `fetches` is a list, or a dictionary with the
      same keys as `fetches` if that is a dictionary (described above).

    Raises:
      RuntimeError: If this `Session` is in an invalid state (e.g. has been
        closed).
      TypeError: If `fetches` or `feed_dict` keys are of an inappropriate type.
      ValueError: If `fetches` or `feed_dict` keys are invalid or refer to a
        `Tensor` that doesn't exist.
    """
    # Serialize RunOptions into a C buffer the runtime can read; allocate an
    # output buffer only when the caller wants RunMetadata back.
    options_ptr = tf_session.TF_NewBufferFromString(
        compat.as_bytes(options.SerializeToString())) if options else None
    run_metadata_ptr = tf_session.TF_NewBuffer() if run_metadata else None
    try:
      result = self._run(None, fetches, feed_dict, options_ptr,
                         run_metadata_ptr)
      # Copy any collected metadata back into the caller's proto.
      if run_metadata:
        proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)
        run_metadata.ParseFromString(compat.as_bytes(proto_data))
    finally:
      # Both buffers are owned by this frame; free them even if _run raised.
      if run_metadata_ptr:
        tf_session.TF_DeleteBuffer(run_metadata_ptr)
      if options:
        tf_session.TF_DeleteBuffer(options_ptr)
    return result
  def partial_run(self, handle, fetches, feed_dict=None):
    """Continues the execution with more feeds and fetches.

    This is EXPERIMENTAL and subject to change.

    To use partial execution, a user first calls `partial_run_setup()` and
    then a sequence of `partial_run()`. `partial_run_setup` specifies the
    list of feeds and fetches that will be used in the subsequent
    `partial_run` calls.

    The optional `feed_dict` argument allows the caller to override
    the value of tensors in the graph. See run() for more information.

    Below is a simple example:

    ```python
    a = array_ops.placeholder(dtypes.float32, shape=[])
    b = array_ops.placeholder(dtypes.float32, shape=[])
    c = array_ops.placeholder(dtypes.float32, shape=[])
    r1 = math_ops.add(a, b)
    r2 = math_ops.multiply(r1, c)

    h = sess.partial_run_setup([r1, r2], [a, b, c])
    res = sess.partial_run(h, r1, feed_dict={a: 1, b: 2})
    res = sess.partial_run(h, r2, feed_dict={c: res})
    ```

    Args:
      handle: A handle for a sequence of partial runs.
      fetches: A single graph element, a list of graph elements,
        or a dictionary whose values are graph elements or lists of graph
        elements (see documentation for `run`).
      feed_dict: A dictionary that maps graph elements to values
        (described above).

    Returns:
      Either a single value if `fetches` is a single graph element, or
      a list of values if `fetches` is a list, or a dictionary with the
      same keys as `fetches` if that is a dictionary
      (see documentation for `run`).

    Raises:
      tf.errors.OpError: Or one of its subclasses on error.
    """
    # TODO(touts): Support feeding and fetching the same tensor.
    # A non-None handle routes _run() down the partial-run code path.
    return self._run(handle, fetches, feed_dict, None, None)
  def partial_run_setup(self, fetches, feeds=None):
    """Sets up a graph with feeds and fetches for partial run.

    This is EXPERIMENTAL and subject to change.

    Note that contrary to `run`, `feeds` only specifies the graph elements.
    The tensors will be supplied by the subsequent `partial_run` calls.

    Args:
      fetches: A single graph element, or a list of graph elements.
      feeds: A single graph element, or a list of graph elements.

    Returns:
      A handle for partial run.

    Raises:
      RuntimeError: If this `Session` is in an invalid state (e.g. has been
        closed).
      TypeError: If `fetches` or `feed_dict` keys are of an inappropriate type.
      tf.errors.OpError: Or one of its subclasses if a TensorFlow error happens.
    """
    # Expands a feed into its component tensors via the registered
    # partial-run feed functions (feed_fn2 slot of _REGISTERED_EXPANSIONS).
    def _feed_fn(feed):
      for tensor_type, _, _, feed_fn in _REGISTERED_EXPANSIONS:
        if isinstance(feed, tensor_type):
          return feed_fn(feed)
      raise TypeError('Feed argument %r has invalid type %r'
                      % (feed, type(feed)))
    # Check session.
    if self._closed:
      raise RuntimeError('Attempted to use a closed Session.')
    if self.graph.version == 0:
      raise RuntimeError('The Session graph is empty. Add operations to the '
                         'graph before calling run().')
    if feeds is None:
      feeds = []
    # Create request.
    feed_list = []
    # Validate and process feed_list.
    is_list_feed = isinstance(feeds, (list, tuple))
    if not is_list_feed:
      feeds = [feeds]
    for feed in feeds:
      for subfeed in _feed_fn(feed):
        try:
          subfeed_t = self.graph.as_graph_element(subfeed, allow_tensor=True,
                                                  allow_operation=False)
          if self._created_with_new_api:
            # pylint: disable=protected-access
            feed_list.append(subfeed_t._as_tf_output())
            # pylint: enable=protected-access
          else:
            feed_list.append(compat.as_bytes(subfeed_t.name))
        except Exception as e:
          # NOTE(review): `e.message` only exists on Python 2 exceptions;
          # on Python 3 this would raise AttributeError instead — verify
          # the intended Python version before relying on this path.
          e.message = ('Cannot interpret feed_list key as Tensor: '
                       + e.message)
          e.args = (e.message,)
          raise e
    # Validate and process fetches.
    # TODO(touts): Support feeding and fetching the same tensor.
    fetch_handler = _FetchHandler(self._graph, fetches, {})
    # Set up a graph with feeds and fetches for partial run.
    def _setup_fn(session, feed_list, fetch_list, target_list):
      self._extend_graph()
      with errors.raise_exception_on_not_ok_status() as status:
        if self._created_with_new_api:
          return tf_session.TF_SessionPRunSetup_wrapper(
              session, feed_list, fetch_list, target_list, status)
        else:
          return tf_session.TF_PRunSetup(session, feed_list, fetch_list,
                                         target_list, status)
    # The two API generations address fetches/targets differently:
    # TF_Output structs for the C API, byte-string names for the old one.
    if self._created_with_new_api:
      # pylint: disable=protected-access
      final_fetches = [t._as_tf_output() for t in fetch_handler.fetches()]
      final_targets = [op._c_op for op in fetch_handler.targets()]
      # pylint: enable=protected-access
    else:
      final_fetches = _name_list(fetch_handler.fetches())
      final_targets = _name_list(fetch_handler.targets())
    return self._do_call(_setup_fn, self._session, feed_list, final_fetches,
                         final_targets)
  def _run(self, handle, fetches, feed_dict, options, run_metadata):
    """Perform either run or partial_run, depending the presence of `handle`.

    Args:
      handle: A partial-run handle from `partial_run_setup()`, or None for a
        regular `run()` call.
      fetches: The (possibly nested) fetch structure requested by the caller.
      feed_dict: Mapping of feedable tensors (or things expandable to them)
        to values, or None.
      options: A `RunOptions` protocol buffer, or None.
      run_metadata: A `RunMetadata` protocol buffer to populate, or None.

    Returns:
      Results structured like `fetches`, built by a `_FetchHandler`.
    """
    # Expand composite feeds (registered in _REGISTERED_EXPANSIONS) into
    # their component (tensor, value) pairs.
    def _feed_fn(feed, feed_val):
      for tensor_type, _, feed_fn, _ in _REGISTERED_EXPANSIONS:
        if isinstance(feed, tensor_type):
          return feed_fn(feed, feed_val)
      raise TypeError('Feed argument %r has invalid type %r'
                      % (feed, type(feed)))
    # Check session.
    if self._closed:
      raise RuntimeError('Attempted to use a closed Session.')
    if self.graph.version == 0:
      raise RuntimeError('The Session graph is empty. Add operations to the '
                         'graph before calling run().')
    # Create request.
    feed_dict_tensor = {}
    feed_map = {}
    # Validate and process feed_dict.
    feed_handles = {}
    if feed_dict:
      feed_dict = nest.flatten_dict_items(feed_dict)
      for feed, feed_val in feed_dict.items():
        for subfeed, subfeed_val in _feed_fn(feed, feed_val):
          try:
            # Resolve names/tensors to a Tensor in this session's graph.
            subfeed_t = self.graph.as_graph_element(subfeed, allow_tensor=True,
                                                    allow_operation=False)
          except Exception as e:
            raise TypeError('Cannot interpret feed_dict key as Tensor: '
                            + e.args[0])
          if isinstance(subfeed_val, ops.Tensor):
            raise TypeError('The value of a feed cannot be a tf.Tensor object. '
                            'Acceptable feed values include Python scalars, '
                            'strings, lists, numpy ndarrays, or TensorHandles.')
          subfeed_dtype = subfeed_t.dtype.as_numpy_dtype
          # Catch Python ints that would silently overflow/truncate when
          # converted to the tensor's numpy dtype.
          if isinstance(subfeed_val,
                        int) and subfeed_dtype(subfeed_val) != subfeed_val:
            raise TypeError(
                'Type of feed value ' + str(subfeed_val) + ' is not'
                ' compatible with Tensor type ' + str(subfeed_dtype) + '.'
                ' Try explicitly setting the type of the feed tensor'
                ' to a larger type (e.g. int64).')
          is_tensor_handle_feed = isinstance(subfeed_val,
                                             session_ops.TensorHandle)
          if is_tensor_handle_feed:
            np_val = subfeed_val.to_numpy_array()
            feed_handles[subfeed_t] = subfeed_val
          else:
            np_val = np.asarray(subfeed_val, dtype=subfeed_dtype)
          # Shape checking is skipped for handle feeds: the handle's payload
          # lives on-device and its numpy form is just an identifier.
          if (not is_tensor_handle_feed and
              not subfeed_t.get_shape().is_compatible_with(np_val.shape)):
            raise ValueError(
                'Cannot feed value of shape %r for Tensor %r, '
                'which has shape %r'
                % (np_val.shape, subfeed_t.name, str(subfeed_t.get_shape())))
          if not self.graph.is_feedable(subfeed_t):
            raise ValueError('Tensor %s may not be fed.' % subfeed_t)
          feed_dict_tensor[subfeed_t] = np_val
          feed_map[compat.as_bytes(subfeed_t.name)] = (subfeed_t, subfeed_val)
    # Create a fetch handler to take care of the structure of fetches.
    fetch_handler = _FetchHandler(
        self._graph, fetches, feed_dict_tensor, feed_handles=feed_handles)
    # Run request and get response.
    # We need to keep the returned movers alive for the following _do_run().
    # These movers are no longer needed when _do_run() completes, and
    # are deleted when `movers` goes out of scope when this _run() ends.
    # TODO(yuanbyu, keveman): Revisit whether we should just treat feeding
    # of a handle from a different device as an error.
    _ = self._update_with_movers(feed_dict_tensor, feed_map)
    final_fetches = fetch_handler.fetches()
    final_targets = fetch_handler.targets()
    # We only want to really perform the run if fetches or targets are provided,
    # or if the call is a partial run that specifies feeds.
    if final_fetches or final_targets or (handle and feed_dict_tensor):
      results = self._do_run(handle, final_targets, final_fetches,
                             feed_dict_tensor, options, run_metadata)
    else:
      results = []
    return fetch_handler.build_results(self, results)
  def make_callable(self,
                    fetches,
                    feed_list=None,
                    accept_options=False):
    """Returns a Python callable that runs a particular step.

    The returned callable will take `len(feed_list)` arguments whose types
    must be compatible feed values for the respective elements of `feed_list`.
    For example, if element `i` of `feed_list` is a `tf.Tensor`, the `i`th
    argument to the returned callable must be a numpy ndarray (or something
    convertible to an ndarray) with matching element type and shape. See
    @{tf.Session.run} for details of the allowable feed key and value types.

    The returned callable will have the same return type as
    `tf.Session.run(fetches, ...)`. For example, if `fetches` is a `tf.Tensor`,
    the callable will return a numpy ndarray; if `fetches` is a `tf.Operation`,
    it will return `None`.

    Args:
      fetches: A value or list of values to fetch. See @{tf.Session.run}
        for details of the allowable fetch types.
      feed_list: (Optional.) A list of `feed_dict` keys. See
        @{tf.Session.run} for details of the allowable feed key types.
      accept_options: (Optional.) Iff `True`, the returned `Callable` will be
        able to accept @{tf.RunOptions} and @{tf.RunMetadata} as optional
        keyword arguments `options` and `run_metadata`, respectively, with
        the same syntax and semantics as @{tf.Session.run}, which is useful
        for certain use cases (profiling and debugging) but will result in
        measurable slowdown of the `Callable`'s performance. Default: `False`.

    Returns:
      A function that when called will execute the step defined by
      `feed_list` and `fetches` in this session.

    Raises:
      TypeError: If `fetches` or `feed_list` cannot be interpreted
        as arguments to @{tf.Session.run}.
    """
    # The fetch/target name lists are resolved once here, so the returned
    # closure avoids that per-call overhead.
    assert not self._created_with_new_api, ('session.make_callable() doesn\'t '
                                            'work with C API')
    if feed_list is not None:
      if not isinstance(feed_list, (list, tuple)):
        raise TypeError('`feed_list` must be a list or tuple.')
      # Delegate any non-empty feed lists to the existing `run()` logic.
      # TODO(mrry): Refactor the feed handling logic from
      # `Session._run()` so that we can convert the feeds to a list of
      # strings here.
      def _generic_run(*feed_args, **kwargs):
        feed_dict = {feed: feed_val
                     for feed, feed_val in zip(feed_list, feed_args)}
        return self.run(fetches, feed_dict=feed_dict, **kwargs)
      return _generic_run
    # Ensure any changes to the graph are reflected in the runtime.
    # Note that we don't need to do this on subsequent calls to the
    # returned object, because the arguments to `fetches` must already be
    # in the graph.
    self._extend_graph()
    # Create a fetch handler to take care of the structure of fetches.
    fetch_handler = _FetchHandler(self._graph, fetches, {})
    fetch_list_as_strings = _name_list(fetch_handler.fetches())
    target_list_as_strings = _name_list(fetch_handler.targets())
    def _callable_template_with_options_and_metadata(
        fetch_list_as_strings,
        target_list_as_strings,
        fetch_handler,
        options=None,
        run_metadata=None):
      """Template callable that accepts RunOptions and RunMetadata."""
      # Serialize RunOptions into a TF_Buffer the C API can consume.
      options_ptr = tf_session.TF_NewBufferFromString(
          compat.as_bytes(options.SerializeToString())) if options else None
      run_metadata_ptr = tf_session.TF_NewBuffer() if run_metadata else None
      try:
        with errors.raise_exception_on_not_ok_status() as status:
          results = tf_session.TF_Run(
              self._session, options_ptr, {}, fetch_list_as_strings,
              target_list_as_strings, status, run_metadata_ptr)
        if fetch_handler:
          results = fetch_handler.build_results(self, results)
        else:
          results = results[0] if results else None
        if run_metadata:
          proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)
          run_metadata.ParseFromString(compat.as_bytes(proto_data))
      finally:
        # The TF_Buffer objects are owned here and must always be freed.
        if run_metadata_ptr:
          tf_session.TF_DeleteBuffer(run_metadata_ptr)
        if options:
          tf_session.TF_DeleteBuffer(options_ptr)
      return results
    if accept_options:
      return functools.partial(
          _callable_template_with_options_and_metadata, fetch_list_as_strings,
          target_list_as_strings, fetch_handler)
    elif isinstance(fetches, ops.Operation):
      # Special case for fetching a single operation, because the
      # function will have no return value.
      assert not fetch_list_as_strings
      assert len(target_list_as_strings) == 1
      def _single_operation_run():
        with errors.raise_exception_on_not_ok_status() as status:
          tf_session.TF_Run(self._session, None, {}, [],
                            target_list_as_strings, status, None)
      return _single_operation_run
    elif isinstance(fetches, ops.Tensor):
      # Special case for fetching a single tensor, because the
      # function can return the result of `TF_Run()` directly.
      assert len(fetch_list_as_strings) == 1
      assert not target_list_as_strings
      def _single_tensor_run():
        with errors.raise_exception_on_not_ok_status() as status:
          results = tf_session.TF_Run(self._session, None, {},
                                      fetch_list_as_strings, [], status, None)
        return results[0]
      return _single_tensor_run
    else:
      # In all other cases, we must use `fetch_handler` to build the
      # results for us.
      def _fetch_handler_run():
        with errors.raise_exception_on_not_ok_status() as status:
          results = tf_session.TF_Run(self._session, None, {},
                                      fetch_list_as_strings,
                                      target_list_as_strings, status, None)
        return fetch_handler.build_results(self, results)
      return _fetch_handler_run
# Captures the name of a node in an error status.
_NODEDEF_NAME_RE = re.compile(r'\[\[Node: ([^ ]*?) =')
  def _do_run(self, handle, target_list, fetch_list, feed_dict,
              options, run_metadata):
    """Runs a step based on the given fetches and feeds.

    Args:
      handle: a handle for partial_run. None if this is just a call to run().
      target_list: A list of operations to be run, but not fetched.
      fetch_list: A list of tensors to be fetched.
      feed_dict: A dictionary that maps tensors to numpy ndarrays.
      options: A (pointer to a) [`RunOptions`] protocol buffer, or None
      run_metadata: A (pointer to a) [`RunMetadata`] protocol buffer, or None

    Returns:
      A list of numpy ndarrays, corresponding to the elements of
      `fetch_list`. If the ith element of `fetch_list` contains the
      name of an operation, the first Tensor output of that operation
      will be returned for that element.

    Raises:
      tf.errors.OpError: Or one of its subclasses on error.
    """
    # Convert feeds/fetches/targets to the representation expected by the
    # active API: TF_Output/C-op handles for the new API, names for the old.
    if self._created_with_new_api:
      # pylint: disable=protected-access
      feeds = dict((t._as_tf_output(), v) for t, v in feed_dict.items())
      fetches = [t._as_tf_output() for t in fetch_list]
      targets = [op._c_op for op in target_list]
      # pylint: enable=protected-access
    else:
      feeds = dict((compat.as_bytes(t.name), v) for t, v in feed_dict.items())
      fetches = _name_list(fetch_list)
      targets = _name_list(target_list)
    # Full-run path (handle is None).
    def _run_fn(session, feed_dict, fetch_list, target_list, options,
                run_metadata):
      # Ensure any changes to the graph are reflected in the runtime.
      self._extend_graph()
      with errors.raise_exception_on_not_ok_status() as status:
        if self._created_with_new_api:
          return tf_session.TF_SessionRun_wrapper(
              session, options, feed_dict, fetch_list, target_list,
              run_metadata, status)
        else:
          return tf_session.TF_Run(session, options,
                                   feed_dict, fetch_list, target_list,
                                   status, run_metadata)
    # Partial-run path (handle from partial_run_setup()).
    def _prun_fn(session, handle, feed_dict, fetch_list):
      if target_list:
        raise RuntimeError('partial_run() requires empty target_list.')
      with errors.raise_exception_on_not_ok_status() as status:
        if self._created_with_new_api:
          return tf_session.TF_SessionPRun_wrapper(session, handle, feed_dict,
                                                   fetch_list, status)
        else:
          return tf_session.TF_PRun(session, handle, feed_dict, fetch_list,
                                    status)
    if handle is None:
      return self._do_call(_run_fn, self._session, feeds, fetches, targets,
                           options, run_metadata)
    else:
      return self._do_call(_prun_fn, self._session, handle, feeds, fetches)
def _do_call(self, fn, *args):
try:
return fn(*args)
except errors.OpError as e:
message = compat.as_text(e.message)
m = BaseSession._NODEDEF_NAME_RE.search(message)
node_def = None
op = None
if m is not None:
node_name = m.group(1)
try:
op = self._graph.get_operation_by_name(node_name)
node_def = op.node_def
except KeyError:
pass
raise type(e)(node_def, op, message)
  def _extend_graph(self):
    """Push any graph nodes added since the last call to the C runtime.

    No-op under the new session API, and also when the graph version has not
    advanced. Serialized under `_extend_lock` so concurrent run() calls do
    not race on `_current_version`.
    """
    # Nothing to do if we're using the new session interface
    # TODO(skyewm): remove this function altogether eventually
    if self._created_with_new_api: return
    # Ensure any changes to the graph are reflected in the runtime.
    with self._extend_lock:
      if self._graph.version > self._current_version:
        # pylint: disable=protected-access
        graph_def, self._current_version = self._graph._as_graph_def(
            from_version=self._current_version,
            add_shapes=self._add_shapes)
        # pylint: enable=protected-access
        with errors.raise_exception_on_not_ok_status() as status:
          tf_session.TF_ExtendGraph(
              self._session, graph_def.SerializeToString(), status)
        self._opened = True
# The threshold to run garbage collection to delete dead tensors.
_DEAD_HANDLES_THRESHOLD = 10
def _register_dead_handle(self, handle):
# Register a dead handle in the session. Delete the dead tensors when
# the number of dead tensors exceeds certain threshold.
tensors_to_delete = None
with self._delete_lock:
self._dead_handles.append(handle)
if len(self._dead_handles) == BaseSession._DEAD_HANDLES_THRESHOLD:
tensors_to_delete = self._dead_handles
self._dead_handles = []
# Delete the dead tensors.
if tensors_to_delete:
feeds = {}
fetches = []
for deleter_key, tensor_handle in enumerate(tensors_to_delete):
holder, deleter = session_ops._get_handle_deleter(self.graph,
deleter_key,
tensor_handle)
feeds[holder] = tensor_handle
fetches.append(deleter)
self.run(fetches, feed_dict=feeds)
  def _update_with_movers(self, feed_dict, feed_map):
    """Re-home fed TensorHandles onto compatible devices, updating feeds.

    Args:
      feed_dict: Mapping of Tensor -> numpy value; mutated in place when a
        handle has to be moved.
      feed_map: Mapping of feed name (bytes) -> (Tensor, original fed value),
        scanned here for TensorHandle feeds needing a mover.

    Returns:
      The list of newly created handles (caller must keep them alive until
      the following _do_run() completes), or [] when nothing was moved.
    """
    # If a tensor handle that is fed to a device incompatible placeholder,
    # we move the tensor to the right device, generate a new tensor handle,
    # and update `feed_dict` to use the new handle.
    handle_movers = []
    for feed_name, val in feed_map.items():
      mover = session_ops._get_handle_mover(self.graph, *val)
      if mover:
        handle_movers.append((feed_name, val[1], mover))
    # Transfer a tensor to the right device if needed.
    if not handle_movers:
      return []
    else:
      feeds = {}
      fetches = []
      for _, handle, mover in handle_movers:
        feeds[mover[0]] = handle
        fetches.append(mover[1])
      # One run() moves every handle that needs it.
      handles = self.run(fetches, feed_dict=feeds)
      for handle_mover, handle in zip(handle_movers, handles):
        # NOTE(review): np.object is a deprecated alias (removed in NumPy
        # 1.24); on modern NumPy this would need to be plain `object`.
        np_val = np.array(handle.handle, dtype=np.object)
        feed_name = handle_mover[0]
        feed_tensor = feed_map[feed_name][0]
        feed_dict[feed_tensor] = np_val
      return handles
class Session(BaseSession):
  """A class for running TensorFlow operations.

  A `Session` object encapsulates the environment in which `Operation`
  objects are executed, and `Tensor` objects are evaluated. For
  example:

  ```python
  # Build a graph.
  a = tf.constant(5.0)
  b = tf.constant(6.0)
  c = a * b

  # Launch the graph in a session.
  sess = tf.Session()

  # Evaluate the tensor `c`.
  print(sess.run(c))
  ```

  A session may own resources, such as
  @{tf.Variable}, @{tf.QueueBase},
  and @{tf.ReaderBase}. It is important to release
  these resources when they are no longer required. To do this, either
  invoke the @{tf.Session.close} method on the session, or use
  the session as a context manager. The following two examples are
  equivalent:

  ```python
  # Using the `close()` method.
  sess = tf.Session()
  sess.run(...)
  sess.close()

  # Using the context manager.
  with tf.Session() as sess:
    sess.run(...)
  ```

  The [`ConfigProto`](https://www.tensorflow.org/code/tensorflow/core/protobuf/config.proto)
  protocol buffer exposes various configuration options for a
  session. For example, to create a session that uses soft constraints
  for device placement, and log the resulting placement decisions,
  create a session as follows:

  ```python
  # Launch the graph in a session that allows soft device placement and
  # logs the placement decisions.
  sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True,
                                          log_device_placement=True))
  ```
  """

  def __init__(self, target='', graph=None, config=None):
    """Creates a new TensorFlow session.

    If no `graph` argument is specified when constructing the session,
    the default graph will be launched in the session. If you are
    using more than one graph (created with `tf.Graph()` in the same
    process, you will have to use different sessions for each graph,
    but each graph can be used in multiple sessions. In this case, it
    is often clearer to pass the graph to be launched explicitly to
    the session constructor.

    Args:
      target: (Optional.) The execution engine to connect to.
        Defaults to using an in-process engine. See
        @{$distributed$Distributed TensorFlow}
        for more examples.
      graph: (Optional.) The `Graph` to be launched (described above).
      config: (Optional.) A [`ConfigProto`](https://www.tensorflow.org/code/tensorflow/core/protobuf/config.proto)
        protocol buffer with configuration options for the session.
    """
    super(Session, self).__init__(target, graph, config=config)
    # NOTE(mrry): Create these on first `__enter__` to avoid a reference cycle.
    self._default_graph_context_manager = None
    self._default_session_context_manager = None

  def __enter__(self):
    """Enters the default-graph and default-session contexts (not re-entrant)."""
    if self._default_graph_context_manager is None:
      self._default_graph_context_manager = self.graph.as_default()
    else:
      raise RuntimeError('Session context managers are not re-entrant. '
                         'Use `Session.as_default()` if you want to enter '
                         'a session multiple times.')
    if self._default_session_context_manager is None:
      self._default_session_context_manager = self.as_default()
    self._default_graph_context_manager.__enter__()
    return self._default_session_context_manager.__enter__()

  def __exit__(self, exec_type, exec_value, exec_tb):
    """Exits the contexts entered in `__enter__` and closes the session."""
    # BUG FIX: the previous identity test `exec_type is errors.OpError`
    # never matched in practice, because the runtime raises *subclasses* of
    # OpError (e.g. InvalidArgumentError), so the error was never logged.
    # Use issubclass(), guarding against exec_type being None on a clean exit.
    if exec_type is not None and issubclass(exec_type, errors.OpError):
      logging.error('Session closing due to OpError: %s', (exec_value,))
    self._default_session_context_manager.__exit__(
        exec_type, exec_value, exec_tb)
    self._default_graph_context_manager.__exit__(exec_type, exec_value, exec_tb)
    # Reset so a subsequent __enter__ recreates fresh context managers.
    self._default_session_context_manager = None
    self._default_graph_context_manager = None
    self.close()

  @staticmethod
  def reset(target, containers=None, config=None):
    """Resets resource containers on `target`, and close all connected sessions.

    A resource container is distributed across all workers in the
    same cluster as `target`. When a resource container on `target`
    is reset, resources associated with that container will be cleared.
    In particular, all Variables in the container will become undefined:
    they lose their values and shapes.

    NOTE:
    (i) reset() is currently only implemented for distributed sessions.
    (ii) Any sessions on the master named by `target` will be closed.

    If no resource containers are provided, all containers are reset.

    Args:
      target: The execution engine to connect to.
      containers: A list of resource container name strings, or `None` if all of
        all the containers are to be reset.
      config: (Optional.) Protocol buffer with configuration options.

    Raises:
      tf.errors.OpError: Or one of its subclasses if an error occurs while
        resetting containers.
    """
    if target is not None:
      target = compat.as_bytes(target)
    if containers is not None:
      containers = [compat.as_bytes(c) for c in containers]
    else:
      containers = []
    tf_session.TF_Reset(target, containers, config)
class InteractiveSession(BaseSession):
  """A TensorFlow `Session` for use in interactive contexts, such as a shell.

  The only difference with a regular `Session` is that an `InteractiveSession`
  installs itself as the default session on construction.
  The methods @{tf.Tensor.eval}
  and @{tf.Operation.run}
  will use that session to run ops.

  This is convenient in interactive shells and [IPython
  notebooks](http://ipython.org), as it avoids having to pass an explicit
  `Session` object to run ops.

  For example:

  ```python
  sess = tf.InteractiveSession()
  a = tf.constant(5.0)
  b = tf.constant(6.0)
  c = a * b
  # We can just use 'c.eval()' without passing 'sess'
  print(c.eval())
  sess.close()
  ```

  Note that a regular session installs itself as the default session when it
  is created in a `with` statement. The common usage in non-interactive
  programs is to follow that pattern:

  ```python
  a = tf.constant(5.0)
  b = tf.constant(6.0)
  c = a * b
  with tf.Session():
    # We can also use 'c.eval()' here.
    print(c.eval())
  ```
  """

  def __init__(self, target='', graph=None, config=None):
    """Creates a new interactive TensorFlow session.

    If no `graph` argument is specified when constructing the session,
    the default graph will be launched in the session. If you are
    using more than one graph (created with `tf.Graph()` in the same
    process, you will have to use different sessions for each graph,
    but each graph can be used in multiple sessions. In this case, it
    is often clearer to pass the graph to be launched explicitly to
    the session constructor.

    Args:
      target: (Optional.) The execution engine to connect to.
        Defaults to using an in-process engine.
      graph: (Optional.) The `Graph` to be launched (described above).
      config: (Optional) `ConfigProto` proto used to configure the session.
    """
    if not config:
      # If config is not provided, choose some reasonable defaults for
      # interactive use:
      #
      # - Grow GPU memory as needed at the cost of fragmentation.
      gpu_options = config_pb2.GPUOptions(allow_growth=True)
      config = config_pb2.ConfigProto(gpu_options=gpu_options)
    # Interactive sessions always place pruned graphs.
    config.graph_options.place_pruned_graph = True
    super(InteractiveSession, self).__init__(target, graph, config)
    # Install this session as the (non-nesting) default so Tensor.eval() and
    # Operation.run() find it without an explicit `with` block.
    self._default_session = self.as_default()
    self._default_session.enforce_nesting = False
    self._default_session.__enter__()
    # If a graph was passed explicitly, also install it as the default graph
    # for the lifetime of this session; close() unwinds it.
    self._explicit_graph = graph
    if self._explicit_graph is not None:
      self._default_graph = graph.as_default()
      self._default_graph.enforce_nesting = False
      self._default_graph.__enter__()

  def close(self):
    """Closes an `InteractiveSession`."""
    super(InteractiveSession, self).close()
    # Unwind the default-graph/default-session contexts entered in __init__
    # (the graph context only exists when a graph was passed explicitly).
    if self._explicit_graph is not None:
      self._default_graph.__exit__(None, None, None)
    self._default_session.__exit__(None, None, None)
| apache-2.0 |
datawire/qpid-proton | proton-c/env.py | 12 | 2444 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
# A platform-agnostic tool for running a program in a modified environment.
#
import sys
import os
import subprocess
from optparse import OptionParser
def main(argv=None):
    """Run a command in a modified environment.

    Leading ``VAR=VALUE`` arguments update the environment (an empty VALUE
    removes the variable); the remaining arguments are the command to run.
    When both ``VALGRIND`` and ``VALGRIND_ALL`` are set in the resulting
    environment, the command is wrapped in a valgrind invocation.

    Args:
        argv: Argument list to parse; None means sys.argv[1:] (optparse
            default).

    Returns:
        The exit status of the executed command.

    Raises:
        Exception: If an environment argument has an empty variable name, or
            no command is given.
    """
    parser = OptionParser(usage="Usage: %prog [options] [--] VAR=VALUE... command [options] arg1 arg2...")
    parser.add_option("-i", "--ignore-environment",
                      action="store_true", default=False,
                      help="Start with an empty environment (do not inherit current environment)")
    (options, args) = parser.parse_args(args=argv)
    if options.ignore_environment:
        new_env = {}
    else:
        new_env = os.environ.copy()
    # Consume leading VAR=VALUE arguments.
    while len(args):
        z = args[0].split("=", 1)
        if len(z) != 2:
            break  # done with env args
        if len(z[0]) == 0:
            # BUG FIX: this used args[x] ('x' was undefined), which raised a
            # NameError instead of the intended error message.
            raise Exception("Error: incorrect format for env var: '%s'" % str(args[0]))
        del args[0]
        if len(z[1]) == 0:
            # Empty value: remove the variable from the environment.
            if z[0] in new_env:
                del new_env[z[0]]
        else:
            new_env[z[0]] = z[1]
    if len(args) == 0 or len(args[0]) == 0:
        raise Exception("Error: syntax error in command arguments")
    if new_env.get("VALGRIND") and new_env.get("VALGRIND_ALL"):
        # Python generates a lot of possibly-lost errors that are not errors, don't show them.
        args = [new_env.get("VALGRIND"), "--show-reachable=no", "--show-possibly-lost=no",
                "--error-exitcode=42"] + args
    p = subprocess.Popen(args, env=new_env)
    return p.wait()
# Script entry point: propagate the child process's exit status.
if __name__ == "__main__":
    sys.exit(main())
| apache-2.0 |
Kazade/NeHe-Website | google_appengine/lib/django-1.4/django/core/management/commands/shell.py | 92 | 3142 | import os
from django.core.management.base import NoArgsCommand
from optparse import make_option
class Command(NoArgsCommand):
    """Management command that starts an interactive Python interpreter.

    Tries each shell in `shells` (IPython, then bpython) and falls back to
    the stdlib `code` REPL; `--plain` forces the plain fallback.
    """
    option_list = NoArgsCommand.option_list + (
        make_option('--plain', action='store_true', dest='plain',
            help='Tells Django to use plain Python, not IPython.'),
    )
    help = "Runs a Python interactive interpreter. Tries to use IPython, if it's available."
    # Names of methods on this class, tried in order by run_shell().
    shells = ['ipython', 'bpython']
    requires_model_validation = False
    def ipython(self):
        """Start an IPython shell, supporting both modern and pre-0.11 APIs."""
        try:
            from IPython import embed
            embed()
        except ImportError:
            # IPython < 0.11
            # Explicitly pass an empty list as arguments, because otherwise
            # IPython would use sys.argv from this script.
            try:
                from IPython.Shell import IPShell
                shell = IPShell(argv=[])
                shell.mainloop()
            except ImportError:
                # IPython not found at all, raise ImportError
                raise
    def bpython(self):
        """Start a bpython shell."""
        import bpython
        bpython.embed()
    def run_shell(self):
        """Try each shell in `shells`; raise ImportError if none is installed."""
        for shell in self.shells:
            try:
                return getattr(self, shell)()
            except ImportError:
                pass
        raise ImportError
    def handle_noargs(self, **options):
        """Entry point: launch the best available shell (ImportError selects the fallback)."""
        # XXX: (Temporary) workaround for ticket #1796: force early loading of all
        # models from installed apps.
        from django.db.models.loading import get_models
        get_models()
        use_plain = options.get('plain', False)
        try:
            if use_plain:
                # Don't bother loading IPython, because the user wants plain Python.
                raise ImportError
            self.run_shell()
        except ImportError:
            import code
            # Set up a dictionary to serve as the environment for the shell, so
            # that tab completion works on objects that are imported at runtime.
            # See ticket 5082.
            imported_objects = {}
            try: # Try activating rlcompleter, because it's handy.
                import readline
            except ImportError:
                pass
            else:
                # We don't have to wrap the following import in a 'try', because
                # we already know 'readline' was imported successfully.
                import rlcompleter
                readline.set_completer(rlcompleter.Completer(imported_objects).complete)
                readline.parse_and_bind("tab:complete")
            # We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system
            # conventions and get $PYTHONSTARTUP first then import user.
            if not use_plain:
                pythonrc = os.environ.get("PYTHONSTARTUP")
                if pythonrc and os.path.isfile(pythonrc):
                    try:
                        execfile(pythonrc)
                    except NameError:
                        pass
                # This will import .pythonrc.py as a side-effect
                import user
            code.interact(local=imported_objects)
| bsd-3-clause |
mlavin/django | django/contrib/gis/db/backends/postgis/introspection.py | 42 | 5410 | from django.contrib.gis.gdal import OGRGeomType
from django.db.backends.postgresql.introspection import DatabaseIntrospection
class GeoIntrospectionError(Exception):
pass
class PostGISIntrospection(DatabaseIntrospection):
# Reverse dictionary for PostGIS geometry types not populated until
# introspection is actually performed.
postgis_types_reverse = {}
ignored_tables = DatabaseIntrospection.ignored_tables + [
'geography_columns',
'geometry_columns',
'raster_columns',
'spatial_ref_sys',
'raster_overviews',
]
# Overridden from parent to include raster indices in retrieval.
# Raster indices have pg_index.indkey value 0 because they are an
# expression over the raster column through the ST_ConvexHull function.
# So the default query has to be adapted to include raster indices.
_get_indexes_query = """
SELECT DISTINCT attr.attname, idx.indkey, idx.indisunique, idx.indisprimary
FROM pg_catalog.pg_class c, pg_catalog.pg_class c2, pg_catalog.pg_index idx,
pg_catalog.pg_attribute attr, pg_catalog.pg_type t
WHERE
c.oid = idx.indrelid
AND idx.indexrelid = c2.oid
AND attr.attrelid = c.oid
AND t.oid = attr.atttypid
AND (
attr.attnum = idx.indkey[0] OR
(t.typname LIKE 'raster' AND idx.indkey = '0')
)
AND attr.attnum > 0
AND c.relname = %s"""
def get_postgis_types(self):
"""
Return a dictionary with keys that are the PostgreSQL object
identification integers for the PostGIS geometry and/or
geography types (if supported).
"""
field_types = [
('geometry', 'GeometryField'),
# The value for the geography type is actually a tuple
# to pass in the `geography=True` keyword to the field
# definition.
('geography', ('GeometryField', {'geography': True})),
]
postgis_types = {}
# The OID integers associated with the geometry type may
# be different across versions; hence, this is why we have
# to query the PostgreSQL pg_type table corresponding to the
# PostGIS custom data types.
oid_sql = 'SELECT "oid" FROM "pg_type" WHERE "typname" = %s'
cursor = self.connection.cursor()
try:
for field_type in field_types:
cursor.execute(oid_sql, (field_type[0],))
for result in cursor.fetchall():
postgis_types[result[0]] = field_type[1]
finally:
cursor.close()
return postgis_types
def get_field_type(self, data_type, description):
if not self.postgis_types_reverse:
# If the PostGIS types reverse dictionary is not populated, do so
# now. In order to prevent unnecessary requests upon connection
# initialization, the `data_types_reverse` dictionary is not updated
# with the PostGIS custom types until introspection is actually
# performed -- in other words, when this function is called.
self.postgis_types_reverse = self.get_postgis_types()
self.data_types_reverse.update(self.postgis_types_reverse)
return super().get_field_type(data_type, description)
def get_geometry_type(self, table_name, geo_col):
"""
The geometry type OID used by PostGIS does not indicate the particular
type of field that a geometry column is (e.g., whether it's a
PointField or a PolygonField). Thus, this routine queries the PostGIS
metadata tables to determine the geometry type.
"""
cursor = self.connection.cursor()
try:
try:
# First seeing if this geometry column is in the `geometry_columns`
cursor.execute('SELECT "coord_dimension", "srid", "type" '
'FROM "geometry_columns" '
'WHERE "f_table_name"=%s AND "f_geometry_column"=%s',
(table_name, geo_col))
row = cursor.fetchone()
if not row:
raise GeoIntrospectionError
except GeoIntrospectionError:
cursor.execute('SELECT "coord_dimension", "srid", "type" '
'FROM "geography_columns" '
'WHERE "f_table_name"=%s AND "f_geography_column"=%s',
(table_name, geo_col))
row = cursor.fetchone()
if not row:
raise Exception('Could not find a geometry or geography column for "%s"."%s"' %
(table_name, geo_col))
# OGRGeomType does not require GDAL and makes it easy to convert
# from OGC geom type name to Django field.
field_type = OGRGeomType(row[2]).django
# Getting any GeometryField keyword arguments that are not the default.
dim = row[0]
srid = row[1]
field_params = {}
if srid != 4326:
field_params['srid'] = srid
if dim != 2:
field_params['dim'] = dim
finally:
cursor.close()
return field_type, field_params
| bsd-3-clause |
wzhy90/git-repo | subcmds/info.py | 46 | 6021 | #
# Copyright (C) 2012 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from command import PagedCommand
from color import Coloring
from error import NoSuchProjectError
from git_refs import R_M
class _Coloring(Coloring):
  """Coloring helper bound to the 'status' section of the user's config."""
  def __init__(self, config):
    Coloring.__init__(self, config, "status")
class Info(PagedCommand):
  """`repo info`: summarize the manifest branch and per-project state."""
  common = True
  helpSummary = "Get info on the manifest branch, current branch or unmerged branches"
  helpUsage = "%prog [-dl] [-o [-b]] [<project>...]"
  def _Options(self, p):
    """Register the command-line options for `repo info`."""
    p.add_option('-d', '--diff',
                 dest='all', action='store_true',
                 help="show full info and commit diff including remote branches")
    p.add_option('-o', '--overview',
                 dest='overview', action='store_true',
                 help='show overview of all local commits')
    p.add_option('-b', '--current-branch',
                 dest="current_branch", action="store_true",
                 help="consider only checked out branches")
    p.add_option('-l', '--local-only',
                 dest="local", action="store_true",
                 help="Disable all remote operations")
  def Execute(self, opt, args):
    """Entry point: print the manifest summary, then a diff or commit overview."""
    # Build the colorized printers used by all the print helpers below.
    self.out = _Coloring(self.manifest.globalConfig)
    self.heading = self.out.printer('heading', attr = 'bold')
    self.headtext = self.out.printer('headtext', fg = 'yellow')
    self.redtext = self.out.printer('redtext', fg = 'red')
    self.sha = self.out.printer("sha", fg = 'yellow')
    self.text = self.out.nofmt_printer('text')
    self.dimtext = self.out.printer('dimtext', attr = 'dim')
    self.opt = opt
    manifestConfig = self.manifest.manifestProject.config
    mergeBranch = manifestConfig.GetBranch("default").merge
    manifestGroups = (manifestConfig.GetString('manifest.groups')
                      or 'all,-notdefault')
    self.heading("Manifest branch: ")
    self.headtext(self.manifest.default.revisionExpr)
    self.out.nl()
    self.heading("Manifest merge branch: ")
    self.headtext(mergeBranch)
    self.out.nl()
    self.heading("Manifest groups: ")
    self.headtext(manifestGroups)
    self.out.nl()
    self.printSeparator()
    # -o/--overview switches from the per-project diff view to the
    # all-local-commits overview.
    if not opt.overview:
      self.printDiffInfo(args)
    else:
      self.printCommitOverview(args)
def printSeparator(self):
self.text("----------------------------")
self.out.nl()
def printDiffInfo(self, args):
try:
projs = self.GetProjects(args)
except NoSuchProjectError:
return
for p in projs:
self.heading("Project: ")
self.headtext(p.name)
self.out.nl()
self.heading("Mount path: ")
self.headtext(p.worktree)
self.out.nl()
self.heading("Current revision: ")
self.headtext(p.revisionExpr)
self.out.nl()
localBranches = p.GetBranches().keys()
self.heading("Local Branches: ")
self.redtext(str(len(localBranches)))
if len(localBranches) > 0:
self.text(" [")
self.text(", ".join(localBranches))
self.text("]")
self.out.nl()
if self.opt.all:
self.findRemoteLocalDiff(p)
self.printSeparator()
def findRemoteLocalDiff(self, project):
#Fetch all the latest commits
if not self.opt.local:
project.Sync_NetworkHalf(quiet=True, current_branch_only=True)
logTarget = R_M + self.manifest.manifestProject.config.GetBranch("default").merge
bareTmp = project.bare_git._bare
project.bare_git._bare = False
localCommits = project.bare_git.rev_list(
'--abbrev=8',
'--abbrev-commit',
'--pretty=oneline',
logTarget + "..",
'--')
originCommits = project.bare_git.rev_list(
'--abbrev=8',
'--abbrev-commit',
'--pretty=oneline',
".." + logTarget,
'--')
project.bare_git._bare = bareTmp
self.heading("Local Commits: ")
self.redtext(str(len(localCommits)))
self.dimtext(" (on current branch)")
self.out.nl()
for c in localCommits:
split = c.split()
self.sha(split[0] + " ")
self.text(" ".join(split[1:]))
self.out.nl()
self.printSeparator()
self.heading("Remote Commits: ")
self.redtext(str(len(originCommits)))
self.out.nl()
for c in originCommits:
split = c.split()
self.sha(split[0] + " ")
self.text(" ".join(split[1:]))
self.out.nl()
def printCommitOverview(self, args):
all_branches = []
for project in self.GetProjects(args):
br = [project.GetUploadableBranch(x)
for x in project.GetBranches()]
br = [x for x in br if x]
if self.opt.current_branch:
br = [x for x in br if x.name == project.CurrentBranch]
all_branches.extend(br)
if not all_branches:
return
self.out.nl()
self.heading('Projects Overview')
project = None
for branch in all_branches:
if project != branch.project:
project = branch.project
self.out.nl()
self.headtext(project.relpath)
self.out.nl()
commits = branch.commits
date = branch.date
self.text('%s %-33s (%2d commit%s, %s)' % (
branch.name == project.CurrentBranch and '*' or ' ',
branch.name,
len(commits),
len(commits) != 1 and 's' or '',
date))
self.out.nl()
for commit in commits:
split = commit.split()
self.text('{0:38}{1} '.format('','-'))
self.sha(split[0] + " ")
self.text(" ".join(split[1:]))
self.out.nl()
| apache-2.0 |
haiyangd/python-show-me-the-code- | Jaccorot/0006/0006.py | 8 | 1132 | #!usr/bin/python
#coding=utf-8
"""
第 0006 题:你有一个目录,放了你一个月的日记,都是 txt,为了避免分词的问题,
假设内容都是英文,请统计出你认为每篇日记最重要的词。
"""
import os
import re
def walk_dir(path):
    """Recursively collect the diary text files under *path*.

    Returns a list of joined paths for every file whose name ends with
    the ``.txt`` extension, compared case-insensitively.

    Fix: the original tested ``endswith('txt')`` (no dot), which also
    matched names such as ``notes_txt``; the real extension is checked now.
    """
    file_path = []
    for root, dirs, files in os.walk(path):
        for f in files:
            if f.lower().endswith('.txt'):
                file_path.append(os.path.join(root, f))
    return file_path
def find_key_word(filepath):
    """Print the most frequent word of the diary file *filepath*.

    Words are runs of ASCII letters, counted case-insensitively.

    Fixes over the original:
      * a file containing no words no longer raises IndexError
        (``sorted_word_list[-1]`` on an empty list) -- it prints nothing;
      * ``max(..., key=...)`` replaces a full sort (O(n) vs O(n log n));
      * ``print(...)`` call form works under both Python 2 and 3
        (the original ``print u"..."`` statement is Python-2-only).
    """
    filename = os.path.basename(filepath)
    with open(filepath) as f:
        text = f.read()
    words_list = re.findall(r'[A-Za-z]+', text.lower())
    if not words_list:
        # Empty diary: nothing to report.
        return
    word_dic = {}
    for w in words_list:
        word_dic[w] = word_dic.get(w, 0) + 1
    # Ties resolve to an arbitrary top word, as in the original.
    top_word = max(word_dic, key=word_dic.get)
    print(u"在%s文件中,%s为关键词,共出现了%s次" % (filename, top_word, word_dic[top_word]))
if __name__ == "__main__":
    # Scan the current working directory tree and report the most
    # frequent word of every .txt diary found.
    for file_path in walk_dir(os.getcwd()):
        find_key_word(file_path)
TechWritingWhiz/indy-node | indy_node/test/upgrade/test_node_control_tool_resolves_dependencies.py | 2 | 1555 | import multiprocessing
from indy_node.utils.node_control_tool import NodeControlTool
from plenum.test.helper import randomText
m = multiprocessing.Manager()
whitelist = ['Unexpected error in _upgrade test']
def testNodeControlResolvesDependencies(monkeypatch):
nct = NodeControlTool()
node_package = ('indy-node', '0.0.1')
anoncreds_package = ('indy-anoncreds', '0.0.2')
plenum_package = ('indy-plenum', '0.0.3')
node_package_with_version = '{}={}'.format(*node_package)
plenum_package_with_version = '{}={}'.format(*plenum_package)
anoncreds_package_with_version = '{}={}'.format(*anoncreds_package)
mock_info = {
node_package_with_version: '{}{} (= {}){}{} (= {}){}'.format(
randomText(100),
*plenum_package,
randomText(100),
*anoncreds_package,
randomText(100)),
plenum_package_with_version: '{}'.format(
randomText(100)),
anoncreds_package_with_version: '{}'.format(
randomText(100))}
def mock_get_info_from_package_manager(self, package):
return mock_info.get(package, None)
monkeypatch.setattr(nct.__class__, '_get_info_from_package_manager',
mock_get_info_from_package_manager)
monkeypatch.setattr(
nct.__class__, '_update_package_cache', lambda *x: None)
ret = nct._get_deps_list(node_package_with_version)
assert ret.split() == [
anoncreds_package_with_version,
plenum_package_with_version,
node_package_with_version] | apache-2.0 |
netvigator/myPyPacks | pyPacks/String/PhoneNos.py | 2 | 49851 | #!/usr/bin/pythonTest
# -*- coding: utf-8 -*-
#
# string functions PhoneNos phone numbers
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# The GNU General Public License is available from:
# The Free Software Foundation, Inc.
# 51 Franklin Street, Fifth Floor
# Boston MA 02110-1301 USA
#
# http://www.gnu.org/licenses/gpl.html
#
# Copyright 2010-2016 Rick Graves
#
'''
is the string a phone number or not?
'''
from string import digits
from six import print_ as print3
from String.Dumpster import getDigitsOnly
from String.Find import getFinderFindAll
from String.Test import getHasSubstrTester
from Utils.Config import getConfDict, getSetFrozenOffCommaString
from Utils.Config import fixAllLevel2sUnderLevel1
from Utils.Combos import All_
from Dict.Get import getValueIter
# Module-wide switch: many predicates print3() their decision steps
# when this is True (the __main__ block enables it via 'debug' arg).
bDebugPrint = False

# Finder: does the number carry an extension marker (' x', 'ext', '#')?
hasExtension = getHasSubstrTester( '( x|ext|#)' )

# Configuration tables: per-state USA data and international dialing codes.
dStateInfo = getConfDict('state_info.conf')
dCountryCodesMore = getConfDict('country_codes.conf')
dCountryCodes = dCountryCodesMore['main']

# setNoAmNumbPlanCountries = getSetFrozenOffCommaString( dCountryCodesMore['NANP']['countries'] )
#'prepend-missing-numb' : getBoolOffYesNoTrueFalse,
#'mobile-number-length' : int,
# Converters applied to raw config strings: these sections become frozensets.
dConfValueFixers = {
    'area_codes' : getSetFrozenOffCommaString,
    'United Kingdom' : getSetFrozenOffCommaString,
    }
#'Canada' : getSetFrozenOffCommaString,
#'Caribbean' : getSetFrozenOffCommaString,

# Apply the fixers in place to both config dicts.
fixAllLevel2sUnderLevel1( dStateInfo, dConfValueFixers )
fixAllLevel2sUnderLevel1( dCountryCodesMore, dConfValueFixers )

# Countries sharing the North American Numbering Plan (code 1).
setNoAmNumbPlanCountries = dCountryCodesMore['NANP']['countries']
class Finished(Exception):
    """Internal control-flow exception: raised to abandon a validation
    early and fall through to the function's cleanup/return path."""
def _getCountryCodeFinder():
    """Build a finder matching any known IDD country code at the START
    of a digit string.

    Side effect: normalises the module-level dCountryCodes in place --
    dashed NANP/Caribbean entries lose their dash, and comma-separated
    satellite-service entries are removed entirely.
    """
    from Dict.Get import getItemTuple, getValueIter

    # NOTE(review): mutating dCountryCodes while looping assumes
    # getItemTuple() returns a materialised sequence -- confirm.
    for k, v in getItemTuple( dCountryCodes ):
        if '-' in v:
            # Caribbean countries on USA phone system with own area codes
            dCountryCodes[ k ] = v.replace( '-', '' )
        elif ', ' in v: # satelite phone service
            del dCountryCodes[ k ]

    # Regex alternation anchored at the start: '^1|^44|^852|...'
    gotCountryCode = getFinderFindAll(
        '^%s' % '|^'.join( getValueIter( dCountryCodes ) ) )

    return gotCountryCode


# Callable: list of country-code prefixes matched at the front of a
# digit string (empty list when none match).
_gotCountryCode = _getCountryCodeFinder()
def getCodeGotCountry( sCountry ):
    """Return the IDD dialing code for *sCountry* (case-insensitive),
    or None when the country is unknown."""
    return dCountryCodes.get( sCountry.lower(), None )


# All known dialing codes, for O(1) membership tests.
setCountryCodes = frozenset( dCountryCodes.values() )


def isCountryCode( s ):
    """True when string *s* is a known IDD country dialing code."""
    return s in setCountryCodes


def _getDictIddCodeCountry():
    """Return the reverse mapping: dialing code -> country name(s)."""
    from Dict.Get import getReverseDict

    dIddCodesCountries = getReverseDict( dCountryCodes )

    return dIddCodesCountries


dIddCodesCountries = _getDictIddCodeCountry()


def getCountryListGotIddCode( sIddCode ):
    """Return title-cased country names for dialing code *sIddCode*,
    or [] when the code is unknown.

    NOTE(review): assumes getReverseDict() maps each code to an
    iterable of country-name strings (codes like '1' are shared by
    several countries); if it maps to a single plain string, the
    comprehension below would iterate characters -- confirm.
    """
    lCountries = []

    if sIddCode in dIddCodesCountries:
        lCountries = [ s.title() for s in dIddCodesCountries[ sIddCode ] ]

    return lCountries


# Matches placeholder text users type instead of a real number.
isInvalid = \
    getFinderFindAll(
        'same|none|unknown|dontwant|unlisted|no listing|no phone|to be added|do not have' )


def isMaybeValid( sPhone ): return not isInvalid( sPhone )


# Lower-cased strings meaning "no number supplied".
setNotAvailable = frozenset( ( 'n/a', 'not available', 'na' ) )
def getAreaCodeSet():
    """Union of all USA area codes listed per state in state_info.conf."""
    setCodes = set( () )

    for setState in getValueIter( dStateInfo.get('area_codes') ):
        setCodes.update( setState )

    return frozenset( setCodes )


setAreaCodesUSA = getAreaCodeSet()


def _isAreaCodeForCountry( sAreaCodeMaybe, setAreaCodes = setAreaCodesUSA ):
    # Membership test against one country's area-code set (USA default).
    return sAreaCodeMaybe in setAreaCodes


def isAreaCodeForUSA( sAreaCodeMaybe ):
    """True when the 3-digit string is an assigned USA area code."""
    return _isAreaCodeForCountry( sAreaCodeMaybe )


def getOtherCodeSet( sArea ):
    """Area codes for section *sArea* (e.g. 'Canada') of country_codes.conf."""
    sCodes = ','.join( getValueIter( dCountryCodesMore.get( sArea ) ) )

    return frozenset( sCodes.replace( ' ', '' ).split(',') )


setAreaCodesCanada = getOtherCodeSet( 'Canada' )
setAreaCodesCaribbean = getOtherCodeSet( 'Caribbean' )


def getStateAbbreviationDict():
    """Return {state name (lower-case): state abbreviation (UPPER-case)}."""
    from Dict.Get import getDictOffPairOfLists, getKeyIter
    from Iter.AllVers import iMap
    from String.Get import getLower, getUpper

    dAbbreviationsStates = dStateInfo.get('abbreviations')

    iAbbreviations = iMap( getUpper, getKeyIter( dAbbreviationsStates ) )
    iStates = iMap( getLower, getValueIter( dAbbreviationsStates ) )

    return getDictOffPairOfLists( iStates, iAbbreviations )


def getAreaCodeStateAbbrevDict():
    """Return {area code: state abbreviation} for every USA area code."""
    from Dict.Get import getDictOffPairOfLists
    from Dict.Get import getKeyIter

    dStatesAbbreviations = getStateAbbreviationDict()

    dStatesCodeStrings = dStateInfo.get('area_codes')

    dCodesStates = {}

    for sState in getKeyIter( dStatesCodeStrings ):
        tCodes = tuple( dStatesCodeStrings.get( sState ) )

        # Config entries with no known abbreviation are reported, not mapped.
        if isinstance( dStatesAbbreviations.get( sState ), str ):
            sAbbrev = dStatesAbbreviations.get( sState ).upper()
            # Pair every code of this state with its one abbreviation.
            dCodesStates.update(
                getDictOffPairOfLists ( tCodes, [ sAbbrev ] * len( tCodes ) ) )
        else:
            print3( 'is "%s" a state?' % sState )

    return dCodesStates


dCodesStateAbbrevs = getAreaCodeStateAbbrevDict()


# NANP shape, digits only, anchored: optional leading 1, then area code
# and exchange whose first digit is 2-9, then four more digits.
fFindPhoneUSA = getFinderFindAll( '\A1?(?:[2-9]\d{2}){2}\d{4}\Z' )

# Valid total digit counts for a full USA number (with/without the 1).
_setUsaLongNumbers = frozenset( ( 10, 11 ) )
def isFormatOrLikeUSA( sPhoneOrig ):
    """Classify *sPhoneOrig* against USA (NANP) conventions.

    Returns ( bLikePhoneUSA, bFormatLikeUSA ):
      bLikePhoneUSA  -- the digits form a plausible USA number
                        (10/11 digits, recognised area code);
      bFormatLikeUSA -- additionally the digit GROUPING matches how
                        USA numbers are customarily written.
    """
    from String.Eat import eatFrontZeros
    from String.Test import getNumberPattern

    # lNumberPattern holds the lengths of the digit runs in the input;
    # pad the front with zeros so negative indexing below cannot wrap.
    lNumberPattern = getNumberPattern( sPhoneOrig )

    lNumberPattern[0:0] = [ 0, 0, 0 ]

    sPhoneDigits = getDigitsOnly( sPhoneOrig )

    bLikePhoneUSA = False

    bFormatLikeUSA = False

    if bDebugPrint:
        print3( '' )
        print3( 'lNumberPattern:', lNumberPattern )
        print3( 'sPhoneDigits:', sPhoneDigits )

    try:
        # Shape check on the bare digits (leading zeros stripped first).
        if not fFindPhoneUSA( eatFrontZeros( sPhoneDigits ) ):
            if bDebugPrint:
                print3( 'fFindPhoneUSA() returned False' )
            raise Finished

        # The three digits before the final seven must be a USA area code.
        if not sPhoneDigits[ -10 : -7 ] in setAreaCodesUSA:
            if bDebugPrint:
                print3( sPhoneDigits[ -10 : -7 ], 'area code digits not found!' )
            raise Finished

        if lNumberPattern[ -1 ] in _setUsaLongNumbers:
            # 8005551212 or 18005551212
            if bDebugPrint:
                print3( 'lNumberPattern[ -1 ] in _setUsaLongNumbers!' )
            bLikePhoneUSA = True

        elif ( lNumberPattern[ -3 ] == 1 and
               lNumberPattern[ -2 ] == 3 and
               lNumberPattern[ -1 ] == 7 ):
            # 1 800 5551212
            if bDebugPrint:
                print3( 'number pattern ends with 1, 3, 7' )
            bLikePhoneUSA = True
            bFormatLikeUSA = True

        elif ( lNumberPattern[ -3 ] == 3 and
               lNumberPattern[ -2 ] == 3 and
               lNumberPattern[ -1 ] == 4 ):
            # 800 555-1212
            if bDebugPrint:
                print3( 'number pattern ends with 3, 3, 4' )
            bLikePhoneUSA = True
            bFormatLikeUSA = True

        elif lNumberPattern[ -2 ] == 3 and lNumberPattern[ -1 ] == 7:
            # 800 5551212
            bLikePhoneUSA = True

        elif lNumberPattern[ -2 ] == 6 and lNumberPattern[ -1 ] == 4:
            # 800555 1212
            bLikePhoneUSA = True

    except Finished:
        pass

    return bLikePhoneUSA, bFormatLikeUSA
def isFormatLikeUSA( sPhoneOrig ):
    """True when the number is grouped the way USA numbers are written."""
    bLikePhoneUSA, bFormatLikeUSA = isFormatOrLikeUSA( sPhoneOrig )

    return bFormatLikeUSA


def isLikePhoneUSA( sPhoneOrig ):
    """True when the digits form a plausible USA number."""
    bLikePhoneUSA, bFormatLikeUSA = isFormatOrLikeUSA( sPhoneOrig )

    return bLikePhoneUSA


def isAreaCodeFromState( sAreaCode, sState ):
    """True when *sAreaCode* belongs to two-letter state *sState*."""
    return sState.upper() == dCodesStateAbbrevs.get( sAreaCode )


def getStateGotAreaCode( sAreaCode ):
    """Return the state abbreviation for *sAreaCode*, else a message."""
    return dCodesStateAbbrevs.get( sAreaCode, 'not a USA area code' )


# Digits that may not start a NANP exchange.
setZeroOne = frozenset( ( '0', '1' ) )


def _getAreaCodeAndDigits( sPhone ):
    # Return (presumed 3-digit area code, all digits) of the raw input.
    sPhoneDigits = getDigitsOnly( sPhone )

    sAreaCode = sPhoneDigits[ -10 : -7 ]

    return sAreaCode, sPhoneDigits


def getNumberDropExtension( sPhone ):
    """Strip a trailing extension (' x', 'ext', '#') and what follows."""
    from String.Eat import eatBackNonDigits

    lExtension = hasExtension( sPhone )

    if lExtension:
        sPhone = eatBackNonDigits( sPhone[ : sPhone.index( lExtension[0] ) ] )

    return sPhone


def getDigitCount( sPhone ):
    """Number of digits in *sPhone*, any extension excluded."""
    sPhoneDigits = getDigitsOnly( getNumberDropExtension( sPhone ) )

    return len( sPhoneDigits )


def getAreaCode( sPhone ):
    """Return the presumed 3-digit NANP area code of *sPhone*."""
    sAreaCode, sPhoneDigits = _getAreaCodeAndDigits( sPhone )

    return sAreaCode


def isPhoneFromState( sPhone, sState ):
    """True when the area code maps to *sState* and the exchange does
    not start with 0 or 1."""
    sAreaCode, sPhoneDigits = _getAreaCodeAndDigits( sPhone )

    return sPhoneDigits[ -7 : -6 ] not in setZeroOne and \
           isAreaCodeFromState( sAreaCode, sState )


def _getPhoneDigits( sPhone ):
    # Return (digits only, digits with original whitespace kept).
    from String.Dumpster import oKeepDigitsOnly, getWhiteWiped

    sDigitsSpaces = oKeepDigitsOnly.Dump( sPhone )

    return getWhiteWiped( sDigitsSpaces ), sDigitsSpaces


def _getZerosOffEnd( sPhone ):
    # Return the run of '0' characters at the END of the string.
    from String.Get import getTheseCharsOffOneEnd

    def isZero( s ): return s == '0'

    return getTheseCharsOffOneEnd( sPhone, fGetIfMeets = isZero, bEatOffFront = False )
def isBogusPhone( sPhone, bExplain = False ):
    """Heuristically decide whether *sPhone* is a junk entry.

    Rules (each raises Finished with its reason): fewer than 5 digits,
    digits in ascending order (keyboard run), ascending run after a
    country code, USA directory assistance (...5551212), too few
    distinct digits, padding zeros at the end (absolute and relative),
    same digit repeated five times at the end.

    With bExplain=True the failing rule is printed via print3().
    """
    from sys import exc_info

    from String.Test import isInOrder

    bBogus = True

    if sPhone.startswith( '011' ):
        # strip IDD prefix
        sPhone = sPhone[ 3 : ]

    sPhone = sPhone.strip()

    try:
        sDigits, sDigitsSpaces = _getPhoneDigits( sPhone )

        iDigits = len( sDigits )

        if iDigits < 5:
            raise Finished( 'fewer than 5 digits' )

        if isInOrder( sDigits ):
            raise Finished( 'digits are in order' )

        lCountryCode = _gotCountryCode( sDigits )

        if lCountryCode:
            sCountryCode = lCountryCode[0]

            sRemains = sDigits [ len( sCountryCode ) : ]

            if len( sRemains ) >= 5 and isInOrder( sRemains ):
                raise Finished( 'digits after country code are in order' )

        if sDigits.endswith( '5551212' ):
            raise Finished( 'USA directory assistance number' )

        setDigits = frozenset( tuple( sDigits ) )

        # Long numbers built from very few distinct digits are fake.
        if len( setDigits ) < iDigits // 4:
            raise Finished( 'few digits repeated' )

        iEndingZeros = len( _getZerosOffEnd( sDigits ) )

        if iDigits - iEndingZeros <= 0:
            raise Finished( 'absolutely too many ending zeros' )

        if 1 + len( setDigits ) - iEndingZeros < iDigits // 5:
            raise Finished( 'relatively too many ending zeros' )

        sLast = sDigits[-1]

        if sDigits[ -5 : ] == sLast * 5:
            raise Finished( 'ending repeats too many of same digit' )

        bBogus = False

    except Finished:
        if bExplain:
            # The Finished message carries the failing rule.
            error, msg, traceback = exc_info()

            print3( sPhone, msg )

    return bBogus


def isPhoneNotBogus( sPhone ):
    """Negation of isBogusPhone()."""
    return not isBogusPhone( sPhone )


def isAvailable( s ): return not s.lower() in setNotAvailable


# Composite predicates: a number must pass every member check.
isValidPhone = All_( isMaybeValid, isAvailable, isPhoneNotBogus )

isPhoneNo = All_( isMaybeValid, isAvailable )
def _isPhone4Country(
        sPhone, sCountryCode, tLenMinMax, iAreaCodeLen = 0, setPrefix = (),
        bDropLocalZeroForIDD = False, bPrefixVaries = False ):
    """Core test: does *sPhone* look like a number of one country?

    sPhone               -- raw user input
    sCountryCode         -- that country's IDD dialing code ('1', '44', ...)
    tLenMinMax           -- allowed digit counts after code stripping
    iAreaCodeLen         -- length of a fixed-size area code, or 0
    setPrefix            -- valid area codes / national prefixes, if any
    bDropLocalZeroForIDD -- country writes a leading 0 domestically; it
                            is re-inserted after stripping the IDD form
    bPrefixVaries        -- prefixes in setPrefix have mixed lengths (3-6)

    Returns ( bPhone4Country, sDigits ).
    """
    from Iter.AllVers import iRange
    from String.Eat import eatFrontNonDigits
    from String.Test import hasAnyAlpha

    bPhone4Country = False

    sPhoneOrig = sPhone

    if bDebugPrint:
        print3( '' )
        print3( '' )
        print3( '_isPhone4Country()' )

    # Strip the USA international dialing prefix if present.
    if sPhone.startswith( '011' ): sPhone = eatFrontNonDigits( sPhone[3:] )

    if hasAnyAlpha( sPhone ):
        # Letters can only be an extension marker; cut it (and beyond) off.
        lExtension = hasExtension( sPhone )

        if lExtension:
            sPhone = sPhone[ : sPhone.index( lExtension[0] ) ]

    try:
        sPhone, sDigitsSpaces = _getPhoneDigits( sPhone )

        if bDebugPrint:
            print3( 'sPhone:', sPhone )
            print3( 'sDigitsSpaces: ', sDigitsSpaces )

        if sPhoneOrig.startswith( '+' ):
            # '+' form must carry THIS country's code, else reject.
            sNext = eatFrontNonDigits(
                sPhoneOrig[ 1 : ] )[ : len( sCountryCode ) ]

            if sNext != sCountryCode:
                if bDebugPrint: print3( 'sNext != sCountryCode' )
                # sDigits must be bound before raising: it is returned below.
                sDigits, sDigitsSpaces = _getPhoneDigits( sPhone )
                raise Finished

        if sPhone.startswith(sCountryCode):
            sPhone = eatFrontNonDigits( sPhone[len(sCountryCode):] )

        if bDebugPrint:
            print3( 'bDropLocalZeroForIDD:', bDropLocalZeroForIDD )
            print3( 'sPhone.startswith( "0" ):', sPhone.startswith( '0' ) )
        if bDropLocalZeroForIDD and not sPhone.startswith( '0' ):
            # Restore the domestic leading zero dropped in IDD form.
            sPhone = '0%s' % sPhone

        sDigits, sDigitsSpaces = _getPhoneDigits( sPhone )

        iDigitLen = len( sDigits )

        if bDebugPrint:
            print3( 'sDigits: ', sDigits )
            print3( 'iDigitLen: ', iDigitLen )

        if iDigitLen < min( tLenMinMax ) or iDigitLen > max( tLenMinMax ):
            if bDebugPrint: print3( 'iDigitLen < min( tLenMinMax )' )
            raise Finished

        if iAreaCodeLen and setPrefix:
            # Fixed-length area code: first iAreaCodeLen digits.
            sAreaCode = sDigits[ -iDigitLen : iAreaCodeLen - iDigitLen ]

            # NOTE(review): this rejects when the FIRST CHARACTER alone is
            # in setPrefix; with multi-digit prefixes (e.g. USA 3-digit
            # area codes) it can never fire -- confirm the intent.
            if sAreaCode[0] in setPrefix:
                if bDebugPrint: print3( 'sAreaCode[0] in setPrefix' )
                raise Finished

            if sAreaCode not in setPrefix:
                if bDebugPrint:
                    print3( 'sAreaCode not in setPrefix' )
                    print3( 'sAreaCode: ', sAreaCode )
                raise Finished

        elif bPrefixVaries and setPrefix:
            # Try prefix lengths 3 through 6 against setPrefix.
            bValidPrefix = False

            for iLen in iRange( 3, 7 ): # (3, 4, 5, 6)
                if sPhone[ : iLen ] in setPrefix:
                    bValidPrefix = True

                    if bDebugPrint:
                        print3( 'sPhone[ : iLen ] in setPrefix' )
                        print3( 'sPhone[ : iLen ]: ', sPhone[ : iLen ] )

                    break

            if not bValidPrefix:
                if bDebugPrint:
                    print3( 'sPhone[ : iLen ] not in setPrefix' )
                raise Finished

        elif setPrefix:
            # Uniform-length prefixes: take the length from any member.
            sExample = tuple( setPrefix )[0]

            if sPhone[ : len( sExample ) ] not in setPrefix:
                if bDebugPrint:
                    print3( 'sPhone[ : len( sExample ) ] not in setPrefix' )
                    print3( 'sPhone[ : len( sExample ) ]:', sPhone[ : len( sExample ) ] )
                raise Finished

    except Finished:
        if bDebugPrint:
            print3( 'Finished was raised' )

    else:
        bPhone4Country = True
        if bDebugPrint:
            print3( 'Finished not raised, bPhone4Country set to True' )

    return bPhone4Country, sDigits
# A NANP exchange (digit 4 from the end of the local number) may not
# begin with 0 or 1.
_setNoAmNumbPlanProhibitedFirst = frozenset( tuple( '01' ) )


def isPhoneUSA( sPhone ):
    """True for a valid USA number: NANP shape plus a USA area code."""
    bPhone4Country, sDigits = _isPhone4Country(
        sPhone, '1', ( 10, ), 3,
        setPrefix = setAreaCodesUSA )

    return ( bPhone4Country and
             sDigits[ -7 : -6 ] not in _setNoAmNumbPlanProhibitedFirst )


def isPhoneHK( sPhone ):
    """True for a Hong Kong number: code 852, eight local digits."""
    bPhone4Country, sDigits = _isPhone4Country( sPhone, '852', ( 8, ) )

    return bPhone4Country


def isPhoneCanada( sPhone ):
    """True for a Canadian number (NANP with a Canadian area code)."""
    bPhone4Country, sDigits = _isPhone4Country( sPhone, '1', ( 10, ), 3, setPrefix = setAreaCodesCanada )

    return bPhone4Country and sDigits[ -7 : -6 ] not in _setNoAmNumbPlanProhibitedFirst


def isPhoneCaribbean( sPhone ):
    """True for a Caribbean NANP number (own area-code set, code 1)."""
    bPhone4Country, sDigits = _isPhone4Country( sPhone, '1', ( 10, ), 3, setPrefix = setAreaCodesCaribbean )

    return bPhone4Country and sDigits[ -7 : -6 ] not in _setNoAmNumbPlanProhibitedFirst


def isPhoneNoAmDialingPlan( sPhone ):
    # North American Dialing Plan
    return ( isPhoneUSA( sPhone ) or
             isPhoneCanada( sPhone ) or
             isPhoneCaribbean( sPhone ) )
def hasObviousOtherCountryCode(
        sPhoneOrig, sPhoneDigits, sThisCountryDialingCode,
        lNumberPattern, lPatternNumbers ):
    """Return a foreign country code clearly present at the front of
    *sPhoneOrig*, or '' when none is evident or the number belongs to
    *sThisCountryDialingCode*.

    lNumberPattern / lPatternNumbers are optional precomputed digit-run
    lengths and digit-run strings; both are rebuilt here when empty.
    """
    from copy import copy

    from String.Eat import eatFrontZerosOnes
    from String.Get import getTextAfter
    from String.Test import getNumberPattern, hasAnyDigits

    # Normalise a possibly-None dialing code to ''.
    if sThisCountryDialingCode is None:
        sThisCode = ''
    else:
        sThisCode = sThisCountryDialingCode
    sOtherCountryCode = ''
    sLessDigits = ''

    # Work on copies: the caller's lists must not be mutated.
    lNumbers = copy( lPatternNumbers )
    lPattern = copy( lNumberPattern )

    if bDebugPrint:
        print3( 'sPhoneOrig: ', sPhoneOrig )
        print3( 'sThisCountryDialingCode: ', sThisCode )
        print3( 'lNumbers: ', lNumbers )
        print3( 'lPattern: ', lPattern )

    if sPhoneOrig and not lPattern:
        # Rebuild the digit-run length pattern from the raw input.
        lPattern = getNumberPattern( sPhoneOrig )

    if sPhoneOrig and not lNumbers:
        # Slice the digit string into runs matching lPattern.
        iStart = 0

        for iLen in lPattern:
            lNumbers.append( sPhoneDigits[ iStart : iStart + iLen ] )

            iStart += iLen

    # Strip IDD markers / leading zeros-and-ones off the first run.
    if not lNumbers:
        pass

    elif lNumbers[0] == '011':
        del lNumbers[0]
        del lPattern[0]

    elif lNumbers[0] != '1':
        lNumbers[0] = eatFrontZerosOnes( lNumbers[0] )
        lPattern[0] = len( lNumbers[0] )

    if lNumbers and not lNumbers[0]:
        # First run was all zeros/ones and is now empty -- drop it.
        del lNumbers[0]
        del lPattern[0]

    if sPhoneDigits: sLessDigits = eatFrontZerosOnes( sPhoneDigits )

    if bDebugPrint:
        print3( 'lNumbers: ', lNumbers )
    try:
        if not hasAnyDigits( sPhoneOrig ):
            raise Finished

        elif False and sThisCode == '1':
            # Disabled branch kept from earlier experimentation.
            raise Finished

        elif ( sThisCode != '1' and
               ( sPhoneDigits[ : len( sThisCode ) ] == sThisCode or
                 sLessDigits[ : len( sThisCode ) ] == sThisCode ) ):
            # Number starts with THIS country's code: nothing foreign.
            raise Finished

        elif ( sPhoneOrig.startswith( '+' ) and
               isCountryCode( lPattern[0] ) ):
            # NOTE(review): lPattern holds run LENGTHS (ints) while
            # isCountryCode() tests string membership, so this branch
            # looks like it can never be true -- possibly lNumbers[0]
            # was intended; confirm.  Also lPattern may be empty here.
            sOtherCountryCode = lPattern[0]

            if bDebugPrint:
                print3( 'sOtherCountryCode: ', sOtherCountryCode )

            raise Finished

        # Does the front of the digit string (sans 0/1) spell a code?
        bCountryCodeFirst2 = isCountryCode( sLessDigits[ : 2 ] )
        bCountryCodeFirst3 = isCountryCode( sLessDigits[ : 3 ] )
        bCountryCodeFirst4 = isCountryCode( sLessDigits[ : 4 ] )

        if bDebugPrint:
            print3( 'bCountryCodeFirst2: ', bCountryCodeFirst2 )
            print3( 'bCountryCodeFirst3: ', bCountryCodeFirst3 )
            print3( 'bCountryCodeFirst4: ', bCountryCodeFirst4 )

        # NOTE(review): lNumbers can be empty here (e.g. input '011'
        # alone) and this would raise IndexError -- confirm inputs.
        bLongPatternNumbers0 = len( lNumbers[0] ) > 4

        if ( bCountryCodeFirst2 or
             bCountryCodeFirst3 or
             bCountryCodeFirst4 ):

            # Accept the code only when it is its own digit run, or the
            # first run is long enough to swallow code + number.
            if ( bCountryCodeFirst2 and
                 ( sLessDigits[ : 2 ] == lNumbers[0] or
                   bLongPatternNumbers0 ) ):

                sOtherCountryCode = sLessDigits[ : 2 ]

                if bDebugPrint:
                    print3( 'sLessDigits[ : 2 ] == lNumbers[0]' )

            elif ( bCountryCodeFirst3 and
                   ( sLessDigits[ : 3 ] == lNumbers[0] or
                     bLongPatternNumbers0 ) ):

                sOtherCountryCode = sLessDigits[ : 3 ]

                if bDebugPrint:
                    print3( 'sLessDigits[ : 3 ] == lNumbers[0]' )

            elif ( bCountryCodeFirst4 and
                   ( sLessDigits[ : 4 ] == lNumbers[0] or
                     bLongPatternNumbers0 ) ):

                sOtherCountryCode = sLessDigits[ : 4 ]

                if bDebugPrint:
                    print3( 'sLessDigits[ : 4 ] == lNumbers[0]' )

            else:
                if bDebugPrint:
                    print3( 'lNumbers[0]: ', lNumbers[0] )

                raise Finished

            if bDebugPrint:
                print3( 'sOtherCountryCode: ', sOtherCountryCode )

        # Last resort: explicit '011 CC ...' / '001 CC ...' layout.
        lParts = sPhoneOrig.split()

        if len( lParts ) == 1:
            lParts = sPhoneOrig.split( '-' )

        bObviousOtherCountryCode = (
            lParts and
            lParts[0] in ( '011', '001' ) and
            sThisCode != lParts[1] and
            isCountryCode( lParts[1] ) )

        if bObviousOtherCountryCode:
            sOtherCountryCode = lParts[1]

            raise Finished

    except Finished: pass

    return sOtherCountryCode
# NOTE(review): this first assignment is immediately overwritten by the
# config-driven one below; it survives only as a hard-coded fallback hint.
_setPrefixesUK = frozenset( ( '01', '02', '07' ) )

_setPrefixesUK = dCountryCodesMore['United Kingdom']['phone_prefixes']


def isPhoneUK( sPhone ):
    """True when *sPhone* parses as a United Kingdom number:
    code 44, eleven digits in domestic form, configured prefixes."""
    bPhone4Country, sDigits = _isPhone4Country(
        sPhone, '44', (11,), iAreaCodeLen = 0, setPrefix = _setPrefixesUK,
        bDropLocalZeroForIDD = True, bPrefixVaries = True )

    return bPhone4Country


# Finders returning the runs of digits / of non-digits in a string.
digitsFinder = getFinderFindAll( '\d+' )
othersFinder = getFinderFindAll( '[^\d]+' )
def getDigitsOtherObject( sPhone ):
    """Split *sPhone* into alternating digit / non-digit runs.

    Returns a ValueContainer with:
      lDigits -- the digit runs, in order of appearance
      lOthers -- the non-digit runs, in order of appearance
      lParts  -- all runs interleaved in original string order
      lWhich  -- parallel 'd'/'o' markers for each entry of lParts

    Fix: the original raised IndexError (``lOdds[ -1 ]`` on an empty
    list) for an empty input string; such input now yields empty lists.
    """
    from Iter.AllVers import iRange
    from Object.Get import ValueContainer

    lDigits = digitsFinder( sPhone )
    lOthers = othersFinder( sPhone )

    if not ( lDigits or lOthers ):
        # Empty input: nothing to interleave.
        return ValueContainer(
            lDigits = lDigits,
            lOthers = lOthers,
            lParts = [],
            lWhich = [] )

    if lDigits and sPhone.startswith( lDigits[0] ):
        # String starts with a digit run: digits occupy the even slots.
        lEven = lDigits
        lOdds = lOthers

        tParts = ( 'd', 'o' )

    else:
        lEven = lOthers
        lOdds = lDigits

        tParts = ( 'o', 'd' )

    lParts = [''] * 2 * max( len( lDigits ), len( lOthers ) )
    lWhich = [''] * len( lParts )

    # Pad the shorter (odd-slot) list so the interleave lines up.
    if len( lOdds ) < len( lEven ):
        lOdds.append( '' )

    for i in iRange( len( lEven ) ):
        lParts[ 2 * i ] = lEven[ i ]
        lParts[ 1 + 2 * i ] = lOdds[ i ]

        lWhich[ 2 * i ] = tParts[ 0 ]
        lWhich[ 1 + 2 * i ] = tParts[ 1 ]

    # Remove the padding slot again if one was added above.
    if not lOdds[ -1 ]:
        del lParts[ -1 ]
        del lWhich[ -1 ]
        del lOdds[ -1 ]

    oReturn = ValueContainer(
        lDigits = lDigits,
        lOthers = lOthers,
        lParts = lParts,
        lWhich = lWhich )

    return oReturn
if __name__ == "__main__":
#
from sys import argv
#
from Collect.Query import get1stThatFails, get1stThatMeets
from Utils.Result import sayTestResult
#
lProblems = []
#
args = argv[ 1 : ]
#
if args and args[0] == 'debug':
#
bDebugPrint = True
#
#
sCountry = 'Germany'
#
if getCodeGotCountry( sCountry ) != '49':
#
lProblems.append( 'getCodeGotCountry() Germany' )
#
#
sCountry = 'Canada'
#
if getCodeGotCountry( sCountry ) != '1':
#
lProblems.append( 'getCodeGotCountry() Canada' )
#
#
sCountry = 'United States'
#
if getCodeGotCountry( sCountry ) != '1':
#
lProblems.append( 'getCodeGotCountry() suggest you change USA to United States' )
#
#
if _gotCountryCode( '1206 286-2181' ) != ['1']:
#
lProblems.append( '_gotCountryCode() USA number' )
#
#
if _gotCountryCode( '852 9876-5432' ) != ['852']:
#
lProblems.append( '_gotCountryCode() HK number' )
#
#
'''
#
'''
sTest = 'aABabcABCDabcdeABCDEF'
#
if not fFindPhoneUSA( '12064079702' ) or \
not fFindPhoneUSA( '2064079702' ) or \
fFindPhoneUSA( '0890181075' ) or \
fFindPhoneUSA( '66890181075' ):
#
lProblems.append( 'fFindPhoneUSA()' )
#
#
if not fFindPhoneUSA( '12019845835' ):
#
lProblems.append( 'fFindPhoneUSA() 12019845835' )
#
#
if not isLikePhoneUSA( '12064079702' ) or \
not isLikePhoneUSA( '2064079702' ) or \
isLikePhoneUSA( '0890181075' ) or \
isLikePhoneUSA( '66890181075' ) or \
isLikePhoneUSA( '19994079602' ):
#
# no area code 999
#
lProblems.append( 'isLikePhoneUSA()' )
#
#
if not isLikePhoneUSA( '917 696 0477' ):
#
lProblems.append( 'isLikePhoneUSA() 917 696 0477' )
#
#
if not isLikePhoneUSA( '001 201 9845835' ):
#
lProblems.append( 'isLikePhoneUSA() 001 201 9845835' )
#
#
if isAreaCodeFromState( '212','PA' ) or \
not isAreaCodeFromState( '212','NY' ):
#
lProblems.append( 'isAreaCodeFromState()' )
#
#
if isPhoneFromState( '12062862181','PA' ) or \
isPhoneFromState( '2062862181','PA' ) or \
not isPhoneFromState( '12062862181','WA' ) or \
not isPhoneFromState( '2062862181','WA' ):
#
lProblems.append( 'isPhoneFromState()' )
#
#
if isBogusPhone( '6073519826' ):
#
lProblems.append( 'isBogusPhone() NY number should not be bogus' )
#
#
if isBogusPhone( '011494393969600' ):
#
lProblems.append( 'isBogusPhone() long DE number should not be bogus' )
#
#
if isBogusPhone( '642.08' ):
#
lProblems.append( 'isBogusPhone() minimal DE number should not be bogus' )
#
#
if isBogusPhone( '206 632-9929' ):
#
lProblems.append( 'isBogusPhone() valid USA number should not be bogus' )
#
#
if not isBogusPhone( 'Unknown' ):
#
lProblems.append( 'isBogusPhone() all alpha' )
#
#
if not isBogusPhone( '9876-5432' ):
#
lProblems.append( 'isBogusPhone() digits in sequence' )
#
#
if not isBogusPhone( '011 852 9876-5432' ):
#
lProblems.append( 'isBogusPhone() digits in sequence with country code' )
#
#
if not isBogusPhone( '7 777 777 7777' ):
#
lProblems.append( 'isBogusPhone() repeating digits' )
#
#
if not isBogusPhone( '4151540000' ):
#
lProblems.append( 'isBogusPhone() Mexican favorite' )
#
#
if not isBogusPhone( '1' ):
#
lProblems.append( 'isBogusPhone() one digit only' )
#
#
if not isBogusPhone( '' ):
#
lProblems.append( 'isBogusPhone() blank' )
#
#
if not isBogusPhone( ' "' ):
#
lProblems.append( 'isBogusPhone() double quote' )
#
#
if not isBogusPhone( '0705551212' ):
#
lProblems.append( 'isBogusPhone() Netherlands directory assistance' )
#
#
# 4151540000 in Mexico 43 times
# 4151520000 in Mexico 12 times
#
# 0705551212 in Netherlands 17 times
# 0205551212 in Netherlands 15 times
#
# 39055503131 in Italy 12 times
#
# 020 7722 0101 in UK 11 times
#
# 49 36601 82260 in Germany 10 times
#
# 2688-8854 in India 23 times
# 91-11-2688-8854 in India 12 times
#
# 31-417-3190 in South Korea 8 times
#
#
if not isPhoneUSA( '1-206-632-9929' ):
#
lProblems.append( 'isPhoneUSA() starts with 1 dash separates' )
#
#
if not isPhoneUSA( '206-632-9929' ):
#
lProblems.append( 'isPhoneUSA() starts w area code dash separates' )
#
#
if not isPhoneUSA( '(206) 632-9929' ):
#
lProblems.append( 'isPhoneUSA() starts w area code dash separates' )
#
#
if not isPhoneUSA( '212-410-9633' ):
#
lProblems.append( 'isPhoneUSA() NYC number' )
#
#
if isPhoneUSA( '212-010-9633' ):
#
lProblems.append( 'isPhoneUSA() cannot begin with 0 or 1' )
#
#
if not isLikePhoneUSA( '212-410-9633' ):
#
lProblems.append( 'isLikePhoneUSA() NYC number' )
#
#
if isPhoneUSA( '08 9018 1075' ):
#
lProblems.append( 'isPhoneUSA() Thailand number' )
#
#
if False and not isPhoneCanada( '1-416-538-1352' ):
#
lProblems.append( 'isPhoneCanada() starts with 1 dash separates' )
#
#
if not isValidPhone( '12062862181' ) or \
isValidPhone( 'same' ) or \
isValidPhone( 'n/a' ):
#
lProblems.append( 'isValidPhone()' )
#
#
tValidPhonesHK = (
'85291925529',
'2530 0728',
'6532-3831 x6483',
'852-2848-5360',
'852 2813 1204',
'+852 6330 3505',
'60814418',
'852 966-77-149',
'011 852 9360-6310',
'(852) 90421365',
'011 852 2888 1210', )
#
if get1stThatFails( tValidPhonesHK, isPhoneHK ):
#
lProblems.append( 'isPhoneHK() %s' %
get1stThatFails( tValidPhonesHK, isPhoneHK ) )
#
#
tInvalidPhonesHK = (
'135-0109-9234',
'7034963251',
'1-732-333-4616',
'+86-13911180425',
'14016883174', )
#
if get1stThatMeets( tInvalidPhonesHK, isPhoneHK ):
#
lProblems.append( 'isPhoneHK() %s' %
get1stThatMeets( tInvalidPhonesHK, isPhoneHK ) )
#
#
if getStateGotAreaCode( '212' ) != 'NY':
#
lProblems.append( 'getStateGotAreaCode() 212' )
#
if getStateGotAreaCode( '206' ) != 'WA':
#
lProblems.append( 'getStateGotAreaCode() 206' )
#
if getStateGotAreaCode( '717' ) != 'PA':
#
lProblems.append( 'getStateGotAreaCode() 717' )
#
if getStateGotAreaCode( '202' ) != 'DC':
#
lProblems.append( 'getStateGotAreaCode() 202' )
#
#
#
if isCountryCode( 'xyz' ):
#
lProblems.append( 'isCountryCode() xyz' )
#
#
if isCountryCode( 'xyz' ):
#
lProblems.append( 'isCountryCode() xyz' )
#
#
if not isCountryCode( '49' ):
#
lProblems.append( 'isCountryCode() 49 Germany' )
#
#
sPhoneOrig = '+447924406909'
sPhoneDigits = '447924406909'
sThisCountryDialingCode = '39'
#
sOtherCountryCode = hasObviousOtherCountryCode(
sPhoneOrig, sPhoneDigits, sThisCountryDialingCode, [], [] )
#
if sOtherCountryCode != '44':
#
lProblems.append( 'hasObviousOtherCountryCode() +447924406909 Italy' )
#
#
sPhoneOrig = '+1 212 555-1212'
#
bLikePhoneUSA, bFormatLikeUSA = isFormatOrLikeUSA( sPhoneOrig )
#
if not bFormatLikeUSA:
#
lProblems.append( 'isFormatOrLikeUSA() +1 212 555-1212' )
#
#
sPhoneOrig = '12125551212'
#
bLikePhoneUSA, bFormatLikeUSA = isFormatOrLikeUSA( sPhoneOrig )
#
if bFormatLikeUSA:
#
lProblems.append( 'isFormatOrLikeUSA() 12125551212' )
#
#
sPhoneOrig = '+1 212 5551212'
#
bLikePhoneUSA, bFormatLikeUSA = isFormatOrLikeUSA( sPhoneOrig )
#
if not bFormatLikeUSA:
#
lProblems.append( 'isFormatOrLikeUSA() +1 212 5551212' )
#
#
sPhoneOrig = '212555-1212'
#
bLikePhoneUSA, bFormatLikeUSA = isFormatOrLikeUSA( sPhoneOrig )
#
if not bLikePhoneUSA:
#
lProblems.append( 'bLikePhoneUSA() 212555-1212' )
#
#
if bFormatLikeUSA:
#
lProblems.append( 'isFormatOrLikeUSA() 212555-1212' )
#
#
sPhoneOrig = '0207 737 0107'
#
if bFormatLikeUSA:
#
lProblems.append( 'isFormatOrLikeUSA() 0207 737 0107' )
#
#
sPhoneOrig = '917 696 0477'
#
bLikePhoneUSA, bFormatLikeUSA = isFormatOrLikeUSA( sPhoneOrig )
#
if not bFormatLikeUSA:
#
lProblems.append( 'isFormatOrLikeUSA() 917 696 0477' )
#
#
bLikePhoneUSA, bFormatLikeUSA = isFormatOrLikeUSA( sPhoneOrig )
#
if 'Canada' not in setNoAmNumbPlanCountries:
#
lProblems.append( 'setNoAmNumbPlanCountries Canada should be in' )
#
#
if 'Thailand' in setNoAmNumbPlanCountries:
#
lProblems.append( 'setNoAmNumbPlanCountries Thailand should not be in' )
#
#
sPhone = '+1 212 555-1212'
#
if isPhoneCanada( sPhone ):
#
lProblems.append( 'isPhoneCanada() NYC # should not be in' )
#
#
if isPhoneCaribbean( sPhone ):
#
lProblems.append( 'isPhoneCaribbean() NYC # should not be in' )
#
#
sPhone = '+1 709 555-1212'
#
if not isPhoneCanada( sPhone ):
#
lProblems.append( 'isPhoneCanada() 709 should be in' )
#
#
sPhone = '19376603668'
#
if isPhoneCanada( sPhone ):
#
lProblems.append( 'isPhoneCanada() 937 is OH, should be out' )
#
#
sPhone = '19376603668'
#
bLikePhoneUSA, bFormatLikeUSA = isFormatOrLikeUSA( sPhone )
#
if not bLikePhoneUSA:
#
lProblems.append( 'bLikePhoneUSA() 19376603668' )
#
#
sPhone = '202957-0088'
#
bLikePhoneUSA, bFormatLikeUSA = isFormatOrLikeUSA( sPhone )
#
if not bLikePhoneUSA:
#
lProblems.append( 'bLikePhoneUSA() 202957-0088' )
#
#
sAreaCodeMaybe = '206' # Seattle
#
if not isAreaCodeForUSA( sAreaCodeMaybe ):
#
lProblems.append( 'isAreaCodeForUSA() 206 Seattle' )
#
#
sAreaCodeMaybe = '236' # British Columba
#
if isAreaCodeForUSA( sAreaCodeMaybe ):
#
lProblems.append( 'isAreaCodeForUSA() 236 BC' )
#
#
sPhoneOrig = '001 0049 171 363 7987'
sPhoneDigits = '00100491713637987'
sThisCountryDialingCode = '49'
#
sOtherCountryCode = hasObviousOtherCountryCode(
sPhoneOrig, sPhoneDigits, sThisCountryDialingCode, [], [] )
#
if sOtherCountryCode != '':
#
print3( 'sOtherCountryCode: ', sOtherCountryCode )
lProblems.append( 'hasObviousOtherCountryCode() 001 0049 171 363 7987 Germany' )
#
#
sPhoneOrig = '011 4 32763441'
sPhoneDigits = '011432763441'
sThisCountryDialingCode = '33'
#
sOtherCountryCode = hasObviousOtherCountryCode(
sPhoneOrig, sPhoneDigits, sThisCountryDialingCode, [], [] )
#
if sOtherCountryCode != '':
#
print3( 'sOtherCountryCode: ', sOtherCountryCode )
lProblems.append( 'hasObviousOtherCountryCode() 011 4 32763441 France' )
#
#
sPhoneOrig = '+223 301688'
sPhoneDigits = '223 301688'
sThisCountryDialingCode = '33'
#
sOtherCountryCode = hasObviousOtherCountryCode(
sPhoneOrig, sPhoneDigits, sThisCountryDialingCode, [], [] )
#
if sOtherCountryCode != '223':
#
print3( 'sOtherCountryCode: ', sOtherCountryCode )
lProblems.append( 'hasObviousOtherCountryCode() +223 301688 France' )
#
#
sPhoneOrig = '0041 7879818044'
sPhoneDigits = '00417879818044'
sThisCountryDialingCode = '44'
#
sOtherCountryCode = hasObviousOtherCountryCode(
sPhoneOrig, sPhoneDigits, sThisCountryDialingCode, [], [] )
#
if sOtherCountryCode != '41':
#
lProblems.append( 'hasObviousOtherCountryCode() 0041 7879818044 UK' )
#
#
sPhoneOrig = '011447924406909'
sPhoneDigits = '011447924406909'
sThisCountryDialingCode = '39'
#
sOtherCountryCode = hasObviousOtherCountryCode(
sPhoneOrig, sPhoneDigits, sThisCountryDialingCode, [], [] )
#
if sOtherCountryCode != '44':
#
lProblems.append( 'hasObviousOtherCountryCode() 011447924406909 Italy' )
#
#
sPhoneOrig = '33631126088'
sPhoneDigits = '33631126088'
sThisCountryDialingCode = '33'
#
sOtherCountryCode = hasObviousOtherCountryCode(
sPhoneOrig, sPhoneDigits, sThisCountryDialingCode, [], [] )
#
if sOtherCountryCode != '':
#
print3( 'sOtherCountryCode: ', sOtherCountryCode )
lProblems.append( 'hasObviousOtherCountryCode() 33631126088 France' )
#
'''
#
'''
#
sPhoneOrig = '011 250 353-9685'
sPhoneDigits = '0112503539685'
sThisCountryDialingCode = '1'
#
sOtherCountryCode = hasObviousOtherCountryCode(
sPhoneOrig, sPhoneDigits, sThisCountryDialingCode, [], [] )
#
if sOtherCountryCode != '250':
#
print3( 'sOtherCountryCode: ', sOtherCountryCode )
lProblems.append( 'hasObviousOtherCountryCode() 011 250 353-9685 Canada' )
#
#
sPhoneOrig = '001-40-22697638'
sPhoneDigits = '0014022697638'
sThisCountryDialingCode = '49'
#
sOtherCountryCode = hasObviousOtherCountryCode(
sPhoneOrig, sPhoneDigits, sThisCountryDialingCode,
[3, 2, 8], ['001', '40', '22697638'] )
#
if sOtherCountryCode != '40':
#
lProblems.append( 'hasObviousOtherCountryCode() 001-40-22697638 Italy' )
#
#
if isPhoneNoAmDialingPlan( sPhoneOrig ):
#
lProblems.append( 'isPhoneNoAmDialingPlan() 001-40-22697638' )
#
sPhone = '+1 201-984-5835'
#
sAreaCode = getAreaCode( sPhone )
#
if sAreaCode != '201':
#
lProblems.append( 'getAreaCode() +1 201-984-5835' )
#
#
sPhone = '334 9583672'
#
if isFormatLikeUSA( sPhone ):
#
lProblems.append( 'isFormatLikeUSA() 334 9583672' )
#
#
sPhone = '334 9583672'
#
if hasExtension( sPhone ):
#
lProblems.append( 'hasExtension() no extension' )
#
#
if not isPhoneNo( sPhone ):
#
lProblems.append( 'isPhoneNo( %s )' % sPhone )
#
#
sPhone = '334 9583672 # 123'
#
if not hasExtension( sPhone ):
#
lProblems.append( 'hasExtension() has extension' )
#
#
if not isPhoneNo( sPhone ):
#
lProblems.append( 'isPhoneNo( %s )' % sPhone )
#
#
sPhone = '334 9583672 x 123'
lExtension = hasExtension( sPhone )
#
if not lExtension:
#
print3( 'lExtension:', lExtension )
lProblems.append( 'hasExtension() has extension' )
#
#
sPhone = '334 9583672 Mx 123'
lExtension = hasExtension( sPhone )
#
if lExtension:
#
print3( 'lExtension:', lExtension )
lProblems.append( 'hasExtension() not an extension' )
#
#
sPhone = '334 9583672 x 123'
#
if getDigitCount( sPhone ) != 10:
#
print3( "getDigitCount( '334 9583672 x 123' ):", getDigitCount( sPhone ) )
lProblems.append( 'getDigitCount() has extension' )
#
#
sPhone = '+1 201-984-5835'
#
if getDigitCount( sPhone ) != 11:
#
print3( "getDigitCount( '+1 201-984-5835' ):", getDigitCount( sPhone ) )
lProblems.append( 'getDigitCount() no extension' )
#
#
if not isPhoneNoAmDialingPlan( sPhone ):
#
lProblems.append( 'isPhoneNoAmDialingPlan() +1 201-984-5835' )
#
#
if not isPhoneNo( sPhone ):
#
lProblems.append( 'isPhoneNo( %s )' % sPhone )
#
#
sPhone = 'spam and eggs'
#
if getDigitCount( sPhone ) != 0:
#
lProblems.append( 'getDigitCount() all alpha' )
#
#
if isValidPhone( sPhone ):
#
lProblems.append( 'isValidPhone( %s )' % sPhone )
#
#
#
sPhone = '334 9583672 x 123'
#
if getNumberDropExtension( sPhone ) != '334 9583672':
#
lProblems.append( 'getNumberDropExtension()' )
#
#
#
sPhone = '+250 3539685'
#
if isPhoneNoAmDialingPlan( sPhone ):
#
lProblems.append( 'isPhoneNoAmDialingPlan() +250 3539685' )
#
#
if isPhoneCanada( sPhone ):
#
lProblems.append( 'isPhoneCanada() +250 3539685' )
#
if not isPhoneNo( sPhone ):
#
lProblems.append( 'isPhoneNo( %s )' % sPhone )
#
#
sPhone = '+1 345 555-1212'
#
if not isPhoneCaribbean( sPhone ):
#
lProblems.append( 'isPhoneCaribbean() 345 should be in' )
#
#
sPhone = '604-264-7570' # BC Canada number
#
if not isFormatOrLikeUSA( sPhone ):
#
lProblems.append( 'isFormatOrLikeUSA() 604-264-7570' )
#
#
#
sPhone = '8030430150'
#
if isPhoneNoAmDialingPlan( sPhone ):
#
lProblems.append( 'isPhoneNoAmDialingPlan() 8030430150' )
#
#
sPhone = '020 5555 5555'
#
if not isBogusPhone( sPhone ):
#
lProblems.append( 'isBogusPhone() 020 5555 5555' )
#
#
if not isPhoneNo( sPhone ):
#
lProblems.append( 'isPhoneNo( %s )' % sPhone )
#
#
sPhone = '0041 218282000'
#
if isBogusPhone( sPhone, bExplain = True ):
#
lProblems.append( 'isBogusPhone() 0041 218282000' )
#
#
if not isPhoneNo( sPhone ):
#
lProblems.append( 'isPhoneNo( %s )' % sPhone )
#
#
sPhone = '011 44 20 7033 0000'
#
if isBogusPhone( sPhone, bExplain = True ):
#
lProblems.append( 'isBogusPhone() 011 44 20 7033 0000' )
#
#
#
sPhone = '(781) 062-2337'
#
if isFormatLikeUSA( sPhone ):
#
lProblems.append( 'isFormatLikeUSA() (781) 062-2337' )
#
#
if not isPhoneNo( sPhone ):
#
lProblems.append( 'isPhoneNo( %s )' % sPhone )
#
#
sPhone = '01483 273 0890'
#
if isPhoneUK( sPhone ):
#
lProblems.append( 'isPhoneUK() 01483 273 0890 too many digits' )
#
#
sPhone = '403-381-8691'
#
if isPhoneUK( sPhone ):
#
lProblems.append( 'isPhoneUK() 403-381-8691 Calgary AB' )
#
#
if not isPhoneNo( sPhone ):
#
lProblems.append( 'isPhoneNo( %s )' % sPhone )
#
#
sPhone = '0048505490185'
#
if isPhoneUK( sPhone ):
#
lProblems.append( 'isPhoneUK() 0048505490185 Polish' )
#
#
sPhone = '20777948973'
#
if isPhoneUK( sPhone ):
#
lProblems.append( 'isPhoneUK() 20777948973 UK missing zero too many digits' )
#
#
sPhone = '1415-738 7369'
#
if isPhoneUK( sPhone ):
#
lProblems.append( 'isPhoneUK() 1415-738 7369 CA' )
#
#
sPhone = '02392356713'
#
if not isPhoneUK( sPhone ):
#
lProblems.append( 'isPhoneUK() 02392356713 UK London land line' )
#
#
sPhone = '(0)747 222 4322'
#
if not isPhoneUK( sPhone ):
#
lProblems.append( 'isPhoneUK() (0)747 222 4322 UK mobile' )
#
#
sPhone = '+44 (0)747 222 4322'
#
if not isPhoneUK( sPhone ):
#
lProblems.append( 'isPhoneUK() +44 (0)747 222 4322 UK mobile' )
#
#
sPhone = '011 44 (0)747 222 4322'
#
if not isPhoneUK( sPhone ):
#
lProblems.append( 'isPhoneUK() 011 44 (0)747 222 4322 UK mobile' )
#
#
if not isPhoneNo( sPhone ):
#
lProblems.append( 'isPhoneNo( %s )' % sPhone )
#
#
sPhone = '20777948973'
#
#
sIddCode = '1'
#
lWant = ['Canada', 'United States']
lGot = getCountryListGotIddCode( sIddCode )
lGot.sort()
#
if lGot != lWant:
#
lProblems.append( 'getCountryListGotIddCode() 1' )
#
# print3( getCountryListGotIddCode( sIddCode ) )
#
#
sIddCode = '49'
#
l = ['Germany']
#
if getCountryListGotIddCode( sIddCode ) != l:
#
lProblems.append( 'getCountryListGotIddCode() 49' )
#
#
sCountry = 'Kosovo'
#
if getCodeGotCountry( sCountry ) != '381':
#
lProblems.append( 'getCodeGotCountry() Kosovo' )
#
#
sPhone = '4169244082'
#
if not isPhoneCanada( sPhone ):
#
lProblems.append( 'isPhoneCanada() 4169244082' )
#
#
if not isPhoneNoAmDialingPlan( sPhone ):
#
lProblems.append( 'isPhoneNoAmDialingPlan() 4169244082' )
#
#
if not isPhoneNo( sPhone ):
#
lProblems.append( 'isPhoneNo( %s )' % sPhone )
#
#
sPhone = '3223749262'
#
if isPhoneNoAmDialingPlan( sPhone ):
#
lProblems.append( 'isPhoneNoAmDialingPlan() ' + sPhone )
#
#
sPhone = '++49-(0)-6131 - 475556'
#
oPhone = getDigitsOtherObject( sPhone )
#
lDigits = ['49', '0', '6131', '475556']
lOthers = ['++', '-(', ')-', ' - ']
lParts = ['++', '49', '-(', '0', ')-', '6131', ' - ', '475556']
lWhich = ['o', 'd', 'o', 'd', 'o', 'd', 'o', 'd']
#
if ( oPhone.lDigits != lDigits or
oPhone.lOthers != lOthers or
oPhone.lParts != lParts or
oPhone.lWhich != lWhich ):
#
lProblems.append( 'getDigitsOtherObject() ' + sPhone )
#
#
sPhone = '++49-(0)-6131 - 475556 x'
#
oPhone = getDigitsOtherObject( sPhone )
#
#
#
'''
'''
sayTestResult( lProblems ) | gpl-2.0 |
crichardson17/starburst_atlas | Low_resolution_sims/Dusty_LowRes/Padova_inst/padova_inst_0/fullgrid/UV1.py | 31 | 9315 | import csv
import matplotlib.pyplot as plt
from numpy import *
import scipy.interpolate
import math
from pylab import *
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
import matplotlib.patches as patches
from matplotlib.path import Path
import os
# ------------------------------------------------------------------------------------------------------
# Inputs: scan the working directory for the three Cloudy grid files
# (*1.grd, *2.grd, *3.grd) and the matching emission-line tables
# (*1.txt, *2.txt, *3.txt). If several files share a suffix, the last
# one returned by os.listdir wins, exactly as before.
for entry in os.listdir('.'):
    if entry.endswith("1.grd"):
        gridfile1 = entry
for entry in os.listdir('.'):
    if entry.endswith("2.grd"):
        gridfile2 = entry
for entry in os.listdir('.'):
    if entry.endswith("3.grd"):
        gridfile3 = entry
# ------------------------
for entry in os.listdir('.'):
    if entry.endswith("1.txt"):
        Elines1 = entry
for entry in os.listdir('.'):
    if entry.endswith("2.txt"):
        Elines2 = entry
for entry in os.listdir('.'):
    if entry.endswith("3.txt"):
        Elines3 = entry
# ------------------------------------------------------------------------------------------------------
# Patches data
# Literature comparison regions, one closed quadrilateral per data set, in
# (log n_H, log phi_H) space. The trailing (0, 0) vertex is a placeholder
# required by the CLOSEPOLY code and is ignored when the path is drawn.
# Fix: the original rebuilt `path = Path(verts, codes)` three times; the
# duplicate no-op reassignments have been removed.
# For the Kewley and Levesque data
verts = [
    (1., 7.97712125471966000000),  # left, bottom
    (1., 9.57712125471966000000),  # left, top
    (2., 10.57712125471970000000),  # right, top
    (2., 8.97712125471966000000),  # right, bottom
    (0., 0.),  # ignored
    ]
# Shared drawing codes for all three quadrilaterals.
codes = [Path.MOVETO,
    Path.LINETO,
    Path.LINETO,
    Path.LINETO,
    Path.CLOSEPOLY,
    ]
path = Path(verts, codes)
# ------------------------
# For the Kewley 01 data
verts2 = [
    (2.4, 9.243038049),  # left, bottom
    (2.4, 11.0211893),  # left, top
    (2.6, 11.0211893),  # right, top
    (2.6, 9.243038049),  # right, bottom
    (0, 0.),  # ignored
    ]
path2 = Path(verts2, codes)
# -------------------------
# For the Moy et al data
verts3 = [
    (1., 6.86712125471966000000),  # left, bottom
    (1., 10.18712125471970000000),  # left, top
    (3., 12.18712125471970000000),  # right, top
    (3., 8.86712125471966000000),  # right, bottom
    (0., 0.),  # ignored
    ]
path3 = Path(verts3, codes)
# ------------------------------------------------------------------------------------------------------
# The routine to add patches for other people's data onto our plots.
def add_patches(ax):
    """Overlay the literature comparison regions (Moy et al., Kewley 01,
    Kewley & Levesque) as colored patches on the given axes.

    Bug fix: the original body ignored the ``ax`` argument and always
    drew on the global ``ax1``; the patches are now added to the axes
    actually passed in. Behavior at the existing call site is unchanged
    because the script calls ``add_patches(ax1)``.
    """
    patch3 = patches.PathPatch(path3, facecolor='yellow', lw=0)
    patch2 = patches.PathPatch(path2, facecolor='green', lw=0)
    patch = patches.PathPatch(path, facecolor='red', lw=0)
    # Largest region first so the smaller regions remain visible on top.
    ax.add_patch(patch3)
    ax.add_patch(patch2)
    ax.add_patch(patch)
# ------------------------------------------------------------------------------------------------------
# The subplot routine: draw one emission-line contour panel of a 4x4 grid.
def add_sub_plot(sub_num):
    """Render panel `sub_num` of the 4x4 contour figure.

    Reads module-level state: x, y (grid coordinates), z (line ratios),
    xi, yi (interpolation mesh), levels/levels2 (contour levels), line
    (column indices per panel), headers (line labels) and max_values
    (peak ratio and its location per line).
    """
    numplots = 16
    # Position this panel in a (numplots/4) x 4 subplot grid.
    plt.subplot(numplots/4.,4,sub_num)
    # Radial-basis interpolation of this panel's line ratios onto the mesh.
    rbf = scipy.interpolate.Rbf(x, y, z[:,sub_num-1], function='linear')
    zi = rbf(xi, yi)
    # Dashed cyan minor contours plus solid black major contours.
    contour = plt.contour(xi,yi,zi, levels, colors='c', linestyles = 'dashed')
    contour2 = plt.contour(xi,yi,zi, levels2, colors='k', linewidths=1.5)
    # Mark the (hdens, phi) location of the line's peak ratio and label it.
    plt.scatter(max_values[line[sub_num-1],2], max_values[line[sub_num-1],3], c ='k',marker = '*')
    plt.annotate(headers[line[sub_num-1]], xy=(8,11), xytext=(6,8.5), fontsize = 10)
    plt.annotate(max_values[line[sub_num-1],0], xy= (max_values[line[sub_num-1],2], max_values[line[sub_num-1],3]), xytext = (0, -10), textcoords = 'offset points', ha = 'right', va = 'bottom', fontsize=10)
    if sub_num == numplots / 2.:
        print "half the plots are complete"
    # Axis limits (log n_H on x, log phi_H on y).
    yt_min = 8
    yt_max = 23
    xt_min = 0
    xt_max = 12
    plt.ylim(yt_min,yt_max)
    plt.xlim(xt_min,xt_max)
    plt.yticks(arange(yt_min+1,yt_max,1),fontsize=10)
    plt.xticks(arange(xt_min+1,xt_max,1), fontsize = 10)
    # Only the left column keeps y tick labels; only the bottom row keeps
    # x tick labels (panels share axes with zero spacing).
    if sub_num in [2,3,4,6,7,8,10,11,12,14,15,16]:
        plt.tick_params(labelleft = 'off')
    else:
        plt.tick_params(labelleft = 'on')
        plt.ylabel('Log ($ \phi _{\mathrm{H}} $)')
    if sub_num in [1,2,3,4,5,6,7,8,9,10,11,12]:
        plt.tick_params(labelbottom = 'off')
    else:
        plt.tick_params(labelbottom = 'on')
        plt.xlabel('Log($n _{\mathrm{H}} $)')
    # Corner panels get adjusted tick ranges — presumably to keep corner
    # labels from overlapping between adjacent panels (TODO confirm).
    if sub_num == 1:
        plt.yticks(arange(yt_min+1,yt_max+1,1),fontsize=10)
    if sub_num == 13:
        plt.yticks(arange(yt_min,yt_max,1),fontsize=10)
        plt.xticks(arange(xt_min,xt_max,1), fontsize = 10)
    if sub_num == 16 :
        plt.xticks(arange(xt_min+1,xt_max+1,1), fontsize = 10)
# ---------------------------------------------------
# This is where the grid information (phi and hdens) is read in and saved
# to grid. Note: files are opened in 'rb' because this is Python 2 csv
# usage (csvReader.next() below is also Python-2-only).
grid1 = [];
grid2 = [];
grid3 = [];
with open(gridfile1, 'rb') as f:
    csvReader = csv.reader(f,delimiter='\t')
    for row in csvReader:
        grid1.append(row);
grid1 = asarray(grid1)
with open(gridfile2, 'rb') as f:
    csvReader = csv.reader(f,delimiter='\t')
    for row in csvReader:
        grid2.append(row);
grid2 = asarray(grid2)
with open(gridfile3, 'rb') as f:
    csvReader = csv.reader(f,delimiter='\t')
    for row in csvReader:
        grid3.append(row);
grid3 = asarray(grid3)
# Here is where the data for each line is read in and saved to
# dataEmissionlines. The first row of each .txt file is a header row
# naming the emission lines; it is captured separately.
dataEmissionlines1 = [];
dataEmissionlines2 = [];
dataEmissionlines3 = [];
with open(Elines1, 'rb') as f:
    csvReader = csv.reader(f,delimiter='\t')
    headers = csvReader.next()
    for row in csvReader:
        dataEmissionlines1.append(row);
dataEmissionlines1 = asarray(dataEmissionlines1)
with open(Elines2, 'rb') as f:
    csvReader = csv.reader(f,delimiter='\t')
    headers2 = csvReader.next()
    for row in csvReader:
        dataEmissionlines2.append(row);
dataEmissionlines2 = asarray(dataEmissionlines2)
with open(Elines3, 'rb') as f:
    csvReader = csv.reader(f,delimiter='\t')
    headers3 = csvReader.next()
    for row in csvReader:
        dataEmissionlines3.append(row);
dataEmissionlines3 = asarray(dataEmissionlines3)
print "import files complete"
# ---------------------------------------------------
# For concatenating grid
# Pull the phi and hdens values from each of the runs; exclude header lines.
grid1new = zeros((len(grid1[:,0])-1,2))
grid1new[:,0] = grid1[1:,6]
grid1new[:,1] = grid1[1:,7]
grid2new = zeros((len(grid2[:,0])-1,2))
# NOTE(review): run 2's first column is filled with the constant 17.0
# rather than read from the file — presumably that run was computed at a
# fixed value; confirm against the grid-generation setup before changing.
x = array(17.00000)
grid2new[:,0] = repeat(x,len(grid2[:,0])-1)
grid2new[:,1] = grid2[1:,6]
grid3new = zeros((len(grid3[:,0])-1,2))
grid3new[:,0] = grid3[1:,6]
grid3new[:,1] = grid3[1:,7]
grid = concatenate((grid1new,grid2new,grid3new))
# Per the assignments above, column 1 holds hdens and column 0 holds phi.
hdens_values = grid[:,1]
phi_values = grid[:,0]
# ---------------------------------------------------
# For concatenating Emission lines data (the first column of each run is
# dropped).
Emissionlines = concatenate((dataEmissionlines1[:,1:],dataEmissionlines2[:,1:],dataEmissionlines3[:,1:]))
# For lines: drop the leading header entry to stay aligned with the data.
headers = headers[1:]
concatenated_data = zeros((len(Emissionlines),len(Emissionlines[0])))
# max_values rows: [peak ratio, argmax row index, hdens at peak, phi at peak]
max_values = zeros((len(concatenated_data[0]),4))
# ---------------------------------------------------
#constructing grid by scaling
#select the scaling factor
#for 1215
#incident = Emissionlines[1:,4]
#for 4860
incident = concatenated_data[:,57]
#take the ratio of incident and all the lines and put it all in an array concatenated_data
for i in range(len(Emissionlines)):
for j in range(len(Emissionlines[0])):
if math.log(4860.*(float(Emissionlines[i,j])/float(Emissionlines[i,57])), 10) > 0:
concatenated_data[i,j] = math.log(4860.*(float(Emissionlines[i,j])/float(Emissionlines[i,57])), 10)
else:
concatenated_data[i,j] == 0
# for 1215
#for i in range(len(Emissionlines)):
# for j in range(len(Emissionlines[0])):
# if math.log(1215.*(float(Emissionlines[i,j])/float(Emissionlines[i,4])), 10) > 0:
# concatenated_data[i,j] = math.log(1215.*(float(Emissionlines[i,j])/float(Emissionlines[i,4])), 10)
# else:
# concatenated_data[i,j] == 0
# ---------------------------------------------------
#find the maxima to plot onto the contour plots
for j in range(len(concatenated_data[0])):
max_values[j,0] = max(concatenated_data[:,j])
max_values[j,1] = argmax(concatenated_data[:,j], axis = 0)
max_values[j,2] = hdens_values[max_values[j,1]]
max_values[j,3] = phi_values[max_values[j,1]]
#to round off the maxima
max_values[:,0] = [ '%.1f' % elem for elem in max_values[:,0] ]
print "data arranged"
# ---------------------------------------------------
# Creating the grid to interpolate with for contours.
gridarray = zeros((len(concatenated_data),2))
gridarray[:,0] = hdens_values
gridarray[:,1] = phi_values
x = gridarray[:,0]
y = gridarray[:,1]
# ---------------------------------------------------
# Change desired lines here! These are column indices into
# concatenated_data, one per panel; trailing comments give wavelengths.
line = [0, #977
        1, #991
        2, #1026
        5, #1216
        91, #1218
        6, #1239
        7, #1240
        8, #1243
        9, #1263
        10, #1304
        11,#1308
        12, #1397
        13, #1402
        14, #1406
        16, #1486
        17] #1531
# Create z array for this plot.
z = concatenated_data[:,line[:]]
# ---------------------------------------------------
# Interpolate onto a 10x10 mesh spanning the data range.
print "starting interpolation"
xi, yi = linspace(x.min(), x.max(), 10), linspace(y.min(), y.max(), 10)
xi, yi = meshgrid(xi, yi)
# ---------------------------------------------------
print "interpolatation complete; now plotting"
# Plot
plt.subplots_adjust(wspace=0, hspace=0) #remove space between plots
levels = arange(10**-1,10, .2)
levels2 = arange(10**-2,10**2, 1)
plt.suptitle("Dusty UV Lines", fontsize=14)
# ---------------------------------------------------
# NOTE(review): add_sub_plot positions panels via plt.subplot(4,4,sub_num),
# which expects 1..16, but this loop passes 0..15 — sub_num 0 looks wrong
# and panel 16 is never drawn. Sibling generated scripts should be checked
# before changing this.
for i in range(16):
    add_sub_plot(i)
ax1 = plt.subplot(4,4,1)
add_patches(ax1)
print "complete"
plt.savefig('Dusty_UV_Lines.pdf')
plt.clf()
print "figure saved"
| gpl-2.0 |
mozilla/make.mozilla.org | vendor-local/lib/python/south/creator/actions.py | 20 | 19103 | """
Actions - things like 'a model was removed' or 'a field was changed'.
Each one has a class, which can take the action description and insert code
blocks into the forwards() and backwards() methods, in the right place.
"""
import sys
from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT
from django.db.models.fields import FieldDoesNotExist, NOT_PROVIDED, CharField, TextField
from south.modelsinspector import value_clean
from south.creator.freezer import remove_useless_attributes, model_key
from south.utils import datetime_utils
class Action(object):
    """
    Base class for migration actions ("a model was added", "a field was
    changed", ...). Subclasses render forwards/backwards code snippets;
    this class knows where to slot them into the forwards() and
    backwards() method bodies.
    """

    # When True, the rendered snippet goes at the front of the relevant
    # list instead of being appended at the end.
    prepend_forwards = False
    prepend_backwards = False

    def forwards_code(self):
        raise NotImplementedError

    def backwards_code(self):
        raise NotImplementedError

    def add_forwards(self, forwards):
        snippet = self.forwards_code()
        if self.prepend_forwards:
            forwards.insert(0, snippet)
        else:
            forwards.append(snippet)

    def add_backwards(self, backwards):
        snippet = self.backwards_code()
        if self.prepend_backwards:
            backwards.insert(0, snippet)
        else:
            backwards.append(snippet)

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        raise NotImplementedError

    @classmethod
    def triples_to_defs(cls, fields):
        # Rewrite each (class, args, kwargs) triple in place as a
        # definition string, returning the same (mutated) mapping.
        for name in fields:
            fields[name] = cls.triple_to_def(fields[name])
        return fields

    @classmethod
    def triple_to_def(cls, triple):
        "Turns a single triple into a definition."
        path, args, kwargs = triple
        # Positional args first, then keyword args, comma-separated.
        pieces = list(args) + ["%s=%s" % (key, val) for key, val in kwargs.items()]
        return "self.gf(%r)(%s)" % (path, ", ".join(pieces))
class AddModel(Action):
    """
    Addition of a model. Takes the Model subclass that is being created
    plus the frozen definitions of its fields.
    """

    FORWARDS_TEMPLATE = '''
        # Adding model '%(model_name)s'
        db.create_table(%(table_name)r, (
            %(field_defs)s
        ))
        db.send_create_signal(%(app_label)r, [%(model_name)r])'''[1:] + "\n"

    BACKWARDS_TEMPLATE = '''
        # Deleting model '%(model_name)s'
        db.delete_table(%(table_name)r)'''[1:] + "\n"

    def __init__(self, model, model_def):
        self.model = model
        self.model_def = model_def

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        meta = self.model._meta
        return " + Added model %s.%s" % (meta.app_label, meta.object_name)

    def forwards_code(self):
        "Produces the code snippet that gets put into forwards()"
        # Render each frozen field as a "('name', self.gf(...)(...))" pair,
        # one per line, with a trailing comma for the tuple literal.
        defs = self.triples_to_defs(self.model_def)
        rendered = ",\n            ".join(
            "(%r, %s)" % (name, defn) for name, defn in defs.items()
        ) + ","
        meta = self.model._meta
        return self.FORWARDS_TEMPLATE % {
            "model_name": meta.object_name,
            "table_name": meta.db_table,
            "app_label": meta.app_label,
            "field_defs": rendered,
        }

    def backwards_code(self):
        "Produces the code snippet that gets put into backwards()"
        meta = self.model._meta
        return self.BACKWARDS_TEMPLATE % {
            "model_name": meta.object_name,
            "table_name": meta.db_table,
        }
class DeleteModel(AddModel):
    """
    Deletion of a model. Reuses AddModel's snippets with the two
    directions swapped.
    """

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        meta = self.model._meta
        return " - Deleted model %s.%s" % (meta.app_label, meta.object_name)

    def forwards_code(self):
        # Deleting a model forwards is exactly adding it backwards.
        return AddModel.backwards_code(self)

    def backwards_code(self):
        return AddModel.forwards_code(self)
class _NullIssuesField(object):
    """
    A field that might need to ask a question about rogue NULL values.

    Mixin used by the field actions below. When a migration would leave
    existing rows without a value for a NOT NULL column, it interactively
    asks the user for a one-off default or (where allowed) to make the
    reverse migration raise instead.
    """
    # Subclasses set this True to offer "option 3": disable the backwards
    # migration entirely.
    allow_third_null_option = False
    # Becomes True when the user picks option 3.
    irreversible = False
    IRREVERSIBLE_TEMPLATE = '''
        # User chose to not deal with backwards NULL issues for '%(model_name)s.%(field_name)s'
        raise RuntimeError("Cannot reverse this migration. '%(model_name)s.%(field_name)s' and its values cannot be restored.")'''
    def deal_with_not_null_no_default(self, field, field_def):
        # If it's a CharField or TextField that's blank, skip this step:
        # the empty string is a safe implicit default.
        if isinstance(field, (CharField, TextField)) and field.blank:
            field_def[2]['default'] = repr("")
            return
        # Oh dear. Ask them what to do.
        print " ? The field '%s.%s' does not have a default specified, yet is NOT NULL." % (
            self.model._meta.object_name,
            field.name,
        )
        print " ? Since you are %s, you MUST specify a default" % self.null_reason
        print " ? value to use for existing rows. Would you like to:"
        print " ? 1. Quit now, and add a default to the field in models.py"
        print " ? 2. Specify a one-off value to use for existing columns now"
        if self.allow_third_null_option:
            print " ? 3. Disable the backwards migration by raising an exception."
        # Loop until a valid option is entered.
        while True:
            choice = raw_input(" ? Please select a choice: ")
            if choice == "1":
                sys.exit(1)
            elif choice == "2":
                break
            elif choice == "3" and self.allow_third_null_option:
                break
            else:
                print " ! Invalid choice."
        if choice == "2":
            self.add_one_time_default(field, field_def)
        elif choice == "3":
            self.irreversible = True
    def add_one_time_default(self, field, field_def):
        # OK, they want to pick their own one-time default. Who are we to refuse?
        print " ? Please enter Python code for your one-off default value."
        print " ? The datetime module is available, so you can do e.g. datetime.date.today()"
        while True:
            code = raw_input(" >>> ")
            if not code:
                print " ! Please enter some code, or 'exit' (with no quotes) to exit."
            elif code == "exit":
                sys.exit(1)
            else:
                # NOTE: eval of user-typed code is intentional here — this
                # is an interactive developer tool, not untrusted input.
                try:
                    result = eval(code, {}, {"datetime": datetime_utils})
                except (SyntaxError, NameError), e:
                    print " ! Invalid input: %s" % e
                else:
                    break
        # Right, add the default in (serialised for the frozen field def).
        field_def[2]['default'] = value_clean(result)
    def irreversable_code(self, field):
        # (sic: "irreversable" is misspelled, but the name is part of the
        # API used by subclasses, so it is kept.)
        # table_name and field_column are supplied but unused by the
        # current template; kept for template flexibility.
        return self.IRREVERSIBLE_TEMPLATE % {
            "model_name": self.model._meta.object_name,
            "table_name": self.model._meta.db_table,
            "field_name": field.name,
            "field_column": field.column,
        }
class AddField(Action, _NullIssuesField):
    """
    Adds a field to a model. Takes a Model class and the field name.
    """

    null_reason = "adding this field"

    FORWARDS_TEMPLATE = '''
        # Adding field '%(model_name)s.%(field_name)s'
        db.add_column(%(table_name)r, %(field_name)r,
                      %(field_def)s,
                      keep_default=False)'''[1:] + "\n"

    BACKWARDS_TEMPLATE = '''
        # Deleting field '%(model_name)s.%(field_name)s'
        db.delete_column(%(table_name)r, %(field_column)r)'''[1:] + "\n"

    def __init__(self, model, field, field_def):
        self.model = model
        self.field = field
        self.field_def = field_def
        # A NOT NULL column without a default needs a one-off value for
        # existing rows (far too common a mistake) — prompt the user.
        has_default = (self.field.default is not None) and (self.field.default is not NOT_PROVIDED)
        if not self.field.null and not has_default:
            self.deal_with_not_null_no_default(self.field, self.field_def)

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        meta = self.model._meta
        return " + Added field %s on %s.%s" % (
            self.field.name, meta.app_label, meta.object_name)

    def forwards_code(self):
        return self.FORWARDS_TEMPLATE % {
            "model_name": self.model._meta.object_name,
            "table_name": self.model._meta.db_table,
            "field_name": self.field.name,
            "field_column": self.field.column,
            "field_def": self.triple_to_def(self.field_def),
        }

    def backwards_code(self):
        return self.BACKWARDS_TEMPLATE % {
            "model_name": self.model._meta.object_name,
            "table_name": self.model._meta.db_table,
            "field_name": self.field.name,
            "field_column": self.field.column,
        }
class DeleteField(AddField):
    """
    Removes a field from a model. Takes a Model class and the field name.
    """

    null_reason = "removing this field"
    # Re-adding deleted data may be impossible, so offer the "raise on
    # reverse" option.
    allow_third_null_option = True

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        meta = self.model._meta
        return " - Deleted field %s on %s.%s" % (
            self.field.name, meta.app_label, meta.object_name)

    def forwards_code(self):
        # Deleting forwards is AddField's backwards snippet.
        return AddField.backwards_code(self)

    def backwards_code(self):
        # Re-add the column — unless the user opted out, in which case the
        # generated migration raises on reversal.
        if self.irreversible:
            return self.irreversable_code(self.field)
        return AddField.forwards_code(self)
class ChangeField(Action, _NullIssuesField):
    """
    Changes a field's type/options on a model.

    Holds both the old and the new field objects plus their frozen
    definitions, so it can emit alter_column (and, when the column name
    changed, rename_column) calls in both directions.
    """
    null_reason = "making this field non-nullable"
    # The same alter_column snippet serves both directions; only the
    # substituted field definition differs.
    FORWARDS_TEMPLATE = BACKWARDS_TEMPLATE = '''
        # Changing field '%(model_name)s.%(field_name)s'
        db.alter_column(%(table_name)r, %(field_column)r, %(field_def)s)'''
    RENAME_TEMPLATE = '''
        # Renaming column for '%(model_name)s.%(field_name)s' to match new field type.
        db.rename_column(%(table_name)r, %(old_column)r, %(new_column)r)'''
    def __init__(self, model, old_field, new_field, old_def, new_def):
        self.model = model
        self.old_field = old_field
        self.new_field = new_field
        self.old_def = old_def
        self.new_def = new_def
        # See if they've changed nullability without a usable default in
        # the direction that needs one; if so, interactively resolve it
        # (may mutate the defs or set self.irreversible — see
        # _NullIssuesField).
        new_default = (self.new_field.default is not None) and (self.new_field.default is not NOT_PROVIDED)
        old_default = (self.old_field.default is not None) and (self.old_field.default is not NOT_PROVIDED)
        if self.old_field.null and not self.new_field.null and not new_default:
            self.deal_with_not_null_no_default(self.new_field, self.new_def)
        if not self.old_field.null and self.new_field.null and not old_default:
            # Going nullable: the *backwards* migration is the risky one,
            # so allow the "raise on reverse" option.
            self.null_reason = "making this field nullable"
            self.allow_third_null_option = True
            self.deal_with_not_null_no_default(self.old_field, self.old_def)
    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        return " ~ Changed field %s on %s.%s" % (
            self.new_field.name,
            self.model._meta.app_label,
            self.model._meta.object_name,
        )
    def _code(self, old_field, new_field, new_def):
        # Shared renderer for both directions: an optional rename_column
        # (when the column name differs) followed by the alter_column call
        # for the target definition.
        output = ""
        if self.old_field.column != self.new_field.column:
            output += self.RENAME_TEMPLATE % {
                "model_name": self.model._meta.object_name,
                "table_name": self.model._meta.db_table,
                "field_name": new_field.name,
                "old_column": old_field.column,
                "new_column": new_field.column,
            }
        output += self.FORWARDS_TEMPLATE % {
            "model_name": self.model._meta.object_name,
            "table_name": self.model._meta.db_table,
            "field_name": new_field.name,
            "field_column": new_field.column,
            "field_def": self.triple_to_def(new_def),
        }
        return output
    def forwards_code(self):
        return self._code(self.old_field, self.new_field, self.new_def)
    def backwards_code(self):
        if not self.irreversible:
            # Reverse by altering back to the *old* definition.
            return self._code(self.new_field, self.old_field, self.old_def)
        else:
            return self.irreversable_code(self.old_field)
class AddUnique(Action):
    """
    Adds a unique constraint to a model. Takes a Model class and the field names.
    """

    FORWARDS_TEMPLATE = '''
        # Adding unique constraint on '%(model_name)s', fields %(field_names)s
        db.create_unique(%(table_name)r, %(fields)r)'''[1:] + "\n"

    BACKWARDS_TEMPLATE = '''
        # Removing unique constraint on '%(model_name)s', fields %(field_names)s
        db.delete_unique(%(table_name)r, %(fields)r)'''[1:] + "\n"

    # When reversing, the constraint must be dropped before anything else.
    prepend_backwards = True

    def __init__(self, model, fields):
        self.model = model
        self.fields = fields

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        meta = self.model._meta
        names = [field.name for field in self.fields]
        return " + Added unique constraint for %s on %s.%s" % (
            names, meta.app_label, meta.object_name)

    def _template_context(self):
        # Shared %-substitution values for both directions.
        return {
            "model_name": self.model._meta.object_name,
            "table_name": self.model._meta.db_table,
            "fields": [field.column for field in self.fields],
            "field_names": [field.name for field in self.fields],
        }

    def forwards_code(self):
        return self.FORWARDS_TEMPLATE % self._template_context()

    def backwards_code(self):
        return self.BACKWARDS_TEMPLATE % self._template_context()
class DeleteUnique(AddUnique):
    """
    Removes a unique constraint from a model. Takes a Model class and the field names.
    """

    # Going forwards the constraint must be dropped first; going backwards
    # it is re-created last (opposite of AddUnique).
    prepend_forwards = True
    prepend_backwards = False

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        meta = self.model._meta
        names = [field.name for field in self.fields]
        return " - Deleted unique constraint for %s on %s.%s" % (
            names, meta.app_label, meta.object_name)

    def forwards_code(self):
        # Removal forwards is AddUnique's backwards snippet, and vice versa.
        return AddUnique.backwards_code(self)

    def backwards_code(self):
        return AddUnique.forwards_code(self)
class AddIndex(AddUnique):
    """
    Adds an index to a model field[s]. Takes a Model class and the field names.
    """

    FORWARDS_TEMPLATE = '''
        # Adding index on '%(model_name)s', fields %(field_names)s
        db.create_index(%(table_name)r, %(fields)r)'''[1:] + "\n"

    BACKWARDS_TEMPLATE = '''
        # Removing index on '%(model_name)s', fields %(field_names)s
        db.delete_index(%(table_name)r, %(fields)r)'''[1:] + "\n"

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        meta = self.model._meta
        names = [field.name for field in self.fields]
        return " + Added index for %s on %s.%s" % (
            names, meta.app_label, meta.object_name)
class DeleteIndex(AddIndex):
    """
    Deletes an index off a model field[s]. Takes a Model class and the field names.
    """

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        meta = self.model._meta
        names = [field.name for field in self.fields]
        # (The original prefix is ' + ' even for a deletion; preserved.)
        return " + Deleted index for %s on %s.%s" % (
            names, meta.app_label, meta.object_name)

    def forwards_code(self):
        # Dropping the index forwards reuses AddIndex's backwards snippet.
        return AddIndex.backwards_code(self)

    def backwards_code(self):
        return AddIndex.forwards_code(self)
class AddM2M(Action):
    """
    Adds the intermediary join table for a ManyToMany field.
    Takes the Model class and the M2M field.
    """

    # Generated migration code: creates the join table plus a unique
    # constraint over the two foreign-key columns.
    FORWARDS_TEMPLATE = '''
        # Adding M2M table for field %(field_name)s on '%(model_name)s'
        db.create_table(%(table_name)r, (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            (%(left_field)r, models.ForeignKey(orm[%(left_model_key)r], null=False)),
            (%(right_field)r, models.ForeignKey(orm[%(right_model_key)r], null=False))
        ))
        db.create_unique(%(table_name)r, [%(left_column)r, %(right_column)r])'''[1:] + "\n"

    BACKWARDS_TEMPLATE = '''
        # Removing M2M table for field %(field_name)s on '%(model_name)s'
        db.delete_table('%(table_name)s')'''[1:] + "\n"

    def __init__(self, model, field):
        # Unlike the field-list actions above, this action holds exactly one
        # ManyToMany field.
        self.model = model
        self.field = field

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        return " + Added M2M table for %s on %s.%s" % (
            self.field.name,
            self.model._meta.app_label,
            self.model._meta.object_name,
        )

    def forwards_code(self):
        return self.FORWARDS_TEMPLATE % {
            "model_name": self.model._meta.object_name,
            "field_name": self.field.name,
            "table_name": self.field.m2m_db_table(),
            "left_field": self.field.m2m_column_name()[:-3],  # Remove the _id part
            "left_column": self.field.m2m_column_name(),
            "left_model_key": model_key(self.model),
            "right_field": self.field.m2m_reverse_name()[:-3],  # Remove the _id part
            "right_column": self.field.m2m_reverse_name(),
            "right_model_key": model_key(self.field.rel.to),
        }

    def backwards_code(self):
        return self.BACKWARDS_TEMPLATE % {
            "model_name": self.model._meta.object_name,
            "field_name": self.field.name,
            "table_name": self.field.m2m_db_table(),
        }
class DeleteM2M(AddM2M):
    """
    Removes the intermediary join table for a ManyToMany field.
    Takes the Model class and the M2M field.
    """

    def console_line(self):
        "Returns the string to print on the console, e.g. ' + Added field foo'"
        return " - Deleted M2M table for %s on %s.%s" % (
            self.field.name,
            self.model._meta.app_label,
            self.model._meta.object_name,
        )

    def forwards_code(self):
        # Dropping the table forwards mirrors AddM2M's backwards step.
        return AddM2M.backwards_code(self)

    def backwards_code(self):
        # Recreating the table backwards mirrors AddM2M's forwards step.
        return AddM2M.forwards_code(self)
| bsd-3-clause |
bob-the-hamster/commandergenius | project/jni/python/src/Lib/plat-mac/Carbon/QDOffscreen.py | 82 | 1266 | # Generated from 'QDOffscreen.h'
# Identity helper: the C header packs four chars into an OSType; in Python the
# literal is kept as-is.
def FOUR_CHAR_CODE(x): return x

# Bit positions of the GWorld allocation flags (from QDOffscreen.h).
pixPurgeBit = 0
noNewDeviceBit = 1
useTempMemBit = 2
keepLocalBit = 3
useDistantHdwrMemBit = 4
useLocalHdwrMemBit = 5
pixelsPurgeableBit = 6
pixelsLockedBit = 7
mapPixBit = 16
newDepthBit = 17
alignPixBit = 18
newRowBytesBit = 19
reallocPixBit = 20
clipPixBit = 28
stretchPixBit = 29
ditherPixBit = 30
gwFlagErrBit = 31

# Flag masks derived from the bit numbers above (Python 2 long literals;
# this module predates Python 3).
pixPurge = 1L << pixPurgeBit
noNewDevice = 1L << noNewDeviceBit
useTempMem = 1L << useTempMemBit
keepLocal = 1L << keepLocalBit
useDistantHdwrMem = 1L << useDistantHdwrMemBit
useLocalHdwrMem = 1L << useLocalHdwrMemBit
pixelsPurgeable = 1L << pixelsPurgeableBit
pixelsLocked = 1L << pixelsLockedBit
kAllocDirectDrawSurface = 1L << 14
mapPix = 1L << mapPixBit
newDepth = 1L << newDepthBit
alignPix = 1L << alignPixBit
newRowBytes = 1L << newRowBytesBit
reallocPix = 1L << reallocPixBit
clipPix = 1L << clipPixBit
stretchPix = 1L << stretchPixBit
ditherPix = 1L << ditherPixBit
gwFlagErr = 1L << gwFlagErrBit

# GDevice attribute flags.
deviceIsIndirect = (1L << 0)
deviceNeedsLock = (1L << 1)
deviceIsStatic = (1L << 2)
deviceIsExternalBuffer = (1L << 3)
deviceIsDDSurface = (1L << 4)
deviceIsDCISurface = (1L << 5)
deviceIsGDISurface = (1L << 6)
deviceIsAScreen = (1L << 7)
deviceIsOverlaySurface = (1L << 8)
| lgpl-2.1 |
elventear/ansible | lib/ansible/modules/source_control/gitlab_group.py | 15 | 7499 | #!/usr/bin/python
# (c) 2015, Werner Dijkerman (ikben@werner-dijkerman.nl)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: gitlab_group
short_description: Creates/updates/deletes Gitlab Groups
description:
- When the group does not exists in Gitlab, it will be created.
- When the group does exists and state=absent, the group will be deleted.
version_added: "2.1"
author: "Werner Dijkerman (@dj-wasabi)"
requirements:
- pyapi-gitlab python module
options:
server_url:
description:
- Url of Gitlab server, with protocol (http or https).
required: true
validate_certs:
description:
- When using https if SSL certificate needs to be verified.
required: false
default: true
aliases:
- verify_ssl
login_user:
description:
- Gitlab user name.
required: false
default: null
login_password:
description:
- Gitlab password for login_user
required: false
default: null
login_token:
description:
- Gitlab token for logging in.
required: false
default: null
name:
description:
- Name of the group you want to create.
required: true
path:
description:
- The path of the group you want to create, this will be server_url/group_path
- If not supplied, the group_name will be used.
required: false
default: null
state:
description:
- create or delete group.
- Possible values are present and absent.
required: false
default: "present"
choices: ["present", "absent"]
'''
EXAMPLES = '''
- name: Delete Gitlab Group
gitlab_group:
server_url: http://gitlab.example.com
validate_certs: False
login_token: WnUzDsxjy8230-Dy_k
name: my_first_group
state: absent
delegate_to: localhost
- name: Create Gitlab Group
gitlab_group:
server_url: https://gitlab.example.com
validate_certs: True
login_user: dj-wasabi
login_password: MySecretPassword
name: my_first_group
path: my_first_group
state: present
delegate_to: localhost
'''
RETURN = '''# '''
# Probe for the optional pyapi-gitlab dependency; main() fails with a clear
# message when it is missing.
try:
    import gitlab
    HAS_GITLAB_PACKAGE = True
except ImportError:
    # BUGFIX: narrowed from a bare `except:`, which also swallowed
    # SystemExit and KeyboardInterrupt.
    HAS_GITLAB_PACKAGE = False
from ansible.module_utils.basic import *
from ansible.module_utils.pycompat24 import get_exception
class GitLabGroup(object):
    """Thin wrapper around the pyapi-gitlab client for group operations."""

    def __init__(self, module, git):
        self._module = module
        self._gitlab = git

    def createGroup(self, group_name, group_path):
        """Create the group; in check mode only report the pending change."""
        if self._module.check_mode:
            self._module.exit_json(changed=True)
        return self._gitlab.creategroup(group_name, group_path)

    def deleteGroup(self, group_name):
        """Delete the group, refusing while any project still lives in it."""
        group_id = self.idGroup(group_name)
        projects = self._gitlab.getall(self._gitlab.getprojects)
        group_has_projects = any(
            project['namespace']['name'] == group_name for project in projects)
        if group_has_projects:
            self._module.fail_json(msg="There are still projects in this group. These needs to be moved or deleted before this group can be removed.")
            return None
        if self._module.check_mode:
            self._module.exit_json(changed=True)
        return self._gitlab.deletegroup(group_id)

    def existsGroup(self, group_name):
        """Return True when a group with this exact name exists."""
        groups = self._gitlab.getall(self._gitlab.getgroups)
        return any(group['name'] == group_name for group in groups)

    def idGroup(self, group_name):
        """Return the id of the named group, or None when it is absent."""
        for group in self._gitlab.getall(self._gitlab.getgroups):
            if group['name'] == group_name:
                return group['id']
        return None
def main():
    """Entry point: create or delete a Gitlab group as requested."""
    module = AnsibleModule(
        argument_spec=dict(
            server_url=dict(required=True),
            validate_certs=dict(required=False, default=True, type='bool', aliases=['verify_ssl']),
            login_user=dict(required=False, no_log=True),
            login_password=dict(required=False, no_log=True),
            login_token=dict(required=False, no_log=True),
            name=dict(required=True),
            path=dict(required=False),
            state=dict(default="present", choices=["present", "absent"]),
        ),
        supports_check_mode=True
    )

    if not HAS_GITLAB_PACKAGE:
        # BUGFIX: the message used to read "requried" and lacked the closing
        # parenthesis.
        module.fail_json(msg="Missing required gitlab module (check docs or install with: pip install pyapi-gitlab)")

    server_url = module.params['server_url']
    verify_ssl = module.params['validate_certs']
    login_user = module.params['login_user']
    login_password = module.params['login_password']
    login_token = module.params['login_token']
    group_name = module.params['name']
    group_path = module.params['path']
    state = module.params['state']

    # We need both login_user and login_password, or a login_token; otherwise fail.
    if login_user is not None and login_password is not None:
        use_credentials = True
    elif login_token is not None:
        use_credentials = False
    else:
        module.fail_json(msg="No login credentials are given. Use login_user with login_password, or login_token")

    # Default group_path to a filesystem-safe variant of the group name.
    if group_path is None:
        group_path = group_name.replace(" ", "_")

    # Connect to the Gitlab server, either with user/password credentials or
    # with an API token.
    try:
        if use_credentials:
            git = gitlab.Gitlab(host=server_url, verify_ssl=verify_ssl)
            git.login(user=login_user, password=login_password)
        else:
            git = gitlab.Gitlab(server_url, token=login_token, verify_ssl=verify_ssl)
    except Exception:
        e = get_exception()
        module.fail_json(msg="Failed to connect to Gitlab server: %s " % e)

    # Validate if group exists and take action based on "state".
    group = GitLabGroup(module, git)
    group_name = group_name.lower()
    group_exists = group.existsGroup(group_name)

    if group_exists and state == "absent":
        group.deleteGroup(group_name)
        module.exit_json(changed=True, result="Successfully deleted group %s" % group_name)
    else:
        if state == "absent":
            module.exit_json(changed=False, result="Group deleted or does not exists")
        else:
            if group_exists:
                # Existing group: pyapi-gitlab offers no group update call.
                module.exit_json(changed=False)
            else:
                if group.createGroup(group_name, group_path):
                    module.exit_json(changed=True, result="Successfully created or updated the group %s" % group_name)


if __name__ == '__main__':
    main()
| gpl-3.0 |
mcltn/ansible-modules-extras | univention/udm_dns_record.py | 28 | 5619 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
# Copyright (c) 2016, Adfinis SyGroup AG
# Tobias Rueetschi <tobias.ruetschi@adfinis-sygroup.ch>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.univention_umc import (
umc_module_for_add,
umc_module_for_edit,
ldap_search,
base_dn,
config,
uldap,
)
from univention.admin.handlers.dns import (
forward_zone,
reverse_zone,
)
DOCUMENTATION = '''
---
module: udm_dns_record
version_added: "2.2"
author: "Tobias Rueetschi (@2-B)"
short_description: Manage dns entries on a univention corporate server
description:
- "This module allows to manage dns records on a univention corporate server (UCS).
It uses the python API of the UCS to create a new object or edit it."
requirements:
- Python >= 2.6
options:
state:
required: false
default: "present"
choices: [ present, absent ]
description:
- Whether the dns record is present or not.
name:
required: true
description:
- "Name of the record, this is also the DNS record. E.g. www for
www.example.com."
zone:
required: true
description:
- Corresponding DNS zone for this record, e.g. example.com.
type:
required: true
choices: [ host_record, alias, ptr_record, srv_record, txt_record ]
description:
- "Define the record type. C(host_record) is a A or AAAA record,
C(alias) is a CNAME, C(ptr_record) is a PTR record, C(srv_record)
is a SRV record and C(txt_record) is a TXT record."
data:
required: false
default: []
description:
- "Additional data for this record, e.g. ['a': '192.0.2.1'].
Required if C(state=present)."
'''
EXAMPLES = '''
# Create a DNS record on a UCS
- udm_dns_zone: name=www
zone=example.com
type=host_record
data=['a': '192.0.2.1']
'''
RETURN = '''# '''
def main():
    """Entry point: ensure a DNS record is present or absent on a UCS."""
    module = AnsibleModule(
        argument_spec = dict(
            type=dict(required=True, type='str'),
            zone=dict(required=True, type='str'),
            name=dict(required=True, type='str'),
            data=dict(default=[], type='dict'),
            state=dict(default='present', choices=['present', 'absent'], type='str')
        ),
        supports_check_mode=True,
        required_if = ([
            ('state', 'present', ['data'])
        ])
    )

    # Local renamed from `type` to avoid shadowing the builtin; the module
    # parameter name itself is unchanged.
    record_type = module.params['type']
    zone = module.params['zone']
    name = module.params['name']
    data = module.params['data']
    state = module.params['state']
    changed = False
    # BUGFIX: `diff` was only assigned on the state=present path, so the final
    # exit_json(diff=diff) raised NameError for state=absent (and for
    # state=absent with a non-existing record). Initialise it up front.
    diff = None

    # Look up whether the record already exists in LDAP.
    obj = list(ldap_search(
        '(&(objectClass=dNSZone)(zoneName={})(relativeDomainName={}))'.format(zone, name),
        attr=['dNSZone']
    ))

    exists = bool(len(obj))
    container = 'zoneName={},cn=dns,{}'.format(zone, base_dn())
    dn = 'relativeDomainName={},{}'.format(name, container)

    if state == 'present':
        try:
            if not exists:
                # New record: find the forward or reverse zone to attach to.
                so = forward_zone.lookup(
                    config(),
                    uldap(),
                    '(zone={})'.format(zone),
                    scope='domain',
                ) or reverse_zone.lookup(
                    config(),
                    uldap(),
                    '(zone={})'.format(zone),
                    scope='domain',
                )
                obj = umc_module_for_add('dns/{}'.format(record_type), container, superordinate=so[0])
            else:
                obj = umc_module_for_edit('dns/{}'.format(record_type), dn)
            obj['name'] = name
            for k, v in data.items():
                obj[k] = v
            diff = obj.diff()
            # Reuse the computed diff instead of calling obj.diff() twice.
            changed = diff != []
            if not module.check_mode:
                if not exists:
                    obj.create()
                else:
                    obj.modify()
        except BaseException as e:
            module.fail_json(
                msg='Creating/editing dns entry {} in {} failed: {}'.format(name, container, e)
            )

    if state == 'absent' and exists:
        try:
            obj = umc_module_for_edit('dns/{}'.format(record_type), dn)
            if not module.check_mode:
                obj.remove()
            changed = True
        except BaseException as e:
            module.fail_json(
                msg='Removing dns entry {} in {} failed: {}'.format(name, container, e)
            )

    module.exit_json(
        changed=changed,
        name=name,
        diff=diff,
        container=container
    )


if __name__ == '__main__':
    main()
| gpl-3.0 |
jamesblunt/edx-platform | common/djangoapps/track/migrations/0001_initial.py | 189 | 2527 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration creating the initial track_trackinglog table."""

    def forwards(self, orm):
        # Adding model 'TrackingLog'
        db.create_table('track_trackinglog', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('dtcreated', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('username', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
            ('ip', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
            ('event_source', self.gf('django.db.models.fields.CharField')(max_length=32)),
            ('event_type', self.gf('django.db.models.fields.CharField')(max_length=32, blank=True)),
            ('event', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('agent', self.gf('django.db.models.fields.CharField')(max_length=256, blank=True)),
            ('page', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, blank=True)),
            ('time', self.gf('django.db.models.fields.DateTimeField')()),
        ))
        db.send_create_signal('track', ['TrackingLog'])

    def backwards(self, orm):
        # Deleting model 'TrackingLog'
        db.delete_table('track_trackinglog')

    # Frozen ORM definition used by South while running this migration.
    models = {
        'track.trackinglog': {
            'Meta': {'object_name': 'TrackingLog'},
            'agent': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
            'dtcreated': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'event': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'event_source': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'event_type': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'ip': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'}),
            'page': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
            'time': ('django.db.models.fields.DateTimeField', [], {}),
            'username': ('django.db.models.fields.CharField', [], {'max_length': '32', 'blank': 'True'})
        }
    }

    complete_apps = ['track']
| agpl-3.0 |
alexandrucoman/vbox-nova-driver | nova/tests/unit/api/openstack/compute/contrib/test_server_start_stop.py | 33 | 6816 | # Copyright (c) 2012 Midokura Japan K.K.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from mox3 import mox
import webob
from nova.api.openstack.compute.contrib import server_start_stop \
as server_v2
from nova.api.openstack.compute import plugins
from nova.api.openstack.compute.plugins.v3 import servers \
as server_v21
from nova.compute import api as compute_api
from nova import db
from nova import exception
from nova.openstack.common import policy as common_policy
from nova import policy
from nova import test
from nova.tests.unit.api.openstack import fakes
def fake_instance_get(context, instance_id,
                      columns_to_join=None, use_slave=False):
    """Stub DB lookup returning a minimal instance dict for the given uuid."""
    instance = fakes.stub_instance(id=1, uuid=instance_id)
    instance.update(
        created_at=None,
        deleted_at=None,
        updated_at=None,
        deleted=0,
        info_cache={'network_info': '[]',
                    'instance_uuid': instance['uuid']},
    )
    return instance
def fake_start_stop_not_ready(self, context, instance):
    # Simulates the compute API refusing because the instance is not ready.
    raise exception.InstanceNotReady(instance_id=instance["uuid"])


def fake_start_stop_locked_server(self, context, instance):
    # Simulates the compute API refusing because the instance is locked.
    raise exception.InstanceIsLocked(instance_uuid=instance['uuid'])


def fake_start_stop_invalid_state(self, context, instance):
    # NOTE(review): despite the name this raises InstanceIsLocked rather than
    # InstanceInvalidState; both map to HTTP 409 so the tests still pass, but
    # confirm whether InstanceInvalidState was intended.
    raise exception.InstanceIsLocked(instance_uuid=instance['uuid'])
class ServerStartStopTestV21(test.TestCase):
    """Tests for the v2.1 server start/stop actions: happy path, policy
    enforcement, error-to-HTTP mapping, and bogus instance ids."""

    # Policy rule names; overridden by the v2 subclass below.
    start_policy = "os_compute_api:servers:start"
    stop_policy = "os_compute_api:servers:stop"

    def setUp(self):
        super(ServerStartStopTestV21, self).setUp()
        self._setup_controller()
        self.req = fakes.HTTPRequest.blank('')

    def _setup_controller(self):
        # Overridden in the v2 subclass to build the legacy extension controller.
        ext_info = plugins.LoadedExtensionInfo()
        self.controller = server_v21.ServersController(
            extension_info=ext_info)

    def test_start(self):
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
        # Expect exactly one compute API start() call.
        self.mox.StubOutWithMock(compute_api.API, 'start')
        compute_api.API.start(mox.IgnoreArg(), mox.IgnoreArg())
        self.mox.ReplayAll()

        body = dict(start="")
        self.controller._start_server(self.req, 'test_inst', body)

    def test_start_policy_failed(self):
        # A non-matching policy rule must surface as PolicyNotAuthorized.
        rules = {
            self.start_policy:
                common_policy.parse_rule("project_id:non_fake")
        }
        policy.set_rules(rules)
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
        body = dict(start="")
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller._start_server,
                                self.req, 'test_inst', body)
        self.assertIn(self.start_policy, exc.format_message())

    def test_start_not_ready(self):
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
        self.stubs.Set(compute_api.API, 'start', fake_start_stop_not_ready)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._start_server, self.req, 'test_inst', body)

    def test_start_locked_server(self):
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
        self.stubs.Set(compute_api.API, 'start', fake_start_stop_locked_server)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._start_server, self.req, 'test_inst', body)

    def test_start_invalid_state(self):
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
        self.stubs.Set(compute_api.API, 'start', fake_start_stop_invalid_state)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._start_server, self.req, 'test_inst', body)

    def test_stop(self):
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
        # Expect exactly one compute API stop() call.
        self.mox.StubOutWithMock(compute_api.API, 'stop')
        compute_api.API.stop(mox.IgnoreArg(), mox.IgnoreArg())
        self.mox.ReplayAll()

        body = dict(stop="")
        self.controller._stop_server(self.req, 'test_inst', body)

    def test_stop_policy_failed(self):
        rules = {
            self.stop_policy:
                common_policy.parse_rule("project_id:non_fake")
        }
        policy.set_rules(rules)
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
        body = dict(stop="")
        exc = self.assertRaises(exception.PolicyNotAuthorized,
                                self.controller._stop_server,
                                self.req, 'test_inst', body)
        self.assertIn(self.stop_policy, exc.format_message())

    def test_stop_not_ready(self):
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
        self.stubs.Set(compute_api.API, 'stop', fake_start_stop_not_ready)
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._stop_server, self.req, 'test_inst', body)

    def test_stop_locked_server(self):
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
        self.stubs.Set(compute_api.API, 'stop', fake_start_stop_locked_server)
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._stop_server, self.req, 'test_inst', body)

    def test_stop_invalid_state(self):
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get)
        self.stubs.Set(compute_api.API, 'stop', fake_start_stop_invalid_state)
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPConflict,
            self.controller._stop_server, self.req, 'test_inst', body)

    def test_start_with_bogus_id(self):
        # No stubbed DB record: the instance lookup fails -> 404.
        body = dict(start="")
        self.assertRaises(webob.exc.HTTPNotFound,
            self.controller._start_server, self.req, 'test_inst', body)

    def test_stop_with_bogus_id(self):
        body = dict(stop="")
        self.assertRaises(webob.exc.HTTPNotFound,
            self.controller._stop_server, self.req, 'test_inst', body)
class ServerStartStopTestV2(ServerStartStopTestV21):
    """Re-runs the v2.1 cases against the legacy v2 extension controller."""

    start_policy = "compute:start"
    stop_policy = "compute:stop"

    def _setup_controller(self):
        # v2 uses the standalone start/stop extension controller.
        self.controller = server_v2.ServerStartStopActionController()
| apache-2.0 |
asm0dey/Flexget | flexget/plugins/metainfo/nzb_size.py | 11 | 2184 | from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
# Plugin-scoped logger.
log = logging.getLogger('nzb_size')

# a bit hacky, add nzb as a known mimetype
import mimetypes
mimetypes.add_type('application/x-nzb', '.nzb')
class NzbSize(object):
    """
    Provides entry size information when dealing with nzb files.

    Sums the byte counts of all segments in the nzb and stores the result
    (in MB) as the entry's `content_size`.
    """

    @plugin.priority(200)
    def on_task_modify(self, task, config):
        """
        The downloaded file is accessible in modify phase.
        """
        try:
            from pynzb import nzb_parser
        except ImportError:
            # TODO: remove builtin status so this won't get repeated on every task execution
            # TODO: this will get loaded even without any need for nzb
            raise plugin.DependencyError(issued_by='nzb_size', missing='lib pynzb')

        for entry in task.accepted:
            if entry.get('mime-type', None) in [u'text/nzb', u'application/x-nzb'] or \
                    entry.get('filename', '').endswith('.nzb'):

                if 'file' not in entry:
                    log.warning('`%s` does not have a `file` that could be used to get size information' %
                                entry['title'])
                    continue

                filename = entry['file']
                log.debug('reading %s' % filename)
                # BUGFIX: use open() in a context manager instead of the
                # Python-2-only file() builtin, which also leaked the handle.
                with open(filename) as nzb_file:
                    xmldata = nzb_file.read()

                try:
                    nzbfiles = nzb_parser.parse(xmldata)
                except Exception:
                    # Narrowed from a bare `except:`: keep the best-effort skip
                    # but no longer swallow KeyboardInterrupt/SystemExit.
                    log.debug('%s is not a valid nzb' % entry['title'])
                    continue

                # Total payload size across every file segment in the nzb.
                size = sum(segment.bytes
                           for nzbfile in nzbfiles
                           for segment in nzbfile.segments)
                size_mb = size / 1024 / 1024
                log.debug('%s content size: %s MB' % (entry['title'], size_mb))
                entry['content_size'] = size_mb
            else:
                log.trace('%s does not seem to be nzb' % entry['title'])
@event('plugin.register')
def register_plugin():
    # Registered as a builtin so size info is available without explicit config.
    plugin.register(NzbSize, 'nzb_size', api_ver=2, builtin=True)
| mit |
JI007/flasky | tests/test_selenium.py | 22 | 2879 | import re
import threading
import unittest
from selenium import webdriver
from app import create_app, db
from app.models import Role, User, Post
class SeleniumTestCase(unittest.TestCase):
    """End-to-end tests driven through a real Firefox browser via Selenium.

    The whole suite is skipped when Firefox cannot be launched.
    """

    # Shared WebDriver instance; None means the browser is unavailable.
    client = None

    @classmethod
    def setUpClass(cls):
        # start Firefox
        try:
            cls.client = webdriver.Firefox()
        except:
            # NOTE(review): bare except deliberately swallows any startup
            # failure so the suite degrades to "skipped".
            pass

        # skip these tests if the browser could not be started
        if cls.client:
            # create the application
            cls.app = create_app('testing')
            cls.app_context = cls.app.app_context()
            cls.app_context.push()

            # suppress logging to keep unittest output clean
            import logging
            logger = logging.getLogger('werkzeug')
            logger.setLevel("ERROR")

            # create the database and populate with some fake data
            db.create_all()
            Role.insert_roles()
            User.generate_fake(10)
            Post.generate_fake(10)

            # add an administrator user
            admin_role = Role.query.filter_by(permissions=0xff).first()
            admin = User(email='john@example.com',
                         username='john', password='cat',
                         role=admin_role, confirmed=True)
            db.session.add(admin)
            db.session.commit()

            # start the Flask server in a thread
            threading.Thread(target=cls.app.run).start()

    @classmethod
    def tearDownClass(cls):
        if cls.client:
            # stop the flask server and the browser
            cls.client.get('http://localhost:5000/shutdown')
            cls.client.close()

            # destroy database
            db.drop_all()
            db.session.remove()

            # remove application context
            cls.app_context.pop()

    def setUp(self):
        if not self.client:
            self.skipTest('Web browser not available')

    def tearDown(self):
        pass

    def test_admin_home_page(self):
        # navigate to home page
        self.client.get('http://localhost:5000/')
        self.assertTrue(re.search('Hello,\s+Stranger!',
                                  self.client.page_source))

        # navigate to login page
        self.client.find_element_by_link_text('Log In').click()
        self.assertTrue('<h1>Login</h1>' in self.client.page_source)

        # login
        self.client.find_element_by_name('email').\
            send_keys('john@example.com')
        self.client.find_element_by_name('password').send_keys('cat')
        self.client.find_element_by_name('submit').click()
        self.assertTrue(re.search('Hello,\s+john!', self.client.page_source))

        # navigate to the user's profile page
        self.client.find_element_by_link_text('Profile').click()
        self.assertTrue('<h1>john</h1>' in self.client.page_source)
| mit |
azatoth/scons | test/Fortran/FORTRANSUFFIXES.py | 4 | 3948 | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test the ability to scan additional filesuffixes added to $FORTRANSUFFIXES.
"""
import TestSCons
_python_ = TestSCons._python_
test = TestSCons.TestSCons()
test.write('myfc.py', r"""
import sys
def do_file(outf, inf):
for line in open(inf, 'rb').readlines():
if line[:15] == " INCLUDE '":
do_file(outf, line[15:-2])
else:
outf.write(line)
outf = open(sys.argv[1], 'wb')
for f in sys.argv[2:]:
do_file(outf, f)
sys.exit(0)
""")
test.write('SConstruct', """
env = Environment(FORTRANPATH = ['.'],
FORTRAN = r'%(_python_)s myfc.py',
FORTRANCOM = '$FORTRAN $TARGET $SOURCES',
OBJSUFFIX = '.o')
env.Append(FORTRANSUFFIXES = ['.x'])
env.Object(target = 'test1', source = 'test1.f')
env.InstallAs('test1_f', 'test1.f')
env.InstallAs('test1_h', 'test1.h')
env.InstallAs('test1_x', 'test1.x')
""" % locals())
test.write('test1.f', """\
test1.f 1
INCLUDE 'test1.h'
INCLUDE 'test1.x'
""")
test.write('test1.h', """\
test1.h 1
INCLUDE 'foo.h'
""")
test.write('test1.x', """\
test1.x 1
INCLUDE 'foo.h'
""")
test.write('foo.h', """\
foo.h 1
""")
expect = test.wrap_stdout("""\
%(_python_)s myfc.py test1.o test1.f
Install file: "test1.f" as "test1_f"
Install file: "test1.h" as "test1_h"
Install file: "test1.x" as "test1_x"
""" % locals())
test.run(arguments='.', stdout=expect)
test.must_match('test1.o', """\
test1.f 1
test1.h 1
foo.h 1
test1.x 1
foo.h 1
""")
test.up_to_date(arguments='.')
test.write('foo.h', """\
foo.h 2
""")
expect = test.wrap_stdout("""\
%(_python_)s myfc.py test1.o test1.f
""" % locals())
test.run(arguments='.', stdout=expect)
test.must_match('test1.o', """\
test1.f 1
test1.h 1
foo.h 2
test1.x 1
foo.h 2
""")
test.up_to_date(arguments='.')
test.write('test1.x', """\
test1.x 2
INCLUDE 'foo.h'
""")
expect = test.wrap_stdout("""\
%(_python_)s myfc.py test1.o test1.f
Install file: "test1.x" as "test1_x"
""" % locals())
test.run(arguments='.', stdout=expect)
test.must_match('test1.o', """\
test1.f 1
test1.h 1
foo.h 2
test1.x 2
foo.h 2
""")
test.up_to_date(arguments='.')
test.write('test1.h', """\
test1.h 2
INCLUDE 'foo.h'
""")
expect = test.wrap_stdout("""\
%(_python_)s myfc.py test1.o test1.f
Install file: "test1.h" as "test1_h"
""" % locals())
test.run(arguments='.', stdout=expect)
test.must_match('test1.o', """\
test1.f 1
test1.h 2
foo.h 2
test1.x 2
foo.h 2
""")
test.up_to_date(arguments='.')
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit |
benjamindeleener/odoo | addons/anonymization/__openerp__.py | 27 | 1106 | # -*- encoding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Database Anonymization',
'version': '1.0',
'category': 'Tools',
'description': """
This module allows you to anonymize a database.
===============================================
This module allows you to keep your data confidential for a given database.
This process is useful, if you want to use the migration process and protect
your own or your customer’s confidential data. The principle is that you run
an anonymization tool which will hide your confidential data(they are replaced
by ‘XXX’ characters). Then you can send the anonymized database to the migration
team. Once you get back your migrated database, you restore it and reverse the
anonymization process to recover your previous data.
""",
'depends': ['base'],
'demo': ['anonymization_demo.xml'],
'data': [
'ir.model.fields.anonymization.csv',
'security/ir.model.access.csv',
'anonymization_view.xml',
],
'installable': True,
'auto_install': False,
}
| gpl-3.0 |
vFense/vFenseAgent-nix | agent/deps/rpm6-32/Python-2.7.5/lib/python2.7/lib2to3/pgen2/literals.py | 399 | 1614 | # Copyright 2004-2005 Elemental Security, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Safely evaluate Python string literals without using eval()."""
import re
simple_escapes = {"a": "\a",
"b": "\b",
"f": "\f",
"n": "\n",
"r": "\r",
"t": "\t",
"v": "\v",
"'": "'",
'"': '"',
"\\": "\\"}
def escape(m):
    """Regex-substitution callback: decode one backslash escape sequence.

    *m* is a match whose group 0 is the full escape (including the
    backslash) and group 1 is the part after the backslash.  Simple
    escapes come from the simple_escapes table; hex (\\xNN) and octal
    escapes are converted numerically.

    Raises ValueError for a malformed hex or octal escape.
    """
    # Renamed from 'all' to avoid shadowing the builtin all().
    whole, tail = m.group(0, 1)
    assert whole.startswith("\\")
    esc = simple_escapes.get(tail)
    if esc is not None:
        return esc
    if tail.startswith("x"):
        hexes = tail[1:]
        if len(hexes) < 2:
            raise ValueError("invalid hex string escape ('\\%s')" % tail)
        try:
            i = int(hexes, 16)
        except ValueError:
            raise ValueError("invalid hex string escape ('\\%s')" % tail)
    else:
        try:
            i = int(tail, 8)
        except ValueError:
            raise ValueError("invalid octal string escape ('\\%s')" % tail)
    return chr(i)
def evalString(s):
    """Return the value of the Python string literal *s* without eval()."""
    # Accept only a literal that opens with a single or double quote.
    assert s.startswith("'") or s.startswith('"'), repr(s[:1])
    quote = s[0]
    # Triple-quoted literals use the three-character delimiter.
    if s[:3] == quote * 3:
        quote = quote * 3
    assert s.endswith(quote), repr(s[-len(quote):])
    assert len(s) >= 2 * len(quote)
    # Strip the delimiters, then decode every escape via escape().
    body = s[len(quote):-len(quote)]
    pattern = r"\\(\'|\"|\\|[abfnrtv]|x.{0,2}|[0-7]{1,3})"
    return re.sub(pattern, escape, body)
def test():
    """Self-test: round-trip every byte value through repr()/evalString()."""
    for i in range(256):
        c = chr(i)
        s = repr(c)
        e = evalString(s)
        if e != c:
            # Python 2 print statement: report any value that fails to
            # survive the repr()/evalString() round trip.
            print i, c, s, e
if __name__ == "__main__":
    test()
| lgpl-3.0 |
nightjean/Deep-Learning | tensorflow/contrib/session_bundle/session_bundle_test.py | 133 | 7571 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for session_bundle.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os.path
import shutil
import numpy as np
from tensorflow.contrib.session_bundle import constants
from tensorflow.contrib.session_bundle import manifest_pb2
from tensorflow.contrib.session_bundle import session_bundle
from tensorflow.core.example.example_pb2 import Example
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import graph_util
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
import tensorflow.python.ops.parsing_ops # pylint: disable=unused-import
from tensorflow.python.platform import test
from tensorflow.python.training import saver
from tensorflow.python.util import compat
# Test-data locations (relative to the TensorFlow source tree): a SavedModel
# export and a legacy SessionBundle export of the half-plus-two model.
SAVED_MODEL_PATH = (
    "python/saved_model/example/saved_model_half_plus_two/00000123")
SESSION_BUNDLE_PATH = "contrib/session_bundle/testdata/half_plus_two/00000123"
def _make_serialized_example(x):
  """Returns a serialized tf.Example with a single float feature "x"."""
  ex = Example()
  ex.features.feature["x"].float_list.value.append(x)
  return ex.SerializeToString()
class SessionBundleLoadTest(test.TestCase):
  """Tests loading and running a legacy SessionBundle export."""

  def _checkRegressionSignature(self, signatures, sess):
    """Runs the default regression signature of the half-plus-two model."""
    default_signature = signatures.default_signature
    input_name = default_signature.regression_signature.input.tensor_name
    output_name = default_signature.regression_signature.output.tensor_name
    tf_example = [_make_serialized_example(x) for x in [0, 1, 2, 3]]
    y = sess.run([output_name], {input_name: tf_example})
    # The operation is y = 0.5 * x + 2
    self.assertEqual(y[0][0], 2)
    self.assertEqual(y[0][1], 2.5)
    self.assertEqual(y[0][2], 3)
    self.assertEqual(y[0][3], 3.5)

  def _checkNamedSignatures(self, signatures, sess):
    """Runs the named generic signatures ("inputs"/"outputs") of the model."""
    named_signatures = signatures.named_signatures
    input_name = (named_signatures["inputs"].generic_signature.map["x"]
                  .tensor_name)
    output_name = (named_signatures["outputs"].generic_signature.map["y"]
                   .tensor_name)
    y = sess.run([output_name], {input_name: np.array([[0], [1], [2], [3]])})
    # The operation is y = 0.5 * x + 2
    self.assertEqual(y[0][0], 2)
    self.assertEqual(y[0][1], 2.5)
    self.assertEqual(y[0][2], 3)
    self.assertEqual(y[0][3], 3.5)

  def testMaybeSessionBundleDir(self):
    """Only a SessionBundle directory should be recognized as one."""
    base_path = test.test_src_dir_path(SESSION_BUNDLE_PATH)
    self.assertTrue(session_bundle.maybe_session_bundle_dir(base_path))
    base_path = test.test_src_dir_path(SAVED_MODEL_PATH)
    self.assertFalse(session_bundle.maybe_session_bundle_dir(base_path))
    base_path = "complete_garbage"
    self.assertFalse(session_bundle.maybe_session_bundle_dir(base_path))

  def testBasic(self):
    """Loads the bundle, checks asset paths and both signature kinds."""
    base_path = test.test_src_dir_path(SESSION_BUNDLE_PATH)
    ops.reset_default_graph()
    sess, meta_graph_def = session_bundle.load_session_bundle_from_path(
        base_path,
        target="",
        config=config_pb2.ConfigProto(device_count={"CPU": 2}))
    self.assertTrue(sess)
    # Asset tensors must resolve to files under the bundle's assets dir.
    asset_path = os.path.join(base_path, constants.ASSETS_DIRECTORY)
    with sess.as_default():
      path1, path2 = sess.run(["filename1:0", "filename2:0"])
      self.assertEqual(
          compat.as_bytes(os.path.join(asset_path, "hello1.txt")), path1)
      self.assertEqual(
          compat.as_bytes(os.path.join(asset_path, "hello2.txt")), path2)
      collection_def = meta_graph_def.collection_def
      signatures_any = collection_def[constants.SIGNATURES_KEY].any_list.value
      # assertEquals is a deprecated alias; use assertEqual consistently.
      self.assertEqual(len(signatures_any), 1)
      signatures = manifest_pb2.Signatures()
      signatures_any[0].Unpack(signatures)
      self._checkRegressionSignature(signatures, sess)
      self._checkNamedSignatures(signatures, sess)

  def testBadPath(self):
    """A nonexistent bundle path must raise with a clear error message."""
    base_path = test.test_src_dir_path("/no/such/a/dir")
    ops.reset_default_graph()
    with self.assertRaises(RuntimeError) as cm:
      _, _ = session_bundle.load_session_bundle_from_path(
          base_path,
          target="local",
          config=config_pb2.ConfigProto(device_count={"CPU": 2}))
    self.assertTrue("Expected meta graph file missing" in str(cm.exception))

  def testVarCheckpointV2(self):
    """Same as testBasic but against a V2-format variable checkpoint."""
    base_path = test.test_src_dir_path(
        "contrib/session_bundle/testdata/half_plus_two_ckpt_v2/00000123")
    ops.reset_default_graph()
    sess, meta_graph_def = session_bundle.load_session_bundle_from_path(
        base_path,
        target="",
        config=config_pb2.ConfigProto(device_count={"CPU": 2}))
    self.assertTrue(sess)
    asset_path = os.path.join(base_path, constants.ASSETS_DIRECTORY)
    with sess.as_default():
      path1, path2 = sess.run(["filename1:0", "filename2:0"])
      self.assertEqual(
          compat.as_bytes(os.path.join(asset_path, "hello1.txt")), path1)
      self.assertEqual(
          compat.as_bytes(os.path.join(asset_path, "hello2.txt")), path2)
      collection_def = meta_graph_def.collection_def
      signatures_any = collection_def[constants.SIGNATURES_KEY].any_list.value
      # assertEquals is a deprecated alias; use assertEqual consistently.
      self.assertEqual(len(signatures_any), 1)
      signatures = manifest_pb2.Signatures()
      signatures_any[0].Unpack(signatures)
      self._checkRegressionSignature(signatures, sess)
      self._checkNamedSignatures(signatures, sess)
class SessionBundleLoadNoVarsTest(test.TestCase):
  """Test the case where there are no variables in the graph."""

  def setUp(self):
    self.base_path = os.path.join(test.get_temp_dir(), "no_vars")
    if not os.path.exists(self.base_path):
      os.mkdir(self.base_path)
    # Create a simple graph with a variable, then convert variables to
    # constants and export the graph.
    with ops.Graph().as_default() as g:
      x = array_ops.placeholder(dtypes.float32, name="x")
      w = variables.Variable(3.0)
      y = math_ops.subtract(w * x, 7.0, name="y")  # pylint: disable=unused-variable
      ops.add_to_collection("meta", "this is meta")
      with self.test_session(graph=g) as session:
        variables.global_variables_initializer().run()
        new_graph_def = graph_util.convert_variables_to_constants(
            session, g.as_graph_def(), ["y"])
      filename = os.path.join(self.base_path, constants.META_GRAPH_DEF_FILENAME)
      saver.export_meta_graph(
          filename, graph_def=new_graph_def, collection_list=["meta"])

  def tearDown(self):
    shutil.rmtree(self.base_path)

  def testGraphWithoutVarsLoadsCorrectly(self):
    """The frozen graph must load, run, and keep its "meta" collection."""
    session, _ = session_bundle.load_session_bundle_from_path(self.base_path)
    got = session.run(["y:0"], {"x:0": 5.0})[0]
    # assertEquals is a deprecated alias; use assertEqual consistently.
    self.assertEqual(got, 5.0 * 3.0 - 7.0)
    self.assertEqual(ops.get_collection("meta"), [b"this is meta"])
# Standard TensorFlow test entry point.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
idryomov/btrfs-unstable | Documentation/target/tcm_mod_builder.py | 2358 | 40707 | #!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: nab@kernel.org
#
import os, sys
import subprocess as sub
import string
import re
import optparse
# Module-level state shared by the generator functions below.
# fabric_mod_port / fabric_mod_init_port are set by the per-protocol
# *_include builders ("lport"/"nport" for FC, "tport"/"iport" otherwise).
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
    """Print *msg* and terminate the script with exit status 1."""
    print msg
    sys.exit(1)
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
    """Create the fabric module output directory.

    Returns 1 if the directory already exists; otherwise creates it and
    returns None.
    """
    if os.path.isdir(fabric_mod_dir_var) == True:
        return 1
    print "Creating fabric_mod_dir: " + fabric_mod_dir_var
    ret = os.mkdir(fabric_mod_dir_var)
    # NOTE(review): os.mkdir() returns None on success and raises OSError on
    # failure, so this error branch looks unreachable -- confirm.
    if ret:
        tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
    return
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
    """Generate <mod>_base.h for an FC fabric module.

    Writes version/namelen #defines plus the three per-fabric structs
    (initiator node ACL, target portal group, local port), and points the
    module-level fabric_mod_port/fabric_mod_init_port globals at
    "lport"/"nport" for later generators.
    """
    global fabric_mod_port
    global fabric_mod_init_port
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print "Writing file: " + f
    p = open(f, 'w');
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    # Assemble the generated C header text.
    buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
    buf += " u64 nport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
    buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += " struct se_node_acl se_node_acl;\n"
    buf += "};\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += " /* FC lport target portal group tag for TCM */\n"
    buf += " u16 lport_tpgt;\n"
    buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
    buf += " struct " + fabric_mod_name + "_lport *lport;\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += " struct se_portal_group se_tpg;\n"
    buf += "};\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_lport {\n"
    buf += " /* SCSI protocol the lport is providing */\n"
    buf += " u8 lport_proto_id;\n"
    buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
    buf += " u64 lport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
    buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
    buf += " struct se_wwn lport_wwn;\n"
    buf += "};\n"
    ret = p.write(buf)
    # NOTE(review): file.write() returns None in Python 2, so this error
    # branch looks unreachable -- confirm.
    if ret:
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    fabric_mod_port = "lport"
    fabric_mod_init_port = "nport"
    return
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
    """Generate <mod>_base.h for a SAS fabric module.

    Same shape as the FC variant, but the structs use iport/tport naming;
    sets fabric_mod_port/fabric_mod_init_port to "tport"/"iport".
    """
    global fabric_mod_port
    global fabric_mod_init_port
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print "Writing file: " + f
    p = open(f, 'w');
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    # Assemble the generated C header text.
    buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
    buf += " u64 iport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
    buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += " struct se_node_acl se_node_acl;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += " /* SAS port target portal group tag for TCM */\n"
    buf += " u16 tport_tpgt;\n"
    buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
    buf += " struct " + fabric_mod_name + "_tport *tport;\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += " struct se_portal_group se_tpg;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tport {\n"
    buf += " /* SCSI protocol the tport is providing */\n"
    buf += " u8 tport_proto_id;\n"
    buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
    buf += " u64 tport_wwpn;\n"
    buf += " /* ASCII formatted WWPN for SAS Target port */\n"
    buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
    buf += " struct se_wwn tport_wwn;\n"
    buf += "};\n"
    ret = p.write(buf)
    # NOTE(review): file.write() returns None in Python 2 -- see FC variant.
    if ret:
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    fabric_mod_port = "tport"
    fabric_mod_init_port = "iport"
    return
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
    """Generate <mod>_base.h for an iSCSI fabric module.

    iSCSI uses ASCII names (InitiatorName / IQN TargetName) instead of
    binary WWPNs; sets fabric_mod_port/fabric_mod_init_port to
    "tport"/"iport".
    """
    global fabric_mod_port
    global fabric_mod_init_port
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
    print "Writing file: " + f
    p = open(f, 'w');
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    # Assemble the generated C header text.
    buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
    buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
    buf += "\n"
    buf += "struct " + fabric_mod_name + "_nacl {\n"
    buf += " /* ASCII formatted InitiatorName */\n"
    buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
    buf += " struct se_node_acl se_node_acl;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tpg {\n"
    buf += " /* iSCSI target portal group tag for TCM */\n"
    buf += " u16 tport_tpgt;\n"
    buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
    buf += " struct " + fabric_mod_name + "_tport *tport;\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
    buf += " struct se_portal_group se_tpg;\n"
    buf += "};\n\n"
    buf += "struct " + fabric_mod_name + "_tport {\n"
    buf += " /* SCSI protocol the tport is providing */\n"
    buf += " u8 tport_proto_id;\n"
    buf += " /* ASCII formatted TargetName for IQN */\n"
    buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
    buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
    buf += " struct se_wwn tport_wwn;\n"
    buf += "};\n"
    ret = p.write(buf)
    # NOTE(review): file.write() returns None in Python 2 -- see FC variant.
    if ret:
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    fabric_mod_port = "tport"
    fabric_mod_init_port = "iport"
    return
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
    """Dispatch to the protocol-specific _base.h generator.

    proto_ident must be one of "FC", "SAS" or "iSCSI"; anything else
    aborts the script.
    """
    if proto_ident == "FC":
        tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
    elif proto_ident == "SAS":
        tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
    elif proto_ident == "iSCSI":
        tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
    else:
        print "Unsupported proto_ident: " + proto_ident
        sys.exit(1)
    return
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
    """Generate <mod>_configfs.c: the fabric module's configfs glue code.

    Emits nodeacl/tpg/wwn make+drop callbacks, a version attribute, the
    target_core_fabric_ops table, and configfs register/deregister plus
    module init/exit.  Relies on fabric_mod_port/fabric_mod_init_port set
    by the earlier *_include generator.
    """
    buf = ""
    f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
    print "Writing file: " + f
    p = open(f, 'w');
    if not p:
        tcm_mod_err("Unable to open file: " + f)
    # Generated file header: kernel + TCM includes and the configfs pointer.
    buf = "#include <linux/module.h>\n"
    buf += "#include <linux/moduleparam.h>\n"
    buf += "#include <linux/version.h>\n"
    buf += "#include <generated/utsrelease.h>\n"
    buf += "#include <linux/utsname.h>\n"
    buf += "#include <linux/init.h>\n"
    buf += "#include <linux/slab.h>\n"
    buf += "#include <linux/kthread.h>\n"
    buf += "#include <linux/types.h>\n"
    buf += "#include <linux/string.h>\n"
    buf += "#include <linux/configfs.h>\n"
    buf += "#include <linux/ctype.h>\n"
    buf += "#include <asm/unaligned.h>\n\n"
    buf += "#include <target/target_core_base.h>\n"
    buf += "#include <target/target_core_fabric.h>\n"
    buf += "#include <target/target_core_fabric_configfs.h>\n"
    buf += "#include <target/target_core_configfs.h>\n"
    buf += "#include <target/configfs_macros.h>\n\n"
    buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
    buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
    buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
    buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
    # make_nodeacl(): initiator node ACL creation callback.
    buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
    buf += " struct se_portal_group *se_tpg,\n"
    buf += " struct config_group *group,\n"
    buf += " const char *name)\n"
    buf += "{\n"
    buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
    buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
    # WWPN parsing only applies to protocols with binary port names.
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " u64 wwpn = 0;\n"
    buf += " u32 nexus_depth;\n\n"
    buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
    buf += " return ERR_PTR(-EINVAL); */\n"
    buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
    buf += " if (!se_nacl_new)\n"
    buf += " return ERR_PTR(-ENOMEM);\n"
    buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
    buf += " nexus_depth = 1;\n"
    buf += " /*\n"
    buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
    buf += " * when converting a NodeACL from demo mode -> explict\n"
    buf += " */\n"
    buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
    buf += " name, nexus_depth);\n"
    buf += " if (IS_ERR(se_nacl)) {\n"
    buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
    buf += " return se_nacl;\n"
    buf += " }\n"
    buf += " /*\n"
    buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
    buf += " */\n"
    buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
    buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
    buf += " return se_nacl;\n"
    buf += "}\n\n"
    # drop_nodeacl(): matching teardown for make_nodeacl().
    buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
    buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
    buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
    buf += " kfree(nacl);\n"
    buf += "}\n\n"
    # make_tpg()/drop_tpg(): target portal group lifecycle.
    buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
    buf += " struct se_wwn *wwn,\n"
    buf += " struct config_group *group,\n"
    buf += " const char *name)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
    buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
    buf += " unsigned long tpgt;\n"
    buf += " int ret;\n\n"
    buf += " if (strstr(name, \"tpgt_\") != name)\n"
    buf += " return ERR_PTR(-EINVAL);\n"
    buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
    buf += " return ERR_PTR(-EINVAL);\n\n"
    buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
    buf += " if (!tpg) {\n"
    buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
    buf += " return ERR_PTR(-ENOMEM);\n"
    buf += " }\n"
    buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
    buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
    buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
    buf += " &tpg->se_tpg, (void *)tpg,\n"
    buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
    buf += " if (ret < 0) {\n"
    buf += " kfree(tpg);\n"
    buf += " return NULL;\n"
    buf += " }\n"
    buf += " return &tpg->se_tpg;\n"
    buf += "}\n\n"
    buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
    buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
    buf += " core_tpg_deregister(se_tpg);\n"
    buf += " kfree(tpg);\n"
    buf += "}\n\n"
    # make_<port>()/drop_<port>(): fabric WWN endpoint lifecycle.
    buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
    buf += " struct target_fabric_configfs *tf,\n"
    buf += " struct config_group *group,\n"
    buf += " const char *name)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " u64 wwpn = 0;\n\n"
    buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
    buf += " return ERR_PTR(-EINVAL); */\n\n"
    buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
    buf += " if (!" + fabric_mod_port + ") {\n"
    buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
    buf += " return ERR_PTR(-ENOMEM);\n"
    buf += " }\n"
    if proto_ident == "FC" or proto_ident == "SAS":
        buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
    buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
    buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
    buf += "}\n\n"
    buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
    buf += "{\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
    buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
    buf += " kfree(" + fabric_mod_port + ");\n"
    buf += "}\n\n"
    # Read-only "version" configfs attribute.
    buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
    buf += " struct target_fabric_configfs *tf,\n"
    buf += " char *page)\n"
    buf += "{\n"
    buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
    buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
    buf += " utsname()->machine);\n"
    buf += "}\n\n"
    buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
    buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
    buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
    buf += " NULL,\n"
    buf += "};\n\n"
    # The target_core_fabric_ops table, wiring all fabric callbacks.
    buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
    buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
    buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
    buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
    buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
    buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
    buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
    buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
    buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
    buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
    buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
    buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
    buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
    buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
    buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
    buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
    buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
    buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
    buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
    buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
    buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
    buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
    buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
    buf += " .sess_get_initiator_sid = NULL,\n"
    buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
    buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
    buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
    buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
    buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
    buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
    buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
    buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
    buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
    buf += " /*\n"
    buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
    buf += " */\n"
    buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
    buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
    buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
    buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
    buf += " .fabric_post_link = NULL,\n"
    buf += " .fabric_pre_unlink = NULL,\n"
    buf += " .fabric_make_np = NULL,\n"
    buf += " .fabric_drop_np = NULL,\n"
    buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
    buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
    buf += "};\n\n"
    # register/deregister: configfs registration with TCM core.
    buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
    buf += "{\n"
    buf += " struct target_fabric_configfs *fabric;\n"
    buf += " int ret;\n\n"
    buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
    buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
    buf += " utsname()->machine);\n"
    buf += " /*\n"
    buf += " * Register the top level struct config_item_type with TCM core\n"
    buf += " */\n"
    buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
    buf += " if (IS_ERR(fabric)) {\n"
    buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
    buf += " return PTR_ERR(fabric);\n"
    buf += " }\n"
    buf += " /*\n"
    buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
    buf += " */\n"
    buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
    buf += " /*\n"
    buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
    buf += " */\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
    buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
    buf += " /*\n"
    buf += " * Register the fabric for use within TCM\n"
    buf += " */\n"
    buf += " ret = target_fabric_configfs_register(fabric);\n"
    buf += " if (ret < 0) {\n"
    buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
    buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
    buf += " return ret;\n"
    buf += " }\n"
    buf += " /*\n"
    buf += " * Setup our local pointer to *fabric\n"
    buf += " */\n"
    buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
    buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
    buf += " return 0;\n"
    buf += "};\n\n"
    buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
    buf += "{\n"
    buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
    buf += " return;\n\n"
    buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
    buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
    buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
    buf += "};\n\n"
    # Module init/exit plus MODULE_* boilerplate.
    buf += "static int __init " + fabric_mod_name + "_init(void)\n"
    buf += "{\n"
    buf += " int ret;\n\n"
    buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
    buf += " if (ret < 0)\n"
    buf += " return ret;\n\n"
    buf += " return 0;\n"
    buf += "};\n\n"
    buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
    buf += "{\n"
    buf += " " + fabric_mod_name + "_deregister_configfs();\n"
    buf += "};\n\n"
    buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
    buf += "MODULE_LICENSE(\"GPL\");\n"
    buf += "module_init(" + fabric_mod_name + "_init);\n"
    buf += "module_exit(" + fabric_mod_name + "_exit);\n"
    ret = p.write(buf)
    # NOTE(review): file.write() returns None in Python 2, so this error
    # branch looks unreachable -- confirm.
    if ret:
        tcm_mod_err("Unable to write f: " + f)
    p.close()
    return
def tcm_mod_scan_fabric_ops(tcm_dir):
    """Collect function-pointer member lines from target_core_fabric.h.

    Appends each line containing "(*" to the module-level fabric_ops list.
    """
    fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
    print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
    process_fo = 0;
    p = open(fabric_ops_api, 'r')
    line = p.readline()
    while line:
        # Skip the struct definition header line itself.
        if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
            line = p.readline()
            continue
        if process_fo == 0:
            # NOTE(review): process_fo flips to 1 on the first line that is
            # not the struct header, so collection is not actually limited to
            # the struct body -- confirm this matches the header's layout.
            process_fo = 1;
            line = p.readline()
            # Search for function pointer
            if not re.search('\(\*', line):
                continue
            fabric_ops.append(line.rstrip())
            continue
        line = p.readline()
        # Search for function pointer
        if not re.search('\(\*', line):
            continue
        fabric_ops.append(line.rstrip())
    p.close()
    return
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
bufi = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
print "Writing file: " + fi
pi = open(fi, 'w')
if not pi:
tcm_mod_err("Unable to open file: " + fi)
buf = "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/list.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n"
buf += "#include <scsi/scsi.h>\n"
buf += "#include <scsi/scsi_host.h>\n"
buf += "#include <scsi/scsi_device.h>\n"
buf += "#include <scsi/scsi_cmnd.h>\n"
buf += "#include <scsi/libfc.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_configfs.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
total_fabric_ops = len(fabric_ops)
i = 0
while i < total_fabric_ops:
fo = fabric_ops[i]
i += 1
# print "fabric_ops: " + fo
if re.search('get_fabric_name', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
buf += "{\n"
buf += " return \"" + fabric_mod_name[4:] + "\";\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
continue
if re.search('get_fabric_proto_ident', fo):
buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " u8 proto_id;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return proto_id;\n"
buf += "}\n\n"
bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
if re.search('get_wwn', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
if re.search('get_tag', fo):
buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
if re.search('get_default_depth', fo):
buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
if re.search('get_pr_transport_id\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code,\n"
buf += " unsigned char *buf)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *, unsigned char *);\n"
if re.search('get_pr_transport_id_len\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *);\n"
if re.search('parse_pr_out_transport_id\)\(', fo):
buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " const char *buf,\n"
buf += " u32 *out_tid_len,\n"
buf += " char **port_nexus_ptr)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " char *tid = NULL;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
buf += " }\n\n"
buf += " return tid;\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
bufi += " const char *, u32 *, char **);\n"
if re.search('alloc_fabric_acl\)\(', fo):
buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
buf += " if (!nacl) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
buf += " return NULL;\n"
buf += " }\n\n"
buf += " return &nacl->se_node_acl;\n"
buf += "}\n\n"
bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
if re.search('release_fabric_acl\)\(', fo):
buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
bufi += " struct se_node_acl *);\n"
if re.search('tpg_get_inst_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
if re.search('\*release_cmd\)\(', fo):
buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
if re.search('shutdown_session\)\(', fo):
buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
if re.search('close_session\)\(', fo):
buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
if re.search('stop_session\)\(', fo):
buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
if re.search('fall_back_to_erl0\)\(', fo):
buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
if re.search('sess_logged_in\)\(', fo):
buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
if re.search('sess_get_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
if re.search('write_pending\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
if re.search('write_pending_status\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
if re.search('set_default_node_attributes\)\(', fo):
buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
if re.search('get_task_tag\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
if re.search('get_cmd_state\)\(', fo):
buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
if re.search('queue_data_in\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
if re.search('queue_status\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
if re.search('queue_tm_rsp\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
if re.search('is_state_remove\)\(', fo):
buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
ret = pi.write(bufi)
if ret:
tcm_mod_err("Unable to write fi: " + fi)
pi.close()
return
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
kbuild = tcm_dir + "/drivers/target/Makefile"
f = open(kbuild, 'a')
f.write(buf)
f.close()
return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
kconfig = tcm_dir + "/drivers/target/Kconfig"
f = open(kconfig, 'a')
f.write(buf)
f.close()
return
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
tcm_dir = os.getcwd();
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI":
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
if not opts.__dict__[m]:
print "mandatory option is missing\n"
parser.print_help()
exit(-1)
if __name__ == "__main__":
main(str(opts.modname), opts.protoident)
| gpl-2.0 |
MaStanford/AnglishWordbook | Anglish/SyncWikia.py | 1 | 4522 | __author__ = 'm.stanford'
import string
from socket import error as SocketError
import json, httplib
STARTING_PAGE = 72;
ENDING_PAGE = 98;
invalidWords = ["un-English", "Anglish/English", "attested", "unattested", "Class"]
delimiter = "\'\'\'"
wierdfunkInSomeWords = ["\'\' \'\'\'", "\'\'\',", '\'\'\'\'\'', '\"\'\'']
def getWordPage(page):
connection = httplib.HTTPConnection('anglish.wikia.com', 80)
connection.connect()
connection.request('GET', '/api.php?action=query&prop=revisions&rvprop=content&format=json&pageids=' + str(page))
result = json.loads(connection.getresponse().read())
print result
return result
def processRawPage(page, number):
words = page['query']
words = words['pages']
words = words[str(number)]
words = words['revisions']
words = words[0]
listOfWords = []
for key, value in words.iteritems():
listOfLines = value
for strings in wierdfunkInSomeWords:
listOfLines = listOfLines.replace(strings, '')
listOfLines = value.split(delimiter)
print 'Raw Line: ' + str(listOfLines)
length = len(listOfLines)
i = 10;
while not isValidWord(listOfLines[i]):
i += 1
even = i % 2
while i < length:
#Check if we have an invalid word in a place where it should be valid. We then will append that line to the previous line in the list of words.
if not isValidWord(listOfLines[i]) and i % 2 == even:
out = listOfWords[len(listOfWords)-1] + listOfLines[i]
out = out.replace("\'\'", '').replace('|', '\n')
listOfWords.remove(listOfWords[len(listOfWords)-1])
listOfWords.append(out)
print 'Found odd line: ' + out.replace('\n', ' ')
i += 1
even = i % 2
else:
print 'Valid Line: ' + listOfLines[i].replace("\'\'", '').replace('|', '').replace('\n', ' ')
listOfWords.append(listOfLines[i].replace("\'\'", '').replace('|', '\n'))
i += 1
return listOfWords
def buildWordDef(processedHead, processedDef):
word = {}
word['word'] = processedHead.lower()
listOfDefs = [x for x in processedDef.split('\n') if x]
# print 'Def: ' + processedHead + ' : ' + str(listOfDefs)
if len(listOfDefs) > 3:
word['attested_definitions'] = listOfDefs[1].replace('-\n', '').replace('\n', '').replace(' ', '').split(',')
word['unattested_definitions'] = listOfDefs[2].replace('-\n', '').replace('\n', '').replace(' ', '').split(',')
word['type'] = listOfDefs[0].replace("\'", "")
else:
word['attested_definitions'] = []
word['unattested_definitions'] = []
word['type'] = ''
print "buildWordDef" + str(word)
return word
def addWord(wordDef):
word = wordDef['word']
attested = wordDef['attested_definitions']
unattested = wordDef['unattested_definitions']
wordType = wordDef['type']
try:
connection = httplib.HTTPSConnection('https://anglishwordbook.herokuapp.com/', 443)
connection.connect()
connection.request('POST', '/1/classes/Word', json.dumps({
"Word": word,
"Attested": attested,
"Unattested": unattested,
"Type": wordType
}), {
"X-Parse-Application-Id": "ApuxkukQC9mFuLIdIjG3qC27ms5kZ4XZbopxUohp",
"X-Parse-Master-Key ": "ME6doa9GdB2PTGesScr8DwNQVzlzMwmoEurf3kIX",
"Content-Type": "application/json"
})
result = json.loads(connection.getresponse().read())
if 'objectId' in result:
print result
return True
else:
return False
except SocketError as e:
return addWord(wordDef)
def isValidWord(line):
if len(line.split(' ')) > 2:
return False
if line in invalidWords:
return False
if all(c in string.punctuation for c in line.replace(' ', '').replace('\n','')):
return False
return True
for j in range(STARTING_PAGE, ENDING_PAGE):
rawPage = getWordPage(j)
processedPage = processRawPage(rawPage, j)
index = len(processedPage)
k = 0
while k < index - 1:
# print 'Obj 1 ' + processedPage[i]
# print 'Obj 2 ' + processedPage[i+1]
wordDef = buildWordDef(processedPage[k], processedPage[k+1])
if addWord(wordDef):
k += 2
else:
k = k
| apache-2.0 |
xinwu/horizon | horizon/templatetags/parse_date.py | 75 | 1738 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Template tags for parsing date strings.
"""
from datetime import datetime # noqa
from django import template
from django.utils import timezone
register = template.Library()
class ParseDateNode(template.Node):
def render(self, datestring):
"""Parses a date-like input string into a timezone aware Python
datetime.
"""
formats = ["%Y-%m-%dT%H:%M:%S.%f", "%Y-%m-%d %H:%M:%S.%f",
"%Y-%m-%dT%H:%M:%S", "%Y-%m-%d %H:%M:%S"]
if datestring:
for format in formats:
try:
parsed = datetime.strptime(datestring, format)
if not timezone.is_aware(parsed):
parsed = timezone.make_aware(parsed, timezone.utc)
return parsed
except Exception:
pass
return None
@register.filter(name='parse_date')
def parse_date(value):
return ParseDateNode().render(value)
| apache-2.0 |
jsirois/pants | src/python/pants/backend/python/goals/setup_py.py | 1 | 37779 | # Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import enum
import io
import itertools
import logging
import os
import pickle
from abc import ABC, abstractmethod
from collections import abc, defaultdict
from dataclasses import dataclass
from typing import Any, Dict, List, Mapping, Set, Tuple, cast
from pants.backend.python.macros.python_artifact import PythonArtifact
from pants.backend.python.subsystems.setuptools import Setuptools
from pants.backend.python.target_types import (
PexEntryPointField,
PythonProvidesField,
PythonRequirementsField,
PythonSources,
ResolvedPexEntryPoint,
ResolvePexEntryPointRequest,
SetupPyCommandsField,
)
from pants.backend.python.util_rules.pex import (
PexInterpreterConstraints,
PexRequest,
PexRequirements,
VenvPex,
VenvPexProcess,
)
from pants.backend.python.util_rules.python_sources import (
PythonSourceFilesRequest,
StrippedPythonSourceFiles,
)
from pants.backend.python.util_rules.python_sources import rules as python_sources_rules
from pants.base.specs import AddressSpecs, AscendantAddresses
from pants.core.goals.package import BuiltPackage, BuiltPackageArtifact, PackageFieldSet
from pants.core.target_types import FilesSources, ResourcesSources
from pants.engine.addresses import Address, UnparsedAddressInputs
from pants.engine.collection import Collection, DeduplicatedCollection
from pants.engine.fs import (
AddPrefix,
CreateDigest,
Digest,
DigestContents,
DigestSubset,
FileContent,
MergeDigests,
PathGlobs,
RemovePrefix,
Snapshot,
)
from pants.engine.process import ProcessResult
from pants.engine.rules import Get, MultiGet, collect_rules, rule
from pants.engine.target import (
Dependencies,
DependenciesRequest,
Sources,
Target,
Targets,
TransitiveTargets,
TransitiveTargetsRequest,
)
from pants.engine.unions import UnionMembership, UnionRule, union
from pants.option.subsystem import Subsystem
from pants.python.python_setup import PythonSetup
from pants.util.docutil import docs_url
from pants.util.logging import LogLevel
from pants.util.memo import memoized_property
from pants.util.meta import frozen_after_init
from pants.util.strutil import ensure_text
logger = logging.getLogger(__name__)
class InvalidSetupPyArgs(Exception):
"""Indicates invalid arguments to setup.py."""
class TargetNotExported(Exception):
"""Indicates a target that was expected to be exported is not."""
class InvalidEntryPoint(Exception):
"""Indicates that a specified binary entry point was invalid."""
class OwnershipError(Exception):
"""An error related to target ownership calculation."""
def __init__(self, msg: str):
super().__init__(
f"{msg} See {docs_url('python-distributions')} for "
f"how python_library targets are mapped to distributions."
)
class NoOwnerError(OwnershipError):
"""Indicates an exportable target has no owning exported target."""
class AmbiguousOwnerError(OwnershipError):
"""Indicates an exportable target has more than one owning exported target."""
@dataclass(frozen=True)
class ExportedTarget:
"""A target that explicitly exports a setup.py artifact, using a `provides=` stanza.
The code provided by this artifact can be from this target or from any targets it owns.
"""
target: Target # In practice, a PythonDistribution.
@property
def provides(self) -> PythonArtifact:
return self.target[PythonProvidesField].value
@dataclass(frozen=True)
class DependencyOwner:
"""An ExportedTarget in its role as an owner of other targets.
We need this type to prevent rule ambiguities when computing the list of targets owned by an
ExportedTarget (which involves going from ExportedTarget -> dep -> owner (which is itself an
ExportedTarget) and checking if owner is the original ExportedTarget.
"""
exported_target: ExportedTarget
@dataclass(frozen=True)
class OwnedDependency:
"""A target that is owned by some ExportedTarget.
Code in this target is published in the owner's distribution.
The owner of a target T is T's closest filesystem ancestor among the python_distribution
targets that directly or indirectly depend on it (including T itself).
"""
target: Target
class OwnedDependencies(Collection[OwnedDependency]):
pass
class ExportedTargetRequirements(DeduplicatedCollection[str]):
"""The requirements of an ExportedTarget.
Includes:
- The "normal" 3rdparty requirements of the ExportedTarget and all targets it owns.
- The published versions of any other ExportedTargets it depends on.
"""
sort_input = True
@dataclass(frozen=True)
class PythonDistributionFieldSet(PackageFieldSet):
required_fields = (PythonProvidesField,)
provides: PythonProvidesField
@dataclass(frozen=True)
class SetupPySourcesRequest:
targets: Targets
py2: bool # Whether to use py2 or py3 package semantics.
@dataclass(frozen=True)
class SetupPySources:
"""The sources required by a setup.py command.
Includes some information derived from analyzing the source, namely the packages, namespace
packages and resource files in the source.
"""
digest: Digest
packages: Tuple[str, ...]
namespace_packages: Tuple[str, ...]
package_data: Tuple["PackageDatum", ...]
@dataclass(frozen=True)
class SetupPyChrootRequest:
"""A request to create a chroot containing a setup.py and the sources it operates on."""
exported_target: ExportedTarget
py2: bool # Whether to use py2 or py3 package semantics.
@frozen_after_init
@dataclass(unsafe_hash=True)
class SetupKwargs:
"""The keyword arguments to the `setup()` function in the generated `setup.py`."""
_pickled_bytes: bytes
def __init__(
self, kwargs: Mapping[str, Any], *, address: Address, _allow_banned_keys: bool = False
) -> None:
super().__init__()
if "version" not in kwargs:
raise ValueError(f"Missing a `version` kwarg in the `provides` field for {address}.")
if not _allow_banned_keys:
for arg in {
"data_files",
"namespace_packages",
"package_dir",
"package_data",
"packages",
"install_requires",
}:
if arg in kwargs:
raise ValueError(
f"{arg} cannot be set in the `provides` field for {address}, but it was "
f"set to {kwargs[arg]}. Pants will dynamically set the value for you."
)
# We serialize with `pickle` so that is hashable. We don't use `FrozenDict` because it
# would require that all values are immutable, and we may have lists and dictionaries as
# values. It's too difficult/clunky to convert those all, then to convert them back out of
# `FrozenDict`. We don't use JSON because it does not preserve data types like `tuple`.
self._pickled_bytes = pickle.dumps({k: v for k, v in sorted(kwargs.items())}, protocol=4)
@memoized_property
def kwargs(self) -> Dict[str, Any]:
return cast(Dict[str, Any], pickle.loads(self._pickled_bytes))
@property
def name(self) -> str:
return cast(str, self.kwargs["name"])
@property
def version(self) -> str:
return cast(str, self.kwargs["version"])
# Note: This only exists as a hook for additional logic for the `setup()` kwargs, e.g. for plugin
# authors. To resolve `SetupKwargs`, call `await Get(SetupKwargs, ExportedTarget)`, which handles
# running any custom implementations vs. using the default implementation.
@union
@dataclass(frozen=True) # type: ignore[misc]
class SetupKwargsRequest(ABC):
"""A request to allow setting the kwargs passed to the `setup()` function.
By default, Pants will pass the kwargs provided in the BUILD file unchanged. To customize this
behavior, subclass `SetupKwargsRequest`, register the rule `UnionRule(SetupKwargsRequest,
MyCustomSetupKwargsRequest)`, and add a rule that takes your subclass as a parameter and returns
`SetupKwargs`.
"""
target: Target
@classmethod
@abstractmethod
def is_applicable(cls, target: Target) -> bool:
"""Whether the kwargs implementation should be used for this target or not."""
@property
def explicit_kwargs(self) -> Dict[str, Any]:
return self.target[PythonProvidesField].value.kwargs
class FinalizedSetupKwargs(SetupKwargs):
"""The final kwargs used for the `setup()` function, after Pants added requirements and sources
information."""
def __init__(self, kwargs: Mapping[str, Any], *, address: Address) -> None:
super().__init__(kwargs, address=address, _allow_banned_keys=True)
@dataclass(frozen=True)
class SetupPyChroot:
"""A chroot containing a generated setup.py and the sources it operates on."""
digest: Digest
setup_kwargs: FinalizedSetupKwargs
@dataclass(frozen=True)
class RunSetupPyRequest:
"""A request to run a setup.py command."""
exported_target: ExportedTarget
interpreter_constraints: PexInterpreterConstraints
chroot: SetupPyChroot
args: Tuple[str, ...]
@dataclass(frozen=True)
class RunSetupPyResult:
"""The result of running a setup.py command."""
output: Digest # The state of the chroot after running setup.py.
@enum.unique
class FirstPartyDependencyVersionScheme(enum.Enum):
EXACT = "exact" # i.e., ==
COMPATIBLE = "compatible" # i.e., ~=
ANY = "any" # i.e., no specifier
class SetupPyGeneration(Subsystem):
    """Subsystem exposing options that control how setup.py is generated."""

    options_scope = "setup-py-generation"
    help = "Options to control how setup.py is generated from a `python_distribution` target."

    @classmethod
    def register_options(cls, register):
        """Register this subsystem's options with the options system."""
        super().register_options(register)
        register(
            "--first-party-dependency-version-scheme",
            type=FirstPartyDependencyVersionScheme,
            default=FirstPartyDependencyVersionScheme.EXACT,
            help=(
                "What version to set in `install_requires` when a `python_distribution` depends on "
                "other `python_distribution`s. If `exact`, will use `==`. If `compatible`, will "
                "use `~=`. If `any`, will leave off the version. See "
                "https://www.python.org/dev/peps/pep-0440/#version-specifiers."
            ),
        )

    def first_party_dependency_version(self, version: str) -> str:
        """Return the version string (e.g. '~=4.0') for a first-party dependency.

        If the user specified to use "any" version, then this will return an empty string.
        """
        scheme = self.options.first_party_dependency_version_scheme
        if scheme == FirstPartyDependencyVersionScheme.ANY:
            return ""
        # EXACT pins with `==`; COMPATIBLE uses PEP 440's `~=`.
        specifier = "==" if scheme == FirstPartyDependencyVersionScheme.EXACT else "~="
        return f"{specifier}{version}"
def validate_commands(commands: Tuple[str, ...]):
    """Validate user-supplied setup.py commands/args, raising `InvalidSetupPyArgs` if invalid.

    Rejected arguments:
      * `--dist-dir`/`-d` (including the joined `--dist-dir=PATH` form): we rely on the
        dist dir being the default, so we know where to find the created dists.
      * `upload`/`register`: publishing via setup.py is not allowed (see below).
    """
    # We rely on the dist dir being the default, so we know where to find the created dists.
    # Check both the separate (`--dist-dir foo`) and joined (`--dist-dir=foo`) spellings;
    # the original membership test alone would miss the joined form.
    if "-d" in commands or any(
        command == "--dist-dir" or command.startswith("--dist-dir=") for command in commands
    ):
        raise InvalidSetupPyArgs(
            "Cannot set --dist-dir/-d in setup.py args. To change where dists "
            "are written, use the global --pants-distdir option."
        )
    # We don't allow publishing via setup.py, as we don't want the setup.py running rule,
    # which is not a @goal_rule, to side-effect (plus, we'd need to ensure that publishing
    # happens in dependency order). Note that `upload` and `register` were removed in
    # setuptools 42.0.0, in favor of Twine, but we still check for them in case the user modified
    # the default version used by our Setuptools subsystem.
    # TODO: A `publish` rule, that can invoke Twine to do the actual uploading.
    # See https://github.com/pantsbuild/pants/issues/8935.
    if "upload" in commands or "register" in commands:
        raise InvalidSetupPyArgs("Cannot use the `upload` or `register` setup.py commands")
@rule
async def package_python_dist(
    field_set: PythonDistributionFieldSet,
    python_setup: PythonSetup,
) -> BuiltPackage:
    """Build the dist(s) for a `python_distribution` target.

    If the target configures setup.py commands, run them and capture the produced dists;
    otherwise just materialize the generated chroot under a name-version directory.
    """
    transitive_targets = await Get(TransitiveTargets, TransitiveTargetsRequest([field_set.address]))
    exported_target = ExportedTarget(transitive_targets.roots[0])
    # Constraints from the whole closure, so setuptools runs under an interpreter
    # compatible with every owned source.
    interpreter_constraints = PexInterpreterConstraints.create_from_targets(
        transitive_targets.closure, python_setup
    )
    chroot = await Get(
        SetupPyChroot,
        SetupPyChrootRequest(exported_target, py2=interpreter_constraints.includes_python2()),
    )
    # If commands were provided, run setup.py with them; Otherwise just dump chroots.
    commands = exported_target.target.get(SetupPyCommandsField).value or ()
    if commands:
        validate_commands(commands)
        setup_py_result = await Get(
            RunSetupPyResult,
            RunSetupPyRequest(exported_target, interpreter_constraints, chroot, commands),
        )
        dist_snapshot = await Get(Snapshot, Digest, setup_py_result.output)
        return BuiltPackage(
            setup_py_result.output,
            tuple(BuiltPackageArtifact(path) for path in dist_snapshot.files),
        )
    else:
        # No commands: expose the raw chroot as the artifact, under "<name>-<version>/".
        dirname = f"{chroot.setup_kwargs.name}-{chroot.setup_kwargs.version}"
        rel_chroot = await Get(Digest, AddPrefix(chroot.digest, dirname))
        return BuiltPackage(rel_chroot, (BuiltPackageArtifact(dirname),))
# We write .py sources into the chroot under this dir.
CHROOT_SOURCE_ROOT = "src"
# Template for the generated setup.py. `{target_address_spec}` and `{setup_kwargs_str}`
# are filled in via str.format() when the chroot is generated.
SETUP_BOILERPLATE = """
# DO NOT EDIT THIS FILE -- AUTOGENERATED BY PANTS
# Target: {target_address_spec}
from setuptools import setup
setup(**{setup_kwargs_str})
"""
@rule
async def run_setup_py(req: RunSetupPyRequest, setuptools: Setuptools) -> RunSetupPyResult:
    """Run a setup.py command on a single exported target."""
    # Note that this pex has no entrypoint. We use it to run our generated setup.py, which
    # in turn imports from and invokes setuptools.
    setuptools_pex = await Get(
        VenvPex,
        PexRequest(
            output_filename="setuptools.pex",
            internal_only=True,
            requirements=PexRequirements(setuptools.all_requirements),
            # Prefer the dist's own interpreter constraints unless the user explicitly
            # configured constraints on the setuptools subsystem.
            interpreter_constraints=(
                req.interpreter_constraints
                if setuptools.options.is_default("interpreter_constraints")
                else PexInterpreterConstraints(setuptools.interpreter_constraints)
            ),
        ),
    )
    # The setuptools dist dir, created by it under the chroot (not to be confused with
    # pants's own dist dir, at the buildroot).
    dist_dir = "dist/"
    result = await Get(
        ProcessResult,
        VenvPexProcess(
            setuptools_pex,
            argv=("setup.py", *req.args),
            input_digest=req.chroot.digest,
            # setuptools commands that create dists write them to the distdir.
            # TODO: Could there be other useful files to capture?
            output_directories=(dist_dir,),
            description=f"Run setuptools for {req.exported_target.target.address}",
            level=LogLevel.DEBUG,
        ),
    )
    # Strip the "dist/" prefix so the result digest holds the dists at its root.
    output_digest = await Get(Digest, RemovePrefix(result.output_digest, dist_dir))
    return RunSetupPyResult(output_digest)
@rule
async def determine_setup_kwargs(
    exported_target: ExportedTarget, union_membership: UnionMembership
) -> SetupKwargs:
    """Resolve the `setup()` kwargs for a target, delegating to at most one registered
    `SetupKwargsRequest` plugin, or falling back to the BUILD file's explicit kwargs."""
    target = exported_target.target
    setup_kwargs_requests = union_membership.get(SetupKwargsRequest)  # type: ignore[misc]
    applicable_setup_kwargs_requests = tuple(
        request for request in setup_kwargs_requests if request.is_applicable(target)
    )
    # If no provided implementations, fall back to our default implementation that simply returns
    # what the user explicitly specified in the BUILD file.
    if not applicable_setup_kwargs_requests:
        return SetupKwargs(exported_target.provides.kwargs, address=target.address)
    # More than one applicable plugin is ambiguous: refuse rather than pick arbitrarily.
    if len(applicable_setup_kwargs_requests) > 1:
        possible_requests = sorted(plugin.__name__ for plugin in applicable_setup_kwargs_requests)
        raise ValueError(
            f"Multiple of the registered `SetupKwargsRequest`s can work on the target "
            f"{target.address}, and it's ambiguous which to use: {possible_requests}\n\nPlease "
            "activate fewer implementations, or make the classmethod `is_applicable()` more "
            "precise so that only one implementation is applicable for this target."
        )
    setup_kwargs_request = tuple(applicable_setup_kwargs_requests)[0]
    return await Get(SetupKwargs, SetupKwargsRequest, setup_kwargs_request(target))
@rule
async def generate_chroot(request: SetupPyChrootRequest) -> SetupPyChroot:
    """Assemble the chroot for one exported target: generated setup.py, MANIFEST.in,
    and the owned sources nested under the chroot source root."""
    exported_target = request.exported_target
    exported_addr = exported_target.target.address
    owned_deps, transitive_targets = await MultiGet(
        Get(OwnedDependencies, DependencyOwner(exported_target)),
        Get(TransitiveTargets, TransitiveTargetsRequest([exported_target.target.address])),
    )
    # files() targets aren't owned by a single exported target - they aren't code, so
    # we allow them to be in multiple dists. This is helpful for, e.g., embedding
    # a standard license file in a dist.
    files_targets = (tgt for tgt in transitive_targets.closure if tgt.has_field(FilesSources))
    targets = Targets(itertools.chain((od.target for od in owned_deps), files_targets))
    sources, requirements = await MultiGet(
        Get(SetupPySources, SetupPySourcesRequest(targets, py2=request.py2)),
        Get(ExportedTargetRequirements, DependencyOwner(exported_target)),
    )
    # Generate the kwargs for the setup() call. In addition to using the kwargs that are either
    # explicitly provided or generated via a user's plugin, we add additional kwargs based on the
    # resolved requirements and sources.
    target = exported_target.target
    resolved_setup_kwargs = await Get(SetupKwargs, ExportedTarget, exported_target)
    setup_kwargs = resolved_setup_kwargs.kwargs.copy()
    # NB: We are careful to not overwrite these values, but we also don't expect them to have been
    # set. The user must have gone out of their way to use a `SetupKwargs` plugin, and to have
    # specified `SetupKwargs(_allow_banned_keys=True)`.
    setup_kwargs.update(
        {
            "package_dir": {"": CHROOT_SOURCE_ROOT, **setup_kwargs.get("package_dir", {})},
            "packages": (*sources.packages, *(setup_kwargs.get("packages", []))),
            "namespace_packages": (
                *sources.namespace_packages,
                *setup_kwargs.get("namespace_packages", []),
            ),
            "package_data": {**dict(sources.package_data), **setup_kwargs.get("package_data", {})},
            "install_requires": (*requirements, *setup_kwargs.get("install_requires", [])),
        }
    )
    # Add any `pex_binary` targets from `setup_py().with_binaries()` to the dist's entry points.
    key_to_binary_spec = exported_target.provides.binaries
    binaries = await Get(
        Targets, UnparsedAddressInputs(key_to_binary_spec.values(), owning_address=target.address)
    )
    entry_point_requests = []
    for binary in binaries:
        # Every with_binaries() value must point at a pex_binary...
        if not binary.has_field(PexEntryPointField):
            raise InvalidEntryPoint(
                "Expected addresses to `pex_binary` targets in `.with_binaries()` for the "
                f"`provides` field for {exported_addr}, but found {binary.address} with target "
                f"type {binary.alias}."
            )
        entry_point = binary[PexEntryPointField].value
        url = "https://python-packaging.readthedocs.io/en/latest/command-line-scripts.html#the-console-scripts-entry-point"
        # ...with an explicit entry_point...
        if not entry_point:
            raise InvalidEntryPoint(
                "Every `pex_binary` used in `.with_binaries()` for the `provides` field for "
                f"{exported_addr} must explicitly set the `entry_point` field, but "
                f"{binary.address} left the field off. Set `entry_point` to either "
                f"`app.py:func` or the longhand `path.to.app:func`. See {url}."
            )
        # ...that names a function, as console_scripts requires `module:func`.
        if not entry_point.function:
            raise InvalidEntryPoint(
                "Every `pex_binary` used in `with_binaries()` for the `provides()` field for "
                f"{exported_addr} must end in the format `:my_func` for the `entry_point` field, "
                f"but {binary.address} set it to {entry_point.spec!r}. For example, set "
                f"`entry_point='{entry_point.module}:main'. See {url}."
            )
        entry_point_requests.append(ResolvePexEntryPointRequest(binary[PexEntryPointField]))
    binary_entry_points = await MultiGet(
        Get(ResolvedPexEntryPoint, ResolvePexEntryPointRequest, request)
        for request in entry_point_requests
    )
    # zip() is safe here: requests were appended in key_to_binary_spec iteration order.
    for key, binary_entry_point in zip(key_to_binary_spec.keys(), binary_entry_points):
        entry_points = setup_kwargs.setdefault("entry_points", {})
        console_scripts = entry_points.setdefault("console_scripts", [])
        if binary_entry_point.val is not None:
            console_scripts.append(f"{key}={binary_entry_point.val.spec}")
    # Generate the setup script.
    setup_py_content = SETUP_BOILERPLATE.format(
        target_address_spec=target.address.spec,
        setup_kwargs_str=distutils_repr(setup_kwargs),
    ).encode()
    files_to_create = [
        FileContent("setup.py", setup_py_content),
        FileContent("MANIFEST.in", "include *.py".encode()),
    ]
    extra_files_digest, src_digest = await MultiGet(
        Get(Digest, CreateDigest(files_to_create)),
        # Nest the sources under the src/ prefix.
        Get(Digest, AddPrefix(sources.digest, CHROOT_SOURCE_ROOT)),
    )
    chroot_digest = await Get(Digest, MergeDigests((src_digest, extra_files_digest)))
    return SetupPyChroot(chroot_digest, FinalizedSetupKwargs(setup_kwargs, address=target.address))
@rule
async def get_sources(request: SetupPySourcesRequest) -> SetupPySources:
    """Collect and analyze the sources for a dist: python files, resources, and the
    package/namespace-package/package_data structure they imply."""
    # Two passes over the same targets: one restricted to .py files, one including
    # resources/files, so resources can be identified by set difference below.
    python_sources_request = PythonSourceFilesRequest(
        targets=request.targets, include_resources=False, include_files=False
    )
    all_sources_request = PythonSourceFilesRequest(
        targets=request.targets, include_resources=True, include_files=True
    )
    python_sources, all_sources = await MultiGet(
        Get(StrippedPythonSourceFiles, PythonSourceFilesRequest, python_sources_request),
        Get(StrippedPythonSourceFiles, PythonSourceFilesRequest, all_sources_request),
    )
    python_files = set(python_sources.stripped_source_files.snapshot.files)
    all_files = set(all_sources.stripped_source_files.snapshot.files)
    resource_files = all_files - python_files
    # The __init__.py contents are needed to detect pkg_resources-style namespace packages.
    init_py_digest_contents = await Get(
        DigestContents,
        DigestSubset(
            python_sources.stripped_source_files.snapshot.digest, PathGlobs(["**/__init__.py"])
        ),
    )
    packages, namespace_packages, package_data = find_packages(
        python_files=python_files,
        resource_files=resource_files,
        init_py_digest_contents=init_py_digest_contents,
        py2=request.py2,
    )
    return SetupPySources(
        digest=all_sources.stripped_source_files.snapshot.digest,
        packages=packages,
        namespace_packages=namespace_packages,
        package_data=package_data,
    )
@rule(desc="Compute distribution's 3rd party requirements")
async def get_requirements(
    dep_owner: DependencyOwner,
    union_membership: UnionMembership,
    setup_py_generation: SetupPyGeneration,
) -> ExportedTargetRequirements:
    """Compute the install_requires entries for the dist of `dep_owner`'s exported target:
    3rdparty requirements of its owned deps, plus other exported dists it depends on."""
    transitive_targets = await Get(
        TransitiveTargets, TransitiveTargetsRequest([dep_owner.exported_target.target.address])
    )
    ownable_tgts = [
        tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
    ]
    owners = await MultiGet(Get(ExportedTarget, OwnedDependency(tgt)) for tgt in ownable_tgts)
    # Partition the closure by ownership: deps in this dist vs. deps published elsewhere.
    owned_by_us: Set[Target] = set()
    owned_by_others: Set[Target] = set()
    for tgt, owner in zip(ownable_tgts, owners):
        (owned_by_us if owner == dep_owner.exported_target else owned_by_others).add(tgt)
    # Get all 3rdparty deps of our owned deps.
    #
    # Note that we need only consider requirements that are direct dependencies of our owned deps:
    # If T depends on R indirectly, then it must be via some direct deps U1, U2, ... For each such U,
    # if U is in the owned deps then we'll pick up R through U. And if U is not in the owned deps
    # then it's owned by an exported target ET, and so R will be in the requirements for ET, and we
    # will require ET.
    direct_deps_tgts = await MultiGet(
        Get(Targets, DependenciesRequest(tgt.get(Dependencies))) for tgt in owned_by_us
    )
    reqs = PexRequirements.create_from_requirement_fields(
        tgt[PythonRequirementsField]
        for tgt in itertools.chain.from_iterable(direct_deps_tgts)
        if tgt.has_field(PythonRequirementsField)
    )
    req_strs = list(reqs)
    # Add the requirements on any exported targets on which we depend.
    kwargs_for_exported_targets_we_depend_on = await MultiGet(
        Get(SetupKwargs, OwnedDependency(tgt)) for tgt in owned_by_others
    )
    req_strs.extend(
        f"{kwargs.name}{setup_py_generation.first_party_dependency_version(kwargs.version)}"
        for kwargs in set(kwargs_for_exported_targets_we_depend_on)
    )
    return ExportedTargetRequirements(req_strs)
@rule(desc="Find all code to be published in the distribution", level=LogLevel.DEBUG)
async def get_owned_dependencies(
    dependency_owner: DependencyOwner, union_membership: UnionMembership
) -> OwnedDependencies:
    """Find the dependencies of dependency_owner that are owned by it.

    Includes dependency_owner itself.
    """
    transitive_targets = await Get(
        TransitiveTargets,
        TransitiveTargetsRequest([dependency_owner.exported_target.target.address]),
    )
    ownable_targets = [
        tgt for tgt in transitive_targets.closure if is_ownable_target(tgt, union_membership)
    ]
    # Resolve the owner of every ownable target, then keep only the ones owned by us.
    owners = await MultiGet(Get(ExportedTarget, OwnedDependency(tgt)) for tgt in ownable_targets)
    owned_dependencies = [
        tgt
        for owner, tgt in zip(owners, ownable_targets)
        if owner == dependency_owner.exported_target
    ]
    return OwnedDependencies(OwnedDependency(t) for t in owned_dependencies)
@rule(desc="Get exporting owner for target")
async def get_exporting_owner(owned_dependency: OwnedDependency) -> ExportedTarget:
    """Find the exported target that owns the given target (and therefore exports it).

    The owner of T (i.e., the exported target in whose artifact T's code is published) is:
     1. An exported target that depends on T (or is T itself).
     2. Is T's closest filesystem ancestor among those satisfying 1.

    If there are multiple such exported targets at the same degree of ancestry, the ownership
    is ambiguous and an error is raised. If there is no exported target that depends on T
    and is its ancestor, then there is no owner and an error is raised.
    """
    target = owned_dependency.target
    ancestor_addrs = AscendantAddresses(target.address.spec_path)
    ancestor_tgts = await Get(Targets, AddressSpecs([ancestor_addrs]))
    # Note that addresses sort by (spec_path, target_name), and all these targets are
    # ancestors of the given target, i.e., their spec_paths are all prefixes. So sorting by
    # address will effectively sort by closeness of ancestry to the given target.
    exported_ancestor_tgts = sorted(
        [t for t in ancestor_tgts if t.has_field(PythonProvidesField)],
        key=lambda t: t.address,
        reverse=True,
    )
    exported_ancestor_iter = iter(exported_ancestor_tgts)
    # Walk ancestors from closest to furthest; the first that transitively depends on
    # `target` is the candidate owner.
    for exported_ancestor in exported_ancestor_iter:
        transitive_targets = await Get(
            TransitiveTargets, TransitiveTargetsRequest([exported_ancestor.address])
        )
        if target in transitive_targets.closure:
            owner = exported_ancestor
            # Find any exported siblings of owner that also depend on target. They have the
            # same spec_path as it, so they must immediately follow it in ancestor_iter.
            sibling_owners = []
            sibling = next(exported_ancestor_iter, None)
            while sibling and sibling.address.spec_path == owner.address.spec_path:
                transitive_targets = await Get(
                    TransitiveTargets, TransitiveTargetsRequest([sibling.address])
                )
                if target in transitive_targets.closure:
                    sibling_owners.append(sibling)
                sibling = next(exported_ancestor_iter, None)
            if sibling_owners:
                all_owners = [exported_ancestor] + sibling_owners
                raise AmbiguousOwnerError(
                    f"Found multiple sibling python_distribution targets that are the closest "
                    f"ancestor dependees of {target.address} and are therefore candidates to "
                    f"own it: {', '.join(o.address.spec for o in all_owners)}. Only a "
                    f"single such owner is allowed, to avoid ambiguity."
                )
            return ExportedTarget(owner)
    raise NoOwnerError(
        f"No python_distribution target found to own {target.address}. Note that "
        f"the owner must be in or above the owned target's directory, and must "
        f"depend on it (directly or indirectly)."
    )
def is_ownable_target(tgt: Target, union_membership: UnionMembership) -> bool:
    """Whether `tgt` is the kind of target an exported `python_distribution` can own."""
    # A python_distribution target can own itself. This is so that if there are any 3rdparty
    # requirements directly on the python_distribution target, we apply them to the dist.
    # This isn't particularly useful (3rdparty requirements should be on the python_library
    # that consumes them)... but users may expect it to work anyway.
    if tgt.has_field(PythonProvidesField):
        return True
    # Python sources and resources are code/data that gets published in a dist.
    if tgt.has_field(PythonSources) or tgt.has_field(ResourcesSources):
        return True
    # Finally, targets whose sources can be generated into Python sources also count.
    return tgt.get(Sources).can_generate(PythonSources, union_membership)
# Convenient type alias for the pair (package name, data files in the package).
PackageDatum = Tuple[str, Tuple[str, ...]]
# Distutils does not support unicode strings in setup.py, so we must explicitly convert to binary
# strings as pants uses unicode_literals. A natural and prior technique was to use `pprint.pformat`,
# but that embeds u's in the string itself during conversion. For that reason we roll our own
# literal pretty-printer here.
#
# Note that we must still keep this code, even though Pants only runs with Python 3, because
# the created product may still be run by Python 2.
#
# For more information, see http://bugs.python.org/issue13943.
def distutils_repr(obj):
    """Compute a string repr suitable for use in generated setup.py files.

    Strings are emitted without u/b prefixes, mappings and iterables are pretty-printed
    recursively with 4-space indentation, and everything else falls back to repr().
    """
    output = io.StringIO()
    linesep = os.linesep

    def _write(data):
        output.write(ensure_text(data))

    def _write_repr(o, indent=False, level=0):
        pad = " " * 4 * level
        if indent:
            _write(pad)
        level += 1
        if isinstance(o, (bytes, str)):
            # The py2 repr of str (unicode) is `u'...'` and we don't want the `u` prefix; likewise,
            # the py3 repr of bytes is `b'...'` and we don't want the `b` prefix so we hand-roll a
            # repr here.
            o_txt = ensure_text(o)
            if linesep in o_txt:
                # Multi-line strings become triple-quoted literals, with any embedded
                # triple-quotes escaped.
                _write('"""{}"""'.format(o_txt.replace('"""', r"\"\"\"")))
            else:
                _write("'{}'".format(o_txt.replace("'", r"\'")))
        elif isinstance(o, abc.Mapping):
            _write("{" + linesep)
            for k, v in o.items():
                _write_repr(k, indent=True, level=level)
                _write(": ")
                _write_repr(v, indent=False, level=level)
                _write("," + linesep)
            _write(pad + "}")
        elif isinstance(o, abc.Iterable):
            # Pick delimiters matching the concrete collection type.
            if isinstance(o, abc.MutableSequence):
                open_collection, close_collection = "[]"
            elif isinstance(o, abc.Set):
                open_collection, close_collection = "{}"
            else:
                open_collection, close_collection = "()"
            _write(open_collection + linesep)
            for i in o:
                _write_repr(i, indent=True, level=level)
                _write("," + linesep)
            _write(pad + close_collection)
        else:
            _write(repr(o))  # Numbers and bools.
    _write_repr(obj)
    return output.getvalue()
def find_packages(
    *,
    python_files: Set[str],
    resource_files: Set[str],
    init_py_digest_contents: DigestContents,
    py2: bool,
) -> Tuple[Tuple[str, ...], Tuple[str, ...], Tuple[PackageDatum, ...]]:
    """Analyze the package structure for the given sources.

    Returns a tuple (packages, namespace_packages, package_data), suitable for use as setup()
    kwargs.
    """

    def to_package(src_path: str) -> str:
        # "a/b/file.ext" -> dotted package name "a.b".
        return os.path.dirname(src_path).replace(os.path.sep, ".")

    # Find all packages implied by the sources.
    # Python 2: An __init__.py file denotes a package.
    # Python 3: Any directory containing python source files is a package.
    packages: Set[str] = {
        to_package(f) for f in python_files if not py2 or os.path.basename(f) == "__init__.py"
    }
    # Now find all package_data: attach each resource to its closest enclosing package,
    # since resources will be loaded relative to that package.
    package_data: Dict[str, List[str]] = defaultdict(list)
    for resource_file in resource_files:
        enclosing = to_package(resource_file)
        while enclosing and enclosing not in packages:
            enclosing = enclosing.rpartition(".")[0]
        if not enclosing:
            # Not inside any package: there's no principled way to load it, so ignore it.
            continue
        rel_path = os.path.relpath(resource_file, enclosing.replace(".", os.path.sep))
        package_data[enclosing].append(rel_path)
    # See which packages are pkg_resources-style namespace packages.
    # Note that implicit PEP 420 namespace packages and pkgutil-style namespace packages
    # should *not* be listed in the setup namespace_packages kwarg. That's for pkg_resources-style
    # namespace packages only. See https://github.com/pypa/sample-namespace-packages/.
    init_py_by_path: Dict[str, bytes] = {ipc.path: ipc.content for ipc in init_py_digest_contents}
    namespace_packages: Set[str] = set()
    for pkg in packages:
        init_path = os.path.join(pkg.replace(".", os.path.sep), "__init__.py")
        content = init_py_by_path.get(init_path)
        if content is not None and declares_pkg_resources_namespace_package(content.decode()):
            namespace_packages.add(pkg)
    return (
        tuple(sorted(packages)),
        tuple(sorted(namespace_packages)),
        tuple((pkg, tuple(sorted(files))) for pkg, files in package_data.items()),
    )
def declares_pkg_resources_namespace_package(python_src: str) -> bool:
    """Given .py file contents, determine if it declares a pkg_resources-style namespace package.

    Detects pkg_resources-style namespaces. See here for details:
    https://packaging.python.org/guides/packaging-namespace-packages/.

    Note: Accepted namespace package decls are valid Python syntax in all Python versions,
    so this code can, e.g., detect namespace packages in Python 2 code while running on Python 3.
    """
    import ast

    def _is_name(node: ast.AST, name: str) -> bool:
        return isinstance(node, ast.Name) and node.id == name

    def _is_call_to(node: ast.AST, func_name: str) -> bool:
        # Matches both `declare_namespace(...)` and `<anything>.declare_namespace(...)`.
        if not isinstance(node, ast.Call):
            return False
        callee = node.func
        if isinstance(callee, ast.Attribute) and callee.attr == func_name:
            return True
        return _is_name(callee, func_name)

    def _has_args(call_node: ast.Call, required_arg_ids: Tuple[str, ...]) -> bool:
        # True iff the call's positional args are exactly the required bare names.
        supplied = call_node.args
        if len(supplied) != len(required_arg_ids):
            return False
        return tuple(a.id for a in supplied if isinstance(a, ast.Name)) == required_arg_ids

    try:
        tree = ast.parse(python_src)
    except SyntaxError:
        # The namespace package incantations we check for are valid code in all Python versions.
        # So if the code isn't parseable we know it isn't a valid namespace package.
        return False
    # Note that these checks are slightly heuristic. It is possible to construct adversarial code
    # that would defeat them. But the only consequence would be an incorrect namespace_packages list
    # in setup.py, and we're assuming our users aren't trying to shoot themselves in the foot.
    # pkg_resources-style namespace, e.g.,
    # __import__('pkg_resources').declare_namespace(__name__).
    return any(
        _is_call_to(node, "declare_namespace") and _has_args(cast(ast.Call, node), ("__name__",))
        for node in ast.walk(tree)
    )
def rules():
    """Return this module's rules plus the union registration for packaging python dists."""
    return [
        *python_sources_rules(),
        *collect_rules(),
        UnionRule(PackageFieldSet, PythonDistributionFieldSet),
    ]
| apache-2.0 |
azumimuo/family-xbmc-addon | plugin.video.bubbles/resources/lib/externals/hachoir/hachoir_parser/image/bmp.py | 1 | 6874 | """
Microsoft Bitmap picture parser.
- file extension: ".bmp"
Author: Victor Stinner
Creation: 16 december 2005
"""
from resources.lib.externals.hachoir.hachoir_parser import Parser
from resources.lib.externals.hachoir.hachoir_core.field import (FieldSet,
UInt8, UInt16, UInt32, Bits,
String, RawBytes, Enum,
PaddingBytes, NullBytes, createPaddingField)
from resources.lib.externals.hachoir.hachoir_core.endian import LITTLE_ENDIAN
from resources.lib.externals.hachoir.hachoir_core.text_handler import textHandler, hexadecimal
from resources.lib.externals.hachoir.hachoir_parser.image.common import RGB, PaletteRGBA
from resources.lib.externals.hachoir.hachoir_core.tools import alignValue
class Pixel4bit(Bits):
    """A single 4-bit pixel value, as used by 4 bpp (16-color) BMP images."""
    static_size = 4
    def __init__(self, parent, name):
        Bits.__init__(self, parent, name, 4)
class ImageLine(FieldSet):
    """One row of BMP pixels, padded to a 32-bit boundary as required by the format."""
    def __init__(self, parent, name, width, pixel_class):
        FieldSet.__init__(self, parent, name)
        self._pixel = pixel_class
        self._width = width
        # Rows are aligned up to a multiple of 32 bits.
        self._size = alignValue(self._width * self._pixel.static_size, 32)
    def createFields(self):
        for x in xrange(self._width):
            yield self._pixel(self, "pixel[]")
        # Emit a padding field for the alignment bits, if any.
        size = self.size - self.current_size
        if size:
            yield createPaddingField(self, size)
class ImagePixels(FieldSet):
    """The full BMP pixel array: rows are stored bottom-up, each as an ImageLine."""
    def __init__(self, parent, name, width, height, pixel_class, size=None):
        FieldSet.__init__(self, parent, name, size=size)
        self._width = width
        self._height = height
        self._pixel = pixel_class
    def createFields(self):
        # BMP stores rows bottom-to-top, so yield line[height-1] down to line[0].
        for y in xrange(self._height-1, -1, -1):
            yield ImageLine(self, "line[%u]" % y, self._width, self._pixel)
        size = (self.size - self.current_size) // 8
        if size:
            yield NullBytes(self, "padding", size)
class CIEXYZ(FieldSet):
    """A color endpoint made of three 32-bit x/y/z components (used by v4 BMP headers)."""
    def createFields(self):
        yield UInt32(self, "x")
        yield UInt32(self, "y")
        yield UInt32(self, "z")
class BmpHeader(FieldSet):
    """BMP information header; supports versions 2 (12 bytes), 3 (40) and 4 (108).

    The header version is determined by `header_size`, and later fields are only
    parsed when the declared size includes them.
    """
    color_space_name = {
        1: "Business (Saturation)",
        2: "Graphics (Relative)",
        4: "Images (Perceptual)",
        8: "Absolute colormetric (Absolute)",
    }
    def getFormatVersion(self):
        # Infer the version from which fields were actually parsed.
        if "gamma_blue" in self:
            return 4
        if "important_color" in self:
            return 3
        return 2
    def createFields(self):
        # Version 2 (12 bytes)
        yield UInt32(self, "header_size", "Header size")
        yield UInt32(self, "width", "Width (pixels)")
        yield UInt32(self, "height", "Height (pixels)")
        yield UInt16(self, "nb_plan", "Number of plan (=1)")
        yield UInt16(self, "bpp", "Bits per pixel") # may be zero for PNG/JPEG picture
        # Version 3 (40 bytes)
        if self["header_size"].value < 40:
            return
        yield Enum(UInt32(self, "compression", "Compression method"), BmpFile.COMPRESSION_NAME)
        yield UInt32(self, "image_size", "Image size (bytes)")
        yield UInt32(self, "horizontal_dpi", "Horizontal DPI")
        yield UInt32(self, "vertical_dpi", "Vertical DPI")
        yield UInt32(self, "used_colors", "Number of color used")
        yield UInt32(self, "important_color", "Number of import colors")
        # Version 4 (108 bytes)
        if self["header_size"].value < 108:
            return
        yield textHandler(UInt32(self, "red_mask"), hexadecimal)
        yield textHandler(UInt32(self, "green_mask"), hexadecimal)
        yield textHandler(UInt32(self, "blue_mask"), hexadecimal)
        yield textHandler(UInt32(self, "alpha_mask"), hexadecimal)
        yield Enum(UInt32(self, "color_space"), self.color_space_name)
        yield CIEXYZ(self, "red_primary")
        yield CIEXYZ(self, "green_primary")
        yield CIEXYZ(self, "blue_primary")
        yield UInt32(self, "gamma_red")
        yield UInt32(self, "gamma_green")
        yield UInt32(self, "gamma_blue")
def parseImageData(parent, name, size, header):
    """Build the field for the image payload: decoded pixel rows when the pixel
    format is one we understand, raw bytes otherwise."""
    # Only uncompressed (0) and bitfields (3) layouts store plain pixel rows;
    # a v2 header has no "compression" field and is always uncompressed.
    plain_rows = ("compression" not in header) or (header["compression"].value in (0, 3))
    if plain_rows:
        bpp_to_pixel_class = {32: UInt32, 24: RGB, 8: UInt8, 4: Pixel4bit}
        pixel_class = bpp_to_pixel_class.get(header["bpp"].value)
        if pixel_class:
            return ImagePixels(
                parent, name, header["width"].value, header["height"].value,
                pixel_class, size=size*8)
    # Unknown bpp or compressed data: keep it opaque.
    return RawBytes(parent, name, size)
class BmpFile(Parser):
    """Parser for Microsoft bitmap (BMP) pictures (header versions 2, 3 and 4)."""
    PARSER_TAGS = {
        "id": "bmp",
        "category": "image",
        "file_ext": ("bmp",),
        "mime": (u"image/x-ms-bmp", u"image/x-bmp"),
        "min_size": 30*8,
#        "magic": (("BM", 0),),
        "magic_regex": ((
            # "BM", <filesize>, <reserved>, header_size=(12|40|108)
            "BM.{4}.{8}[\x0C\x28\x6C]\0{3}",
            0),),
        "description": "Microsoft bitmap (BMP) picture"
    }
    endian = LITTLE_ENDIAN
    COMPRESSION_NAME = {
        0: u"Uncompressed",
        1: u"RLE 8-bit",
        2: u"RLE 4-bit",
        3: u"Bitfields",
        4: u"JPEG",
        5: u"PNG",
    }
    def validate(self):
        """Cheap sanity checks; return True or an error message string."""
        if self.stream.readBytes(0, 2) != 'BM':
            return "Wrong file signature"
        if self["header/header_size"].value not in (12, 40, 108):
            # BUG FIX: this previously read self["header_size"], a nonexistent top-level
            # key, so formatting the error message itself raised instead of reporting it.
            return "Unknown header size (%s)" % self["header/header_size"].value
        if self["header/nb_plan"].value != 1:
            return "Invalid number of planes"
        return True
    def createFields(self):
        """Yield the BMP file structure: file header, info header, palette, pixels."""
        yield String(self, "signature", 2, "Header (\"BM\")", charset="ASCII")
        yield UInt32(self, "file_size", "File size (bytes)")
        yield PaddingBytes(self, "reserved", 4, "Reserved")
        yield UInt32(self, "data_start", "Data start position")
        yield BmpHeader(self, "header")
        # Compute number of color
        header = self["header"]
        bpp = header["bpp"].value
        if 0 < bpp <= 8:
            # Palettized image: use the declared color count, or the bpp-implied maximum.
            if "used_colors" in header and header["used_colors"].value:
                nb_color = header["used_colors"].value
            else:
                nb_color = (1 << bpp)
        else:
            nb_color = 0
        # Color palette (if any)
        if nb_color:
            yield PaletteRGBA(self, "palette", nb_color)
        # Seek to data start
        field = self.seekByte(self["data_start"].value)
        if field:
            yield field
        # Image pixels: bounded both by the declared file size and the actual stream size.
        size = min(self["file_size"].value-self["data_start"].value, (self.size - self.current_size)//8)
        yield parseImageData(self, "pixels", size, header)
    def createDescription(self):
        return u"Microsoft Bitmap version %s" % self["header"].getFormatVersion()
    def createContentSize(self):
        # Content size in bits, from the declared file size in bytes.
        return self["file_size"].value * 8
| gpl-2.0 |
arun6582/django | tests/messages_tests/test_session.py | 91 | 1743 | from django.contrib.messages import constants
from django.contrib.messages.storage.base import Message
from django.contrib.messages.storage.session import SessionStorage
from django.test import TestCase
from django.utils.safestring import SafeData, mark_safe
from .base import BaseTests
def set_session_data(storage, messages):
    """Store `messages` (serialized) in the backend request's session and drop the
    backend's loaded-data cache so the next read deserializes afresh."""
    serialized = storage.serialize_messages(messages)
    storage.request.session[storage.session_key] = serialized
    try:
        del storage._loaded_data
    except AttributeError:
        pass
def stored_session_messages_count(storage):
    """Return how many messages are currently serialized in the backend's session."""
    raw = storage.request.session.get(storage.session_key, [])
    return len(storage.deserialize_messages(raw))
class SessionTests(BaseTests, TestCase):
    """Runs the shared message-storage test suite against the session backend."""
    storage_class = SessionStorage
    def get_request(self):
        # Substitute a plain dict for the session so no session engine is needed.
        self.session = {}
        request = super().get_request()
        request.session = self.session
        return request
    def stored_messages_count(self, storage, response):
        # The session backend ignores the response; messages live in the session.
        return stored_session_messages_count(storage)
    def test_get(self):
        storage = self.storage_class(self.get_request())
        example_messages = ['test', 'me']
        set_session_data(storage, example_messages)
        self.assertEqual(list(storage), example_messages)
    def test_safedata(self):
        """
        A message containing SafeData keeps its safe status when retrieved from
        the message storage.
        """
        storage = self.get_storage()
        message = Message(constants.DEBUG, mark_safe("<b>Hello Django!</b>"))
        set_session_data(storage, [message])
        self.assertIsInstance(list(storage)[0].message, SafeData)
| bsd-3-clause |
Fireblend/chromium-crosswalk | chrome/test/mini_installer/uninstall_chrome.py | 123 | 2753 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Uninstalls Chrome.
This script reads the uninstall command from registry, calls it, and verifies
the output status code.
"""
import _winreg
import optparse
import subprocess
import sys
def main():
  """Read Chrome's uninstall command from the Windows registry, run it, and
  verify the expected exit status.

  Returns 0 on success (or if the key is absent and --no-error-if-absent was
  given), 1 if the user aborts an interactive uninstall. Raises KeyError when
  the uninstall registry key is missing, or Exception when the uninstaller
  exits with an unexpected status.
  """
  parser = optparse.OptionParser(description='Uninstall Chrome.')
  parser.add_option('--system-level', action='store_true', dest='system_level',
                    default=False, help='Uninstall Chrome at system level.')
  parser.add_option('--chrome-long-name', default='Google Chrome',
                    help='Google Chrome or Chromium)')
  parser.add_option('--interactive', action='store_true', dest='interactive',
                    default=False, help='Ask before uninstalling Chrome.')
  parser.add_option('--no-error-if-absent', action='store_true',
                    dest='no_error_if_absent', default=False,
                    help='No error if the registry key for uninstalling Chrome '
                         'is absent.')
  options, _ = parser.parse_args()
  # TODO(sukolsak): Add support for uninstalling MSI-based Chrome installs when
  # we support testing MSIs.
  # System-level installs register under HKLM; user-level under HKCU.
  if options.system_level:
    root_key = _winreg.HKEY_LOCAL_MACHINE
  else:
    root_key = _winreg.HKEY_CURRENT_USER
  sub_key = ('SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\%s' %
             options.chrome_long_name)
  # Query the key. It will throw a WindowsError if the key doesn't exist.
  try:
    key = _winreg.OpenKey(root_key, sub_key, 0, _winreg.KEY_QUERY_VALUE)
  except WindowsError:
    if options.no_error_if_absent:
      return 0
    raise KeyError('Registry key %s\\%s is missing' % (
        'HKEY_LOCAL_MACHINE' if options.system_level else 'HKEY_CURRENT_USER',
        sub_key))
  # Optionally confirm with the user before doing anything destructive.
  if options.interactive:
    prompt = ('Warning: This will uninstall %s at %s. Do you want to continue? '
              '(y/N) ' % (options.chrome_long_name,
                          'system-level' if
                          options.system_level else 'user-level'))
    if raw_input(prompt).strip() != 'y':
      print >> sys.stderr, 'User aborted'
      return 1
  uninstall_string, _ = _winreg.QueryValueEx(key, 'UninstallString')
  # --force-uninstall suppresses the installer's own confirmation UI.
  exit_status = subprocess.call(uninstall_string + ' --force-uninstall',
                                shell=True)
  # The exit status for successful uninstallation of Chrome is 19 (see
  # chrome/installer/util/util_constants.h).
  if exit_status != 19:
    raise Exception('Could not uninstall Chrome. The installer exited with '
                    'status %d.' % exit_status)
  return 0
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause |
urbn/kombu | t/unit/utils/test_functional.py | 1 | 9030 | from __future__ import absolute_import, unicode_literals
import pickle
import pytest
from itertools import count
from case import Mock, mock, skip
from kombu.five import (
items, PY3,
)
from kombu.utils import functional as utils
from kombu.utils.functional import (
ChannelPromise, LRUCache, fxrange, fxrangemax, memoize, lazy,
maybe_evaluate, maybe_list, reprcall, reprkwargs, retry_over_time,
accepts_argument,
)
class test_ChannelPromise:

    def test_repr(self):
        channel_callback = Mock(name='cb')
        promise = ChannelPromise(channel_callback)
        # repr() should describe the promise without evaluating it, so the
        # wrapped callable must never be invoked.
        assert 'promise' in repr(promise)
        channel_callback.assert_not_called()
class test_shufflecycle:

    def test_shuffles(self):
        """shufflecycle yields members of the input set until the underlying
        repeat sequence is exhausted."""
        # Patch utils.repeat so the otherwise-infinite cycle stops after 10.
        prev_repeat, utils.repeat = utils.repeat, Mock()
        try:
            utils.repeat.return_value = list(range(10))
            values = {'A', 'B', 'C'}
            cycle = utils.shufflecycle(values)
            seen = set()
            for i in range(10):
                # BUG FIX: record what the cycle yields so the subset
                # assertion below actually checks something (previously
                # `seen` was never updated, making the assert vacuous).
                seen.add(next(cycle))
            utils.repeat.assert_called_with(None)
            assert seen.issubset(values)
            # After 10 iterations the patched repeat is exhausted.
            with pytest.raises(StopIteration):
                next(cycle)
                next(cycle)
        finally:
            utils.repeat = prev_repeat
def double(x):
    """Return *x* multiplied by two (shared helper for the tests below)."""
    return x * 2
class test_LRUCache:

    def test_expires(self):
        """Inserting past the limit evicts the oldest entries."""
        limit = 100
        x = LRUCache(limit=limit)
        slots = list(range(limit * 2))
        for i in slots:
            x[i] = i
        assert list(x.keys()) == list(slots[limit:])
        assert x.items()
        assert x.values()

    def test_is_pickleable(self):
        """An LRUCache survives a pickle round-trip with its limit intact."""
        x = LRUCache(limit=10)
        x.update(luke=1, leia=2)
        y = pickle.loads(pickle.dumps(x))
        # BUG FIX: this previously compared y.limit to itself, which is
        # always true; compare against the original cache's limit instead.
        assert y.limit == x.limit
        assert y == x

    def test_update_expires(self):
        """update() honours the size limit the same way item assignment does."""
        limit = 100
        x = LRUCache(limit=limit)
        slots = list(range(limit * 2))
        for i in slots:
            x.update({i: i})
        assert list(x.keys()) == list(slots[limit:])

    def test_least_recently_used(self):
        x = LRUCache(3)
        x[1], x[2], x[3] = 1, 2, 3
        # BUG FIX: these assertions previously used a comma instead of `==`
        # (`assert expr, msg` form), which made them always pass. The
        # intended expected key orders are restored below.
        assert list(x.keys()) == [1, 2, 3]
        x[4], x[5] = 4, 5
        assert list(x.keys()) == [3, 4, 5]
        # access 3, which makes it the last used key.
        x[3]
        x[6] = 6
        assert list(x.keys()) == [5, 3, 6]
        x[7] = 7
        assert list(x.keys()) == [3, 6, 7]

    def test_update_larger_than_cache_size(self):
        x = LRUCache(2)
        x.update({x: x for x in range(100)})
        # BUG FIX: comma-form assert replaced with a real equality check;
        # only the two most recent keys survive.
        assert list(x.keys()) == [98, 99]

    def test_items(self):
        c = LRUCache()
        c.update(a=1, b=2, c=3)
        assert list(items(c))

    def test_incr(self):
        """incr() increments a stored string counter, keeping it a string."""
        c = LRUCache()
        c.update(a='1')
        c.incr('a')
        assert c['a'] == '2'
def test_memoize():
    """memoize caches results per argument with LRU eviction at maxsize."""
    calls = count(1)

    @memoize(maxsize=2)
    def fetch(i):
        return next(calls)

    # Repeated calls with a cached argument do not re-invoke the function.
    assert fetch(1) == 1
    assert fetch(1) == 1
    assert fetch(2) == 2
    assert fetch(3) == 3
    # maxsize=2 means caching 3 evicted 1, so fetch(1) recomputes.
    assert fetch(1) == 4
    fetch.clear()
    assert fetch(3) == 5
class test_lazy:

    def test__str__(self):
        # str() forces evaluation of the wrapped callable.
        assert (str(lazy(lambda: 'the quick brown fox')) ==
                'the quick brown fox')

    def test__repr__(self):
        # strip('u') tolerates the u'' prefix Python 2 adds to unicode reprs.
        assert repr(lazy(lambda: 'fi fa fo')).strip('u') == "'fi fa fo'"

    @skip.if_python3()
    def test__cmp__(self):
        # __cmp__ only exists on Python 2.
        assert lazy(lambda: 10).__cmp__(lazy(lambda: 20)) == -1
        assert lazy(lambda: 10).__cmp__(5) == 1

    def test_evaluate(self):
        # Calling a lazy evaluates it; extra positional args are forwarded.
        assert lazy(lambda: 2 + 2)() == 4
        assert lazy(lambda x: x * 4, 2) == 8
        assert lazy(lambda x: x * 8, 2)() == 16

    def test_cmp(self):
        # Equality compares the evaluated results.
        assert lazy(lambda: 10) == lazy(lambda: 10)
        assert lazy(lambda: 10) != lazy(lambda: 20)

    def test__reduce__(self):
        # lazy objects survive a pickle round-trip (requires a picklable
        # function, hence module-level `double` instead of a lambda).
        x = lazy(double, 4)
        y = pickle.loads(pickle.dumps(x))
        assert x() == y()

    def test__deepcopy__(self):
        from copy import deepcopy
        x = lazy(double, 4)
        y = deepcopy(x)
        assert x._fun == y._fun
        assert x._args == y._args
        assert x() == y()
@pytest.mark.parametrize('obj,expected', [
    (lazy(lambda: 10), 10),
    (20, 20),
])
def test_maybe_evaluate(obj, expected):
    # A lazy object is evaluated; plain values pass through unchanged.
    assert maybe_evaluate(obj) == expected
class test_retry_over_time:

    class Predicate(Exception):
        """Exception type treated as retryable in these tests."""
        pass

    def setup(self):
        # Counts how many times errback has fired for the current test.
        self.index = 0

    def myfun(self):
        # Fails the first nine calls, then succeeds with 42.
        if self.index < 9:
            raise self.Predicate()
        return 42

    def errback(self, exc, intervals, retries):
        interval = next(intervals)
        # Expected interval sequence produced by retry_over_time with
        # interval_max=14 -- NOTE(review): values appear to grow by a step
        # of 2 and cap near interval_max; confirm against the implementation.
        sleepvals = (None, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0, 16.0, 16.0)
        self.index += 1
        assert interval == sleepvals[self.index]
        return interval

    @mock.sleepdeprived(module=utils)
    def test_simple(self):
        # Patch utils.count to bound the otherwise-infinite retry counter.
        prev_count, utils.count = utils.count, Mock()
        try:
            # With only one attempt allowed, the call gives up and returns None.
            utils.count.return_value = list(range(1))
            x = retry_over_time(self.myfun, self.Predicate,
                                errback=None, interval_max=14)
            assert x is None
            # With ten attempts, the ninth failure is followed by success.
            utils.count.return_value = list(range(10))
            cb = Mock()
            x = retry_over_time(self.myfun, self.Predicate,
                                errback=self.errback, callback=cb,
                                interval_max=14)
            assert x == 42
            assert self.index == 9
            cb.assert_called_with()
        finally:
            utils.count = prev_count

    def test_retry_timeout(self):
        # A timeout causes the retryable exception to propagate after the
        # first failed attempt.
        with pytest.raises(self.Predicate):
            retry_over_time(
                self.myfun, self.Predicate,
                errback=self.errback, interval_max=14, timeout=1
            )
        assert self.index == 1
        # no errback
        with pytest.raises(self.Predicate):
            retry_over_time(
                self.myfun, self.Predicate,
                errback=None, timeout=1,
            )

    @mock.sleepdeprived(module=utils)
    def test_retry_zero(self):
        # max_retries=0 means the first failure propagates immediately,
        # without invoking the errback.
        with pytest.raises(self.Predicate):
            retry_over_time(
                self.myfun, self.Predicate,
                max_retries=0, errback=self.errback, interval_max=14,
            )
        assert self.index == 0
        # no errback
        with pytest.raises(self.Predicate):
            retry_over_time(
                self.myfun, self.Predicate,
                max_retries=0, errback=None, interval_max=14,
            )

    @mock.sleepdeprived(module=utils)
    def test_retry_once(self):
        # max_retries=1 allows exactly one retry (one errback call) before
        # the exception propagates.
        with pytest.raises(self.Predicate):
            retry_over_time(
                self.myfun, self.Predicate,
                max_retries=1, errback=self.errback, interval_max=14,
            )
        assert self.index == 1
        # no errback
        with pytest.raises(self.Predicate):
            retry_over_time(
                self.myfun, self.Predicate,
                max_retries=1, errback=None, interval_max=14,
            )

    @mock.sleepdeprived(module=utils)
    def test_retry_always(self):
        Predicate = self.Predicate

        class Fun(object):
            """Callable failing ten times, then returning 42."""

            def __init__(self):
                self.calls = 0

            def __call__(self, *args, **kwargs):
                try:
                    if self.calls >= 10:
                        return 42
                    raise Predicate()
                finally:
                    self.calls += 1

        fun = Fun()
        # max_retries=None retries forever, so the eventual success wins.
        assert retry_over_time(
            fun, self.Predicate,
            max_retries=None, errback=None, interval_max=14) == 42
        assert fun.calls == 11
@pytest.mark.parametrize('obj,expected', [
    (None, None),
    (1, [1]),
    ([1, 2, 3], [1, 2, 3]),
])
def test_maybe_list(obj, expected):
    # None passes through; scalars are wrapped; lists are returned as-is.
    assert maybe_list(obj) == expected


def test_fxrange__no_repeatlast():
    # Without repeatlast the float range simply stops at the upper bound.
    assert list(fxrange(1.0, 3.0, 1.0)) == [1.0, 2.0, 3.0]


@pytest.mark.parametrize('args,expected', [
    ((1.0, 3.0, 1.0, 30.0),
     [1.0, 2.0, 3.0, 3.0, 3.0, 3.0,
      3.0, 3.0, 3.0, 3.0, 3.0]),
    ((1.0, None, 1.0, 30.0),
     [1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0]),
])
def test_fxrangemax(args, expected):
    # args are (start, stop, step, max): with a stop the last value repeats,
    # without one the range keeps growing; either way the series ends once
    # the cumulative total reaches `max` (30.0 here).
    assert list(fxrangemax(*args)) == expected


def test_reprkwargs():
    # Smoke test: any non-empty mapping produces a non-empty repr.
    assert reprkwargs({'foo': 'bar', 1: 2, 'k': 'v'})


def test_reprcall():
    # Smoke test: a call signature repr is produced for name/args/kwargs.
    assert reprcall('add', (2, 2), {'copy': True})
class test_accepts_arg:
    def function(self, foo, bar, baz="baz"):
        """Sample function whose signature is inspected by the tests below."""
        pass

    def test_valid_argument(self):
        # Positional, keyword-defaulted, and `self` parameters all count.
        assert accepts_argument(self.function, 'self')
        assert accepts_argument(self.function, 'foo')
        assert accepts_argument(self.function, 'baz')

    def test_invalid_argument(self):
        assert not accepts_argument(self.function, 'random_argument')
        if PY3:
            # On Python 3, inspecting a class succeeds but reports no such
            # argument; on Python 2 it raises (see test_raise_exception).
            assert not accepts_argument(test_accepts_arg, 'foo')

    def test_raise_exception(self):
        # Non-callables are rejected with an exception.
        with pytest.raises(Exception):
            accepts_argument(None, 'foo')
        if not PY3:
            with pytest.raises(Exception):
                accepts_argument(test_accepts_arg, 'foo')
| bsd-3-clause |
denisenkom/django | django/core/management/commands/makemigrations.py | 5 | 3802 | import sys
import os
from optparse import make_option
from django.core.management.base import BaseCommand
from django.core.exceptions import ImproperlyConfigured
from django.db import connections, DEFAULT_DB_ALIAS
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.autodetector import MigrationAutodetector, InteractiveMigrationQuestioner
from django.db.migrations.state import ProjectState
from django.db.migrations.writer import MigrationWriter
from django.db.models.loading import cache
class Command(BaseCommand):
    """Management command that detects model changes and writes new
    migration files for the requested apps (or all apps)."""

    option_list = BaseCommand.option_list + (
        make_option('--empty', action='store_true', dest='empty', default=False,
            help='Make a blank migration.'),
    )

    help = "Creates new migration(s) for apps."
    usage_str = "Usage: ./manage.py makemigrations [--empty] [app [app ...]]"

    def handle(self, *app_labels, **options):
        """Detect changes for the given app labels and write migration files.

        Exits with status 2 if any requested app label is unknown.
        """
        self.verbosity = int(options.get('verbosity'))
        self.interactive = options.get('interactive')

        # Make sure the app they asked for exists
        app_labels = set(app_labels)
        bad_app_labels = set()
        for app_label in app_labels:
            try:
                cache.get_app(app_label)
            except ImproperlyConfigured:
                bad_app_labels.add(app_label)
        if bad_app_labels:
            for app_label in bad_app_labels:
                self.stderr.write("App '%s' could not be found. Is it in INSTALLED_APPS?" % app_label)
            sys.exit(2)

        # Load the current graph state. Takes a connection, but it's not used
        # (makemigrations doesn't look at the database state).
        loader = MigrationLoader(connections[DEFAULT_DB_ALIAS])

        # Detect changes by diffing the migrated state against current models.
        autodetector = MigrationAutodetector(
            loader.graph.project_state(),
            ProjectState.from_app_cache(cache),
            InteractiveMigrationQuestioner(specified_apps=app_labels),
        )
        changes = autodetector.changes(graph=loader.graph, trim_to_apps=app_labels or None)

        # No changes? Tell them.
        if not changes and self.verbosity >= 1:
            if len(app_labels) == 1:
                self.stdout.write("No changes detected in app '%s'" % app_labels.pop())
            elif len(app_labels) > 1:
                self.stdout.write("No changes detected in apps '%s'" % ("', '".join(app_labels)))
            else:
                self.stdout.write("No changes detected")
            return

        # Tracks which apps already have a migrations directory prepared,
        # so the mkdir/__init__.py setup runs at most once per app.
        directory_created = {}
        for app_label, migrations in changes.items():
            if self.verbosity >= 1:
                self.stdout.write(self.style.MIGRATE_HEADING("Migrations for '%s':" % app_label) + "\n")
            for migration in migrations:
                # Describe the migration
                writer = MigrationWriter(migration)
                if self.verbosity >= 1:
                    self.stdout.write("  %s:\n" % (self.style.MIGRATE_LABEL(writer.filename),))
                    for operation in migration.operations:
                        self.stdout.write("    - %s\n" % operation.describe())
                # Write it
                migrations_directory = os.path.dirname(writer.path)
                if not directory_created.get(app_label, False):
                    if not os.path.isdir(migrations_directory):
                        os.mkdir(migrations_directory)
                    init_path = os.path.join(migrations_directory, "__init__.py")
                    if not os.path.isfile(init_path):
                        open(init_path, "w").close()
                    # We just do this once per app
                    directory_created[app_label] = True
                with open(writer.path, "wb") as fh:
                    fh.write(writer.as_string())
| bsd-3-clause |
tchernomax/ansible | lib/ansible/modules/cloud/amazon/aws_glue_connection.py | 27 | 12285 | #!/usr/bin/python
# Copyright: (c) 2018, Rob White (@wimnat)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: aws_glue_connection
short_description: Manage an AWS Glue connection
description:
- Manage an AWS Glue connection. See U(https://aws.amazon.com/glue/) for details.
version_added: "2.6"
requirements: [ boto3 ]
author: "Rob White (@wimnat)"
options:
catalog_id:
description:
- The ID of the Data Catalog in which to create the connection. If none is supplied,
the AWS account ID is used by default.
required: false
connection_properties:
description:
- A dict of key-value pairs used as parameters for this connection.
required: true
connection_type:
description:
- The type of the connection. Currently, only JDBC is supported; SFTP is not supported.
required: false
default: JDBC
choices: [ 'JDBC', 'SFTP' ]
description:
description:
- The description of the connection.
required: false
match_criteria:
description:
- A list of UTF-8 strings that specify the criteria that you can use in selecting this connection.
required: false
name:
description:
- The name of the connection.
required: true
security_groups:
description:
- A list of security groups to be used by the connection. Use either security group name or ID.
required: false
state:
description:
- Create or delete the AWS Glue connection.
required: true
choices: [ 'present', 'absent' ]
subnet_id:
description:
- The subnet ID used by the connection.
required: false
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Create an AWS Glue connection
- aws_glue_connection:
name: my-glue-connection
connection_properties:
JDBC_CONNECTION_URL: jdbc:mysql://mydb:3306/databasename
USERNAME: my-username
PASSWORD: my-password
state: present
# Delete an AWS Glue connection
- aws_glue_connection:
name: my-glue-connection
state: absent
'''
RETURN = '''
connection_properties:
description: A dict of key-value pairs used as parameters for this connection.
returned: when state is present
type: dict
sample: {'JDBC_CONNECTION_URL':'jdbc:mysql://mydb:3306/databasename','USERNAME':'x','PASSWORD':'y'}
connection_type:
description: The type of the connection.
returned: when state is present
type: string
sample: JDBC
creation_time:
description: The time this connection definition was created.
returned: when state is present
type: string
sample: "2018-04-21T05:19:58.326000+00:00"
description:
description: Description of the job being defined.
returned: when state is present
type: string
sample: My first Glue job
last_updated_time:
description: The last time this connection definition was updated.
returned: when state is present
type: string
sample: "2018-04-21T05:19:58.326000+00:00"
match_criteria:
description: A list of criteria that can be used in selecting this connection.
returned: when state is present
type: list
sample: []
name:
description: The name of the connection definition.
returned: when state is present
type: string
sample: my-glue-connection
physical_connection_requirements:
description: A dict of physical connection requirements, such as VPC and SecurityGroup,
needed for making this connection successfully.
returned: when state is present
type: dict
sample: {'subnet-id':'subnet-aabbccddee'}
'''
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import camel_dict_to_snake_dict, get_ec2_security_group_ids_from_names
# Non-ansible imports
import copy
import time
try:
from botocore.exceptions import BotoCoreError, ClientError
except ImportError:
pass
def _get_glue_connection(connection, module):
    """
    Get an AWS Glue connection based on name. If not found, return None.

    :param connection: AWS boto3 glue connection
    :param module: Ansible module
    :return: boto3 Glue connection dict or None if not found
    """
    connection_name = module.params.get("name")
    connection_catalog_id = module.params.get("catalog_id")

    params = {'Name': connection_name}
    if connection_catalog_id is not None:
        params['CatalogId'] = connection_catalog_id

    try:
        return connection.get_connection(**params)['Connection']
    except ClientError as e:
        # Only ClientError carries the parsed AWS error payload; treat a
        # missing connection as "not found" and re-raise everything else
        # with the original traceback (bare `raise`, not `raise e`).
        if e.response['Error']['Code'] == 'EntityNotFoundException':
            return None
        raise
    except BotoCoreError:
        # BUG FIX: BotoCoreError has no `.response` attribute, so the former
        # combined handler crashed with AttributeError instead of surfacing
        # the real error. Let it propagate unchanged.
        raise
def _compare_glue_connection_params(user_params, current_params):
"""
Compare Glue connection params. If there is a difference, return True immediately else return False
:param user_params: the Glue connection parameters passed by the user
:param current_params: the Glue connection parameters currently configured
:return: True if any parameter is mismatched else False
"""
# Weirdly, boto3 doesn't return some keys if the value is empty e.g. Description
# To counter this, add the key if it's missing with a blank value
if 'Description' not in current_params:
current_params['Description'] = ""
if 'MatchCriteria' not in current_params:
current_params['MatchCriteria'] = list()
if 'PhysicalConnectionRequirements' not in current_params:
current_params['PhysicalConnectionRequirements'] = dict()
current_params['PhysicalConnectionRequirements']['SecurityGroupIdList'] = []
current_params['PhysicalConnectionRequirements']['SubnetId'] = ""
if 'ConnectionProperties' in user_params['ConnectionInput'] and user_params['ConnectionInput']['ConnectionProperties'] \
!= current_params['ConnectionProperties']:
return True
if 'ConnectionType' in user_params['ConnectionInput'] and user_params['ConnectionInput']['ConnectionType'] \
!= current_params['ConnectionType']:
return True
if 'Description' in user_params['ConnectionInput'] and user_params['ConnectionInput']['Description'] != current_params['Description']:
return True
if 'MatchCriteria' in user_params['ConnectionInput'] and set(user_params['ConnectionInput']['MatchCriteria']) != set(current_params['MatchCriteria']):
return True
if 'PhysicalConnectionRequirements' in user_params['ConnectionInput']:
if 'SecurityGroupIdList' in user_params['ConnectionInput']['PhysicalConnectionRequirements'] and \
set(user_params['ConnectionInput']['PhysicalConnectionRequirements']['SecurityGroupIdList']) \
!= set(current_params['PhysicalConnectionRequirements']['SecurityGroupIdList']):
return True
if 'SubnetId' in user_params['ConnectionInput']['PhysicalConnectionRequirements'] and \
user_params['ConnectionInput']['PhysicalConnectionRequirements']['SubnetId'] \
!= current_params['PhysicalConnectionRequirements']['SubnetId']:
return True
return False
def create_or_update_glue_connection(connection, connection_ec2, module, glue_connection):
    """
    Create or update an AWS Glue connection

    :param connection: AWS boto3 glue connection
    :param connection_ec2: AWS boto3 ec2 connection, used to resolve
        security group names to IDs
    :param module: Ansible module
    :param glue_connection: a dict of AWS Glue connection parameters or None
    :return: calls module.exit_json with the changed status and the
        (snake-cased) connection facts
    """
    changed = False

    # Build the ConnectionInput structure expected by the Glue API from the
    # module parameters, only including optional keys that were supplied.
    params = dict()
    params['ConnectionInput'] = dict()
    params['ConnectionInput']['Name'] = module.params.get("name")
    params['ConnectionInput']['ConnectionType'] = module.params.get("connection_type")
    params['ConnectionInput']['ConnectionProperties'] = module.params.get("connection_properties")
    if module.params.get("catalog_id") is not None:
        params['CatalogId'] = module.params.get("catalog_id")
    if module.params.get("description") is not None:
        params['ConnectionInput']['Description'] = module.params.get("description")
    if module.params.get("match_criteria") is not None:
        params['ConnectionInput']['MatchCriteria'] = module.params.get("match_criteria")
    if module.params.get("security_groups") is not None or module.params.get("subnet_id") is not None:
        params['ConnectionInput']['PhysicalConnectionRequirements'] = dict()
    if module.params.get("security_groups") is not None:
        # Get security group IDs from names
        security_group_ids = get_ec2_security_group_ids_from_names(module.params.get('security_groups'), connection_ec2, boto3=True)
        params['ConnectionInput']['PhysicalConnectionRequirements']['SecurityGroupIdList'] = security_group_ids
    if module.params.get("subnet_id") is not None:
        params['ConnectionInput']['PhysicalConnectionRequirements']['SubnetId'] = module.params.get("subnet_id")

    # If glue_connection is not None then check if it needs to be modified, else create it
    if glue_connection:
        if _compare_glue_connection_params(params, glue_connection):
            try:
                # We need to slightly modify the params for an update
                update_params = copy.deepcopy(params)
                update_params['Name'] = update_params['ConnectionInput']['Name']
                connection.update_connection(**update_params)
                changed = True
            except (BotoCoreError, ClientError) as e:
                module.fail_json_aws(e)
    else:
        try:
            connection.create_connection(**params)
            changed = True
        except (BotoCoreError, ClientError) as e:
            module.fail_json_aws(e)

    # If changed, get the Glue connection again. Poll up to 10 times with a
    # 10s sleep -- presumably to cope with Glue's eventual consistency;
    # TODO confirm whether a shorter wait suffices.
    if changed:
        glue_connection = None
        for i in range(10):
            glue_connection = _get_glue_connection(connection, module)
            if glue_connection is not None:
                break
            time.sleep(10)

    module.exit_json(changed=changed, **camel_dict_to_snake_dict(glue_connection))
def delete_glue_connection(connection, module, glue_connection):
    """
    Delete an AWS Glue connection if it exists.

    :param connection: AWS boto3 glue connection
    :param module: Ansible module
    :param glue_connection: a dict of AWS Glue connection parameters or None
    :return: calls module.exit_json with the changed status
    """
    params = {'ConnectionName': module.params.get("name")}
    catalog_id = module.params.get("catalog_id")
    if catalog_id is not None:
        params['CatalogId'] = catalog_id

    changed = False
    # Nothing to do when the connection does not exist (idempotent delete).
    if glue_connection:
        try:
            connection.delete_connection(**params)
        except (BotoCoreError, ClientError) as e:
            module.fail_json_aws(e)
        else:
            changed = True

    module.exit_json(changed=changed)
def main():
    """Module entry point: parse parameters, look up the existing Glue
    connection, and dispatch to create/update or delete."""
    argument_spec = (
        dict(
            catalog_id=dict(type='str'),
            connection_properties=dict(type='dict'),
            connection_type=dict(type='str', default='JDBC', choices=['JDBC', 'SFTP']),
            description=dict(type='str'),
            match_criteria=dict(type='list'),
            name=dict(required=True, type='str'),
            security_groups=dict(type='list'),
            state=dict(required=True, choices=['present', 'absent'], type='str'),
            subnet_id=dict(type='str')
        )
    )

    # connection_properties is only required when creating/updating.
    module = AnsibleAWSModule(argument_spec=argument_spec,
                              required_if=[
                                  ('state', 'present', ['connection_properties'])
                              ]
                              )

    connection_glue = module.client('glue')
    # EC2 client is needed to resolve security group names to IDs.
    connection_ec2 = module.client('ec2')

    glue_connection = _get_glue_connection(connection_glue, module)

    if module.params.get("state") == 'present':
        create_or_update_glue_connection(connection_glue, connection_ec2, module, glue_connection)
    else:
        delete_glue_connection(connection_glue, module, glue_connection)
if __name__ == '__main__':
main()
| gpl-3.0 |
joebowen/LogMyRocket_API | LogMyRocket/libraries/sys_packages/jmespath/functions.py | 21 | 13008 | import math
import json
import weakref
from jmespath import exceptions
from jmespath.compat import string_type as STRING_TYPE
from jmespath.compat import get_methods
# python types -> jmespath types
TYPES_MAP = {
'bool': 'boolean',
'list': 'array',
'dict': 'object',
'NoneType': 'null',
'unicode': 'string',
'str': 'string',
'float': 'number',
'int': 'number',
'OrderedDict': 'object',
'_Projection': 'array',
'_Expression': 'expref',
}
# jmespath types -> python types
REVERSE_TYPES_MAP = {
'boolean': ('bool',),
'array': ('list', '_Projection'),
'object': ('dict', 'OrderedDict',),
'null': ('None',),
'string': ('unicode', 'str'),
'number': ('float', 'int'),
'expref': ('_Expression',),
}
def populate_function_table(cls):
    """Class decorator: register every method carrying a ``signature``
    attribute (attached by ``@builtin_function``) in ``cls.FUNCTION_TABLE``
    under its name with the ``_func_`` prefix stripped."""
    table = cls.FUNCTION_TABLE
    for name, method in get_methods(cls):
        signature = getattr(method, 'signature', None)
        if signature is None:
            continue
        # name[6:] strips the leading '_func_' to get the jmespath name.
        table[name[6:]] = {"function": method, "signature": signature}
    return cls
def builtin_function(*arguments):
    """Decorator factory that attaches the jmespath argument signature to a
    function so ``populate_function_table`` can later register it."""
    def _attach_signature(func):
        func.signature = arguments
        return func
    return _attach_signature
@populate_function_table
class RuntimeFunctions(object):
    """Dispatch table and implementations for the jmespath built-in
    functions. Type checking follows the jmespath type model, which does
    not map 1-1 onto Python types."""

    # The built in functions are automatically populated in the FUNCTION_TABLE
    # using the @builtin_function decorator on methods defined in this class.
    FUNCTION_TABLE = {
    }

    def __init__(self):
        # Weak reference to the tree interpreter (set via the property below).
        self._interpreter = None

    @property
    def interpreter(self):
        # Dereference the weakref; also returns None if the interpreter has
        # already been garbage collected.
        if self._interpreter is None:
            return None
        else:
            return self._interpreter()

    @interpreter.setter
    def interpreter(self, value):
        # A weakref is used because we have
        # a cyclic reference and we want to allow
        # for the memory to be properly freed when
        # the objects are no longer needed.
        self._interpreter = weakref.ref(value)
    def call_function(self, function_name, resolved_args):
        """Look up *function_name* in the table, validate *resolved_args*
        against its declared signature, and invoke it.

        Raises UnknownFunctionError for unregistered names and
        ArityError/JMESPathTypeError for bad arguments.
        """
        try:
            spec = self.FUNCTION_TABLE[function_name]
        except KeyError:
            raise exceptions.UnknownFunctionError(
                "Unknown function: %s()" % function_name)
        function = spec['function']
        signature = spec['signature']
        self._validate_arguments(resolved_args, signature, function_name)
        return function(self, *resolved_args)

    def _validate_arguments(self, args, signature, function_name):
        # A trailing {'variadic': True} entry means "at least this many
        # arguments"; otherwise the count must match exactly.
        if signature and signature[-1].get('variadic'):
            if len(args) < len(signature):
                raise exceptions.VariadictArityError(
                    len(signature), len(args), function_name)
        elif len(args) != len(signature):
            raise exceptions.ArityError(
                len(signature), len(args), function_name)
        return self._type_check(args, signature, function_name)

    def _type_check(self, actual, signature, function_name):
        # An empty 'types' list means the parameter accepts any type.
        for i in range(len(signature)):
            allowed_types = signature[i]['types']
            if allowed_types:
                self._type_check_single(actual[i], allowed_types,
                                        function_name)

    def _type_check_single(self, current, types, function_name):
        # Type checking involves checking the top level type,
        # and in the case of arrays, potentially checking the types
        # of each element.
        allowed_types, allowed_subtypes = self._get_allowed_pytypes(types)
        # We're not using isinstance() on purpose.
        # The type model for jmespath does not map
        # 1-1 with python types (booleans are considered
        # integers in python for example).
        actual_typename = type(current).__name__
        if actual_typename not in allowed_types:
            raise exceptions.JMESPathTypeError(
                function_name, current,
                self._convert_to_jmespath_type(actual_typename), types)
        # If we're dealing with a list type, we can have
        # additional restrictions on the type of the list
        # elements (for example a function can require a
        # list of numbers or a list of strings).
        # Arrays are the only types that can have subtypes.
        if allowed_subtypes:
            self._subtype_check(current, allowed_subtypes,
                                types, function_name)

    def _get_allowed_pytypes(self, types):
        """Translate jmespath type names (possibly 'array-<subtype>') into
        allowed Python type names and, separately, allowed element types."""
        allowed_types = []
        allowed_subtypes = []
        for t in types:
            type_ = t.split('-', 1)
            if len(type_) == 2:
                type_, subtype = type_
                allowed_subtypes.append(REVERSE_TYPES_MAP[subtype])
            else:
                type_ = type_[0]
            allowed_types.extend(REVERSE_TYPES_MAP[type_])
        return allowed_types, allowed_subtypes

    def _subtype_check(self, current, allowed_subtypes, types, function_name):
        if len(allowed_subtypes) == 1:
            # The easy case, we know up front what type
            # we need to validate.
            allowed_subtypes = allowed_subtypes[0]
            for element in current:
                actual_typename = type(element).__name__
                if actual_typename not in allowed_subtypes:
                    raise exceptions.JMESPathTypeError(
                        function_name, element, actual_typename, types)
        elif len(allowed_subtypes) > 1 and current:
            # Dynamic type validation.  Based on the first
            # type we see, we validate that the remaining types
            # match.
            first = type(current[0]).__name__
            for subtypes in allowed_subtypes:
                if first in subtypes:
                    allowed = subtypes
                    break
            else:
                raise exceptions.JMESPathTypeError(
                    function_name, current[0], first, types)
            for element in current:
                actual_typename = type(element).__name__
                if actual_typename not in allowed:
                    raise exceptions.JMESPathTypeError(
                        function_name, element, actual_typename, types)
    @builtin_function({'types': ['number']})
    def _func_abs(self, arg):
        return abs(arg)

    @builtin_function({'types': ['array-number']})
    def _func_avg(self, arg):
        return sum(arg) / float(len(arg))

    @builtin_function({'types': [], 'variadic': True})
    def _func_not_null(self, *arguments):
        # Return the first non-null argument (implicitly None if all are).
        for argument in arguments:
            if argument is not None:
                return argument

    @builtin_function({'types': []})
    def _func_to_array(self, arg):
        if isinstance(arg, list):
            return arg
        else:
            return [arg]

    @builtin_function({'types': []})
    def _func_to_string(self, arg):
        # Strings pass through unchanged; everything else is JSON-encoded
        # with compact separators.
        if isinstance(arg, STRING_TYPE):
            return arg
        else:
            return json.dumps(arg, separators=(',', ':'),
                              default=str)

    @builtin_function({'types': []})
    def _func_to_number(self, arg):
        # Per the jmespath spec: numbers pass through, parseable strings are
        # converted, anything else yields null.
        if isinstance(arg, (list, dict, bool)):
            return None
        elif arg is None:
            return None
        elif isinstance(arg, (int, float)):
            return arg
        else:
            try:
                if '.' in arg:
                    return float(arg)
                else:
                    return int(arg)
            except ValueError:
                return None

    @builtin_function({'types': ['array', 'string']}, {'types': []})
    def _func_contains(self, subject, search):
        return search in subject

    @builtin_function({'types': ['string', 'array', 'object']})
    def _func_length(self, arg):
        return len(arg)

    @builtin_function({'types': ['string']}, {'types': ['string']})
    def _func_ends_with(self, search, suffix):
        return search.endswith(suffix)

    @builtin_function({'types': ['string']}, {'types': ['string']})
    def _func_starts_with(self, search, suffix):
        return search.startswith(suffix)

    @builtin_function({'types': ['array', 'string']})
    def _func_reverse(self, arg):
        if isinstance(arg, STRING_TYPE):
            return arg[::-1]
        else:
            return list(reversed(arg))

    @builtin_function({"types": ['number']})
    def _func_ceil(self, arg):
        return math.ceil(arg)

    @builtin_function({"types": ['number']})
    def _func_floor(self, arg):
        return math.floor(arg)

    @builtin_function({"types": ['string']}, {"types": ['array-string']})
    def _func_join(self, separator, array):
        return separator.join(array)

    @builtin_function({'types': ['expref']}, {'types': ['array']})
    def _func_map(self, expref, arg):
        # Evaluate the expression reference against each element.
        result = []
        for element in arg:
            result.append(self.interpreter.visit(expref.expression, element))
        return result

    @builtin_function({"types": ['array-number', 'array-string']})
    def _func_max(self, arg):
        # max()/min() of an empty array is null, not an error.
        if arg:
            return max(arg)
        else:
            return None

    @builtin_function({"types": ["object"], "variadic": True})
    def _func_merge(self, *arguments):
        # Later objects win on key collisions.
        merged = {}
        for arg in arguments:
            merged.update(arg)
        return merged

    @builtin_function({"types": ['array-number', 'array-string']})
    def _func_min(self, arg):
        if arg:
            return min(arg)
        else:
            return None

    @builtin_function({"types": ['array-string', 'array-number']})
    def _func_sort(self, arg):
        return list(sorted(arg))

    @builtin_function({"types": ['array-number']})
    def _func_sum(self, arg):
        return sum(arg)

    @builtin_function({"types": ['object']})
    def _func_keys(self, arg):
        # To be consistent with .values()
        # should we also return the indices of a list?
        return list(arg.keys())

    @builtin_function({"types": ['object']})
    def _func_values(self, arg):
        return list(arg.values())

    @builtin_function({'types': []})
    def _func_type(self, arg):
        # Order matters: bool must be tested before int, since Python bools
        # are ints.
        if isinstance(arg, STRING_TYPE):
            return "string"
        elif isinstance(arg, bool):
            return "boolean"
        elif isinstance(arg, list):
            return "array"
        elif isinstance(arg, dict):
            return "object"
        elif isinstance(arg, (float, int)):
            return "number"
        elif arg is None:
            return "null"
    @builtin_function({'types': ['array']}, {'types': ['expref']})
    def _func_sort_by(self, array, expref):
        if not array:
            return array
        # sort_by allows for the expref to be either a number or
        # a string, so we have some special logic to handle this.
        # We evaluate the first array element and verify that it's
        # either a string or a number.  We then create a key function
        # that validates that type, which requires that remaining array
        # elements resolve to the same type as the first element.
        required_type = self._convert_to_jmespath_type(
            type(self.interpreter.visit(expref.expression, array[0])).__name__)
        if required_type not in ['number', 'string']:
            raise exceptions.JMESPathTypeError(
                'sort_by', array[0], required_type, ['string', 'number'])
        keyfunc = self._create_key_func(expref.expression,
                                        [required_type],
                                        'sort_by')
        return list(sorted(array, key=keyfunc))

    @builtin_function({'types': ['array']}, {'types': ['expref']})
    def _func_min_by(self, array, expref):
        # The key function enforces that every element's key is a number or
        # a string; mismatches raise JMESPathTypeError.
        keyfunc = self._create_key_func(expref.expression,
                                        ['number', 'string'],
                                        'min_by')
        return min(array, key=keyfunc)
@builtin_function({'types': ['array']}, {'types': ['expref']})
def _func_max_by(self, array, expref):
keyfunc = self._create_key_func(expref.expression,
['number', 'string'],
'min_by')
return max(array, key=keyfunc)
    def _create_key_func(self, expr_node, allowed_types, function_name):
        """Build a key function for sort_by/min_by/max_by.

        The returned callable evaluates *expr_node* against each element
        and raises JMESPathTypeError (reported under *function_name*)
        when the result is not one of *allowed_types*.
        """
        # Bind the interpreter locally so the closure does not re-read it.
        interpreter = self.interpreter
        def keyfunc(x):
            result = interpreter.visit(expr_node, x)
            actual_typename = type(result).__name__
            jmespath_type = self._convert_to_jmespath_type(actual_typename)
            # allowed_types is in terms of jmespath types, not python types.
            if jmespath_type not in allowed_types:
                raise exceptions.JMESPathTypeError(
                    function_name, result, jmespath_type, allowed_types)
            return result
        return keyfunc
    def _convert_to_jmespath_type(self, pyobject):
        # Map a Python type name (e.g. 'str', 'int') to the corresponding
        # JMESPath type name; unrecognized names become 'unknown'.
        return TYPES_MAP.get(pyobject, 'unknown')
| gpl-3.0 |
salguarnieri/intellij-community | python/helpers/epydoc/util.py | 91 | 10077 | # epydoc -- Utility functions
#
# Copyright (C) 2005 Edward Loper
# Author: Edward Loper <edloper@loper.org>
# URL: <http://epydoc.sf.net>
#
# $Id: util.py 1671 2008-01-29 02:55:49Z edloper $
"""
Miscellaneous utility functions that are used by multiple modules.
@group Python source types: is_module_file, is_package_dir, is_pyname,
py_src_filename
@group Text processing: wordwrap, decode_with_backslashreplace,
plaintext_to_html
"""
__docformat__ = 'epytext en'
import os, os.path, re
######################################################################
## Python Source Types
######################################################################
# File extensions recognized as Python source and as compiled/binary modules.
PY_SRC_EXTENSIONS = ['.py', '.pyw']
PY_BIN_EXTENSIONS = ['.pyc', '.so', '.pyd']
def is_module_file(path):
    """Return a true value if *path* names an existing Python module
    file: a regular file whose basename is a valid identifier and whose
    extension is a known source or binary module extension."""
    # Make sure it's a file name.  (basestring: this is Python 2 code.)
    if not isinstance(path, basestring):
        return False
    (dir, filename) = os.path.split(path)
    (basename, extension) = os.path.splitext(filename)
    return (os.path.isfile(path) and
            re.match('[a-zA-Z_]\w*$', basename) and
            extension in PY_SRC_EXTENSIONS+PY_BIN_EXTENSIONS)
def is_src_filename(filename):
    """Return True if *filename* names an existing Python source file."""
    if not isinstance(filename, basestring):
        return False
    if not os.path.exists(filename):
        return False
    extension = os.path.splitext(filename)[1]
    return extension in PY_SRC_EXTENSIONS
def is_package_dir(dirname):
    """
    Return true if the given directory is a valid package directory
    (i.e., it names a directory that contains a valid __init__ file).

    NOTE(review): the docstring historically also promised an
    identifier check on the directory name; that check was removed
    (see the commented-out regex below).
    """
    # Make sure it's a directory name.  (Python 2: basestring.)
    if not isinstance(dirname, basestring):
        return False
    if not os.path.isdir(dirname):
        return False
    dirname = os.path.abspath(dirname)
    # Make sure it's a valid identifier. (Special case for
    # "foo/", where os.path.split -> ("foo", "").)
    (parent, dir) = os.path.split(dirname)
    if dir == '': (parent, dir) = os.path.split(parent)
    # The following constraint was removed because of sourceforge
    # bug #1787028 -- in some cases (eg eggs), it's too strict.
    #if not re.match('\w+$', dir):
    # return False
    for name in os.listdir(dirname):
        filename = os.path.join(dirname, name)
        if name.startswith('__init__.') and is_module_file(filename):
            return True
    else:
        # for/else: the loop finished without finding an __init__ module.
        return False
def is_pyname(name):
    """Return a truthy match object when *name* is a dotted Python name
    (e.g. ``a.b.c``), or None otherwise."""
    dotted_name = re.compile(r"\w+(\.\w+)*$")
    return dotted_name.match(name)
def py_src_filename(filename):
    """Return the Python source filename corresponding to *filename*:
    the name itself if it already carries a source extension, otherwise
    a sibling file with a source extension that actually exists.
    Raises ValueError when no such source file can be found."""
    basefile, extension = os.path.splitext(filename)
    if extension in PY_SRC_EXTENSIONS:
        return filename
    else:
        for ext in PY_SRC_EXTENSIONS:
            if os.path.isfile('%s%s' % (basefile, ext)):
                return '%s%s' % (basefile, ext)
        else:
            # for/else: no candidate extension matched an existing file.
            raise ValueError('Could not find a corresponding '
                             'Python source file for %r.' % filename)
def munge_script_name(filename):
    """Derive a documentation name for a script: 'script-' plus the
    file's basename with every non-word character turned into '_'."""
    basename = os.path.split(filename)[1]
    return 'script-' + re.sub(r'\W', '_', basename)
######################################################################
## Text Processing
######################################################################
def decode_with_backslashreplace(s):
    r"""
    Convert the given 8-bit string into unicode, treating any
    character c such that ord(c)<128 as an ascii character, and
    converting any c such that ord(c)>128 into a backslashed escape
    sequence.  (Python 2 only: relies on str.decode returning unicode.)
        >>> decode_with_backslashreplace('abc\xff\xe8')
        u'abc\\xff\\xe8'
    """
    # s.encode('string-escape') is not appropriate here, since it
    # also adds backslashes to some ascii chars (eg \ and ').
    assert isinstance(s, str)
    return (s
            .decode('latin1')
            .encode('ascii', 'backslashreplace')
            .decode('ascii'))
def wordwrap(str, indent=0, right=75, startindex=0, splitchars=''):
    """
    Word-wrap the given string.  I.e., add newlines to the string such
    that any lines that are longer than C{right} are broken into
    shorter lines (at the first whitespace sequence that occurs before
    index C{right}).  If the given string contains newlines, they will
    I{not} be removed.  Any lines that begin with whitespace will not
    be wordwrapped.
    @param indent: If specified, then indent each line by this number
        of spaces.
    @type indent: C{int}
    @param right: The right margin for word wrapping.  Lines that are
        longer than C{right} will be broken at the first whitespace
        sequence before the right margin.
    @type right: C{int}
    @param startindex: If specified, then assume that the first line
        is already preceeded by C{startindex} characters.
    @type startindex: C{int}
    @param splitchars: A list of non-whitespace characters which can
        be used to split a line.  (E.g., use '/\\' to allow path names
        to be split over multiple lines.)
    @rtype: C{str}
    """
    # Split into alternating separator/word chunks; the capturing group
    # keeps the separators (spaces, newlines, splitchar runs) in the list.
    if splitchars:
        chunks = re.split(r'( +|\n|[^ \n%s]*[%s])' %
                          (re.escape(splitchars), re.escape(splitchars)),
                          str.expandtabs())
    else:
        chunks = re.split(r'( +|\n)', str.expandtabs())
    # charindex tracks the current output column to decide when to wrap.
    result = [' '*(indent-startindex)]
    charindex = max(indent, startindex)
    for chunknum, chunk in enumerate(chunks):
        if (charindex+len(chunk) > right and charindex > 0) or chunk == '\n':
            # Start a new (indented) line; drop a leading space/newline
            # chunk so wrapped lines don't begin with stray whitespace.
            result.append('\n' + ' '*indent)
            charindex = indent
            if chunk[:1] not in ('\n', ' '):
                result.append(chunk)
                charindex += len(chunk)
        else:
            result.append(chunk)
            charindex += len(chunk)
    return ''.join(result).rstrip()+'\n'
def plaintext_to_html(s):
    """
    @return: An HTML string that encodes the given plaintext string.
        In particular, special characters (such as C{'<'} and C{'&'})
        are escaped.
    @rtype: C{string}
    """
    # Bug fix: the replacement strings had lost their entity names
    # (e.g. replace('&', '&') was an identity no-op), so nothing was
    # actually escaped.  '&' must be escaped first so the ampersands
    # introduced by the later replacements are not double-escaped.
    s = s.replace('&', '&amp;').replace('"', '&quot;')
    s = s.replace('<', '&lt;').replace('>', '&gt;')
    return s
def plaintext_to_latex(str, nbsp=0, breakany=0):
    """
    @return: A LaTeX string that encodes the given plaintext string.
        In particular, special characters (such as C{'$'} and C{'_'})
        are escaped, and tabs are expanded.
    @rtype: C{string}
    @param breakany: Insert hyphenation marks, so that LaTeX can
        break the resulting string at any point. This is useful for
        small boxes (e.g., the type box in the variable list table).
    @param nbsp: Replace every space with a non-breaking space
        (C{'~'}).
    """
    # The NUL ('\0') and SOH ('\1') bytes are used as temporary
    # sentinels: they cannot appear in reasonable plaintext input and
    # are substituted back at the end, after the escaping passes.
    # These get converted to hyphenation points later
    if breakany: str = re.sub('(.)', '\\1\1', str)
    # These get converted to \textbackslash later.
    str = str.replace('\\', '\0')
    # Expand tabs
    str = str.expandtabs()
    # These elements need to be backslashed.
    # NOTE(review): the character class lists '$' twice ('$' and '\$');
    # harmless but presumably unintended -- confirm against upstream.
    str = re.sub(r'([#$&%_\${}])', r'\\\1', str)
    # These elements have special names.
    str = str.replace('|', '{\\textbar}')
    str = str.replace('<', '{\\textless}')
    str = str.replace('>', '{\\textgreater}')
    str = str.replace('^', '{\\textasciicircum}')
    str = str.replace('~', '{\\textasciitilde}')
    str = str.replace('\0', r'{\textbackslash}')
    # replace spaces with non-breaking spaces
    if nbsp: str = str.replace(' ', '~')
    # Convert \1's to hyphenation points.
    if breakany: str = str.replace('\1', r'\-')
    return str
class RunSubprocessError(OSError):
    """Raised when a subprocess exits abnormally.

    The child's captured stdout and stderr are kept on the instance as
    ``out`` and ``err``.
    """
    def __init__(self, cmd, out, err):
        super(RunSubprocessError, self).__init__('%s failed' % cmd[0])
        self.out = out
        self.err = err
def run_subprocess(cmd, data=None):
    """
    Execute the command C{cmd} in a subprocess.  (Python 2 code: note
    the C{basestring} check and the C{except IOError, e} syntax.)
    @param cmd: The command to execute, specified as a list
        of string.
    @param data: A string containing data to send to the
        subprocess.
    @return: A tuple C{(out, err)}.
    @raise OSError: If there is any problem executing the
        command, or if its exitval is not 0.
    """
    if isinstance(cmd, basestring):
        cmd = cmd.split()
    # Under Python 2.4+, use subprocess
    try:
        from subprocess import Popen, PIPE
        pipe = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
        out, err = pipe.communicate(data)
        if hasattr(pipe, 'returncode'):
            if pipe.returncode == 0:
                return out, err
            else:
                raise RunSubprocessError(cmd, out, err)
        else:
            # Assume that there was an error iff anything was written
            # to the child's stderr.
            if err == '':
                return out, err
            else:
                raise RunSubprocessError(cmd, out, err)
    except ImportError:
        pass
    # Under Python 2.3 or earlier, on unix, use popen2.Popen3 so we
    # can access the return value.
    import popen2
    if hasattr(popen2, 'Popen3'):
        pipe = popen2.Popen3(' '.join(cmd), True)
        to_child = pipe.tochild
        from_child = pipe.fromchild
        child_err = pipe.childerr
        if data:
            to_child.write(data)
        to_child.close()
        # Drain both streams while polling so neither pipe fills up
        # and deadlocks the child.
        out = err = ''
        while pipe.poll() is None:
            out += from_child.read()
            err += child_err.read()
        out += from_child.read()
        err += child_err.read()
        if pipe.wait() == 0:
            return out, err
        else:
            raise RunSubprocessError(cmd, out, err)
    # Under Python 2.3 or earlier, on non-unix, use os.popen3
    else:
        to_child, from_child, child_err = os.popen3(' '.join(cmd), 'b')
        if data:
            try:
                to_child.write(data)
            # Guard for a broken pipe error
            except IOError, e:
                raise OSError(e)
        to_child.close()
        out = from_child.read()
        err = child_err.read()
        # Assume that there was an error iff anything was written
        # to the child's stderr.
        if err == '':
            return out, err
        else:
            raise RunSubprocessError(cmd, out, err)
| apache-2.0 |
EnviroCentre/jython-upgrade | jython/lib/test/test_seq_jy.py | 12 | 2043 | """Additional seq_tests
Made for Jython.
"""
import unittest
from collections import deque
from test import test_support
class SeqTestCase(unittest.TestCase):
    """Extra sequence-equality tests for Jython (Python 2: uses cmp())."""

    # Sequence types whose equality semantics are exercised below.
    types2test = list, tuple, deque

    def test_seq_item_equality(self):
        # Identity-equal elements must short-circuit comparison: the
        # element's __eq__ should never be invoked (eq_called stays empty).
        eq_called = []
        class Foo(object):
            def __eq__(self, other):
                eq_called.append(other)
                return False
        for type2test in self.types2test:
            foo = Foo()
            seq1 = type2test([foo])
            self.assertEqual(seq1, seq1)
            self.assertEqual(cmp(seq1, seq1), 0)
            seq2 = type2test([foo])
            self.assertEqual(seq1, seq2)
            self.assertEqual(cmp(seq1, seq2), 0)
            self.assertTrue(foo in seq1)
            self.assertFalse(eq_called)

    def test_seq_equality(self):
        # An element claiming equality to everything makes membership true.
        class Foo(object):
            def __eq__(self, other):
                return True
        foo = [Foo()]
        for type2test in self.types2test:
            self.assertTrue(type2test() in foo)

    def test_seq_subclass_equality(self):
        # Various combinations of PyObject._eq, overriden Object.equals,
        # and cmp implementations
        for type2test in self.types2test:
            class Foo(type2test):
                def __eq__(self, other):
                    return False
            l = type2test(['bar', 'baz'])
            foo = Foo(l)
            self.assertEqual(cmp(l, foo), 1)
            self.assertEqual(cmp(foo, foo), 0)
            seqs1 = type2test([l, foo])
            seqs2 = type2test([l, foo])
            self.assertEqual(seqs1, seqs1)
            self.assertEqual(seqs1, seqs2)
            self.assertEqual(cmp(seqs1, seqs2), 0)
            self.assertTrue(foo in seqs1)
            if hasattr(seqs1, 'count'):
                self.assertTrue(seqs1.count(foo), 1)
            if hasattr(seqs1, 'index'):
                self.assertEqual(seqs1.index(foo), 1)
def test_main():
    # Standard CPython/Jython test-suite entry point.
    test_support.run_unittest(SeqTestCase)

if __name__ == "__main__":
    test_main()
| mit |
msullivan/advent-of-code | 2020/17a.py | 1 | 1655 | #!/usr/bin/env python3
import copy
from collections import defaultdict
import sys
import re
def extract(s):
    """Pull every (possibly negative) integer out of *s*, in order."""
    return [int(token) for token in re.findall(r'-?\d+', s)]
def first(grid, x, y, dx, dy):
    """Walk from (x, y) in direction (dx, dy) across a 2-D grid and
    return the first seat ('L' or '#') hit, or '' once the ray leaves
    the grid.  (Day-11-style helper; not called by this solution.)
    """
    height, width = len(grid), len(grid[0])
    while True:
        x, y = x + dx, y + dy
        if not (0 <= x < width and 0 <= y < height):
            return ''
        if grid[y][x] in ('L', '#'):
            return grid[y][x]
# The 26 unit offsets of a 3-D cell's neighbours (zero vector excluded).
nbrs = [(x, y, z) for x in range(-1, 2) for y in range(-1, 2) for z in range(-1, 2) if not x == y == z == 0]
def add(v1, v2):
    """Component-wise sum of two equal-length vectors, as a tuple."""
    return tuple(a + b for a, b in zip(v1, v2))
def step(grid):
    """Run one Conway-cubes generation.

    *grid* is a defaultdict mapping 3-D coordinates to '#'/'.'; returns
    a (new_grid, changed) tuple.  Note that reading grid[...] inserts
    default '.' entries, which is how the active region grows.
    """
    ngrid = copy.deepcopy(grid)
    change = False
    # Only cells adjacent to (or part of) the known region can change,
    # so visit every known cell plus its 26 neighbours.
    for pos in list(grid):
        for dx in nbrs + [(0, 0, 0)]:
            npos = add(dx, pos)
            cnt = 0
            for d in nbrs:
                if grid[add(npos, d)] == "#":
                    cnt += 1
            # (Removed a leftover debug print of cnt that emitted one
            # line per neighbour per cell.)
            if grid[npos] == '#' and not (cnt == 2 or cnt == 3):
                ngrid[npos] = '.'
                change = True
            elif grid[npos] == '.' and cnt == 3:
                ngrid[npos] = '#'
                change = True
    return ngrid, change
def main(args):
    """Read the initial '#'/'.' grid from stdin as the z=0 plane, run
    six Conway-cubes generations, and print the active-cell count."""
    data = [list(s.strip()) for s in sys.stdin]
    grid = defaultdict(lambda: ".")
    for y in range(len(data)):
        for x in range(len(data[0])):
            grid[x, y, 0] = data[y][x]
    for i in range(6):
        # (Removed a leftover debug print that dumped the whole grid
        # every generation.)
        grid, _ = step(grid)
    print(len([x for x in grid.values() if x == '#']))

if __name__ == '__main__':
    sys.exit(main(sys.argv))
| mit |
wimberosa/samba | lib/dnspython/dns/rdataset.py | 91 | 11548 | # Copyright (C) 2001-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS rdatasets (an rdataset is a set of rdatas of a given type and class)"""
import random
import StringIO
import struct
import dns.exception
import dns.rdatatype
import dns.rdataclass
import dns.rdata
import dns.set
# define SimpleSet here for backwards compatibility
# (older code imported dns.rdataset.SimpleSet; it is now dns.set.Set).
SimpleSet = dns.set.Set
class DifferingCovers(dns.exception.DNSException):
    """Raised if an attempt is made to add a SIG/RRSIG whose covered type
    is not the same as that of the other rdatas in the rdataset."""
    # Raised by Rdataset.add() below.
    pass
class IncompatibleTypes(dns.exception.DNSException):
    """Raised if an attempt is made to add rdata of an incompatible type."""
    # Raised by Rdataset.add() when rdclass/rdtype do not match the set.
    pass
class Rdataset(dns.set.Set):
    """A DNS rdataset.
    @ivar rdclass: The class of the rdataset
    @type rdclass: int
    @ivar rdtype: The type of the rdataset
    @type rdtype: int
    @ivar covers: The covered type.  Usually this value is
    dns.rdatatype.NONE, but if the rdtype is dns.rdatatype.SIG or
    dns.rdatatype.RRSIG, then the covers value will be the rdata
    type the SIG/RRSIG covers.  The library treats the SIG and RRSIG
    types as if they were a family of
    types, e.g. RRSIG(A), RRSIG(NS), RRSIG(SOA).  This makes RRSIGs much
    easier to work with than if RRSIGs covering different rdata
    types were aggregated into a single RRSIG rdataset.
    @type covers: int
    @ivar ttl: The DNS TTL (Time To Live) value
    @type ttl: int
    """

    # No per-instance __dict__; rdatasets are created in large numbers.
    __slots__ = ['rdclass', 'rdtype', 'covers', 'ttl']

    def __init__(self, rdclass, rdtype, covers=dns.rdatatype.NONE):
        """Create a new rdataset of the specified class and type.
        @see: the description of the class instance variables for the
        meaning of I{rdclass} and I{rdtype}"""
        super(Rdataset, self).__init__()
        self.rdclass = rdclass
        self.rdtype = rdtype
        self.covers = covers
        self.ttl = 0

    def _clone(self):
        # Copy the set contents plus the rdataset-specific attributes.
        obj = super(Rdataset, self)._clone()
        obj.rdclass = self.rdclass
        obj.rdtype = self.rdtype
        obj.covers = self.covers
        obj.ttl = self.ttl
        return obj

    def update_ttl(self, ttl):
        """Set the TTL of the rdataset to be the lesser of the set's current
        TTL or the specified TTL.  If the set contains no rdatas, set the TTL
        to the specified TTL.
        @param ttl: The TTL
        @type ttl: int"""
        if len(self) == 0:
            self.ttl = ttl
        elif ttl < self.ttl:
            self.ttl = ttl

    def add(self, rd, ttl=None):
        """Add the specified rdata to the rdataset.
        If the optional I{ttl} parameter is supplied, then
        self.update_ttl(ttl) will be called prior to adding the rdata.
        @param rd: The rdata
        @type rd: dns.rdata.Rdata object
        @param ttl: The TTL
        @type ttl: int"""
        #
        # If we're adding a signature, do some special handling to
        # check that the signature covers the same type as the
        # other rdatas in this rdataset.  If this is the first rdata
        # in the set, initialize the covers field.
        #
        if self.rdclass != rd.rdclass or self.rdtype != rd.rdtype:
            raise IncompatibleTypes
        if not ttl is None:
            self.update_ttl(ttl)
        if self.rdtype == dns.rdatatype.RRSIG or \
           self.rdtype == dns.rdatatype.SIG:
            covers = rd.covers()
            if len(self) == 0 and self.covers == dns.rdatatype.NONE:
                self.covers = covers
            elif self.covers != covers:
                raise DifferingCovers
        # Singleton types may hold only one rdata; adding a new one
        # replaces the existing contents.
        if dns.rdatatype.is_singleton(rd.rdtype) and len(self) > 0:
            self.clear()
        super(Rdataset, self).add(rd)

    def union_update(self, other):
        # TTL folding mirrors update(): keep the smaller of the two TTLs.
        self.update_ttl(other.ttl)
        super(Rdataset, self).union_update(other)

    def intersection_update(self, other):
        self.update_ttl(other.ttl)
        super(Rdataset, self).intersection_update(other)

    def update(self, other):
        """Add all rdatas in other to self.
        @param other: The rdataset from which to update
        @type other: dns.rdataset.Rdataset object"""
        self.update_ttl(other.ttl)
        super(Rdataset, self).update(other)

    def __repr__(self):
        if self.covers == 0:
            ctext = ''
        else:
            ctext = '(' + dns.rdatatype.to_text(self.covers) + ')'
        return '<DNS ' + dns.rdataclass.to_text(self.rdclass) + ' ' + \
               dns.rdatatype.to_text(self.rdtype) + ctext + ' rdataset>'

    def __str__(self):
        return self.to_text()

    def __eq__(self, other):
        """Two rdatasets are equal if they have the same class, type, and
        covers, and contain the same rdata.
        @rtype: bool"""
        if not isinstance(other, Rdataset):
            return False
        if self.rdclass != other.rdclass or \
           self.rdtype != other.rdtype or \
           self.covers != other.covers:
            return False
        return super(Rdataset, self).__eq__(other)

    def __ne__(self, other):
        return not self.__eq__(other)

    def to_text(self, name=None, origin=None, relativize=True,
                override_rdclass=None, **kw):
        """Convert the rdataset into DNS master file format.
        @see: L{dns.name.Name.choose_relativity} for more information
        on how I{origin} and I{relativize} determine the way names
        are emitted.
        Any additional keyword arguments are passed on to the rdata
        to_text() method.
        @param name: If name is not None, emit a RRs with I{name} as
        the owner name.
        @type name: dns.name.Name object
        @param origin: The origin for relative names, or None.
        @type origin: dns.name.Name object
        @param relativize: True if names should names be relativized
        @type relativize: bool"""
        if not name is None:
            name = name.choose_relativity(origin, relativize)
            ntext = str(name)
            pad = ' '
        else:
            ntext = ''
            pad = ''
        # Python 2: StringIO buffer plus 'print >> s' statements.
        s = StringIO.StringIO()
        if not override_rdclass is None:
            rdclass = override_rdclass
        else:
            rdclass = self.rdclass
        if len(self) == 0:
            #
            # Empty rdatasets are used for the question section, and in
            # some dynamic updates, so we don't need to print out the TTL
            # (which is meaningless anyway).
            #
            print >> s, '%s%s%s %s' % (ntext, pad,
                                       dns.rdataclass.to_text(rdclass),
                                       dns.rdatatype.to_text(self.rdtype))
        else:
            for rd in self:
                print >> s, '%s%s%d %s %s %s' % \
                      (ntext, pad, self.ttl, dns.rdataclass.to_text(rdclass),
                       dns.rdatatype.to_text(self.rdtype),
                       rd.to_text(origin=origin, relativize=relativize, **kw))
        #
        # We strip off the final \n for the caller's convenience in printing
        #
        return s.getvalue()[:-1]

    def to_wire(self, name, file, compress=None, origin=None,
                override_rdclass=None, want_shuffle=True):
        """Convert the rdataset to wire format.
        @param name: The owner name of the RRset that will be emitted
        @type name: dns.name.Name object
        @param file: The file to which the wire format data will be appended
        @type file: file
        @param compress: The compression table to use; the default is None.
        @type compress: dict
        @param origin: The origin to be appended to any relative names when
        they are emitted.  The default is None.
        @returns: the number of records emitted
        @rtype: int
        """
        if not override_rdclass is None:
            rdclass = override_rdclass
            want_shuffle = False
        else:
            rdclass = self.rdclass
        file.seek(0, 2)
        if len(self) == 0:
            # Question-section style: no rdata, zero TTL and RDLENGTH.
            name.to_wire(file, compress, origin)
            stuff = struct.pack("!HHIH", self.rdtype, rdclass, 0, 0)
            file.write(stuff)
            return 1
        else:
            if want_shuffle:
                l = list(self)
                random.shuffle(l)
            else:
                l = self
            for rd in l:
                name.to_wire(file, compress, origin)
                # RDLENGTH is written as 0 here and back-patched below
                # once the rdata's encoded size is known.
                stuff = struct.pack("!HHIH", self.rdtype, rdclass,
                                    self.ttl, 0)
                file.write(stuff)
                start = file.tell()
                rd.to_wire(file, compress, origin)
                end = file.tell()
                assert end - start < 65536
                file.seek(start - 2)
                stuff = struct.pack("!H", end - start)
                file.write(stuff)
                file.seek(0, 2)
            return len(self)

    def match(self, rdclass, rdtype, covers):
        """Returns True if this rdataset matches the specified class, type,
        and covers"""
        if self.rdclass == rdclass and \
           self.rdtype == rdtype and \
           self.covers == covers:
            return True
        return False
def from_text_list(rdclass, rdtype, ttl, text_rdatas):
    """Create an rdataset with the specified class, type, and TTL, and with
    the specified list of rdatas in text format.
    @rtype: dns.rdataset.Rdataset object
    """
    # Accept mnemonic strings as well as numeric class/type codes.
    # (Python 2: the 'unicode' type.)
    if isinstance(rdclass, (str, unicode)):
        rdclass = dns.rdataclass.from_text(rdclass)
    if isinstance(rdtype, (str, unicode)):
        rdtype = dns.rdatatype.from_text(rdtype)
    r = Rdataset(rdclass, rdtype)
    r.update_ttl(ttl)
    for t in text_rdatas:
        rd = dns.rdata.from_text(r.rdclass, r.rdtype, t)
        r.add(rd)
    return r
def from_text(rdclass, rdtype, ttl, *text_rdatas):
    """Create an rdataset with the specified class, type, and TTL, and with
    the specified rdatas in text format.
    @rtype: dns.rdataset.Rdataset object
    """
    # Varargs convenience wrapper around from_text_list().
    return from_text_list(rdclass, rdtype, ttl, text_rdatas)
def from_rdata_list(ttl, rdatas):
    """Create an rdataset with the specified TTL, and with
    the specified list of rdata objects.  The class and type of the
    rdataset are taken from the first rdata in the list.
    @raise ValueError: if *rdatas* is empty.
    @rtype: dns.rdataset.Rdataset object
    """
    if len(rdatas) == 0:
        raise ValueError("rdata list must not be empty")
    r = None
    for rd in rdatas:
        # First rdata determines the rdataset's class/type.  (Removed a
        # dead leftover 'first_time = False' assignment that was never
        # read anywhere in this function.)
        if r is None:
            r = Rdataset(rd.rdclass, rd.rdtype)
            r.update_ttl(ttl)
        r.add(rd)
    return r
def from_rdata(ttl, *rdatas):
    """Create an rdataset with the specified TTL, and with
    the specified rdata objects.
    @rtype: dns.rdataset.Rdataset object
    """
    # Varargs convenience wrapper around from_rdata_list().
    return from_rdata_list(ttl, rdatas)
| gpl-3.0 |
asamerh4/mesos | support/push-commits.py | 1 | 4982 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This script is typically used by Mesos committers to push a locally applied
review chain to ASF git repo and mark the reviews as submitted on ASF
ReviewBoard.
Example Usage:
> git checkout master
> git pull origin
> ./support/apply-reviews.py -c -r 1234
> ./support/push-commits.py
"""
# TODO(vinod): Also post the commit message to the corresponding ASF JIRA
# tickets and resolve them if necessary.
import argparse
import os
import re
import sys
from subprocess import check_output
REVIEWBOARD_URL = 'https://reviews.apache.org'
def get_reviews(revision_range):
    """Return the list of reviews found in the commits in the revision range.

    Each entry is a (review id, full commit log) tuple, oldest first.
    Exits the process when a commit carries a malformed 'Review:' URL.
    """
    reviews = [] # List of (review id, commit log) tuples
    # Oldest-to-newest commit hashes in the range.
    rev_list = check_output(['git',
                             'rev-list',
                             '--reverse',
                             revision_range]).strip().split('\n')
    for rev in rev_list:
        commit_log = check_output(['git',
                                   '--no-pager',
                                   'show',
                                   '--no-color',
                                   '--no-patch',
                                   rev]).strip()
        pos = commit_log.find('Review: ')
        if pos != -1:
            # The URL must be '<REVIEWBOARD_URL>/r/<digits>' at the end of
            # the log (trailing slashes tolerated via strip('/')).
            pattern = re.compile('Review: ({url})$'.format(
                url=os.path.join(REVIEWBOARD_URL, 'r', '[0-9]+')))
            match = pattern.search(commit_log.strip().strip('/'))
            if match is None:
                print "\nInvalid ReviewBoard URL: '{}'".format(commit_log[pos:])
                sys.exit(1)
            url = match.group(1)
            # The review id is the last path component of the URL.
            reviews.append((os.path.basename(url), commit_log))
    return reviews
def close_reviews(reviews, options):
    """Mark the given reviews as submitted on ReviewBoard.

    Skips the actual 'rbt close' call when options['dry_run'] is set.
    """
    for review_id, commit_log in reviews:
        print 'Closing review', review_id
        if not options['dry_run']:
            # Use the final commit message as the close description.
            check_output(['rbt',
                          'close',
                          '--description',
                          commit_log,
                          review_id])
def parse_options():
    """Parse command line arguments into an options dictionary.

    Recognized flags:
      -n / --dry-run : perform a dry run (no remote side effects).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-n', '--dry-run',
                        action='store_true',
                        help='Perform a dry run.')
    parsed = parser.parse_args()
    return {'dry_run': parsed.dry_run}
def main():
    """Main function to push the commits in this branch as review requests.

    Refuses to run off the master branch; pushes master to its remote and
    then closes the ReviewBoard reviews referenced by the pushed commits.
    """
    options = parse_options()
    current_branch_ref = check_output(['git', 'symbolic-ref', 'HEAD']).strip()
    current_branch = current_branch_ref.replace('refs/heads/', '', 1)
    if current_branch != 'master':
        print 'Please run this script from master branch'
        sys.exit(1)
    remote_tracking_branch = check_output(['git',
                                           'rev-parse',
                                           '--abbrev-ref',
                                           'master@{upstream}']).strip()
    merge_base = check_output([
        'git',
        'merge-base',
        remote_tracking_branch,
        'master']).strip()
    # NOTE(review): merge_base is a commit sha while current_branch_ref is
    # a symbolic name ('refs/heads/master'); this equality check looks like
    # it can never hold, so the 'no new commits' guard may be dead -- confirm.
    if merge_base == current_branch_ref:
        print 'No new commits found to push'
        sys.exit(1)
    reviews = get_reviews(merge_base + ".." + current_branch_ref)
    # Push the current branch to remote master.
    remote = check_output(['git',
                           'config',
                           '--get',
                           'branch.master.remote']).strip()
    print 'Pushing commits to', remote
    if options['dry_run']:
        check_output(['git',
                      'push',
                      '--dry-run',
                      remote,
                      'master:master'])
    else:
        check_output(['git',
                      'push',
                      remote,
                      'master:master'])
    # Now mark the reviews as submitted.
    close_reviews(reviews, options)
if __name__ == '__main__':
main()
| apache-2.0 |
davisein/jitsudone | django/django/conf/locale/sv/formats.py | 86 | 1390 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j F'
SHORT_DATE_FORMAT = 'Y-m-d'
SHORT_DATETIME_FORMAT = 'Y-m-d H:i'
# 1 = Monday, the ISO 8601 / Swedish convention.
FIRST_DAY_OF_WEEK = 1
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# NOTE(review): the '%m/%d/...' orderings below are US month/day style,
# which is unusual for a Swedish locale -- confirm against upstream Django.
DATE_INPUT_FORMATS = (
    '%Y-%m-%d', # '2006-10-25'
    '%m/%d/%Y', # '10/25/2006'
    '%m/%d/%y', # '10/25/06'
)
TIME_INPUT_FORMATS = (
    '%H:%M:%S', # '14:30:59'
    '%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
    '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M', # '2006-10-25 14:30'
    '%Y-%m-%d', # '2006-10-25'
    '%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
    '%m/%d/%Y %H:%M', # '10/25/2006 14:30'
    '%m/%d/%Y', # '10/25/2006'
    '%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
    '%m/%d/%y %H:%M', # '10/25/06 14:30'
    '%m/%d/%y', # '10/25/06'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = u'\xa0' # non-breaking space
NUMBER_GROUPING = 3
willowtreeapps/python-spreedly | api.py | 2 | 12100 | import httplib, urllib2, time, calendar
from datetime import datetime
from decimal import Decimal
from xml.etree.ElementTree import fromstring
from xml.etree import ElementTree as ET
from base64 import b64encode
API_VERSION = 'v4'
def utc_to_local(dt):
    """Convert a naive UTC datetime into the equivalent local time."""
    timestamp = calendar.timegm(dt.timetuple())
    return datetime(*time.localtime(timestamp)[:6])
def str_to_datetime(s):
    """Parse an ISO 8601 UTC string ('%Y-%m-%dT%H:%M:%SZ') and return
    the corresponding LOCAL datetime."""
    parsed = datetime.strptime(s, '%Y-%m-%dT%H:%M:%SZ')
    return utc_to_local(parsed)
class Client:
    def __init__(self, token, site_name):
        """Configure a Spreedly API client for the given site.

        token: the Spreedly API token; site_name: the site short name.
        """
        # HTTP Basic credentials; Spreedly uses '<token>:x'.
        # (Python 2: b64encode accepts a str here.)
        self.auth = b64encode('%s:x' % token)
        self.base_host = 'spreedly.com'
        self.base_path = '/api/%s/%s' % (API_VERSION, site_name)
        self.base_url = 'https://%s%s' % (self.base_host, self.base_path)
        # Set per-request via set_url() before calling query().
        self.url = None
    def get_response(self):
        # Raw response body of the most recent query() call.
        return self.response
    def get_url(self):
        # Full URL the next query() will hit (set via set_url()).
        return self.url
    def set_url(self, url):
        # Join the endpoint path onto the site's base API URL.
        self.url = '%s/%s' % (self.base_url, url)
    def query(self, data=None, put=False):
        """Issue an HTTP request to the URL set by set_url() and store the
        body in self.response.  GET by default; POST when *data* is given;
        PUT when *put* is true.  (Python 2: urllib2.)"""
        opener = urllib2.build_opener(urllib2.HTTPHandler)
        req = urllib2.Request(url=self.get_url())
        req.add_header('User-agent', 'python-spreedly 1.0')
        req.add_header('Authorization', 'Basic %s' % self.auth)
        # Convert to POST if we got some data
        if data:
            req.add_header('Content-Type', 'application/xml')
            req.add_data(data)
        if put:
            # urllib2 has no native PUT support; override the verb.
            req.get_method = lambda: 'PUT'
        f = opener.open(req)
        self.response = f.read()
    def get_plans(self):
        """Fetch all subscription plans for the site and return them as a
        list of dicts with normalized keys/types (booleans, Decimal price,
        local datetimes)."""
        self.set_url('subscription_plans.xml')
        self.query()
        # Parse
        result = []
        tree = fromstring(self.get_response())
        for plan in tree.getiterator('subscription-plan'):
            # findtext returns None for missing elements; the int()/
            # Decimal() fields below assume the element is present.
            data = {
                'name': plan.findtext('name'),
                'description': plan.findtext('description'),
                'terms': plan.findtext('terms'),
                'plan_type': plan.findtext('plan-type'),
                'price': Decimal(plan.findtext('price')),
                'enabled': True if plan.findtext('enabled') == 'true' else False,
                'force_recurring': \
                    True if plan.findtext('force-recurring') == 'true' else False,
                'force_renew': \
                    True if plan.findtext('needs-to-be-renewed') == 'true' else False,
                'duration': int(plan.findtext('duration-quantity')),
                'duration_units': plan.findtext('duration-units'),
                'feature_level': plan.findtext('feature-level'),
                'return_url': plan.findtext('return-url'),
                'version': int(plan.findtext('version')) \
                    if plan.findtext('version') else 0,
                'speedly_id': int(plan.findtext('id')),
                'speedly_site_id': int(plan.findtext('site-id')) \
                    if plan.findtext('site-id') else 0,
                'created_at': str_to_datetime(plan.findtext('created-at')),
                'date_changed': str_to_datetime(plan.findtext('updated-at')),
            }
            result.append(data)
        return result
    def create_subscriber(self, customer_id, screen_name):
        '''
        Creates a subscriber on Spreedly and returns the parsed
        subscriber record as a dict (first element of the response).
        '''
        data = '''
        <subscriber>
            <customer-id>%d</customer-id>
            <screen-name>%s</screen-name>
        </subscriber>
        ''' % (customer_id, screen_name)
        self.set_url('subscribers.xml')
        self.query(data)
        # Parse
        result = []
        tree = fromstring(self.get_response())
        for plan in tree.getiterator('subscriber'):
            # Boolean fields arrive as the strings 'true'/'false'.
            data = {
                'customer_id': int(plan.findtext('customer-id')),
                'first_name': plan.findtext('billing-first-name'),
                'last_name': plan.findtext('billing-last-name'),
                'active': True if plan.findtext('active') == 'true' else False,
                'gift': True if plan.findtext('on-gift') == 'true' else False,
                'trial_active': \
                    True if plan.findtext('on-trial') == 'true' else False,
                'trial_elegible': \
                    True if plan.findtext('eligible-for-free-trial') == 'true' \
                    else False,
                'lifetime': \
                    True if plan.findtext('lifetime-subscription') == 'true' \
                    else False,
                'recurring': \
                    True if plan.findtext('recurring') == 'true' \
                    else False,
                'card_expires_before_next_auto_renew': \
                    True if plan.findtext('card-expires-before-next-auto-renew') == 'true' \
                    else False,
                'token': plan.findtext('token'),
                'name': plan.findtext('subscription-plan-name'),
                'feature_level': plan.findtext('feature-level'),
                'created_at': str_to_datetime(plan.findtext('created-at')),
                'date_changed': str_to_datetime(plan.findtext('updated-at')),
                'active_until': str_to_datetime(plan.findtext('active-until')) if plan.findtext('active-until') else None,
            }
            result.append(data)
        return result[0]
    def delete_subscriber(self, id):
        """Delete a subscriber by id and return the HTTP status code.

        Safety guard: only acts when the site path contains 'test';
        on any other site it silently returns None.
        """
        if 'test' in self.base_path:
            headers = {'Authorization': 'Basic %s' % self.auth}
            conn = httplib.HTTPSConnection(self.base_host)
            conn.request(
                'DELETE', '%s/subscribers/%d.xml' % (self.base_path, id),
                '',
                headers
            )
            response = conn.getresponse()
            return response.status
        return
def subscribe(self, subscriber_id, plan_id, trial=False):
    '''
    Subscribe a user to some plan.

    Returns the parsed <subscriber> record from the response as a dict
    (raises IndexError if the response contains no subscriber element).

    NOTE(review): set_url() is only called when ``trial`` is True, so a
    non-trial call issues the request against whatever URL was set by a
    previous call on this instance — confirm whether a non-trial
    subscribe endpoint was intended here.
    '''
    data = '<subscription_plan><id>%d</id></subscription_plan>' % plan_id
    if trial:
        self.set_url('subscribers/%d/subscribe_to_free_trial.xml' % subscriber_id)
    self.query(data)
    # Parse
    # Booleans arrive as the literal strings 'true'/'false'.
    result = []
    tree = fromstring(self.get_response())
    for plan in tree.getiterator('subscriber'):
        data = {
            'customer_id': int(plan.findtext('customer-id')),
            'first_name': plan.findtext('billing-first-name'),
            'last_name': plan.findtext('billing-last-name'),
            'active': True if plan.findtext('active') == 'true' else False,
            'gift': True if plan.findtext('on-gift') == 'true' else False,
            'trial_active': \
                True if plan.findtext('on-trial') == 'true' else False,
            'trial_elegible': \
                True if plan.findtext('eligible-for-free-trial') == 'true' \
                else False,
            'lifetime': \
                True if plan.findtext('lifetime-subscription') == 'true' \
                else False,
            'recurring': \
                True if plan.findtext('recurring') == 'true' \
                else False,
            'card_expires_before_next_auto_renew': \
                True if plan.findtext('card-expires-before-next-auto-renew') == 'true' \
                else False,
            'token': plan.findtext('token'),
            'name': plan.findtext('subscription-plan-name'),
            'feature_level': plan.findtext('feature-level'),
            'created_at': str_to_datetime(plan.findtext('created-at')),
            'date_changed': str_to_datetime(plan.findtext('updated-at')),
            'active_until': str_to_datetime(plan.findtext('active-until')) if plan.findtext('active-until') else None,
        }
        result.append(data)
    return result[0]
def cleanup(self):
    '''
    Removes ALL subscribers. NEVER USE IN PRODUCTION!

    Refuses to run unless the configured base path points at the test
    site; returns the HTTP status code when the request is actually made,
    None otherwise.
    '''
    if 'test' not in self.base_path:
        return
    auth_headers = {'Authorization': 'Basic %s' % self.auth}
    connection = httplib.HTTPSConnection(self.base_host)
    connection.request(
        'DELETE', '%s/subscribers.xml' % self.base_path,
        '',
        auth_headers
    )
    return connection.getresponse().status
def get_info(self, subscriber_id):
    '''
    Fetch a single subscriber's record and return it as a dict.

    Raises IndexError if the response contains no <subscriber> element;
    a missing subscriber surfaces as an HTTP error from query()
    (get_or_create_subscriber catches a 404 from this call).
    '''
    self.set_url('subscribers/%d.xml' % subscriber_id)
    self.query('')
    # Parse the XML response; booleans arrive as 'true'/'false' strings,
    # so comparing against 'true' yields the bool directly.
    result = []
    tree = fromstring(self.get_response())
    for plan in tree.getiterator('subscriber'):
        data = {
            'customer_id': int(plan.findtext('customer-id')),
            'email': plan.findtext('email'),
            'screen_name': plan.findtext('screen-name'),
            'first_name': plan.findtext('billing-first-name'),
            'last_name': plan.findtext('billing-last-name'),
            'active': plan.findtext('active') == 'true',
            'gift': plan.findtext('on-gift') == 'true',
            'trial_active': plan.findtext('on-trial') == 'true',
            # key spelling kept as-is (historical typo callers rely on)
            'trial_elegible':
                plan.findtext('eligible-for-free-trial') == 'true',
            'lifetime': plan.findtext('lifetime-subscription') == 'true',
            'recurring': plan.findtext('recurring') == 'true',
            'card_expires_before_next_auto_renew':
                plan.findtext('card-expires-before-next-auto-renew') == 'true',
            'token': plan.findtext('token'),
            'name': plan.findtext('subscription-plan-name'),
            'feature_level': plan.findtext('feature-level'),
            'created_at': str_to_datetime(plan.findtext('created-at')),
            'date_changed': str_to_datetime(plan.findtext('updated-at')),
            'active_until': str_to_datetime(plan.findtext('active-until')) if plan.findtext('active-until') else None,
        }
        result.append(data)
    return result[0]
def set_info(self, subscriber_id, **kw):
    """
    Update arbitrary subscriber fields via an HTTP PUT.

    Each keyword argument becomes a child element of the <subscriber>
    payload, named after the argument.
    """
    payload = ET.Element('subscriber')
    for field, text in kw.items():
        ET.SubElement(payload, field).text = text
    self.set_url('subscribers/%d.xml' % subscriber_id)
    self.query(data=ET.tostring(payload), put=True)
def create_complimentary_subscription(self, subscriber_id, duration, duration_units, feature_level):
    """
    Grant the subscriber a complimentary subscription of the given
    duration (e.g. duration=3, duration_units='months') at the given
    feature level.
    """
    payload = """<complimentary_subscription>
            <duration_quantity>%s</duration_quantity>
            <duration_units>%s</duration_units>
            <feature_level>%s</feature_level>
        </complimentary_subscription>""" % (duration, duration_units, feature_level)
    self.set_url('subscribers/%s/complimentary_subscriptions.xml' % subscriber_id)
    self.query(payload)
def complimentary_time_extensions(self, subscriber_id, duration, duration_units):
    """
    Extend the subscriber's current subscription by a complimentary
    amount of time (e.g. duration=1, duration_units='months').
    """
    payload = """<complimentary_time_extension>
            <duration_quantity>%s</duration_quantity>
            <duration_units>%s</duration_units>
        </complimentary_time_extension>""" % (duration, duration_units)
    self.set_url('subscribers/%s/complimentary_time_extensions.xml' % subscriber_id)
    self.query(payload)
def get_or_create_subscriber(self, subscriber_id, screen_name):
try:
return self.get_info(subscriber_id)
except urllib2.HTTPError, e:
if e.code == 404:
return self.create_subscriber(subscriber_id, screen_name)
| mit |
eckucukoglu/arm-linux-gnueabihf | lib/python2.7/distutils/tests/test_build_clib.py | 73 | 4970 | """Tests for distutils.command.build_clib."""
import unittest
import os
import sys
from test.test_support import run_unittest
from distutils.command.build_clib import build_clib
from distutils.errors import DistutilsSetupError
from distutils.tests import support
from distutils.spawn import find_executable
class BuildCLibTestCase(support.TempdirManager,
                        support.LoggingSilencer,
                        unittest.TestCase):
    """Exercise distutils' build_clib command: option validation,
    source-file collection, and building an actual static library."""
    def test_check_library_dist(self):
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)
        # 'libraries' option must be a list
        self.assertRaises(DistutilsSetupError, cmd.check_library_list, 'foo')
        # each element of 'libraries' must be a 2-tuple
        self.assertRaises(DistutilsSetupError, cmd.check_library_list,
                          ['foo1', 'foo2'])
        # first element of each tuple in 'libraries'
        # must be a string (the library name)
        self.assertRaises(DistutilsSetupError, cmd.check_library_list,
                          [(1, 'foo1'), ('name', 'foo2')])
        # library name may not contain directory separators
        self.assertRaises(DistutilsSetupError, cmd.check_library_list,
                          [('name', 'foo1'),
                           ('another/name', 'foo2')])
        # second element of each tuple must be a dictionary (build info)
        self.assertRaises(DistutilsSetupError, cmd.check_library_list,
                          [('name', {}),
                           ('another', 'foo2')])
        # those work
        libs = [('name', {}), ('name', {'ok': 'good'})]
        cmd.check_library_list(libs)
    def test_get_source_files(self):
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)
        # "in 'libraries' option 'sources' must be present and must be
        # a list of source filenames
        cmd.libraries = [('name', {})]
        self.assertRaises(DistutilsSetupError, cmd.get_source_files)
        cmd.libraries = [('name', {'sources': 1})]
        self.assertRaises(DistutilsSetupError, cmd.get_source_files)
        # both lists and tuples of filenames are accepted
        cmd.libraries = [('name', {'sources': ['a', 'b']})]
        self.assertEqual(cmd.get_source_files(), ['a', 'b'])
        cmd.libraries = [('name', {'sources': ('a', 'b')})]
        self.assertEqual(cmd.get_source_files(), ['a', 'b'])
        # sources from multiple libraries are concatenated in order
        cmd.libraries = [('name', {'sources': ('a', 'b')}),
                         ('name2', {'sources': ['c', 'd']})]
        self.assertEqual(cmd.get_source_files(), ['a', 'b', 'c', 'd'])
    def test_build_libraries(self):
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)
        # stub compiler so no real compilation happens
        class FakeCompiler:
            def compile(*args, **kw):
                pass
            create_static_lib = compile
        cmd.compiler = FakeCompiler()
        # build_libraries is also doing a bit of type checking
        lib = [('name', {'sources': 'notvalid'})]
        self.assertRaises(DistutilsSetupError, cmd.build_libraries, lib)
        # empty source lists/tuples are valid and build nothing
        lib = [('name', {'sources': list()})]
        cmd.build_libraries(lib)
        lib = [('name', {'sources': tuple()})]
        cmd.build_libraries(lib)
    def test_finalize_options(self):
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)
        # a string include_dirs is split into a one-element list
        cmd.include_dirs = 'one-dir'
        cmd.finalize_options()
        self.assertEqual(cmd.include_dirs, ['one-dir'])
        cmd.include_dirs = None
        cmd.finalize_options()
        self.assertEqual(cmd.include_dirs, [])
        # libraries on the distribution are validated during finalize
        cmd.distribution.libraries = 'WONTWORK'
        self.assertRaises(DistutilsSetupError, cmd.finalize_options)
    def test_run(self):
        # can't test on windows
        if sys.platform == 'win32':
            return
        pkg_dir, dist = self.create_dist()
        cmd = build_clib(dist)
        foo_c = os.path.join(pkg_dir, 'foo.c')
        self.write_file(foo_c, 'int main(void) { return 1;}\n')
        cmd.libraries = [('foo', {'sources': [foo_c]})]
        build_temp = os.path.join(pkg_dir, 'build')
        os.mkdir(build_temp)
        cmd.build_temp = build_temp
        cmd.build_clib = build_temp
        # before we run the command, we want to make sure
        # all commands are present on the system
        # by creating a compiler and checking its executables
        from distutils.ccompiler import new_compiler
        from distutils.sysconfig import customize_compiler
        compiler = new_compiler()
        customize_compiler(compiler)
        for ccmd in compiler.executables.values():
            if ccmd is None:
                continue
            if find_executable(ccmd[0]) is None:
                return # can't test
        # this should work
        cmd.run()
        # let's check the result
        self.assertTrue('libfoo.a' in os.listdir(build_temp))
def test_suite():
    """Return a TestSuite containing all tests from BuildCLibTestCase."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(BuildCLibTestCase)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    run_unittest(test_suite())
| gpl-2.0 |
indictranstech/omnitech-frappe | frappe/boot.py | 5 | 4919 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
"""
bootstrap client session
"""
import frappe
import frappe.defaults
import frappe.desk.desk_page
from frappe.utils import get_gravatar, get_url
from frappe.desk.form.load import get_meta_bundle
from frappe.utils.change_log import get_versions
def get_bootinfo():
	"""Build and return the boot info dict shipped to the desk client on login.

	Collects user info, system defaults, module/desktop config, page
	permissions, translations, timezone data, print settings and the
	documents ("docs") the client needs to bootstrap.
	"""
	frappe.set_user_lang(frappe.session.user)
	bootinfo = frappe._dict()
	hooks = frappe.get_hooks()
	doclist = []
	# user
	get_user(bootinfo)
	# system info
	bootinfo['sysdefaults'] = frappe.defaults.get_defaults()
	bootinfo['server_date'] = frappe.utils.nowdate()
	if frappe.session['user'] != 'Guest':
		bootinfo['user_info'] = get_fullnames()
		# fix: dropped a stray trailing semicolon here
		bootinfo['sid'] = frappe.session['sid']
	# desktop modules contributed by each installed app; apps without a
	# config.desktop module (or get_data attr) are simply skipped
	bootinfo.modules = {}
	for app in frappe.get_installed_apps():
		try:
			bootinfo.modules.update(frappe.get_attr(app + ".config.desktop.get_data")() or {})
		except ImportError:
			pass
		except AttributeError:
			pass
	bootinfo.module_app = frappe.local.module_app
	bootinfo.hidden_modules = frappe.db.get_global("hidden_modules")
	bootinfo.doctype_icons = dict(frappe.db.sql("""select name, icon from
		tabDocType where ifnull(icon,'')!=''"""))
	bootinfo.single_types = frappe.db.sql_list("""select name from tabDocType where ifnull(issingle,0)=1""")
	add_home_page(bootinfo, doclist)
	bootinfo.page_info = get_allowed_pages()
	load_translations(bootinfo)
	add_timezone_info(bootinfo)
	load_conf_settings(bootinfo)
	load_print(bootinfo, doclist)
	doclist.extend(get_meta_bundle("Page"))
	# ipinfo
	if frappe.session['data'].get('ipinfo'):
		bootinfo['ipinfo'] = frappe.session['data']['ipinfo']
	# add docs
	bootinfo['docs'] = doclist
	# let apps post-process bootinfo via their boot_session hooks
	for method in hooks.boot_session or []:
		frappe.get_attr(method)(bootinfo)
	if bootinfo.lang:
		bootinfo.lang = unicode(bootinfo.lang)
	bootinfo['versions'] = {k: v['version'] for k, v in get_versions().items()}
	bootinfo.error_report_email = frappe.get_hooks("error_report_email")
	bootinfo.calendars = sorted(frappe.get_hooks("calendars"))
	return bootinfo
def load_conf_settings(bootinfo):
	"""Copy whitelisted site-config values into bootinfo."""
	from frappe import conf
	# default upload limit: 5 MiB
	bootinfo.max_file_size = conf.get('max_file_size') or 5242880
	for key in ['developer_mode']:
		if key in conf:
			bootinfo[key] = conf.get(key)
def get_allowed_pages():
	# Build a map of page name -> {modified, title} covering every desk
	# Page the current user's roles may access.
	roles = frappe.get_roles()
	page_info = {}
	# pages explicitly granted to one of the user's roles
	for p in frappe.db.sql("""select distinct
		tabPage.name, tabPage.modified, tabPage.title
		from `tabPage Role`, `tabPage`
		where `tabPage Role`.role in (%s)
		and `tabPage Role`.parent = `tabPage`.name""" % ', '.join(['%s']*len(roles)),
		roles, as_dict=True):
		page_info[p.name] = {"modified":p.modified, "title":p.title}
	# pages where role is not set are also allowed
	for p in frappe.db.sql("""select name, modified, title
		from `tabPage` where
		(select count(*) from `tabPage Role`
		where `tabPage Role`.parent=tabPage.name) = 0""", as_dict=1):
		page_info[p.name] = {"modified":p.modified, "title":p.title}
	return page_info
def load_translations(bootinfo):
	# Only non-English sessions need a translation dict on the client.
	if frappe.local.lang != 'en':
		messages = frappe.get_lang_dict("boot")
		bootinfo["lang"] = frappe.lang
		# load translated report names
		for name in bootinfo.user.all_reports:
			messages[name] = frappe._(name)
		bootinfo["__messages"] = messages
def get_fullnames():
	"""map of user fullnames"""
	# One query pulls display data for all enabled, non-website users.
	ret = frappe.db.sql("""select name,
		concat(ifnull(first_name, ''),
		if(ifnull(last_name, '')!='', ' ', ''), ifnull(last_name, '')) as fullname,
		user_image as image, gender, email
		from tabUser where ifnull(enabled, 0)=1 and user_type!="Website User" """, as_dict=1)
	d = {}
	for r in ret:
		if not r.image:
			# fall back to a gravatar URL when the user has no photo
			r.image = get_gravatar()
		d[r.name] = r
	return d
def get_user(bootinfo):
	"""get user info"""
	# Attach the session user's profile (roles, defaults, reports, ...).
	bootinfo.user = frappe.get_user().load_user()
def add_home_page(bootinfo, docs):
	"""load home page"""
	# Guests get no desk home page.
	if frappe.session.user=="Guest":
		return
	home_page = frappe.db.get_default("desktop:home_page")
	try:
		page = frappe.desk.desk_page.get(home_page)
	except (frappe.DoesNotExistError, frappe.PermissionError):
		# fall back to the standard desktop page and discard the error
		# message frappe queued for the user
		frappe.message_log.pop()
		page = frappe.desk.desk_page.get('desktop')
	bootinfo['home_page'] = page.name
	docs.append(page)
def add_timezone_info(bootinfo):
	"""Attach momentjs timezone data for the system timezone to bootinfo."""
	import frappe.utils.momentjs
	system = bootinfo.sysdefaults.get("time_zone")
	bootinfo.timezone_info = {"zones":{}, "rules":{}, "links":{}}
	frappe.utils.momentjs.update(system, bootinfo.timezone_info)
def load_print(bootinfo, doclist):
	"""Append the Print Settings singleton to the doclist and load its CSS."""
	settings = frappe.db.get_singles_dict("Print Settings")
	settings.doctype = ":Print Settings"
	doclist.append(settings)
	load_print_css(bootinfo, settings)
def load_print_css(bootinfo, print_settings):
	# Render the stylesheet for the configured print style (default
	# "Modern"); for_legacy=True keeps older print formats working.
	bootinfo.print_css = frappe.get_attr("frappe.templates.pages.print.get_print_style")(print_settings.print_style or "Modern", for_legacy=True)
| mit |
Kiiv/CouchPotatoServer | couchpotato/core/media/movie/providers/trailer/youtube_dl/extractor/khanacademy.py | 21 | 2666 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
unified_strdate,
)
class KhanAcademyIE(InfoExtractor):
    """youtube-dl extractor for khanacademy.org.

    Handles single-video pages (URL key 'video') by delegating to the
    video's real URL, and topic pages by returning a playlist of the
    topic's video/sub-topic children.
    """
    _VALID_URL = r'^https?://(?:(?:www|api)\.)?khanacademy\.org/(?P<key>[^/]+)/(?:[^/]+/){,2}(?P<id>[^?#/]+)(?:$|[?#])'
    IE_NAME = 'KhanAcademy'
    _TESTS = [{
        'url': 'http://www.khanacademy.org/video/one-time-pad',
        'md5': '7021db7f2d47d4fff89b13177cb1e8f4',
        'info_dict': {
            'id': 'one-time-pad',
            'ext': 'mp4',
            'title': 'The one-time pad',
            'description': 'The perfect cipher',
            'duration': 176,
            'uploader': 'Brit Cruise',
            'upload_date': '20120411',
        }
    }, {
        'url': 'https://www.khanacademy.org/math/applied-math/cryptography',
        'info_dict': {
            'id': 'cryptography',
            'title': 'Journey into cryptography',
            'description': 'How have humans protected their secret messages through history? What has changed today?',
        },
        'playlist_mincount': 3,
    }]
    def _real_extract(self, url):
        m = re.match(self._VALID_URL, url)
        video_id = m.group('id')
        if m.group('key') == 'video':
            # Single video: fetch metadata from the v1 videos API and
            # hand off ('url_transparent') to the extractor for data['url'].
            data = self._download_json(
                'http://api.khanacademy.org/api/v1/videos/' + video_id,
                video_id, 'Downloading video info')
            upload_date = unified_strdate(data['date_added'])
            uploader = ', '.join(data['author_names'])
            return {
                '_type': 'url_transparent',
                'url': data['url'],
                'id': video_id,
                'title': data['title'],
                'thumbnail': data['image_url'],
                'duration': data['duration'],
                'description': data['description'],
                'uploader': uploader,
                'upload_date': upload_date,
            }
        else:
            # topic
            data = self._download_json(
                'http://api.khanacademy.org/api/v1/topic/' + video_id,
                video_id, 'Downloading topic info')
            # Only Video and Topic children become playlist entries.
            entries = [
                {
                    '_type': 'url',
                    'url': c['url'],
                    'id': c['id'],
                    'title': c['title'],
                }
                for c in data['children'] if c['kind'] in ('Video', 'Topic')]
            return {
                '_type': 'playlist',
                'id': video_id,
                'title': data['title'],
                'description': data['description'],
                'entries': entries,
            }
| gpl-3.0 |
bgris/ODL_bgris | lib/python3.5/site-packages/numpy/matlib.py | 161 | 9584 | from __future__ import division, absolute_import, print_function
import numpy as np
from numpy.matrixlib.defmatrix import matrix, asmatrix
# need * as we're copying the numpy namespace
from numpy import *
__version__ = np.__version__
__all__ = np.__all__[:] # copy numpy namespace
__all__ += ['rand', 'randn', 'repmat']
def empty(shape, dtype=None, order='C'):
    """Return a new matrix of given shape and type, without initializing
    entries.

    Parameters
    ----------
    shape : int or tuple of int
        Shape of the empty matrix.
    dtype : data-type, optional
        Desired output data-type.
    order : {'C', 'F'}, optional
        Row-major (C-style) or column-major (Fortran-style) memory layout.

    Returns
    -------
    out : matrix
        Matrix of uninitialized (arbitrary) data of the requested shape.

    See Also
    --------
    empty_like, zeros

    Notes
    -----
    Unlike `zeros`, this does not set the entries to zero and may be
    marginally faster, but the caller must fill in every value before use.
    """
    # Allocate directly via ndarray.__new__ so no fill pass happens.
    return ndarray.__new__(matrix, shape, dtype, order=order)
def ones(shape, dtype=None, order='C'):
    """Return a matrix of the given shape and type, filled with ones.

    Parameters
    ----------
    shape : {sequence of ints, int}
        Shape of the matrix.
    dtype : data-type, optional
        Desired data-type; defaults to np.float64.
    order : {'C', 'F'}, optional
        Row-major or column-major storage order (default 'C').

    Returns
    -------
    out : matrix
        Matrix of ones.

    Notes
    -----
    A scalar or length-one ``shape`` N yields a single-row matrix of
    shape ``(1, N)``.
    """
    out = ndarray.__new__(matrix, shape, dtype, order=order)
    out.fill(1)
    return out
def zeros(shape, dtype=None, order='C'):
    """Return a matrix of the given shape and type, filled with zeros.

    Parameters
    ----------
    shape : int or sequence of ints
        Shape of the matrix.
    dtype : data-type, optional
        Desired data-type; defaults to float.
    order : {'C', 'F'}, optional
        Row-major or column-major storage order (default 'C').

    Returns
    -------
    out : matrix
        Zero matrix.

    Notes
    -----
    A scalar or length-one ``shape`` N yields a single-row matrix of
    shape ``(1, N)``.
    """
    out = ndarray.__new__(matrix, shape, dtype, order=order)
    out.fill(0)
    return out
def identity(n, dtype=None):
    """Return the square identity matrix of the given size.

    Parameters
    ----------
    n : int
        Size (rows == columns) of the returned identity matrix.
    dtype : data-type, optional
        Output data-type; defaults to float.

    Returns
    -------
    out : matrix
        n x n matrix with ones on the main diagonal and zeros elsewhere.
    """
    # A flat pattern [1, 0, ..., 0] of length n+1 places a 1 on every
    # main-diagonal position when tiled into an n x n buffer.
    pattern = array([1] + n * [0], dtype=dtype)
    out = ndarray.__new__(matrix, (n, n), dtype)
    out.flat = pattern
    return out
def eye(n, M=None, k=0, dtype=float):
    """Return a matrix with ones on the k-th diagonal and zeros elsewhere.

    Parameters
    ----------
    n : int
        Number of rows.
    M : int, optional
        Number of columns; defaults to `n`.
    k : int, optional
        Diagonal index: 0 is the main diagonal, positive values select
        upper diagonals, negative values lower diagonals.
    dtype : dtype, optional
        Data-type of the returned matrix.

    Returns
    -------
    I : matrix
        n x M matrix that is zero everywhere except on diagonal `k`.
    """
    diagonal = np.eye(n, M, k, dtype)
    return asmatrix(diagonal)
def rand(*args):
    """Return a matrix of the given shape with uniform random values in [0, 1).

    Parameters
    ----------
    \\*args : Arguments
        Shape of the output, given either as N separate integers or as a
        single tuple.  When the first argument is a tuple, any further
        arguments are ignored.

    Returns
    -------
    out : matrix
        Matrix of samples from the uniform distribution over ``[0, 1)``.

    See Also
    --------
    randn, numpy.random.rand
    """
    shape = args[0] if isinstance(args[0], tuple) else args
    return asmatrix(np.random.rand(*shape))
def randn(*args):
    """Return a matrix of samples from the standard normal distribution.

    Entries are drawn from the univariate Gaussian with mean 0 and
    variance 1.  For samples from N(mu, sigma**2) use
    ``sigma * np.matlib.randn(...) + mu``.

    Parameters
    ----------
    \\*args : Arguments
        Shape of the output, given either as N separate integers or as a
        single tuple.  When the first argument is a tuple, any further
        arguments are ignored.

    Returns
    -------
    Z : matrix of floats
        Matrix of standard-normal samples.

    See Also
    --------
    rand, numpy.random.randn
    """
    shape = args[0] if isinstance(args[0], tuple) else args
    return asmatrix(np.random.randn(*shape))
def repmat(a, m, n):
    """Tile a 0-D to 2-D array or matrix m x n times.

    Parameters
    ----------
    a : array_like
        The array or matrix to be repeated.
    m, n : int
        Number of repetitions along the first and second axes.

    Returns
    -------
    out : ndarray
        `a` tiled into an ``(rows(a)*m, cols(a)*n)`` block layout.
    """
    a = asanyarray(a)
    # Normalize to a (rows, cols) view: scalars count as 1x1, 1-D arrays
    # as a single row.
    if a.ndim == 0:
        base_rows, base_cols = 1, 1
    elif a.ndim == 1:
        base_rows, base_cols = 1, a.shape[0]
    else:
        base_rows, base_cols = a.shape
    out_rows = base_rows * m
    out_cols = base_cols * n
    # Repeat the flattened data m times row-block-wise, reshape back to
    # rows of the original width, then repeat each row n times and fold
    # the extra copies into the final column count.
    tiled = a.reshape(1, a.size).repeat(m, 0).reshape(out_rows, base_cols).repeat(n, 0)
    return tiled.reshape(out_rows, out_cols)
| gpl-3.0 |
likesxuqiang/fabric | tests/test_context_managers.py | 20 | 8115 | from __future__ import with_statement
import os
import sys
from StringIO import StringIO
from nose.tools import eq_, ok_
from fabric.state import env, output
from fabric.context_managers import (cd, settings, lcd, hide, shell_env, quiet,
warn_only, prefix, path)
from fabric.operations import run, local, _prefix_commands
from utils import mock_streams, FabricTest
from server import server
#
# cd()
#
def test_error_handling():
    """
    cd cleans up after itself even in case of an exception
    """
    class Boom(Exception):
        pass
    try:
        with cd('somewhere'):
            raise Boom('Houston, we have a problem.')
    except Boom:
        pass
    finally:
        # env.cwd must have been restored before a new cd() block opens
        with cd('else'):
            eq_(env.cwd, 'else')
def test_cwd_with_absolute_paths():
    """
    cd() should append arg if non-absolute or overwrite otherwise
    """
    base = '/some/existing/path'
    relative = 'another'
    rooted = '/absolute/path'
    with settings(cwd=base):
        # absolute paths replace the current cwd wholesale
        with cd(rooted):
            eq_(env.cwd, rooted)
        # relative paths are joined onto the current cwd
        with cd(relative):
            eq_(env.cwd, base + '/' + relative)
def test_cd_home_dir():
    """
    cd() should work with home directories
    """
    target = "~/somepath"
    with cd(target):
        eq_(env.cwd, target)
def test_cd_nested_home_abs_dirs():
    """
    cd() should work with nested user homedir (starting with ~) paths.
    It should always take the last path if the new path begins with `/` or `~`
    """
    home = "~/somepath"
    absolute = "/some/random/path"
    relative = "some/random/path"
    # two nested homedir paths: the inner one wins outright
    with cd(home):
        eq_(env.cwd, home)
        deeper = home + "/another/path"
        with cd(deeper):
            eq_(env.cwd, deeper)
    # absolute path first, then a homedir path
    with cd(absolute):
        eq_(env.cwd, absolute)
        with cd(home):
            eq_(env.cwd, home)
    # relative path first, then a homedir path
    with cd(relative):
        eq_(env.cwd, relative)
        with cd(home):
            eq_(env.cwd, home)
    # homedir path first, then a relative path appended onto it
    with cd(home):
        eq_(env.cwd, home)
        with cd(relative):
            eq_(env.cwd, home + "/" + relative)
#
# prefix
#
def test_nested_prefix():
    """
    prefix context managers can be created outside of the with block and nested
    """
    outer = prefix('1')
    inner = prefix('2')
    with outer:
        with inner:
            # prefixes accumulate in entry order
            eq_(env.command_prefixes, ['1', '2'])
#
# cd prefix with dev/null
#
def test_cd_prefix():
    """
    cd prefix should direct output to /dev/null in case of CDPATH
    """
    target = "~/somepath"
    with cd(target):
        prefixed = _prefix_commands('foo', "remote")
        eq_(prefixed, 'cd %s >/dev/null && foo' % target)
# def test_cd_prefix_on_win32():
# """
# cd prefix should NOT direct output to /dev/null on win32
# """
# some_path = "~/somepath"
# import fabric
# try:
# fabric.state.win32 = True
# with cd(some_path):
# command_out = _prefix_commands('foo', "remote")
# eq_(command_out, 'cd %s && foo' % some_path)
# finally:
# fabric.state.win32 = False
#
# hide/show
#
def test_hide_show_exception_handling():
    """
    hide()/show() should clean up OK if exceptions are raised
    """
    try:
        with hide('stderr'):
            # inside the block stderr output is switched off
            eq_(output.stderr, False)
            raise Exception
    except Exception:
        # hide() must have restored the default (True) even though the
        # block exited via an exception
        eq_(output.stderr, True)
#
# settings()
#
def test_setting_new_env_dict_key_should_work():
    """
    Using settings() with a previously nonexistent key should work correctly
    """
    key = 'thisshouldnevereverexistseriouslynow'
    with settings(**{key: 'a winner is you'}):
        # the brand-new key exists inside the block...
        ok_(key in env)
    # ...and is removed again on exit
    ok_(key not in env)
def test_settings():
    """
    settings() should temporarily override env dict with given key/value pair
    """
    env.testval = "outer value"
    with settings(testval="inner value"):
        eq_(env.testval, "inner value")
    # the pre-existing value is restored once the block exits
    eq_(env.testval, "outer value")
def test_settings_with_multiple_kwargs():
    """
    settings() should temporarily override env dict with given key/value pairS
    """
    env.testval1 = "outer 1"
    env.testval2 = "outer 2"
    with settings(testval1="inner 1", testval2="inner 2"):
        eq_(env.testval1, "inner 1")
        eq_(env.testval2, "inner 2")
    # both overrides are reverted independently on exit
    eq_(env.testval1, "outer 1")
    eq_(env.testval2, "outer 2")
def test_settings_with_other_context_managers():
    """
    settings() should take other context managers, and use them with other overrided
    key/value pairs.
    """
    env.testval1 = "outer 1"
    prev_lcwd = env.lcwd
    with settings(lcd("here"), testval1="inner 1"):
        eq_(env.testval1, "inner 1")
        ok_(env.lcwd.endswith("here")) # Should be the side-effect of adding cd to settings
    # Bug fix: this previously read ok_(env.testval1, "outer 1"), which only
    # asserted truthiness ("outer 1" was nose's assertion *message*, not an
    # expected value), so the revert of testval1 was never actually checked.
    eq_(env.testval1, "outer 1")
    eq_(env.lcwd, prev_lcwd)
def test_settings_clean_revert():
    """
    settings(clean_revert=True) should only revert values matching input values
    """
    env.modified = "outer"
    env.notmodified = "outer"
    with settings(
        modified="inner",
        notmodified="inner",
        inner_only="only",
        clean_revert=True
    ):
        eq_(env.modified, "inner")
        eq_(env.notmodified, "inner")
        eq_(env.inner_only, "only")
        # mutate one key inside the block; clean_revert should see that it
        # no longer matches the value settings() installed and leave it be
        env.modified = "modified internally"
    eq_(env.modified, "modified internally")
    # keys introduced by the block are still removed
    ok_("inner_only" not in env)
#
# shell_env()
#
def test_shell_env():
    """
    shell_env() sets the shell_env attribute in the env dict
    """
    with shell_env(KEY="value"):
        eq_(env.shell_env['KEY'], 'value')
    # the shell environment overrides vanish once the block exits
    eq_(env.shell_env, {})
class TestQuietAndWarnOnly(FabricTest):
    """Integration tests for quiet()/warn_only() (and the equivalent run()
    kwargs) against the fake SSH server fixture."""
    @server()
    @mock_streams('both')
    def test_quiet_hides_all_output(self):
        # Sanity test - normally this is not empty
        run("ls /simple")
        ok_(sys.stdout.getvalue())
        # Reset
        sys.stdout = StringIO()
        # Real test
        with quiet():
            run("ls /simple")
        # Empty output
        ok_(not sys.stdout.getvalue())
        # Reset
        sys.stdout = StringIO()
        # Kwarg test
        run("ls /simple", quiet=True)
        ok_(not sys.stdout.getvalue())
    @server(responses={'barf': [
        "this is my stdout",
        "this is my stderr",
        1
    ]})
    def test_quiet_sets_warn_only_to_true(self):
        # Sanity test to ensure environment
        # (the fake 'barf' command exits 1; without warn_only this aborts)
        with settings(warn_only=False):
            with quiet():
                eq_(run("barf").return_code, 1)
            # Kwarg test
            eq_(run("barf", quiet=True).return_code, 1)
    @server(responses={'hrm': ["", "", 1]})
    @mock_streams('both')
    def test_warn_only_is_same_as_settings_warn_only(self):
        with warn_only():
            eq_(run("hrm").failed, True)
    @server()
    @mock_streams('both')
    def test_warn_only_does_not_imply_hide_everything(self):
        # unlike quiet(), warn_only() must leave normal output visible
        with warn_only():
            run("ls /simple")
        assert sys.stdout.getvalue().strip() != ""
# path() (distinct from shell_env)
class TestPathManager(FabricTest):
    """Tests for the path() context manager's effect on the $PATH that
    local() commands observe."""
    def setup(self):
        super(TestPathManager, self).setup()
        # remember the real $PATH so tests can compare against it
        self.real = os.environ.get('PATH')
    def via_local(self):
        # helper: the $PATH as seen by a shell spawned through local()
        with hide('everything'):
            return local("echo $PATH", capture=True)
    def test_lack_of_path_has_default_local_path(self):
        """
        No use of 'with path' == default local $PATH
        """
        eq_(self.real, self.via_local())
    def test_use_of_path_appends_by_default(self):
        """
        'with path' appends by default
        """
        with path('foo'):
            eq_(self.via_local(), self.real + ":foo")
| bsd-2-clause |
ATSTI/administra | open_myplace/boleto/boleto.py | 1 | 4123 | # -*- coding: utf-8 -*-
#################################################################################
# #
# Copyright (C) 2011 Vinicius Dittgen - PROGE, Leonardo Santagada - PROGE #
# #
#This program is free software: you can redistribute it and/or modify #
#it under the terms of the GNU Affero General Public License as published by #
#the Free Software Foundation, either version 3 of the License, or #
#(at your option) any later version. #
# #
#This program is distributed in the hope that it will be useful, #
#but WITHOUT ANY WARRANTY; without even the implied warranty of #
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
#GNU General Public License for more details. #
# #
#You should have received a copy of the GNU General Public License #
#along with this program. If not, see <http://www.gnu.org/licenses/>. #
#################################################################################
from openerp.osv import fields, osv
class boleto_partner_config(osv.osv):
    """Boleto Partner Configuration"""
    # Per-partner boleto defaults (legacy OpenERP osv API).
    _name = 'boleto.partner_config'
    _columns = {
        'name': fields.char('Name', size=20, required=True),
        # "carteira": the bank's portfolio/wallet code used on the boleto
        'carteira': fields.integer('Carteira', size=20, required=True),
        # 'juros': fields.float('Juros', digits=(1, 6)),
        # 'multa': fields.float('Multa', digits=(12, 6)),
        # free-form payment instructions printed on the slip
        'instrucoes': fields.text(u'Instruções'),
    }
boleto_partner_config()
class boleto_company_config(osv.osv):
    """Boleto Company Configuration.

    Bank account data of the issuing company (the "cedente").
    """
    _name = 'boleto.company_config'
    _columns = {
        'name': fields.char('Name', size=20, required=True),
        'banco': fields.selection([('bb', 'Banco do Brasil'), ('real', 'Banco Real'), ('bradesco', 'Banco Bradesco'), ('caixa', 'Banco Caixa Federal'),('sicredi', 'Sicredi'),('itau', 'Banco Itau')], 'Banco', required=True),
        # NOTE(review): agencia/conta/convenio are integer fields here but
        # char fields on boleto.boleto below; integer storage drops leading
        # zeros, which bank branch/account codes may carry -- confirm intended.
        'agencia_cedente': fields.integer('Agencia', size=6, required=True),
        'conta_cedente': fields.integer('Conta', size=8, required=True),
        'convenio': fields.integer(u'Convenio', size=8, required=True),
        # Running counter for the bank's "nosso numero" slip identifier.
        'nosso_numero': fields.integer(u'Nosso Número'),
    }
boleto_company_config()  # OpenERP 6/7-style model registration
class boleto_boleto(osv.osv):
    """Boleto.

    One generated payment slip; copies partner- and company-level
    configuration together with data taken from the invoice move line.
    """
    _name = 'boleto.boleto'
    _columns = {
        'name': fields.char('Name', size=20, required=True),
        # Customer-side settings (from boleto.partner_config).
        'carteira': fields.char('Carteira', size=10),
        # 'juros': fields.float('Juros', digits=(12, 6)),
        # 'multa': fields.float('Multa', digits=(12, 6)),
        'instrucoes': fields.text(u'Instruções'),
        # 'sacado' is the payer (drawee) of the slip.
        'sacado': fields.many2one('res.partner', 'Sacado'),
        # Company-side settings (from boleto.company_config).
        'banco': fields.selection([('bb', 'Banco do Brasil'), ('real', 'Banco Real'), ('bradesco', 'Banco Bradesco'), ('caixa', 'Banco Caixa Federal'),('sicredi', 'Sicredi'),('itau','Banco Itau')], 'Banco'),
        'agencia_cedente': fields.char('Agencia', size=6),
        'conta_cedente': fields.char('Conta', size=8),
        'convenio': fields.char('Convenio', size=8),
        'nosso_numero': fields.integer(u'Nosso Número'),
        'cedente': fields.many2one('res.company', 'Empresa'),
        # Invoice data.
        'move_line_id': fields.many2one('account.move.line', 'Move Line'),
        'data_vencimento': fields.date('Data do Vencimento'),
        'data_documento': fields.date('Data do Documento'),
        'data_processamento': fields.date('Data do Processamento'),
        'valor': fields.float('Valor', digits=(12, 6)),
        'numero_documento': fields.char(u'Número do Documento', size=20),
        'endereco': fields.char(u'Endereço', size=20),
    }
boleto_boleto()  # OpenERP 6/7-style model registration
| gpl-2.0 |
dol-sen/portage | pym/portage/tests/ebuild/test_ipc_daemon.py | 2 | 5393 | # Copyright 2010-2016 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import tempfile
import time
from portage import os
from portage import shutil
from portage import _python_interpreter
from portage.tests import TestCase
from portage.const import PORTAGE_BIN_PATH
from portage.const import PORTAGE_PYM_PATH
from portage.const import BASH_BINARY
from portage.locks import hardlock_cleanup
from portage.package.ebuild._ipc.ExitCommand import ExitCommand
from portage.util import ensure_dirs
from portage.util._async.ForkProcess import ForkProcess
from portage.util._async.TaskScheduler import TaskScheduler
from portage.util._eventloop.global_event_loop import global_event_loop
from _emerge.SpawnProcess import SpawnProcess
from _emerge.EbuildBuildDir import EbuildBuildDir
from _emerge.EbuildIpcDaemon import EbuildIpcDaemon
class SleepProcess(ForkProcess):
    """
    Emulate the sleep command, in order to ensure a consistent
    return code when it is killed by SIGTERM (see bug #437180).
    """
    __slots__ = ('seconds',)

    def _run(self):
        # Executed in the forked child: simply block for the configured
        # number of seconds, like /bin/sleep would.
        time.sleep(self.seconds)
class IpcDaemonTestCase(TestCase):
    """Exercise EbuildIpcDaemon over real fifos in a temporary build dir:
    first round-trip exit codes through ebuild-ipc, then verify that a
    deliberately short scheduler timeout cancels a long-running child.
    """

    _SCHEDULE_TIMEOUT = 40000  # 40 seconds

    def testIpcDaemon(self):
        event_loop = global_event_loop()
        tmpdir = tempfile.mkdtemp()
        build_dir = None
        try:
            env = {}

            # Pass along PORTAGE_USERNAME and PORTAGE_GRPNAME since they
            # need to be inherited by ebuild subprocesses.
            if 'PORTAGE_USERNAME' in os.environ:
                env['PORTAGE_USERNAME'] = os.environ['PORTAGE_USERNAME']
            if 'PORTAGE_GRPNAME' in os.environ:
                env['PORTAGE_GRPNAME'] = os.environ['PORTAGE_GRPNAME']

            env['PORTAGE_PYTHON'] = _python_interpreter
            env['PORTAGE_BIN_PATH'] = PORTAGE_BIN_PATH
            env['PORTAGE_PYM_PATH'] = PORTAGE_PYM_PATH
            env['PORTAGE_BUILDDIR'] = os.path.join(tmpdir, 'cat', 'pkg-1')
            env['PYTHONDONTWRITEBYTECODE'] = os.environ.get('PYTHONDONTWRITEBYTECODE', '')

            if "__PORTAGE_TEST_HARDLINK_LOCKS" in os.environ:
                env["__PORTAGE_TEST_HARDLINK_LOCKS"] = \
                    os.environ["__PORTAGE_TEST_HARDLINK_LOCKS"]

            build_dir = EbuildBuildDir(
                scheduler=event_loop,
                settings=env)
            build_dir.lock()
            ensure_dirs(env['PORTAGE_BUILDDIR'])

            # The daemon and the ebuild-ipc client talk over this fifo pair.
            input_fifo = os.path.join(env['PORTAGE_BUILDDIR'], '.ipc_in')
            output_fifo = os.path.join(env['PORTAGE_BUILDDIR'], '.ipc_out')
            os.mkfifo(input_fifo)
            os.mkfifo(output_fifo)

            # Round 1: each exit code sent by the bash client must arrive
            # at the daemon's ExitCommand unchanged.
            for exitcode in (0, 1, 2):
                exit_command = ExitCommand()
                commands = {'exit' : exit_command}
                daemon = EbuildIpcDaemon(commands=commands,
                    input_fifo=input_fifo,
                    output_fifo=output_fifo)
                proc = SpawnProcess(
                    args=[BASH_BINARY, "-c",
                    '"$PORTAGE_BIN_PATH"/ebuild-ipc exit %d' % exitcode],
                    env=env)
                task_scheduler = TaskScheduler(iter([daemon, proc]),
                    max_jobs=2, event_loop=event_loop)

                self.received_command = False
                def exit_command_callback():
                    # Stop the scheduler as soon as the command arrives.
                    self.received_command = True
                    task_scheduler.cancel()

                exit_command.reply_hook = exit_command_callback
                start_time = time.time()
                self._run(event_loop, task_scheduler, self._SCHEDULE_TIMEOUT)

                hardlock_cleanup(env['PORTAGE_BUILDDIR'],
                    remove_all_locks=True)

                self.assertEqual(self.received_command, True,
                    "command not received after %d seconds" % \
                    (time.time() - start_time,))
                self.assertEqual(proc.isAlive(), False)
                self.assertEqual(daemon.isAlive(), False)
                self.assertEqual(exit_command.exitcode, exitcode)

            # Intentionally short timeout test for EventLoop/AsyncScheduler.
            # Use a ridiculously long sleep_time_s in case the user's
            # system is heavily loaded (see bug #436334).
            sleep_time_s = 600  # 600.000 seconds
            short_timeout_ms = 10  # 0.010 seconds

            # Round 2: the 10ms timeout must fire before any command is
            # received, and the sleeping child must be terminated.
            for i in range(3):
                exit_command = ExitCommand()
                commands = {'exit' : exit_command}
                daemon = EbuildIpcDaemon(commands=commands,
                    input_fifo=input_fifo,
                    output_fifo=output_fifo)
                proc = SleepProcess(seconds=sleep_time_s)
                task_scheduler = TaskScheduler(iter([daemon, proc]),
                    max_jobs=2, event_loop=event_loop)

                self.received_command = False
                def exit_command_callback():
                    self.received_command = True
                    task_scheduler.cancel()

                exit_command.reply_hook = exit_command_callback
                start_time = time.time()
                self._run(event_loop, task_scheduler, short_timeout_ms)

                hardlock_cleanup(env['PORTAGE_BUILDDIR'],
                    remove_all_locks=True)

                self.assertEqual(self.received_command, False,
                    "command received after %d seconds" % \
                    (time.time() - start_time,))
                self.assertEqual(proc.isAlive(), False)
                self.assertEqual(daemon.isAlive(), False)
                # Killed by cancellation, so it must not report success.
                self.assertEqual(proc.returncode == os.EX_OK, False)
        finally:
            if build_dir is not None:
                build_dir.unlock()
            shutil.rmtree(tmpdir)

    def _timeout_callback(self, task_scheduler):
        # Scheduler ran too long: cancel it and release _run().
        task_scheduler.cancel()
        self._exit_callback(task_scheduler)

    def _exit_callback(self, task_scheduler):
        # Resolve the future exactly once, whether we get here via normal
        # exit or via the timeout path above.
        if not self._run_done.done():
            self._run_done.set_result(True)

    def _run(self, event_loop, task_scheduler, timeout):
        """Run *task_scheduler* until it exits or *timeout* (ms) elapses."""
        self._run_done = event_loop.create_future()
        timeout_id = event_loop.timeout_add(timeout,
            self._timeout_callback, task_scheduler)
        task_scheduler.addExitListener(self._exit_callback)
        try:
            task_scheduler.start()
            event_loop.run_until_complete(self._run_done)
            task_scheduler.wait()
        finally:
            # Always detach the timer so it cannot fire for a later run.
            event_loop.source_remove(timeout_id)
| gpl-2.0 |
Lekensteyn/buildbot | master/buildbot/test/unit/test_www_hooks_poller.py | 5 | 5071 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from twisted.internet import defer
from twisted.trial import unittest
import buildbot.www.change_hook as change_hook
from buildbot import util
from buildbot.changes import base
from buildbot.changes.manager import ChangeManager
from buildbot.test.fake import fakemaster
from buildbot.test.fake.web import FakeRequest
class TestPollingChangeHook(unittest.TestCase):
    """Tests for the /change_hook/poller web hook: triggering pollers by
    name, rejecting unknown/non-poller names, and the 'allowed' option.
    """

    class Subclass(base.PollingChangeSource):
        # Stub poller that records whether poll() ran instead of
        # contacting anything external.
        pollInterval = None
        called = False

        def poll(self):
            self.called = True

    @defer.inlineCallbacks
    def setUpRequest(self, args, options=True, activate=True):
        """Render a fake GET /change_hook/poller request with *args*
        against a fake master that hosts two stub pollers ('example',
        'otherpoller') and one non-poller change source ('notapoller').
        """
        self.request = FakeRequest(args=args)
        self.request.uri = b"/change_hook/poller"
        self.request.method = b"GET"
        www = self.request.site.master.www
        self.master = master = self.request.site.master = fakemaster.make_master(
            testcase=self, wantData=True)
        master.www = www
        yield self.master.startService()
        self.changeHook = change_hook.ChangeHookResource(
            dialects={'poller': options}, master=master)
        master.change_svc = ChangeManager()
        yield master.change_svc.setServiceParent(master)
        self.changesrc = self.Subclass("example", 21)
        yield self.changesrc.setServiceParent(master.change_svc)
        self.otherpoller = self.Subclass("otherpoller", 22)
        yield self.otherpoller.setServiceParent(master.change_svc)
        anotherchangesrc = base.ChangeSource(name='notapoller')
        anotherchangesrc.setName(u"notapoller")
        yield anotherchangesrc.setServiceParent(master.change_svc)
        yield self.request.test_render(self.changeHook)
        # Give the triggered poll() calls a moment to run.
        yield util.asyncSleep(0.1)

    def tearDown(self):
        return self.master.stopService()

    @defer.inlineCallbacks
    def test_no_args(self):
        """With no arguments, every poller is triggered."""
        yield self.setUpRequest({})
        self.assertEqual(self.request.written, b"no changes found")
        self.assertEqual(self.changesrc.called, True)
        self.assertEqual(self.otherpoller.called, True)

    @defer.inlineCallbacks
    def test_no_poller(self):
        """An unknown poller name yields a 400 and triggers nothing."""
        yield self.setUpRequest({"poller": ["nosuchpoller"]})
        expected = b"Could not find pollers: nosuchpoller"
        self.assertEqual(self.request.written, expected)
        self.request.setResponseCode.assert_called_with(400, expected)
        self.assertEqual(self.changesrc.called, False)
        self.assertEqual(self.otherpoller.called, False)

    @defer.inlineCallbacks
    def test_invalid_poller(self):
        """A change source that is not a poller is rejected like an
        unknown name."""
        yield self.setUpRequest({"poller": ["notapoller"]})
        expected = b"Could not find pollers: notapoller"
        self.assertEqual(self.request.written, expected)
        self.request.setResponseCode.assert_called_with(400, expected)
        self.assertEqual(self.changesrc.called, False)
        self.assertEqual(self.otherpoller.called, False)

    @defer.inlineCallbacks
    def test_trigger_poll(self):
        """Naming one poller triggers exactly that poller."""
        yield self.setUpRequest({"poller": ["example"]})
        self.assertEqual(self.request.written, b"no changes found")
        self.assertEqual(self.changesrc.called, True)
        self.assertEqual(self.otherpoller.called, False)

    @defer.inlineCallbacks
    def test_allowlist_deny(self):
        """A poller outside the 'allowed' option cannot be triggered."""
        yield self.setUpRequest({"poller": ["otherpoller"]}, options={"allowed": ["example"]})
        expected = b"Could not find pollers: otherpoller"
        self.assertEqual(self.request.written, expected)
        self.request.setResponseCode.assert_called_with(400, expected)
        self.assertEqual(self.changesrc.called, False)
        self.assertEqual(self.otherpoller.called, False)

    @defer.inlineCallbacks
    def test_allowlist_allow(self):
        """A poller inside the 'allowed' option can be triggered by name."""
        yield self.setUpRequest({"poller": ["example"]}, options={"allowed": ["example"]})
        self.assertEqual(self.request.written, b"no changes found")
        self.assertEqual(self.changesrc.called, True)
        self.assertEqual(self.otherpoller.called, False)

    @defer.inlineCallbacks
    def test_allowlist_all(self):
        """With no explicit name, only allowed pollers are triggered."""
        yield self.setUpRequest({}, options={"allowed": ["example"]})
        self.assertEqual(self.request.written, b"no changes found")
        self.assertEqual(self.changesrc.called, True)
        self.assertEqual(self.otherpoller.called, False)
| gpl-2.0 |
yoer/hue | desktop/core/ext-py/tablib-0.10.0/tablib/packages/omnijson/core.py | 63 | 1863 | # -*- coding: utf-8 -*-
"""
omijson.core
~~~~~~~~~~~~
This module provides the core omnijson functionality.
"""
import sys
# Currently-selected engine name (e.g. 'simplejson') and its resolved
# [loads-callable, dumps-callable, exception-tuple] triple; both are
# populated by the selection loop at the bottom of this module.
engine = None
_engine = None

# Candidate engines in priority order. Each row is:
#   [module name, load attribute, dump attribute, exceptions raised on
#    bad input by that engine]
# A '<mod>_from_<package>' name means "import <package>, take <mod>".
options = [
    ['ujson', 'loads', 'dumps', (ValueError,)],
    ['yajl', 'loads', 'dumps', (TypeError, ValueError)],
    ['jsonlib2', 'read', 'write', (ValueError,)],
    ['jsonlib', 'read', 'write', (ValueError,)],
    ['simplejson', 'loads', 'dumps', (TypeError, ValueError)],
    ['json', 'loads', 'dumps', (TypeError, ValueError)],
    ['simplejson_from_packages', 'loads', 'dumps', (ValueError,)],
]
def _import(engine):
    """Try to import *engine*; return the module, or False if unavailable.

    Supports the '<mod>_from_<package>' spelling used in ``options`` by
    importing <package> and pulling <mod> off it.
    """
    try:
        if '_from_' in engine:
            engine, package = engine.split('_from_')
            # NOTE(review): level=-1 (implicit relative-then-absolute
            # import) only exists on Python 2; this branch would raise
            # ValueError on Python 3 -- confirm the intended support matrix.
            m = __import__(package, globals(), locals(), [engine], -1)
            return getattr(m, engine)
        return __import__(engine)
    except ImportError:
        return False
def loads(s, **kwargs):
    """Loads JSON object.

    Engine-specific parse errors are normalized to :class:`JSONError`;
    any other exception is re-raised unchanged.
    """
    try:
        return _engine[0](s)
    except:
        # crazy 2/3 exception hack
        # http://www.voidspace.org.uk/python/weblog/arch_d7_2010_03_20.shtml
        # (sys.exc_info() is used instead of "except X as e" so the same
        # source parses on both Python 2 and Python 3.)
        ExceptionClass, why = sys.exc_info()[:2]
        if any([(issubclass(ExceptionClass, e)) for e in _engine[2]]):
            raise JSONError(why)
        else:
            raise why
def dumps(o, **kwargs):
    """Dumps JSON object.

    Engine-specific serialization errors are normalized to
    :class:`JSONError`; any other exception is re-raised unchanged.
    """
    try:
        return _engine[1](o)
    except:
        # Same 2/3-compatible exception handling as loads() above.
        ExceptionClass, why = sys.exc_info()[:2]
        if any([(issubclass(ExceptionClass, e)) for e in _engine[2]]):
            raise JSONError(why)
        else:
            raise why
class JSONError(ValueError):
    """JSON Failed: raised when the active engine rejects the input."""
# ------
# Magic!
# ------

# Pick the first importable engine from ``options`` and resolve its
# load/dump attribute-name strings into bound callables.
for e in options:
    __engine = _import(e[0])
    if __engine:
        engine, _engine = e[0], e[1:4]
        for i in (0, 1):
            _engine[i] = getattr(__engine, _engine[i])
        break
# NOTE(review): if no candidate imports, _engine stays None and
# loads()/dumps() will fail with TypeError instead of a clear ImportError.
| apache-2.0 |
nanolearningllc/edx-platform-cypress | lms/djangoapps/ccx/tests/test_models.py | 45 | 8622 | """
tests for the models
"""
from datetime import datetime, timedelta
from django.utils.timezone import UTC
from mock import patch
from nose.plugins.attrib import attr
from student.roles import CourseCcxCoachRole # pylint: disable=import-error
from student.tests.factories import ( # pylint: disable=import-error
AdminFactory,
)
from util.tests.test_date_utils import fake_ugettext
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import (
CourseFactory,
check_mongo_calls
)
from .factories import (
CcxFactory,
)
from ..overrides import override_field_for_ccx
@attr('shard_1')
class TestCCX(ModuleStoreTestCase):
    """Unit tests for the CustomCourseForEdX model.

    Covers the course/start/due properties (including their per-instance
    caching, verified via mongo call counts), the has_started/has_ended
    predicates, and the localized start/end datetime text helpers.
    """

    def setUp(self):
        """common setup for all tests"""
        super(TestCCX, self).setUp()
        self.course = course = CourseFactory.create()
        coach = AdminFactory.create()
        role = CourseCcxCoachRole(course.id)
        role.add_users(coach)
        self.ccx = CcxFactory(course_id=course.id, coach=coach)

    def set_ccx_override(self, field, value):
        """Create a field override for the test CCX on <field> with <value>"""
        override_field_for_ccx(self.ccx, self.course, field, value)

    def test_ccx_course_is_correct_course(self):
        """verify that the course property of a ccx returns the right course"""
        expected = self.course
        actual = self.ccx.course
        self.assertEqual(expected, actual)

    def test_ccx_course_caching(self):
        """verify that caching the propery works to limit queries"""
        with check_mongo_calls(1):
            # these statements are used entirely to demonstrate the
            # instance-level caching of these values on CCX objects. The
            # check_mongo_calls context is the point here.
            self.ccx.course  # pylint: disable=pointless-statement
        with check_mongo_calls(0):
            self.ccx.course  # pylint: disable=pointless-statement

    def test_ccx_start_is_correct(self):
        """verify that the start datetime for a ccx is correctly retrieved

        Note that after setting the start field override microseconds are
        truncated, so we can't do a direct comparison between before and after.
        For this reason we test the difference between and make sure it is less
        than one second.
        """
        expected = datetime.now(UTC())
        self.set_ccx_override('start', expected)
        actual = self.ccx.start  # pylint: disable=no-member
        diff = expected - actual
        self.assertTrue(abs(diff.total_seconds()) < 1)

    def test_ccx_start_caching(self):
        """verify that caching the start property works to limit queries"""
        now = datetime.now(UTC())
        self.set_ccx_override('start', now)
        with check_mongo_calls(1):
            # these statements are used entirely to demonstrate the
            # instance-level caching of these values on CCX objects. The
            # check_mongo_calls context is the point here.
            self.ccx.start  # pylint: disable=pointless-statement, no-member
        with check_mongo_calls(0):
            self.ccx.start  # pylint: disable=pointless-statement, no-member

    def test_ccx_due_without_override(self):
        """verify that due returns None when the field has not been set"""
        actual = self.ccx.due  # pylint: disable=no-member
        self.assertIsNone(actual)

    def test_ccx_due_is_correct(self):
        """verify that the due datetime for a ccx is correctly retrieved"""
        expected = datetime.now(UTC())
        self.set_ccx_override('due', expected)
        actual = self.ccx.due  # pylint: disable=no-member
        # As with 'start', microseconds are truncated by the override, so
        # compare with a one second tolerance.
        diff = expected - actual
        self.assertTrue(abs(diff.total_seconds()) < 1)

    def test_ccx_due_caching(self):
        """verify that caching the due property works to limit queries"""
        expected = datetime.now(UTC())
        self.set_ccx_override('due', expected)
        with check_mongo_calls(1):
            # these statements are used entirely to demonstrate the
            # instance-level caching of these values on CCX objects. The
            # check_mongo_calls context is the point here.
            self.ccx.due  # pylint: disable=pointless-statement, no-member
        with check_mongo_calls(0):
            self.ccx.due  # pylint: disable=pointless-statement, no-member

    def test_ccx_has_started(self):
        """verify that a ccx marked as starting yesterday has started"""
        now = datetime.now(UTC())
        delta = timedelta(1)
        then = now - delta
        self.set_ccx_override('start', then)
        self.assertTrue(self.ccx.has_started())  # pylint: disable=no-member

    def test_ccx_has_not_started(self):
        """verify that a ccx marked as starting tomorrow has not started"""
        now = datetime.now(UTC())
        delta = timedelta(1)
        then = now + delta
        self.set_ccx_override('start', then)
        self.assertFalse(self.ccx.has_started())  # pylint: disable=no-member

    def test_ccx_has_ended(self):
        """verify that a ccx that has a due date in the past has ended"""
        now = datetime.now(UTC())
        delta = timedelta(1)
        then = now - delta
        self.set_ccx_override('due', then)
        self.assertTrue(self.ccx.has_ended())  # pylint: disable=no-member

    def test_ccx_has_not_ended(self):
        """verify that a ccx that has a due date in the future has not eneded
        """
        now = datetime.now(UTC())
        delta = timedelta(1)
        then = now + delta
        self.set_ccx_override('due', then)
        self.assertFalse(self.ccx.has_ended())  # pylint: disable=no-member

    def test_ccx_without_due_date_has_not_ended(self):
        """verify that a ccx without a due date has not ended"""
        self.assertFalse(self.ccx.has_ended())  # pylint: disable=no-member

    # ensure that the expected localized format will be found by the i18n
    # service
    @patch('util.date_utils.ugettext', fake_ugettext(translations={
        "SHORT_DATE_FORMAT": "%b %d, %Y",
    }))
    def test_start_datetime_short_date(self):
        """verify that the start date for a ccx formats properly by default"""
        start = datetime(2015, 1, 1, 12, 0, 0, tzinfo=UTC())
        expected = "Jan 01, 2015"
        self.set_ccx_override('start', start)
        actual = self.ccx.start_datetime_text()  # pylint: disable=no-member
        self.assertEqual(expected, actual)

    @patch('util.date_utils.ugettext', fake_ugettext(translations={
        "DATE_TIME_FORMAT": "%b %d, %Y at %H:%M",
    }))
    def test_start_datetime_date_time_format(self):
        """verify that the DATE_TIME format also works as expected"""
        start = datetime(2015, 1, 1, 12, 0, 0, tzinfo=UTC())
        expected = "Jan 01, 2015 at 12:00 UTC"
        self.set_ccx_override('start', start)
        actual = self.ccx.start_datetime_text('DATE_TIME')  # pylint: disable=no-member
        self.assertEqual(expected, actual)

    @patch('util.date_utils.ugettext', fake_ugettext(translations={
        "SHORT_DATE_FORMAT": "%b %d, %Y",
    }))
    def test_end_datetime_short_date(self):
        """verify that the end date for a ccx formats properly by default"""
        end = datetime(2015, 1, 1, 12, 0, 0, tzinfo=UTC())
        expected = "Jan 01, 2015"
        self.set_ccx_override('due', end)
        actual = self.ccx.end_datetime_text()  # pylint: disable=no-member
        self.assertEqual(expected, actual)

    @patch('util.date_utils.ugettext', fake_ugettext(translations={
        "DATE_TIME_FORMAT": "%b %d, %Y at %H:%M",
    }))
    def test_end_datetime_date_time_format(self):
        """verify that the DATE_TIME format also works as expected"""
        end = datetime(2015, 1, 1, 12, 0, 0, tzinfo=UTC())
        expected = "Jan 01, 2015 at 12:00 UTC"
        self.set_ccx_override('due', end)
        actual = self.ccx.end_datetime_text('DATE_TIME')  # pylint: disable=no-member
        self.assertEqual(expected, actual)

    @patch('util.date_utils.ugettext', fake_ugettext(translations={
        "DATE_TIME_FORMAT": "%b %d, %Y at %H:%M",
    }))
    def test_end_datetime_no_due_date(self):
        """verify that without a due date, the end date is an empty string"""
        expected = ''
        actual = self.ccx.end_datetime_text()  # pylint: disable=no-member
        self.assertEqual(expected, actual)
        actual = self.ccx.end_datetime_text('DATE_TIME')  # pylint: disable=no-member
        self.assertEqual(expected, actual)
| agpl-3.0 |
VladimirTyrin/letsencrypt | acme/acme/errors.py | 10 | 2302 | """ACME errors."""
from acme.jose import errors as jose_errors
# Exception hierarchy: Error roots all ACME failures; ClientError covers
# network-level problems and is the base for the nonce errors below.
class Error(Exception):
    """Generic ACME error."""


class SchemaValidationError(jose_errors.DeserializationError):
    """JSON schema ACME object validation error."""


class ClientError(Error):
    """Network error."""


class UnexpectedUpdate(ClientError):
    """Unexpected update error."""


class NonceError(ClientError):
    """Server response nonce error."""
class BadNonce(NonceError):
    """Signals that a server-supplied replay nonce failed verification.

    :ivar nonce: the offending nonce value
    :ivar error: the underlying verification error
    """

    def __init__(self, nonce, error, *args, **kwargs):
        self.nonce = nonce
        self.error = error
        super(BadNonce, self).__init__(*args, **kwargs)

    def __str__(self):
        return 'Invalid nonce ({0!r}): {1}'.format(self.nonce, self.error)
class MissingNonce(NonceError):
    """Raised when a POST response lacks the Replay-Nonce header.

    According to the specification an "ACME server MUST include an
    Replay-Nonce header field in each successful response to a POST it
    provides to a client (...)".

    :ivar requests.Response response: HTTP Response
    """

    def __init__(self, response, *args, **kwargs):
        self.response = response
        super(MissingNonce, self).__init__(*args, **kwargs)

    def __str__(self):
        method = self.response.request.method
        headers = self.response.headers
        return 'Server {0} response did not include a replay nonce, headers: {1}'.format(
            method, headers)
class PollError(ClientError):
    """Generic error when polling for authorization fails.

    This might be caused by either timeout (`waiting` will be non-empty)
    or by some authorization being invalid.

    :ivar waiting: Priority queue with `datetime.datetime` (based on
        ``Retry-After``) as key, and original `.AuthorizationResource`
        as value.
    :ivar updated: Mapping from original `.AuthorizationResource`
        to the most recently updated one
    """

    def __init__(self, waiting, updated):
        super(PollError, self).__init__()
        self.waiting = waiting
        self.updated = updated

    @property
    def timeout(self):
        """Was the error caused by timeout?"""
        return bool(self.waiting)

    def __repr__(self):
        return '{0}(waiting={1!r}, updated={2!r})'.format(
            type(self).__name__, self.waiting, self.updated)
| apache-2.0 |
yavuzovski/playground | python/django/RESTTest/.venv/lib/python3.4/site-packages/django/contrib/gis/geos/prepared.py | 180 | 1575 | from .base import GEOSBase
from .prototypes import prepared as capi
class PreparedGeometry(GEOSBase):
    """
    A geometry that is prepared for performing certain operations.
    At the moment this includes the contains, covers, and intersects
    operations.
    """
    ptr_type = capi.PREPGEOM_PTR
    destructor = capi.prepared_destroy

    def __init__(self, geom):
        # Keeping a reference to the original geometry object to prevent it
        # from being garbage collected which could then crash the prepared one
        # See #21662
        self._base_geom = geom
        # Local import -- presumably avoids a circular import with
        # .geometry; confirm before moving to module level.
        from .geometry import GEOSGeometry
        if not isinstance(geom, GEOSGeometry):
            # Previously a bare ``raise TypeError``; include a message so
            # callers can tell what was rejected.
            raise TypeError('A GEOSGeometry instance is required, got %r.' % type(geom))
        self.ptr = capi.geos_prepare(geom.ptr)

    def contains(self, other):
        """Return True if this prepared geometry contains *other*."""
        return capi.prepared_contains(self.ptr, other.ptr)

    def contains_properly(self, other):
        """Return True if *other* lies in the interior of this geometry."""
        return capi.prepared_contains_properly(self.ptr, other.ptr)

    def covers(self, other):
        """Return True if this prepared geometry covers *other*."""
        return capi.prepared_covers(self.ptr, other.ptr)

    def intersects(self, other):
        """Return True if this prepared geometry intersects *other*."""
        return capi.prepared_intersects(self.ptr, other.ptr)

    def crosses(self, other):
        """Return True if this prepared geometry crosses *other*."""
        return capi.prepared_crosses(self.ptr, other.ptr)

    def disjoint(self, other):
        """Return True if this prepared geometry is disjoint from *other*."""
        return capi.prepared_disjoint(self.ptr, other.ptr)

    def overlaps(self, other):
        """Return True if this prepared geometry overlaps *other*."""
        return capi.prepared_overlaps(self.ptr, other.ptr)

    def touches(self, other):
        """Return True if this prepared geometry touches *other*."""
        return capi.prepared_touches(self.ptr, other.ptr)

    def within(self, other):
        """Return True if this prepared geometry is within *other*."""
        return capi.prepared_within(self.ptr, other.ptr)
| gpl-3.0 |
GaetanCambier/CouchPotatoServer | couchpotato/core/media/movie/providers/trailer/youtube_dl/downloader/mplayer.py | 18 | 1555 | from __future__ import unicode_literals
import os
import subprocess
from .common import FileDownloader
from ..compat import compat_subprocess_get_DEVNULL
from ..utils import (
encodeFilename,
)
class MplayerFD(FileDownloader):
    """Downloader that shells out to mplayer for MMS/RTSP streams."""

    def real_download(self, filename, info_dict):
        url = info_dict['url']
        self.report_destination(filename)
        # Dump to a temp name first; renamed to *filename* only on success.
        tmpfilename = self.temp_name(filename)

        args = [
            'mplayer', '-really-quiet', '-vo', 'null', '-vc', 'dummy',
            '-dumpstream', '-dumpfile', tmpfilename, url]
        # Check for mplayer first
        try:
            subprocess.call(
                ['mplayer', '-h'],
                stdout=compat_subprocess_get_DEVNULL(), stderr=subprocess.STDOUT)
        except (OSError, IOError):
            # mplayer is not installed / not runnable.
            self.report_error('MMS or RTSP download detected but "%s" could not be run' % args[0])
            return False

        # Download using mplayer.
        retval = subprocess.call(args)
        if retval == 0:
            fsize = os.path.getsize(encodeFilename(tmpfilename))
            self.to_screen('\r[%s] %s bytes' % (args[0], fsize))
            self.try_rename(tmpfilename, filename)
            # Report completion to any registered progress hooks.
            self._hook_progress({
                'downloaded_bytes': fsize,
                'total_bytes': fsize,
                'filename': filename,
                'status': 'finished',
            })
            return True
        else:
            self.to_stderr('\n')
            self.report_error('mplayer exited with code %d' % retval)
            return False
| gpl-3.0 |
quixey/scrapy-cluster | crawler/tests/tests_online.py | 1 | 3938 | '''
Online link spider test
'''
import unittest
from unittest import TestCase
import time
import sys
from os import path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
import scrapy
import redis
from redis.exceptions import ConnectionError
import json
import threading, time
from crawling.spiders.link_spider import LinkSpider
from scrapy.utils.project import get_project_settings
from twisted.internet import reactor
from scrapy.crawler import CrawlerRunner
from kafka import KafkaClient, SimpleConsumer
class CustomSpider(LinkSpider):
    '''
    Overridden link spider for testing
    '''
    # This name is baked into the redis queue key and the pickled
    # request fixture used by the online test.
    name = "test-spider"
class TestLinkSpider(TestCase):
    """Online test: feed one pickled crawl request through redis, run the
    spider under the twisted reactor, and expect exactly one crawled
    message on the kafka firehose topic. Requires live redis and kafka.
    """

    # Appears to be a pickle (protocol 2, note the \x80\x02 magic) of a
    # crawl request for crawlid '01234567890abcdefghijklmn' / appid
    # 'testapp' targeting www.istresearch.com.
    example_feed = "\x80\x02}q\x00(X\x0f\x00\x00\x00allowed_domainsq\x01NX"\
        "\x0b\x00\x00\x00allow_regexq\x02NX\a\x00\x00\x00crawlidq\x03X\x19"\
        "\x00\x00\x0001234567890abcdefghijklmnq\x04X\x03\x00\x00\x00urlq\x05X"\
        "\x13\x00\x00\x00www.istresearch.comq\x06X\a\x00\x00\x00expiresq\aK"\
        "\x00X\b\x00\x00\x00priorityq\bK\x01X\n\x00\x00\x00deny_regexq\tNX\b"\
        "\x00\x00\x00spideridq\nX\x0b\x00\x00\x00test-spiderq\x0bX\x05\x00"\
        "\x00\x00attrsq\x0cNX\x05\x00\x00\x00appidq\rX\a\x00\x00\x00testappq"\
        "\x0eX\x06\x00\x00\x00cookieq\x0fNX\t\x00\x00\x00useragentq\x10NX\x0f"\
        "\x00\x00\x00deny_extensionsq\x11NX\b\x00\x00\x00maxdepthq\x12K\x00u."

    def setUp(self):
        self.settings = get_project_settings()
        self.settings.set('KAFKA_TOPIC_PREFIX', "demo_test")
        # set up redis
        self.redis_conn = redis.Redis(host=self.settings['REDIS_HOST'],
                                      port=self.settings['REDIS_PORT'])
        try:
            self.redis_conn.info()
        except ConnectionError:
            print "Could not connect to Redis"
            # plugin is essential to functionality
            sys.exit(1)

        # clear out older test keys if any
        keys = self.redis_conn.keys("test-spider:*")
        for key in keys:
            self.redis_conn.delete(key)

        # set up kafka to consumer potential result
        self.kafka_conn = KafkaClient(self.settings['KAFKA_HOSTS'])
        self.kafka_conn.ensure_topic_exists("demo_test.crawled_firehose")
        self.consumer = SimpleConsumer(
            self.kafka_conn,
            "demo-id",
            "demo_test.crawled_firehose",
            buffer_size=1024*100,
            fetch_size_bytes=1024*100,
            max_buffer_size=None
        )
        # move cursor to end of kafka topic
        self.consumer.seek(0, 2)

    def test_crawler_process(self):
        runner = CrawlerRunner(self.settings)
        d = runner.crawl(CustomSpider)
        d.addBoth(lambda _: reactor.stop())

        # add crawl to redis
        key = "test-spider:istresearch.com:queue"
        self.redis_conn.zadd(key, self.example_feed, -99)

        # run the spider, give 20 seconds to see the url, crawl it,
        # and send to kafka. Then we kill the reactor
        # NOTE(review): both the crawl deferred and this watchdog thread
        # call reactor.stop(); if the crawl finishes first the second stop
        # may raise ReactorNotRunning -- confirm this is tolerated.
        def thread_func():
            time.sleep(20)
            reactor.stop()

        thread = threading.Thread(target=thread_func)
        thread.start()

        reactor.run()

        # ensure it was sent out to kafka
        message_count = 0
        for message in self.consumer.get_messages():
            if message is None:
                break
            else:
                the_dict = json.loads(message.message.value)
                if the_dict is not None and the_dict['appid'] == 'testapp' \
                        and the_dict['crawlid'] == '01234567890abcdefghijklmn':
                    message_count += 1

        self.assertEquals(message_count, 1)

    def tearDown(self):
        # Remove both the queue keys and the crawl stats keys this run made.
        keys = self.redis_conn.keys('stats:crawler:*:test-spider:*')
        keys = keys + self.redis_conn.keys('test-spider:*')
        for key in keys:
            self.redis_conn.delete(key)

if __name__ == '__main__':
    unittest.main()
IllusionRom-deprecated/android_platform_external_chromium_org | chrome/common/extensions/docs/server2/update_server.py | 152 | 2046 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import getpass
import os
import subprocess
import sys
import build_server
if __name__ == '__main__':
    # Locate appcfg.py: either passed explicitly as the first argument,
    # or discovered in a list of conventional App Engine SDK locations.
    additional_args = []

    if len(sys.argv) > 1 and sys.argv[1].endswith('appcfg.py'):
        appcfg_path = sys.argv[1]
        additional_args = sys.argv[2:]
    else:
        appcfg_path = None
        additional_args = sys.argv[1:]
        for path in ['.',
                     os.path.join(os.environ['HOME'], 'local', 'google_appengine'),
                     os.path.join(os.environ['HOME'], 'google_appengine'),
                     os.getcwd()] + sys.path:
            full_path = os.path.join(path, 'appcfg.py')
            if os.path.exists(full_path):
                appcfg_path = full_path
                break

    if appcfg_path is None:
        print 'appcfg.py could not be found in default paths.'
        print 'usage: update_server.py <path_to_appcfg.py> <appcfg_options>'
        exit(1)

    def run_appcfg():
        # Deploy the directory containing this script.
        server2_path = os.path.dirname(sys.argv[0])
        subprocess.call([appcfg_path, 'update', server2_path] + additional_args)

    build_server.main()

    username = raw_input(
        'Update github username/password (empty to skip)? ')
    if username:
        password = getpass.getpass()
        with open('github_file_system.py') as f:
            contents = f.read()
        # The credentials are spliced into github_file_system.py only for
        # the duration of the deploy; the finally block restores the
        # original file afterwards.
        if 'USERNAME = None' not in contents:
            print 'Error: Can\'t find "USERNAME = None" in github_file_system.py.'
            exit(1)
        if 'PASSWORD = None' not in contents:
            print 'Error: Can\'t find "PASSWORD = None" in github_file_system.py.'
            exit(1)
        try:
            with open('github_file_system.py', 'w+') as f:
                f.write(
                    contents.replace('PASSWORD = None', 'PASSWORD = \'%s\'' % password)
                    .replace('USERNAME = None', 'USERNAME = \'%s\'' % username))
            run_appcfg()
        finally:
            with open('github_file_system.py', 'w+') as f:
                f.write(contents)
    else:
        run_appcfg()
| bsd-3-clause |
yfauser/maxwindownotify | setup.py | 1 | 1252 | from setuptools import setup
import io
def read(*filenames, **kwargs):
    """Return the contents of every file in *filenames* joined into one string.

    Keyword arguments:
        encoding: text encoding used to open each file (default 'utf-8').
        sep: separator placed between file contents (default newline).
    """
    encoding = kwargs.get('encoding', 'utf-8')
    sep = kwargs.get('sep', '\n')

    def _slurp(name):
        with io.open(name, encoding=encoding) as handle:
            return handle.read()

    return sep.join(_slurp(name) for name in filenames)
long_description = read('README.rst')

setup(
    name='maxwindownotify',
    version='1.1.1',
    packages=['maxwindownotify'],
    # Both glob patterns must share one key: the original dict literal
    # repeated the 'maxwindownotify' key, and a duplicated dict key keeps
    # only the last value, silently dropping the '*' pattern.
    package_data={'maxwindownotify': ['*', 'notifier_modules/*']},
    url='http://github.com/yfauser/maxwindownotify',
    license='MIT',
    author='yfauser',
    author_email='yfauser@yahoo.de',
    description='This little script (daemon) will poll for the status of all window sensors known to a MAX Cube system and check for open windows',
    long_description=long_description,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: End Users/Desktop',
        'Topic :: Utilities',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2.7'],
    install_requires=['requests>=2.7.0', 'netaddr>=0.7.18'],
    entry_points={
        'console_scripts': ['maxwindownotify = maxwindownotify.maxwindownotify:main']
    }
)
| mit |
hazybluedot/indie_helper | util.py | 1 | 2278 | import requests
import bleach
import sys
# Python 2/3 compatibility shims: pick the right urlparse location and
# define the text/bytes type names used throughout this module.
# Use sys.version_info instead of comparing the sys.version string,
# which is a fragile lexicographic comparison.
if sys.version_info[0] < 3:
    from urlparse import urlparse
    text_type = unicode  # noqa: F821 -- name only exists on Python 2
    text_types = [str, unicode]  # noqa: F821
    binary_type = str
else:
    from urllib.parse import urlparse
    text_type = str
    text_types = [str]
    binary_type = bytes
def is_url(url):
    """Return True when *url* parses as an absolute http or https URL."""
    try:
        scheme = urlparse(url).scheme
    except TypeError:
        # Non-parseable input (wrong type) is simply not a URL.
        return False
    return scheme == 'http' or scheme == 'https'
def flatten(item):
    """Unwrap a single-element list or tuple; return anything else as-is.

    Note the deliberate exact-type check: subclasses of list/tuple are
    left untouched.
    """
    if type(item) in (list, tuple) and len(item) == 1:
        item = item[0]
    return item
# Tags allowed to survive sanitization: bleach's default whitelist plus
# 'p' and 'span', which mf2 entry content commonly contains.
ALLOWED_TAGS=bleach.ALLOWED_TAGS + ['p', 'span']
def clean(text):
    """Strip disallowed HTML from *text*, keeping only ALLOWED_TAGS."""
    sanitized = bleach.clean(text, tags=ALLOWED_TAGS)
    return sanitized
def clean_url(url):
    """Return *url* unchanged unless it uses the javascript: scheme.

    Dangerous javascript: URLs are replaced with an empty string. The
    check is case-insensitive and ignores leading whitespace, since the
    original exact-prefix test was trivially bypassed by e.g.
    'JavaScript:' or ' javascript:'.
    """
    if url.lstrip().lower().startswith('javascript:'):
        return ''
    return url
def bleachify(entry, key=None):
    """Recursively sanitize a parsed mf2 entry structure.

    Dict properties are cleaned per-key, lists element-wise, and text is
    run through clean(); values under a 'url' key get scheme filtering
    via clean_url() instead. Unhandled types are reported and collapse
    to None.
    """
    if key == 'url':
        # URLs need javascript:-scheme filtering, not HTML bleaching.
        bleached = bleachify(entry)
        return [clean_url(u) for u in bleached]
    if hasattr(entry, 'items'):
        return dict((prop, bleachify(value, prop)) for prop, value in entry.items())
    elif type(entry) is list:
        # Single-element lists are deliberately kept as lists -- that is
        # part of the mf2 defined format. Materialize a real list here:
        # the original returned map(bleachify, entry), which on Python 3
        # is a lazy map object and leaked out of the sanitizer (this file
        # explicitly supports Python 3 via its compat shims).
        return [bleachify(item) for item in entry]
    elif type(entry) in text_types:
        return clean(entry)
    else:
        print('unhandled type of entry: {0}'.format(type(entry)))
        return None
def follow_redirects(url, max_depth):
    """perform http HEAD on url, following any redirects up to max_depth.

    Returns a list of {'url': ..., 'status_code': ...} dicts, one entry
    per hop in redirect order.
    Raises requests.TooManyRedirects if max_depth is exceeded."""
    def _wrapped(url, depth, acc):
        if depth > max_depth:
            # BUG FIX: the original raised a bare, never-imported
            # TooManyRedirects name, which surfaced as a NameError at
            # runtime; use the exception class requests exports.
            raise requests.TooManyRedirects(
                'following redirects on {0} exceeded maximum depth of {1}'.format(url, max_depth))
        r = requests.head(url)
        acc.append({'url': url, 'status_code': r.status_code})
        if r.status_code in [301, 302]:
            return _wrapped(r.headers['Location'], depth + 1, acc)
        else:
            return acc
    return _wrapped(url, 0, [])
| gpl-3.0 |
jeenalee/servo | tests/wpt/web-platform-tests/tools/pywebsocket/src/example/internal_error_wsh.py | 465 | 1738 | # Copyright 2012, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from mod_pywebsocket import msgutil
def web_socket_do_extra_handshake(request):
    """Accept every handshake unconditionally; no extra validation is done."""
    pass
def web_socket_transfer_data(request):
    # Deliberately raise during data transfer so the server's
    # internal-error handling path can be exercised by tests.
    raise msgutil.BadOperationException('Intentional')
# vi:sts=4 sw=4 et
| mpl-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.